Dataset columns (type and observed value range):

  lang           string   2 distinct values
  license        string   13 distinct values
  stderr         string   0 to 343 characters
  commit         string   always 40 characters
  returncode     int64    0 to 128
  repos          string   6 to 87.7k characters
  new_contents   string   0 to 6.23M characters
  new_file       string   3 to 311 characters
  old_contents   string   0 to 6.23M characters
  message        string   6 to 9.1k characters
  old_file       string   3 to 311 characters
  subject        string   0 to 4k characters
  git_diff       string   0 to 6.31M characters
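Each record pairs the before and after contents of one file touched by a single commit with the commit metadata and a marked-up diff. As a rough sketch of the record shape, assuming nothing beyond the columns listed above, the JavaScript object below uses the values of the first example row (the RStudio label tweak) with the large text fields elided; the variable name exampleRow is illustrative only.

// Illustrative shape of one record; values are copied from the first row below,
// with the long text fields elided ("...") for readability.
var exampleRow = {
  lang: "Java",                                       // one of 2 language values
  license: "agpl-3.0",                                // one of 13 license identifiers
  stderr: "",                                         // empty for this row
  commit: "d00fe6b9b92631139c0009af012d28af4dba999c", // 40-character commit hash
  returncode: 0,
  repos: "JanMarvin/rstudio,jar1karp/rstudio,...",    // comma-separated list of repositories containing the file
  new_contents: "...",                                // full file contents after the commit (elided)
  new_file: "src/gwt/src/org/rstudio/studio/client/workbench/views/connections/ui/NewSparkConnectionDialog.java",
  old_contents: "...",                                // full file contents before the commit (elided)
  message: "tweak label",                             // full commit message
  old_file: "src/gwt/src/org/rstudio/studio/client/workbench/views/connections/ui/NewSparkConnectionDialog.java",
  subject: "tweak label",                             // commit subject; identical to the message in this row
  git_diff: "<ide><path>..."                          // diff with <ide>/<add>/<del> line markers (elided)
};

The raw example rows follow, with fields listed in the schema order above and the large code fields spilling over multiple lines.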
Java
agpl-3.0
d00fe6b9b92631139c0009af012d28af4dba999c
0
JanMarvin/rstudio,jar1karp/rstudio,jar1karp/rstudio,JanMarvin/rstudio,jrnold/rstudio,jar1karp/rstudio,JanMarvin/rstudio,jar1karp/rstudio,jar1karp/rstudio,jrnold/rstudio,jar1karp/rstudio,jar1karp/rstudio,jar1karp/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,jrnold/rstudio,JanMarvin/rstudio,jrnold/rstudio,jrnold/rstudio,jrnold/rstudio,jrnold/rstudio,JanMarvin/rstudio,jrnold/rstudio,jar1karp/rstudio,jrnold/rstudio
/* * NewSparkConnectionDialog.java * * Copyright (C) 2009-12 by RStudio, Inc. * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.workbench.views.connections.ui; import org.rstudio.core.client.js.JsObject; import org.rstudio.core.client.widget.FocusHelper; import org.rstudio.core.client.widget.ModalDialog; import org.rstudio.core.client.widget.OperationWithInput; import org.rstudio.core.client.widget.VerticalSpacer; import org.rstudio.studio.client.RStudioGinjector; import org.rstudio.studio.client.common.HelpLink; import org.rstudio.studio.client.workbench.model.ClientState; import org.rstudio.studio.client.workbench.model.Session; import org.rstudio.studio.client.workbench.model.helper.JSObjectStateValue; import org.rstudio.studio.client.workbench.views.connections.ConnectionsPresenter; import org.rstudio.studio.client.workbench.views.connections.model.HadoopVersion; import org.rstudio.studio.client.workbench.views.connections.model.NewSparkConnectionContext; import org.rstudio.studio.client.workbench.views.connections.model.SparkVersion; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.JsArray; import com.google.gwt.event.dom.client.ChangeEvent; import com.google.gwt.event.dom.client.ChangeHandler; import com.google.gwt.resources.client.ClientBundle; import com.google.gwt.resources.client.CssResource; import com.google.gwt.user.client.Command; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.Grid; import com.google.gwt.user.client.ui.Label; import com.google.gwt.user.client.ui.ListBox; import com.google.gwt.user.client.ui.VerticalPanel; import com.google.gwt.user.client.ui.Widget; import com.google.gwt.view.client.SelectionChangeEvent; import com.google.inject.Inject; public class NewSparkConnectionDialog extends ModalDialog<NewSparkConnectionDialog.Result> { // extends JavaScriptObject for easy serialization (as client state) public static class Result extends JavaScriptObject { protected Result() {} public static final Result create() { return create(null, false, "auto", null, null); } public static final native Result create(String master, boolean reconnect, String cores, String sparkVersion, String hadoopVersion) /*-{ return { "master": master, "reconnect": reconnect, "cores": cores, "spark_version": sparkVersion, "hadoop_version": hadoopVersion }; }-*/; public final native String getMaster() /*-{ return this.master; }-*/; public final native boolean getReconnect() /*-{ return this.reconnect; }-*/; public final native String getCores() /*-{ return this.cores; }-*/; public final native String getSparkVersion() /*-{ return this.spark_version; }-*/; public final native String getHadoopVersion() /*-{ return this.hadoop_version; }-*/; } @Inject private void initialize(Session session) { session_ = session; } public NewSparkConnectionDialog(NewSparkConnectionContext context, OperationWithInput<Result> operation) { super("Connect to Spark Cluster", operation); 
RStudioGinjector.INSTANCE.injectMembers(this); context_ = context; loadAndPersistClientState(); HelpLink helpLink = new HelpLink( "Using Spark with RStudio", "about_shiny", false); helpLink.addStyleName(RES.styles().helpLink()); addLeftWidget(helpLink); } @Override protected void onDialogShown() { super.onDialogShown(); FocusHelper.setFocusDeferred(master_); } @Override protected boolean validate(Result result) { return true; } @Override protected Widget createMainWidget() { VerticalPanel container = new VerticalPanel(); // master final Grid masterGrid = new Grid(2, 2); masterGrid.addStyleName(RES.styles().grid()); masterGrid.addStyleName(RES.styles().masterGrid()); Label masterLabel = new Label("Master:"); masterLabel.addStyleName(RES.styles().label()); masterGrid.setWidget(0, 0, masterLabel); master_ = new SparkMasterChooser(context_); master_.addStyleName(RES.styles().spanningInput()); if (lastResult_.getMaster() != null) master_.setSelection(lastResult_.getMaster()); masterGrid.setWidget(0, 1, master_); Label coresLabel = new Label("Local cores:"); masterGrid.setWidget(1, 0, coresLabel); cores_ = new ListBox(); cores_.addItem("Auto (" + context_.getCores() + ")", "auto"); for (int i = context_.getCores(); i>0; i--) { String value = String.valueOf(i); String item = value; cores_.addItem(item, value); } setValue(cores_, lastResult_.getCores()); masterGrid.setWidget(1, 1, cores_); container.add(masterGrid); // auto-reconnect autoReconnect_ = new CheckBox( "Reconnect automatically if connection is dropped"); autoReconnect_.setValue(lastResult_.getReconnect()); container.add(autoReconnect_); // manage visiblity of master UI components final Command manageMasterUI = new Command() { @Override public void execute() { boolean local = master_.isLocalMaster(master_.getSelection()); autoReconnect_.setVisible(!local); if (local) masterGrid.removeStyleName(RES.styles().remote()); else masterGrid.addStyleName(RES.styles().remote()); } }; manageMasterUI.execute(); master_.addSelectionChangeHandler(new SelectionChangeEvent.Handler() { @Override public void onSelectionChange(SelectionChangeEvent event) { manageMasterUI.execute(); } }); // versions Grid versionGrid = new Grid(2, 2); versionGrid.addStyleName(RES.styles().grid()); versionGrid.addStyleName(RES.styles().versionGrid()); Label sparkLabel = new Label("Spark version:"); sparkLabel.addStyleName(RES.styles().label()); versionGrid.setWidget(0, 0, sparkLabel); sparkVersion_ = new ListBox(); sparkVersion_.addStyleName(RES.styles().spanningInput()); final JsArray<SparkVersion> sparkVersions = context_.getSparkVersions(); for (int i = 0; i<sparkVersions.length(); i++) { String version = sparkVersions.get(i).getNumber(); sparkVersion_.addItem("Spark " + version, version); } if (lastResult_.getSparkVersion() != null) setValue(sparkVersion_, lastResult_.getSparkVersion()); else setValue(sparkVersion_, context_.getDefaultSparkVersion()); versionGrid.setWidget(0, 1, sparkVersion_); versionGrid.setWidget(1, 0, new Label("Hadoop version:")); hadoopVersion_ = new ListBox(); hadoopVersion_.addStyleName(RES.styles().spanningInput()); final Command updateHadoopVersionsCommand = new Command() { @Override public void execute() { String sparkVersionNumber = sparkVersion_.getSelectedValue(); for (int i = 0; i<sparkVersions.length(); i++) { SparkVersion sparkVersion = sparkVersions.get(i); if (sparkVersion.getNumber().equals(sparkVersionNumber)) { JsArray<HadoopVersion> hadoopVersions = sparkVersion.getHadoopVersions(); hadoopVersion_.clear(); for (int h = 0; 
h<hadoopVersions.length(); h++) { HadoopVersion hadoopVersion = hadoopVersions.get(h); String label = hadoopVersion.getLabel(); if (h == 0) label = label + " (Default)"; hadoopVersion_.addItem(label, hadoopVersion.getId()); } break; } } } }; updateHadoopVersionsCommand.execute(); if (lastResult_.getHadoopVersion() != null) setValue(hadoopVersion_, lastResult_.getHadoopVersion()); versionGrid.setWidget(1, 1, hadoopVersion_); sparkVersion_.addChangeHandler( commandChangeHandler(updateHadoopVersionsCommand)); container.add(versionGrid); // info regarding installation final InstallInfoPanel infoPanel = new InstallInfoPanel(); container.add(infoPanel); // update info panel state Command updateInfoPanel = new Command() { @Override public void execute() { SparkVersion sparkVersion = context_.getSparkVersions() .get(sparkVersion_.getSelectedIndex()); HadoopVersion hadoopVersion = sparkVersion.getHadoopVersions() .get(hadoopVersion_.getSelectedIndex()); boolean remote = !master_.isLocalMaster(master_.getSelection()); infoPanel.setVisible(!hadoopVersion.isInstalled()); if (!hadoopVersion.isInstalled()) infoPanel.update(sparkVersion, hadoopVersion, remote); } }; updateInfoPanel.execute(); sparkVersion_.addChangeHandler(commandChangeHandler(updateInfoPanel)); hadoopVersion_.addChangeHandler(commandChangeHandler(updateInfoPanel)); // connection code container.add(new VerticalSpacer("20px")); //container.add(new Label("Code")); //container.add(new Label("Insert into:")); return container; } @Override protected Result collectInput() { // collect the result Result result = Result.create( master_.getSelection(), autoReconnect_.getValue(), cores_.getSelectedValue(), sparkVersion_.getSelectedValue(), hadoopVersion_.getSelectedValue()); // update client state lastResult_ = result; // return result return result; } @Override protected void onUnload() { super.onUnload(); session_.persistClientState(); } private ChangeHandler commandChangeHandler(final Command command) { return new ChangeHandler() { @Override public void onChange(ChangeEvent event) { command.execute(); } }; } private boolean setValue(ListBox listBox, String value) { for (int i = 0; i < listBox.getItemCount(); i++) if (value.equals(listBox.getValue(i))) { listBox.setSelectedIndex(i); return true; } return false; } private class NewSparkConnectionClientState extends JSObjectStateValue { public NewSparkConnectionClientState() { super(ConnectionsPresenter.MODULE_CONNECTIONS, "last-spark-connection-dialog-result", ClientState.PERSISTENT, session_.getSessionInfo().getClientState(), false); } @Override protected void onInit(JsObject value) { if (value != null) lastResult_ = value.cast(); else lastResult_ = Result.create(); } @Override protected JsObject getValue() { return lastResult_.cast(); } } private final void loadAndPersistClientState() { if (clientStateValue_ == null) clientStateValue_ = new NewSparkConnectionClientState(); } private static NewSparkConnectionClientState clientStateValue_; private static Result lastResult_ = Result.create(); public interface Styles extends CssResource { String label(); String grid(); String versionGrid(); String masterGrid(); String remote(); String helpLink(); String spanningInput(); String installCheckBox(); String infoPanel(); } public interface Resources extends ClientBundle { @Source("NewSparkConnectionDialog.css") Styles styles(); } public static Resources RES = GWT.create(Resources.class); static { RES.styles().ensureInjected(); } private final NewSparkConnectionContext context_; private SparkMasterChooser 
master_; private CheckBox autoReconnect_; private ListBox cores_; private ListBox sparkVersion_; private ListBox hadoopVersion_; private Session session_; }
src/gwt/src/org/rstudio/studio/client/workbench/views/connections/ui/NewSparkConnectionDialog.java
/* * NewSparkConnectionDialog.java * * Copyright (C) 2009-12 by RStudio, Inc. * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ package org.rstudio.studio.client.workbench.views.connections.ui; import org.rstudio.core.client.js.JsObject; import org.rstudio.core.client.widget.FocusHelper; import org.rstudio.core.client.widget.ModalDialog; import org.rstudio.core.client.widget.OperationWithInput; import org.rstudio.core.client.widget.VerticalSpacer; import org.rstudio.studio.client.RStudioGinjector; import org.rstudio.studio.client.common.HelpLink; import org.rstudio.studio.client.workbench.model.ClientState; import org.rstudio.studio.client.workbench.model.Session; import org.rstudio.studio.client.workbench.model.helper.JSObjectStateValue; import org.rstudio.studio.client.workbench.views.connections.ConnectionsPresenter; import org.rstudio.studio.client.workbench.views.connections.model.HadoopVersion; import org.rstudio.studio.client.workbench.views.connections.model.NewSparkConnectionContext; import org.rstudio.studio.client.workbench.views.connections.model.SparkVersion; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.JsArray; import com.google.gwt.event.dom.client.ChangeEvent; import com.google.gwt.event.dom.client.ChangeHandler; import com.google.gwt.resources.client.ClientBundle; import com.google.gwt.resources.client.CssResource; import com.google.gwt.user.client.Command; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.Grid; import com.google.gwt.user.client.ui.Label; import com.google.gwt.user.client.ui.ListBox; import com.google.gwt.user.client.ui.VerticalPanel; import com.google.gwt.user.client.ui.Widget; import com.google.gwt.view.client.SelectionChangeEvent; import com.google.inject.Inject; public class NewSparkConnectionDialog extends ModalDialog<NewSparkConnectionDialog.Result> { // extends JavaScriptObject for easy serialization (as client state) public static class Result extends JavaScriptObject { protected Result() {} public static final Result create() { return create(null, false, "auto", null, null); } public static final native Result create(String master, boolean reconnect, String cores, String sparkVersion, String hadoopVersion) /*-{ return { "master": master, "reconnect": reconnect, "cores": cores, "spark_version": sparkVersion, "hadoop_version": hadoopVersion }; }-*/; public final native String getMaster() /*-{ return this.master; }-*/; public final native boolean getReconnect() /*-{ return this.reconnect; }-*/; public final native String getCores() /*-{ return this.cores; }-*/; public final native String getSparkVersion() /*-{ return this.spark_version; }-*/; public final native String getHadoopVersion() /*-{ return this.hadoop_version; }-*/; } @Inject private void initialize(Session session) { session_ = session; } public NewSparkConnectionDialog(NewSparkConnectionContext context, OperationWithInput<Result> operation) { super("Connect to Spark Cluster", operation); 
RStudioGinjector.INSTANCE.injectMembers(this); context_ = context; loadAndPersistClientState(); HelpLink helpLink = new HelpLink( "Using Spark with RStudio", "about_shiny", false); helpLink.addStyleName(RES.styles().helpLink()); addLeftWidget(helpLink); } @Override protected void onDialogShown() { super.onDialogShown(); FocusHelper.setFocusDeferred(master_); } @Override protected boolean validate(Result result) { return true; } @Override protected Widget createMainWidget() { VerticalPanel container = new VerticalPanel(); // master final Grid masterGrid = new Grid(2, 2); masterGrid.addStyleName(RES.styles().grid()); masterGrid.addStyleName(RES.styles().masterGrid()); Label masterLabel = new Label("Master node:"); masterLabel.addStyleName(RES.styles().label()); masterGrid.setWidget(0, 0, masterLabel); master_ = new SparkMasterChooser(context_); master_.addStyleName(RES.styles().spanningInput()); if (lastResult_.getMaster() != null) master_.setSelection(lastResult_.getMaster()); masterGrid.setWidget(0, 1, master_); Label coresLabel = new Label("Local cores:"); masterGrid.setWidget(1, 0, coresLabel); cores_ = new ListBox(); cores_.addItem("Auto (" + context_.getCores() + ")", "auto"); for (int i = context_.getCores(); i>0; i--) { String value = String.valueOf(i); String item = value; cores_.addItem(item, value); } setValue(cores_, lastResult_.getCores()); masterGrid.setWidget(1, 1, cores_); container.add(masterGrid); // auto-reconnect autoReconnect_ = new CheckBox( "Reconnect automatically if connection is dropped"); autoReconnect_.setValue(lastResult_.getReconnect()); container.add(autoReconnect_); // manage visiblity of master UI components final Command manageMasterUI = new Command() { @Override public void execute() { boolean local = master_.isLocalMaster(master_.getSelection()); autoReconnect_.setVisible(!local); if (local) masterGrid.removeStyleName(RES.styles().remote()); else masterGrid.addStyleName(RES.styles().remote()); } }; manageMasterUI.execute(); master_.addSelectionChangeHandler(new SelectionChangeEvent.Handler() { @Override public void onSelectionChange(SelectionChangeEvent event) { manageMasterUI.execute(); } }); // versions Grid versionGrid = new Grid(2, 2); versionGrid.addStyleName(RES.styles().grid()); versionGrid.addStyleName(RES.styles().versionGrid()); Label sparkLabel = new Label("Spark version:"); sparkLabel.addStyleName(RES.styles().label()); versionGrid.setWidget(0, 0, sparkLabel); sparkVersion_ = new ListBox(); sparkVersion_.addStyleName(RES.styles().spanningInput()); final JsArray<SparkVersion> sparkVersions = context_.getSparkVersions(); for (int i = 0; i<sparkVersions.length(); i++) { String version = sparkVersions.get(i).getNumber(); sparkVersion_.addItem("Spark " + version, version); } if (lastResult_.getSparkVersion() != null) setValue(sparkVersion_, lastResult_.getSparkVersion()); else setValue(sparkVersion_, context_.getDefaultSparkVersion()); versionGrid.setWidget(0, 1, sparkVersion_); versionGrid.setWidget(1, 0, new Label("Hadoop version:")); hadoopVersion_ = new ListBox(); hadoopVersion_.addStyleName(RES.styles().spanningInput()); final Command updateHadoopVersionsCommand = new Command() { @Override public void execute() { String sparkVersionNumber = sparkVersion_.getSelectedValue(); for (int i = 0; i<sparkVersions.length(); i++) { SparkVersion sparkVersion = sparkVersions.get(i); if (sparkVersion.getNumber().equals(sparkVersionNumber)) { JsArray<HadoopVersion> hadoopVersions = sparkVersion.getHadoopVersions(); hadoopVersion_.clear(); for (int h = 0; 
h<hadoopVersions.length(); h++) { HadoopVersion hadoopVersion = hadoopVersions.get(h); String label = hadoopVersion.getLabel(); if (h == 0) label = label + " (Default)"; hadoopVersion_.addItem(label, hadoopVersion.getId()); } break; } } } }; updateHadoopVersionsCommand.execute(); if (lastResult_.getHadoopVersion() != null) setValue(hadoopVersion_, lastResult_.getHadoopVersion()); versionGrid.setWidget(1, 1, hadoopVersion_); sparkVersion_.addChangeHandler( commandChangeHandler(updateHadoopVersionsCommand)); container.add(versionGrid); // info regarding installation final InstallInfoPanel infoPanel = new InstallInfoPanel(); container.add(infoPanel); // update info panel state Command updateInfoPanel = new Command() { @Override public void execute() { SparkVersion sparkVersion = context_.getSparkVersions() .get(sparkVersion_.getSelectedIndex()); HadoopVersion hadoopVersion = sparkVersion.getHadoopVersions() .get(hadoopVersion_.getSelectedIndex()); boolean remote = !master_.isLocalMaster(master_.getSelection()); infoPanel.setVisible(!hadoopVersion.isInstalled()); if (!hadoopVersion.isInstalled()) infoPanel.update(sparkVersion, hadoopVersion, remote); } }; updateInfoPanel.execute(); sparkVersion_.addChangeHandler(commandChangeHandler(updateInfoPanel)); hadoopVersion_.addChangeHandler(commandChangeHandler(updateInfoPanel)); // connection code container.add(new VerticalSpacer("20px")); //container.add(new Label("Code")); //container.add(new Label("Insert into:")); return container; } @Override protected Result collectInput() { // collect the result Result result = Result.create( master_.getSelection(), autoReconnect_.getValue(), cores_.getSelectedValue(), sparkVersion_.getSelectedValue(), hadoopVersion_.getSelectedValue()); // update client state lastResult_ = result; // return result return result; } @Override protected void onUnload() { super.onUnload(); session_.persistClientState(); } private ChangeHandler commandChangeHandler(final Command command) { return new ChangeHandler() { @Override public void onChange(ChangeEvent event) { command.execute(); } }; } private boolean setValue(ListBox listBox, String value) { for (int i = 0; i < listBox.getItemCount(); i++) if (value.equals(listBox.getValue(i))) { listBox.setSelectedIndex(i); return true; } return false; } private class NewSparkConnectionClientState extends JSObjectStateValue { public NewSparkConnectionClientState() { super(ConnectionsPresenter.MODULE_CONNECTIONS, "last-spark-connection-dialog-result", ClientState.PERSISTENT, session_.getSessionInfo().getClientState(), false); } @Override protected void onInit(JsObject value) { if (value != null) lastResult_ = value.cast(); else lastResult_ = Result.create(); } @Override protected JsObject getValue() { return lastResult_.cast(); } } private final void loadAndPersistClientState() { if (clientStateValue_ == null) clientStateValue_ = new NewSparkConnectionClientState(); } private static NewSparkConnectionClientState clientStateValue_; private static Result lastResult_ = Result.create(); public interface Styles extends CssResource { String label(); String grid(); String versionGrid(); String masterGrid(); String remote(); String helpLink(); String spanningInput(); String installCheckBox(); String infoPanel(); } public interface Resources extends ClientBundle { @Source("NewSparkConnectionDialog.css") Styles styles(); } public static Resources RES = GWT.create(Resources.class); static { RES.styles().ensureInjected(); } private final NewSparkConnectionContext context_; private SparkMasterChooser 
master_; private CheckBox autoReconnect_; private ListBox cores_; private ListBox sparkVersion_; private ListBox hadoopVersion_; private Session session_; }
tweak label
src/gwt/src/org/rstudio/studio/client/workbench/views/connections/ui/NewSparkConnectionDialog.java
tweak label
<ide><path>rc/gwt/src/org/rstudio/studio/client/workbench/views/connections/ui/NewSparkConnectionDialog.java <ide> final Grid masterGrid = new Grid(2, 2); <ide> masterGrid.addStyleName(RES.styles().grid()); <ide> masterGrid.addStyleName(RES.styles().masterGrid()); <del> Label masterLabel = new Label("Master node:"); <add> Label masterLabel = new Label("Master:"); <ide> masterLabel.addStyleName(RES.styles().label()); <ide> masterGrid.setWidget(0, 0, masterLabel); <ide> master_ = new SparkMasterChooser(context_);
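The git_diff field does not use the usual "+", "-", and space prefixes; each line is instead tagged with <ide> (unchanged context), <add> (added) or <del> (removed), and <ide><path> introduces the file path, as the diffs in this dump show. The helper below is a sketch of how one might map those markers back to conventional prefixes; the function name toUnifiedDiff and the exact output format are assumptions, not part of any dataset tooling.

// Hypothetical helper: rewrite the <ide>/<add>/<del> markers observed in
// git_diff as conventional unified-diff prefixes. The marker meanings are
// inferred from the example rows, so treat this as illustrative only.
function toUnifiedDiff(gitDiff) {
  return gitDiff
    .replace(/<ide><path>/g, "\n--- ")  // file path header (must run before the <ide> rule)
    .replace(/<add>/g, "\n+")           // line added by the commit
    .replace(/<del>/g, "\n-")           // line removed by the commit
    .replace(/<ide>/g, "\n ")           // unchanged context line
    .trim();
}

Applied to the diff above, this reproduces the familiar view of the label change from "Master node:" to "Master:".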
JavaScript
mit
cb2b777f97f47a5d0bcb81ddd9e29428d2abb498
0
runkalicious/lullaby
// Copyright (c) 2013 Matt Runkle. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. function l_isPlaying() { // Google adds the class 'playing' to the play/pause button // when the player is active. The class is removed when paused // or not active. if ( $('#player').find('button.playing').length > 0 ) return true; return false; } function l_pause() { console.log("Pausing Google Play"); // Get pause button var pauseButton = $('#player').find('button.playing')[0]; $(pauseButton).click(); }
connectors/googlemusic.js
// Copyright (c) 2013 Matt Runkle. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. function l_isPlaying() { // Google adds the class 'playing' to the play/pause button // when the player is active. The class is removed when paused // or not active. if ( $('#player').find('button.playing') ) return true; return false; } function l_pause() { console.log("Pausing Google Play"); // Get pause button var pauseButton = $('#player').find('button.playing')[0]; $(pauseButton).click(); }
fix truthiness return value
connectors/googlemusic.js
fix truthiness return value
<ide><path>onnectors/googlemusic.js <ide> // Google adds the class 'playing' to the play/pause button <ide> // when the player is active. The class is removed when paused <ide> // or not active. <del> if ( $('#player').find('button.playing') ) <add> if ( $('#player').find('button.playing').length > 0 ) <ide> return true; <ide> <ide> return false;
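The second example row records a classic jQuery pitfall: $('#player').find('button.playing') returns a jQuery object, and any object is truthy in JavaScript even when it matched nothing, so the old l_isPlaying() always returned true. The commit switches the test to .length > 0. A minimal sketch of the difference, assuming jQuery is loaded on the page as $:

// Sketch of the truthiness pitfall fixed in the commit above (assumes jQuery is available).
var matches = $('#player').find('button.playing');
console.log(Boolean(matches));    // always true: a jQuery object is truthy even with zero matches
console.log(matches.length > 0);  // true only when a button with class "playing" actually exists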
Java
apache-2.0
3d1759c6773f3308f7e9dddbada94edc0a1c14c7
0
jangorecki/h2o-3,YzPaul3/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,spennihana/h2o-3,spennihana/h2o-3,mathemage/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,YzPaul3/h2o-3,michalkurka/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,h2oai/h2o-3,mathemage/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,YzPaul3/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,michalkurka/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,mathemage/h2o-3
package water.fvec; import water.Futures; import jsr166y.CountedCompleter; import jsr166y.ForkJoinTask; import water.*; import water.H2O.H2OCallback; import water.H2O.H2OCountedCompleter; import water.nbhm.NonBlockingHashMap; import water.parser.Categorical; import water.parser.BufferedString; import water.util.ArrayUtils; import java.util.Arrays; /** A class to compute the rollup stats. These are computed lazily, thrown * away if the Vec is written into, and then recomputed lazily. Error to ask * for them if the Vec is actively being written into. It is common for all * cores to ask for the same Vec rollup at once, so it is crucial that it be * computed once across the cluster. * * Rollups are kept in the K/V store, which also controls who manages the * rollup work and final results. Winner of a DKV CAS/PutIfMatch race gets to * manage the M/R job computing the rollups. Losers block for the same * rollup. Remote requests *always* forward to the Rollup Key's master. */ final class RollupStats extends Iced { /** The count of missing elements.... or -2 if we have active writers and no * rollup info can be computed (because the vector is being rapidly * modified!), or -1 if rollups have not been computed since the last * modification. */ volatile transient ForkJoinTask _tsk; // Computed in 1st pass volatile long _naCnt; //count(!isNA(X)) double _mean, _sigma; //sum(X) and sum(X^2) for non-NA values long _rows, //count(X) for non-NA values _nzCnt, //count(X!=0) for non-NA values _size, //byte size _pinfs, //count(+inf) _ninfs; //count(-inf) boolean _isInt=true; double[] _mins, _maxs; long _checksum; // Expensive histogram & percentiles // Computed in a 2nd pass, on-demand, by calling computeHisto private static final int MAX_SIZE = 1000; // Standard bin count; categoricals can have more bins // the choice of MAX_SIZE being a power of 10 (rather than 1024) just aligns-to-the-grid of the common input of fixed decimal // precision numbers. It is still an estimate and makes no difference mathematically. It just gives tidier output in some // simple cases without penalty. 
volatile long[] _bins; // Approximate data value closest to the Xth percentile double[] _pctiles; public boolean hasHisto(){return _bins != null;} // Check for: Vector is mutating and rollups cannot be asked for boolean isMutating() { return _naCnt==-2; } // Check for: Rollups currently being computed private boolean isComputing() { return _naCnt==-1; } // Check for: Rollups available private boolean isReady() { return _naCnt>=0; } private RollupStats(int mode) { _mins = new double[5]; _maxs = new double[5]; Arrays.fill(_mins, Double.MAX_VALUE); Arrays.fill(_maxs,-Double.MAX_VALUE); _pctiles = new double[Vec.PERCENTILES.length]; Arrays.fill(_pctiles, Double.NaN); _mean = _sigma = 0; _size = 0; _naCnt = mode; } private static RollupStats makeComputing() { return new RollupStats(-1); } static RollupStats makeMutating () { return new RollupStats(-2); } private RollupStats map( Chunk c ) { _size = c.byteSize(); boolean isUUID = c._vec.isUUID(); boolean isString = c._vec.isString(); BufferedString tmpStr = new BufferedString(); if (isString) _isInt = false; // Checksum support long checksum = 0; long start = c._start; long l = 81985529216486895L; // Check for popular easy cases: All Constant double min=c.min(), max=c.max(); if( min==max ) { // All constant or all NaN double d = min; // It's the min, it's the max, it's the alpha and omega _checksum = (c.hasFloat()?Double.doubleToRawLongBits(d):(long)d)*c._len; Arrays.fill(_mins, d); Arrays.fill(_maxs, d); if( d == Double.POSITIVE_INFINITY) _pinfs++; else if( d == Double.NEGATIVE_INFINITY) _ninfs++; else { if( Double.isNaN(d)) _naCnt=c._len; else if( d != 0 ) _nzCnt=c._len; _mean = d; _rows=c._len; } _isInt = ((long)d) == d; _sigma = 0; // No variance for constants return this; } //all const NaNs if ((c instanceof C0DChunk && c.isNA_impl(0))) { _sigma=0; //count of non-NAs * variance of non-NAs _mean = 0; //sum of non-NAs (will get turned into mean) _naCnt=c._len; _nzCnt=0; return this; } // Check for popular easy cases: Boolean, possibly sparse, possibly NaN if( min==0 && max==1 ) { int zs = c._len-c.sparseLenZero(); // Easy zeros int nans = 0; // Hard-count sparse-but-zero (weird case of setting a zero over a non-zero) for( int i=c.nextNZ(-1); i< c._len; i=c.nextNZ(i) ) if( c.isNA(i) ) nans++; else if( c.at8(i)==0 ) zs++; int os = c._len-zs-nans; // Ones _nzCnt += os; _naCnt += nans; for( int i=0; i<Math.min(_mins.length,zs); i++ ) { min(0); max(0); } for( int i=0; i<Math.min(_mins.length,os); i++ ) { min(1); max(1); } _rows += zs+os; _mean = (double)os/_rows; _sigma = zs*(0.0-_mean)*(0.0-_mean) + os*(1.0-_mean)*(1.0-_mean); return this; } // Walk the non-zeros if( isUUID ) { // UUID columns do not compute min/max/mean/sigma for( int i=c.nextNZ(-1); i< c._len; i=c.nextNZ(i) ) { if( c.isNA(i) ) _naCnt++; else { long lo = c.at16l(i), hi = c.at16h(i); if (lo != 0 || hi != 0) _nzCnt++; l = lo ^ 37*hi; } if(l != 0) // ignore 0s in checksum to be consistent with sparse chunks checksum ^= (17 * (start+i)) ^ 23*l; } } else if( isString ) { // String columns do not compute min/max/mean/sigma for (int i = c.nextNZ(-1); i < c._len; i = c.nextNZ(i)) { if (c.isNA(i)) _naCnt++; else { _nzCnt++; l = c.atStr(tmpStr, i).hashCode(); } if (l != 0) // ignore 0s in checksum to be consistent with sparse chunks checksum ^= (17 * (start + i)) ^ 23 * l; } } else { // Work off all numeric rows, or only the nonzeros for sparse if (c instanceof C1Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C1Chunk) c, start, checksum); else if (c instanceof C1SChunk) 
checksum=new RollupStatsHelpers(this).numericChunkRollup((C1SChunk) c, start, checksum); else if (c instanceof C1NChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C1NChunk) c, start, checksum); else if (c instanceof C2Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C2Chunk) c, start, checksum); else if (c instanceof C2SChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C2SChunk) c, start, checksum); else if (c instanceof C4SChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C4SChunk) c, start, checksum); else if (c instanceof C4FChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C4FChunk) c, start, checksum); else if (c instanceof C4Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C4Chunk) c, start, checksum); else if (c instanceof C8Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C8Chunk) c, start, checksum); else if (c instanceof C8DChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C8DChunk) c, start, checksum); else checksum=new RollupStatsHelpers(this).numericChunkRollup(c, start, checksum); // special case for sparse chunks // we need to merge with the mean (0) and variance (0) of the zeros count of 0s of the sparse chunk - which were skipped above // _rows is the count of non-zero rows // _mean is the mean of non-zero rows // _sigma is the mean of non-zero rows // handle the zeros if( c.isSparseZero() ) { int zeros = c._len - c.sparseLenZero(); if (zeros > 0) { for( int i=0; i<Math.min(_mins.length,zeros); i++ ) { min(0); max(0); } double zeromean = 0; double zeroM2 = 0; double delta = _mean - zeromean; _mean = (_mean * _rows + zeromean * zeros) / (_rows + zeros); _sigma += zeroM2 + delta*delta * _rows * zeros / (_rows + zeros); //this is the variance*(N-1), will do sqrt(_sigma/(N-1)) later in postGlobal _rows += zeros; } } } _checksum = checksum; // UUID and String columns do not compute min/max/mean/sigma if( isUUID || isString) { Arrays.fill(_mins,Double.NaN); Arrays.fill(_maxs,Double.NaN); _mean = _sigma = Double.NaN; } return this; } private void reduce( RollupStats rs ) { for( double d : rs._mins ) if (!Double.isNaN(d)) min(d); for( double d : rs._maxs ) if (!Double.isNaN(d)) max(d); _naCnt += rs._naCnt; _nzCnt += rs._nzCnt; _pinfs += rs._pinfs; _ninfs += rs._ninfs; if (_rows == 0) { _mean = rs._mean; _sigma = rs._sigma; } else if(rs._rows != 0){ double delta = _mean - rs._mean; _mean = (_mean * _rows + rs._mean * rs._rows) / (_rows + rs._rows); _sigma += rs._sigma + delta*delta * _rows*rs._rows / (_rows+rs._rows); } _rows += rs._rows; _size += rs._size; _isInt &= rs._isInt; _checksum ^= rs._checksum; } double min( double d ) { assert(!Double.isNaN(d)); for( int i=0; i<_mins.length; i++ ) if( d < _mins[i] ) { double tmp = _mins[i]; _mins[i] = d; d = tmp; } return _mins[_mins.length-1]; } double max( double d ) { assert(!Double.isNaN(d)); for( int i=0; i<_maxs.length; i++ ) if( d > _maxs[i] ) { double tmp = _maxs[i]; _maxs[i] = d; d = tmp; } return _maxs[_maxs.length-1]; } private static class Roll extends MRTask<Roll> { final Key _rskey; RollupStats _rs; Roll( H2OCountedCompleter cmp, Key rskey ) { super(cmp); _rskey=rskey; } @Override public void map( Chunk c ) { _rs = new RollupStats(0).map(c); } @Override public void reduce( Roll roll ) { _rs.reduce(roll._rs); } @Override public void postGlobal() { if( _rs == null ) _rs = new RollupStats(0); else { _rs._sigma = Math.sqrt(_rs._sigma/(_rs._rows-1)); if (_rs._rows == 1) _rs._sigma = 0; if (_rs._rows < 5) 
for (int i=0; i<5-_rs._rows; i++) { // Fix PUBDEV-150 for files under 5 rows _rs._maxs[4-i] = Double.NaN; _rs._mins[4-i] = Double.NaN; } } // mean & sigma not allowed on more than 2 classes; for 2 classes the assumption is that it's true/false if( _fr.anyVec().isCategorical() && _fr.anyVec().domain().length > 2 ) _rs._mean = _rs._sigma = Double.NaN; } // Just toooo common to report always. Drowning in multi-megabyte log file writes. @Override public boolean logVerbose() { return false; } /** * Added to avoid deadlocks when running from idea in debug mode (evaluating toSgtring on mr task causes rollups to be computed) * @return */ @Override public String toString(){return "Roll(" + _fr.anyVec()._key +")";} } static void start(final Vec vec, Futures fs, boolean computeHisto) { if( DKV.get(vec._key)== null ) throw new RuntimeException("Rollups not possible, because Vec was deleted: "+vec._key); if( vec.isString() ) computeHisto = false; // No histogram for string columns final Key rskey = vec.rollupStatsKey(); RollupStats rs = getOrNull(vec,rskey); if(rs == null || (computeHisto && !rs.hasHisto())) fs.add(new RPC(rskey.home_node(),new ComputeRollupsTask(vec,computeHisto)).addCompleter(new H2OCallback() { @Override public void callback(H2OCountedCompleter h2OCountedCompleter) { DKV.get(rskey); // fetch new results via DKV to enable caching of the results. } }).call()); } private static NonBlockingHashMap<Key,RPC> _pendingRollups = new NonBlockingHashMap<>(); static RollupStats get(Vec vec, boolean computeHisto) { if( DKV.get(vec._key)== null ) throw new RuntimeException("Rollups not possible, because Vec was deleted: "+vec._key); if( vec.isString() ) computeHisto = false; // No histogram for string columns final Key rskey = vec.rollupStatsKey(); RollupStats rs = DKV.getGet(rskey); while(rs == null || (!rs.isReady() || (computeHisto && !rs.hasHisto()))){ if(rs != null && rs.isMutating()) throw new IllegalArgumentException("Can not compute rollup stats while vec is being modified. (1)"); // 1. compute only once try { RPC rpcNew = new RPC(rskey.home_node(),new ComputeRollupsTask(vec, computeHisto)); RPC rpcOld = _pendingRollups.putIfAbsent(rskey, rpcNew); if(rpcOld == null) { // no prior pending task, need to send this one rpcNew.call().get(); _pendingRollups.remove(rskey); } else // rollups computation is already in progress, wait for it to finish rpcOld.get(); } catch( Throwable t ) { System.err.println("Remote rollups failed with an exception, wrapping and rethrowing: "+t); throw new RuntimeException(t); } // 2. fetch - done in two steps to go through standard DKV.get and enable local caching rs = DKV.getGet(rskey); } return rs; } // Allow a bunch of rollups to run in parallel. If Futures is passed in, run // the rollup in the background and do not return. static RollupStats get(Vec vec) { return get(vec,false);} // Fetch if present, but do not compute static RollupStats getOrNull(Vec vec, final Key rskey ) { Value val = DKV.get(rskey); if( val == null ) // No rollup stats present? return vec.length() > 0 ? /*not computed*/null : /*empty vec*/new RollupStats(0); RollupStats rs = val.get(RollupStats.class); return rs.isReady() ? 
rs : null; } // Histogram base & stride double h_base() { return _mins[0]; } double h_stride() { return h_stride(_bins.length); } private double h_stride(int nbins) { return (_maxs[0]-_mins[0]+(_isInt?1:0))/nbins; } // Compute expensive histogram private static class Histo extends MRTask<Histo> { final double _base, _stride; // Inputs final int _nbins; // Inputs long[] _bins; // Outputs Histo( H2OCountedCompleter cmp, RollupStats rs, int nbins ) { super(cmp);_base = rs.h_base(); _stride = rs.h_stride(nbins); _nbins = nbins; } @Override public void map( Chunk c ) { _bins = new long[_nbins]; for( int i=c.nextNZ(-1); i< c._len; i=c.nextNZ(i) ) { double d = c.atd(i); if( !Double.isNaN(d) ) _bins[idx(d)]++; } // Sparse? We skipped all the zeros; do them now if( c.isSparseZero() ) _bins[idx(0.0)] += (c._len - c.sparseLenZero()); } private int idx( double d ) { int idx = (int)((d-_base)/_stride); return Math.min(idx,_bins.length-1); } @Override public void reduce( Histo h ) { ArrayUtils.add(_bins,h._bins); } // Just toooo common to report always. Drowning in multi-megabyte log file writes. @Override public boolean logVerbose() { return false; } } // Task to compute rollups on its homenode if needed. // Only computes the rollups, does not fetch them, caller should fetch them via DKV store (to preserve caching). // Only comutes the rollups if needed (i.e. are null or do not have histo and histo is required) // If rs computation is already in progress, it will wait for it to finish. // Throws IAE if the Vec is being modified (or removed) while this task is in progress. static final class ComputeRollupsTask extends DTask<ComputeRollupsTask>{ final Key _vecKey; final Key _rsKey; final boolean _computeHisto; public ComputeRollupsTask(Vec v, boolean computeHisto){ super((byte)(Thread.currentThread() instanceof H2O.FJWThr ? currThrPriority()+1 : H2O.MIN_HI_PRIORITY-3)); _vecKey = v._key; _rsKey = v.rollupStatsKey(); _computeHisto = computeHisto; } private Value makeComputing(){ RollupStats newRs = RollupStats.makeComputing(); CountedCompleter cc = getCompleter(); // should be null or RPCCall if(cc != null) assert cc.getCompleter() == null; newRs._tsk = cc == null?this:cc; return new Value(_rsKey,newRs); } private void installResponse(Value nnn, RollupStats rs) { Futures fs = new Futures(); Value old = DKV.DputIfMatch(_rsKey, new Value(_rsKey, rs), nnn, fs); assert rs.isReady(); if(old != nnn) throw new IllegalArgumentException("Can not compute rollup stats while vec is being modified. (2)"); fs.blockForPending(); } @Override public void compute2() { assert _rsKey.home(); final Vec vec = DKV.getGet(_vecKey); while(true) { Value v = DKV.get(_rsKey); RollupStats rs = (v == null) ? null : v.<RollupStats>get(); // Fetched current rs from the DKV, rs can be: // a) computed // a.1) has histo or histo not required => do nothing // a.2) no histo and histo is required => only compute histo // b) computing => wait for the task computing it to finish and check again // c) mutating => throw IAE // d) null => compute new rollups if (rs != null) { if (rs.isReady()) { if (_computeHisto && !rs.hasHisto()) { // a.2 => compute rollups CountedCompleter cc = getCompleter(); // should be null or RPCCall if(cc != null) assert cc.getCompleter() == null; // note: if cc == null then onExceptionalCompletion tasks waiting on this may be woken up before exception handling iff exception is thrown. 
Value nnn = makeComputing(); Futures fs = new Futures(); Value oldv = DKV.DputIfMatch(_rsKey, nnn, v, fs); fs.blockForPending(); if(oldv == v){ // got the lock computeHisto(rs, vec, nnn); break; } // else someone else is modifying the rollups => try again } else break; // a.1 => do nothing } else if (rs.isComputing()) { // b) => wait for current computation to finish rs._tsk.join(); } else if(rs.isMutating()) // c) => throw IAE throw new IllegalArgumentException("Can not compute rollup stats while vec is being modified. (3)"); } else { // d) => compute the rollups final Value nnn = makeComputing(); Futures fs = new Futures(); Value oldv = DKV.DputIfMatch(_rsKey, nnn, v, fs); fs.blockForPending(); if(oldv == v){ // got the lock, compute the rollups Roll r = new Roll(null,_rsKey).doAll(vec); // computed the stats, now compute histo if needed and install the response and quit r._rs._checksum ^= vec.length(); if(_computeHisto) computeHisto(r._rs, vec, nnn); else installResponse(nnn, r._rs); break; } // else someone else is modifying the rollups => try again } } tryComplete(); } final void computeHisto(final RollupStats rs, Vec vec, final Value nnn) { // All NAs or non-math; histogram has zero bins if (rs._naCnt == vec.length() || vec.isUUID()) { rs._bins = new long[0]; installResponse(nnn, rs); return; } // Constant: use a single bin double span = rs._maxs[0] - rs._mins[0]; final long rows = vec.length() - rs._naCnt; assert rows > 0 : "rows = " + rows + ", vec.len() = " + vec.length() + ", naCnt = " + rs._naCnt; if (span == 0) { rs._bins = new long[]{rows}; installResponse(nnn, rs); return; } // Number of bins: MAX_SIZE by default. For integers, bins for each unique int // - unless the count gets too high; allow a very high count for categoricals. int nbins = MAX_SIZE; if (rs._isInt && span < Integer.MAX_VALUE) { nbins = (int) span + 1; // 1 bin per int int lim = vec.isCategorical() ? Categorical.MAX_CATEGORICAL_COUNT : MAX_SIZE; nbins = Math.min(lim, nbins); // Cap nbins at sane levels } Histo histo = new Histo(null, rs, nbins).doAll(vec); assert ArrayUtils.sum(histo._bins) == rows; rs._bins = histo._bins; // Compute percentiles from histogram rs._pctiles = new double[Vec.PERCENTILES.length]; int j = 0; // Histogram bin number int k = 0; // The next non-zero bin after j long hsum = 0; // Rolling histogram sum double base = rs.h_base(); double stride = rs.h_stride(); double lastP = -1.0; // any negative value to pass assert below first time for (int i = 0; i < Vec.PERCENTILES.length; i++) { final double P = Vec.PERCENTILES[i]; assert P >= 0 && P <= 1 && P >= lastP; // rely on increasing percentiles here. If P has dup then strange but accept, hence >= not > lastP = P; double pdouble = 1.0 + P * (rows - 1); // following stats:::quantile.default type 7 long pint = (long) pdouble; // 1-based into bin vector double h = pdouble - pint; // any fraction h to linearly interpolate between? assert P != 1 || (h == 0.0 && pint == rows); // i.e. 
max while (hsum < pint) hsum += rs._bins[j++]; // j overshot by 1 bin; we added _bins[j-1] and this goes from too low to either exactly right or too big // pint now falls in bin j-1 (the ++ happened even when hsum==pint), so grab that bin value now rs._pctiles[i] = base + stride * (j - 1); if (h > 0 && pint == hsum) { // linearly interpolate between adjacent non-zero bins // i) pint is the last of (j-1)'s bin count (>1 when either duplicates exist in input, or stride makes dups at lower accuracy) // AND ii) h>0 so we do need to find the next non-zero bin if (k < j) k = j; // if j jumped over the k needed for the last P, catch k up to j // Saves potentially winding k forward over the same zero stretch many times while (rs._bins[k] == 0) k++; // find the next non-zero bin rs._pctiles[i] += h * stride * (k - j + 1); } // otherwise either h==0 and we know which bin, or fraction is between two positions that fall in the same bin // this guarantees we are within one bin of the exact answer; i.e. within (max-min)/MAX_SIZE } installResponse(nnn, rs); } } }
h2o-core/src/main/java/water/fvec/RollupStats.java
package water.fvec; import water.Futures; import jsr166y.CountedCompleter; import jsr166y.ForkJoinTask; import water.*; import water.H2O.H2OCallback; import water.H2O.H2OCountedCompleter; import water.nbhm.NonBlockingHashMap; import water.parser.Categorical; import water.parser.BufferedString; import water.util.ArrayUtils; import java.util.Arrays; /** A class to compute the rollup stats. These are computed lazily, thrown * away if the Vec is written into, and then recomputed lazily. Error to ask * for them if the Vec is actively being written into. It is common for all * cores to ask for the same Vec rollup at once, so it is crucial that it be * computed once across the cluster. * * Rollups are kept in the K/V store, which also controls who manages the * rollup work and final results. Winner of a DKV CAS/PutIfMatch race gets to * manage the M/R job computing the rollups. Losers block for the same * rollup. Remote requests *always* forward to the Rollup Key's master. */ final class RollupStats extends Iced { /** The count of missing elements.... or -2 if we have active writers and no * rollup info can be computed (because the vector is being rapidly * modified!), or -1 if rollups have not been computed since the last * modification. */ volatile transient ForkJoinTask _tsk; // Computed in 1st pass volatile long _naCnt; //count(!isNA(X)) double _mean, _sigma; //sum(X) and sum(X^2) for non-NA values long _rows, //count(X) for non-NA values _nzCnt, //count(X!=0) for non-NA values _size, //byte size _pinfs, //count(+inf) _ninfs; //count(-inf) boolean _isInt=true; double[] _mins, _maxs; long _checksum; // Expensive histogram & percentiles // Computed in a 2nd pass, on-demand, by calling computeHisto private static final int MAX_SIZE = 1000; // Standard bin count; categoricals can have more bins // the choice of MAX_SIZE being a power of 10 (rather than 1024) just aligns-to-the-grid of the common input of fixed decimal // precision numbers. It is still an estimate and makes no difference mathematically. It just gives tidier output in some // simple cases without penalty. 
volatile long[] _bins; // Approximate data value closest to the Xth percentile double[] _pctiles; public boolean hasHisto(){return _bins != null;} // Check for: Vector is mutating and rollups cannot be asked for boolean isMutating() { return _naCnt==-2; } // Check for: Rollups currently being computed private boolean isComputing() { return _naCnt==-1; } // Check for: Rollups available private boolean isReady() { return _naCnt>=0; } private RollupStats(int mode) { _mins = new double[5]; _maxs = new double[5]; Arrays.fill(_mins, Double.MAX_VALUE); Arrays.fill(_maxs,-Double.MAX_VALUE); _pctiles = new double[Vec.PERCENTILES.length]; Arrays.fill(_pctiles, Double.NaN); _mean = _sigma = 0; _size = 0; _naCnt = mode; } private static RollupStats makeComputing() { return new RollupStats(-1); } static RollupStats makeMutating () { return new RollupStats(-2); } private RollupStats map( Chunk c ) { _size = c.byteSize(); boolean isUUID = c._vec.isUUID(); boolean isString = c._vec.isString(); BufferedString tmpStr = new BufferedString(); if (isString) _isInt = false; // Checksum support long checksum = 0; long start = c._start; long l = 81985529216486895L; // Check for popular easy cases: All Constant double min=c.min(), max=c.max(); if( min==max ) { // All constant or all NaN double d = min; // It's the min, it's the max, it's the alpha and omega _checksum = (c.hasFloat()?Double.doubleToRawLongBits(d):(long)d)*c._len; Arrays.fill(_mins, d); Arrays.fill(_maxs, d); if( d == Double.POSITIVE_INFINITY) _pinfs++; else if( d == Double.NEGATIVE_INFINITY) _ninfs++; else { if( Double.isNaN(d)) _naCnt=c._len; else if( d != 0 ) _nzCnt=c._len; _mean = d; _rows=c._len; } _isInt = ((long)d) == d; _sigma = 0; // No variance for constants return this; } //all const NaNs if ((c instanceof C0DChunk && c.isNA_impl(0))) { _sigma=0; //count of non-NAs * variance of non-NAs _mean = 0; //sum of non-NAs (will get turned into mean) _naCnt=c._len; _nzCnt=0; return this; } // Check for popular easy cases: Boolean, possibly sparse, possibly NaN if( min==0 && max==1 ) { int zs = c._len-c.sparseLenZero(); // Easy zeros int nans = 0; // Hard-count sparse-but-zero (weird case of setting a zero over a non-zero) for( int i=c.nextNZ(-1); i< c._len; i=c.nextNZ(i) ) if( c.isNA(i) ) nans++; else if( c.at8(i)==0 ) zs++; int os = c._len-zs-nans; // Ones _nzCnt += os; _naCnt += nans; for( int i=0; i<Math.min(_mins.length,zs); i++ ) { min(0); max(0); } for( int i=0; i<Math.min(_mins.length,os); i++ ) { min(1); max(1); } _rows += zs+os; _mean = (double)os/_rows; _sigma = zs*(0.0-_mean)*(0.0-_mean) + os*(1.0-_mean)*(1.0-_mean); return this; } // Walk the non-zeros if( isUUID ) { // UUID columns do not compute min/max/mean/sigma for( int i=c.nextNZ(-1); i< c._len; i=c.nextNZ(i) ) { if( c.isNA(i) ) _naCnt++; else { long lo = c.at16l(i), hi = c.at16h(i); if (lo != 0 || hi != 0) _nzCnt++; l = lo ^ 37*hi; } if(l != 0) // ignore 0s in checksum to be consistent with sparse chunks checksum ^= (17 * (start+i)) ^ 23*l; } } else if( isString ) { // String columns do not compute min/max/mean/sigma for (int i = c.nextNZ(-1); i < c._len; i = c.nextNZ(i)) { if (c.isNA(i)) _naCnt++; else { _nzCnt++; l = c.atStr(tmpStr, i).hashCode(); } if (l != 0) // ignore 0s in checksum to be consistent with sparse chunks checksum ^= (17 * (start + i)) ^ 23 * l; } } else { // Work off all numeric rows, or only the nonzeros for sparse if (c instanceof C1Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C1Chunk) c, start, checksum); else if (c instanceof C1SChunk) 
checksum=new RollupStatsHelpers(this).numericChunkRollup((C1SChunk) c, start, checksum); else if (c instanceof C1NChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C1NChunk) c, start, checksum); else if (c instanceof C2Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C2Chunk) c, start, checksum); else if (c instanceof C2SChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C2SChunk) c, start, checksum); else if (c instanceof C4SChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C4SChunk) c, start, checksum); else if (c instanceof C4FChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C4FChunk) c, start, checksum); else if (c instanceof C4Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C4Chunk) c, start, checksum); else if (c instanceof C8Chunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C8Chunk) c, start, checksum); else if (c instanceof C8DChunk) checksum=new RollupStatsHelpers(this).numericChunkRollup((C8DChunk) c, start, checksum); else checksum=new RollupStatsHelpers(this).numericChunkRollup(c, start, checksum); // special case for sparse chunks // we need to merge with the mean (0) and variance (0) of the zeros count of 0s of the sparse chunk - which were skipped above // _rows is the count of non-zero rows // _mean is the mean of non-zero rows // _sigma is the mean of non-zero rows // handle the zeros if( c.isSparseZero() ) { int zeros = c._len - c.sparseLenZero(); if (zeros > 0) { for( int i=0; i<Math.min(_mins.length,zeros); i++ ) { min(0); max(0); } double zeromean = 0; double zeroM2 = 0; double delta = _mean - zeromean; _mean = (_mean * _rows + zeromean * zeros) / (_rows + zeros); _sigma += zeroM2 + delta*delta * _rows * zeros / (_rows + zeros); //this is the variance*(N-1), will do sqrt(_sigma/(N-1)) later in postGlobal _rows += zeros; } } } _checksum = checksum; // UUID and String columns do not compute min/max/mean/sigma if( isUUID || isString) { Arrays.fill(_mins,Double.NaN); Arrays.fill(_maxs,Double.NaN); _mean = _sigma = Double.NaN; } return this; } private void reduce( RollupStats rs ) { for( double d : rs._mins ) if (!Double.isNaN(d)) min(d); for( double d : rs._maxs ) if (!Double.isNaN(d)) max(d); _naCnt += rs._naCnt; _nzCnt += rs._nzCnt; _pinfs += rs._pinfs; _ninfs += rs._ninfs; if (_rows == 0) { _mean = rs._mean; _sigma = rs._sigma; } else if(rs._rows != 0){ double delta = _mean - rs._mean; _mean = (_mean * _rows + rs._mean * rs._rows) / (_rows + rs._rows); _sigma += rs._sigma + delta*delta * _rows*rs._rows / (_rows+rs._rows); } _rows += rs._rows; _size += rs._size; _isInt &= rs._isInt; _checksum ^= rs._checksum; } double min( double d ) { assert(!Double.isNaN(d)); for( int i=0; i<_mins.length; i++ ) if( d < _mins[i] ) { double tmp = _mins[i]; _mins[i] = d; d = tmp; } return _mins[_mins.length-1]; } double max( double d ) { assert(!Double.isNaN(d)); for( int i=0; i<_maxs.length; i++ ) if( d > _maxs[i] ) { double tmp = _maxs[i]; _maxs[i] = d; d = tmp; } return _maxs[_maxs.length-1]; } private static class Roll extends MRTask<Roll> { final Key _rskey; RollupStats _rs; Roll( H2OCountedCompleter cmp, Key rskey ) { super(cmp); _rskey=rskey; } @Override public void map( Chunk c ) { _rs = new RollupStats(0).map(c); } @Override public void reduce( Roll roll ) { _rs.reduce(roll._rs); } @Override public void postGlobal() { if( _rs == null ) _rs = new RollupStats(0); else { _rs._sigma = Math.sqrt(_rs._sigma/(_rs._rows-1)); if (_rs._rows == 1) _rs._sigma = 0; if (_rs._rows < 5) 
for (int i=0; i<5-_rs._rows; i++) { // Fix PUBDEV-150 for files under 5 rows _rs._maxs[4-i] = Double.NaN; _rs._mins[4-i] = Double.NaN; } } // mean & sigma not allowed on more than 2 classes; for 2 classes the assumption is that it's true/false if( _fr.anyVec().isCategorical() && _fr.anyVec().domain().length > 2 ) _rs._mean = _rs._sigma = Double.NaN; } // Just toooo common to report always. Drowning in multi-megabyte log file writes. @Override public boolean logVerbose() { return false; } /** * Added to avoid deadlocks when running from idea in debug mode (evaluating toSgtring on mr task causes rollups to be computed) * @return */ @Override public String toString(){return "Roll(" + _fr.anyVec()._key +")";} } static void start(final Vec vec, Futures fs, boolean computeHisto) { if( DKV.get(vec._key)== null ) throw new RuntimeException("Rollups not possible, because Vec was deleted: "+vec._key); if( vec.isString() ) computeHisto = false; // No histogram for string columns final Key rskey = vec.rollupStatsKey(); RollupStats rs = getOrNull(vec,rskey); if(rs == null || (computeHisto && !rs.hasHisto())) fs.add(new RPC(rskey.home_node(),new ComputeRollupsTask(vec,computeHisto)).addCompleter(new H2OCallback() { @Override public void callback(H2OCountedCompleter h2OCountedCompleter) { DKV.get(rskey); // fetch new results via DKV to enable caching of the results. } }).call()); } private static NonBlockingHashMap<Key,RPC> _pendingRollups = new NonBlockingHashMap<>(); static RollupStats get(Vec vec, boolean computeHisto) { if( DKV.get(vec._key)== null ) throw new RuntimeException("Rollups not possible, because Vec was deleted: "+vec._key); if( vec.isString() ) computeHisto = false; // No histogram for string columns final Key rskey = vec.rollupStatsKey(); RollupStats rs = DKV.getGet(rskey); while(rs == null || (!rs.isReady() || (computeHisto && !rs.hasHisto()))){ if(rs != null && rs.isMutating()) throw new IllegalArgumentException("Can not compute rollup stats while vec is being modified. (1)"); // 1. compute only once try { RPC rpcNew = new RPC(rskey.home_node(),new ComputeRollupsTask(vec, computeHisto)); RPC rpcOld = _pendingRollups.putIfAbsent(rskey, rpcNew); if(rpcOld == null) { // no prior pending task, need to send this one rpcNew.call().get(); _pendingRollups.remove(rskey); } else // rollups computation is already in progress, wait for it to finish rpcOld.get(); } catch( Throwable t ) { System.err.println("Remote rollups failed with an exception, wrapping and rethrowing: "+t); throw new RuntimeException(t); } // 2. fetch - done in two steps to go through standard DKV.get and enable local caching rs = DKV.getGet(rskey); } return rs; } // Allow a bunch of rollups to run in parallel. If Futures is passed in, run // the rollup in the background and do not return. static RollupStats get(Vec vec) { return get(vec,false);} // Fetch if present, but do not compute static RollupStats getOrNull(Vec vec, final Key rskey ) { Value val = DKV.get(rskey); if( val == null ) // No rollup stats present? return vec.length() > 0 ? /*not computed*/null : /*empty vec*/new RollupStats(0); RollupStats rs = val.get(RollupStats.class); return rs.isReady() ? 
rs : null; } // Histogram base & stride double h_base() { return _mins[0]; } double h_stride() { return h_stride(_bins.length); } private double h_stride(int nbins) { return (_maxs[0]-_mins[0]+(_isInt?1:0))/nbins; } // Compute expensive histogram private static class Histo extends MRTask<Histo> { final double _base, _stride; // Inputs final int _nbins; // Inputs long[] _bins; // Outputs Histo( H2OCountedCompleter cmp, RollupStats rs, int nbins ) { super(cmp);_base = rs.h_base(); _stride = rs.h_stride(nbins); _nbins = nbins; } @Override public void map( Chunk c ) { _bins = new long[_nbins]; for( int i=c.nextNZ(-1); i< c._len; i=c.nextNZ(i) ) { double d = c.atd(i); if( !Double.isNaN(d) ) _bins[idx(d)]++; } // Sparse? We skipped all the zeros; do them now if( c.isSparseZero() ) _bins[idx(0.0)] += (c._len - c.sparseLenZero()); } private int idx( double d ) { int idx = (int)((d-_base)/_stride); return Math.min(idx,_bins.length-1); } @Override public void reduce( Histo h ) { ArrayUtils.add(_bins,h._bins); } // Just toooo common to report always. Drowning in multi-megabyte log file writes. @Override public boolean logVerbose() { return false; } } // Task to compute rollups on its homenode if needed. // Only computes the rollups, does not fetch them, caller should fetch them via DKV store (to preserve caching). // Only comutes the rollups if needed (i.e. are null or do not have histo and histo is required) // If rs computation is already in progress, it will wait for it to finish. // Throws IAE if the Vec is being modified (or removed) while this task is in progress. static final class ComputeRollupsTask extends DTask<ComputeRollupsTask>{ final Key _vecKey; final Key _rsKey; final boolean _computeHisto; public ComputeRollupsTask(Vec v, boolean computeHisto){ super((byte)(Thread.currentThread() instanceof H2O.FJWThr ? currThrPriority()+1 : H2O.MIN_HI_PRIORITY-3)); _vecKey = v._key; _rsKey = v.rollupStatsKey(); _computeHisto = computeHisto; } private Value makeComputing(){ RollupStats newRs = RollupStats.makeComputing(); CountedCompleter cc = getCompleter(); // should be null or RPCCall if(cc != null) assert cc.getCompleter() == null; newRs._tsk = cc == null?this:cc; return new Value(_rsKey,newRs); } private void installResponse(Value nnn, RollupStats rs) { Futures fs = new Futures(); Value old = DKV.DputIfMatch(_rsKey, new Value(_rsKey, rs), nnn, fs); assert rs.isReady(); if(old != nnn) throw new IllegalArgumentException("Can not compute rollup stats while vec is being modified. (2)"); fs.blockForPending(); } @Override public void compute2() { assert _rsKey.home(); final Vec vec = DKV.getGet(_vecKey); while(true) { Value v = DKV.get(_rsKey); RollupStats rs = (v == null) ? null : v.<RollupStats>get(); // Fetched current rs from the DKV, rs can be: // a) computed // a.1) has histo or histo not required => do nothing // a.2) no histo and histo is required => only compute histo // b) computing => wait for the task computing it to finish and check again // c) mutating => throw IAE // d) null => compute new rollups if (rs != null) { if (rs.isReady()) { if (_computeHisto && !rs.hasHisto()) { // a.2 => compute rollups CountedCompleter cc = getCompleter(); // should be null or RPCCall if(cc != null) assert cc.getCompleter() == null; // note: if cc == null then onExceptionalCompletion tasks waiting on this may be woken up before exception handling iff exception is thrown. 
Value nnn = makeComputing(); Futures fs = new Futures(); Value oldv = DKV.DputIfMatch(_rsKey, nnn, v, fs); fs.blockForPending(); if(oldv == v){ // got the lock computeHisto(rs, vec, nnn); break; } // else someone else is modifying the rollups => try again } else break; // a.1 => do nothing } else if (rs.isComputing()) { // b) => wait for current computation to finish rs._tsk.join(); } else if(rs.isMutating()) // c) => throw IAE throw new IllegalArgumentException("Can not compute rollup stats while vec is being modified. (3)"); } else { // d) => compute the rollups final Value nnn = makeComputing(); Futures fs = new Futures(); Value oldv = DKV.DputIfMatch(_rsKey, nnn, v, fs); fs.blockForPending(); if(oldv == v){ // got the lock, compute the rollups Roll r = new Roll(null,_rsKey).doAll(vec); // computed the stats, now compute histo if needed and install the response and quit r._rs._checksum ^= vec.length(); if(_computeHisto) computeHisto(r._rs, vec, nnn); else installResponse(nnn, r._rs); break; } // else someone else is modifying the rollups => try again } } tryComplete(); } final void computeHisto(final RollupStats rs, Vec vec, final Value nnn){ // All NAs or non-math; histogram has zero bins if( rs._naCnt == vec.length() || vec.isUUID() ) { rs._bins = new long[0]; installResponse(nnn, rs); return; } // Constant: use a single bin double span = rs._maxs[0]-rs._mins[0]; final long rows = vec.length()-rs._naCnt; assert rows > 0:"rows = " + rows + ", vec.len() = " + vec.length() + ", naCnt = " + rs._naCnt; if( span==0 ) { rs._bins = new long[]{rows}; installResponse(nnn, rs); return; } // Number of bins: MAX_SIZE by default. For integers, bins for each unique int // - unless the count gets too high; allow a very high count for categoricals. int nbins=MAX_SIZE; if( rs._isInt && span < Integer.MAX_VALUE ) { nbins = (int)span+1; // 1 bin per int int lim = vec.isCategorical() ? Categorical.MAX_CATEGORICAL_COUNT : MAX_SIZE; nbins = Math.min(lim,nbins); // Cap nbins at sane levels } addToPendingCount(1); new Histo(new H2OCallback<Histo>(this){ @Override public void callback(Histo histo) { assert ArrayUtils.sum(histo._bins) == rows; rs._bins = histo._bins; // Compute percentiles from histogram rs._pctiles = new double[Vec.PERCENTILES.length]; int j = 0; // Histogram bin number int k = 0; // The next non-zero bin after j long hsum = 0; // Rolling histogram sum double base = rs.h_base(); double stride = rs.h_stride(); double lastP = -1.0; // any negative value to pass assert below first time for (int i = 0; i < Vec.PERCENTILES.length; i++) { final double P = Vec.PERCENTILES[i]; assert P>=0 && P<=1 && P>=lastP; // rely on increasing percentiles here. If P has dup then strange but accept, hence >= not > lastP = P; double pdouble = 1.0 + P*(rows-1); // following stats:::quantile.default type 7 long pint = (long) pdouble; // 1-based into bin vector double h = pdouble - pint; // any fraction h to linearly interpolate between? assert P!=1 || (h==0.0 && pint==rows); // i.e. 
max while (hsum < pint) hsum += rs._bins[j++]; // j overshot by 1 bin; we added _bins[j-1] and this goes from too low to either exactly right or too big // pint now falls in bin j-1 (the ++ happened even when hsum==pint), so grab that bin value now rs._pctiles[i] = base + stride * (j - 1); if (h>0 && pint==hsum) { // linearly interpolate between adjacent non-zero bins // i) pint is the last of (j-1)'s bin count (>1 when either duplicates exist in input, or stride makes dups at lower accuracy) // AND ii) h>0 so we do need to find the next non-zero bin if (k<j) k=j; // if j jumped over the k needed for the last P, catch k up to j // Saves potentially winding k forward over the same zero stretch many times while (rs._bins[k]==0) k++; // find the next non-zero bin rs._pctiles[i] += h * stride * (k-j+1); } // otherwise either h==0 and we know which bin, or fraction is between two positions that fall in the same bin // this guarantees we are within one bin of the exact answer; i.e. within (max-min)/MAX_SIZE } installResponse(nnn, rs); } },rs,nbins).dfork(vec); // intentionally using dfork here to increase priority level } } }
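The reduce() step and the sparse-zero handling in the rollup code above both fold partial statistics together with the standard pairwise mean/variance merge (the row-weighted mean plus a delta-squared cross term). The stand-alone sketch below illustrates only that arithmetic; the class and field names are hypothetical and it is not the H2O implementation.

import java.util.Arrays;

// Stand-alone illustration of the pairwise (rows, mean, M2) merge that the
// per-chunk rollup reduce relies on (Chan et al. parallel variance update).
// Names here are illustrative, not part of the H2O API.
final class RunningStats {
  long rows;    // observations seen so far
  double mean;  // running mean
  double m2;    // sum of squared deviations from the mean, i.e. variance*(rows-1)

  void add(double x) {
    rows++;
    double delta = x - mean;
    mean += delta / rows;
    m2 += delta * (x - mean);
  }

  // Merge another partial result: the row-weighted mean plus the
  // delta^2 * n1*n2/(n1+n2) cross term, the same update used when combining
  // chunk results (and when folding in a block of sparse zeros with mean 0, M2 0).
  void merge(RunningStats other) {
    if (other.rows == 0) return;
    if (rows == 0) { rows = other.rows; mean = other.mean; m2 = other.m2; return; }
    double delta = mean - other.mean;
    long combined = rows + other.rows;
    mean = (mean * rows + other.mean * other.rows) / combined;
    m2 += other.m2 + delta * delta * rows * other.rows / (double) combined;
    rows = combined;
  }

  double variance() { return rows > 1 ? m2 / (rows - 1) : 0; }

  public static void main(String[] args) {
    RunningStats a = new RunningStats(), b = new RunningStats();
    Arrays.stream(new double[]{1, 2, 3}).forEach(a::add);
    Arrays.stream(new double[]{4, 5, 6, 7}).forEach(b::add);
    a.merge(b);
    System.out.println(a.mean + " " + a.variance()); // 4.0 and ~4.667, same as for 1..7 directly
  }
}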
Rollups change - launch the compute-histo task in a blocking way via doAll() (same as the basic rollups computation) to prevent a potential deadlock.
h2o-core/src/main/java/water/fvec/RollupStats.java
Rollups change - launch the compute-histo task in a blocking way via doAll() (same as the basic rollups computation) to prevent a potential deadlock.
<ide><path>2o-core/src/main/java/water/fvec/RollupStats.java <ide> tryComplete(); <ide> } <ide> <del> final void computeHisto(final RollupStats rs, Vec vec, final Value nnn){ <add> final void computeHisto(final RollupStats rs, Vec vec, final Value nnn) { <ide> // All NAs or non-math; histogram has zero bins <del> if( rs._naCnt == vec.length() || vec.isUUID() ) { <add> if (rs._naCnt == vec.length() || vec.isUUID()) { <ide> rs._bins = new long[0]; <ide> installResponse(nnn, rs); <ide> return; <ide> } <ide> // Constant: use a single bin <del> double span = rs._maxs[0]-rs._mins[0]; <del> final long rows = vec.length()-rs._naCnt; <del> assert rows > 0:"rows = " + rows + ", vec.len() = " + vec.length() + ", naCnt = " + rs._naCnt; <del> if( span==0 ) { <add> double span = rs._maxs[0] - rs._mins[0]; <add> final long rows = vec.length() - rs._naCnt; <add> assert rows > 0 : "rows = " + rows + ", vec.len() = " + vec.length() + ", naCnt = " + rs._naCnt; <add> if (span == 0) { <ide> rs._bins = new long[]{rows}; <ide> installResponse(nnn, rs); <ide> return; <ide> } <ide> // Number of bins: MAX_SIZE by default. For integers, bins for each unique int <ide> // - unless the count gets too high; allow a very high count for categoricals. <del> int nbins=MAX_SIZE; <del> if( rs._isInt && span < Integer.MAX_VALUE ) { <del> nbins = (int)span+1; // 1 bin per int <add> int nbins = MAX_SIZE; <add> if (rs._isInt && span < Integer.MAX_VALUE) { <add> nbins = (int) span + 1; // 1 bin per int <ide> int lim = vec.isCategorical() ? Categorical.MAX_CATEGORICAL_COUNT : MAX_SIZE; <del> nbins = Math.min(lim,nbins); // Cap nbins at sane levels <del> } <del> addToPendingCount(1); <del> new Histo(new H2OCallback<Histo>(this){ <del> @Override public void callback(Histo histo) { <del> assert ArrayUtils.sum(histo._bins) == rows; <del> rs._bins = histo._bins; <del> // Compute percentiles from histogram <del> rs._pctiles = new double[Vec.PERCENTILES.length]; <del> int j = 0; // Histogram bin number <del> int k = 0; // The next non-zero bin after j <del> long hsum = 0; // Rolling histogram sum <del> double base = rs.h_base(); <del> double stride = rs.h_stride(); <del> double lastP = -1.0; // any negative value to pass assert below first time <del> for (int i = 0; i < Vec.PERCENTILES.length; i++) { <del> final double P = Vec.PERCENTILES[i]; <del> assert P>=0 && P<=1 && P>=lastP; // rely on increasing percentiles here. If P has dup then strange but accept, hence >= not > <del> lastP = P; <del> double pdouble = 1.0 + P*(rows-1); // following stats:::quantile.default type 7 <del> long pint = (long) pdouble; // 1-based into bin vector <del> double h = pdouble - pint; // any fraction h to linearly interpolate between? <del> assert P!=1 || (h==0.0 && pint==rows); // i.e. 
max <del> while (hsum < pint) hsum += rs._bins[j++]; <del> // j overshot by 1 bin; we added _bins[j-1] and this goes from too low to either exactly right or too big <del> // pint now falls in bin j-1 (the ++ happened even when hsum==pint), so grab that bin value now <del> rs._pctiles[i] = base + stride * (j - 1); <del> if (h>0 && pint==hsum) { <del> // linearly interpolate between adjacent non-zero bins <del> // i) pint is the last of (j-1)'s bin count (>1 when either duplicates exist in input, or stride makes dups at lower accuracy) <del> // AND ii) h>0 so we do need to find the next non-zero bin <del> if (k<j) k=j; // if j jumped over the k needed for the last P, catch k up to j <del> // Saves potentially winding k forward over the same zero stretch many times <del> while (rs._bins[k]==0) k++; // find the next non-zero bin <del> rs._pctiles[i] += h * stride * (k-j+1); <del> } // otherwise either h==0 and we know which bin, or fraction is between two positions that fall in the same bin <del> // this guarantees we are within one bin of the exact answer; i.e. within (max-min)/MAX_SIZE <del> } <del> installResponse(nnn, rs); <del> } <del> },rs,nbins).dfork(vec); // intentionally using dfork here to increase priority level <add> nbins = Math.min(lim, nbins); // Cap nbins at sane levels <add> } <add> Histo histo = new Histo(null, rs, nbins).doAll(vec); <add> assert ArrayUtils.sum(histo._bins) == rows; <add> rs._bins = histo._bins; <add> // Compute percentiles from histogram <add> rs._pctiles = new double[Vec.PERCENTILES.length]; <add> int j = 0; // Histogram bin number <add> int k = 0; // The next non-zero bin after j <add> long hsum = 0; // Rolling histogram sum <add> double base = rs.h_base(); <add> double stride = rs.h_stride(); <add> double lastP = -1.0; // any negative value to pass assert below first time <add> for (int i = 0; i < Vec.PERCENTILES.length; i++) { <add> final double P = Vec.PERCENTILES[i]; <add> assert P >= 0 && P <= 1 && P >= lastP; // rely on increasing percentiles here. If P has dup then strange but accept, hence >= not > <add> lastP = P; <add> double pdouble = 1.0 + P * (rows - 1); // following stats:::quantile.default type 7 <add> long pint = (long) pdouble; // 1-based into bin vector <add> double h = pdouble - pint; // any fraction h to linearly interpolate between? <add> assert P != 1 || (h == 0.0 && pint == rows); // i.e. max <add> while (hsum < pint) hsum += rs._bins[j++]; <add> // j overshot by 1 bin; we added _bins[j-1] and this goes from too low to either exactly right or too big <add> // pint now falls in bin j-1 (the ++ happened even when hsum==pint), so grab that bin value now <add> rs._pctiles[i] = base + stride * (j - 1); <add> if (h > 0 && pint == hsum) { <add> // linearly interpolate between adjacent non-zero bins <add> // i) pint is the last of (j-1)'s bin count (>1 when either duplicates exist in input, or stride makes dups at lower accuracy) <add> // AND ii) h>0 so we do need to find the next non-zero bin <add> if (k < j) k = j; // if j jumped over the k needed for the last P, catch k up to j <add> // Saves potentially winding k forward over the same zero stretch many times <add> while (rs._bins[k] == 0) k++; // find the next non-zero bin <add> rs._pctiles[i] += h * stride * (k - j + 1); <add> } // otherwise either h==0 and we know which bin, or fraction is between two positions that fall in the same bin <add> // this guarantees we are within one bin of the exact answer; i.e. 
within (max-min)/MAX_SIZE <add> } <add> installResponse(nnn, rs); <ide> } <ide> } <ide> }
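The commit above replaces the old dfork() + H2OCallback continuation with a plain blocking doAll(), so the histogram is computed before installResponse() runs instead of in a separate completion callback. The sketch below only illustrates the general shape of that difference using standard JDK types; the executor, class, and method names are hypothetical and this is not the H2O task scheduler.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Contrast of the two shapes only: the old code forked the histogram work and
// finished the rollup in a completion callback; the patch runs it inline and
// installs the result afterwards. Pool sizes and method names are hypothetical.
public class BlockingVsCallback {
  static long[] computeBins() {          // stand-in for the Histo map/reduce pass
    return new long[]{3, 5, 2};
  }

  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(2);

    // Old shape: submit the work and attach a continuation. If every worker
    // thread is already parked waiting on such continuations, the submitted
    // work never gets a thread and the whole job can deadlock.
    CompletableFuture<long[]> async =
        CompletableFuture.supplyAsync(BlockingVsCallback::computeBins, pool);
    async.thenAccept(bins -> System.out.println("callback saw " + bins.length + " bins"));

    // New shape: compute on the current thread, then install the response.
    // Completion no longer depends on a second thread becoming available.
    long[] bins = computeBins();
    System.out.println("blocking saw " + bins.length + " bins");

    async.join();
    pool.shutdown();
  }
}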
Java
apache-2.0
7ceba1217c1f36c57ea9dc301f96e2b7e2e9e6a1
0
marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api
/* * Copyright 2015 MarkLogic Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.marklogic.client.datamovement; import com.marklogic.client.DatabaseClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.http.NoHttpResponseException; import java.net.SocketException; import java.net.UnknownHostException; import javax.net.ssl.SSLException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; public class HostAvailabilityListener implements QueryFailureListener, WriteFailureListener { private static Logger logger = LoggerFactory.getLogger(HostAvailabilityListener.class); private DataMovementManager moveMgr; private Duration suspendTimeForHostUnavailable = Duration.ofMinutes(10); private int minHosts = 1; private ScheduledFuture<?> future; List<Class<?>> hostUnavailableExceptions = new ArrayList<>(); { hostUnavailableExceptions.add(NoHttpResponseException.class); hostUnavailableExceptions.add(SocketException.class); hostUnavailableExceptions.add(SSLException.class); hostUnavailableExceptions.add(UnknownHostException.class); } /** * @param moveMgr the DataMovementManager (used to call readForestConfig to reset after black-listing an unavailable host) * @param batcher the WriteBatcher or QueryBatcher instance this will listen to (used to call withForestConfig to black-list an unavailable host) */ public HostAvailabilityListener(DataMovementManager moveMgr) { if (moveMgr == null) throw new IllegalArgumentException("moveMgr must not be null"); this.moveMgr = moveMgr; } /** If a host becomes unavailable (NoHttpResponseException, SocketException, SSLException, * UnknownHostException), adds it to the blacklist * * @param duration the amount of time an unavailable host will be suspended * * @return this instance (for method chaining) */ public HostAvailabilityListener withSuspendTimeForHostUnavailable(Duration duration) { if (duration == null) throw new IllegalArgumentException("duration must not be null"); this.suspendTimeForHostUnavailable = duration; return this; } /** If less than minHosts are left, calls stopJob. * * @param numHosts the minimum number of hosts before this will call dataMovementMangaer.stopJob(batcher) * * @return this instance (for method chaining) */ public HostAvailabilityListener withMinHosts(int numHosts) { if (numHosts <= 0) throw new IllegalArgumentException("numHosts must be > 0"); this.minHosts = numHosts; return this; } /** Overwrites the list of exceptions for which a host will be blacklisted * * @param exceptionTypes the list of types of Throwable, any of which constitute a host that's unavailable * * @return this instance (for method chaining) */ public HostAvailabilityListener withHostUnavailableExceptions(Class<Throwable>... 
exceptionTypes) { hostUnavailableExceptions = new ArrayList<>(); for ( Class<Throwable> exception : exceptionTypes ) { hostUnavailableExceptions.add(exception); } return this; } /** * @return the list of types of Throwable, any of which constitute a host that's unavailable */ public Throwable[] getHostUnavailableExceptions() { return hostUnavailableExceptions.toArray(new Throwable[hostUnavailableExceptions.size()]); } /** * @return the amount of time an unavailable host will be suspended */ public Duration getSuspendTimeForHostUnavailable() { return suspendTimeForHostUnavailable; } /** * @return the minimum number of hosts before this will call dataMovementMangaer.stopJob(batcher) */ public int getMinHosts() { return minHosts; } /** * This implements the WriteFailureListener interface * * @param hostClient the database client * @param batch the batch of WriteEvents * @param throwable the exception */ public void processFailure(DatabaseClient hostClient, WriteBatch batch, Throwable throwable) { boolean isHostUnavailableException = processException(batch.getBatcher(), throwable, hostClient.getHost()); if ( isHostUnavailableException == true ) { try { logger.warn("Retrying failed batch: {}, results so far: {}, uris: {}", batch.getJobBatchNumber(), batch.getJobWritesSoFar(), Stream.of(batch.getItems()).map(event->event.getTargetUri()).collect(Collectors.toList()); batch.getBatcher().retry(batch); } catch (RuntimeException e) { logger.error("Exception during retry", e); } } } /** * This implements the QueryFailureListener interface * * @param client the host-specific client * @param queryBatch the exception with information about the failed query attempt */ public void processFailure(DatabaseClient client, QueryHostException queryBatch) { boolean isHostUnavailableException = processException(queryBatch.getBatcher(), queryBatch, client.getHost()); if ( isHostUnavailableException == true ) { try { logger.warn("Retrying failed batch: {}, results so far: {}, forest: {}, forestBatch: {}, forest results so far: {}", queryBatch.getJobBatchNumber(), queryBatch.getJobResultsSoFar(), queryBatch.getForest().getForestName(), queryBatch.getForestBatchNumber(), queryBatch.getForestResultsSoFar()); queryBatch.getBatcher().retry(queryBatch); } catch (RuntimeException e) { logger.error("Exception during retry", e); } } } private boolean processException(Batcher batcher, Throwable throwable, String host) { // we only do something if this throwable is on our list of exceptions // which we consider marking a host as unavilable boolean isHostUnavailableException = isHostUnavailableException(throwable, new HashSet<>()); if ( isHostUnavailableException == true ) { ForestConfiguration existingForestConfig = batcher.getForestConfig(); String[] preferredHosts = existingForestConfig.getPreferredHosts(); if ( ! 
Arrays.asList(preferredHosts).contains(host) ) { // skip all the logic below because the host in question here is already // missing from the list of hosts for this batcher return isHostUnavailableException; } if ( preferredHosts.length > minHosts ) { logger.error("ERROR: host unavailable \"" + host + "\", black-listing it for " + suspendTimeForHostUnavailable.toString(), throwable); FilteredForestConfiguration filteredForestConfig = new FilteredForestConfiguration(existingForestConfig); if ( batcher instanceof WriteBatcher ) { filteredForestConfig = filteredForestConfig.withBlackList(host); } else if ( batcher instanceof QueryBatcher ) { List<String> availableHosts = Stream.of(preferredHosts) .filter( (availableHost) -> ! availableHost.equals(host) ) .collect(Collectors.toList()); int randomPos = Math.abs(host.hashCode()) % availableHosts.size(); String randomAvailableHost = availableHosts.get(randomPos); filteredForestConfig = filteredForestConfig.withRenamedHost(host, randomAvailableHost); } batcher.withForestConfig(filteredForestConfig); // cancel any previously scheduled re-sync if ( future != null ) future.cancel(false); // schedule a re-sync with the server forest config future = Executors.newScheduledThreadPool(1) .schedule( () -> { if ( batcher.isStopped() ) { logger.debug("Job \"{}\" is stopped, so cancelling re-sync with the server forest config", batcher.getJobName()); } else { ForestConfiguration updatedForestConfig = moveMgr.readForestConfig(); logger.info("it's been {} since host {} failed, opening communication to all server hosts [{}]", suspendTimeForHostUnavailable.toString(), host, Arrays.asList(updatedForestConfig.getPreferredHosts())); // set the forestConfig back to whatever the server says it is batcher.withForestConfig(updatedForestConfig); } } , suspendTimeForHostUnavailable.toMillis(), TimeUnit.MILLISECONDS); } else { // by black-listing this host we'd move below minHosts, so it's time to // stop this job logger.error("Encountered [" + throwable + "] on host \"" + host + "\" but black-listing it would drop job below minHosts (" + minHosts + "), so stopping job \"" + batcher.getJobName() + "\"", throwable); moveMgr.stopJob(batcher); } } return isHostUnavailableException; } private boolean isHostUnavailableException(Throwable throwable, Set<Throwable> path) { for ( Class<?> type : hostUnavailableExceptions ) { if ( type.isInstance(throwable) ) { return true; } } // we need to check our recursion path to avoid infinite recursion if a // getCause() pointed to itself or an ancestor if ( throwable.getCause() != null && ! path.contains(throwable.getCause()) ) { path.add(throwable.getCause()); boolean isCauseHostUnavailableException = isHostUnavailableException(throwable.getCause(), path); if ( isCauseHostUnavailableException == true ) return true; } return false; } }
src/main/java/com/marklogic/client/datamovement/HostAvailabilityListener.java
/* * Copyright 2015 MarkLogic Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.marklogic.client.datamovement; import com.marklogic.client.DatabaseClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.http.NoHttpResponseException; import java.net.SocketException; import java.net.UnknownHostException; import javax.net.ssl.SSLException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; public class HostAvailabilityListener implements QueryFailureListener, WriteFailureListener { private static Logger logger = LoggerFactory.getLogger(HostAvailabilityListener.class); private DataMovementManager moveMgr; private Duration suspendTimeForHostUnavailable = Duration.ofMinutes(10); private int minHosts = 1; private ScheduledFuture<?> future; List<Class<?>> hostUnavailableExceptions = new ArrayList<>(); { hostUnavailableExceptions.add(NoHttpResponseException.class); hostUnavailableExceptions.add(SocketException.class); hostUnavailableExceptions.add(SSLException.class); hostUnavailableExceptions.add(UnknownHostException.class); } /** * @param moveMgr the DataMovementManager (used to call readForestConfig to reset after black-listing an unavailable host) * @param batcher the WriteBatcher or QueryBatcher instance this will listen to (used to call withForestConfig to black-list an unavailable host) */ public HostAvailabilityListener(DataMovementManager moveMgr) { if (moveMgr == null) throw new IllegalArgumentException("moveMgr must not be null"); this.moveMgr = moveMgr; } /** If a host becomes unavailable (NoHttpResponseException, SocketException, SSLException, * UnknownHostException), adds it to the blacklist * * @param duration the amount of time an unavailable host will be suspended * * @return this instance (for method chaining) */ public HostAvailabilityListener withSuspendTimeForHostUnavailable(Duration duration) { if (duration == null) throw new IllegalArgumentException("duration must not be null"); this.suspendTimeForHostUnavailable = duration; return this; } /** If less than minHosts are left, calls stopJob. * * @param numHosts the minimum number of hosts before this will call dataMovementMangaer.stopJob(batcher) * * @return this instance (for method chaining) */ public HostAvailabilityListener withMinHosts(int numHosts) { if (numHosts <= 0) throw new IllegalArgumentException("numHosts must be > 0"); this.minHosts = numHosts; return this; } /** Overwrites the list of exceptions for which a host will be blacklisted * * @param exceptionTypes the list of types of Throwable, any of which constitute a host that's unavailable * * @return this instance (for method chaining) */ public HostAvailabilityListener withHostUnavailableExceptions(Class<Throwable>... 
exceptionTypes) { hostUnavailableExceptions = new ArrayList<>(); for ( Class<Throwable> exception : exceptionTypes ) { hostUnavailableExceptions.add(exception); } return this; } /** * @return the list of types of Throwable, any of which constitute a host that's unavailable */ public Throwable[] getHostUnavailableExceptions() { return hostUnavailableExceptions.toArray(new Throwable[hostUnavailableExceptions.size()]); } /** * @return the amount of time an unavailable host will be suspended */ public Duration getSuspendTimeForHostUnavailable() { return suspendTimeForHostUnavailable; } /** * @return the minimum number of hosts before this will call dataMovementMangaer.stopJob(batcher) */ public int getMinHosts() { return minHosts; } /** * This implements the BatchFailureListener interface * * @param hostClient the host-specific client * @param batch the batch of WriteEvents * @param throwable the exception */ public void processFailure(DatabaseClient hostClient, WriteBatch batch, Throwable throwable) { boolean isHostUnavailableException = processException(batch.getBatcher(), throwable, hostClient.getHost()); if ( isHostUnavailableException == true ) { try { logger.warn("Retrying failed batch: {}, results so far: {}, uris: {}", batch.getJobBatchNumber(), batch.getJobWritesSoFar(), Stream.of(batch.getItems()).map(event->event.getTargetUri()).collect(Collectors.toList()); batch.getBatcher().retry(batch); } catch (RuntimeException e) { logger.error("Exception during retry", e); } } } /** * This implements the FailureListener interface * * @param client the host-specific client * @param queryBatch the exception with information about the status of the job */ public void processFailure(DatabaseClient client, QueryHostException queryBatch) { boolean isHostUnavailableException = processException(queryBatch.getBatcher(), queryBatch, client.getHost()); if ( isHostUnavailableException == true ) { try { logger.warn("Retrying failed batch: {}, results so far: {}, forest: {}, forestBatch: {}, forest results so far: {}", queryBatch.getJobBatchNumber(), queryBatch.getJobResultsSoFar(), queryBatch.getForest().getForestName(), queryBatch.getForestBatchNumber(), queryBatch.getForestResultsSoFar()); queryBatch.getBatcher().retry(queryBatch); } catch (RuntimeException e) { logger.error("Exception during retry", e); } } } private boolean processException(Batcher batcher, Throwable throwable, String host) { // we only do something if this throwable is on our list of exceptions // which we consider marking a host as unavilable boolean isHostUnavailableException = isHostUnavailableException(throwable, new HashSet<>()); if ( isHostUnavailableException == true ) { ForestConfiguration existingForestConfig = batcher.getForestConfig(); String[] preferredHosts = existingForestConfig.getPreferredHosts(); if ( ! 
Arrays.asList(preferredHosts).contains(host) ) { // skip all the logic below because the host in question here is already // missing from the list of hosts for this batcher return isHostUnavailableException; } if ( preferredHosts.length > minHosts ) { logger.error("ERROR: host unavailable \"" + host + "\", black-listing it for " + suspendTimeForHostUnavailable.toString(), throwable); FilteredForestConfiguration filteredForestConfig = new FilteredForestConfiguration(existingForestConfig); if ( batcher instanceof WriteBatcher ) { filteredForestConfig = filteredForestConfig.withBlackList(host); } else if ( batcher instanceof QueryBatcher ) { List<String> availableHosts = Stream.of(preferredHosts) .filter( (availableHost) -> ! availableHost.equals(host) ) .collect(Collectors.toList()); int randomPos = Math.abs(host.hashCode()) % availableHosts.size(); String randomAvailableHost = availableHosts.get(randomPos); filteredForestConfig = filteredForestConfig.withRenamedHost(host, randomAvailableHost); } batcher.withForestConfig(filteredForestConfig); // cancel any previously scheduled re-sync if ( future != null ) future.cancel(false); // schedule a re-sync with the server forest config future = Executors.newScheduledThreadPool(1) .schedule( () -> { if ( batcher.isStopped() ) { logger.debug("Job \"{}\" is stopped, so cancelling re-sync with the server forest config", batcher.getJobName()); } else { ForestConfiguration updatedForestConfig = moveMgr.readForestConfig(); logger.info("it's been {} since host {} failed, opening communication to all server hosts [{}]", suspendTimeForHostUnavailable.toString(), host, Arrays.asList(updatedForestConfig.getPreferredHosts())); // set the forestConfig back to whatever the server says it is batcher.withForestConfig(updatedForestConfig); } } , suspendTimeForHostUnavailable.toMillis(), TimeUnit.MILLISECONDS); } else { // by black-listing this host we'd move below minHosts, so it's time to // stop this job logger.error("Encountered [" + throwable + "] on host \"" + host + "\" but black-listing it would drop job below minHosts (" + minHosts + "), so stopping job \"" + batcher.getJobName() + "\"", throwable); moveMgr.stopJob(batcher); } } return isHostUnavailableException; } private boolean isHostUnavailableException(Throwable throwable, Set<Throwable> path) { for ( Class<?> type : hostUnavailableExceptions ) { if ( type.isInstance(throwable) ) { return true; } } // we need to check our recursion path to avoid infinite recursion if a // getCause() pointed to itself or an ancestor if ( throwable.getCause() != null && ! path.contains(throwable.getCause()) ) { path.add(throwable.getCause()); boolean isCauseHostUnavailableException = isHostUnavailableException(throwable.getCause(), path); if ( isCauseHostUnavailableException == true ) return true; } return false; } }
minor javadoc clarifications
src/main/java/com/marklogic/client/datamovement/HostAvailabilityListener.java
minor javadoc clarifications
<ide><path>rc/main/java/com/marklogic/client/datamovement/HostAvailabilityListener.java <ide> } <ide> <ide> /** <del> * This implements the BatchFailureListener interface <del> * <del> * @param hostClient the host-specific client <add> * This implements the WriteFailureListener interface <add> * <add> * @param hostClient the database client <ide> * @param batch the batch of WriteEvents <ide> * @param throwable the exception <ide> */ <ide> } <ide> <ide> /** <del> * This implements the FailureListener interface <add> * This implements the QueryFailureListener interface <ide> * <ide> * @param client the host-specific client <del> * @param queryBatch the exception with information about the status of the job <add> * @param queryBatch the exception with information about the failed query attempt <ide> */ <ide> public void processFailure(DatabaseClient client, QueryHostException queryBatch) { <ide> boolean isHostUnavailableException = processException(queryBatch.getBatcher(), queryBatch, client.getHost());
Java
mit
1d4e69cf20f0d86166b8f1346981874873b9847a
0
wkh237/react-native-fetch-blob,wkh237/react-native-fetch-blob,wkh237/react-native-fetch-blob
package com.RNFetchBlob; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.res.AssetFileDescriptor; import android.media.MediaScannerConnection; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Environment; import android.os.StatFs; import android.os.SystemClock; import android.util.Base64; import com.RNFetchBlob.Utils.PathResolver; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.Promise; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.WritableArray; import com.facebook.react.bridge.WritableMap; import com.facebook.react.modules.core.DeviceEventManagerModule; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.charset.CharsetEncoder; import java.util.HashMap; import java.util.Map; import java.util.UUID; public class RNFetchBlobFS { ReactApplicationContext mCtx; DeviceEventManagerModule.RCTDeviceEventEmitter emitter; String encoding = "base64"; boolean append = false; OutputStream writeStreamInstance = null; static HashMap<String, RNFetchBlobFS> fileStreams = new HashMap<>(); RNFetchBlobFS(ReactApplicationContext ctx) { this.mCtx = ctx; this.emitter = ctx.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class); } static String getExternalFilePath(ReactApplicationContext ctx, String taskId, RNFetchBlobConfig config) { if(config.path != null) return config.path; else if(config.fileCache && config.appendExt != null) return RNFetchBlobFS.getTmpPath(ctx, taskId) + "." + config.appendExt; else return RNFetchBlobFS.getTmpPath(ctx, taskId); } /** * Write string with encoding to file * @param path Destination file path. * @param encoding Encoding of the string. * @param data Array passed from JS context. * @param promise RCT Promise */ static public void writeFile(String path, String encoding, String data, final boolean append, final Promise promise) { try { int written = 0; File f = new File(path); File dir = f.getParentFile(); if(!dir.exists()) dir.mkdirs(); FileOutputStream fout = new FileOutputStream(f, append); // write data from a file if(encoding.equalsIgnoreCase(RNFetchBlobConst.DATA_ENCODE_URI)) { data = normalizePath(data); File src = new File(data); if(!src.exists()) { promise.reject("RNfetchBlob writeFileError", "source file : " + data + "not exists"); fout.close(); return ; } FileInputStream fin = new FileInputStream(src); byte [] buffer = new byte [10240]; int read; written = 0; while((read = fin.read(buffer)) > 0) { fout.write(buffer, 0, read); written += read; } fin.close(); } else { byte[] bytes = stringToBytes(data, encoding); fout.write(bytes); written = bytes.length; } fout.close(); promise.resolve(written); } catch (Exception e) { promise.reject("RNFetchBlob writeFileError", e.getLocalizedMessage()); } } /** * Write array of bytes into file * @param path Destination file path. * @param data Array passed from JS context. 
* @param promise RCT Promise */ static public void writeFile(String path, ReadableArray data, final boolean append, final Promise promise) { try { File f = new File(path); File dir = f.getParentFile(); if(!dir.exists()) dir.mkdirs(); FileOutputStream os = new FileOutputStream(f, append); byte [] bytes = new byte[data.size()]; for(int i=0;i<data.size();i++) { bytes[i] = (byte) data.getInt(i); } os.write(bytes); os.close(); promise.resolve(data.size()); } catch (Exception e) { promise.reject("RNFetchBlob writeFileError", e.getLocalizedMessage()); } } /** * Read file with a buffer that has the same size as the target file. * @param path Path of the file. * @param encoding Encoding of read stream. * @param promise */ static public void readFile(String path, String encoding, final Promise promise ) { path = normalizePath(path); try { byte[] bytes; if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { String assetName = path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, ""); long length = RNFetchBlob.RCTContext.getAssets().openFd(assetName).getLength(); bytes = new byte[(int) length]; InputStream in = RNFetchBlob.RCTContext.getAssets().open(assetName); in.read(bytes, 0, (int) length); in.close(); } else { File f = new File(path); int length = (int) f.length(); bytes = new byte[length]; FileInputStream in = new FileInputStream(f); in.read(bytes); in.close(); } switch (encoding.toLowerCase()) { case "base64" : promise.resolve(Base64.encodeToString(bytes, Base64.NO_WRAP)); break; case "ascii" : WritableArray asciiResult = Arguments.createArray(); for(byte b : bytes) { asciiResult.pushInt((int)b); } promise.resolve(asciiResult); break; case "utf8" : promise.resolve(new String(bytes)); break; default: promise.resolve(new String(bytes)); break; } } catch(Exception err) { promise.reject("ReadFile Error", err.getLocalizedMessage()); } } /** * Static method that returns system folders to JS context * @param ctx React Native application context */ static public Map<String, Object> getSystemfolders(ReactApplicationContext ctx) { Map<String, Object> res = new HashMap<>(); res.put("DocumentDir", ctx.getFilesDir().getAbsolutePath()); res.put("CacheDir", ctx.getCacheDir().getAbsolutePath()); res.put("DCIMDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).getAbsolutePath()); res.put("PictureDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath()); res.put("MusicDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC).getAbsolutePath()); res.put("DownloadDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).getAbsolutePath()); res.put("MovieDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsolutePath()); res.put("RingtoneDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_RINGTONES).getAbsolutePath()); String state; state = Environment.getExternalStorageState(); if (state.equals(Environment.MEDIA_MOUNTED)) { res.put("SDCardDir", Environment.getExternalStorageDirectory().getAbsolutePath()); res.put("SDCardApplicationDir", ctx.getExternalFilesDir(null).getParentFile().getAbsolutePath()); } res.put("MainBundleDir", ctx.getApplicationInfo().dataDir); return res; } /** * Static method that returns a temp file path * @param ctx React Native application context * @param taskId An unique string for identify * @return */ static public String getTmpPath(ReactApplicationContext ctx, String taskId) { return 
RNFetchBlob.RCTContext.getFilesDir() + "/RNFetchBlobTmp_" + taskId; } /** * Create a file stream for read * @param path File stream target path * @param encoding File stream decoder, should be one of `base64`, `utf8`, `ascii` * @param bufferSize Buffer size of read stream, default to 4096 (4095 when encode is `base64`) */ public void readStream(String path, String encoding, int bufferSize, int tick, final String streamId) { path = normalizePath(path); try { int chunkSize = encoding.equalsIgnoreCase("base64") ? 4095 : 4096; if(bufferSize > 0) chunkSize = bufferSize; InputStream fs; if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { fs = RNFetchBlob.RCTContext.getAssets() .open(path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, "")); } else { fs = new FileInputStream(new File(path)); } byte[] buffer = new byte[chunkSize]; int cursor = 0; boolean error = false; if (encoding.equalsIgnoreCase("utf8")) { CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder(); while ((cursor = fs.read(buffer)) != -1) { encoder.encode(ByteBuffer.wrap(buffer).asCharBuffer()); String chunk = new String(buffer, 0, cursor); emitStreamEvent(streamId, "data", chunk); if(tick > 0) SystemClock.sleep(tick); } } else if (encoding.equalsIgnoreCase("ascii")) { while ((cursor = fs.read(buffer)) != -1) { WritableArray chunk = Arguments.createArray(); for(int i =0;i<cursor;i++) { chunk.pushInt((int)buffer[i]); } emitStreamEvent(streamId, "data", chunk); if(tick > 0) SystemClock.sleep(tick); } } else if (encoding.equalsIgnoreCase("base64")) { while ((cursor = fs.read(buffer)) != -1) { if(cursor < chunkSize) { byte [] copy = new byte[cursor]; for(int i =0;i<cursor;i++) { copy[i] = buffer[i]; } emitStreamEvent(streamId, "data", Base64.encodeToString(copy, Base64.NO_WRAP)); } else emitStreamEvent(streamId, "data", Base64.encodeToString(buffer, Base64.NO_WRAP)); if(tick > 0) SystemClock.sleep(tick); } } else { String msg = "unrecognized encoding `" + encoding + "`"; emitStreamEvent(streamId, "error", msg); error = true; } if(!error) emitStreamEvent(streamId, "end", ""); fs.close(); buffer = null; } catch (Exception err) { emitStreamEvent(streamId, "warn", "Failed to convert data to "+encoding+" encoded string, this might due to the source data is not able to convert using this encoding."); err.printStackTrace(); } } /** * Create a write stream and store its instance in RNFetchBlobFS.fileStreams * @param path Target file path * @param encoding Should be one of `base64`, `utf8`, `ascii` * @param append Flag represents if the file stream overwrite existing content * @param callback */ public void writeStream(String path, String encoding, boolean append, Callback callback) { File dest = new File(path); if(!dest.exists() || dest.isDirectory()) { callback.invoke("write stream error: target path `" + path + "` may not exists or it's a folder"); return; } try { OutputStream fs = new FileOutputStream(path, append); this.encoding = encoding; this.append = append; String streamId = UUID.randomUUID().toString(); RNFetchBlobFS.fileStreams.put(streamId, this); this.writeStreamInstance = fs; callback.invoke(null, streamId); } catch(Exception err) { callback.invoke("write stream error: failed to create write stream at path `"+path+"` "+ err.getLocalizedMessage()); } } /** * Write a chunk of data into a file stream. 
* @param streamId File stream ID * @param data Data chunk in string format * @param callback JS context callback */ static void writeChunk(String streamId, String data, Callback callback) { RNFetchBlobFS fs = fileStreams.get(streamId); OutputStream stream = fs.writeStreamInstance; byte [] chunk = RNFetchBlobFS.stringToBytes(data, fs.encoding); try { stream.write(chunk); callback.invoke(); } catch (Exception e) { callback.invoke(e.getLocalizedMessage()); } } /** * Write data using ascii array * @param streamId File stream ID * @param data Data chunk in ascii array format * @param callback JS context callback */ static void writeArrayChunk(String streamId, ReadableArray data, Callback callback) { try { RNFetchBlobFS fs = fileStreams.get(streamId); OutputStream stream = fs.writeStreamInstance; byte [] chunk = new byte[data.size()]; for(int i =0; i< data.size();i++) { chunk[i] = (byte) data.getInt(i); } stream.write(chunk); callback.invoke(); } catch (Exception e) { callback.invoke(e.getLocalizedMessage()); } } /** * Close file write stream by ID * @param streamId Stream ID * @param callback JS context callback */ static void closeStream(String streamId, Callback callback) { try { RNFetchBlobFS fs = fileStreams.get(streamId); OutputStream stream = fs.writeStreamInstance; fileStreams.remove(streamId); stream.close(); callback.invoke(); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } /** * Unlink file at path * @param path Path of target * @param callback JS context callback */ static void unlink(String path, Callback callback) { try { RNFetchBlobFS.deleteRecursive(new File(path)); callback.invoke(null, true); } catch(Exception err) { if(err != null) callback.invoke(err.getLocalizedMessage(), false); } } static void deleteRecursive(File fileOrDirectory) { if (fileOrDirectory.isDirectory()) { for (File child : fileOrDirectory.listFiles()) { deleteRecursive(child); } } fileOrDirectory.delete(); } /** * Make a folder * @param path Source path * @param callback JS context callback */ static void mkdir(String path, Callback callback) { File dest = new File(path); if(dest.exists()) { callback.invoke("mkdir error: failed to create folder at `" + path + "` folder already exists"); return; } dest.mkdirs(); callback.invoke(); } /** * Copy file to destination path * @param path Source path * @param dest Target path * @param callback JS context callback */ static void cp(String path, String dest, Callback callback) { path = normalizePath(path); InputStream in = null; OutputStream out = null; try { if(!isPathExists(path)) { callback.invoke("cp error: source file at path`" + path + "` not exists"); return; } if(!new File(dest).exists()) new File(dest).createNewFile(); in = inputStreamFromPath(path); out = new FileOutputStream(dest); byte[] buf = new byte[10240]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } } catch (Exception err) { callback.invoke(err.getLocalizedMessage()); } finally { try { if (in != null) { in.close(); } if (out != null) { out.close(); } callback.invoke(); } catch (Exception e) { callback.invoke(e.getLocalizedMessage()); } } } /** * Move file * @param path Source file path * @param dest Destination file path * @param callback JS context callback */ static void mv(String path, String dest, Callback callback) { File src = new File(path); if(!src.exists()) { callback.invoke("mv error: source file at path `" + path + "` does not exists"); return; } src.renameTo(new File(dest)); callback.invoke(); } /** * Check if the path exists, also check if 
it is a folder when exists. * @param path Path to check * @param callback JS context callback */ static void exists(String path, Callback callback) { if(isAsset(path)) { try { String filename = path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, ""); AssetFileDescriptor fd = RNFetchBlob.RCTContext.getAssets().openFd(filename); callback.invoke(true, false); } catch (IOException e) { callback.invoke(false, false); } } else { path = normalizePath(path); boolean exist = new File(path).exists(); boolean isDir = new File(path).isDirectory(); callback.invoke(exist, isDir); } } /** * List content of folder * @param path Target folder * @param callback JS context callback */ static void ls(String path, Callback callback) { path = normalizePath(path); File src = new File(path); if (!src.exists() || !src.isDirectory()) { callback.invoke("ls error: failed to list path `" + path + "` for it is not exist or it is not a folder"); return; } String[] files = new File(path).list(); WritableArray arg = Arguments.createArray(); for (String i : files) { arg.pushString(i); } callback.invoke(null, arg); } /** * Create a file by slicing given file path * @param src Source file path * @param dest Destination of created file * @param start Start byte offset in source file * @param end End byte offset * @param encode NOT IMPLEMENTED */ public static void slice(String src, String dest, int start, int end, String encode, Promise promise) { try { src = normalizePath(src); File source = new File(src); if(!source.exists()) { promise.reject("RNFetchBlob.slice error", "source file : " + src + " not exists"); return; } long size = source.length(); long max = Math.min(size, end); long expected = max - start; long now = 0; FileInputStream in = new FileInputStream(new File(src)); FileOutputStream out = new FileOutputStream(new File(dest)); in.skip(start); byte [] buffer = new byte[10240]; while(now < expected) { long read = in.read(buffer, 0, 10240); long remain = expected - now; if(read <= 0) { break; } out.write(buffer, 0, (int) Math.min(remain, read)); now += read; } in.close(); out.flush(); out.close(); promise.resolve(dest); } catch (Exception e) { e.printStackTrace(); promise.reject(e.getLocalizedMessage()); } } static void lstat(String path, final Callback callback) { path = normalizePath(path); new AsyncTask<String, Integer, Integer>() { @Override protected Integer doInBackground(String ...args) { WritableArray res = Arguments.createArray(); if(args[0] == null) { callback.invoke("lstat error: the path specified for lstat is either `null` or `undefined`."); return 0; } File src = new File(args[0]); if(!src.exists()) { callback.invoke("lstat error: failed to list path `" + args[0] + "` for it is not exist or it is not a folder"); return 0; } if(src.isDirectory()) { String [] files = src.list(); for(String p : files) { res.pushMap(statFile ( src.getPath() + "/" + p)); } } else { res.pushMap(statFile(src.getAbsolutePath())); } callback.invoke(null, res); return 0; } }.execute(path); } /** * show status of a file or directory * @param path * @param callback */ static void stat(String path, Callback callback) { try { path = normalizePath(path); WritableMap result = statFile(path); if(result == null) callback.invoke("stat error: failed to list path `" + path + "` for it is not exist or it is not a folder", null); else callback.invoke(null, result); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } /** * Basic stat method * @param path * @return Stat result of a file or path */ static WritableMap 
statFile(String path) { try { path = normalizePath(path); WritableMap stat = Arguments.createMap(); if(isAsset(path)) { String name = path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, ""); AssetFileDescriptor fd = RNFetchBlob.RCTContext.getAssets().openFd(name); stat.putString("filename", name); stat.putString("path", path); stat.putString("type", "asset"); stat.putString("size", String.valueOf(fd.getLength())); stat.putInt("lastModified", 0); } else { File target = new File(path); if (!target.exists()) { return null; } stat.putString("filename", target.getName()); stat.putString("path", target.getPath()); stat.putString("type", target.isDirectory() ? "directory" : "file"); stat.putString("size", String.valueOf(target.length())); String lastModified = String.valueOf(target.lastModified()); stat.putString("lastModified", lastModified); } return stat; } catch(Exception err) { return null; } } /** * Media scanner scan file * @param path * @param mimes * @param callback */ void scanFile(String [] path, String[] mimes, final Callback callback) { try { MediaScannerConnection.scanFile(mCtx, path, mimes, new MediaScannerConnection.OnScanCompletedListener() { @Override public void onScanCompleted(String s, Uri uri) { callback.invoke(null, true); } }); } catch(Exception err) { callback.invoke(err.getLocalizedMessage(), null); } } /** * Create new file at path * @param path The destination path of the new file. * @param data Initial data of the new file. * @param encoding Encoding of initial data. * @param callback RCT bridge callback. */ static void createFile(String path, String data, String encoding, Callback callback) { try { File dest = new File(path); boolean created = dest.createNewFile(); if(encoding.equals(RNFetchBlobConst.DATA_ENCODE_URI)) { String orgPath = data.replace(RNFetchBlobConst.FILE_PREFIX, ""); File src = new File(orgPath); if(!src.exists()) { callback.invoke("RNfetchBlob writeFileError", "source file : " + data + "not exists"); return ; } FileInputStream fin = new FileInputStream(src); OutputStream ostream = new FileOutputStream(dest); byte [] buffer = new byte [10240]; int read = fin.read(buffer); while(read > 0) { ostream.write(buffer, 0, read); read = fin.read(buffer); } fin.close(); ostream.close(); } else { if (!created) { callback.invoke("create file error: failed to create file at path `" + path + "` for its parent path may not exists, or the file already exists. If you intended to overwrite the existing file use fs.writeFile instead."); return; } OutputStream ostream = new FileOutputStream(dest); ostream.write(RNFetchBlobFS.stringToBytes(data, encoding)); } callback.invoke(null, path); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } /** * Create file for ASCII encoding * @param path Path of new file. 
* @param data Content of new file * @param callback JS context callback */ static void createFileASCII(String path, ReadableArray data, Callback callback) { try { File dest = new File(path); if(dest.exists()) { callback.invoke("create file error: failed to create file at path `" + path + "`, file already exists."); return; } boolean created = dest.createNewFile(); if(!created) { callback.invoke("create file error: failed to create file at path `" + path + "` for its parent path may not exists"); return; } OutputStream ostream = new FileOutputStream(dest); byte [] chunk = new byte[data.size()]; for(int i =0; i<data.size();i++) { chunk[i] = (byte) data.getInt(i); } ostream.write(chunk); chunk = null; callback.invoke(null, path); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } static void df(Callback callback) { StatFs stat = new StatFs(Environment.getDataDirectory().getPath()); WritableMap args = Arguments.createMap(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) { args.putString("internal_free", String.valueOf(stat.getFreeBytes())); args.putString("internal_total", String.valueOf(stat.getTotalBytes())); StatFs statEx = new StatFs(Environment.getExternalStorageDirectory().getPath()); args.putString("external_free", String.valueOf(statEx.getFreeBytes())); args.putString("external_total", String.valueOf(statEx.getTotalBytes())); } callback.invoke(null ,args); } /** * Remove files in session. * @param paths An array of file paths. * @param callback JS contest callback */ static void removeSession(ReadableArray paths, final Callback callback) { AsyncTask<ReadableArray, Integer, Integer> task = new AsyncTask<ReadableArray, Integer, Integer>() { @Override protected Integer doInBackground(ReadableArray ...paths) { try { for (int i = 0; i < paths[0].size(); i++) { File f = new File(paths[0].getString(i)); if (f.exists()) f.delete(); } callback.invoke(null, true); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } return paths[0].size(); } }; task.execute(paths); } /** * String to byte converter method * @param data Raw data in string format * @param encoding Decoder name * @return Converted data byte array */ private static byte[] stringToBytes(String data, String encoding) { if(encoding.equalsIgnoreCase("ascii")) { return data.getBytes(Charset.forName("US-ASCII")); } else if(encoding.toLowerCase().contains("base64")) { return Base64.decode(data, Base64.NO_WRAP); } else if(encoding.equalsIgnoreCase("utf8")) { return data.getBytes(Charset.forName("UTF-8")); } return data.getBytes(Charset.forName("US-ASCII")); } /** * Private method for emit read stream event. * @param streamName ID of the read stream * @param event Event name, `data`, `end`, `error`, etc. * @param data Event data */ private void emitStreamEvent(String streamName, String event, String data) { WritableMap eventData = Arguments.createMap(); eventData.putString("event", event); eventData.putString("detail", data); this.emitter.emit(streamName, eventData); } private void emitStreamEvent(String streamName, String event, WritableArray data) { WritableMap eventData = Arguments.createMap(); eventData.putString("event", event); eventData.putArray("detail", data); this.emitter.emit(streamName, eventData); } // TODO : should we remove this ? 
void emitFSData(String taskId, String event, String data) { WritableMap eventData = Arguments.createMap(); eventData.putString("event", event); eventData.putString("detail", data); this.emitter.emit("RNFetchBlobStream" + taskId, eventData); } /** * Get input stream of the given path, when the path is a string starts with bundle-assets:// * the stream is created by Assets Manager, otherwise use FileInputStream. * @param path The file to open stream * @return InputStream instance * @throws IOException */ static InputStream inputStreamFromPath(String path) throws IOException { if (path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { return RNFetchBlob.RCTContext.getAssets().open(path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, "")); } return new FileInputStream(new File(path)); } /** * Check if the asset or the file exists * @param path A file path URI string * @return A boolean value represents if the path exists. */ static boolean isPathExists(String path) { if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { try { RNFetchBlob.RCTContext.getAssets().open(path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, "")); } catch (IOException e) { return false; } return true; } else { return new File(path).exists(); } } static boolean isAsset(String path) { if(path != null) return path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET); return false; } /** * Normalize the path, remove URI scheme (xxx://) so that we can handle it. * @param path URI string. * @return Normalized string */ static String normalizePath(String path) { if(path == null) return null; if(!path.matches("\\w+\\:.*")) return path; if(path.startsWith("file://")) { return path.replace("file://", ""); } Uri uri = Uri.parse(path); if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { return path; } else return PathResolver.getRealPathFromURI(RNFetchBlob.RCTContext, uri); } }
android/src/main/java/com/RNFetchBlob/RNFetchBlobFS.java
package com.RNFetchBlob; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.res.AssetFileDescriptor; import android.media.MediaScannerConnection; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Environment; import android.os.StatFs; import android.os.SystemClock; import android.util.Base64; import com.RNFetchBlob.Utils.PathResolver; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.Promise; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.WritableArray; import com.facebook.react.bridge.WritableMap; import com.facebook.react.modules.core.DeviceEventManagerModule; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.charset.CharsetEncoder; import java.util.HashMap; import java.util.Map; import java.util.UUID; public class RNFetchBlobFS { ReactApplicationContext mCtx; DeviceEventManagerModule.RCTDeviceEventEmitter emitter; String encoding = "base64"; boolean append = false; OutputStream writeStreamInstance = null; static HashMap<String, RNFetchBlobFS> fileStreams = new HashMap<>(); RNFetchBlobFS(ReactApplicationContext ctx) { this.mCtx = ctx; this.emitter = ctx.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class); } static String getExternalFilePath(ReactApplicationContext ctx, String taskId, RNFetchBlobConfig config) { if(config.path != null) return config.path; else if(config.fileCache && config.appendExt != null) return RNFetchBlobFS.getTmpPath(ctx, taskId) + "." + config.appendExt; else return RNFetchBlobFS.getTmpPath(ctx, taskId); } /** * Write string with encoding to file * @param path Destination file path. * @param encoding Encoding of the string. * @param data Array passed from JS context. * @param promise RCT Promise */ static public void writeFile(String path, String encoding, String data, final boolean append, final Promise promise) { try { int written = 0; File f = new File(path); File dir = f.getParentFile(); if(!dir.exists()) dir.mkdirs(); FileOutputStream fout = new FileOutputStream(f, append); // write data from a file if(encoding.equalsIgnoreCase(RNFetchBlobConst.DATA_ENCODE_URI)) { data = normalizePath(data); File src = new File(data); if(!src.exists()) { promise.reject("RNfetchBlob writeFileError", "source file : " + data + "not exists"); fout.close(); return ; } FileInputStream fin = new FileInputStream(src); byte [] buffer = new byte [10240]; int read; written = 0; while((read = fin.read(buffer)) > 0) { fout.write(buffer, 0, read); written += read; } fin.close(); } else { byte[] bytes = stringToBytes(data, encoding); fout.write(bytes); written = bytes.length; } fout.close(); promise.resolve(written); } catch (Exception e) { promise.reject("RNFetchBlob writeFileError", e.getLocalizedMessage()); } } /** * Write array of bytes into file * @param path Destination file path. * @param data Array passed from JS context. 
* @param promise RCT Promise */ static public void writeFile(String path, ReadableArray data, final boolean append, final Promise promise) { try { File f = new File(path); File dir = f.getParentFile(); if(!dir.exists()) dir.mkdirs(); FileOutputStream os = new FileOutputStream(f, append); byte [] bytes = new byte[data.size()]; for(int i=0;i<data.size();i++) { bytes[i] = (byte) data.getInt(i); } os.write(bytes); os.close(); promise.resolve(data.size()); } catch (Exception e) { promise.reject("RNFetchBlob writeFileError", e.getLocalizedMessage()); } } /** * Read file with a buffer that has the same size as the target file. * @param path Path of the file. * @param encoding Encoding of read stream. * @param promise */ static public void readFile(String path, String encoding, final Promise promise ) { path = normalizePath(path); try { byte[] bytes; if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { String assetName = path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, ""); long length = RNFetchBlob.RCTContext.getAssets().openFd(assetName).getLength(); bytes = new byte[(int) length]; InputStream in = RNFetchBlob.RCTContext.getAssets().open(assetName); in.read(bytes, 0, (int) length); in.close(); } else { File f = new File(path); int length = (int) f.length(); bytes = new byte[length]; FileInputStream in = new FileInputStream(f); in.read(bytes); in.close(); } switch (encoding.toLowerCase()) { case "base64" : promise.resolve(Base64.encodeToString(bytes, Base64.NO_WRAP)); break; case "ascii" : WritableArray asciiResult = Arguments.createArray(); for(byte b : bytes) { asciiResult.pushInt((int)b); } promise.resolve(asciiResult); break; case "utf8" : promise.resolve(new String(bytes)); break; default: promise.resolve(new String(bytes)); break; } } catch(Exception err) { promise.reject("ReadFile Error", err.getLocalizedMessage()); } } /** * Static method that returns system folders to JS context * @param ctx React Native application context */ static public Map<String, Object> getSystemfolders(ReactApplicationContext ctx) { Map<String, Object> res = new HashMap<>(); res.put("DocumentDir", ctx.getFilesDir().getAbsolutePath()); res.put("CacheDir", ctx.getCacheDir().getAbsolutePath()); res.put("DCIMDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).getAbsolutePath()); res.put("PictureDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES).getAbsolutePath()); res.put("MusicDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC).getAbsolutePath()); res.put("DownloadDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).getAbsolutePath()); res.put("MovieDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsolutePath()); res.put("RingtoneDir", Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_RINGTONES).getAbsolutePath()); String state; state = Environment.getExternalStorageState(); if (state.equals(Environment.MEDIA_MOUNTED)) { res.put("SDCardDir", Environment.getExternalStorageDirectory().getAbsolutePath()); res.put("SDCardApplicationDir", ctx.getExternalFilesDir(null).getParentFile().getAbsolutePath()); } res.put("MainBundleDir", ctx.getApplicationInfo().dataDir); return res; } /** * Static method that returns a temp file path * @param ctx React Native application context * @param taskId An unique string for identify * @return */ static public String getTmpPath(ReactApplicationContext ctx, String taskId) { return 
RNFetchBlob.RCTContext.getFilesDir() + "/RNFetchBlobTmp_" + taskId; } /** * Create a file stream for read * @param path File stream target path * @param encoding File stream decoder, should be one of `base64`, `utf8`, `ascii` * @param bufferSize Buffer size of read stream, default to 4096 (4095 when encode is `base64`) */ public void readStream(String path, String encoding, int bufferSize, int tick, final String streamId) { path = normalizePath(path); try { int chunkSize = encoding.equalsIgnoreCase("base64") ? 4095 : 4096; if(bufferSize > 0) chunkSize = bufferSize; InputStream fs; if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { fs = RNFetchBlob.RCTContext.getAssets() .open(path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, "")); } else { fs = new FileInputStream(new File(path)); } byte[] buffer = new byte[chunkSize]; int cursor = 0; boolean error = false; if (encoding.equalsIgnoreCase("utf8")) { CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder(); while ((cursor = fs.read(buffer)) != -1) { encoder.encode(ByteBuffer.wrap(buffer).asCharBuffer()); String chunk = new String(buffer); if(cursor != bufferSize) { chunk = chunk.substring(0, cursor); } emitStreamEvent(streamId, "data", chunk); if(tick > 0) SystemClock.sleep(tick); } } else if (encoding.equalsIgnoreCase("ascii")) { while ((cursor = fs.read(buffer)) != -1) { WritableArray chunk = Arguments.createArray(); for(int i =0;i<cursor;i++) { chunk.pushInt((int)buffer[i]); } emitStreamEvent(streamId, "data", chunk); if(tick > 0) SystemClock.sleep(tick); } } else if (encoding.equalsIgnoreCase("base64")) { while ((cursor = fs.read(buffer)) != -1) { if(cursor < chunkSize) { byte [] copy = new byte[cursor]; for(int i =0;i<cursor;i++) { copy[i] = buffer[i]; } emitStreamEvent(streamId, "data", Base64.encodeToString(copy, Base64.NO_WRAP)); } else emitStreamEvent(streamId, "data", Base64.encodeToString(buffer, Base64.NO_WRAP)); if(tick > 0) SystemClock.sleep(tick); } } else { String msg = "unrecognized encoding `" + encoding + "`"; emitStreamEvent(streamId, "error", msg); error = true; } if(!error) emitStreamEvent(streamId, "end", ""); fs.close(); buffer = null; } catch (Exception err) { emitStreamEvent(streamId, "warn", "Failed to convert data to "+encoding+" encoded string, this might due to the source data is not able to convert using this encoding."); err.printStackTrace(); } } /** * Create a write stream and store its instance in RNFetchBlobFS.fileStreams * @param path Target file path * @param encoding Should be one of `base64`, `utf8`, `ascii` * @param append Flag represents if the file stream overwrite existing content * @param callback */ public void writeStream(String path, String encoding, boolean append, Callback callback) { File dest = new File(path); if(!dest.exists() || dest.isDirectory()) { callback.invoke("write stream error: target path `" + path + "` may not exists or it's a folder"); return; } try { OutputStream fs = new FileOutputStream(path, append); this.encoding = encoding; this.append = append; String streamId = UUID.randomUUID().toString(); RNFetchBlobFS.fileStreams.put(streamId, this); this.writeStreamInstance = fs; callback.invoke(null, streamId); } catch(Exception err) { callback.invoke("write stream error: failed to create write stream at path `"+path+"` "+ err.getLocalizedMessage()); } } /** * Write a chunk of data into a file stream. 
* @param streamId File stream ID * @param data Data chunk in string format * @param callback JS context callback */ static void writeChunk(String streamId, String data, Callback callback) { RNFetchBlobFS fs = fileStreams.get(streamId); OutputStream stream = fs.writeStreamInstance; byte [] chunk = RNFetchBlobFS.stringToBytes(data, fs.encoding); try { stream.write(chunk); callback.invoke(); } catch (Exception e) { callback.invoke(e.getLocalizedMessage()); } } /** * Write data using ascii array * @param streamId File stream ID * @param data Data chunk in ascii array format * @param callback JS context callback */ static void writeArrayChunk(String streamId, ReadableArray data, Callback callback) { try { RNFetchBlobFS fs = fileStreams.get(streamId); OutputStream stream = fs.writeStreamInstance; byte [] chunk = new byte[data.size()]; for(int i =0; i< data.size();i++) { chunk[i] = (byte) data.getInt(i); } stream.write(chunk); callback.invoke(); } catch (Exception e) { callback.invoke(e.getLocalizedMessage()); } } /** * Close file write stream by ID * @param streamId Stream ID * @param callback JS context callback */ static void closeStream(String streamId, Callback callback) { try { RNFetchBlobFS fs = fileStreams.get(streamId); OutputStream stream = fs.writeStreamInstance; fileStreams.remove(streamId); stream.close(); callback.invoke(); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } /** * Unlink file at path * @param path Path of target * @param callback JS context callback */ static void unlink(String path, Callback callback) { try { RNFetchBlobFS.deleteRecursive(new File(path)); callback.invoke(null, true); } catch(Exception err) { if(err != null) callback.invoke(err.getLocalizedMessage(), false); } } static void deleteRecursive(File fileOrDirectory) { if (fileOrDirectory.isDirectory()) { for (File child : fileOrDirectory.listFiles()) { deleteRecursive(child); } } fileOrDirectory.delete(); } /** * Make a folder * @param path Source path * @param callback JS context callback */ static void mkdir(String path, Callback callback) { File dest = new File(path); if(dest.exists()) { callback.invoke("mkdir error: failed to create folder at `" + path + "` folder already exists"); return; } dest.mkdirs(); callback.invoke(); } /** * Copy file to destination path * @param path Source path * @param dest Target path * @param callback JS context callback */ static void cp(String path, String dest, Callback callback) { path = normalizePath(path); InputStream in = null; OutputStream out = null; try { if(!isPathExists(path)) { callback.invoke("cp error: source file at path`" + path + "` not exists"); return; } if(!new File(dest).exists()) new File(dest).createNewFile(); in = inputStreamFromPath(path); out = new FileOutputStream(dest); byte[] buf = new byte[10240]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } } catch (Exception err) { callback.invoke(err.getLocalizedMessage()); } finally { try { if (in != null) { in.close(); } if (out != null) { out.close(); } callback.invoke(); } catch (Exception e) { callback.invoke(e.getLocalizedMessage()); } } } /** * Move file * @param path Source file path * @param dest Destination file path * @param callback JS context callback */ static void mv(String path, String dest, Callback callback) { File src = new File(path); if(!src.exists()) { callback.invoke("mv error: source file at path `" + path + "` does not exists"); return; } src.renameTo(new File(dest)); callback.invoke(); } /** * Check if the path exists, also check if 
it is a folder when exists. * @param path Path to check * @param callback JS context callback */ static void exists(String path, Callback callback) { if(isAsset(path)) { try { String filename = path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, ""); AssetFileDescriptor fd = RNFetchBlob.RCTContext.getAssets().openFd(filename); callback.invoke(true, false); } catch (IOException e) { callback.invoke(false, false); } } else { path = normalizePath(path); boolean exist = new File(path).exists(); boolean isDir = new File(path).isDirectory(); callback.invoke(exist, isDir); } } /** * List content of folder * @param path Target folder * @param callback JS context callback */ static void ls(String path, Callback callback) { path = normalizePath(path); File src = new File(path); if (!src.exists() || !src.isDirectory()) { callback.invoke("ls error: failed to list path `" + path + "` for it is not exist or it is not a folder"); return; } String[] files = new File(path).list(); WritableArray arg = Arguments.createArray(); for (String i : files) { arg.pushString(i); } callback.invoke(null, arg); } /** * Create a file by slicing given file path * @param src Source file path * @param dest Destination of created file * @param start Start byte offset in source file * @param end End byte offset * @param encode NOT IMPLEMENTED */ public static void slice(String src, String dest, int start, int end, String encode, Promise promise) { try { src = normalizePath(src); File source = new File(src); if(!source.exists()) { promise.reject("RNFetchBlob.slice error", "source file : " + src + " not exists"); return; } long size = source.length(); long max = Math.min(size, end); long expected = max - start; long now = 0; FileInputStream in = new FileInputStream(new File(src)); FileOutputStream out = new FileOutputStream(new File(dest)); in.skip(start); byte [] buffer = new byte[10240]; while(now < expected) { long read = in.read(buffer, 0, 10240); long remain = expected - now; if(read <= 0) { break; } out.write(buffer, 0, (int) Math.min(remain, read)); now += read; } in.close(); out.flush(); out.close(); promise.resolve(dest); } catch (Exception e) { e.printStackTrace(); promise.reject(e.getLocalizedMessage()); } } static void lstat(String path, final Callback callback) { path = normalizePath(path); new AsyncTask<String, Integer, Integer>() { @Override protected Integer doInBackground(String ...args) { WritableArray res = Arguments.createArray(); if(args[0] == null) { callback.invoke("lstat error: the path specified for lstat is either `null` or `undefined`."); return 0; } File src = new File(args[0]); if(!src.exists()) { callback.invoke("lstat error: failed to list path `" + args[0] + "` for it is not exist or it is not a folder"); return 0; } if(src.isDirectory()) { String [] files = src.list(); for(String p : files) { res.pushMap(statFile ( src.getPath() + "/" + p)); } } else { res.pushMap(statFile(src.getAbsolutePath())); } callback.invoke(null, res); return 0; } }.execute(path); } /** * show status of a file or directory * @param path * @param callback */ static void stat(String path, Callback callback) { try { path = normalizePath(path); WritableMap result = statFile(path); if(result == null) callback.invoke("stat error: failed to list path `" + path + "` for it is not exist or it is not a folder", null); else callback.invoke(null, result); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } /** * Basic stat method * @param path * @return Stat result of a file or path */ static WritableMap 
statFile(String path) { try { path = normalizePath(path); WritableMap stat = Arguments.createMap(); if(isAsset(path)) { String name = path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, ""); AssetFileDescriptor fd = RNFetchBlob.RCTContext.getAssets().openFd(name); stat.putString("filename", name); stat.putString("path", path); stat.putString("type", "asset"); stat.putString("size", String.valueOf(fd.getLength())); stat.putInt("lastModified", 0); } else { File target = new File(path); if (!target.exists()) { return null; } stat.putString("filename", target.getName()); stat.putString("path", target.getPath()); stat.putString("type", target.isDirectory() ? "directory" : "file"); stat.putString("size", String.valueOf(target.length())); String lastModified = String.valueOf(target.lastModified()); stat.putString("lastModified", lastModified); } return stat; } catch(Exception err) { return null; } } /** * Media scanner scan file * @param path * @param mimes * @param callback */ void scanFile(String [] path, String[] mimes, final Callback callback) { try { MediaScannerConnection.scanFile(mCtx, path, mimes, new MediaScannerConnection.OnScanCompletedListener() { @Override public void onScanCompleted(String s, Uri uri) { callback.invoke(null, true); } }); } catch(Exception err) { callback.invoke(err.getLocalizedMessage(), null); } } /** * Create new file at path * @param path The destination path of the new file. * @param data Initial data of the new file. * @param encoding Encoding of initial data. * @param callback RCT bridge callback. */ static void createFile(String path, String data, String encoding, Callback callback) { try { File dest = new File(path); boolean created = dest.createNewFile(); if(encoding.equals(RNFetchBlobConst.DATA_ENCODE_URI)) { String orgPath = data.replace(RNFetchBlobConst.FILE_PREFIX, ""); File src = new File(orgPath); if(!src.exists()) { callback.invoke("RNfetchBlob writeFileError", "source file : " + data + "not exists"); return ; } FileInputStream fin = new FileInputStream(src); OutputStream ostream = new FileOutputStream(dest); byte [] buffer = new byte [10240]; int read = fin.read(buffer); while(read > 0) { ostream.write(buffer, 0, read); read = fin.read(buffer); } fin.close(); ostream.close(); } else { if (!created) { callback.invoke("create file error: failed to create file at path `" + path + "` for its parent path may not exists, or the file already exists. If you intended to overwrite the existing file use fs.writeFile instead."); return; } OutputStream ostream = new FileOutputStream(dest); ostream.write(RNFetchBlobFS.stringToBytes(data, encoding)); } callback.invoke(null, path); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } /** * Create file for ASCII encoding * @param path Path of new file. 
* @param data Content of new file * @param callback JS context callback */ static void createFileASCII(String path, ReadableArray data, Callback callback) { try { File dest = new File(path); if(dest.exists()) { callback.invoke("create file error: failed to create file at path `" + path + "`, file already exists."); return; } boolean created = dest.createNewFile(); if(!created) { callback.invoke("create file error: failed to create file at path `" + path + "` for its parent path may not exists"); return; } OutputStream ostream = new FileOutputStream(dest); byte [] chunk = new byte[data.size()]; for(int i =0; i<data.size();i++) { chunk[i] = (byte) data.getInt(i); } ostream.write(chunk); chunk = null; callback.invoke(null, path); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } } static void df(Callback callback) { StatFs stat = new StatFs(Environment.getDataDirectory().getPath()); WritableMap args = Arguments.createMap(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) { args.putString("internal_free", String.valueOf(stat.getFreeBytes())); args.putString("internal_total", String.valueOf(stat.getTotalBytes())); StatFs statEx = new StatFs(Environment.getExternalStorageDirectory().getPath()); args.putString("external_free", String.valueOf(statEx.getFreeBytes())); args.putString("external_total", String.valueOf(statEx.getTotalBytes())); } callback.invoke(null ,args); } /** * Remove files in session. * @param paths An array of file paths. * @param callback JS contest callback */ static void removeSession(ReadableArray paths, final Callback callback) { AsyncTask<ReadableArray, Integer, Integer> task = new AsyncTask<ReadableArray, Integer, Integer>() { @Override protected Integer doInBackground(ReadableArray ...paths) { try { for (int i = 0; i < paths[0].size(); i++) { File f = new File(paths[0].getString(i)); if (f.exists()) f.delete(); } callback.invoke(null, true); } catch(Exception err) { callback.invoke(err.getLocalizedMessage()); } return paths[0].size(); } }; task.execute(paths); } /** * String to byte converter method * @param data Raw data in string format * @param encoding Decoder name * @return Converted data byte array */ private static byte[] stringToBytes(String data, String encoding) { if(encoding.equalsIgnoreCase("ascii")) { return data.getBytes(Charset.forName("US-ASCII")); } else if(encoding.toLowerCase().contains("base64")) { return Base64.decode(data, Base64.NO_WRAP); } else if(encoding.equalsIgnoreCase("utf8")) { return data.getBytes(Charset.forName("UTF-8")); } return data.getBytes(Charset.forName("US-ASCII")); } /** * Private method for emit read stream event. * @param streamName ID of the read stream * @param event Event name, `data`, `end`, `error`, etc. * @param data Event data */ private void emitStreamEvent(String streamName, String event, String data) { WritableMap eventData = Arguments.createMap(); eventData.putString("event", event); eventData.putString("detail", data); this.emitter.emit(streamName, eventData); } private void emitStreamEvent(String streamName, String event, WritableArray data) { WritableMap eventData = Arguments.createMap(); eventData.putString("event", event); eventData.putArray("detail", data); this.emitter.emit(streamName, eventData); } // TODO : should we remove this ? 
void emitFSData(String taskId, String event, String data) { WritableMap eventData = Arguments.createMap(); eventData.putString("event", event); eventData.putString("detail", data); this.emitter.emit("RNFetchBlobStream" + taskId, eventData); } /** * Get input stream of the given path, when the path is a string starts with bundle-assets:// * the stream is created by Assets Manager, otherwise use FileInputStream. * @param path The file to open stream * @return InputStream instance * @throws IOException */ static InputStream inputStreamFromPath(String path) throws IOException { if (path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { return RNFetchBlob.RCTContext.getAssets().open(path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, "")); } return new FileInputStream(new File(path)); } /** * Check if the asset or the file exists * @param path A file path URI string * @return A boolean value represents if the path exists. */ static boolean isPathExists(String path) { if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { try { RNFetchBlob.RCTContext.getAssets().open(path.replace(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET, "")); } catch (IOException e) { return false; } return true; } else { return new File(path).exists(); } } static boolean isAsset(String path) { if(path != null) return path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET); return false; } /** * Normalize the path, remove URI scheme (xxx://) so that we can handle it. * @param path URI string. * @return Normalized string */ static String normalizePath(String path) { if(path == null) return null; if(!path.matches("\\w+\\:.*")) return path; if(path.startsWith("file://")) { return path.replace("file://", ""); } Uri uri = Uri.parse(path); if(path.startsWith(RNFetchBlobConst.FILE_PREFIX_BUNDLE_ASSET)) { return path; } else return PathResolver.getRealPathFromURI(RNFetchBlob.RCTContext, uri); } }
Fix incorrect UTF-8 data conversion issue when the data contains special characters.
android/src/main/java/com/RNFetchBlob/RNFetchBlobFS.java
Fix incorrect UTF-8 data conversion issue when the data contains special characters.
<ide><path>ndroid/src/main/java/com/RNFetchBlob/RNFetchBlobFS.java <ide> CharsetEncoder encoder = Charset.forName("UTF-8").newEncoder(); <ide> while ((cursor = fs.read(buffer)) != -1) { <ide> encoder.encode(ByteBuffer.wrap(buffer).asCharBuffer()); <del> String chunk = new String(buffer); <del> if(cursor != bufferSize) { <del> chunk = chunk.substring(0, cursor); <del> } <add> String chunk = new String(buffer, 0, cursor); <ide> emitStreamEvent(streamId, "data", chunk); <ide> if(tick > 0) <ide> SystemClock.sleep(tick);
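The git diff above replaces new String(buffer) plus a conditional substring with new String(buffer, 0, cursor), so only the bytes actually returned by read() are decoded instead of the whole buffer, which can still hold trailing bytes left over from the previous read. The stand-alone Java sketch below is an illustration only (it is not part of the react-native-fetch-blob sources); it contrasts the removed and the fixed decoding of short chunks. A chunk boundary that falls inside a multi-byte UTF-8 character can still split that character, which this change does not address.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

// Illustration of the readStream fix: decode only the bytes returned by read().
public class ChunkDecodeDemo {

    // Decodes the stream chunk by chunk, the way the fixed code does.
    static String readFixed(InputStream in, int chunkSize) throws IOException {
        byte[] buffer = new byte[chunkSize];
        StringBuilder out = new StringBuilder();
        int cursor;
        while ((cursor = in.read(buffer)) != -1) {
            // Only the first `cursor` bytes of the buffer are valid for this chunk.
            out.append(new String(buffer, 0, cursor, StandardCharsets.UTF_8));
        }
        return out.toString();
    }

    // Decodes the whole buffer every time, the way the removed code did.
    static String readBuggy(InputStream in, int chunkSize) throws IOException {
        byte[] buffer = new byte[chunkSize];
        StringBuilder out = new StringBuilder();
        while (in.read(buffer) != -1) {
            // Stale bytes from the previous read leak into short chunks.
            out.append(new String(buffer, StandardCharsets.UTF_8));
        }
        return out.toString();
    }

    public static void main(String[] args) throws IOException {
        byte[] data = "hello world".getBytes(StandardCharsets.UTF_8); // 11 bytes
        System.out.println(readFixed(new ByteArrayInputStream(data), 8));  // hello world
        System.out.println(readBuggy(new ByteArrayInputStream(data), 8));  // hello worldlo wo
    }
}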
Java
apache-2.0
3879aae2eaa214bd1b49f7e3a770b6b425c76281
0
JetBrains/teamcity-deployer-plugin,JetBrains/teamcity-deployer-plugin
package jetbrains.buildServer.deployer.agent.smb; import com.hierynomus.msdtyp.AccessMask; import com.hierynomus.mssmb.SMB1NotSupportedException; import com.hierynomus.mssmb2.SMB2ShareAccess; import com.hierynomus.protocol.transport.TransportException; import com.hierynomus.smbj.SMBClient; import com.hierynomus.smbj.SmbConfig; import com.hierynomus.smbj.auth.AuthenticationContext; import com.hierynomus.smbj.common.SMBRuntimeException; import com.hierynomus.smbj.connection.Connection; import com.hierynomus.smbj.session.Session; import com.hierynomus.smbj.share.DiskShare; import com.hierynomus.smbj.share.Share; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.text.StringUtil; import jetbrains.buildServer.agent.BuildFinishedStatus; import jetbrains.buildServer.agent.BuildRunnerContext; import jetbrains.buildServer.agent.impl.artifacts.ArtifactsCollection; import jetbrains.buildServer.deployer.agent.SyncBuildProcessAdapter; import jetbrains.buildServer.deployer.agent.UploadInterruptedException; import jetbrains.buildServer.log.Loggers; import jetbrains.buildServer.util.FileUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.OutputStream; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import static com.hierynomus.mssmb2.SMB2CreateDisposition.FILE_OVERWRITE_IF; import static jetbrains.buildServer.deployer.agent.DeployerAgentUtils.logBuildProblem; @SuppressWarnings("unused") // used via reflection public class SMBJBuildProcessAdapter extends SyncBuildProcessAdapter { private static final Logger LOG = Logger.getInstance(SMBJBuildProcessAdapter.class.getName()); private static final int STREAM_BUFFER_SIZE = 1024 * 1024; // use 1 Mb buffer private final String myTarget; private final String myUsername; private final String myPassword; private final List<ArtifactsCollection> myArtifactsCollections; private final String myDomain; @SuppressWarnings("unused") // used via reflection public SMBJBuildProcessAdapter(@NotNull final BuildRunnerContext context, @NotNull final String username, @NotNull final String password, @Nullable final String domain, @NotNull final String target, @NotNull final List<ArtifactsCollection> artifactsCollections) { super(context.getBuild().getBuildLogger()); myTarget = target; myUsername = username; myPassword = password; myDomain = domain; myArtifactsCollections = artifactsCollections; } @Override public BuildFinishedStatus runProcess() { String target; if (myTarget.startsWith("\\\\")) { target = myTarget.substring(2); } else { target = myTarget; } target = target.replaceAll("/", java.util.regex.Matcher.quoteReplacement("\\")); if (!target.endsWith("\\")) { target = target + "\\"; } final String settingsString = "Trying to connect with following parameters:\n" + "username=[" + myUsername + "]\n" + "domain=[" + (myDomain == null ? 
"" : myDomain) + "]\n" + "target=[" + target + "]"; Loggers.AGENT.debug(settingsString); myLogger.message("Starting upload via SMBj to " + myTarget); final List<String> components = StringUtil.split(target, "\\"); final String host = components.remove(0); final String shareName = components.remove(0); final String pathInShare = StringUtil.join(components, "\\"); try { SmbConfig config = SmbConfig .builder() .withMultiProtocolNegotiate(true) .withSigningRequired(true).build(); SMBClient client = new SMBClient(config); Connection connection = client.connect(host); Session session = connection.authenticate(new AuthenticationContext(myUsername, myPassword.toCharArray(), myDomain)); Share share = session.connectShare(shareName); if (share instanceof DiskShare) { DiskShare diskShare = (DiskShare)share; for (ArtifactsCollection artifactsCollection : myArtifactsCollections) { final int numOfUploadedFiles = upload(artifactsCollection.getFilePathMap(), diskShare, pathInShare); myLogger.message("Uploaded [" + numOfUploadedFiles + "] files for [" + artifactsCollection.getSourcePath() + "] pattern"); } } else { logBuildProblem(myLogger, "Shared resource [" + shareName + "] is not a folder, can not upload files."); return BuildFinishedStatus.FINISHED_FAILED; } return BuildFinishedStatus.FINISHED_SUCCESS; } catch (TransportException e) { final String message; if (hasCauseOfType(SMB1NotSupportedException.class, e)) { message = "The remote host [" + host + "] does not support SMBv2 or support was explicitly disabled. Please, check the remote host configuration"; } else { message = e.getMessage(); } logBuildProblem(myLogger, message); LOG.warnAndDebugDetails("Error executing SMB command", e); return BuildFinishedStatus.FINISHED_FAILED; } catch (UploadInterruptedException e) { myLogger.warning("SMB upload interrupted."); return BuildFinishedStatus.FINISHED_FAILED; } catch (IOException | SMBRuntimeException e) { logBuildProblem(myLogger, e.getMessage()); LOG.warnAndDebugDetails("Error executing SMB command", e); return BuildFinishedStatus.FINISHED_FAILED; } } private boolean hasCauseOfType(@NotNull Class<? extends Throwable> exceptionClass, @NotNull Throwable e) { Throwable current = e; if (exceptionClass.isAssignableFrom(e.getClass())) { return true; } while (current != null && current.getCause() != current) { if (exceptionClass.isAssignableFrom(current.getClass())) { return true; } current = current.getCause(); } return false; } private void maybeCreate(@NotNull final DiskShare diskShare, @NotNull final String pathInShare) { String existingPrefix = FileUtil.normalizeRelativePath(pathInShare).replace('/', '\\'); final Stack<String> toCreate = new Stack<>(); while (existingPrefix.length() > 0 && !diskShare.folderExists(existingPrefix)) { final int endIndex = existingPrefix.lastIndexOf('\\'); if (endIndex > -1) { toCreate.push(existingPrefix.substring(endIndex + 1)); existingPrefix = existingPrefix.substring(0, endIndex); } else { toCreate.push(existingPrefix); existingPrefix = ""; } } while (!toCreate.empty()) { existingPrefix = (existingPrefix.length() > 0 ? 
existingPrefix + "\\" : "") + toCreate.pop(); diskShare.mkdir(existingPrefix); } } private int upload(Map<File, String> filePathMap, DiskShare share, String prefixPath) throws IOException { int count = 0; Map<File, String> fileFullPathMap = new HashMap<>(); if (prefixPath.length() > 0) { for (Map.Entry<File, String> entry : filePathMap.entrySet()) { fileFullPathMap.put(entry.getKey(), prefixPath + "\\" + entry.getValue()); } } else { fileFullPathMap.putAll(filePathMap); } for (Map.Entry<File, String> fileDestEntry : fileFullPathMap.entrySet()) { checkIsInterrupted(); final File source = fileDestEntry.getKey(); final String targetPath = fileDestEntry.getValue().replace('/', '\\'); maybeCreate(share, targetPath); final String targetName = (targetPath.length() > 0 ? targetPath + "\\" : "") + source.getName(); final com.hierynomus.smbj.share.File targetFile = share.openFile(targetName, EnumSet.of(AccessMask.GENERIC_WRITE), null, SMB2ShareAccess.ALL, FILE_OVERWRITE_IF, null); Loggers.AGENT.debug("Uploading source=[" + source.getAbsolutePath() + "] to \n" + " destFile=[" + targetName + "]"); FileInputStream inputStream = null; OutputStream outputStream = null; try { inputStream = new FileInputStream(source); outputStream = targetFile.getOutputStream(); copyInterruptibly(inputStream, outputStream); outputStream.flush(); } finally { FileUtil.close(inputStream); FileUtil.close(outputStream); targetFile.close(); } LOG.debug("Done transferring [" + source.getAbsolutePath() + "]"); count++; } return count; } private void copyInterruptibly(@NotNull FileInputStream inputStream, @NotNull OutputStream outputStream) throws IOException { byte[] buf = new byte[STREAM_BUFFER_SIZE]; int read; while ((read = inputStream.read(buf)) > -1) { checkIsInterrupted(); outputStream.write(buf, 0, read); } } }
deploy-runner-agent-smb2/src/main/java/jetbrains/buildServer/deployer/agent/smb/SMBJBuildProcessAdapter.java
package jetbrains.buildServer.deployer.agent.smb; import com.hierynomus.msdtyp.AccessMask; import com.hierynomus.mssmb.SMB1NotSupportedException; import com.hierynomus.mssmb2.SMB2ShareAccess; import com.hierynomus.protocol.transport.TransportException; import com.hierynomus.smbj.SMBClient; import com.hierynomus.smbj.SmbConfig; import com.hierynomus.smbj.auth.AuthenticationContext; import com.hierynomus.smbj.common.SMBRuntimeException; import com.hierynomus.smbj.connection.Connection; import com.hierynomus.smbj.session.Session; import com.hierynomus.smbj.share.DiskShare; import com.hierynomus.smbj.share.Share; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.text.StringUtil; import jetbrains.buildServer.agent.BuildFinishedStatus; import jetbrains.buildServer.agent.BuildRunnerContext; import jetbrains.buildServer.agent.impl.artifacts.ArtifactsCollection; import jetbrains.buildServer.deployer.agent.SyncBuildProcessAdapter; import jetbrains.buildServer.deployer.agent.UploadInterruptedException; import jetbrains.buildServer.log.Loggers; import jetbrains.buildServer.util.FileUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.OutputStream; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import static com.hierynomus.mssmb2.SMB2CreateDisposition.FILE_OVERWRITE_IF; import static jetbrains.buildServer.deployer.agent.DeployerAgentUtils.logBuildProblem; @SuppressWarnings("unused") // used via reflection public class SMBJBuildProcessAdapter extends SyncBuildProcessAdapter { private static final Logger LOG = Logger.getInstance(SMBJBuildProcessAdapter.class.getName()); private static final int STREAM_BUFFER_SIZE = 1024 * 1024; // use 1 Mb buffer private final String myTarget; private final String myUsername; private final String myPassword; private final List<ArtifactsCollection> myArtifactsCollections; private final String myDomain; @SuppressWarnings("unused") // used via reflection public SMBJBuildProcessAdapter(@NotNull final BuildRunnerContext context, @NotNull final String username, @NotNull final String password, @Nullable final String domain, @NotNull final String target, @NotNull final List<ArtifactsCollection> artifactsCollections) { super(context.getBuild().getBuildLogger()); myTarget = target; myUsername = username; myPassword = password; myDomain = domain; myArtifactsCollections = artifactsCollections; } @Override public BuildFinishedStatus runProcess() { String target; if (myTarget.startsWith("\\\\")) { target = myTarget.substring(2); } else { target = myTarget; } target = target.replaceAll("/", java.util.regex.Matcher.quoteReplacement("\\")); final String settingsString = "Trying to connect with following parameters:\n" + "username=[" + myUsername + "]\n" + "domain=[" + (myDomain == null ? 
"" : myDomain) + "]\n" + "target=[" + target + "]"; Loggers.AGENT.debug(settingsString); myLogger.message("Starting upload via SMBj to " + myTarget); final List<String> components = StringUtil.split(target, "\\"); final String host = components.remove(0); final String shareName = components.remove(0); final String pathInShare = StringUtil.join(components, "\\"); try { SmbConfig config = SmbConfig .builder() .withMultiProtocolNegotiate(true) .withSigningRequired(true).build(); SMBClient client = new SMBClient(config); Connection connection = client.connect(host); Session session = connection.authenticate(new AuthenticationContext(myUsername, myPassword.toCharArray(), myDomain)); Share share = session.connectShare(shareName); if (share instanceof DiskShare) { DiskShare diskShare = (DiskShare)share; for (ArtifactsCollection artifactsCollection : myArtifactsCollections) { final int numOfUploadedFiles = upload(artifactsCollection.getFilePathMap(), diskShare, pathInShare); myLogger.message("Uploaded [" + numOfUploadedFiles + "] files for [" + artifactsCollection.getSourcePath() + "] pattern"); } } else { logBuildProblem(myLogger, "Shared resource [" + shareName + "] is not a folder, can not upload files."); return BuildFinishedStatus.FINISHED_FAILED; } return BuildFinishedStatus.FINISHED_SUCCESS; } catch (TransportException e) { final String message; if (hasCauseOfType(SMB1NotSupportedException.class, e)) { message = "The remote host [" + host + "] does not support SMBv2 or support was explicitly disabled. Please, check the remote host configuration"; } else { message = e.getMessage(); } logBuildProblem(myLogger, message); LOG.warnAndDebugDetails("Error executing SMB command", e); return BuildFinishedStatus.FINISHED_FAILED; } catch (UploadInterruptedException e) { myLogger.warning("SMB upload interrupted."); return BuildFinishedStatus.FINISHED_FAILED; } catch (IOException | SMBRuntimeException e) { logBuildProblem(myLogger, e.getMessage()); LOG.warnAndDebugDetails("Error executing SMB command", e); return BuildFinishedStatus.FINISHED_FAILED; } } private boolean hasCauseOfType(@NotNull Class<? extends Throwable> exceptionClass, @NotNull Throwable e) { Throwable current = e; if (exceptionClass.isAssignableFrom(e.getClass())) { return true; } while (current != null && current.getCause() != current) { if (exceptionClass.isAssignableFrom(current.getClass())) { return true; } current = current.getCause(); } return false; } private void maybeCreate(@NotNull final DiskShare diskShare, @NotNull final String pathInShare) { String existingPrefix = FileUtil.normalizeRelativePath(pathInShare).replace('/', '\\'); final Stack<String> toCreate = new Stack<>(); while (existingPrefix.length() > 0 && !diskShare.folderExists(existingPrefix)) { final int endIndex = existingPrefix.lastIndexOf('\\'); if (endIndex > -1) { toCreate.push(existingPrefix.substring(endIndex + 1)); existingPrefix = existingPrefix.substring(0, endIndex); } else { toCreate.push(existingPrefix); existingPrefix = ""; } } while (!toCreate.empty()) { existingPrefix = (existingPrefix.length() > 0 ? 
existingPrefix + "\\" : "") + toCreate.pop(); diskShare.mkdir(existingPrefix); } } private int upload(Map<File, String> filePathMap, DiskShare share, String prefixPath) throws IOException { int count = 0; Map<File, String> fileFullPathMap = new HashMap<>(); if (prefixPath.length() > 0) { for (Map.Entry<File, String> entry : filePathMap.entrySet()) { fileFullPathMap.put(entry.getKey(), prefixPath + "\\" + entry.getValue()); } } else { fileFullPathMap.putAll(filePathMap); } for (Map.Entry<File, String> fileDestEntry : fileFullPathMap.entrySet()) { checkIsInterrupted(); final File source = fileDestEntry.getKey(); final String targetPath = fileDestEntry.getValue().replace('/', '\\'); maybeCreate(share, targetPath); final String targetName = (targetPath.length() > 0 ? targetPath + "\\" : "") + source.getName(); final com.hierynomus.smbj.share.File targetFile = share.openFile(targetName, EnumSet.of(AccessMask.GENERIC_WRITE), null, SMB2ShareAccess.ALL, FILE_OVERWRITE_IF, null); Loggers.AGENT.debug("Uploading source=[" + source.getAbsolutePath() + "] to \n" + " destFile=[" + targetName + "]"); FileInputStream inputStream = null; OutputStream outputStream = null; try { inputStream = new FileInputStream(source); outputStream = targetFile.getOutputStream(); copyInterruptibly(inputStream, outputStream); outputStream.flush(); } finally { FileUtil.close(inputStream); FileUtil.close(outputStream); targetFile.close(); } LOG.debug("Done transferring [" + source.getAbsolutePath() + "]"); count++; } return count; } private void copyInterruptibly(@NotNull FileInputStream inputStream, @NotNull OutputStream outputStream) throws IOException { byte[] buf = new byte[STREAM_BUFFER_SIZE]; int read; while ((read = inputStream.read(buf)) > -1) { checkIsInterrupted(); outputStream.write(buf, 0, read); } } }
fix TW-59320: handle empty shareName case
deploy-runner-agent-smb2/src/main/java/jetbrains/buildServer/deployer/agent/smb/SMBJBuildProcessAdapter.java
fix TW-59320: handle empty shareName case
<ide><path>eploy-runner-agent-smb2/src/main/java/jetbrains/buildServer/deployer/agent/smb/SMBJBuildProcessAdapter.java <ide> } <ide> <ide> target = target.replaceAll("/", java.util.regex.Matcher.quoteReplacement("\\")); <add> if (!target.endsWith("\\")) { <add> target = target + "\\"; <add> } <ide> <ide> final String settingsString = "Trying to connect with following parameters:\n" + <ide> "username=[" + myUsername + "]\n" +
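The git diff above makes the runner normalize the UNC-style target so it always ends with a backslash before the target is split into host, share name and path-in-share. The sketch below is only an approximation of that parsing step: it uses plain JDK string handling instead of the IntelliJ StringUtil helper used by the plugin, and the parseTarget method is hypothetical, not part of the plugin. Whether this fully covers the empty-share-name case tracked by TW-59320 also depends on how StringUtil.split treats empty tokens, which is only approximated here.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Approximation of how SMBJBuildProcessAdapter breaks a UNC-style target into
// host, share name and path-in-share after normalizing slashes.
public class SmbTargetParseDemo {

    // Hypothetical helper, not part of the plugin: returns {host, share, pathInShare}.
    static String[] parseTarget(String rawTarget) {
        String target = rawTarget.startsWith("\\\\") ? rawTarget.substring(2) : rawTarget;
        target = target.replace('/', '\\');
        if (!target.endsWith("\\")) {          // the normalization added by the fix
            target = target + "\\";
        }
        List<String> components = new ArrayList<>();
        for (String part : target.split("\\\\")) {
            if (!part.isEmpty()) {             // skip empty tokens (StringUtil.split is assumed to behave similarly)
                components.add(part);
            }
        }
        String host = components.isEmpty() ? "" : components.remove(0);
        String share = components.isEmpty() ? "" : components.remove(0);
        String pathInShare = String.join("\\", components);
        return new String[]{host, share, pathInShare};
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(parseTarget("\\\\fileserver/backup/builds")));
        // [fileserver, backup, builds]
        System.out.println(Arrays.toString(parseTarget("fileserver\\backup")));
        // [fileserver, backup, ]
    }
}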
JavaScript
mit
f2105d9094e3181b10416c0d43ba0ffefcb98bc9
0
aiueogawa/ccxt,ccxt/ccxt,tritoanst/ccxt,aiueogawa/ccxt,ccxt/ccxt,aiueogawa/ccxt,tritoanst/ccxt,ccxt/ccxt,tritoanst/ccxt,tritoanst/ccxt,ccxt/ccxt,aiueogawa/ccxt,ccxt/ccxt
"use strict"; const fs = require ('fs') const ccxt = require ('./ccxt') const countries = require ('./countries') const asTable = require ('as-table') const util = require ('util') const execSync = require ('child_process').execSync const log = require ('ololog') const ansi = require ('ansicolor').nice let exchanges let verbose = false let wikiPath = 'ccxt.wiki' if (!fs.existsSync (wikiPath)) { log.bright.cyan ('Checking out ccxt.wiki...') execSync ('git clone https://github.com/kroitor/ccxt.wiki.git') } try { exchanges = require ('./config') } catch (e) { log.bright.cyan ('Exporting exchanges → ./ccxt.js'.yellow) let ccxtjs = fs.readFileSync ('./ccxt.js', 'utf8') let exchangesMatches = /var exchanges \= \{([^\}]+)\}/g.exec (ccxtjs) let idRegex = /\'([^\'\n\s]+)\'/g let ids = [] let idMatch while (idMatch = idRegex.exec (exchangesMatches[1])) { ids.push (idMatch[1]) } let idString = " '" + ids.join ("',\n '") + "'," log.bright.cyan ('Exporting exchanges → ./ccxt/exchanges.py'.yellow) let ccxtpyFilename = './ccxt/exchanges.py' let ccxtpy = fs.readFileSync (ccxtpyFilename, 'utf8') let ccxtpyParts = ccxtpy.split (/exchanges \= \[[^\]]+\]/) let ccxtpyNewContent = ccxtpyParts[0] + "exchanges = [\n" + idString + "\n]" + ccxtpyParts[1] fs.truncateSync (ccxtpyFilename) fs.writeFileSync (ccxtpyFilename, ccxtpyNewContent) log.bright.cyan ('Exporting exchanges → ./ccxt.php'.yellow) idString = " '" + ids.join ("',\n '") + "'," let ccxtphpFilename = './ccxt.php' let ccxtphp = fs.readFileSync (ccxtphpFilename, 'utf8') let ccxtphpParts = ccxtphp.split (/public static \$exchanges \= array \([^\)]+\)/) let ccxtphpNewContent = ccxtphpParts[0] + "public static $exchanges = array (\n" + idString + "\n )" + ccxtphpParts[1] fs.truncateSync (ccxtphpFilename) fs.writeFileSync (ccxtphpFilename, ccxtphpNewContent) exchanges = {} ids.forEach (id => { exchanges[id] = { 'verbose': verbose, 'apiKey': '', 'secret': '' } }) log.bright.green ('Base sources updated successfully.') } for (let id in exchanges) { exchanges[id] = new (ccxt)[id] (exchanges[id]) exchanges[id].verbose = verbose } // console.log (Object.values (ccxt).length) var countryName = function (code) { return ((typeof countries[code] !== 'undefined') ? countries[code] : code) } let sleep = async ms => await new Promise (resolve => setTimeout (resolve, ms)) //------------------------------------------------------------------------- // list all supported exchanges let values = Object.values (exchanges).map (exchange => { let logo = exchange.urls['logo'] let website = Array.isArray (exchange.urls.www) ? exchange.urls.www[0] : exchange.urls.www let countries = Array.isArray (exchange.countries) ? exchange.countries.map (countryName).join (', ') : countryName (exchange.countries) let doc = Array.isArray (exchange.urls.doc) ? exchange.urls.doc[0] : exchange.urls.doc let version = exchange.version ? 
exchange.version : '\*' let matches = version.match (/[^0-9]*([0-9].*)/) if (matches) version = matches[1]; return { '': '![' + exchange.id + '](' + logo + ')', 'id': exchange.id, 'name': '[' + exchange.name + '](' + website + ')', 'ver': version, 'doc': '[API](' + doc + ')', 'countries': countries, } }) let numExchanges = Object.keys (exchanges).length let table = asTable.configure ({ delimiter: ' | ' }) (values) let lines = table.split ("\n") lines[1] = lines[0].replace (/[^\|]/g, '-') let headerLine = lines[1].split ('|') headerLine[3] = ':' + headerLine[3].slice (1, headerLine[3].length - 1) + ':' headerLine[4] = ':' + headerLine[4].slice (1, headerLine[4].length - 1) + ':' lines[1] = headerLine.join ('|') lines = lines.map (line => '|' + line + '|').join ("\n") let changeInFile = (filename) => { log.bright ('Exporting exchanges to'.cyan, filename.yellow, '...') let oldContent = fs.readFileSync (filename, 'utf8') let beginning = "The ccxt library currently supports the following " let ending = " cryptocurrency exchange markets and trading APIs:\n\n" let regex = new RegExp ("[^\n]+[\n][\n]\\|[^#]+\\|([\n][\n]|[\n]$|$)", 'm') let totalString = beginning + numExchanges + ending let replacement = totalString + lines + "$1" let newContent = oldContent.replace (regex, replacement) fs.truncateSync (filename) fs.writeFileSync (filename, newContent) } changeInFile ('README.md') changeInFile (wikiPath + '/Exchange-Markets.md') changeInFile (wikiPath + '/Manual.md') // console.log (typeof countries) // console.log (countries) let exchangesByCountries = [] Object.keys (countries).forEach (code => { let country = countries[code] let result = [] Object.keys (exchanges).forEach (id => { let exchange = exchanges[id] let logo = exchange.urls['logo'] let website = Array.isArray (exchange.urls.www) ? exchange.urls.www[0] : exchange.urls.www let doc = Array.isArray (exchange.urls.doc) ? exchange.urls.doc[0] : exchange.urls.doc let version = exchange.version ? 
exchange.version : '\*' let matches = version.match (/[^0-9]*([0-9].*)/) if (matches) version = matches[1]; let shouldInclude = false if (Array.isArray (exchange.countries)) { if (exchange.countries.indexOf (code) > -1) shouldInclude = true } else { if (code == exchange.countries) shouldInclude = true } if (shouldInclude) { result.push ({ 'country / region': country, 'logo': ' ![' + exchange.id + '](' + logo + ') ', 'id': exchange.id, 'name': '[' + exchange.name + '](' + website + ')', 'ver': version, 'doc': ' [API](' + doc + ') ', }) } }) exchangesByCountries = exchangesByCountries.concat (result) }); exchangesByCountries = exchangesByCountries.sort ((a, b) => { let countryA = a['country / region'].toLowerCase () let countryB = b['country / region'].toLowerCase () let idA = a['id'] let idB = b['id'] if (countryA > countryB) { return 1 } else if (countryA < countryB) { return -1; } else { if (a['id'] > b['id']) return 1; else if (a['id'] < b['id']) return -1; else return 0; } return 0; }) ;(() => { let table = asTable.configure ({ delimiter: ' | ' }) (exchangesByCountries) let lines = table.split ("\n") lines[1] = lines[0].replace (/[^\|]/g, '-') let headerLine = lines[1].split ('|') headerLine[4] = ':' + headerLine[4].slice (1, headerLine[4].length - 1) + ':' headerLine[5] = ':' + headerLine[5].slice (1, headerLine[5].length - 1) + ':' lines[1] = headerLine.join ('|') lines = lines.map (line => '|' + line + '|').join ("\n") let result = "The ccxt library currently supports the following cryptocurrency exchange markets and trading APIs:\n\n" + lines + "\n\n" let filename = wikiPath + '/Exchange-Markets-By-Country.md' fs.truncateSync (filename) fs.writeFileSync (filename, result) }) (); log.bright ('Exporting exchange ids to'.cyan, 'exchanges.json'.yellow) fs.writeFileSync ('exchanges.json', JSON.stringify ({ ids: Object.keys (exchanges).filter (x => x != 'btce') }, null, 4)) log.bright.green ('Exchanges exported successfully.')
export-exchanges.js
"use strict"; const fs = require ('fs') const ccxt = require ('./ccxt') const countries = require ('./countries') const asTable = require ('as-table') const util = require ('util') const execSync = require ('child_process').execSync const log = require ('ololog') const ansi = require ('ansicolor').nice let exchanges let verbose = false let wikiPath = 'ccxt.wiki' if (!fs.existsSync (wikiPath)) { log.bright.cyan ('Checking out ccxt.wiki...') execSync ('git clone https://github.com/kroitor/ccxt.wiki.git') } try { exchanges = require ('./config') } catch (e) { log.bright.cyan ('Exporting exchanges → ./ccxt.js'.yellow) let ccxtjs = fs.readFileSync ('./ccxt.js', 'utf8') let exchangesMatches = /var exchanges \= \{([^\}]+)\}/g.exec (ccxtjs) let idRegex = /\'([^\'\n\s]+)\'/g let ids = [] let idMatch while (idMatch = idRegex.exec (exchangesMatches[1])) { ids.push (idMatch[1]) } let idString = " '" + ids.join ("',\n '") + "'," log.bright.cyan ('Exporting exchanges → ./ccxt/exchanges.py'.yellow) let ccxtpyFilename = './ccxt/exchanges.py' let ccxtpy = fs.readFileSync (ccxtpyFilename, 'utf8') let ccxtpyParts = ccxtpy.split (/exchanges \= \[[^\]]+\]/) let ccxtpyNewContent = ccxtpyParts[0] + "exchanges = [\n" + idString + "\n]" + ccxtpyParts[1] fs.truncateSync (ccxtpyFilename) fs.writeFileSync (ccxtpyFilename, ccxtpyNewContent) log.bright.cyan ('Exporting exchanges → ./ccxt.php'.yellow) idString = " '" + ids.join ("',\n '") + "'," let ccxtphpFilename = './ccxt.php' let ccxtphp = fs.readFileSync (ccxtphpFilename, 'utf8') let ccxtphpParts = ccxtphp.split (/public static \$exchanges \= array \([^\)]+\)/) let ccxtphpNewContent = ccxtphpParts[0] + "public static $exchanges = array (\n" + idString + "\n )" + ccxtphpParts[1] fs.truncateSync (ccxtphpFilename) fs.writeFileSync (ccxtphpFilename, ccxtphpNewContent) exchanges = {} ids.forEach (id => { exchanges[id] = { 'verbose': verbose, 'apiKey': '', 'secret': '' } }) log.bright.green ('Base sources updated successfully.') } for (let id in exchanges) { exchanges[id] = new (ccxt)[id] (exchanges[id]) exchanges[id].verbose = verbose } // console.log (Object.values (ccxt).length) var countryName = function (code) { return ((typeof countries[code] !== 'undefined') ? countries[code] : code) } let sleep = async ms => await new Promise (resolve => setTimeout (resolve, ms)) //------------------------------------------------------------------------- // list all supported exchanges let values = Object.values (exchanges).map (exchange => { let logo = exchange.urls['logo'] let website = Array.isArray (exchange.urls.www) ? exchange.urls.www[0] : exchange.urls.www let countries = Array.isArray (exchange.countries) ? exchange.countries.map (countryName).join (', ') : countryName (exchange.countries) let doc = Array.isArray (exchange.urls.doc) ? exchange.urls.doc[0] : exchange.urls.doc let version = exchange.version ? 
exchange.version : '\*' let matches = version.match (/[^0-9]*([0-9].*)/) if (matches) version = matches[1]; return { '': '![' + exchange.id + '](' + logo + ')', 'id': exchange.id, 'name': '[' + exchange.name + '](' + website + ')', 'ver': version, 'doc': '[API](' + doc + ')', 'countries': countries, } }) let numExchanges = Object.keys (exchanges).length let table = asTable.configure ({ delimiter: ' | ' }) (values) let lines = table.split ("\n") lines[1] = lines[0].replace (/[^\|]/g, '-') let headerLine = lines[1].split ('|') headerLine[3] = ':' + headerLine[3].slice (1, headerLine[3].length - 1) + ':' headerLine[4] = ':' + headerLine[4].slice (1, headerLine[4].length - 1) + ':' lines[1] = headerLine.join ('|') lines = lines.map (line => '|' + line + '|').join ("\n") let changeInFile = (filename) => { log.bright ('Exporting exchanges to'.cyan, filename.yellow, '...') let oldContent = fs.readFileSync (filename, 'utf8') let beginning = "The ccxt library currently supports the following " let ending = " cryptocurrency exchange markets and trading APIs:\n\n" let regex = new RegExp ("[^\n]+[\n][\n]\\|[^#]+\\|([\n][\n]|[\n]$|$)", 'm') let totalString = beginning + numExchanges + ending let replacement = totalString + lines + "$1" let newContent = oldContent.replace (regex, replacement) fs.truncateSync (filename) fs.writeFileSync (filename, newContent) } changeInFile ('README.md') changeInFile (wikiPath + '/Exchange-Markets.md') changeInFile (wikiPath + '/Manual.md') // console.log (typeof countries) // console.log (countries) let exchangesByCountries = [] Object.keys (countries).forEach (code => { let country = countries[code] let result = [] Object.keys (exchanges).forEach (id => { let exchange = exchanges[id] let logo = exchange.urls['logo'] let website = Array.isArray (exchange.urls.www) ? exchange.urls.www[0] : exchange.urls.www let doc = Array.isArray (exchange.urls.doc) ? exchange.urls.doc[0] : exchange.urls.doc let version = exchange.version ? 
exchange.version : '\*' let matches = version.match (/[^0-9]*([0-9].*)/) if (matches) version = matches[1]; let shouldInclude = false if (Array.isArray (exchange.countries)) { if (exchange.countries.indexOf (code) > -1) shouldInclude = true } else { if (code == exchange.countries) shouldInclude = true } if (shouldInclude) { result.push ({ 'country / region': country, 'logo': ' ![' + exchange.id + '](' + logo + ') ', 'id': exchange.id, 'name': '[' + exchange.name + '](' + website + ')', 'ver': version, 'doc': ' [API](' + doc + ') ', }) } }) exchangesByCountries = exchangesByCountries.concat (result) }); exchangesByCountries = exchangesByCountries.sort ((a, b) => { let countryA = a['country / region'].toLowerCase () let countryB = b['country / region'].toLowerCase () let idA = a['id'] let idB = b['id'] if (countryA > countryB) { return 1 } else if (countryA < countryB) { return -1; } else { if (a['id'] > b['id']) return 1; else if (a['id'] < b['id']) return -1; else return 0; } return 0; }) ;(() => { let table = asTable.configure ({ delimiter: ' | ' }) (exchangesByCountries) let lines = table.split ("\n") lines[1] = lines[0].replace (/[^\|]/g, '-') let headerLine = lines[1].split ('|') headerLine[4] = ':' + headerLine[4].slice (1, headerLine[4].length - 1) + ':' headerLine[5] = ':' + headerLine[5].slice (1, headerLine[5].length - 1) + ':' lines[1] = headerLine.join ('|') lines = lines.map (line => '|' + line + '|').join ("\n") let result = "The ccxt library currently supports the following cryptocurrency exchange markets and trading APIs:\n\n" + lines + "\n\n" let filename = wikiPath + '/Exchange-Markets-By-Country.md' fs.truncateSync (filename) fs.writeFileSync (filename, result) // console.log (result) }) (); // console.log (exchangesByCountries) // console.log (asTable.configure ({ delimiter: ' | ' }) (exchangesByCountries)) log.bright ('Exporting exchange ids to'.cyan, 'exchanges.json'.yellow) fs.writeFileSync ('exchanges.json', JSON.stringify ({ ids: Object.keys (exchanges).filter (x => x != 'btce') }, null, 4)) log.bright.green ('Exchanges exported successfully.')
cleanup in export-exchanges.js
export-exchanges.js
cleanup in export-exchanges.js
<ide><path>export-exchanges.js <ide> let filename = wikiPath + '/Exchange-Markets-By-Country.md' <ide> fs.truncateSync (filename) <ide> fs.writeFileSync (filename, result) <del> // console.log (result) <ide> }) (); <del> <del>// console.log (exchangesByCountries) <del>// console.log (asTable.configure ({ delimiter: ' | ' }) (exchangesByCountries)) <ide> <ide> log.bright ('Exporting exchange ids to'.cyan, 'exchanges.json'.yellow) <ide> fs.writeFileSync ('exchanges.json', JSON.stringify ({ ids: Object.keys (exchanges).filter (x => x != 'btce') }, null, 4))
Java
apache-2.0
6e9af7e511755d5a15dac2a239e96cc9af63a4ea
0
HubSpot/Singularity,HubSpot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity
package com.hubspot.singularity.client; import static com.google.common.base.Preconditions.checkNotNull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Random; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.function.Predicate; import javax.inject.Provider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.type.TypeReference; import com.github.rholder.retry.RetryException; import com.github.rholder.retry.Retryer; import com.github.rholder.retry.RetryerBuilder; import com.github.rholder.retry.StopStrategies; import com.github.rholder.retry.WaitStrategies; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import com.google.inject.Inject; import com.google.inject.name.Named; import com.hubspot.horizon.HttpClient; import com.hubspot.horizon.HttpRequest; import com.hubspot.horizon.HttpRequest.Method; import com.hubspot.horizon.HttpResponse; import com.hubspot.horizon.RetryStrategy; import com.hubspot.mesos.json.MesosFileChunkObject; import com.hubspot.singularity.ExtendedTaskState; import com.hubspot.singularity.MachineState; import com.hubspot.singularity.OrderDirection; import com.hubspot.singularity.SingularityAction; import com.hubspot.singularity.SingularityAuthorizationScope; import com.hubspot.singularity.SingularityClientCredentials; import com.hubspot.singularity.SingularityClusterUtilization; import com.hubspot.singularity.SingularityCreateResult; import com.hubspot.singularity.SingularityDeleteResult; import com.hubspot.singularity.SingularityDeploy; import com.hubspot.singularity.SingularityDeployHistory; import com.hubspot.singularity.SingularityDeployKey; import com.hubspot.singularity.SingularityDeployUpdate; import com.hubspot.singularity.SingularityDisabledAction; import com.hubspot.singularity.SingularityDisasterType; import com.hubspot.singularity.SingularityDisastersData; import com.hubspot.singularity.SingularityPaginatedResponse; import com.hubspot.singularity.SingularityPendingRequest; import com.hubspot.singularity.SingularityPendingRequestParent; import com.hubspot.singularity.SingularityPendingTaskId; import com.hubspot.singularity.SingularityPriorityFreezeParent; import com.hubspot.singularity.SingularityRack; import com.hubspot.singularity.SingularityRequest; import com.hubspot.singularity.SingularityRequestBatch; import com.hubspot.singularity.SingularityRequestCleanup; import com.hubspot.singularity.SingularityRequestGroup; import com.hubspot.singularity.SingularityRequestHistory; import com.hubspot.singularity.SingularityRequestParent; import com.hubspot.singularity.SingularityRequestWithState; import com.hubspot.singularity.SingularityS3Log; import com.hubspot.singularity.SingularitySandbox; import com.hubspot.singularity.SingularityShellCommand; import com.hubspot.singularity.SingularitySlave; import com.hubspot.singularity.SingularityState; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskCleanup; import com.hubspot.singularity.SingularityTaskHistory; import com.hubspot.singularity.SingularityTaskHistoryUpdate; import com.hubspot.singularity.SingularityTaskId; import 
com.hubspot.singularity.SingularityTaskIdHistory; import com.hubspot.singularity.SingularityTaskIdsByStatus; import com.hubspot.singularity.SingularityTaskReconciliationStatistics; import com.hubspot.singularity.SingularityTaskRequest; import com.hubspot.singularity.SingularityTaskShellCommandHistory; import com.hubspot.singularity.SingularityTaskShellCommandRequest; import com.hubspot.singularity.SingularityTaskShellCommandUpdate; import com.hubspot.singularity.SingularityTaskState; import com.hubspot.singularity.SingularityUpdatePendingDeployRequest; import com.hubspot.singularity.SingularityWebhook; import com.hubspot.singularity.api.SingularityBounceRequest; import com.hubspot.singularity.api.SingularityDeleteRequestRequest; import com.hubspot.singularity.api.SingularityDeployRequest; import com.hubspot.singularity.api.SingularityDisabledActionRequest; import com.hubspot.singularity.api.SingularityExitCooldownRequest; import com.hubspot.singularity.api.SingularityKillTaskRequest; import com.hubspot.singularity.api.SingularityMachineChangeRequest; import com.hubspot.singularity.api.SingularityPauseRequest; import com.hubspot.singularity.api.SingularityPriorityFreeze; import com.hubspot.singularity.api.SingularityRunNowRequest; import com.hubspot.singularity.api.SingularityScaleRequest; import com.hubspot.singularity.api.SingularityUnpauseRequest; import com.hubspot.singularity.api.SingularityUpdateGroupsRequest; public class SingularityClient { private static final Logger LOG = LoggerFactory.getLogger(SingularityClient.class); private static final String BASE_API_FORMAT = "%s://%s/%s"; private static final String AUTH_FORMAT = "%s/auth"; private static final String AUTH_CHECK_FORMAT = AUTH_FORMAT + "/%s/auth-check"; private static final String AUTH_CHECK_USER_FORMAT = AUTH_CHECK_FORMAT + "/%s"; private static final String AUTH_GROUPS_CHECK_FORMAT = AUTH_FORMAT + "/groups/auth-check"; private static final String STATE_FORMAT = "%s/state"; private static final String TASK_RECONCILIATION_FORMAT = STATE_FORMAT + "/task-reconciliation"; private static final String USAGE_FORMAT = "%s/usage"; private static final String CLUSTER_UTILIZATION_FORMAT = USAGE_FORMAT + "/cluster/utilization"; private static final String RACKS_FORMAT = "%s/racks"; private static final String RACKS_DECOMISSION_FORMAT = RACKS_FORMAT + "/rack/%s/decommission"; private static final String RACKS_FREEZE_FORMAT = RACKS_FORMAT + "/rack/%s/freeze"; private static final String RACKS_ACTIVATE_FORMAT = RACKS_FORMAT + "/rack/%s/activate"; private static final String RACKS_DELETE_FORMAT = RACKS_FORMAT + "/rack/%s"; private static final String SLAVES_FORMAT = "%s/slaves"; private static final String SLAVE_DETAIL_FORMAT = SLAVES_FORMAT + "/slave/%s/details"; private static final String SLAVES_DECOMISSION_FORMAT = SLAVES_FORMAT + "/slave/%s/decommission"; private static final String SLAVES_FREEZE_FORMAT = SLAVES_FORMAT + "/slave/%s/freeze"; private static final String SLAVES_ACTIVATE_FORMAT = SLAVES_FORMAT + "/slave/%s/activate"; private static final String SLAVES_DELETE_FORMAT = SLAVES_FORMAT + "/slave/%s"; private static final String INACTIVE_SLAVES_FORMAT = "%s/inactive"; private static final String TASKS_FORMAT = "%s/tasks"; private static final String TASKS_KILL_TASK_FORMAT = TASKS_FORMAT + "/task/%s"; private static final String TASKS_GET_ACTIVE_FORMAT = TASKS_FORMAT + "/active"; private static final String TASKS_GET_ACTIVE_ON_SLAVE_FORMAT = TASKS_FORMAT + "/active/slave/%s"; private static final String 
TASKS_GET_SCHEDULED_FORMAT = TASKS_FORMAT + "/scheduled"; private static final String TASKS_GET_SCHEDULED_IDS_FORMAT = TASKS_GET_SCHEDULED_FORMAT + "/ids"; private static final String TASKS_BY_STATE_FORMAT =TASKS_FORMAT + "/ids/request/%s"; private static final String SHELL_COMMAND_FORMAT = TASKS_FORMAT + "/task/%s/command"; private static final String SHELL_COMMAND_UPDATES_FORMAT = SHELL_COMMAND_FORMAT + "/%s/%s"; private static final String HISTORY_FORMAT = "%s/history"; private static final String TASKS_HISTORY_FORMAT = HISTORY_FORMAT + "/tasks"; private static final String TASKS_HISTORY_WITHMETADATA_FORMAT = HISTORY_FORMAT + "/tasks/withmetadata"; private static final String TASK_HISTORY_FORMAT = HISTORY_FORMAT + "/task/%s"; private static final String REQUEST_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/requests"; private static final String TASK_HISTORY_BY_RUN_ID_FORMAT = HISTORY_FORMAT + "/request/%s/run/%s"; private static final String REQUEST_ACTIVE_TASKS_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/tasks/active"; private static final String REQUEST_INACTIVE_TASKS_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/tasks"; private static final String REQUEST_DEPLOY_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/deploy/%s"; private static final String TASK_TRACKER_FORMAT = "%s/track"; private static final String TRACK_BY_TASK_ID_FORMAT = TASK_TRACKER_FORMAT + "/task/%s"; private static final String TRACK_BY_RUN_ID_FORMAT = TASK_TRACKER_FORMAT + "/run/%s/%s"; private static final String REQUESTS_FORMAT = "%s/requests"; private static final String REQUESTS_GET_BATCH_FORMAT = REQUESTS_FORMAT + "/batch"; private static final String REQUESTS_GET_ACTIVE_FORMAT = REQUESTS_FORMAT + "/active"; private static final String REQUESTS_GET_PAUSED_FORMAT = REQUESTS_FORMAT + "/paused"; private static final String REQUESTS_GET_COOLDOWN_FORMAT = REQUESTS_FORMAT + "/cooldown"; private static final String REQUESTS_GET_PENDING_FORMAT = REQUESTS_FORMAT + "/queued/pending"; private static final String REQUESTS_GET_CLEANUP_FORMAT = REQUESTS_FORMAT + "/queued/cleanup"; private static final String REQUEST_GROUPS_FORMAT = "%s/groups"; private static final String REQUEST_GROUP_FORMAT = REQUEST_GROUPS_FORMAT + "/group/%s"; private static final String REQUEST_GET_FORMAT = REQUESTS_FORMAT + "/request/%s"; private static final String REQUEST_GET_SIMPLE_FORMAT = REQUESTS_FORMAT + "/request/%s/simple"; private static final String REQUEST_CREATE_OR_UPDATE_FORMAT = REQUESTS_FORMAT; private static final String REQUEST_BY_RUN_ID_FORMAT = REQUEST_GET_FORMAT + "/run/%s"; private static final String REQUEST_DELETE_ACTIVE_FORMAT = REQUESTS_FORMAT + "/request/%s"; private static final String REQUEST_BOUNCE_FORMAT = REQUESTS_FORMAT + "/request/%s/bounce"; private static final String REQUEST_PAUSE_FORMAT = REQUESTS_FORMAT + "/request/%s/pause"; private static final String REQUEST_UNPAUSE_FORMAT = REQUESTS_FORMAT + "/request/%s/unpause"; private static final String REQUEST_SCALE_FORMAT = REQUESTS_FORMAT + "/request/%s/scale"; private static final String REQUEST_RUN_FORMAT = REQUESTS_FORMAT + "/request/%s/run"; private static final String REQUEST_EXIT_COOLDOWN_FORMAT = REQUESTS_FORMAT + "/request/%s/exit-cooldown"; private static final String REQUEST_GROUPS_UPDATE_FORMAT = REQUESTS_FORMAT + "/request/%s/groups"; private static final String REQUEST_GROUPS_UPDATE_AUTH_CHECK_FORMAT = REQUEST_GROUPS_UPDATE_FORMAT + "/auth-check"; private static final String DEPLOYS_FORMAT = "%s/deploys"; private static final String 
DELETE_DEPLOY_FORMAT = DEPLOYS_FORMAT + "/deploy/%s/request/%s"; private static final String UPDATE_DEPLOY_FORMAT = DEPLOYS_FORMAT + "/update"; private static final String WEBHOOKS_FORMAT = "%s/webhooks"; private static final String WEBHOOKS_DELETE_FORMAT = WEBHOOKS_FORMAT; private static final String WEBHOOKS_GET_QUEUED_DEPLOY_UPDATES_FORMAT = WEBHOOKS_FORMAT + "/deploy"; private static final String WEBHOOKS_GET_QUEUED_REQUEST_UPDATES_FORMAT = WEBHOOKS_FORMAT + "/request"; private static final String WEBHOOKS_GET_QUEUED_TASK_UPDATES_FORMAT = WEBHOOKS_FORMAT + "/task"; private static final String SANDBOX_FORMAT = "%s/sandbox"; private static final String SANDBOX_BROWSE_FORMAT = SANDBOX_FORMAT + "/%s/browse"; private static final String SANDBOX_READ_FILE_FORMAT = SANDBOX_FORMAT + "/%s/read"; private static final String S3_LOG_FORMAT = "%s/logs"; private static final String S3_LOG_GET_TASK_LOGS = S3_LOG_FORMAT + "/task/%s"; private static final String S3_LOG_GET_REQUEST_LOGS = S3_LOG_FORMAT + "/request/%s"; private static final String S3_LOG_GET_DEPLOY_LOGS = S3_LOG_FORMAT + "/request/%s/deploy/%s"; private static final String DISASTERS_FORMAT = "%s/disasters"; private static final String DISASTER_STATS_FORMAT = DISASTERS_FORMAT + "/stats"; private static final String ACTIVE_DISASTERS_FORMAT = DISASTERS_FORMAT + "/active"; private static final String DISABLE_AUTOMATED_ACTIONS_FORMAT = DISASTERS_FORMAT + "/disable"; private static final String ENABLE_AUTOMATED_ACTIONS_FORMAT = DISASTERS_FORMAT + "/enable"; private static final String DISASTER_FORMAT = DISASTERS_FORMAT + "/active/%s"; private static final String DISABLED_ACTIONS_FORMAT = DISASTERS_FORMAT + "/disabled-actions"; private static final String DISABLED_ACTION_FORMAT = DISASTERS_FORMAT + "/disabled-actions/%s"; private static final String PRIORITY_FORMAT = "%s/priority"; private static final String PRIORITY_FREEZE_FORMAT = PRIORITY_FORMAT + "/freeze"; private static final TypeReference<Collection<SingularityRequestParent>> REQUESTS_COLLECTION = new TypeReference<Collection<SingularityRequestParent>>() {}; private static final TypeReference<Collection<SingularityPendingRequest>> PENDING_REQUESTS_COLLECTION = new TypeReference<Collection<SingularityPendingRequest>>() {}; private static final TypeReference<Collection<SingularityRequestCleanup>> CLEANUP_REQUESTS_COLLECTION = new TypeReference<Collection<SingularityRequestCleanup>>() {}; private static final TypeReference<Collection<SingularityTask>> TASKS_COLLECTION = new TypeReference<Collection<SingularityTask>>() {}; private static final TypeReference<Collection<SingularityTaskIdHistory>> TASKID_HISTORY_COLLECTION = new TypeReference<Collection<SingularityTaskIdHistory>>() {}; private static final TypeReference<Collection<SingularityRack>> RACKS_COLLECTION = new TypeReference<Collection<SingularityRack>>() {}; private static final TypeReference<Collection<SingularitySlave>> SLAVES_COLLECTION = new TypeReference<Collection<SingularitySlave>>() {}; private static final TypeReference<Collection<SingularityWebhook>> WEBHOOKS_COLLECTION = new TypeReference<Collection<SingularityWebhook>>() {}; private static final TypeReference<Collection<SingularityDeployUpdate>> DEPLOY_UPDATES_COLLECTION = new TypeReference<Collection<SingularityDeployUpdate>>() {}; private static final TypeReference<Collection<SingularityRequestHistory>> REQUEST_UPDATES_COLLECTION = new TypeReference<Collection<SingularityRequestHistory>>() {}; private static final TypeReference<Collection<SingularityTaskHistoryUpdate>> 
TASK_UPDATES_COLLECTION = new TypeReference<Collection<SingularityTaskHistoryUpdate>>() {}; private static final TypeReference<Collection<SingularityTaskRequest>> TASKS_REQUEST_COLLECTION = new TypeReference<Collection<SingularityTaskRequest>>() {}; private static final TypeReference<Collection<SingularityTaskShellCommandHistory>> SHELL_COMMAND_HISTORY = new TypeReference<Collection<SingularityTaskShellCommandHistory>>() {}; private static final TypeReference<Collection<SingularityTaskShellCommandUpdate>> SHELL_COMMAND_UPDATES = new TypeReference<Collection<SingularityTaskShellCommandUpdate>>() {}; private static final TypeReference<Collection<SingularityPendingTaskId>> PENDING_TASK_ID_COLLECTION = new TypeReference<Collection<SingularityPendingTaskId>>() {}; private static final TypeReference<Collection<SingularityS3Log>> S3_LOG_COLLECTION = new TypeReference<Collection<SingularityS3Log>>() {}; private static final TypeReference<Collection<SingularityRequestHistory>> REQUEST_HISTORY_COLLECTION = new TypeReference<Collection<SingularityRequestHistory>>() {}; private static final TypeReference<Collection<SingularityRequestGroup>> REQUEST_GROUP_COLLECTION = new TypeReference<Collection<SingularityRequestGroup>>() {}; private static final TypeReference<Collection<SingularityDisasterType>> DISASTERS_COLLECTION = new TypeReference<Collection<SingularityDisasterType>>() {}; private static final TypeReference<Collection<SingularityDisabledAction>> DISABLED_ACTIONS_COLLECTION = new TypeReference<Collection<SingularityDisabledAction>>() {}; private static final TypeReference<SingularityPaginatedResponse<SingularityTaskIdHistory>> PAGINATED_HISTORY = new TypeReference<SingularityPaginatedResponse<SingularityTaskIdHistory>>() {}; private static final TypeReference<Collection<String>> STRING_COLLECTION = new TypeReference<Collection<String>>() {}; private final Random random; private final Provider<List<String>> hostsProvider; private final String contextPath; private final boolean ssl; private final HttpClient httpClient; private final Optional<SingularityClientCredentials> credentials; private final Retryer<HttpResponse> httpResponseRetryer; @Inject @Deprecated public SingularityClient(@Named(SingularityClientModule.CONTEXT_PATH) String contextPath, @Named(SingularityClientModule.HTTP_CLIENT_NAME) HttpClient httpClient, @Named(SingularityClientModule.HOSTS_PROPERTY_NAME) String hosts) { this(contextPath, httpClient, Arrays.asList(hosts.split(",")), Optional.absent()); } public SingularityClient(String contextPath, HttpClient httpClient, List<String> hosts, Optional<SingularityClientCredentials> credentials) { this(contextPath, httpClient, ProviderUtils.of(ImmutableList.copyOf(hosts)), credentials); } public SingularityClient(String contextPath, HttpClient httpClient, Provider<List<String>> hostsProvider, Optional<SingularityClientCredentials> credentials) { this(contextPath, httpClient, hostsProvider, credentials, false); } public SingularityClient(String contextPath, HttpClient httpClient, List<String> hosts, Optional<SingularityClientCredentials> credentials, boolean ssl) { this(contextPath, httpClient, ProviderUtils.of(ImmutableList.copyOf(hosts)), credentials, ssl); } public SingularityClient(String contextPath, HttpClient httpClient, Provider<List<String>> hostsProvider, Optional<SingularityClientCredentials> credentials, boolean ssl) { this(contextPath, httpClient, hostsProvider, credentials, ssl, 3, HttpResponse::isServerError); } public SingularityClient(String contextPath, HttpClient 
httpClient, Provider<List<String>> hostsProvider, Optional<SingularityClientCredentials> credentials, boolean ssl, int retryAttempts, Predicate<HttpResponse> retryStrategy) { this.httpClient = httpClient; this.contextPath = contextPath; this.hostsProvider = hostsProvider; this.random = new Random(); this.credentials = credentials; this.ssl = ssl; this.httpResponseRetryer = RetryerBuilder.<HttpResponse>newBuilder() .withStopStrategy(StopStrategies.stopAfterAttempt(retryAttempts)) .withWaitStrategy(WaitStrategies.exponentialWait()) .retryIfResult(retryStrategy::test) .retryIfException() .build(); } private String getApiBase(String host) { return String.format(BASE_API_FORMAT, ssl ? "https" : "http", host, contextPath); } // // HttpClient Methods // private void checkResponse(String type, HttpResponse response) { if (response.isError()) { throw fail(type, response); } } private SingularityClientException fail(String type, HttpResponse response) { String body = ""; try { body = response.getAsString(); } catch (Exception e) { LOG.warn("Unable to read body", e); } String uri = ""; try { uri = response.getRequest().getUrl().toString(); } catch (Exception e) { LOG.warn("Unable to read uri", e); } throw new SingularityClientException(String.format("Failed '%s' action on Singularity (%s) - code: %s, %s", type, uri, response.getStatusCode(), body), response.getStatusCode()); } private <T> Optional<T> getSingle(Function<String, String> hostToUrl, String type, String id, Class<T> clazz) { return getSingleWithParams(hostToUrl, type, id, Optional.absent(), clazz); } private <T> Optional<T> getSingleWithParams(Function<String, String> hostToUrl, String type, String id, Optional<Map<String, Object>> queryParams, Class<T> clazz) { final long start = System.currentTimeMillis(); HttpResponse response = executeGetSingleWithParams(hostToUrl, type, id, queryParams); if (response.getStatusCode() == 404) { return Optional.absent(); } checkResponse(type, response); LOG.info("Got {} {} in {}ms", type, id, System.currentTimeMillis() - start); return Optional.fromNullable(response.getAs(clazz)); } private <T> Optional<T> getSingleWithParams(Function<String, String> hostToUrl, String type, String id, Optional<Map<String, Object>> queryParams, TypeReference<T> typeReference) { final long start = System.currentTimeMillis(); HttpResponse response = executeGetSingleWithParams(hostToUrl, type, id, queryParams); if (response.getStatusCode() == 404) { return Optional.absent(); } checkResponse(type, response); LOG.info("Got {} {} in {}ms", type, id, System.currentTimeMillis() - start); return Optional.fromNullable(response.getAs(typeReference)); } private HttpResponse executeGetSingleWithParams(Function<String, String> hostToUrl, String type, String id, Optional<Map<String, Object>> queryParams) { checkNotNull(id, String.format("Provide a %s id", type)); LOG.info("Getting {} {} from Singularity host", type, id); return executeRequest(hostToUrl, Method.GET, Optional.absent(), queryParams.or(Collections.emptyMap())); } private <T> Collection<T> getCollection(Function<String, String> hostToUrl, String type, TypeReference<Collection<T>> typeReference) { return getCollectionWithParams(hostToUrl, type, Optional.absent(), typeReference); } private <T> Collection<T> getCollectionWithParams(Function<String, String> hostToUrl, String type, Optional<Map<String, Object>> queryParams, TypeReference<Collection<T>> typeReference) { final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(hostToUrl, Method.GET, 
Optional.absent(), queryParams.or(Collections.emptyMap())); if (response.getStatusCode() == 404) { return ImmutableList.of(); } checkResponse(type, response); LOG.info("Got {} in {}ms", type, System.currentTimeMillis() - start); return response.getAs(typeReference); } private void addQueryParams(HttpRequest.Builder requestBuilder, Map<String, ?> queryParams) { for (Entry<String, ?> queryParamEntry : queryParams.entrySet()) { if (queryParamEntry.getValue() instanceof String) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((String) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Integer) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Integer) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Long) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Long) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Boolean) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Boolean) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Set) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Set) queryParamEntry.getValue()); } else { throw new RuntimeException(String.format("The type '%s' of query param %s is not supported. Only String, long, int, Set and boolean values are supported", queryParamEntry.getValue().getClass().getName(), queryParamEntry.getKey())); } } } private void addCredentials(HttpRequest.Builder requestBuilder) { if (credentials.isPresent()) { requestBuilder.addHeader(credentials.get().getHeaderName(), credentials.get().getToken()); } } private void delete(Function<String, String> hostToUrl, String type, String id) { delete(hostToUrl, type, id, Optional.absent()); } private <T> void delete(Function<String, String> hostToUrl, String type, String id, Optional<?> body) { delete(hostToUrl, type, id, body, Optional.<Class<T>>absent()); } private <T> Optional<T> delete(Function<String, String> hostToUrl, String type, String id, Optional<?> body, Optional<Class<T>> clazz) { return deleteWithParams(hostToUrl, type, id, body, Optional.absent(), clazz); } private <T> Optional<T> deleteWithParams(Function<String, String> hostToUrl, String type, String id, Optional<?> body, Optional<Map<String, Object>> queryParams, Optional<Class<T>> clazz) { LOG.info("Deleting {} {} from Singularity", type, id); final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(hostToUrl, Method.DELETE, body, queryParams.or(Collections.emptyMap())); if (response.getStatusCode() == 404) { LOG.info("{} ({}) was not found", type, id); return Optional.absent(); } checkResponse(type, response); LOG.info("Deleted {} ({}) from Singularity in %sms", type, id, System.currentTimeMillis() - start); if (clazz.isPresent()) { return Optional.of(response.getAs(clazz.get())); } return Optional.absent(); } private HttpResponse put(Function<String, String> hostToUri, String type, Optional<?> body) { return executeRequest(hostToUri, type, body, Method.PUT, Optional.absent()); } private <T> Optional<T> post(Function<String, String> hostToUri, String type, Optional<?> body, Optional<Class<T>> clazz) { try { HttpResponse response = executeRequest(hostToUri, type, body, Method.POST, Optional.absent()); if (clazz.isPresent()) { return Optional.of(response.getAs(clazz.get())); } } catch (Exception e) { LOG.warn("Http post failed", e); } return Optional.absent(); } private HttpResponse postWithParams(Function<String, String> hostToUri, String type, 
Optional<?> body, Optional<Map<String, Object>> queryParams) { return executeRequest(hostToUri, type, body, Method.POST, queryParams); } private HttpResponse post(Function<String, String> hostToUri, String type, Optional<?> body) { return executeRequest(hostToUri, type, body, Method.POST, Optional.absent()); } private HttpResponse post(Function<String, String> hostToUri, String type, Optional<?> body, Map<String, Object> queryParams) { return executeRequest(hostToUri, type, body, Method.POST, Optional.of(queryParams)); } private HttpResponse executeRequest(Function<String, String> hostToUri, String type, Optional<?> body, Method method, Optional<Map<String, Object>> queryParams) { final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(hostToUri, method, body, queryParams.or(Collections.emptyMap())); checkResponse(type, response); LOG.info("Successfully {}ed {} in {}ms", method, type, System.currentTimeMillis() - start); return response; } private HttpResponse executeRequest(Function<String, String> hostToUri, Method method, Optional<?> body, Map<String, ?> queryParams) { HttpRequest.Builder request = HttpRequest.newBuilder().setMethod(method); if (body.isPresent()) { request.setBody(body.get()); } addQueryParams(request, queryParams); addCredentials(request); List<String> hosts = new ArrayList<>(hostsProvider.get()); request .setRetryStrategy(RetryStrategy.NEVER_RETRY) .setMaxRetries(1); try { return httpResponseRetryer.call(() -> { if (hosts.isEmpty()) { // We've tried everything we started with. Look again. hosts.addAll(hostsProvider.get()); } int selection = random.nextInt(hosts.size()); String host = hosts.get(selection); String url = hostToUri.apply(host); hosts.remove(selection); LOG.info("Making {} request to {}", method, url); request.setUrl(url); return httpClient.execute(request.build()); }); } catch (ExecutionException | RetryException exn) { if (exn instanceof RetryException) { RetryException retryExn = (RetryException) exn; if (retryExn.getLastFailedAttempt().hasException()) { LOG.error("Failed request to Singularity", retryExn.getLastFailedAttempt().getExceptionCause()); } else { LOG.error("Failed request to Singularity", exn); } } else { LOG.error("Failed request to Singularity", exn); } throw new SingularityClientException("Failed request to Singularity", exn); } } // // GLOBAL // public SingularityState getState(Optional<Boolean> skipCache, Optional<Boolean> includeRequestIds) { final Function<String, String> uri = (host) -> String.format(STATE_FORMAT, getApiBase(host)); LOG.info("Fetching state from {}", uri); final long start = System.currentTimeMillis(); Map<String, Boolean> queryParams = new HashMap<>(); if (skipCache.isPresent()) { queryParams.put("skipCache", skipCache.get()); } if (includeRequestIds.isPresent()) { queryParams.put("includeRequestIds", includeRequestIds.get()); } HttpResponse response = executeRequest(uri, Method.GET, Optional.absent(), queryParams); checkResponse("state", response); LOG.info("Got state in {}ms", System.currentTimeMillis() - start); return response.getAs(SingularityState.class); } public Optional<SingularityTaskReconciliationStatistics> getTaskReconciliationStatistics() { final Function<String, String> uri = (host) -> String.format(TASK_RECONCILIATION_FORMAT, getApiBase(host)); LOG.info("Fetch task reconciliation statistics from {}", uri); final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(uri, Method.GET, Optional.absent(), Collections.emptyMap()); if 
(response.getStatusCode() == 404) { return Optional.absent(); } checkResponse("task reconciliation statistics", response); LOG.info("Got task reconciliation statistics in {}ms", System.currentTimeMillis() - start); return Optional.of(response.getAs(SingularityTaskReconciliationStatistics.class)); } public Optional<SingularityClusterUtilization> getClusterUtilization() { final Function<String, String> uri = (host) -> String.format(CLUSTER_UTILIZATION_FORMAT, getApiBase(host)); return getSingle(uri, "clusterUtilization", "", SingularityClusterUtilization.class); } // // ACTIONS ON A SINGLE SINGULARITY REQUEST // public Optional<SingularityRequestParent> getSingularityRequest(String requestId) { final Function<String, String> singularityApiRequestUri = (host) -> String.format(REQUEST_GET_FORMAT, getApiBase(host), requestId); return getSingle(singularityApiRequestUri, "request", requestId, SingularityRequestParent.class); } // Fetch only the request + state, no additional deploy/task data public Optional<SingularityRequestWithState> getSingularityRequestSimple(String requestId) { final Function<String, String> singularityApiRequestUri = (host) -> String.format(REQUEST_GET_SIMPLE_FORMAT, getApiBase(host), requestId); return getSingle(singularityApiRequestUri, "request-simple", requestId, SingularityRequestWithState.class); } public Optional<SingularityTaskId> getTaskByRunIdForRequest(String requestId, String runId) { final Function<String, String> singularityApiRequestUri = (host) -> String.format(REQUEST_BY_RUN_ID_FORMAT, getApiBase(host), requestId, runId); return getSingle(singularityApiRequestUri, "requestByRunId", runId, SingularityTaskId.class); } public void createOrUpdateSingularityRequest(SingularityRequest request) { checkNotNull(request.getId(), "A posted Singularity Request must have an id"); final Function<String, String> requestUri = (host) -> String.format(REQUEST_CREATE_OR_UPDATE_FORMAT, getApiBase(host)); post(requestUri, String.format("request %s", request.getId()), Optional.of(request)); } /** * Delete a singularity request. * If the deletion is successful the singularity request is moved to a DELETING state and is returned. 
* If the request to be deleted is not found {code Optional.absent()} is returned * If an error occurs during deletion an exception is returned * * @param requestId * the id of the singularity request to delete * @return * the singularity request that has been moved to deleting */ public Optional<SingularityRequest> deleteSingularityRequest(String requestId, Optional<SingularityDeleteRequestRequest> deleteRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_DELETE_ACTIVE_FORMAT, getApiBase(host), requestId); return delete(requestUri, "active request", requestId, deleteRequest, Optional.of(SingularityRequest.class)); } public void pauseSingularityRequest(String requestId, Optional<SingularityPauseRequest> pauseRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_PAUSE_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("pause of request %s", requestId), pauseRequest); } public void unpauseSingularityRequest(String requestId, Optional<SingularityUnpauseRequest> unpauseRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_UNPAUSE_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("unpause of request %s", requestId), unpauseRequest); } public void scaleSingularityRequest(String requestId, SingularityScaleRequest scaleRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_SCALE_FORMAT, getApiBase(host), requestId); put(requestUri, String.format("Scale of Request %s", requestId), Optional.of(scaleRequest)); } public SingularityPendingRequestParent runSingularityRequest(String requestId, Optional<SingularityRunNowRequest> runNowRequest) { return runSingularityRequest(requestId, runNowRequest, false); } /** * * @param requestId * @param runNowRequest * @param minimalReturn - if `true` will return a SingularityPendingRequestParent that is _not_ hydrated with extra task + deploy information * @return */ public SingularityPendingRequestParent runSingularityRequest(String requestId, Optional<SingularityRunNowRequest> runNowRequest, boolean minimalReturn) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_RUN_FORMAT, getApiBase(host), requestId); final HttpResponse response = post(requestUri, String.format("run of request %s", requestId), runNowRequest, ImmutableMap.of("minimal", String.valueOf(minimalReturn))); return response.getAs(SingularityPendingRequestParent.class); } public void bounceSingularityRequest(String requestId, Optional<SingularityBounceRequest> bounceOptions) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_BOUNCE_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("bounce of request %s", requestId), bounceOptions); } public void exitCooldown(String requestId, Optional<SingularityExitCooldownRequest> exitCooldownRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_EXIT_COOLDOWN_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("exit cooldown of request %s", requestId), exitCooldownRequest); } public SingularityPendingRequestParent updateAuthorizedGroups(String requestId, SingularityUpdateGroupsRequest updateGroupsRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_UPDATE_FORMAT, getApiBase(host), requestId); final HttpResponse response = post(requestUri, String.format("update authorized groups of request %s", requestId), Optional.of(updateGroupsRequest)); 
return response.getAs(SingularityPendingRequestParent.class); } public boolean checkAuthForRequestGroupsUpdate(String requestId, SingularityUpdateGroupsRequest updateGroupsRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_UPDATE_AUTH_CHECK_FORMAT, getApiBase(host), requestId); final HttpResponse response = post(requestUri, String.format("check auth for update authorized groups of request %s", requestId), Optional.of(updateGroupsRequest)); return response.isSuccess(); } // // ACTIONS ON A DEPLOY FOR A SINGULARITY REQUEST // public SingularityRequestParent createDeployForSingularityRequest(String requestId, SingularityDeploy pendingDeploy, Optional<Boolean> deployUnpause, Optional<String> message) { return createDeployForSingularityRequest(requestId, pendingDeploy, deployUnpause, message, Optional.absent()); } public SingularityRequestParent createDeployForSingularityRequest(String requestId, SingularityDeploy pendingDeploy, Optional<Boolean> deployUnpause, Optional<String> message, Optional<SingularityRequest> updatedRequest) { final Function<String, String> requestUri = (String host) -> String.format(DEPLOYS_FORMAT, getApiBase(host)); HttpResponse response = post(requestUri, String.format("new deploy %s", new SingularityDeployKey(requestId, pendingDeploy.getId())), Optional.of(new SingularityDeployRequest(pendingDeploy, deployUnpause, message, updatedRequest))); return getAndLogRequestAndDeployStatus(response.getAs(SingularityRequestParent.class)); } private SingularityRequestParent getAndLogRequestAndDeployStatus(SingularityRequestParent singularityRequestParent) { String activeDeployId = singularityRequestParent.getActiveDeploy().isPresent() ? singularityRequestParent.getActiveDeploy().get().getId() : "No Active Deploy"; String pendingDeployId = singularityRequestParent.getPendingDeploy().isPresent() ? 
singularityRequestParent.getPendingDeploy().get().getId() : "No Pending deploy"; LOG.info("Deploy status: Singularity request {} -> pending deploy: '{}', active deploy: '{}'", singularityRequestParent.getRequest().getId(), pendingDeployId, activeDeployId); return singularityRequestParent; } public SingularityRequestParent cancelPendingDeployForSingularityRequest(String requestId, String deployId) { final Function<String, String> requestUri = (host) -> String.format(DELETE_DEPLOY_FORMAT, getApiBase(host), deployId, requestId); SingularityRequestParent singularityRequestParent = delete(requestUri, "pending deploy", new SingularityDeployKey(requestId, deployId).getId(), Optional.absent(), Optional.of(SingularityRequestParent.class)).get(); return getAndLogRequestAndDeployStatus(singularityRequestParent); } public SingularityRequestParent updateIncrementalDeployInstanceCount(SingularityUpdatePendingDeployRequest updateRequest) { final Function<String, String> requestUri = (host) -> String.format(UPDATE_DEPLOY_FORMAT, getApiBase(host)); HttpResponse response = post(requestUri, String.format("update deploy %s", new SingularityDeployKey(updateRequest.getRequestId(), updateRequest.getDeployId())), Optional.of(updateRequest)); return getAndLogRequestAndDeployStatus(response.getAs(SingularityRequestParent.class)); } // // REQUESTS // /** * Get all singularity requests that their state is either ACTIVE, PAUSED or COOLDOWN * * For the requests that are pending to become ACTIVE use: * {@link SingularityClient#getPendingSingularityRequests()} * * For the requests that are cleaning up use: * {@link SingularityClient#getCleanupSingularityRequests()} * * * Use {@link SingularityClient#getActiveSingularityRequests()}, {@link SingularityClient#getPausedSingularityRequests()}, * {@link SingularityClient#getCoolDownSingularityRequests()} respectively to get only the ACTIVE, PAUSED or COOLDOWN requests. * * @return * returns all the [ACTIVE, PAUSED, COOLDOWN] {@link SingularityRequestParent} instances. 
* */ public Collection<SingularityRequestParent> getSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_FORMAT, getApiBase(host)); return getCollection(requestUri, "[ACTIVE, PAUSED, COOLDOWN] requests", REQUESTS_COLLECTION); } /** * Get a specific batch of requests * * @return * A SingularityRequestBatch containing the found request data and not found request ids */ public SingularityRequestBatch getRequestsBatch(Set<String> requestIds) { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_BATCH_FORMAT, getApiBase(host)); Map<String, Object> queryParams = new HashMap<>(); queryParams.put("id", requestIds); Optional<SingularityRequestBatch> maybeResult = getSingleWithParams(requestUri, "requests BATCH", "requests BATCH", Optional.of(queryParams), SingularityRequestBatch.class); if (!maybeResult.isPresent()) { throw new SingularityClientException("Singularity url not found", 404); } else { return maybeResult.get(); } } /** * Get all requests that their state is ACTIVE * * @return * All ACTIVE {@link SingularityRequestParent} instances */ public Collection<SingularityRequestParent> getActiveSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_ACTIVE_FORMAT, getApiBase(host)); return getCollection(requestUri, "ACTIVE requests", REQUESTS_COLLECTION); } /** * Get all requests that their state is PAUSED * ACTIVE requests are paused by users, which is equivalent to stop their tasks from running without undeploying them * * @return * All PAUSED {@link SingularityRequestParent} instances */ public Collection<SingularityRequestParent> getPausedSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_PAUSED_FORMAT, getApiBase(host)); return getCollection(requestUri, "PAUSED requests", REQUESTS_COLLECTION); } /** * Get all requests that has been set to a COOLDOWN state by singularity * * @return * All {@link SingularityRequestParent} instances that their state is COOLDOWN */ public Collection<SingularityRequestParent> getCoolDownSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_COOLDOWN_FORMAT, getApiBase(host)); return getCollection(requestUri, "COOLDOWN requests", REQUESTS_COLLECTION); } /** * Get all requests that are pending to become ACTIVE * * @return * A collection of {@link SingularityPendingRequest} instances that hold information about the singularity requests that are pending to become ACTIVE */ public Collection<SingularityPendingRequest> getPendingSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_PENDING_FORMAT, getApiBase(host)); return getCollection(requestUri, "pending requests", PENDING_REQUESTS_COLLECTION); } /** * Get all requests that are cleaning up * Requests that are cleaning up are those that have been marked for removal and their tasks are being stopped/removed * before they are being removed. So after their have been cleaned up, these request cease to exist in Singularity. * * @return * A collection of {@link SingularityRequestCleanup} instances that hold information about all singularity requests * that are marked for deletion and are currently cleaning up. 
*/ public Collection<SingularityRequestCleanup> getCleanupSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_CLEANUP_FORMAT, getApiBase(host)); return getCollection(requestUri, "cleaning requests", CLEANUP_REQUESTS_COLLECTION); } // // SINGULARITY TASK COLLECTIONS // // // ACTIVE TASKS // public Collection<SingularityTask> getActiveTasks() { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_ACTIVE_FORMAT, getApiBase(host)); return getCollection(requestUri, "active tasks", TASKS_COLLECTION); } public Collection<SingularityTask> getActiveTasksOnSlave(final String slaveId) { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_ACTIVE_ON_SLAVE_FORMAT, getApiBase(host), slaveId); return getCollection(requestUri, String.format("active tasks on slave %s", slaveId), TASKS_COLLECTION); } public Optional<SingularityTaskCleanup> killTask(String taskId, Optional<SingularityKillTaskRequest> killTaskRequest) { final Function<String, String> requestUri = (host) -> String.format(TASKS_KILL_TASK_FORMAT, getApiBase(host), taskId); return delete(requestUri, "task", taskId, killTaskRequest, Optional.of(SingularityTaskCleanup.class)); } // // SCHEDULED TASKS // public Collection<SingularityTaskRequest> getScheduledTasks() { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_SCHEDULED_FORMAT, getApiBase(host)); return getCollection(requestUri, "scheduled tasks", TASKS_REQUEST_COLLECTION); } public Collection<SingularityPendingTaskId> getScheduledTaskIds() { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_SCHEDULED_IDS_FORMAT, getApiBase(host)); return getCollection(requestUri, "scheduled task ids", PENDING_TASK_ID_COLLECTION); } public Optional<SingularityTaskIdsByStatus> getTaskIdsByStatusForRequest(String requestId) { final Function<String, String> requestUri = (host) -> String.format(TASKS_BY_STATE_FORMAT, getApiBase(host), requestId); return getSingle(requestUri, "task ids by state", requestId, SingularityTaskIdsByStatus.class); } public SingularityTaskShellCommandRequest startShellCommand(String taskId, SingularityShellCommand shellCommand) { final Function<String, String> requestUri = (host) -> String.format(SHELL_COMMAND_FORMAT, getApiBase(host), taskId); return post(requestUri, "start shell command", Optional.of(shellCommand), Optional.of(SingularityTaskShellCommandRequest.class)).orNull(); } public Collection<SingularityTaskShellCommandHistory> getShellCommandHistory(String taskId) { final Function<String, String> requestUri = (host) -> String.format(SHELL_COMMAND_FORMAT, getApiBase(host), taskId); return getCollection(requestUri, "get shell command history", SHELL_COMMAND_HISTORY); } public Collection<SingularityTaskShellCommandUpdate> getShellCommandUpdates(SingularityTaskShellCommandRequest shellCommandRequest) { final Function<String, String> requestUri = (host) -> String.format(SHELL_COMMAND_UPDATES_FORMAT, getApiBase(host), shellCommandRequest.getTaskId(), shellCommandRequest.getShellCommand().getName(), shellCommandRequest.getTimestamp()); return getCollection(requestUri, "get shell command update history", SHELL_COMMAND_UPDATES); } // // RACKS // private Collection<SingularityRack> getRacks(Optional<MachineState> rackState) { final Function<String, String> requestUri = (host) -> String.format(RACKS_FORMAT, getApiBase(host)); Optional<Map<String, Object>> maybeQueryParams = Optional.absent(); String type = "racks"; if 
(rackState.isPresent()) { maybeQueryParams = Optional.of(ImmutableMap.of("state", rackState.get().toString())); type = String.format("%s racks", rackState.get().toString()); } return getCollectionWithParams(requestUri, type, maybeQueryParams, RACKS_COLLECTION); } @Deprecated public void decomissionRack(String rackId) { decommissionRack(rackId, Optional.absent()); } public void decommissionRack(String rackId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(RACKS_DECOMISSION_FORMAT, getApiBase(host), rackId); post(requestUri, String.format("decommission rack %s", rackId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void freezeRack(String rackId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(RACKS_FREEZE_FORMAT, getApiBase(host), rackId); post(requestUri, String.format("freeze rack %s", rackId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void activateRack(String rackId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(RACKS_ACTIVATE_FORMAT, getApiBase(host), rackId); post(requestUri, String.format("activate rack %s", rackId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void deleteRack(String rackId) { final Function<String, String> requestUri = (host) -> String.format(RACKS_DELETE_FORMAT, getApiBase(host), rackId); delete(requestUri, "dead rack", rackId); } // // SLAVES // /** * Retrieve the list of all known slaves, optionally filtering by a particular slave state * * @param slaveState * Optionally specify a particular state to filter slaves by * @return * A collection of {@link SingularitySlave} */ public Collection<SingularitySlave> getSlaves(Optional<MachineState> slaveState) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_FORMAT, getApiBase(host)); Optional<Map<String, Object>> maybeQueryParams = Optional.absent(); String type = "slaves"; if (slaveState.isPresent()) { maybeQueryParams = Optional.of(ImmutableMap.of("state", slaveState.get().toString())); type = String.format("%s slaves", slaveState.get().toString()); } return getCollectionWithParams(requestUri, type, maybeQueryParams, SLAVES_COLLECTION); } /** * Retrieve a single slave by ID * * @param slaveId * The slave ID to search for * @return * A {@link SingularitySlave} */ public Optional<SingularitySlave> getSlave(String slaveId) { final Function<String, String> requestUri = (host) -> String.format(SLAVE_DETAIL_FORMAT, getApiBase(host), slaveId); return getSingle(requestUri, "slave", slaveId, SingularitySlave.class); } @Deprecated public void decomissionSlave(String slaveId) { decommissionSlave(slaveId, Optional.absent()); } public void decommissionSlave(String slaveId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_DECOMISSION_FORMAT, getApiBase(host), slaveId); post(requestUri, String.format("decommission slave %s", slaveId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void freezeSlave(String slaveId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_FREEZE_FORMAT, getApiBase(host), 
slaveId); post(requestUri, String.format("freeze slave %s", slaveId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void activateSlave(String slaveId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_ACTIVATE_FORMAT, getApiBase(host), slaveId); post(requestUri, String.format("activate slave %s", slaveId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void deleteSlave(String slaveId) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_DELETE_FORMAT, getApiBase(host), slaveId); delete(requestUri, "deleting slave", slaveId); } // // REQUEST HISTORY // /** * Retrieve a paged list of updates for a particular {@link SingularityRequest} * * @param requestId * Request ID to look up * @param count * Number of items to return per page * @param page * Which page of items to return * @return * A list of {@link SingularityRequestHistory} */ public Collection<SingularityRequestHistory> getHistoryForRequest(String requestId, Optional<Integer> count, Optional<Integer> page) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_HISTORY_FORMAT, getApiBase(host), requestId); Optional<Map<String, Object>> maybeQueryParams = Optional.absent(); ImmutableMap.Builder<String, Object> queryParamsBuilder = ImmutableMap.builder(); if (count.isPresent() ) { queryParamsBuilder.put("count", count.get()); } if (page.isPresent()) { queryParamsBuilder.put("page", page.get()); } Map<String, Object> queryParams = queryParamsBuilder.build(); if (!queryParams.isEmpty()) { maybeQueryParams = Optional.of(queryParams); } return getCollectionWithParams(requestUri, "request history", maybeQueryParams, REQUEST_HISTORY_COLLECTION); } // // Inactive/Bad Slaves // public Collection<String> getInactiveSlaves() { final Function<String, String> requestUri = (host) -> String.format(INACTIVE_SLAVES_FORMAT, getApiBase(host)); return getCollection(requestUri, "inactiveSlaves", STRING_COLLECTION); } public void markSlaveAsInactive(String host) { final Function<String, String> requestUri = (singularityHost) -> String.format(INACTIVE_SLAVES_FORMAT, getApiBase(singularityHost)); Map<String, Object> params = Collections.singletonMap("host", host); postWithParams(requestUri, "deactivateSlave", Optional.absent(), Optional.of(params)); } public void clearInactiveSlave(String host) { final Function<String, String> requestUri = (singularityHost) -> String.format(INACTIVE_SLAVES_FORMAT, getApiBase(host)); Map<String, Object> params = Collections.singletonMap("host", host); deleteWithParams(requestUri, "clearInactiveSlave", host, Optional.absent(), Optional.of(params), Optional.absent()); } // // TASK HISTORY // /** * Retrieve information about an inactive task by its id * * @param taskId * The task ID to search for * @return * A {@link SingularityTaskIdHistory} object if the task exists */ public Optional<SingularityTaskHistory> getHistoryForTask(String taskId) { final Function<String, String> requestUri = (host) -> String.format(TASK_HISTORY_FORMAT, getApiBase(host), taskId); return getSingle(requestUri, "task history", taskId, SingularityTaskHistory.class); } public Collection<SingularityTaskIdHistory> getActiveTaskHistoryForRequest(String requestId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_ACTIVE_TASKS_HISTORY_FORMAT, getApiBase(host), requestId); final String type = String.format("active 
task history for %s", requestId); return getCollection(requestUri, type, TASKID_HISTORY_COLLECTION); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId) { return getInactiveTaskHistoryForRequest(requestId, 100, 1); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, String deployId) { return getInactiveTaskHistoryForRequest(requestId, 100, 1, Optional.absent(), Optional.of(deployId), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent()); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, int count, int page) { return getInactiveTaskHistoryForRequest(requestId, count, page, Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent()); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, int count, int page, Optional<String> host, Optional<String> runId, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection) { final Function<String, String> requestUri = (singularityHost) -> String.format(REQUEST_INACTIVE_TASKS_HISTORY_FORMAT, getApiBase(singularityHost), requestId); final String type = String.format("inactive (failed, killed, lost) task history for request %s", requestId); Map<String, Object> params = taskSearchParams(Optional.of(requestId), Optional.absent(), runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getCollectionWithParams(requestUri, type, Optional.of(params), TASKID_HISTORY_COLLECTION); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, int count, int page, Optional<String> host, Optional<String> deployId, Optional<String> runId, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection) { final Function<String, String> requestUri = (singularityHost) -> String.format(REQUEST_INACTIVE_TASKS_HISTORY_FORMAT, getApiBase(singularityHost), requestId); final String type = String.format("inactive (failed, killed, lost) task history for request %s", requestId); Map<String, Object> params = taskSearchParams(Optional.of(requestId), deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getCollectionWithParams(requestUri, type, Optional.of(params), TASKID_HISTORY_COLLECTION); } public Optional<SingularityDeployHistory> getHistoryForRequestDeploy(String requestId, String deployId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_DEPLOY_HISTORY_FORMAT, getApiBase(host), requestId, deployId); return getSingle(requestUri, "deploy history", new SingularityDeployKey(requestId, deployId).getId(), SingularityDeployHistory.class); } /** * Retrieve the task id and state for an inactive task by its runId * * @param requestId * The request ID to search for * @param runId * The run ID to search for * @return * A {@link SingularityTaskIdHistory} object if the task exists * @deprecated use {@link #getTaskIdHistoryByRunId} */ @Deprecated public 
Optional<SingularityTaskIdHistory> getHistoryForTask(String requestId, String runId) { return getTaskIdHistoryByRunId(requestId, runId); } public Optional<SingularityTaskIdHistory> getTaskIdHistoryByRunId(String requestId, String runId) { final Function<String, String> requestUri = (host) -> String.format(TASK_HISTORY_BY_RUN_ID_FORMAT, getApiBase(host), requestId, runId); return getSingle(requestUri, "task history", requestId, SingularityTaskIdHistory.class); } public Collection<SingularityTaskIdHistory> getTaskHistory(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection, Integer count, Integer page) { final Function<String, String> requestUri = (singularityHost) -> String.format(TASKS_HISTORY_FORMAT, getApiBase(singularityHost)); Map<String, Object> params = taskSearchParams(requestId, deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getCollectionWithParams(requestUri, "task id history", Optional.of(params), TASKID_HISTORY_COLLECTION); } public Optional<SingularityPaginatedResponse<SingularityTaskIdHistory>> getTaskHistoryWithMetadata(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection, Integer count, Integer page) { final Function<String, String> requestUri = (singularityHost) -> String.format(TASKS_HISTORY_WITHMETADATA_FORMAT, getApiBase(singularityHost)); Map<String, Object> params = taskSearchParams(requestId, deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getSingleWithParams(requestUri, "task id history with metadata", "", Optional.of(params), PAGINATED_HISTORY); } private Map<String, Object> taskSearchParams(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection, Integer count, Integer page) { Map<String, Object> params = new HashMap<>(); if (requestId.isPresent()) { params.put("requestId", requestId.get()); } if (deployId.isPresent()) { params.put("deployId", deployId.get()); } if (runId.isPresent()) { params.put("runId", runId.get()); } if (host.isPresent()) { params.put("host", host.get()); } if (lastTaskStatus.isPresent()) { params.put("lastTaskStatus", lastTaskStatus.get().toString()); } if (startedBefore.isPresent()) { params.put("startedBefore", startedBefore.get()); } if (startedAfter.isPresent()) { params.put("startedAfter", startedAfter.get()); } if (updatedBefore.isPresent()) { params.put("updatedBefore", updatedBefore.get()); } if (updatedAfter.isPresent()) { params.put("updatedAfter", updatedAfter.get()); } if (orderDirection.isPresent()) { params.put("orderDirection", orderDirection.get().toString()); } params.put("count", count); params.put("page", page); return params; } // // WEBHOOKS // public Optional<SingularityCreateResult> addWebhook(SingularityWebhook 
webhook) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_FORMAT, getApiBase(host)); return post(requestUri, String.format("webhook %s", webhook.getUri()), Optional.of(webhook), Optional.of(SingularityCreateResult.class)); } public Optional<SingularityDeleteResult> deleteWebhook(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_DELETE_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return deleteWithParams(requestUri, String.format("webhook with id %s", webhookId), webhookId, Optional.absent(), Optional.of(queryParamBuider.build()), Optional.of(SingularityDeleteResult.class)); } public Collection<SingularityWebhook> getActiveWebhook() { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_FORMAT, getApiBase(host)); return getCollection(requestUri, "active webhooks", WEBHOOKS_COLLECTION); } public Collection<SingularityDeployUpdate> getQueuedDeployUpdates(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_GET_QUEUED_DEPLOY_UPDATES_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return getCollectionWithParams(requestUri, "deploy updates", Optional.of(queryParamBuider.build()), DEPLOY_UPDATES_COLLECTION); } public Collection<SingularityRequestHistory> getQueuedRequestUpdates(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_GET_QUEUED_REQUEST_UPDATES_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return getCollectionWithParams(requestUri, "request updates", Optional.of(queryParamBuider.build()), REQUEST_UPDATES_COLLECTION); } public Collection<SingularityTaskHistoryUpdate> getQueuedTaskUpdates(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_GET_QUEUED_TASK_UPDATES_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return getCollectionWithParams(requestUri, "request updates", Optional.of(queryParamBuider.build()), TASK_UPDATES_COLLECTION); } // // SANDBOX // /** * Retrieve information about a specific task's sandbox * * @param taskId * The task ID to browse * @param path * The path to browse from. * if not specified it will browse from the sandbox root. * @return * A {@link SingularitySandbox} object that captures the information for the path to a specific task's Mesos sandbox */ public Optional<SingularitySandbox> browseTaskSandBox(String taskId, String path) { final Function<String, String> requestUrl = (host) -> String.format(SANDBOX_BROWSE_FORMAT, getApiBase(host), taskId); return getSingleWithParams(requestUrl, "browse sandbox for task", taskId, Optional.of(ImmutableMap.of("path", path)), SingularitySandbox.class); } /** * Retrieve part of the contents of a file in a specific task's sandbox. * * @param taskId * The task ID of the sandbox to read from * @param path * The path to the file to be read. 
Relative to the sandbox root (without a leading slash) * @param grep * Optional string to grep for * @param offset * Byte offset to start reading from * @param length * Maximum number of bytes to read * @return * A {@link MesosFileChunkObject} that contains the requested partial file contents */ public Optional<MesosFileChunkObject> readSandBoxFile(String taskId, String path, Optional<String> grep, Optional<Long> offset, Optional<Long> length) { final Function<String, String> requestUrl = (host) -> String.format(SANDBOX_READ_FILE_FORMAT, getApiBase(host), taskId); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("path", path); if (grep.isPresent()) { queryParamBuider.put("grep", grep.get()); } if (offset.isPresent()) { queryParamBuider.put("offset", offset.get()); } if (length.isPresent()) { queryParamBuider.put("length", length.get()); } return getSingleWithParams(requestUrl, "Read sandbox file for task", taskId, Optional.of(queryParamBuider.build()), MesosFileChunkObject.class); } // // S3 LOGS // /** * Retrieve the list of logs stored in S3 for a specific task * * @param taskId * The task ID to search for * * @return * A collection of {@link SingularityS3Log} */ public Collection<SingularityS3Log> getTaskLogs(String taskId) { final Function<String, String> requestUri = (host) -> String.format(S3_LOG_GET_TASK_LOGS, getApiBase(host), taskId); final String type = String.format("S3 logs for task %s", taskId); return getCollection(requestUri, type, S3_LOG_COLLECTION); } /** * Retrieve the list of logs stored in S3 for a specific request * * @param requestId * The request ID to search for * * @return * A collection of {@link SingularityS3Log} */ public Collection<SingularityS3Log> getRequestLogs(String requestId) { final Function<String, String> requestUri = (host) -> String.format(S3_LOG_GET_REQUEST_LOGS, getApiBase(host), requestId); final String type = String.format("S3 logs for request %s", requestId); return getCollection(requestUri, type, S3_LOG_COLLECTION); } /** * Retrieve the list of logs stored in S3 for a specific deploy of a singularity request * * @param requestId * The request ID to search for * @param deployId * The deploy ID (within the specified request) to search for * * @return * A collection of {@link SingularityS3Log} */ public Collection<SingularityS3Log> getDeployLogs(String requestId, String deployId) { final Function<String, String> requestUri = (host) -> String.format(S3_LOG_GET_DEPLOY_LOGS, getApiBase(host), requestId, deployId); final String type = String.format("S3 logs for deploy %s of request %s", deployId, requestId); return getCollection(requestUri, type, S3_LOG_COLLECTION); } /** * Retrieve the list of request groups * * @return * A collection of {@link SingularityRequestGroup} */ public Collection<SingularityRequestGroup> getRequestGroups() { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_FORMAT, getApiBase(host)); return getCollection(requestUri, "request groups", REQUEST_GROUP_COLLECTION); } /** * Retrieve a specific request group by id * * @param requestGroupId * The request group ID to search for * * @return * A {@link SingularityRequestGroup} */ public Optional<SingularityRequestGroup> getRequestGroup(String requestGroupId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUP_FORMAT, getApiBase(host), requestGroupId); return getSingle(requestUri, "request group", requestGroupId, SingularityRequestGroup.class); } /** * Update or add a {@link 
SingularityRequestGroup} * * @param requestGroup * The request group to update or add * * @return * A {@link SingularityRequestGroup} if the update was successful */ public Optional<SingularityRequestGroup> saveRequestGroup(SingularityRequestGroup requestGroup) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_FORMAT, getApiBase(host)); return post(requestUri, "request group", Optional.of(requestGroup), Optional.of(SingularityRequestGroup.class)); } /** * Remove a {@link SingularityRequestGroup} * * @param requestGroupId * The request group ID to search for */ public void deleteRequestGroup(String requestGroupId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUP_FORMAT, getApiBase(host), requestGroupId); delete(requestUri, "request group", requestGroupId); } // // DISASTERS // public Optional<SingularityDisastersData> getDisasterStats() { final Function<String, String> requestUri = (host) -> String.format(DISASTER_STATS_FORMAT, getApiBase(host)); return getSingle(requestUri, "disaster stats", "", SingularityDisastersData.class); } public Collection<SingularityDisasterType> getActiveDisasters() { final Function<String, String> requestUri = (host) -> String.format(ACTIVE_DISASTERS_FORMAT, getApiBase(host)); return getCollection(requestUri, "active disasters", DISASTERS_COLLECTION); } public void disableAutomatedDisasterCreation() { final Function<String, String> requestUri = (host) -> String.format(DISABLE_AUTOMATED_ACTIONS_FORMAT, getApiBase(host)); post(requestUri, "disable automated disasters", Optional.absent()); } public void enableAutomatedDisasterCreation() { final Function<String, String> requestUri = (host) -> String.format(ENABLE_AUTOMATED_ACTIONS_FORMAT, getApiBase(host)); post(requestUri, "enable automated disasters", Optional.absent()); } public void removeDisaster(SingularityDisasterType disasterType) { final Function<String, String> requestUri = (host) -> String.format(DISASTER_FORMAT, getApiBase(host), disasterType); delete(requestUri, "remove disaster", disasterType.toString()); } public void activateDisaster(SingularityDisasterType disasterType) { final Function<String, String> requestUri = (host) -> String.format(DISASTER_FORMAT, getApiBase(host), disasterType); post(requestUri, "activate disaster", Optional.absent()); } public Collection<SingularityDisabledAction> getDisabledActions() { final Function<String, String> requestUri = (host) -> String.format(DISABLED_ACTIONS_FORMAT, getApiBase(host)); return getCollection(requestUri, "disabled actions", DISABLED_ACTIONS_COLLECTION); } public void disableAction(SingularityAction action, Optional<SingularityDisabledActionRequest> request) { final Function<String, String> requestUri = (host) -> String.format(DISABLED_ACTION_FORMAT, getApiBase(host), action); post(requestUri, "disable action", request); } public void enableAction(SingularityAction action) { final Function<String, String> requestUri = (host) -> String.format(DISABLED_ACTION_FORMAT, getApiBase(host), action); delete(requestUri, "disable action", action.toString()); } // // PRIORITY // public Optional<SingularityPriorityFreezeParent> getActivePriorityFreeze() { final Function<String, String> requestUri = (host) -> String.format(PRIORITY_FREEZE_FORMAT, getApiBase(host)); return getSingle(requestUri, "priority freeze", "", SingularityPriorityFreezeParent.class); } public Optional<SingularityPriorityFreezeParent> createPriorityFreeze(SingularityPriorityFreeze priorityFreezeRequest) { final 
Function<String, String> requestUri = (host) -> String.format(PRIORITY_FREEZE_FORMAT, getApiBase(host)); return post(requestUri, "priority freeze", Optional.of(priorityFreezeRequest), Optional.of(SingularityPriorityFreezeParent.class)); } public void deletePriorityFreeze() { final Function<String, String> requestUri = (host) -> String.format(PRIORITY_FREEZE_FORMAT, getApiBase(host)); delete(requestUri, "priority freeze", ""); } // // Auth // /** * Check if a user is authorized for the specified scope on the specified request * * @param requestId * The request to check authorization on * @param userId * The user whose authorization will be checked * @param scope * The scope to check that `user` has * * @return * true if the user is authorized for scope, false otherwise */ public boolean isUserAuthorized(String requestId, String userId, SingularityAuthorizationScope scope) { final Function<String, String> requestUri = (host) -> String.format(AUTH_CHECK_USER_FORMAT, getApiBase(host), requestId, userId); Map<String, Object> params = Collections.singletonMap("scope", scope.name()); HttpResponse response = executeGetSingleWithParams(requestUri, "auth check", "", Optional.of(params)); return response.isSuccess(); } /** * Check if the current client's user is authorized for the specified scope on the specified request * * @param requestId * The request to check authorization on * @param scope * The scope to check that the current user has * * @return * true if the user is authorized for scope, false otherwise */ public boolean isUserAuthorized(String requestId, SingularityAuthorizationScope scope) { final Function<String, String> requestUri = (host) -> String.format(AUTH_CHECK_FORMAT, getApiBase(host), requestId); Map<String, Object> params = Collections.singletonMap("scope", scope.name()); HttpResponse response = executeGetSingleWithParams(requestUri, "auth check", "", Optional.of(params)); return response.isSuccess(); } // // TASK STATE // /** * Get the current state of a task by its task ID; only active and inactive tasks are searched, not pending * * @param taskId * The task ID to search for * * @return * A {@link SingularityTaskState} if the task was found among active or inactive tasks */ public Optional<SingularityTaskState> getTaskState(String taskId) { final Function<String, String> requestUri = (host) -> String.format(TRACK_BY_TASK_ID_FORMAT, getApiBase(host), taskId); return getSingle(requestUri, "track by task id", taskId, SingularityTaskState.class); } /** * Get the current state of a task by its run ID * * @param requestId * The request ID to search for the specified runId * @param runId * The run ID to search for * * @return * A {@link SingularityTaskState} if the task was found among pending, active or inactive tasks */ public Optional<SingularityTaskState> getTaskState(String requestId, String runId) { final Function<String, String> requestUri = (host) -> String.format(TRACK_BY_RUN_ID_FORMAT, getApiBase(host), requestId, runId); return getSingle(requestUri, "track by task id", String.format("%s-%s", requestId, runId), SingularityTaskState.class); } }
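/*
 * Illustrative usage sketch (editor's addition, not part of the client itself): a minimal, hedged
 * example of how a few of the calls defined above might be used together. It assumes an HttpClient
 * that the application has already configured, the four-argument constructor
 * (contextPath, httpClient, hosts, credentials), and a reachable Singularity host; every literal
 * below (host name, context path, request id, task id, sandbox path) is a placeholder rather than
 * a value taken from this file.
 */
class SingularityClientUsageSketch {
  static void sketch(com.hubspot.horizon.HttpClient httpClient) {
    SingularityClient client = new SingularityClient(
        "singularity/api",                                                // context path (placeholder)
        httpClient,                                                       // horizon HTTP client supplied by the caller
        java.util.Collections.singletonList("singularity.example.com"),  // Singularity host(s) (placeholder)
        com.google.common.base.Optional.absent());                        // no client credentials

    // Latest inactive (failed, killed, lost) task history for a request: first page of 100 entries.
    java.util.Collection<com.hubspot.singularity.SingularityTaskIdHistory> inactive =
        client.getInactiveTaskHistoryForRequest("example-request");

    // Current state of a single task, searched among active and inactive (not pending) tasks.
    com.google.common.base.Optional<com.hubspot.singularity.SingularityTaskState> taskState =
        client.getTaskState("example-request-example-deploy-1");

    // Read the first 64 KiB of a sandbox file for the same task, with no grep filter applied.
    com.google.common.base.Optional<com.hubspot.mesos.json.MesosFileChunkObject> chunk =
        client.readSandBoxFile("example-request-example-deploy-1", "stdout",
            com.google.common.base.Optional.absent(),
            com.google.common.base.Optional.of(0L),
            com.google.common.base.Optional.of(65536L));
  }
}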
SingularityClient/src/main/java/com/hubspot/singularity/client/SingularityClient.java
package com.hubspot.singularity.client; import static com.google.common.base.Preconditions.checkNotNull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Random; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.function.Predicate; import javax.inject.Provider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.type.TypeReference; import com.github.rholder.retry.RetryException; import com.github.rholder.retry.Retryer; import com.github.rholder.retry.RetryerBuilder; import com.github.rholder.retry.StopStrategies; import com.github.rholder.retry.WaitStrategies; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import com.google.inject.Inject; import com.google.inject.name.Named; import com.hubspot.horizon.HttpClient; import com.hubspot.horizon.HttpRequest; import com.hubspot.horizon.HttpRequest.Method; import com.hubspot.horizon.HttpResponse; import com.hubspot.horizon.RetryStrategy; import com.hubspot.mesos.json.MesosFileChunkObject; import com.hubspot.singularity.ExtendedTaskState; import com.hubspot.singularity.MachineState; import com.hubspot.singularity.OrderDirection; import com.hubspot.singularity.SingularityAction; import com.hubspot.singularity.SingularityAuthorizationScope; import com.hubspot.singularity.SingularityClientCredentials; import com.hubspot.singularity.SingularityClusterUtilization; import com.hubspot.singularity.SingularityCreateResult; import com.hubspot.singularity.SingularityDeleteResult; import com.hubspot.singularity.SingularityDeploy; import com.hubspot.singularity.SingularityDeployHistory; import com.hubspot.singularity.SingularityDeployKey; import com.hubspot.singularity.SingularityDeployUpdate; import com.hubspot.singularity.SingularityDisabledAction; import com.hubspot.singularity.SingularityDisasterType; import com.hubspot.singularity.SingularityDisastersData; import com.hubspot.singularity.SingularityPaginatedResponse; import com.hubspot.singularity.SingularityPendingRequest; import com.hubspot.singularity.SingularityPendingRequestParent; import com.hubspot.singularity.SingularityPendingTaskId; import com.hubspot.singularity.SingularityPriorityFreezeParent; import com.hubspot.singularity.SingularityRack; import com.hubspot.singularity.SingularityRequest; import com.hubspot.singularity.SingularityRequestBatch; import com.hubspot.singularity.SingularityRequestCleanup; import com.hubspot.singularity.SingularityRequestGroup; import com.hubspot.singularity.SingularityRequestHistory; import com.hubspot.singularity.SingularityRequestParent; import com.hubspot.singularity.SingularityRequestWithState; import com.hubspot.singularity.SingularityS3Log; import com.hubspot.singularity.SingularitySandbox; import com.hubspot.singularity.SingularityShellCommand; import com.hubspot.singularity.SingularitySlave; import com.hubspot.singularity.SingularityState; import com.hubspot.singularity.SingularityTask; import com.hubspot.singularity.SingularityTaskCleanupResult; import com.hubspot.singularity.SingularityTaskHistory; import com.hubspot.singularity.SingularityTaskHistoryUpdate; import com.hubspot.singularity.SingularityTaskId; import 
com.hubspot.singularity.SingularityTaskIdHistory; import com.hubspot.singularity.SingularityTaskIdsByStatus; import com.hubspot.singularity.SingularityTaskReconciliationStatistics; import com.hubspot.singularity.SingularityTaskRequest; import com.hubspot.singularity.SingularityTaskShellCommandHistory; import com.hubspot.singularity.SingularityTaskShellCommandRequest; import com.hubspot.singularity.SingularityTaskShellCommandUpdate; import com.hubspot.singularity.SingularityTaskState; import com.hubspot.singularity.SingularityUpdatePendingDeployRequest; import com.hubspot.singularity.SingularityWebhook; import com.hubspot.singularity.api.SingularityBounceRequest; import com.hubspot.singularity.api.SingularityDeleteRequestRequest; import com.hubspot.singularity.api.SingularityDeployRequest; import com.hubspot.singularity.api.SingularityDisabledActionRequest; import com.hubspot.singularity.api.SingularityExitCooldownRequest; import com.hubspot.singularity.api.SingularityKillTaskRequest; import com.hubspot.singularity.api.SingularityMachineChangeRequest; import com.hubspot.singularity.api.SingularityPauseRequest; import com.hubspot.singularity.api.SingularityPriorityFreeze; import com.hubspot.singularity.api.SingularityRunNowRequest; import com.hubspot.singularity.api.SingularityScaleRequest; import com.hubspot.singularity.api.SingularityUnpauseRequest; import com.hubspot.singularity.api.SingularityUpdateGroupsRequest; public class SingularityClient { private static final Logger LOG = LoggerFactory.getLogger(SingularityClient.class); private static final String BASE_API_FORMAT = "%s://%s/%s"; private static final String AUTH_FORMAT = "%s/auth"; private static final String AUTH_CHECK_FORMAT = AUTH_FORMAT + "/%s/auth-check"; private static final String AUTH_CHECK_USER_FORMAT = AUTH_CHECK_FORMAT + "/%s"; private static final String AUTH_GROUPS_CHECK_FORMAT = AUTH_FORMAT + "/groups/auth-check"; private static final String STATE_FORMAT = "%s/state"; private static final String TASK_RECONCILIATION_FORMAT = STATE_FORMAT + "/task-reconciliation"; private static final String USAGE_FORMAT = "%s/usage"; private static final String CLUSTER_UTILIZATION_FORMAT = USAGE_FORMAT + "/cluster/utilization"; private static final String RACKS_FORMAT = "%s/racks"; private static final String RACKS_DECOMISSION_FORMAT = RACKS_FORMAT + "/rack/%s/decommission"; private static final String RACKS_FREEZE_FORMAT = RACKS_FORMAT + "/rack/%s/freeze"; private static final String RACKS_ACTIVATE_FORMAT = RACKS_FORMAT + "/rack/%s/activate"; private static final String RACKS_DELETE_FORMAT = RACKS_FORMAT + "/rack/%s"; private static final String SLAVES_FORMAT = "%s/slaves"; private static final String SLAVE_DETAIL_FORMAT = SLAVES_FORMAT + "/slave/%s/details"; private static final String SLAVES_DECOMISSION_FORMAT = SLAVES_FORMAT + "/slave/%s/decommission"; private static final String SLAVES_FREEZE_FORMAT = SLAVES_FORMAT + "/slave/%s/freeze"; private static final String SLAVES_ACTIVATE_FORMAT = SLAVES_FORMAT + "/slave/%s/activate"; private static final String SLAVES_DELETE_FORMAT = SLAVES_FORMAT + "/slave/%s"; private static final String INACTIVE_SLAVES_FORMAT = "%s/inactive"; private static final String TASKS_FORMAT = "%s/tasks"; private static final String TASKS_KILL_TASK_FORMAT = TASKS_FORMAT + "/task/%s"; private static final String TASKS_GET_ACTIVE_FORMAT = TASKS_FORMAT + "/active"; private static final String TASKS_GET_ACTIVE_ON_SLAVE_FORMAT = TASKS_FORMAT + "/active/slave/%s"; private static final String 
TASKS_GET_SCHEDULED_FORMAT = TASKS_FORMAT + "/scheduled"; private static final String TASKS_GET_SCHEDULED_IDS_FORMAT = TASKS_GET_SCHEDULED_FORMAT + "/ids"; private static final String TASKS_BY_STATE_FORMAT =TASKS_FORMAT + "/ids/request/%s"; private static final String SHELL_COMMAND_FORMAT = TASKS_FORMAT + "/task/%s/command"; private static final String SHELL_COMMAND_UPDATES_FORMAT = SHELL_COMMAND_FORMAT + "/%s/%s"; private static final String HISTORY_FORMAT = "%s/history"; private static final String TASKS_HISTORY_FORMAT = HISTORY_FORMAT + "/tasks"; private static final String TASKS_HISTORY_WITHMETADATA_FORMAT = HISTORY_FORMAT + "/tasks/withmetadata"; private static final String TASK_HISTORY_FORMAT = HISTORY_FORMAT + "/task/%s"; private static final String REQUEST_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/requests"; private static final String TASK_HISTORY_BY_RUN_ID_FORMAT = HISTORY_FORMAT + "/request/%s/run/%s"; private static final String REQUEST_ACTIVE_TASKS_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/tasks/active"; private static final String REQUEST_INACTIVE_TASKS_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/tasks"; private static final String REQUEST_DEPLOY_HISTORY_FORMAT = HISTORY_FORMAT + "/request/%s/deploy/%s"; private static final String TASK_TRACKER_FORMAT = "%s/track"; private static final String TRACK_BY_TASK_ID_FORMAT = TASK_TRACKER_FORMAT + "/task/%s"; private static final String TRACK_BY_RUN_ID_FORMAT = TASK_TRACKER_FORMAT + "/run/%s/%s"; private static final String REQUESTS_FORMAT = "%s/requests"; private static final String REQUESTS_GET_BATCH_FORMAT = REQUESTS_FORMAT + "/batch"; private static final String REQUESTS_GET_ACTIVE_FORMAT = REQUESTS_FORMAT + "/active"; private static final String REQUESTS_GET_PAUSED_FORMAT = REQUESTS_FORMAT + "/paused"; private static final String REQUESTS_GET_COOLDOWN_FORMAT = REQUESTS_FORMAT + "/cooldown"; private static final String REQUESTS_GET_PENDING_FORMAT = REQUESTS_FORMAT + "/queued/pending"; private static final String REQUESTS_GET_CLEANUP_FORMAT = REQUESTS_FORMAT + "/queued/cleanup"; private static final String REQUEST_GROUPS_FORMAT = "%s/groups"; private static final String REQUEST_GROUP_FORMAT = REQUEST_GROUPS_FORMAT + "/group/%s"; private static final String REQUEST_GET_FORMAT = REQUESTS_FORMAT + "/request/%s"; private static final String REQUEST_GET_SIMPLE_FORMAT = REQUESTS_FORMAT + "/request/%s/simple"; private static final String REQUEST_CREATE_OR_UPDATE_FORMAT = REQUESTS_FORMAT; private static final String REQUEST_BY_RUN_ID_FORMAT = REQUEST_GET_FORMAT + "/run/%s"; private static final String REQUEST_DELETE_ACTIVE_FORMAT = REQUESTS_FORMAT + "/request/%s"; private static final String REQUEST_BOUNCE_FORMAT = REQUESTS_FORMAT + "/request/%s/bounce"; private static final String REQUEST_PAUSE_FORMAT = REQUESTS_FORMAT + "/request/%s/pause"; private static final String REQUEST_UNPAUSE_FORMAT = REQUESTS_FORMAT + "/request/%s/unpause"; private static final String REQUEST_SCALE_FORMAT = REQUESTS_FORMAT + "/request/%s/scale"; private static final String REQUEST_RUN_FORMAT = REQUESTS_FORMAT + "/request/%s/run"; private static final String REQUEST_EXIT_COOLDOWN_FORMAT = REQUESTS_FORMAT + "/request/%s/exit-cooldown"; private static final String REQUEST_GROUPS_UPDATE_FORMAT = REQUESTS_FORMAT + "/request/%s/groups"; private static final String REQUEST_GROUPS_UPDATE_AUTH_CHECK_FORMAT = REQUEST_GROUPS_UPDATE_FORMAT + "/auth-check"; private static final String DEPLOYS_FORMAT = "%s/deploys"; private static final String 
DELETE_DEPLOY_FORMAT = DEPLOYS_FORMAT + "/deploy/%s/request/%s"; private static final String UPDATE_DEPLOY_FORMAT = DEPLOYS_FORMAT + "/update"; private static final String WEBHOOKS_FORMAT = "%s/webhooks"; private static final String WEBHOOKS_DELETE_FORMAT = WEBHOOKS_FORMAT; private static final String WEBHOOKS_GET_QUEUED_DEPLOY_UPDATES_FORMAT = WEBHOOKS_FORMAT + "/deploy"; private static final String WEBHOOKS_GET_QUEUED_REQUEST_UPDATES_FORMAT = WEBHOOKS_FORMAT + "/request"; private static final String WEBHOOKS_GET_QUEUED_TASK_UPDATES_FORMAT = WEBHOOKS_FORMAT + "/task"; private static final String SANDBOX_FORMAT = "%s/sandbox"; private static final String SANDBOX_BROWSE_FORMAT = SANDBOX_FORMAT + "/%s/browse"; private static final String SANDBOX_READ_FILE_FORMAT = SANDBOX_FORMAT + "/%s/read"; private static final String S3_LOG_FORMAT = "%s/logs"; private static final String S3_LOG_GET_TASK_LOGS = S3_LOG_FORMAT + "/task/%s"; private static final String S3_LOG_GET_REQUEST_LOGS = S3_LOG_FORMAT + "/request/%s"; private static final String S3_LOG_GET_DEPLOY_LOGS = S3_LOG_FORMAT + "/request/%s/deploy/%s"; private static final String DISASTERS_FORMAT = "%s/disasters"; private static final String DISASTER_STATS_FORMAT = DISASTERS_FORMAT + "/stats"; private static final String ACTIVE_DISASTERS_FORMAT = DISASTERS_FORMAT + "/active"; private static final String DISABLE_AUTOMATED_ACTIONS_FORMAT = DISASTERS_FORMAT + "/disable"; private static final String ENABLE_AUTOMATED_ACTIONS_FORMAT = DISASTERS_FORMAT + "/enable"; private static final String DISASTER_FORMAT = DISASTERS_FORMAT + "/active/%s"; private static final String DISABLED_ACTIONS_FORMAT = DISASTERS_FORMAT + "/disabled-actions"; private static final String DISABLED_ACTION_FORMAT = DISASTERS_FORMAT + "/disabled-actions/%s"; private static final String PRIORITY_FORMAT = "%s/priority"; private static final String PRIORITY_FREEZE_FORMAT = PRIORITY_FORMAT + "/freeze"; private static final TypeReference<Collection<SingularityRequestParent>> REQUESTS_COLLECTION = new TypeReference<Collection<SingularityRequestParent>>() {}; private static final TypeReference<Collection<SingularityPendingRequest>> PENDING_REQUESTS_COLLECTION = new TypeReference<Collection<SingularityPendingRequest>>() {}; private static final TypeReference<Collection<SingularityRequestCleanup>> CLEANUP_REQUESTS_COLLECTION = new TypeReference<Collection<SingularityRequestCleanup>>() {}; private static final TypeReference<Collection<SingularityTask>> TASKS_COLLECTION = new TypeReference<Collection<SingularityTask>>() {}; private static final TypeReference<Collection<SingularityTaskIdHistory>> TASKID_HISTORY_COLLECTION = new TypeReference<Collection<SingularityTaskIdHistory>>() {}; private static final TypeReference<Collection<SingularityRack>> RACKS_COLLECTION = new TypeReference<Collection<SingularityRack>>() {}; private static final TypeReference<Collection<SingularitySlave>> SLAVES_COLLECTION = new TypeReference<Collection<SingularitySlave>>() {}; private static final TypeReference<Collection<SingularityWebhook>> WEBHOOKS_COLLECTION = new TypeReference<Collection<SingularityWebhook>>() {}; private static final TypeReference<Collection<SingularityDeployUpdate>> DEPLOY_UPDATES_COLLECTION = new TypeReference<Collection<SingularityDeployUpdate>>() {}; private static final TypeReference<Collection<SingularityRequestHistory>> REQUEST_UPDATES_COLLECTION = new TypeReference<Collection<SingularityRequestHistory>>() {}; private static final TypeReference<Collection<SingularityTaskHistoryUpdate>> 
TASK_UPDATES_COLLECTION = new TypeReference<Collection<SingularityTaskHistoryUpdate>>() {}; private static final TypeReference<Collection<SingularityTaskRequest>> TASKS_REQUEST_COLLECTION = new TypeReference<Collection<SingularityTaskRequest>>() {}; private static final TypeReference<Collection<SingularityTaskShellCommandHistory>> SHELL_COMMAND_HISTORY = new TypeReference<Collection<SingularityTaskShellCommandHistory>>() {}; private static final TypeReference<Collection<SingularityTaskShellCommandUpdate>> SHELL_COMMAND_UPDATES = new TypeReference<Collection<SingularityTaskShellCommandUpdate>>() {}; private static final TypeReference<Collection<SingularityPendingTaskId>> PENDING_TASK_ID_COLLECTION = new TypeReference<Collection<SingularityPendingTaskId>>() {}; private static final TypeReference<Collection<SingularityS3Log>> S3_LOG_COLLECTION = new TypeReference<Collection<SingularityS3Log>>() {}; private static final TypeReference<Collection<SingularityRequestHistory>> REQUEST_HISTORY_COLLECTION = new TypeReference<Collection<SingularityRequestHistory>>() {}; private static final TypeReference<Collection<SingularityRequestGroup>> REQUEST_GROUP_COLLECTION = new TypeReference<Collection<SingularityRequestGroup>>() {}; private static final TypeReference<Collection<SingularityDisasterType>> DISASTERS_COLLECTION = new TypeReference<Collection<SingularityDisasterType>>() {}; private static final TypeReference<Collection<SingularityDisabledAction>> DISABLED_ACTIONS_COLLECTION = new TypeReference<Collection<SingularityDisabledAction>>() {}; private static final TypeReference<SingularityPaginatedResponse<SingularityTaskIdHistory>> PAGINATED_HISTORY = new TypeReference<SingularityPaginatedResponse<SingularityTaskIdHistory>>() {}; private static final TypeReference<Collection<String>> STRING_COLLECTION = new TypeReference<Collection<String>>() {}; private final Random random; private final Provider<List<String>> hostsProvider; private final String contextPath; private final boolean ssl; private final HttpClient httpClient; private final Optional<SingularityClientCredentials> credentials; private final Retryer<HttpResponse> httpResponseRetryer; @Inject @Deprecated public SingularityClient(@Named(SingularityClientModule.CONTEXT_PATH) String contextPath, @Named(SingularityClientModule.HTTP_CLIENT_NAME) HttpClient httpClient, @Named(SingularityClientModule.HOSTS_PROPERTY_NAME) String hosts) { this(contextPath, httpClient, Arrays.asList(hosts.split(",")), Optional.absent()); } public SingularityClient(String contextPath, HttpClient httpClient, List<String> hosts, Optional<SingularityClientCredentials> credentials) { this(contextPath, httpClient, ProviderUtils.of(ImmutableList.copyOf(hosts)), credentials); } public SingularityClient(String contextPath, HttpClient httpClient, Provider<List<String>> hostsProvider, Optional<SingularityClientCredentials> credentials) { this(contextPath, httpClient, hostsProvider, credentials, false); } public SingularityClient(String contextPath, HttpClient httpClient, List<String> hosts, Optional<SingularityClientCredentials> credentials, boolean ssl) { this(contextPath, httpClient, ProviderUtils.of(ImmutableList.copyOf(hosts)), credentials, ssl); } public SingularityClient(String contextPath, HttpClient httpClient, Provider<List<String>> hostsProvider, Optional<SingularityClientCredentials> credentials, boolean ssl) { this(contextPath, httpClient, hostsProvider, credentials, ssl, 3, HttpResponse::isServerError); } public SingularityClient(String contextPath, HttpClient 
httpClient, Provider<List<String>> hostsProvider, Optional<SingularityClientCredentials> credentials, boolean ssl, int retryAttempts, Predicate<HttpResponse> retryStrategy) { this.httpClient = httpClient; this.contextPath = contextPath; this.hostsProvider = hostsProvider; this.random = new Random(); this.credentials = credentials; this.ssl = ssl; this.httpResponseRetryer = RetryerBuilder.<HttpResponse>newBuilder() .withStopStrategy(StopStrategies.stopAfterAttempt(retryAttempts)) .withWaitStrategy(WaitStrategies.exponentialWait()) .retryIfResult(retryStrategy::test) .retryIfException() .build(); } private String getApiBase(String host) { return String.format(BASE_API_FORMAT, ssl ? "https" : "http", host, contextPath); } // // HttpClient Methods // private void checkResponse(String type, HttpResponse response) { if (response.isError()) { throw fail(type, response); } } private SingularityClientException fail(String type, HttpResponse response) { String body = ""; try { body = response.getAsString(); } catch (Exception e) { LOG.warn("Unable to read body", e); } String uri = ""; try { uri = response.getRequest().getUrl().toString(); } catch (Exception e) { LOG.warn("Unable to read uri", e); } throw new SingularityClientException(String.format("Failed '%s' action on Singularity (%s) - code: %s, %s", type, uri, response.getStatusCode(), body), response.getStatusCode()); } private <T> Optional<T> getSingle(Function<String, String> hostToUrl, String type, String id, Class<T> clazz) { return getSingleWithParams(hostToUrl, type, id, Optional.absent(), clazz); } private <T> Optional<T> getSingleWithParams(Function<String, String> hostToUrl, String type, String id, Optional<Map<String, Object>> queryParams, Class<T> clazz) { final long start = System.currentTimeMillis(); HttpResponse response = executeGetSingleWithParams(hostToUrl, type, id, queryParams); if (response.getStatusCode() == 404) { return Optional.absent(); } checkResponse(type, response); LOG.info("Got {} {} in {}ms", type, id, System.currentTimeMillis() - start); return Optional.fromNullable(response.getAs(clazz)); } private <T> Optional<T> getSingleWithParams(Function<String, String> hostToUrl, String type, String id, Optional<Map<String, Object>> queryParams, TypeReference<T> typeReference) { final long start = System.currentTimeMillis(); HttpResponse response = executeGetSingleWithParams(hostToUrl, type, id, queryParams); if (response.getStatusCode() == 404) { return Optional.absent(); } checkResponse(type, response); LOG.info("Got {} {} in {}ms", type, id, System.currentTimeMillis() - start); return Optional.fromNullable(response.getAs(typeReference)); } private HttpResponse executeGetSingleWithParams(Function<String, String> hostToUrl, String type, String id, Optional<Map<String, Object>> queryParams) { checkNotNull(id, String.format("Provide a %s id", type)); LOG.info("Getting {} {} from Singularity host", type, id); return executeRequest(hostToUrl, Method.GET, Optional.absent(), queryParams.or(Collections.emptyMap())); } private <T> Collection<T> getCollection(Function<String, String> hostToUrl, String type, TypeReference<Collection<T>> typeReference) { return getCollectionWithParams(hostToUrl, type, Optional.absent(), typeReference); } private <T> Collection<T> getCollectionWithParams(Function<String, String> hostToUrl, String type, Optional<Map<String, Object>> queryParams, TypeReference<Collection<T>> typeReference) { final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(hostToUrl, Method.GET, 
Optional.absent(), queryParams.or(Collections.emptyMap())); if (response.getStatusCode() == 404) { return ImmutableList.of(); } checkResponse(type, response); LOG.info("Got {} in {}ms", type, System.currentTimeMillis() - start); return response.getAs(typeReference); } private void addQueryParams(HttpRequest.Builder requestBuilder, Map<String, ?> queryParams) { for (Entry<String, ?> queryParamEntry : queryParams.entrySet()) { if (queryParamEntry.getValue() instanceof String) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((String) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Integer) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Integer) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Long) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Long) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Boolean) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Boolean) queryParamEntry.getValue()); } else if (queryParamEntry.getValue() instanceof Set) { requestBuilder.setQueryParam(queryParamEntry.getKey()).to((Set) queryParamEntry.getValue()); } else { throw new RuntimeException(String.format("The type '%s' of query param %s is not supported. Only String, long, int, Set and boolean values are supported", queryParamEntry.getValue().getClass().getName(), queryParamEntry.getKey())); } } } private void addCredentials(HttpRequest.Builder requestBuilder) { if (credentials.isPresent()) { requestBuilder.addHeader(credentials.get().getHeaderName(), credentials.get().getToken()); } } private void delete(Function<String, String> hostToUrl, String type, String id) { delete(hostToUrl, type, id, Optional.absent()); } private <T> void delete(Function<String, String> hostToUrl, String type, String id, Optional<?> body) { delete(hostToUrl, type, id, body, Optional.<Class<T>>absent()); } private <T> Optional<T> delete(Function<String, String> hostToUrl, String type, String id, Optional<?> body, Optional<Class<T>> clazz) { return deleteWithParams(hostToUrl, type, id, body, Optional.absent(), clazz); } private <T> Optional<T> deleteWithParams(Function<String, String> hostToUrl, String type, String id, Optional<?> body, Optional<Map<String, Object>> queryParams, Optional<Class<T>> clazz) { LOG.info("Deleting {} {} from Singularity", type, id); final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(hostToUrl, Method.DELETE, body, queryParams.or(Collections.emptyMap())); if (response.getStatusCode() == 404) { LOG.info("{} ({}) was not found", type, id); return Optional.absent(); } checkResponse(type, response); LOG.info("Deleted {} ({}) from Singularity in %sms", type, id, System.currentTimeMillis() - start); if (clazz.isPresent()) { return Optional.of(response.getAs(clazz.get())); } return Optional.absent(); } private HttpResponse put(Function<String, String> hostToUri, String type, Optional<?> body) { return executeRequest(hostToUri, type, body, Method.PUT, Optional.absent()); } private <T> Optional<T> post(Function<String, String> hostToUri, String type, Optional<?> body, Optional<Class<T>> clazz) { try { HttpResponse response = executeRequest(hostToUri, type, body, Method.POST, Optional.absent()); if (clazz.isPresent()) { return Optional.of(response.getAs(clazz.get())); } } catch (Exception e) { LOG.warn("Http post failed", e); } return Optional.absent(); } private HttpResponse postWithParams(Function<String, String> hostToUri, String type, 
Optional<?> body, Optional<Map<String, Object>> queryParams) { return executeRequest(hostToUri, type, body, Method.POST, queryParams); } private HttpResponse post(Function<String, String> hostToUri, String type, Optional<?> body) { return executeRequest(hostToUri, type, body, Method.POST, Optional.absent()); } private HttpResponse post(Function<String, String> hostToUri, String type, Optional<?> body, Map<String, Object> queryParams) { return executeRequest(hostToUri, type, body, Method.POST, Optional.of(queryParams)); } private HttpResponse executeRequest(Function<String, String> hostToUri, String type, Optional<?> body, Method method, Optional<Map<String, Object>> queryParams) { final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(hostToUri, method, body, queryParams.or(Collections.emptyMap())); checkResponse(type, response); LOG.info("Successfully {}ed {} in {}ms", method, type, System.currentTimeMillis() - start); return response; } private HttpResponse executeRequest(Function<String, String> hostToUri, Method method, Optional<?> body, Map<String, ?> queryParams) { HttpRequest.Builder request = HttpRequest.newBuilder().setMethod(method); if (body.isPresent()) { request.setBody(body.get()); } addQueryParams(request, queryParams); addCredentials(request); List<String> hosts = new ArrayList<>(hostsProvider.get()); request .setRetryStrategy(RetryStrategy.NEVER_RETRY) .setMaxRetries(1); try { return httpResponseRetryer.call(() -> { if (hosts.isEmpty()) { // We've tried everything we started with. Look again. hosts.addAll(hostsProvider.get()); } int selection = random.nextInt(hosts.size()); String host = hosts.get(selection); String url = hostToUri.apply(host); hosts.remove(selection); LOG.info("Making {} request to {}", method, url); request.setUrl(url); return httpClient.execute(request.build()); }); } catch (ExecutionException | RetryException exn) { if (exn instanceof RetryException) { RetryException retryExn = (RetryException) exn; if (retryExn.getLastFailedAttempt().hasException()) { LOG.error("Failed request to Singularity", retryExn.getLastFailedAttempt().getExceptionCause()); } else { LOG.error("Failed request to Singularity", exn); } } else { LOG.error("Failed request to Singularity", exn); } throw new SingularityClientException("Failed request to Singularity", exn); } } // // GLOBAL // public SingularityState getState(Optional<Boolean> skipCache, Optional<Boolean> includeRequestIds) { final Function<String, String> uri = (host) -> String.format(STATE_FORMAT, getApiBase(host)); LOG.info("Fetching state from {}", uri); final long start = System.currentTimeMillis(); Map<String, Boolean> queryParams = new HashMap<>(); if (skipCache.isPresent()) { queryParams.put("skipCache", skipCache.get()); } if (includeRequestIds.isPresent()) { queryParams.put("includeRequestIds", includeRequestIds.get()); } HttpResponse response = executeRequest(uri, Method.GET, Optional.absent(), queryParams); checkResponse("state", response); LOG.info("Got state in {}ms", System.currentTimeMillis() - start); return response.getAs(SingularityState.class); } public Optional<SingularityTaskReconciliationStatistics> getTaskReconciliationStatistics() { final Function<String, String> uri = (host) -> String.format(TASK_RECONCILIATION_FORMAT, getApiBase(host)); LOG.info("Fetch task reconciliation statistics from {}", uri); final long start = System.currentTimeMillis(); HttpResponse response = executeRequest(uri, Method.GET, Optional.absent(), Collections.emptyMap()); if 
(response.getStatusCode() == 404) { return Optional.absent(); } checkResponse("task reconciliation statistics", response); LOG.info("Got task reconciliation statistics in {}ms", System.currentTimeMillis() - start); return Optional.of(response.getAs(SingularityTaskReconciliationStatistics.class)); } public Optional<SingularityClusterUtilization> getClusterUtilization() { final Function<String, String> uri = (host) -> String.format(CLUSTER_UTILIZATION_FORMAT, getApiBase(host)); return getSingle(uri, "clusterUtilization", "", SingularityClusterUtilization.class); } // // ACTIONS ON A SINGLE SINGULARITY REQUEST // public Optional<SingularityRequestParent> getSingularityRequest(String requestId) { final Function<String, String> singularityApiRequestUri = (host) -> String.format(REQUEST_GET_FORMAT, getApiBase(host), requestId); return getSingle(singularityApiRequestUri, "request", requestId, SingularityRequestParent.class); } // Fetch only the request + state, no additional deploy/task data public Optional<SingularityRequestWithState> getSingularityRequestSimple(String requestId) { final Function<String, String> singularityApiRequestUri = (host) -> String.format(REQUEST_GET_SIMPLE_FORMAT, getApiBase(host), requestId); return getSingle(singularityApiRequestUri, "request-simple", requestId, SingularityRequestWithState.class); } public Optional<SingularityTaskId> getTaskByRunIdForRequest(String requestId, String runId) { final Function<String, String> singularityApiRequestUri = (host) -> String.format(REQUEST_BY_RUN_ID_FORMAT, getApiBase(host), requestId, runId); return getSingle(singularityApiRequestUri, "requestByRunId", runId, SingularityTaskId.class); } public void createOrUpdateSingularityRequest(SingularityRequest request) { checkNotNull(request.getId(), "A posted Singularity Request must have an id"); final Function<String, String> requestUri = (host) -> String.format(REQUEST_CREATE_OR_UPDATE_FORMAT, getApiBase(host)); post(requestUri, String.format("request %s", request.getId()), Optional.of(request)); } /** * Delete a singularity request. * If the deletion is successful the singularity request is moved to a DELETING state and is returned. 
* If the request to be deleted is not found {code Optional.absent()} is returned * If an error occurs during deletion an exception is returned * * @param requestId * the id of the singularity request to delete * @return * the singularity request that has been moved to deleting */ public Optional<SingularityRequest> deleteSingularityRequest(String requestId, Optional<SingularityDeleteRequestRequest> deleteRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_DELETE_ACTIVE_FORMAT, getApiBase(host), requestId); return delete(requestUri, "active request", requestId, deleteRequest, Optional.of(SingularityRequest.class)); } public void pauseSingularityRequest(String requestId, Optional<SingularityPauseRequest> pauseRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_PAUSE_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("pause of request %s", requestId), pauseRequest); } public void unpauseSingularityRequest(String requestId, Optional<SingularityUnpauseRequest> unpauseRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_UNPAUSE_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("unpause of request %s", requestId), unpauseRequest); } public void scaleSingularityRequest(String requestId, SingularityScaleRequest scaleRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_SCALE_FORMAT, getApiBase(host), requestId); put(requestUri, String.format("Scale of Request %s", requestId), Optional.of(scaleRequest)); } public SingularityPendingRequestParent runSingularityRequest(String requestId, Optional<SingularityRunNowRequest> runNowRequest) { return runSingularityRequest(requestId, runNowRequest, false); } /** * * @param requestId * @param runNowRequest * @param minimalReturn - if `true` will return a SingularityPendingRequestParent that is _not_ hydrated with extra task + deploy information * @return */ public SingularityPendingRequestParent runSingularityRequest(String requestId, Optional<SingularityRunNowRequest> runNowRequest, boolean minimalReturn) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_RUN_FORMAT, getApiBase(host), requestId); final HttpResponse response = post(requestUri, String.format("run of request %s", requestId), runNowRequest, ImmutableMap.of("minimal", String.valueOf(minimalReturn))); return response.getAs(SingularityPendingRequestParent.class); } public void bounceSingularityRequest(String requestId, Optional<SingularityBounceRequest> bounceOptions) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_BOUNCE_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("bounce of request %s", requestId), bounceOptions); } public void exitCooldown(String requestId, Optional<SingularityExitCooldownRequest> exitCooldownRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_EXIT_COOLDOWN_FORMAT, getApiBase(host), requestId); post(requestUri, String.format("exit cooldown of request %s", requestId), exitCooldownRequest); } public SingularityPendingRequestParent updateAuthorizedGroups(String requestId, SingularityUpdateGroupsRequest updateGroupsRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_UPDATE_FORMAT, getApiBase(host), requestId); final HttpResponse response = post(requestUri, String.format("update authorized groups of request %s", requestId), Optional.of(updateGroupsRequest)); 
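// Deserialize the endpoint's response body into the SingularityPendingRequestParent describing the group update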
return response.getAs(SingularityPendingRequestParent.class); } public boolean checkAuthForRequestGroupsUpdate(String requestId, SingularityUpdateGroupsRequest updateGroupsRequest) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_UPDATE_AUTH_CHECK_FORMAT, getApiBase(host), requestId); final HttpResponse response = post(requestUri, String.format("check auth for update authorized groups of request %s", requestId), Optional.of(updateGroupsRequest)); return response.isSuccess(); } // // ACTIONS ON A DEPLOY FOR A SINGULARITY REQUEST // public SingularityRequestParent createDeployForSingularityRequest(String requestId, SingularityDeploy pendingDeploy, Optional<Boolean> deployUnpause, Optional<String> message) { return createDeployForSingularityRequest(requestId, pendingDeploy, deployUnpause, message, Optional.absent()); } public SingularityRequestParent createDeployForSingularityRequest(String requestId, SingularityDeploy pendingDeploy, Optional<Boolean> deployUnpause, Optional<String> message, Optional<SingularityRequest> updatedRequest) { final Function<String, String> requestUri = (String host) -> String.format(DEPLOYS_FORMAT, getApiBase(host)); HttpResponse response = post(requestUri, String.format("new deploy %s", new SingularityDeployKey(requestId, pendingDeploy.getId())), Optional.of(new SingularityDeployRequest(pendingDeploy, deployUnpause, message, updatedRequest))); return getAndLogRequestAndDeployStatus(response.getAs(SingularityRequestParent.class)); } private SingularityRequestParent getAndLogRequestAndDeployStatus(SingularityRequestParent singularityRequestParent) { String activeDeployId = singularityRequestParent.getActiveDeploy().isPresent() ? singularityRequestParent.getActiveDeploy().get().getId() : "No Active Deploy"; String pendingDeployId = singularityRequestParent.getPendingDeploy().isPresent() ? 
singularityRequestParent.getPendingDeploy().get().getId() : "No Pending deploy"; LOG.info("Deploy status: Singularity request {} -> pending deploy: '{}', active deploy: '{}'", singularityRequestParent.getRequest().getId(), pendingDeployId, activeDeployId); return singularityRequestParent; } public SingularityRequestParent cancelPendingDeployForSingularityRequest(String requestId, String deployId) { final Function<String, String> requestUri = (host) -> String.format(DELETE_DEPLOY_FORMAT, getApiBase(host), deployId, requestId); SingularityRequestParent singularityRequestParent = delete(requestUri, "pending deploy", new SingularityDeployKey(requestId, deployId).getId(), Optional.absent(), Optional.of(SingularityRequestParent.class)).get(); return getAndLogRequestAndDeployStatus(singularityRequestParent); } public SingularityRequestParent updateIncrementalDeployInstanceCount(SingularityUpdatePendingDeployRequest updateRequest) { final Function<String, String> requestUri = (host) -> String.format(UPDATE_DEPLOY_FORMAT, getApiBase(host)); HttpResponse response = post(requestUri, String.format("update deploy %s", new SingularityDeployKey(updateRequest.getRequestId(), updateRequest.getDeployId())), Optional.of(updateRequest)); return getAndLogRequestAndDeployStatus(response.getAs(SingularityRequestParent.class)); } // // REQUESTS // /** * Get all singularity requests that their state is either ACTIVE, PAUSED or COOLDOWN * * For the requests that are pending to become ACTIVE use: * {@link SingularityClient#getPendingSingularityRequests()} * * For the requests that are cleaning up use: * {@link SingularityClient#getCleanupSingularityRequests()} * * * Use {@link SingularityClient#getActiveSingularityRequests()}, {@link SingularityClient#getPausedSingularityRequests()}, * {@link SingularityClient#getCoolDownSingularityRequests()} respectively to get only the ACTIVE, PAUSED or COOLDOWN requests. * * @return * returns all the [ACTIVE, PAUSED, COOLDOWN] {@link SingularityRequestParent} instances. 
* */ public Collection<SingularityRequestParent> getSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_FORMAT, getApiBase(host)); return getCollection(requestUri, "[ACTIVE, PAUSED, COOLDOWN] requests", REQUESTS_COLLECTION); } /** * Get a specific batch of requests * * @return * A SingularityRequestBatch containing the found request data and not found request ids */ public SingularityRequestBatch getRequestsBatch(Set<String> requestIds) { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_BATCH_FORMAT, getApiBase(host)); Map<String, Object> queryParams = new HashMap<>(); queryParams.put("id", requestIds); Optional<SingularityRequestBatch> maybeResult = getSingleWithParams(requestUri, "requests BATCH", "requests BATCH", Optional.of(queryParams), SingularityRequestBatch.class); if (!maybeResult.isPresent()) { throw new SingularityClientException("Singularity url not found", 404); } else { return maybeResult.get(); } } /** * Get all requests that their state is ACTIVE * * @return * All ACTIVE {@link SingularityRequestParent} instances */ public Collection<SingularityRequestParent> getActiveSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_ACTIVE_FORMAT, getApiBase(host)); return getCollection(requestUri, "ACTIVE requests", REQUESTS_COLLECTION); } /** * Get all requests that their state is PAUSED * ACTIVE requests are paused by users, which is equivalent to stop their tasks from running without undeploying them * * @return * All PAUSED {@link SingularityRequestParent} instances */ public Collection<SingularityRequestParent> getPausedSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_PAUSED_FORMAT, getApiBase(host)); return getCollection(requestUri, "PAUSED requests", REQUESTS_COLLECTION); } /** * Get all requests that has been set to a COOLDOWN state by singularity * * @return * All {@link SingularityRequestParent} instances that their state is COOLDOWN */ public Collection<SingularityRequestParent> getCoolDownSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_COOLDOWN_FORMAT, getApiBase(host)); return getCollection(requestUri, "COOLDOWN requests", REQUESTS_COLLECTION); } /** * Get all requests that are pending to become ACTIVE * * @return * A collection of {@link SingularityPendingRequest} instances that hold information about the singularity requests that are pending to become ACTIVE */ public Collection<SingularityPendingRequest> getPendingSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_PENDING_FORMAT, getApiBase(host)); return getCollection(requestUri, "pending requests", PENDING_REQUESTS_COLLECTION); } /** * Get all requests that are cleaning up * Requests that are cleaning up are those that have been marked for removal and their tasks are being stopped/removed * before they are being removed. So after their have been cleaned up, these request cease to exist in Singularity. * * @return * A collection of {@link SingularityRequestCleanup} instances that hold information about all singularity requests * that are marked for deletion and are currently cleaning up. 
*/ public Collection<SingularityRequestCleanup> getCleanupSingularityRequests() { final Function<String, String> requestUri = (host) -> String.format(REQUESTS_GET_CLEANUP_FORMAT, getApiBase(host)); return getCollection(requestUri, "cleaning requests", CLEANUP_REQUESTS_COLLECTION); } // // SINGULARITY TASK COLLECTIONS // // // ACTIVE TASKS // public Collection<SingularityTask> getActiveTasks() { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_ACTIVE_FORMAT, getApiBase(host)); return getCollection(requestUri, "active tasks", TASKS_COLLECTION); } public Collection<SingularityTask> getActiveTasksOnSlave(final String slaveId) { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_ACTIVE_ON_SLAVE_FORMAT, getApiBase(host), slaveId); return getCollection(requestUri, String.format("active tasks on slave %s", slaveId), TASKS_COLLECTION); } public Optional<SingularityTaskCleanupResult> killTask(String taskId, Optional<SingularityKillTaskRequest> killTaskRequest) { final Function<String, String> requestUri = (host) -> String.format(TASKS_KILL_TASK_FORMAT, getApiBase(host), taskId); return delete(requestUri, "task", taskId, killTaskRequest, Optional.of(SingularityTaskCleanupResult.class)); } // // SCHEDULED TASKS // public Collection<SingularityTaskRequest> getScheduledTasks() { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_SCHEDULED_FORMAT, getApiBase(host)); return getCollection(requestUri, "scheduled tasks", TASKS_REQUEST_COLLECTION); } public Collection<SingularityPendingTaskId> getScheduledTaskIds() { final Function<String, String> requestUri = (host) -> String.format(TASKS_GET_SCHEDULED_IDS_FORMAT, getApiBase(host)); return getCollection(requestUri, "scheduled task ids", PENDING_TASK_ID_COLLECTION); } public Optional<SingularityTaskIdsByStatus> getTaskIdsByStatusForRequest(String requestId) { final Function<String, String> requestUri = (host) -> String.format(TASKS_BY_STATE_FORMAT, getApiBase(host), requestId); return getSingle(requestUri, "task ids by state", requestId, SingularityTaskIdsByStatus.class); } public SingularityTaskShellCommandRequest startShellCommand(String taskId, SingularityShellCommand shellCommand) { final Function<String, String> requestUri = (host) -> String.format(SHELL_COMMAND_FORMAT, getApiBase(host), taskId); return post(requestUri, "start shell command", Optional.of(shellCommand), Optional.of(SingularityTaskShellCommandRequest.class)).orNull(); } public Collection<SingularityTaskShellCommandHistory> getShellCommandHistory(String taskId) { final Function<String, String> requestUri = (host) -> String.format(SHELL_COMMAND_FORMAT, getApiBase(host), taskId); return getCollection(requestUri, "get shell command history", SHELL_COMMAND_HISTORY); } public Collection<SingularityTaskShellCommandUpdate> getShellCommandUpdates(SingularityTaskShellCommandRequest shellCommandRequest) { final Function<String, String> requestUri = (host) -> String.format(SHELL_COMMAND_UPDATES_FORMAT, getApiBase(host), shellCommandRequest.getTaskId(), shellCommandRequest.getShellCommand().getName(), shellCommandRequest.getTimestamp()); return getCollection(requestUri, "get shell command update history", SHELL_COMMAND_UPDATES); } // // RACKS // private Collection<SingularityRack> getRacks(Optional<MachineState> rackState) { final Function<String, String> requestUri = (host) -> String.format(RACKS_FORMAT, getApiBase(host)); Optional<Map<String, Object>> maybeQueryParams = Optional.absent(); String type = "racks"; if 
(rackState.isPresent()) { maybeQueryParams = Optional.of(ImmutableMap.of("state", rackState.get().toString())); type = String.format("%s racks", rackState.get().toString()); } return getCollectionWithParams(requestUri, type, maybeQueryParams, RACKS_COLLECTION); } @Deprecated public void decomissionRack(String rackId) { decommissionRack(rackId, Optional.absent()); } public void decommissionRack(String rackId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(RACKS_DECOMISSION_FORMAT, getApiBase(host), rackId); post(requestUri, String.format("decommission rack %s", rackId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void freezeRack(String rackId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(RACKS_FREEZE_FORMAT, getApiBase(host), rackId); post(requestUri, String.format("freeze rack %s", rackId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void activateRack(String rackId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(RACKS_ACTIVATE_FORMAT, getApiBase(host), rackId); post(requestUri, String.format("activate rack %s", rackId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void deleteRack(String rackId) { final Function<String, String> requestUri = (host) -> String.format(RACKS_DELETE_FORMAT, getApiBase(host), rackId); delete(requestUri, "dead rack", rackId); } // // SLAVES // /** * Retrieve the list of all known slaves, optionally filtering by a particular slave state * * @param slaveState * Optionally specify a particular state to filter slaves by * @return * A collection of {@link SingularitySlave} */ public Collection<SingularitySlave> getSlaves(Optional<MachineState> slaveState) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_FORMAT, getApiBase(host)); Optional<Map<String, Object>> maybeQueryParams = Optional.absent(); String type = "slaves"; if (slaveState.isPresent()) { maybeQueryParams = Optional.of(ImmutableMap.of("state", slaveState.get().toString())); type = String.format("%s slaves", slaveState.get().toString()); } return getCollectionWithParams(requestUri, type, maybeQueryParams, SLAVES_COLLECTION); } /** * Retrieve a single slave by ID * * @param slaveId * The slave ID to search for * @return * A {@link SingularitySlave} */ public Optional<SingularitySlave> getSlave(String slaveId) { final Function<String, String> requestUri = (host) -> String.format(SLAVE_DETAIL_FORMAT, getApiBase(host), slaveId); return getSingle(requestUri, "slave", slaveId, SingularitySlave.class); } @Deprecated public void decomissionSlave(String slaveId) { decommissionSlave(slaveId, Optional.absent()); } public void decommissionSlave(String slaveId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_DECOMISSION_FORMAT, getApiBase(host), slaveId); post(requestUri, String.format("decommission slave %s", slaveId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void freezeSlave(String slaveId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_FREEZE_FORMAT, getApiBase(host), 
slaveId); post(requestUri, String.format("freeze slave %s", slaveId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void activateSlave(String slaveId, Optional<SingularityMachineChangeRequest> machineChangeRequest) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_ACTIVATE_FORMAT, getApiBase(host), slaveId); post(requestUri, String.format("activate slave %s", slaveId), machineChangeRequest.or(Optional.of(SingularityMachineChangeRequest.empty()))); } public void deleteSlave(String slaveId) { final Function<String, String> requestUri = (host) -> String.format(SLAVES_DELETE_FORMAT, getApiBase(host), slaveId); delete(requestUri, "deleting slave", slaveId); } // // REQUEST HISTORY // /** * Retrieve a paged list of updates for a particular {@link SingularityRequest} * * @param requestId * Request ID to look up * @param count * Number of items to return per page * @param page * Which page of items to return * @return * A list of {@link SingularityRequestHistory} */ public Collection<SingularityRequestHistory> getHistoryForRequest(String requestId, Optional<Integer> count, Optional<Integer> page) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_HISTORY_FORMAT, getApiBase(host), requestId); Optional<Map<String, Object>> maybeQueryParams = Optional.absent(); ImmutableMap.Builder<String, Object> queryParamsBuilder = ImmutableMap.builder(); if (count.isPresent() ) { queryParamsBuilder.put("count", count.get()); } if (page.isPresent()) { queryParamsBuilder.put("page", page.get()); } Map<String, Object> queryParams = queryParamsBuilder.build(); if (!queryParams.isEmpty()) { maybeQueryParams = Optional.of(queryParams); } return getCollectionWithParams(requestUri, "request history", maybeQueryParams, REQUEST_HISTORY_COLLECTION); } // // Inactive/Bad Slaves // public Collection<String> getInactiveSlaves() { final Function<String, String> requestUri = (host) -> String.format(INACTIVE_SLAVES_FORMAT, getApiBase(host)); return getCollection(requestUri, "inactiveSlaves", STRING_COLLECTION); } public void markSlaveAsInactive(String host) { final Function<String, String> requestUri = (singularityHost) -> String.format(INACTIVE_SLAVES_FORMAT, getApiBase(singularityHost)); Map<String, Object> params = Collections.singletonMap("host", host); postWithParams(requestUri, "deactivateSlave", Optional.absent(), Optional.of(params)); } public void clearInactiveSlave(String host) { final Function<String, String> requestUri = (singularityHost) -> String.format(INACTIVE_SLAVES_FORMAT, getApiBase(host)); Map<String, Object> params = Collections.singletonMap("host", host); deleteWithParams(requestUri, "clearInactiveSlave", host, Optional.absent(), Optional.of(params), Optional.absent()); } // // TASK HISTORY // /** * Retrieve information about an inactive task by its id * * @param taskId * The task ID to search for * @return * A {@link SingularityTaskIdHistory} object if the task exists */ public Optional<SingularityTaskHistory> getHistoryForTask(String taskId) { final Function<String, String> requestUri = (host) -> String.format(TASK_HISTORY_FORMAT, getApiBase(host), taskId); return getSingle(requestUri, "task history", taskId, SingularityTaskHistory.class); } public Collection<SingularityTaskIdHistory> getActiveTaskHistoryForRequest(String requestId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_ACTIVE_TASKS_HISTORY_FORMAT, getApiBase(host), requestId); final String type = String.format("active 
task history for %s", requestId); return getCollection(requestUri, type, TASKID_HISTORY_COLLECTION); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId) { return getInactiveTaskHistoryForRequest(requestId, 100, 1); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, String deployId) { return getInactiveTaskHistoryForRequest(requestId, 100, 1, Optional.absent(), Optional.of(deployId), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent()); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, int count, int page) { return getInactiveTaskHistoryForRequest(requestId, count, page, Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent(), Optional.absent()); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, int count, int page, Optional<String> host, Optional<String> runId, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection) { final Function<String, String> requestUri = (singularityHost) -> String.format(REQUEST_INACTIVE_TASKS_HISTORY_FORMAT, getApiBase(singularityHost), requestId); final String type = String.format("inactive (failed, killed, lost) task history for request %s", requestId); Map<String, Object> params = taskSearchParams(Optional.of(requestId), Optional.absent(), runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getCollectionWithParams(requestUri, type, Optional.of(params), TASKID_HISTORY_COLLECTION); } public Collection<SingularityTaskIdHistory> getInactiveTaskHistoryForRequest(String requestId, int count, int page, Optional<String> host, Optional<String> deployId, Optional<String> runId, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection) { final Function<String, String> requestUri = (singularityHost) -> String.format(REQUEST_INACTIVE_TASKS_HISTORY_FORMAT, getApiBase(singularityHost), requestId); final String type = String.format("inactive (failed, killed, lost) task history for request %s", requestId); Map<String, Object> params = taskSearchParams(Optional.of(requestId), deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getCollectionWithParams(requestUri, type, Optional.of(params), TASKID_HISTORY_COLLECTION); } public Optional<SingularityDeployHistory> getHistoryForRequestDeploy(String requestId, String deployId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_DEPLOY_HISTORY_FORMAT, getApiBase(host), requestId, deployId); return getSingle(requestUri, "deploy history", new SingularityDeployKey(requestId, deployId).getId(), SingularityDeployHistory.class); } /** * Retrieve the task id and state for an inactive task by its runId * * @param requestId * The request ID to search for * @param runId * The run ID to search for * @return * A {@link SingularityTaskIdHistory} object if the task exists * @deprecated use {@link #getTaskIdHistoryByRunId} */ @Deprecated public 
Optional<SingularityTaskIdHistory> getHistoryForTask(String requestId, String runId) { return getTaskIdHistoryByRunId(requestId, runId); } public Optional<SingularityTaskIdHistory> getTaskIdHistoryByRunId(String requestId, String runId) { final Function<String, String> requestUri = (host) -> String.format(TASK_HISTORY_BY_RUN_ID_FORMAT, getApiBase(host), requestId, runId); return getSingle(requestUri, "task history", requestId, SingularityTaskIdHistory.class); } public Collection<SingularityTaskIdHistory> getTaskHistory(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection, Integer count, Integer page) { final Function<String, String> requestUri = (singularityHost) -> String.format(TASKS_HISTORY_FORMAT, getApiBase(singularityHost)); Map<String, Object> params = taskSearchParams(requestId, deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getCollectionWithParams(requestUri, "task id history", Optional.of(params), TASKID_HISTORY_COLLECTION); } public Optional<SingularityPaginatedResponse<SingularityTaskIdHistory>> getTaskHistoryWithMetadata(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection, Integer count, Integer page) { final Function<String, String> requestUri = (singularityHost) -> String.format(TASKS_HISTORY_WITHMETADATA_FORMAT, getApiBase(singularityHost)); Map<String, Object> params = taskSearchParams(requestId, deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter, orderDirection, count, page); return getSingleWithParams(requestUri, "task id history with metadata", "", Optional.of(params), PAGINATED_HISTORY); } private Map<String, Object> taskSearchParams(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host, Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore, Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection, Integer count, Integer page) { Map<String, Object> params = new HashMap<>(); if (requestId.isPresent()) { params.put("requestId", requestId.get()); } if (deployId.isPresent()) { params.put("deployId", deployId.get()); } if (runId.isPresent()) { params.put("runId", runId.get()); } if (host.isPresent()) { params.put("host", host.get()); } if (lastTaskStatus.isPresent()) { params.put("lastTaskStatus", lastTaskStatus.get().toString()); } if (startedBefore.isPresent()) { params.put("startedBefore", startedBefore.get()); } if (startedAfter.isPresent()) { params.put("startedAfter", startedAfter.get()); } if (updatedBefore.isPresent()) { params.put("updatedBefore", updatedBefore.get()); } if (updatedAfter.isPresent()) { params.put("updatedAfter", updatedAfter.get()); } if (orderDirection.isPresent()) { params.put("orderDirection", orderDirection.get().toString()); } params.put("count", count); params.put("page", page); return params; } // // WEBHOOKS // public Optional<SingularityCreateResult> addWebhook(SingularityWebhook 
webhook) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_FORMAT, getApiBase(host)); return post(requestUri, String.format("webhook %s", webhook.getUri()), Optional.of(webhook), Optional.of(SingularityCreateResult.class)); } public Optional<SingularityDeleteResult> deleteWebhook(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_DELETE_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return deleteWithParams(requestUri, String.format("webhook with id %s", webhookId), webhookId, Optional.absent(), Optional.of(queryParamBuider.build()), Optional.of(SingularityDeleteResult.class)); } public Collection<SingularityWebhook> getActiveWebhook() { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_FORMAT, getApiBase(host)); return getCollection(requestUri, "active webhooks", WEBHOOKS_COLLECTION); } public Collection<SingularityDeployUpdate> getQueuedDeployUpdates(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_GET_QUEUED_DEPLOY_UPDATES_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return getCollectionWithParams(requestUri, "deploy updates", Optional.of(queryParamBuider.build()), DEPLOY_UPDATES_COLLECTION); } public Collection<SingularityRequestHistory> getQueuedRequestUpdates(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_GET_QUEUED_REQUEST_UPDATES_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return getCollectionWithParams(requestUri, "request updates", Optional.of(queryParamBuider.build()), REQUEST_UPDATES_COLLECTION); } public Collection<SingularityTaskHistoryUpdate> getQueuedTaskUpdates(String webhookId) { final Function<String, String> requestUri = (host) -> String.format(WEBHOOKS_GET_QUEUED_TASK_UPDATES_FORMAT, getApiBase(host)); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("webhookId", webhookId); return getCollectionWithParams(requestUri, "request updates", Optional.of(queryParamBuider.build()), TASK_UPDATES_COLLECTION); } // // SANDBOX // /** * Retrieve information about a specific task's sandbox * * @param taskId * The task ID to browse * @param path * The path to browse from. * if not specified it will browse from the sandbox root. * @return * A {@link SingularitySandbox} object that captures the information for the path to a specific task's Mesos sandbox */ public Optional<SingularitySandbox> browseTaskSandBox(String taskId, String path) { final Function<String, String> requestUrl = (host) -> String.format(SANDBOX_BROWSE_FORMAT, getApiBase(host), taskId); return getSingleWithParams(requestUrl, "browse sandbox for task", taskId, Optional.of(ImmutableMap.of("path", path)), SingularitySandbox.class); } /** * Retrieve part of the contents of a file in a specific task's sandbox. * * @param taskId * The task ID of the sandbox to read from * @param path * The path to the file to be read. 
Relative to the sandbox root (without a leading slash) * @param grep * Optional string to grep for * @param offset * Byte offset to start reading from * @param length * Maximum number of bytes to read * @return * A {@link MesosFileChunkObject} that contains the requested partial file contents */ public Optional<MesosFileChunkObject> readSandBoxFile(String taskId, String path, Optional<String> grep, Optional<Long> offset, Optional<Long> length) { final Function<String, String> requestUrl = (host) -> String.format(SANDBOX_READ_FILE_FORMAT, getApiBase(host), taskId); Builder<String, Object> queryParamBuider = ImmutableMap.<String, Object>builder().put("path", path); if (grep.isPresent()) { queryParamBuider.put("grep", grep.get()); } if (offset.isPresent()) { queryParamBuider.put("offset", offset.get()); } if (length.isPresent()) { queryParamBuider.put("length", length.get()); } return getSingleWithParams(requestUrl, "Read sandbox file for task", taskId, Optional.of(queryParamBuider.build()), MesosFileChunkObject.class); } // // S3 LOGS // /** * Retrieve the list of logs stored in S3 for a specific task * * @param taskId * The task ID to search for * * @return * A collection of {@link SingularityS3Log} */ public Collection<SingularityS3Log> getTaskLogs(String taskId) { final Function<String, String> requestUri = (host) -> String.format(S3_LOG_GET_TASK_LOGS, getApiBase(host), taskId); final String type = String.format("S3 logs for task %s", taskId); return getCollection(requestUri, type, S3_LOG_COLLECTION); } /** * Retrieve the list of logs stored in S3 for a specific request * * @param requestId * The request ID to search for * * @return * A collection of {@link SingularityS3Log} */ public Collection<SingularityS3Log> getRequestLogs(String requestId) { final Function<String, String> requestUri = (host) -> String.format(S3_LOG_GET_REQUEST_LOGS, getApiBase(host), requestId); final String type = String.format("S3 logs for request %s", requestId); return getCollection(requestUri, type, S3_LOG_COLLECTION); } /** * Retrieve the list of logs stored in S3 for a specific deploy if a singularity request * * @param requestId * The request ID to search for * @param deployId * The deploy ID (within the specified request) to search for * * @return * A collection of {@link SingularityS3Log} */ public Collection<SingularityS3Log> getDeployLogs(String requestId, String deployId) { final Function<String, String> requestUri = (host) -> String.format(S3_LOG_GET_DEPLOY_LOGS, getApiBase(host), requestId, deployId); final String type = String.format("S3 logs for deploy %s of request %s", deployId, requestId); return getCollection(requestUri, type, S3_LOG_COLLECTION); } /** * Retrieve the list of request groups * * @return * A collection of {@link SingularityRequestGroup} */ public Collection<SingularityRequestGroup> getRequestGroups() { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_FORMAT, getApiBase(host)); return getCollection(requestUri, "request groups", REQUEST_GROUP_COLLECTION); } /** * Retrieve a specific request group by id * * @param requestGroupId * The request group ID to search for * * @return * A {@link SingularityRequestGroup} */ public Optional<SingularityRequestGroup> getRequestGroup(String requestGroupId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUP_FORMAT, getApiBase(host), requestGroupId); return getSingle(requestUri, "request group", requestGroupId, SingularityRequestGroup.class); } /** * Update or add a {@link 
SingularityRequestGroup} * * @param requestGroup * The request group to update or add * * @return * A {@link SingularityRequestGroup} if the update was successful */ public Optional<SingularityRequestGroup> saveRequestGroup(SingularityRequestGroup requestGroup) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUPS_FORMAT, getApiBase(host)); return post(requestUri, "request group", Optional.of(requestGroup), Optional.of(SingularityRequestGroup.class)); } /** * Remove a {@link SingularityRequestGroup} * * @param requestGroupId * The request group ID to search for */ public void deleteRequestGroup(String requestGroupId) { final Function<String, String> requestUri = (host) -> String.format(REQUEST_GROUP_FORMAT, getApiBase(host), requestGroupId); delete(requestUri, "request group", requestGroupId); } // // DISASTERS // public Optional<SingularityDisastersData> getDisasterStats() { final Function<String, String> requestUri = (host) -> String.format(DISASTER_STATS_FORMAT, getApiBase(host)); return getSingle(requestUri, "disaster stats", "", SingularityDisastersData.class); } public Collection<SingularityDisasterType> getActiveDisasters() { final Function<String, String> requestUri = (host) -> String.format(ACTIVE_DISASTERS_FORMAT, getApiBase(host)); return getCollection(requestUri, "active disasters", DISASTERS_COLLECTION); } public void disableAutomatedDisasterCreation() { final Function<String, String> requestUri = (host) -> String.format(DISABLE_AUTOMATED_ACTIONS_FORMAT, getApiBase(host)); post(requestUri, "disable automated disasters", Optional.absent()); } public void enableAutomatedDisasterCreation() { final Function<String, String> requestUri = (host) -> String.format(ENABLE_AUTOMATED_ACTIONS_FORMAT, getApiBase(host)); post(requestUri, "enable automated disasters", Optional.absent()); } public void removeDisaster(SingularityDisasterType disasterType) { final Function<String, String> requestUri = (host) -> String.format(DISASTER_FORMAT, getApiBase(host), disasterType); delete(requestUri, "remove disaster", disasterType.toString()); } public void activateDisaster(SingularityDisasterType disasterType) { final Function<String, String> requestUri = (host) -> String.format(DISASTER_FORMAT, getApiBase(host), disasterType); post(requestUri, "activate disaster", Optional.absent()); } public Collection<SingularityDisabledAction> getDisabledActions() { final Function<String, String> requestUri = (host) -> String.format(DISABLED_ACTIONS_FORMAT, getApiBase(host)); return getCollection(requestUri, "disabled actions", DISABLED_ACTIONS_COLLECTION); } public void disableAction(SingularityAction action, Optional<SingularityDisabledActionRequest> request) { final Function<String, String> requestUri = (host) -> String.format(DISABLED_ACTION_FORMAT, getApiBase(host), action); post(requestUri, "disable action", request); } public void enableAction(SingularityAction action) { final Function<String, String> requestUri = (host) -> String.format(DISABLED_ACTION_FORMAT, getApiBase(host), action); delete(requestUri, "disable action", action.toString()); } // // PRIORITY // public Optional<SingularityPriorityFreezeParent> getActivePriorityFreeze() { final Function<String, String> requestUri = (host) -> String.format(PRIORITY_FREEZE_FORMAT, getApiBase(host)); return getSingle(requestUri, "priority freeze", "", SingularityPriorityFreezeParent.class); } public Optional<SingularityPriorityFreezeParent> createPriorityFreeze(SingularityPriorityFreeze priorityFreezeRequest) { final 
Function<String, String> requestUri = (host) -> String.format(PRIORITY_FREEZE_FORMAT, getApiBase(host)); return post(requestUri, "priority freeze", Optional.of(priorityFreezeRequest), Optional.of(SingularityPriorityFreezeParent.class)); } public void deletePriorityFreeze() { final Function<String, String> requestUri = (host) -> String.format(PRIORITY_FREEZE_FORMAT, getApiBase(host)); delete(requestUri, "priority freeze", ""); } // // Auth // /** * Check if a user is authorized for the specified scope on the specified request * * @param requestId * The request to check authorization on * @param userId * The user whose authorization will be checked * @param scope * The scope to check that `user` has * * @return * true if the user is authorized for scope, false otherwise */ public boolean isUserAuthorized(String requestId, String userId, SingularityAuthorizationScope scope) { final Function<String, String> requestUri = (host) -> String.format(AUTH_CHECK_USER_FORMAT, getApiBase(host), requestId, userId); Map<String, Object> params = Collections.singletonMap("scope", scope.name()); HttpResponse response = executeGetSingleWithParams(requestUri, "auth check", "", Optional.of(params)); return response.isSuccess(); } /** * Check if the current client's user is authorized for the specified scope on the specified request * * @param requestId * The request to check authorization on * @param userId * The user whose authorization will be checked * @param scope * The scope to check that `user` has * * @return * true if the user is authorized for scope, false otherwise */ public boolean isUserAuthorized(String requestId, SingularityAuthorizationScope scope) { final Function<String, String> requestUri = (host) -> String.format(AUTH_CHECK_FORMAT, getApiBase(host), requestId); Map<String, Object> params = Collections.singletonMap("scope", scope.name()); HttpResponse response = executeGetSingleWithParams(requestUri, "auth check", "", Optional.of(params)); return response.isSuccess(); } // // TASK STATE // /** * Get the current state of a task by its task ID, will only search active/inactive tasks, not pending * * @param taskId * The task ID to search for * * @return * A {@link SingularityTaskState} if the task was found among active or inactive tasks */ public Optional<SingularityTaskState> getTaskState(String taskId) { final Function<String, String> requestUri = (host) -> String.format(TRACK_BY_TASK_ID_FORMAT, getApiBase(host), taskId); return getSingle(requestUri, "track by task id", taskId, SingularityTaskState.class); } /** * Get the current state of a task by its run IDg * * @param requestId * The request ID to search for the specified runId * @param runId * The run ID to search for * * @return * A {@link SingularityTaskState} if the task was found among pending, active or inactive tasks */ public Optional<SingularityTaskState> getTaskState(String requestId, String runId) { final Function<String, String> requestUri = (host) -> String.format(TRACK_BY_RUN_ID_FORMAT, getApiBase(host), requestId, runId); return getSingle(requestUri, "track by task id", String.format("%s-%s", requestId, runId), SingularityTaskState.class); } }
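For reference, a minimal sketch of how a caller might use the request-listing and history methods defined in the client above. The construction of the client itself (hosts, context path, HTTP client) is not part of this excerpt, so the SingularityClient passed in is assumed to be already configured, and the getRequest().getId() accessors on the model classes are assumptions rather than something shown here.

import com.google.common.base.Optional;
import com.hubspot.singularity.SingularityRequestHistory;
import com.hubspot.singularity.SingularityRequestParent;
import com.hubspot.singularity.client.SingularityClient;

import java.util.Collection;

public class SingularityClientUsageSketch {

  // Lists every ACTIVE request and prints the first page of its update history.
  static void printActiveRequestsWithHistory(SingularityClient client) {
    Collection<SingularityRequestParent> active = client.getActiveSingularityRequests();

    for (SingularityRequestParent parent : active) {
      // getRequest().getId() is assumed from the Singularity model classes.
      String requestId = parent.getRequest().getId();
      System.out.println("Active request: " + requestId);

      // First page, 25 entries per page, of SingularityRequestHistory updates.
      Collection<SingularityRequestHistory> history =
          client.getHistoryForRequest(requestId, Optional.of(25), Optional.of(1));
      for (SingularityRequestHistory entry : history) {
        System.out.println("  " + entry);
      }
    }
  }
}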
Parse the correct class for the result.
SingularityClient/src/main/java/com/hubspot/singularity/client/SingularityClient.java
Parse the correct class for the result.
<ide><path>ingularityClient/src/main/java/com/hubspot/singularity/client/SingularityClient.java <ide> import com.hubspot.singularity.SingularitySlave; <ide> import com.hubspot.singularity.SingularityState; <ide> import com.hubspot.singularity.SingularityTask; <del>import com.hubspot.singularity.SingularityTaskCleanupResult; <add>import com.hubspot.singularity.SingularityTaskCleanup; <ide> import com.hubspot.singularity.SingularityTaskHistory; <ide> import com.hubspot.singularity.SingularityTaskHistoryUpdate; <ide> import com.hubspot.singularity.SingularityTaskId; <ide> return getCollection(requestUri, String.format("active tasks on slave %s", slaveId), TASKS_COLLECTION); <ide> } <ide> <del> public Optional<SingularityTaskCleanupResult> killTask(String taskId, Optional<SingularityKillTaskRequest> killTaskRequest) { <add> public Optional<SingularityTaskCleanup> killTask(String taskId, Optional<SingularityKillTaskRequest> killTaskRequest) { <ide> final Function<String, String> requestUri = (host) -> String.format(TASKS_KILL_TASK_FORMAT, getApiBase(host), taskId); <ide> <del> return delete(requestUri, "task", taskId, killTaskRequest, Optional.of(SingularityTaskCleanupResult.class)); <add> return delete(requestUri, "task", taskId, killTaskRequest, Optional.of(SingularityTaskCleanup.class)); <ide> } <ide> <ide> //
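To make the effect of the diff above concrete, here is a hypothetical call site after the change: killTask now yields Optional<SingularityTaskCleanup> rather than Optional<SingularityTaskCleanupResult>. The optional kill-task request body is simply left absent, since its fields are not described in this excerpt.

import com.google.common.base.Optional;
import com.hubspot.singularity.SingularityKillTaskRequest;
import com.hubspot.singularity.SingularityTaskCleanup;
import com.hubspot.singularity.client.SingularityClient;

public class KillTaskExample {

  // After this commit the client deserializes the kill response into SingularityTaskCleanup.
  static void killAndReport(SingularityClient client, String taskId) {
    Optional<SingularityTaskCleanup> cleanup =
        client.killTask(taskId, Optional.<SingularityKillTaskRequest>absent());

    if (cleanup.isPresent()) {
      System.out.println("Kill accepted for " + taskId + ": " + cleanup.get());
    } else {
      System.out.println("No cleanup record returned for task " + taskId);
    }
  }
}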
Java
mit
a6c5ef40960edd91105b2f2bd01a99f37fb565fa
0
isi-nlp/tac-kbp-eal,isi-nlp/tac-kbp-eal,BBN-E/tac-kbp-eal,BBN-E/tac-kbp-eal,rgabbard-bbn/kbp-2014-event-arguments,rgabbard-bbn/kbp-2014-event-arguments
package com.bbn.kbp.events2014.bin.QA; import com.bbn.bue.common.symbols.Symbol; import com.bbn.kbp.events2014.AnswerKey; import com.bbn.kbp.events2014.CorefAnnotation; import com.bbn.kbp.events2014.KBPString; import com.bbn.kbp.events2014.Response; import com.bbn.kbp.events2014.TypeRoleFillerRealis; import com.bbn.kbp.events2014.bin.QA.Warnings.CorefWarningRule; import com.bbn.kbp.events2014.bin.QA.Warnings.Warning; import com.bbn.kbp.events2014.bin.QA.Warnings.WarningRule; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Multimaps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.io.CharSink; import java.io.IOException; import java.util.Map; import java.util.Set; /** * Ugh these were not supposed to develop into objects that actually had real functionality Created * by jdeyoung on 6/30/15. */ public final class CorefDocumentRenderer extends QADocumentRenderer { CorefDocumentRenderer( final Ordering<Response> overallOrdering, final Ordering<TypeRoleFillerRealis> trfrOrdering, final Map<String, String> warningTypeToDescription) { super(overallOrdering, trfrOrdering, warningTypeToDescription); } public static CorefDocumentRenderer createWithDefaultOrdering( final ImmutableList<CorefWarningRule<Integer>> warnings) { final Map<String, String> warningToType = Maps.transformValues(Maps.uniqueIndex(warnings, new Function<WarningRule, String>() { @Override public String apply(final WarningRule input) { return input.getTypeString(); } }), new Function<WarningRule, String>() { @Override public String apply(final WarningRule input) { return input.getTypeDescription(); } }); return new CorefDocumentRenderer(DEFAULT_OVERALL_ORDERING, DEFAULT_TRFR_ORDERING, warningToType); } public void renderTo(final CharSink sink, final AnswerKey answerKey, final ImmutableMultimap<Integer, Warning> warnings) throws IOException { final StringBuilder sb = new StringBuilder(); sb.append(htmlHeader()); sb.append(String.format("<title>%s</title>", answerKey.docId().asString())); sb.append(javascript()); sb.append(CSS()); sb.append(bodyHeader()); sb.append("<h1>"); sb.append(answerKey.docId()); sb.append("</h1>\n"); sb.append("<div>"); sb.append(href("WarningStrings")); sb.append(String.format("<h1>%s</h1>", "Warning Strings")); sb.append(closehref()); sb.append("<div id=\"WarningStrings\" style=\"display:none\">\n"); sb.append("<ul>"); for (Map.Entry<String, String> e : warningTypeToDescription.entrySet()) { sb.append(String.format("<li>%s: %s</li>\n", e.getKey(), e.getValue())); } sb.append("</ul>"); sb.append("</div>"); sb.append("</div>"); // put all the CAS groups here, just for reference // sb.append(href("CASGroups")); // sb.append("<h2>CASGroups</h2>"); // sb.append(closehref()); // sb.append("<div id=\"CASGroups\" style=\"display:none\""); // for (final Integer CASGroup : answerKey.corefAnnotation().clusterIDToMembersMap().keySet()) { //// sb.append(href(String.format("CASGroup_%d", CASGroup))); // sb.append("<h3>CASGroup-").append(CASGroup).append("</h3>"); //// sb.append(closehref()); // sb.append("<div id=\"CASGroup_"); // sb.append(CASGroup); // sb.append("\" 
style=\"display:inherit\" >"); // appendCASStringList(sb, answerKey.corefAnnotation().clusterIDToMembersMap().get(CASGroup), // CASGroup); // sb.append("</div>"); // } // sb.append("</div>"); // begin bullets sb.append(href("CASGroupErrors")); sb.append("<h2>CAS Group Analysis</h2>"); sb.append(closehref()); sb.append("<div id=\"CASGroupErrors\" style=\"display:block\">"); final ImmutableSetMultimap<TypeRole, Integer> typeRoleToCASGroup = ImmutableSetMultimap.copyOf(TypeRole.buildMappingFromAnswerKey(answerKey).inverse()); for (final TypeRole typeRole : Ordering.<TypeRole>usingToString().sortedCopy( typeRoleToCASGroup.keySet())) { // append the type role final Set<Integer> offendingCASIntersections = Sets.intersection( typeRoleToCASGroup.get(typeRole), warnings.keySet()); if (offendingCASIntersections.size() > 1) { sb.append(href(typeRole.toString())); sb.append("<h2>").append(typeRole.toString()).append("</h2>"); sb.append(closehref()); sb.append("<div id=\"").append(typeRole.toString()).append("\" style=\"display:block\">"); for (final Integer CASGroup : offendingCASIntersections) { appendCASGroup(sb, CASGroup, answerKey, warnings.get(CASGroup)); } sb.append("</div>"); } } // for (final Integer CASGroup : warnings.keySet()) { // appendCASGroup(sb, CASGroup, answerKey, warnings); // } sb.append("</div>"); sink.write(sb.toString()); } public static ImmutableSet<Response> responsesForCASGroup(final Integer CASGroup, final AnswerKey answerKey) { final ImmutableSet.Builder<Response> responses = ImmutableSet.builder(); final ImmutableSetMultimap<KBPString, Response> kbpStringToResponse = ImmutableSetMultimap.copyOf( Multimaps.index(answerKey.allResponses(), Response.CASFunction())); for (final KBPString kbpString : answerKey.corefAnnotation().clusterIDToMembersMap() .get(CASGroup)) { responses.addAll(kbpStringToResponse.get(kbpString)); } return responses.build(); } private static void appendCASStringList(final StringBuilder sb, final ImmutableCollection<KBPString> kbpStrings, final Integer CASGroup) { // sb.append(href(String.format("CASStringList_%d", CASGroup))); // sb.append("CAS String List\n"); // sb.append(closehref()); sb.append("<div id=\"CASStringList_").append(CASGroup).append("\" style=\"display:block\">"); // sb.append("<ul>\n"); final Joiner semicolon = Joiner.on("; "); sb.append(semicolon.join(orderStringByLength().sortedCopy(ImmutableSet.copyOf( Iterables.transform(kbpStrings, KBPString.Text))))); // for (final String kbpString : orderStringByLength().sortedCopy(ImmutableSet.copyOf( // Iterables.transform(kbpStrings, KBPString.Text)))) { // sb.append("<li>"); // sb.append(kbpString); // sb.append("</li>\n"); // } // sb.append("</ul>\n"); sb.append("</div>"); } private static void appendTypeAndRoleSummaryForCAS(final StringBuilder sb, final Integer CASGroup, final AnswerKey answerKey) { final ImmutableMultimap<Symbol, Response> eventTypeToResponse = Multimaps.index(responsesForCASGroup(CASGroup, answerKey), Response.typeFunction()); final ImmutableSetMultimap<Symbol, Symbol> eventTypeToRoles = ImmutableSetMultimap.copyOf( Multimaps.transformValues(eventTypeToResponse, Response.roleFunction())); final Joiner comma = Joiner.on(", "); sb.append(href(String.format("SummaryEventTypeRole_%d", CASGroup))); sb.append("Summary of event type, role for this CAS\n"); sb.append(closehref()); sb.append("<div id=\"SummaryEventTypeRole_").append(CASGroup) .append("\" style=\"display:none\">"); sb.append("<ul>\n"); for (final Symbol type : eventTypeToRoles.keySet()) { sb.append("<li>"); 
sb.append(type); sb.append(": "); sb.append(comma.join(eventTypeToRoles.get(type))); sb.append("</li>"); } sb.append("</ul>"); sb.append("</div>"); } private static void appendWarningsListForCAS(final StringBuilder sb, final ImmutableCollection<Warning> warnings, final Integer CASGroup) { final ImmutableSetMultimap<String, Warning> warningByType = ImmutableSetMultimap.copyOf( Multimaps.index(warnings, new Function<Warning, String>() { @Override public String apply(final Warning input) { return input.typeString(); } })); sb.append(href(String.format("Warnings_%d", CASGroup))); sb.append("Warning list\n"); sb.append(closehref()); sb.append("<div id=\"Warnings_").append(CASGroup).append("\" style=\"display:none\">"); sb.append("<ul>\n"); for (final String warningType : warningByType.keySet()) { sb.append("<li>"); sb.append(warningType); sb.append(" - "); sb.append("<ul>"); for (final Warning warning : warningByType.get(warningType)) { sb.append("<li>"); sb.append(warning.warningString()); sb.append("</li>\n"); } sb.append("</ul>"); sb.append("</li>\n"); } sb.append("</ul>"); sb.append("</div>"); } private static void appendCASRoleList(final StringBuilder sb, final Integer CASGroup, final AnswerKey answerKey) { sb.append("CAS with role list\n"); sb.append("<ul>"); for (final KBPString kbpString : answerKey.corefAnnotation().clusterIDToMembersMap() .get(CASGroup)) { sb.append("<li>"); sb.append(kbpString.string()); sb.append("<ul>\n"); for (final Response r : ImmutableSet.copyOf( Multimaps.index(answerKey.allResponses(), Response.CASFunction()).get(kbpString))) { sb.append(r.type()); sb.append(", "); sb.append(r.role()); sb.append(", "); sb.append(r.realis()); } sb.append("</ul>"); sb.append("</li>\n"); } sb.append("</ul>"); } private static void appendCASGroup(final StringBuilder sb, final Integer CASGroup, final AnswerKey answerKey, final ImmutableCollection<Warning> warnings) { final CorefAnnotation coref = answerKey.corefAnnotation(); sb.append(href(String.format("CASGroupError_%d", CASGroup))); sb.append("<b>CASGroup-").append(CASGroup).append("</b>"); sb.append(closehref()); sb.append("<div id=\"CASGroupError_").append(CASGroup).append("\" style=\"display:inherit\" >"); sb.append("<ul>"); //cas string list sb.append("<li>"); appendCASStringList(sb, coref.clusterIDToMembersMap().get(CASGroup), CASGroup); sb.append("</li>"); // summary of type and roles // sb.append("<li>"); // appendTypeAndRoleSummaryForCAS(sb, CASGroup, answerKey); // sb.append("</li>"); // warnings list // sb.append("<li>"); // appendWarningsListForCAS(sb, warnings, CASGroup); // sb.append("</li>"); // // CAS with role list // sb.append("<li>"); // appendCASRoleList(sb, CASGroup, answerKey); // sb.append("</li>"); sb.append("</ul>"); sb.append("</div>"); } private static final Ordering<String> orderStringByLength() { return new Ordering<String>() { @Override public int compare(final String left, final String right) { return left.length() - right.length(); } }; } private final static class TypeRole { final Symbol type; final Symbol role; private TypeRole(final Symbol type, final Symbol role) { this.type = type; this.role = role; } public static ImmutableMultimap<Integer, TypeRole> buildMappingFromAnswerKey( final AnswerKey answerKey) { final ImmutableMultimap.Builder<Integer, Response> responsesForCASGroupBuilder = ImmutableMultimap.builder(); final ImmutableSetMultimap<KBPString, Response> kbpStringToResponse = ImmutableSetMultimap.copyOf( Multimaps.index(answerKey.allResponses(), Response.CASFunction())); for (final 
Integer CASGroup : answerKey.corefAnnotation().clusterIDToMembersMap().keySet()) { for (final KBPString kbpString : answerKey.corefAnnotation().clusterIDToMembersMap() .get(CASGroup)) { responsesForCASGroupBuilder.putAll(CASGroup, kbpStringToResponse.get(kbpString)); } } final ImmutableMultimap<Integer, Response> CASGroupToResponse = responsesForCASGroupBuilder.build(); final ImmutableMultimap<Integer, TypeRole> CASGroupToTypeRole = ImmutableMultimap.copyOf( Multimaps.transformValues(CASGroupToResponse, responseToTypeRole())); return CASGroupToTypeRole; } public static Function<Response, TypeRole> responseToTypeRole() { return new Function<Response, TypeRole>() { @Override public TypeRole apply(final Response input) { return new TypeRole(input.type(), input.role()); } }; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final TypeRole typeRole = (TypeRole) o; if (!type.equals(typeRole.type)) { return false; } return role.equals(typeRole.role); } @Override public int hashCode() { int result = type.hashCode(); result = 31 * result + role.hashCode(); return result; } @Override public String toString() { return type + "/" + role; } } }
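A minimal sketch of driving the renderer above, assuming the answer key, warning rules, and per-CAS-group warnings are produced elsewhere in the QA pipeline; Guava's Files.asCharSink is used here only as one convenient way to obtain the CharSink that renderTo expects, and the output file name is an arbitrary choice.

import com.bbn.kbp.events2014.AnswerKey;
import com.bbn.kbp.events2014.bin.QA.CorefDocumentRenderer;
import com.bbn.kbp.events2014.bin.QA.Warnings.CorefWarningRule;
import com.bbn.kbp.events2014.bin.QA.Warnings.Warning;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.io.Files;

import java.io.File;
import java.io.IOException;

public class CorefRenderingSketch {

  // answerKey, warningRules and warningsByCASGroup are assumed to come from the
  // surrounding QA pipeline; only the rendering call itself is shown here.
  static void writeReport(AnswerKey answerKey,
      ImmutableList<CorefWarningRule<Integer>> warningRules,
      ImmutableMultimap<Integer, Warning> warningsByCASGroup,
      File outputDir) throws IOException {

    CorefDocumentRenderer renderer =
        CorefDocumentRenderer.createWithDefaultOrdering(warningRules);

    File htmlFile = new File(outputDir, answerKey.docId().asString() + ".coref.html");
    renderer.renderTo(Files.asCharSink(htmlFile, Charsets.UTF_8), answerKey, warningsByCASGroup);
  }
}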
kbp-events2014-bin/src/main/java/com/bbn/kbp/events2014/bin/QA/CorefDocumentRenderer.java
package com.bbn.kbp.events2014.bin.QA; import com.bbn.bue.common.symbols.Symbol; import com.bbn.kbp.events2014.AnswerKey; import com.bbn.kbp.events2014.CorefAnnotation; import com.bbn.kbp.events2014.KBPString; import com.bbn.kbp.events2014.Response; import com.bbn.kbp.events2014.TypeRoleFillerRealis; import com.bbn.kbp.events2014.bin.QA.Warnings.CorefWarningRule; import com.bbn.kbp.events2014.bin.QA.Warnings.Warning; import com.bbn.kbp.events2014.bin.QA.Warnings.WarningRule; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Multimaps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.io.CharSink; import java.io.IOException; import java.util.Map; /** * Ugh these were not supposed to develop into objects that actually had real functionality Created * by jdeyoung on 6/30/15. */ public final class CorefDocumentRenderer extends QADocumentRenderer { CorefDocumentRenderer( final Ordering<Response> overallOrdering, final Ordering<TypeRoleFillerRealis> trfrOrdering, final Map<String, String> warningTypeToDescription) { super(overallOrdering, trfrOrdering, warningTypeToDescription); } public static CorefDocumentRenderer createWithDefaultOrdering( final ImmutableList<CorefWarningRule<Integer>> warnings) { final Map<String, String> warningToType = Maps.transformValues(Maps.uniqueIndex(warnings, new Function<WarningRule, String>() { @Override public String apply(final WarningRule input) { return input.getTypeString(); } }), new Function<WarningRule, String>() { @Override public String apply(final WarningRule input) { return input.getTypeDescription(); } }); return new CorefDocumentRenderer(DEFAULT_OVERALL_ORDERING, DEFAULT_TRFR_ORDERING, warningToType); } public void renderTo(final CharSink sink, final AnswerKey answerKey, final ImmutableMultimap<Integer, Warning> warnings) throws IOException { final StringBuilder sb = new StringBuilder(); sb.append(htmlHeader()); sb.append(String.format("<title>%s</title>", answerKey.docId().asString())); sb.append(javascript()); sb.append(CSS()); sb.append(bodyHeader()); sb.append("<h1>"); sb.append(answerKey.docId()); sb.append("</h1>\n"); sb.append("<div>"); sb.append(href("WarningStrings")); sb.append(String.format("<h1>%s</h1>", "Warning Strings")); sb.append(closehref()); sb.append("<div id=\"WarningStrings\" style=\"display:none\">\n"); sb.append("<ul>"); for (Map.Entry<String, String> e : warningTypeToDescription.entrySet()) { sb.append(String.format("<li>%s: %s</li>\n", e.getKey(), e.getValue())); } sb.append("</ul>"); sb.append("</div>"); sb.append("</div>"); // put all the CAS groups here, just for reference sb.append(href("CASGroups")); sb.append("<h2>CASGroups</h2>"); sb.append(closehref()); sb.append("<div id=\"CASGroups\" style=\"display:none\""); for (final Integer CASGroup : answerKey.corefAnnotation().clusterIDToMembersMap().keySet()) { // sb.append(href(String.format("CASGroup_%d", CASGroup))); sb.append("<h3>CASGroup-").append(CASGroup).append("</h3>"); // sb.append(closehref()); sb.append("<div id=\"CASGroup_"); sb.append(CASGroup); sb.append("\" style=\"display:inherit\" >"); 
appendCASStringList(sb, answerKey.corefAnnotation().clusterIDToMembersMap().get(CASGroup), CASGroup); sb.append("</div>"); } sb.append("</div>"); // begin bullets sb.append(href("CASGroupErrors")); sb.append("<h2>CAS Group Analysis</h2>"); sb.append(closehref()); sb.append("<div id=\"CASGroupErrors\" style=\"display:block\">"); final ImmutableSetMultimap<TypeRole, Integer> typeRoleToCASGroup = ImmutableSetMultimap.copyOf(TypeRole.buildMappingFromAnswerKey(answerKey).inverse()); for (final TypeRole typeRole : Ordering.<TypeRole>usingToString().sortedCopy( typeRoleToCASGroup.keySet())) { // append the type role sb.append(href(typeRole.toString())); sb.append("<h2>").append(typeRole.toString()).append("</h2>"); sb.append(closehref()); sb.append("<div id=\"").append(typeRole.toString()).append("\" style=\"display:block\">"); for(final Integer CASGroup: Sets.intersection(typeRoleToCASGroup.get(typeRole), warnings.keySet())) { appendCASGroup(sb, CASGroup, answerKey, warnings.get(CASGroup)); } sb.append("</div>"); } // for (final Integer CASGroup : warnings.keySet()) { // appendCASGroup(sb, CASGroup, answerKey, warnings); // } sb.append("</div>"); sink.write(sb.toString()); } public static ImmutableSet<Response> responsesForCASGroup(final Integer CASGroup, final AnswerKey answerKey) { final ImmutableSet.Builder<Response> responses = ImmutableSet.builder(); final ImmutableSetMultimap<KBPString, Response> kbpStringToResponse = ImmutableSetMultimap.copyOf( Multimaps.index(answerKey.allResponses(), Response.CASFunction())); for (final KBPString kbpString : answerKey.corefAnnotation().clusterIDToMembersMap() .get(CASGroup)) { responses.addAll(kbpStringToResponse.get(kbpString)); } return responses.build(); } private static void appendCASStringList(final StringBuilder sb, final ImmutableCollection<KBPString> kbpStrings, final Integer CASGroup) { sb.append(href(String.format("CASStringList_%d", CASGroup))); sb.append("CAS String List\n"); sb.append(closehref()); sb.append("<div id=\"CASStringList_").append(CASGroup).append("\" style=\"display:inherit\">"); sb.append("<ul>\n"); for (final String kbpString : ImmutableSet.copyOf( Iterables.transform(kbpStrings, KBPString.Text))) { sb.append("<li>"); sb.append(kbpString); sb.append("</li>\n"); } sb.append("</ul>\n"); sb.append("</div>"); } private static void appendTypeAndRoleSummaryForCAS(final StringBuilder sb, final Integer CASGroup, final AnswerKey answerKey) { final ImmutableMultimap<Symbol, Response> eventTypeToResponse = Multimaps.index(responsesForCASGroup(CASGroup, answerKey), Response.typeFunction()); final ImmutableSetMultimap<Symbol, Symbol> eventTypeToRoles = ImmutableSetMultimap.copyOf( Multimaps.transformValues(eventTypeToResponse, Response.roleFunction())); final Joiner comma = Joiner.on(", "); sb.append(href(String.format("SummaryEventTypeRole_%d", CASGroup))); sb.append("Summary of event type, role for this CAS\n"); sb.append(closehref()); sb.append("<div id=\"SummaryEventTypeRole_").append(CASGroup) .append("\" style=\"display:none\">"); sb.append("<ul>\n"); for (final Symbol type : eventTypeToRoles.keySet()) { sb.append("<li>"); sb.append(type); sb.append(": "); sb.append(comma.join(eventTypeToRoles.get(type))); sb.append("</li>"); } sb.append("</ul>"); sb.append("</div>"); } private static void appendWarningsListForCAS(final StringBuilder sb, final ImmutableCollection<Warning> warnings, final Integer CASGroup) { final ImmutableSetMultimap<String, Warning> warningByType = ImmutableSetMultimap.copyOf( Multimaps.index(warnings, new 
Function<Warning, String>() { @Override public String apply(final Warning input) { return input.typeString(); } })); sb.append(href(String.format("Warnings_%d", CASGroup))); sb.append("Warning list\n"); sb.append(closehref()); sb.append("<div id=\"Warnings_").append(CASGroup).append("\" style=\"display:none\">"); sb.append("<ul>\n"); for (final String warningType : warningByType.keySet()) { sb.append("<li>"); sb.append(warningType); sb.append(" - "); sb.append("<ul>"); for (final Warning warning : warningByType.get(warningType)) { sb.append("<li>"); sb.append(warning.warningString()); sb.append("</li>\n"); } sb.append("</ul>"); sb.append("</li>\n"); } sb.append("</ul>"); sb.append("</div>"); } private static void appendCASRoleList(final StringBuilder sb, final Integer CASGroup, final AnswerKey answerKey) { sb.append("CAS with role list\n"); sb.append("<ul>"); for (final KBPString kbpString : answerKey.corefAnnotation().clusterIDToMembersMap() .get(CASGroup)) { sb.append("<li>"); sb.append(kbpString.string()); sb.append("<ul>\n"); for (final Response r : ImmutableSet.copyOf( Multimaps.index(answerKey.allResponses(), Response.CASFunction()).get(kbpString))) { sb.append(r.type()); sb.append(", "); sb.append(r.role()); sb.append(", "); sb.append(r.realis()); } sb.append("</ul>"); sb.append("</li>\n"); } sb.append("</ul>"); } private static void appendCASGroup(final StringBuilder sb, final Integer CASGroup, final AnswerKey answerKey, final ImmutableCollection<Warning> warnings) { final CorefAnnotation coref = answerKey.corefAnnotation(); sb.append(href(String.format("CASGroupError_%d", CASGroup))); sb.append("<h3>CASGroup-").append(CASGroup).append("</h3>"); sb.append(closehref()); sb.append("<div id=\"CASGroupError_").append(CASGroup).append("\" style=\"display:inherit\" >"); sb.append("<ul>"); //cas string list sb.append("<li>"); appendCASStringList(sb, coref.clusterIDToMembersMap().get(CASGroup), CASGroup); sb.append("</li>"); // summary of type and roles sb.append("<li>"); appendTypeAndRoleSummaryForCAS(sb, CASGroup, answerKey); sb.append("</li>"); // warnings list sb.append("<li>"); appendWarningsListForCAS(sb, warnings, CASGroup); sb.append("</li>"); // // CAS with role list // sb.append("<li>"); // appendCASRoleList(sb, CASGroup, answerKey); // sb.append("</li>"); sb.append("</ul>"); sb.append("</div>"); } private final static class TypeRole { final Symbol type; final Symbol role; private TypeRole(final Symbol type, final Symbol role) { this.type = type; this.role = role; } public static ImmutableMultimap<Integer, TypeRole> buildMappingFromAnswerKey( final AnswerKey answerKey) { final ImmutableMultimap.Builder<Integer, Response> responsesForCASGroupBuilder = ImmutableMultimap.builder(); final ImmutableSetMultimap<KBPString, Response> kbpStringToResponse = ImmutableSetMultimap.copyOf( Multimaps.index(answerKey.allResponses(), Response.CASFunction())); for (final Integer CASGroup : answerKey.corefAnnotation().clusterIDToMembersMap().keySet()) { for (final KBPString kbpString : answerKey.corefAnnotation().clusterIDToMembersMap() .get(CASGroup)) { responsesForCASGroupBuilder.putAll(CASGroup, kbpStringToResponse.get(kbpString)); } } final ImmutableMultimap<Integer, Response> CASGroupToResponse = responsesForCASGroupBuilder.build(); final ImmutableMultimap<Integer, TypeRole> CASGroupToTypeRole = ImmutableMultimap.copyOf( Multimaps.transformValues(CASGroupToResponse, responseToTypeRole())); return CASGroupToTypeRole; } public static Function<Response, TypeRole> responseToTypeRole() { return new 
Function<Response, TypeRole>() { @Override public TypeRole apply(final Response input) { return new TypeRole(input.type(), input.role()); } }; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final TypeRole typeRole = (TypeRole) o; if (!type.equals(typeRole.type)) { return false; } return role.equals(typeRole.role); } @Override public int hashCode() { int result = type.hashCode(); result = 31 * result + role.hashCode(); return result; } @Override public String toString() { return type + "-" + role; } } }
many small UI tweaks
kbp-events2014-bin/src/main/java/com/bbn/kbp/events2014/bin/QA/CorefDocumentRenderer.java
many small UI tweaks
<ide><path>bp-events2014-bin/src/main/java/com/bbn/kbp/events2014/bin/QA/CorefDocumentRenderer.java <ide> <ide> import java.io.IOException; <ide> import java.util.Map; <add>import java.util.Set; <ide> <ide> /** <ide> * Ugh these were not supposed to develop into objects that actually had real functionality Created <ide> sb.append("</div>"); <ide> <ide> // put all the CAS groups here, just for reference <del> sb.append(href("CASGroups")); <del> sb.append("<h2>CASGroups</h2>"); <del> sb.append(closehref()); <del> sb.append("<div id=\"CASGroups\" style=\"display:none\""); <del> for (final Integer CASGroup : answerKey.corefAnnotation().clusterIDToMembersMap().keySet()) { <del>// sb.append(href(String.format("CASGroup_%d", CASGroup))); <del> sb.append("<h3>CASGroup-").append(CASGroup).append("</h3>"); <del>// sb.append(closehref()); <del> sb.append("<div id=\"CASGroup_"); <del> sb.append(CASGroup); <del> sb.append("\" style=\"display:inherit\" >"); <del> appendCASStringList(sb, answerKey.corefAnnotation().clusterIDToMembersMap().get(CASGroup), <del> CASGroup); <del> sb.append("</div>"); <del> } <del> sb.append("</div>"); <add>// sb.append(href("CASGroups")); <add>// sb.append("<h2>CASGroups</h2>"); <add>// sb.append(closehref()); <add>// sb.append("<div id=\"CASGroups\" style=\"display:none\""); <add>// for (final Integer CASGroup : answerKey.corefAnnotation().clusterIDToMembersMap().keySet()) { <add>//// sb.append(href(String.format("CASGroup_%d", CASGroup))); <add>// sb.append("<h3>CASGroup-").append(CASGroup).append("</h3>"); <add>//// sb.append(closehref()); <add>// sb.append("<div id=\"CASGroup_"); <add>// sb.append(CASGroup); <add>// sb.append("\" style=\"display:inherit\" >"); <add>// appendCASStringList(sb, answerKey.corefAnnotation().clusterIDToMembersMap().get(CASGroup), <add>// CASGroup); <add>// sb.append("</div>"); <add>// } <add>// sb.append("</div>"); <ide> <ide> // begin bullets <ide> sb.append(href("CASGroupErrors")); <ide> for (final TypeRole typeRole : Ordering.<TypeRole>usingToString().sortedCopy( <ide> typeRoleToCASGroup.keySet())) { <ide> // append the type role <del> sb.append(href(typeRole.toString())); <del> sb.append("<h2>").append(typeRole.toString()).append("</h2>"); <del> sb.append(closehref()); <del> sb.append("<div id=\"").append(typeRole.toString()).append("\" style=\"display:block\">"); <del> for(final Integer CASGroup: Sets.intersection(typeRoleToCASGroup.get(typeRole), warnings.keySet())) { <del> appendCASGroup(sb, CASGroup, answerKey, warnings.get(CASGroup)); <del> } <del> sb.append("</div>"); <add> final Set<Integer> offendingCASIntersections = Sets.intersection( <add> typeRoleToCASGroup.get(typeRole), warnings.keySet()); <add> if (offendingCASIntersections.size() > 1) { <add> sb.append(href(typeRole.toString())); <add> sb.append("<h2>").append(typeRole.toString()).append("</h2>"); <add> sb.append(closehref()); <add> sb.append("<div id=\"").append(typeRole.toString()).append("\" style=\"display:block\">"); <add> for (final Integer CASGroup : offendingCASIntersections) { <add> appendCASGroup(sb, CASGroup, answerKey, warnings.get(CASGroup)); <add> } <add> sb.append("</div>"); <add> } <ide> } <ide> // for (final Integer CASGroup : warnings.keySet()) { <ide> // appendCASGroup(sb, CASGroup, answerKey, warnings); <ide> <ide> private static void appendCASStringList(final StringBuilder sb, <ide> final ImmutableCollection<KBPString> kbpStrings, final Integer CASGroup) { <del> sb.append(href(String.format("CASStringList_%d", CASGroup))); <del> sb.append("CAS String 
List\n"); <del> sb.append(closehref()); <del> <del> sb.append("<div id=\"CASStringList_").append(CASGroup).append("\" style=\"display:inherit\">"); <del> sb.append("<ul>\n"); <del> for (final String kbpString : ImmutableSet.copyOf( <del> Iterables.transform(kbpStrings, KBPString.Text))) { <del> sb.append("<li>"); <del> sb.append(kbpString); <del> sb.append("</li>\n"); <del> } <del> sb.append("</ul>\n"); <add>// sb.append(href(String.format("CASStringList_%d", CASGroup))); <add>// sb.append("CAS String List\n"); <add>// sb.append(closehref()); <add> <add> sb.append("<div id=\"CASStringList_").append(CASGroup).append("\" style=\"display:block\">"); <add>// sb.append("<ul>\n"); <add> final Joiner semicolon = Joiner.on("; "); <add> sb.append(semicolon.join(orderStringByLength().sortedCopy(ImmutableSet.copyOf( <add> Iterables.transform(kbpStrings, KBPString.Text))))); <add>// for (final String kbpString : orderStringByLength().sortedCopy(ImmutableSet.copyOf( <add>// Iterables.transform(kbpStrings, KBPString.Text)))) { <add>// sb.append("<li>"); <add>// sb.append(kbpString); <add>// sb.append("</li>\n"); <add>// } <add>// sb.append("</ul>\n"); <ide> sb.append("</div>"); <ide> } <ide> <ide> final AnswerKey answerKey, final ImmutableCollection<Warning> warnings) { <ide> final CorefAnnotation coref = answerKey.corefAnnotation(); <ide> sb.append(href(String.format("CASGroupError_%d", CASGroup))); <del> sb.append("<h3>CASGroup-").append(CASGroup).append("</h3>"); <add> sb.append("<b>CASGroup-").append(CASGroup).append("</b>"); <ide> sb.append(closehref()); <ide> sb.append("<div id=\"CASGroupError_").append(CASGroup).append("\" style=\"display:inherit\" >"); <ide> sb.append("<ul>"); <ide> <ide> // summary of type and roles <ide> <del> sb.append("<li>"); <del> appendTypeAndRoleSummaryForCAS(sb, CASGroup, answerKey); <del> sb.append("</li>"); <add>// sb.append("<li>"); <add>// appendTypeAndRoleSummaryForCAS(sb, CASGroup, answerKey); <add>// sb.append("</li>"); <ide> <ide> // warnings list <del> sb.append("<li>"); <del> appendWarningsListForCAS(sb, warnings, CASGroup); <del> sb.append("</li>"); <add>// sb.append("<li>"); <add>// appendWarningsListForCAS(sb, warnings, CASGroup); <add>// sb.append("</li>"); <ide> <ide> // // CAS with role list <ide> // sb.append("<li>"); <ide> <ide> sb.append("</ul>"); <ide> sb.append("</div>"); <add> } <add> <add> private static final Ordering<String> orderStringByLength() { <add> return new Ordering<String>() { <add> @Override <add> public int compare(final String left, final String right) { <add> return left.length() - right.length(); <add> } <add> }; <ide> } <ide> <ide> private final static class TypeRole { <ide> <ide> @Override <ide> public String toString() { <del> return type + "-" + role; <add> return type + "/" + role; <ide> } <ide> } <ide> }
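The diff above replaces the plain CAS string bullet list with a semicolon-joined list sorted by a length-based Guava Ordering. As a standalone illustration of that comparator, independent of the renderer, the snippet below shows the same orderStringByLength() shape and the join it feeds; the example strings are placeholders.

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Ordering;

public class OrderByLengthDemo {

  // Same comparator shape as the orderStringByLength() helper added in the diff.
  private static Ordering<String> byLength() {
    return new Ordering<String>() {
      @Override
      public int compare(final String left, final String right) {
        return left.length() - right.length();
      }
    };
  }

  public static void main(String[] args) {
    ImmutableSet<String> casStrings =
        ImmutableSet.of("President Barack Obama", "Obama", "Barack Obama");
    // Prints: Obama; Barack Obama; President Barack Obama
    System.out.println(Joiner.on("; ").join(byLength().sortedCopy(casStrings)));
  }
}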
Java
mit
a78aaa3e23ec6ea495f208f24bd5b417120cc8b7
0
viktormuller/notifications-quickstart-android
/** * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twilio.android.quickstart; import android.app.IntentService; import android.content.Intent; import android.content.SharedPreferences; import android.preference.PreferenceManager; import android.support.v4.content.LocalBroadcastManager; import android.util.Log; import com.google.android.gms.gcm.GoogleCloudMessaging; import com.google.android.gms.iid.InstanceID; import com.twilio.notification.api.BindingResource; import java.io.IOException; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.converter.jackson.JacksonConverterFactory; import static com.twilio.android.quickstart.QuickstartPreferences.*; public class RegistrationIntentService extends IntentService { private static final String TAG = "RegIntentService"; private BindingResource bindingResource; private static final String schema = "http"; private static final String host = "myserver.com"; //Do NOT include http:// private static final int port = 80; public RegistrationIntentService() { super(TAG); } @Override public void onCreate(){ super.onCreate(); Retrofit retrofit = new Retrofit.Builder() .baseUrl(schema + "://" + host + ":" + port).addConverterFactory(JacksonConverterFactory.create()) .build(); bindingResource = retrofit.create(BindingResource.class); } @Override protected void onHandleIntent(Intent intent) { SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getBaseContext()); try { // [START register_for_gcm] // Initially this call goes out to the network to retrieve the token, subsequent calls // are local. // R.string.gcm_defaultSenderId (the Sender ID) is typically derived from google-services.json. // See https://developers.google.com/cloud-messaging/android/start for details on this file. // [START get_token] InstanceID instanceID = InstanceID.getInstance(this); String token = instanceID.getToken(getString(R.string.gcm_defaultSenderId), GoogleCloudMessaging.INSTANCE_ID_SCOPE, null); // [END get_token] Log.i(TAG, "GCM Registration Token: " + token); String identity = sharedPreferences.getString(IDENTITY, "Bob"); String endpoint = sharedPreferences.getString(ENDPOINT, "Bob's Moto E"); Log.i(TAG,"Identity = " + identity); Log.i(TAG,"Endpoint = " + endpoint); sendRegistrationToServer(identity, endpoint, token); // You should store a boolean that indicates whether the generated token has been // sent to your server. If the boolean is false, send the token to your server, // otherwise your server should have already received the token. sharedPreferences.edit().putBoolean(SENT_TOKEN_TO_SERVER, true).apply(); // [END register_for_gcm] } catch (Exception e) { Log.d(TAG, "Failed to complete token refresh", e); // If an exception happens while fetching the new token or updating our registration data // on Twilio server, this ensures that we'll attempt the update at a later time. 
sharedPreferences.edit().putBoolean(SENT_TOKEN_TO_SERVER, false).apply(); } // Notify UI that registration has completed, so the progress indicator can be hidden. Intent registrationComplete = new Intent(REGISTRATION_COMPLETE); LocalBroadcastManager.getInstance(this).sendBroadcast(registrationComplete); } /** * Persist registration to Twilio via your application server. * * @param token The new token. */ private void sendRegistrationToServer(String identity, String endpoint, String token) throws IOException { Call<Void> call = bindingResource.createBinding(identity, endpoint, token, "gcm"); Response<Void> response = call.execute(); } }
app/src/main/java/com/twilio/android/quickstart/RegistrationIntentService.java
/** * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twilio.android.quickstart; import android.app.IntentService; import android.content.Intent; import android.content.SharedPreferences; import android.preference.PreferenceManager; import android.support.v4.content.LocalBroadcastManager; import android.util.Log; import com.google.android.gms.gcm.GoogleCloudMessaging; import com.google.android.gms.iid.InstanceID; import com.twilio.notification.api.BindingResource; import java.io.IOException; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.converter.jackson.JacksonConverterFactory; import static com.twilio.android.quickstart.QuickstartPreferences.*; public class RegistrationIntentService extends IntentService { private static final String TAG = "RegIntentService"; private BindingResource bindingResource; private static final String schema = "http"; private static final String host = "myserver.com"; private static final int port = 80; public RegistrationIntentService() { super(TAG); } @Override public void onCreate(){ super.onCreate(); Retrofit retrofit = new Retrofit.Builder() .baseUrl(schema + "://" + host + ":" + port).addConverterFactory(JacksonConverterFactory.create()) .build(); bindingResource = retrofit.create(BindingResource.class); } @Override protected void onHandleIntent(Intent intent) { SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getBaseContext()); try { // [START register_for_gcm] // Initially this call goes out to the network to retrieve the token, subsequent calls // are local. // R.string.gcm_defaultSenderId (the Sender ID) is typically derived from google-services.json. // See https://developers.google.com/cloud-messaging/android/start for details on this file. // [START get_token] InstanceID instanceID = InstanceID.getInstance(this); String token = instanceID.getToken(getString(R.string.gcm_defaultSenderId), GoogleCloudMessaging.INSTANCE_ID_SCOPE, null); // [END get_token] Log.i(TAG, "GCM Registration Token: " + token); String identity = sharedPreferences.getString(IDENTITY, "Bob"); String endpoint = sharedPreferences.getString(ENDPOINT, "Bob's Moto E"); Log.i(TAG,"Identity = " + identity); Log.i(TAG,"Endpoint = " + endpoint); sendRegistrationToServer(identity, endpoint, token); // You should store a boolean that indicates whether the generated token has been // sent to your server. If the boolean is false, send the token to your server, // otherwise your server should have already received the token. sharedPreferences.edit().putBoolean(SENT_TOKEN_TO_SERVER, true).apply(); // [END register_for_gcm] } catch (Exception e) { Log.d(TAG, "Failed to complete token refresh", e); // If an exception happens while fetching the new token or updating our registration data // on Twilio server, this ensures that we'll attempt the update at a later time. 
sharedPreferences.edit().putBoolean(SENT_TOKEN_TO_SERVER, false).apply(); } // Notify UI that registration has completed, so the progress indicator can be hidden. Intent registrationComplete = new Intent(REGISTRATION_COMPLETE); LocalBroadcastManager.getInstance(this).sendBroadcast(registrationComplete); } /** * Persist registration to Twilio via your application server. * * @param token The new token. */ private void sendRegistrationToServer(String identity, String endpoint, String token) throws IOException { Call<Void> call = bindingResource.createBinding(identity, endpoint, token, "gcm"); Response<Void> response = call.execute(); } }
Update RegistrationIntentService.java
app/src/main/java/com/twilio/android/quickstart/RegistrationIntentService.java
Update RegistrationIntentService.java
<ide><path>pp/src/main/java/com/twilio/android/quickstart/RegistrationIntentService.java <ide> private static final String TAG = "RegIntentService"; <ide> private BindingResource bindingResource; <ide> private static final String schema = "http"; <del> private static final String host = "myserver.com"; <add> private static final String host = "myserver.com"; //Do NOT include http:// <ide> private static final int port = 80; <ide> <ide> public RegistrationIntentService() {
Java
mit
76676e17e984acd6246fe4b3ee7be4ff5a152ce1
0
Frederikam/fred.moe,Frederikam/fred.moe,Frederikam/fred.moe,Frederikam/fred.moe
package com.frederikam.fred.moe; import com.frederikam.fred.moe.util.SLF4JInputStreamLogger; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; public class Caddy extends Thread { private static final Logger log = LoggerFactory.getLogger(Caddy.class); public Caddy() { setName("Caddy"); } @Override public void run() { try { log.info("Starting Caddy"); ProcessBuilder pb = new ProcessBuilder() .command("caddy", "-conf Caddyfile", "-agree", "-email $CADDY_EMAIL"); Process proc = pb.start(); new SLF4JInputStreamLogger(log, proc.getInputStream()).start(); try { int code = proc.waitFor(); log.warn("Exited with code " + code); } catch (InterruptedException e) { throw new RuntimeException(e); } } catch (IOException e) { log.warn(e.getMessage()); } } }
src/main/java/com/frederikam/fred/moe/Caddy.java
package com.frederikam.fred.moe; import com.frederikam.fred.moe.util.SLF4JInputStreamLogger; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; public class Caddy extends Thread { private static final Logger log = LoggerFactory.getLogger(Caddy.class); public Caddy() { setName("Caddy"); } @Override public void run() { try { log.info("Starting Caddy"); ProcessBuilder pb = new ProcessBuilder() .command("caddy", "-conf Caddyfile", "-agree", "-email $CADDY_EMAIL"); Process proc = pb.start();doc new SLF4JInputStreamLogger(log, proc.getInputStream()).start(); try { int code = proc.waitFor(); log.warn("Exited with code " + code); } catch (InterruptedException e) { throw new RuntimeException(e); } } catch (IOException e) { log.warn(e.getMessage()); } } }
Removed typo
src/main/java/com/frederikam/fred/moe/Caddy.java
Removed typo
<ide><path>rc/main/java/com/frederikam/fred/moe/Caddy.java <ide> <ide> ProcessBuilder pb = new ProcessBuilder() <ide> .command("caddy", "-conf Caddyfile", "-agree", "-email $CADDY_EMAIL"); <del> Process proc = pb.start();doc <add> Process proc = pb.start(); <ide> new SLF4JInputStreamLogger(log, proc.getInputStream()).start(); <ide> <ide> try {
JavaScript
mit
d3c83eb8d43afa5e17f2a31fae309a02c14d65d5
0
bengarvey/savetheworld
var game = new Phaser.Game("100%", 420, Phaser.CANVAS, 'game', { preload: preload, create: create, update: update, render: render }); function preload() { game.load.image('player','assets/sprites/hero.png'); game.load.image('background','assets/tests/space-city.png'); game.load.image('green-energy','assets/sprites/green-energy.png'); game.load.audio('amia_dope_song', ['assets/amia_dope_song.m4a']); } var player; var cursors; var jumpTimer = 0; function create() { var worldWidth = 10000; var worldHeight = 420; game.add.tileSprite(0, -150, worldWidth, worldHeight+150, 'background'); game.world.setBounds(0, 0, worldWidth, worldHeight); music = game.add.audio('amia_dope_song'); music.loop = true; music.play(); game.physics.startSystem(Phaser.Physics.ARCADE); game.physics.arcade.gravity.y = 1500; player = game.add.sprite(1, game.world.centerY, 'player'); powerup = game.add.sprite(35, game.world.centerY, 'green-energy'); joker = game.add.sprite(5000, game.world.centerY, 'player'); catwoman = game.add.sprite(4000, game.world.centerY, 'player'); darthvader = game.add.sprite(3000, game.world.centerY, 'player'); alien = game.add.sprite(2000, game.world.centerY, 'player'); game.physics.enable(player); player.body.collideWorldBounds = true; game.physics.enable(powerup); powerup.body.collideWorldBounds = true; game.physics.enable(joker); game.physics.enable(catwoman); game.physics.enable(darthvader); game.physics.enable(alien); joker.body.collideWorldBounds = true; catwoman.body.collideWorldBounds = true; darthvader.body.collideWorldBounds = true; alien.body.collideWorldBounds = true; cursors = game.input.keyboard.createCursorKeys(); game.camera.follow(player); } function restartGame() { game.state.restart(); } function update() { player.body.velocity.x = 250; joker.body.velocity.x = -245; catwoman.body.velocity.x = -245; darthvader.body.velocity.x = -245; alien.body.velocity.x = -245; game.physics.arcade.collide(player, alien, restartGame); game.physics.arcade.collide(player, joker, restartGame); game.physics.arcade.collide(player, catwoman, restartGame); game.physics.arcade.collide(player, darthvader, restartGame); // Jumps if ((cursors.up.isDown || game.input.pointer1.isDown) && player.body.onFloor()) { player.body.velocity.y = -500; } } function render() { game.debug.cameraInfo(game.camera, 32, 32); game.debug.spriteCoords(player, 32, 500); }
src/main.js
var game = new Phaser.Game("100%", 568, Phaser.CANVAS, 'game', { preload: preload, create: create, update: update, render: render }); function preload() { game.load.image('player','assets/sprites/hero.png'); game.load.image('background','assets/tests/space-city.png'); game.load.image('green-energy','assets/sprites/green-energy.png'); game.load.audio('amia_dope_song', ['assets/amia_dope_song.m4a']); } var player; var cursors; var jumpTimer = 0; function create() { var worldWidth = 10000; var worldHeight = 568; game.add.tileSprite(0, 0, worldWidth, worldHeight, 'background'); game.world.setBounds(0, 0, worldWidth, worldHeight); music = game.add.audio('amia_dope_song'); music.loop = true; music.play(); game.physics.startSystem(Phaser.Physics.ARCADE); game.physics.arcade.gravity.y = 1500; player = game.add.sprite(1, game.world.centerY, 'player'); powerup = game.add.sprite(35, game.world.centerY, 'green-energy'); joker = game.add.sprite(5000, game.world.centerY, 'player'); catwoman = game.add.sprite(4000, game.world.centerY, 'player'); darthvader = game.add.sprite(3000, game.world.centerY, 'player'); alien = game.add.sprite(2000, game.world.centerY, 'player'); game.physics.enable(player); player.body.collideWorldBounds = true; game.physics.enable(powerup); powerup.body.collideWorldBounds = true; game.physics.enable(joker); game.physics.enable(catwoman); game.physics.enable(darthvader); game.physics.enable(alien); joker.body.collideWorldBounds = true; catwoman.body.collideWorldBounds = true; darthvader.body.collideWorldBounds = true; alien.body.collideWorldBounds = true; cursors = game.input.keyboard.createCursorKeys(); game.camera.follow(player); } function update() { player.body.velocity.x = 250; joker.body.velocity.x = -245; catwoman.body.velocity.x = -245; darthvader.body.velocity.x = -245; alien.body.velocity.x = -245; if ((cursors.up.isDown || game.input.pointer1.isDown) && player.body.onFloor()) { player.body.velocity.y = -500; } } function render() { game.debug.cameraInfo(game.camera, 32, 32); game.debug.spriteCoords(player, 32, 500); }
fixes
src/main.js
fixes
<ide><path>rc/main.js <del>var game = new Phaser.Game("100%", 568, Phaser.CANVAS, 'game', { preload: preload, create: create, update: update, render: render }); <add>var game = new Phaser.Game("100%", 420, Phaser.CANVAS, 'game', { preload: preload, create: create, update: update, render: render }); <ide> <ide> function preload() { <ide> game.load.image('player','assets/sprites/hero.png'); <ide> <ide> function create() { <ide> var worldWidth = 10000; <del> var worldHeight = 568; <add> var worldHeight = 420; <ide> <del> game.add.tileSprite(0, 0, worldWidth, worldHeight, 'background'); <add> game.add.tileSprite(0, -150, worldWidth, worldHeight+150, 'background'); <add> <ide> game.world.setBounds(0, 0, worldWidth, worldHeight); <ide> <ide> music = game.add.audio('amia_dope_song'); <ide> game.camera.follow(player); <ide> } <ide> <add>function restartGame() { <add> game.state.restart(); <add>} <add> <ide> function update() { <ide> <ide> player.body.velocity.x = 250; <ide> darthvader.body.velocity.x = -245; <ide> alien.body.velocity.x = -245; <ide> <add> game.physics.arcade.collide(player, alien, restartGame); <add> game.physics.arcade.collide(player, joker, restartGame); <add> game.physics.arcade.collide(player, catwoman, restartGame); <add> game.physics.arcade.collide(player, darthvader, restartGame); <add> // Jumps <ide> if ((cursors.up.isDown || game.input.pointer1.isDown) && player.body.onFloor()) <ide> { <ide> player.body.velocity.y = -500; <ide> } <add> <ide> <ide> } <ide>
Java
mit
error: pathspec 'NumToWord.java' did not match any file(s) known to git
cdc143e6ba6dba66ad19bd1675e36bb4cfc6e59e
1
henrino3/Java
/*Calculator by Henry and Opeyemi */ import java.util.Scanner; public class NumToWord{ public static Scanner in = new Scanner(System.in); public static String [] units = { "", " one ", " two ", " three ", " four ", " five ", " six ", " seven ", " eight ", " nine ", " ten ", " eleven ", " twelve ", " thirteen ", " fourteen ", " fifteen ", " sixteen ", " seventeen ", " eighteen ", " nineteen " } ; public static String [] tens = { "", ""," twenty ", " thirty ", " fourty ", " fifty ", " sixty ", " seventy ", " eighty ", " ninty " }; public static String [] big = { " hundred ", " thousand ", " million ", " billion " }; public static int unit, ten, hundred, thousand,tenThousand, tenMillion, hundredThousand, million, toTenMillion, hundredMillion, toHundredMillion, billion, tenUnit, tohundred, toThousand, toTenThousand, toHundredThousand, toMillion; public static String word = "",s = "", str = "hundred "; public static void main(String[] args) { init(); } public static void init( ){ System.out.println("Enter Number"); int num = in.nextInt(); int digit = 0; int temp = num; while(temp>0) { if (temp%10 >= 0){ digit++; temp = temp/10; }; }; if (num < 0) { num *= -1; s = "minus "; } if (num == 0) System.out.println("Zero"); if (num < 20){ word = units[num]; } if (digit == 2){ tens(num); } if (digit == 3){ hundreds(num); } if (digit == 4){ thousands(num); } if (digit == 5){ tenThousands(num); } if (digit == 6){ hundredThousands(num); } if (digit == 7){ millions(num); } if (digit == 8){ tenMillions(num); } if (digit == 9){ hundredMillions(num); } if (digit == 10){ billions(num); } System.out.println(s+word); } public static String tens(int num) { if (num > 20){ ten = num/10; unit = num%10; word =tens[ten]+""+units[unit]; }; return word; } public static String hundreds(int num) { hundred = num/100; tenUnit= num%100; if (tenUnit != 0) str = "hundred and"; tens(tenUnit); if(tenUnit == 0) str = ""; word = units[hundred] +str+ word; return word; } public static String thousands(int num) { thousand = num/1000; tohundred = num %1000; hundreds(tohundred); if(thousand == 0) str = ""; word = units[thousand] + big[1] +word; return word; } public static String tenThousands(int num) { tenThousand = num/10000; toThousand = num % 10000; thousands(toThousand); word = tens[tenThousand]+word; return word; } public static String hundredThousands(int num) { hundredThousand = num/100000; toTenThousand = num % 100000; if (toTenThousand != 0) str = "hundred and"; if(toTenThousand == 0) str = ""; tenThousands(toTenThousand); word = units[hundredThousand]+ str +word; return word; } public static String millions(int num) { million = num/1000000; toHundredThousand = num % 1000000; hundredThousands(toHundredThousand); word = units[million] +big[2] +word; return word; } public static String tenMillions(int num) { tenMillion = num/10000000; toMillion = num % 10000000; millions(toMillion); word = tens[tenMillion] +word; return word; } public static String hundredMillions(int num) { hundredMillion = num/100000000; toTenMillion = num % 100000000; tenMillions(toTenMillion); if (toTenMillion != 0) str = "hundred and"; if(toTenMillion == 0) str = ""; word = units[hundredMillion] +str +word; return word; } public static String billions(int num) { billion = num/1000000000; toHundredMillion = num %1000000000; hundredMillions(toHundredMillion); word = units[billion] + big[3] +word; return word; } }
NumToWord.java
Update Num to Word Program
NumToWord.java
Update Num to Word Program
<ide><path>umToWord.java <add>/*Calculator by Henry and Opeyemi */ <add>import java.util.Scanner; <add> <add>public class NumToWord{ <add> public static Scanner in = new Scanner(System.in); <add> <add> public static String [] units = { <add> "", " one ", " two ", " three ", " four ", " five ", " six ", " seven ", " eight ", <add> " nine ", " ten ", " eleven ", " twelve ", " thirteen ", " fourteen ", " fifteen ", <add> " sixteen ", " seventeen ", " eighteen ", " nineteen " <add> } ; <add> <add> public static String [] tens = { <add> "", ""," twenty ", " thirty ", " fourty ", " fifty ", " sixty ", " seventy ", " eighty ", " ninty " <add> }; <add> <add> public static String [] big = { <add> " hundred ", " thousand ", " million ", " billion " <add> }; <add> public static int unit, ten, hundred, thousand,tenThousand, tenMillion, hundredThousand, million, toTenMillion, hundredMillion, toHundredMillion, billion, tenUnit, tohundred, toThousand, toTenThousand, toHundredThousand, toMillion; <add> <add> public static String word = "",s = "", str = "hundred "; <add> <add> public static void main(String[] args) { <add> init(); <add> } <add> <add> public static void init( ){ <add> <add> System.out.println("Enter Number"); <add> int num = in.nextInt(); <add> <add> <add> <add> <add> int digit = 0; <add> int temp = num; <add> while(temp>0) <add> { <add> if (temp%10 >= 0){ <add> digit++; <add> temp = temp/10; <add> }; <add> }; <add> <add> <add> if (num < 0) { <add> num *= -1; <add> s = "minus "; <add> } <add> <add> if (num == 0) System.out.println("Zero"); <add> <add> <add> if (num < 20){ <add> word = units[num]; <add> } <add> <add> if (digit == 2){ <add> tens(num); <add> } <add> <add> if (digit == 3){ <add> hundreds(num); <add> } <add> <add> if (digit == 4){ <add> thousands(num); <add> } <add> <add> if (digit == 5){ <add> tenThousands(num); <add> } <add> <add> if (digit == 6){ <add> hundredThousands(num); <add> } <add> <add> if (digit == 7){ <add> millions(num); <add> } <add> <add> if (digit == 8){ <add> tenMillions(num); <add> } <add> <add> if (digit == 9){ <add> hundredMillions(num); <add> } <add> <add> if (digit == 10){ <add> billions(num); <add> } <add> <add> <add> <add> System.out.println(s+word); <add> } <add> <add> public static String tens(int num) { <add> if (num > 20){ <add> ten = num/10; <add> unit = num%10; <add> word =tens[ten]+""+units[unit]; <add> }; <add> return word; <add> <add> } <add> <add> public static String hundreds(int num) { <add> hundred = num/100; <add> tenUnit= num%100; <add> if (tenUnit != 0) str = "hundred and"; <add> tens(tenUnit); <add> if(tenUnit == 0) str = ""; <add> word = units[hundred] +str+ word; <add> return word; <add> <add> } <add> <add> public static String thousands(int num) { <add> thousand = num/1000; <add> tohundred = num %1000; <add> hundreds(tohundred); <add> if(thousand == 0) str = ""; <add> word = units[thousand] + big[1] +word; <add> return word; <add> } <add> <add> public static String tenThousands(int num) { <add> tenThousand = num/10000; <add> toThousand = num % 10000; <add> thousands(toThousand); <add> word = tens[tenThousand]+word; <add> return word; <add> <add> } <add> <add> public static String hundredThousands(int num) { <add> hundredThousand = num/100000; <add> toTenThousand = num % 100000; <add> if (toTenThousand != 0) str = "hundred and"; <add> if(toTenThousand == 0) str = ""; <add> tenThousands(toTenThousand); <add> word = units[hundredThousand]+ str +word; <add> return word; <add> } <add> <add> <add> public static String millions(int num) { 
<add> million = num/1000000; <add> toHundredThousand = num % 1000000; <add> hundredThousands(toHundredThousand); <add> word = units[million] +big[2] +word; <add> return word; <add> } <add> <add> public static String tenMillions(int num) { <add> tenMillion = num/10000000; <add> toMillion = num % 10000000; <add> millions(toMillion); <add> word = tens[tenMillion] +word; <add> return word; <add> <add> } <add> <add> public static String hundredMillions(int num) { <add> hundredMillion = num/100000000; <add> toTenMillion = num % 100000000; <add> tenMillions(toTenMillion); <add> if (toTenMillion != 0) str = "hundred and"; <add> if(toTenMillion == 0) str = ""; <add> word = units[hundredMillion] +str +word; <add> return word; <add> <add> } <add> <add> public static String billions(int num) { <add> billion = num/1000000000; <add> toHundredMillion = num %1000000000; <add> <add> hundredMillions(toHundredMillion); <add> word = units[billion] + big[3] +word; <add> return word; <add> <add> } <add> <add> <add> <add>} <add>
Java
mit
513b1ddc9028b63cefc2ef7a1b1c3fd0d2953998
0
xiweicheng/tms,xiweicheng/tms,xiweicheng/tms,xiweicheng/tms
/** * 版权所有 (TMS) */ package com.lhjz.portal.component; import java.io.File; import java.io.IOException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import javax.mail.MessagingException; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeUtility; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.FileSystemResource; import org.springframework.mail.SimpleMailMessage; import org.springframework.mail.javamail.JavaMailSender; import org.springframework.mail.javamail.JavaMailSenderImpl; import org.springframework.mail.javamail.MimeMessageHelper; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; import com.lhjz.portal.component.core.MailQueue; import com.lhjz.portal.component.core.model.MailItem; import com.lhjz.portal.util.StringUtil; import lombok.extern.log4j.Log4j; /** * * @author xi * * @date 2015年6月14日 上午10:31:32 * */ @Component("myMailSender2") @Log4j public class MailSender2 { @Autowired JavaMailSender mailSender; @Autowired MailQueue mailQueue; static ExecutorService pool = Executors.newSingleThreadExecutor(); public JavaMailSenderImpl getMailSender() { return (JavaMailSenderImpl) mailSender; } public boolean sendText(String subject, String text, String... toAddr) { SimpleMailMessage message = new SimpleMailMessage(); message.setFrom(((JavaMailSenderImpl) mailSender).getUsername()); message.setTo(toAddr); message.setSubject(subject); message.setText(text); mailSender.send(message); return true; } public boolean sendHtml(String subject, String html, String... toAddr) throws MessagingException { if (toAddr == null || toAddr.length == 0) { return false; } MimeMessage mimeMessage = mailSender.createMimeMessage(); MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, false, "UTF-8"); helper.setFrom(((JavaMailSenderImpl) mailSender).getUsername()); helper.setTo(toAddr); helper.setSubject(subject); helper.setText(html, true); mailSender.send(mimeMessage); return true; } public boolean sendHtmlWithAttachment(String subject, String html, String[] attachmentPaths, String... toAddr) throws MessagingException, IOException { MimeMessage mimeMessage = mailSender.createMimeMessage(); MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true, "UTF-8"); helper.setFrom(((JavaMailSenderImpl) mailSender).getUsername()); helper.setTo(toAddr); helper.setSubject(subject); helper.setText(html, true); for (String path : attachmentPaths) { helper.addAttachment(MimeUtility.encodeText( new File(path).getName(), "UTF-8", "B"), new FileSystemResource(path)); } mailSender.send(mimeMessage); return true; } public void sendHtmlByQueue(MailItem mailItem) { this.sendHtmlByQueue(mailItem.getSubject(), mailItem.getHtml(), mailItem.getToAddr()); } public void sendHtmlByQueue(String subject, String html, String... toAddr) { if (StringUtil.isEmpty(subject) || StringUtil.isEmpty(html)) { return; } if (toAddr == null || toAddr.length == 0) { return; } pool.execute(() -> { try { this.sendHtml(subject, html, toAddr); log.info("邮件发送成功!"); } catch (MessagingException e) { log.info("邮件发送失败,放入邮件定时计划任务队列中!"); if (!mailQueue.offer(new MailItem(subject, html, toAddr))) { log.error("邮件队列已满!"); } e.printStackTrace(); } }); } @Scheduled(fixedRate = 60000) public void reportCurrentTime() { log.info("邮件定时计划任务执行!"); MailItem mailItem = mailQueue.poll(); if (mailItem != null) { this.sendHtmlByQueue(mailItem); } } }
src/main/java/com/lhjz/portal/component/MailSender2.java
/** * 版权所有 (TMS) */ package com.lhjz.portal.component; import java.io.File; import java.io.IOException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import javax.mail.MessagingException; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeUtility; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.FileSystemResource; import org.springframework.mail.SimpleMailMessage; import org.springframework.mail.javamail.JavaMailSender; import org.springframework.mail.javamail.JavaMailSenderImpl; import org.springframework.mail.javamail.MimeMessageHelper; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; import com.lhjz.portal.component.core.MailQueue; import com.lhjz.portal.component.core.model.MailItem; import com.lhjz.portal.util.StringUtil; import lombok.extern.log4j.Log4j; /** * * @author xi * * @date 2015年6月14日 上午10:31:32 * */ @Component("myMailSender2") @Log4j public class MailSender2 { @Autowired JavaMailSender mailSender; @Autowired MailQueue mailQueue; static ExecutorService pool = Executors.newSingleThreadExecutor(); public JavaMailSenderImpl getMailSender() { return (JavaMailSenderImpl) mailSender; } public boolean sendText(String subject, String text, String... toAddr) { SimpleMailMessage message = new SimpleMailMessage(); message.setFrom(((JavaMailSenderImpl) mailSender).getUsername()); message.setTo(toAddr); message.setSubject(subject); message.setText(text); mailSender.send(message); return true; } public boolean sendHtml(String subject, String html, String... toAddr) throws MessagingException { if (toAddr == null || toAddr.length == 0) { return false; } MimeMessage mimeMessage = mailSender.createMimeMessage(); MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, false, "UTF-8"); helper.setFrom(((JavaMailSenderImpl) mailSender).getUsername()); helper.setTo(toAddr); helper.setSubject(subject); helper.setText(html, true); mailSender.send(mimeMessage); return true; } public boolean sendHtmlWithAttachment(String subject, String html, String[] attachmentPaths, String... toAddr) throws MessagingException, IOException { MimeMessage mimeMessage = mailSender.createMimeMessage(); MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, true, "UTF-8"); helper.setFrom(((JavaMailSenderImpl) mailSender).getUsername()); helper.setTo(toAddr); helper.setSubject(subject); helper.setText(html, true); for (String path : attachmentPaths) { helper.addAttachment(MimeUtility.encodeText( new File(path).getName(), "UTF-8", "B"), new FileSystemResource(path)); } mailSender.send(mimeMessage); return true; } public void sendHtmlByQueue(MailItem mailItem) { this.sendHtmlByQueue(mailItem.getSubject(), mailItem.getHtml(), mailItem.getToAddr()); } public void sendHtmlByQueue(String subject, String html, String... toAddr) { if (StringUtil.isEmpty(subject) || StringUtil.isEmpty(html)) { return; } if (toAddr == null || toAddr.length == 0) { return; } pool.execute(() -> { try { this.sendHtml(subject, html, toAddr); log.info("邮件发送成功!"); } catch (MessagingException e) { log.info("邮件发送失败,放入邮件定时计划任务队列中!"); if (mailQueue.offer(new MailItem(subject, html, toAddr))) { log.error("邮件队列已满!"); } e.printStackTrace(); } }); } @Scheduled(fixedRate = 60000) public void reportCurrentTime() { log.debug("邮件定时计划任务执行!"); MailItem mailItem = mailQueue.poll(); if (mailItem != null) { this.sendHtmlByQueue(mailItem); } } }
Fix incorrect handling of the mail-queue-full check.
src/main/java/com/lhjz/portal/component/MailSender2.java
Fix incorrect handling of the mail-queue-full check.
<ide><path>rc/main/java/com/lhjz/portal/component/MailSender2.java <ide> log.info("邮件发送成功!"); <ide> } catch (MessagingException e) { <ide> log.info("邮件发送失败,放入邮件定时计划任务队列中!"); <del> if (mailQueue.offer(new MailItem(subject, html, toAddr))) { <add> if (!mailQueue.offer(new MailItem(subject, html, toAddr))) { <ide> log.error("邮件队列已满!"); <ide> } <ide> e.printStackTrace(); <ide> <ide> @Scheduled(fixedRate = 60000) <ide> public void reportCurrentTime() { <del> log.debug("邮件定时计划任务执行!"); <add> log.info("邮件定时计划任务执行!"); <ide> MailItem mailItem = mailQueue.poll(); <ide> if (mailItem != null) { <ide> this.sendHtmlByQueue(mailItem);
JavaScript
mit
8fee7c46c16cbc1b46477cf600160f9668b9f5d0
0
go-gitea/gitea,go-gitea/gitea,go-gitea/gitea
import $ from 'jquery'; function getDefaultSvgBoundsIfUndefined(svgXml, src) { const DefaultSize = 300; const MaxSize = 99999; const svg = svgXml.documentElement; const width = svg?.width?.baseVal; const height = svg?.height?.baseVal; if (width === undefined || height === undefined) { return null; // in case some svg is invalid or doesn't have the width/height } if (width.unitType === SVGLength.SVG_LENGTHTYPE_PERCENTAGE || height.unitType === SVGLength.SVG_LENGTHTYPE_PERCENTAGE) { const img = new Image(); img.src = src; if (img.width > 1 && img.width < MaxSize && img.height > 1 && img.height < MaxSize) { return { width: img.width, height: img.height }; } if (svg.hasAttribute('viewBox')) { const viewBox = svg.viewBox.baseVal; return { width: DefaultSize, height: DefaultSize * viewBox.width / viewBox.height }; } return { width: DefaultSize, height: DefaultSize }; } return null; } export default function initImageDiff() { function createContext(image1, image2) { const size1 = { width: image1 && image1.width || 0, height: image1 && image1.height || 0 }; const size2 = { width: image2 && image2.width || 0, height: image2 && image2.height || 0 }; const max = { width: Math.max(size2.width, size1.width), height: Math.max(size2.height, size1.height) }; return { image1: $(image1), image2: $(image2), size1, size2, max, ratio: [ Math.floor(max.width - size1.width) / 2, Math.floor(max.height - size1.height) / 2, Math.floor(max.width - size2.width) / 2, Math.floor(max.height - size2.height) / 2 ] }; } $('.image-diff').each(function() { const $container = $(this); // the container may be hidden by "viewed" checkbox, so use the parent's width for reference const diffContainerWidth = Math.max($container.closest('.diff-file-box').width() - 300, 100); const pathAfter = $container.data('path-after'); const pathBefore = $container.data('path-before'); const imageInfos = [{ loaded: false, path: pathAfter, $image: $container.find('img.image-after'), $boundsInfo: $container.find('.bounds-info-after') }, { loaded: false, path: pathBefore, $image: $container.find('img.image-before'), $boundsInfo: $container.find('.bounds-info-before') }]; for (const info of imageInfos) { if (info.$image.length > 0) { $.ajax({ url: info.path, success: (data, _, jqXHR) => { info.$image.on('load', () => { info.loaded = true; setReadyIfLoaded(); }).on('error', () => { info.loaded = true; setReadyIfLoaded(); info.$boundsInfo.text('(image error)'); }); info.$image.attr('src', info.path); if (jqXHR.getResponseHeader('Content-Type') === 'image/svg+xml') { const bounds = getDefaultSvgBoundsIfUndefined(data, info.path); if (bounds) { info.$image.attr('width', bounds.width); info.$image.attr('height', bounds.height); info.$boundsInfo.hide(); } } } }); } else { info.loaded = true; setReadyIfLoaded(); } } function setReadyIfLoaded() { if (imageInfos[0].loaded && imageInfos[1].loaded) { initViews(imageInfos[0].$image, imageInfos[1].$image); } } function initViews($imageAfter, $imageBefore) { initSideBySide(createContext($imageAfter[0], $imageBefore[0])); if ($imageAfter.length > 0 && $imageBefore.length > 0) { initSwipe(createContext($imageAfter[1], $imageBefore[1])); initOverlay(createContext($imageAfter[2], $imageBefore[2])); } $container.find('> .loader').hide(); $container.find('> .hide').removeClass('hide'); } function initSideBySide(sizes) { let factor = 1; if (sizes.max.width > (diffContainerWidth - 24) / 2) { factor = (diffContainerWidth - 24) / 2 / sizes.max.width; } const widthChanged = sizes.image1.length !== 0 && sizes.image2.length 
!== 0 && sizes.image1[0].naturalWidth !== sizes.image2[0].naturalWidth; const heightChanged = sizes.image1.length !== 0 && sizes.image2.length !== 0 && sizes.image1[0].naturalHeight !== sizes.image2[0].naturalHeight; if (sizes.image1.length !== 0) { $container.find('.bounds-info-after .bounds-info-width').text(`${sizes.image1[0].naturalWidth}px`).addClass(widthChanged ? 'green' : ''); $container.find('.bounds-info-after .bounds-info-height').text(`${sizes.image1[0].naturalHeight}px`).addClass(heightChanged ? 'green' : ''); } if (sizes.image2.length !== 0) { $container.find('.bounds-info-before .bounds-info-width').text(`${sizes.image2[0].naturalWidth}px`).addClass(widthChanged ? 'red' : ''); $container.find('.bounds-info-before .bounds-info-height').text(`${sizes.image2[0].naturalHeight}px`).addClass(heightChanged ? 'red' : ''); } sizes.image1.css({ width: sizes.size1.width * factor, height: sizes.size1.height * factor }); sizes.image1.parent().css({ margin: `${sizes.ratio[1] * factor + 15}px ${sizes.ratio[0] * factor}px ${sizes.ratio[1] * factor}px`, width: sizes.size1.width * factor + 2, height: sizes.size1.height * factor + 2 }); sizes.image2.css({ width: sizes.size2.width * factor, height: sizes.size2.height * factor }); sizes.image2.parent().css({ margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`, width: sizes.size2.width * factor + 2, height: sizes.size2.height * factor + 2 }); } function initSwipe(sizes) { let factor = 1; if (sizes.max.width > diffContainerWidth - 12) { factor = (diffContainerWidth - 12) / sizes.max.width; } sizes.image1.css({ width: sizes.size1.width * factor, height: sizes.size1.height * factor }); sizes.image1.parent().css({ margin: `0px ${sizes.ratio[0] * factor}px`, width: sizes.size1.width * factor + 2, height: sizes.size1.height * factor + 2 }); sizes.image1.parent().parent().css({ padding: `${sizes.ratio[1] * factor}px 0 0 0`, width: sizes.max.width * factor + 2 }); sizes.image2.css({ width: sizes.size2.width * factor, height: sizes.size2.height * factor }); sizes.image2.parent().css({ margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`, width: sizes.size2.width * factor + 2, height: sizes.size2.height * factor + 2 }); sizes.image2.parent().parent().css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 2 }); $container.find('.diff-swipe').css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 4 }); $container.find('.swipe-bar').on('mousedown', function(e) { e.preventDefault(); const $swipeBar = $(this); const $swipeFrame = $swipeBar.parent(); const width = $swipeFrame.width() - $swipeBar.width() - 2; $(document).on('mousemove.diff-swipe', (e2) => { e2.preventDefault(); const value = Math.max(0, Math.min(e2.clientX - $swipeFrame.offset().left, width)); $swipeBar.css({ left: value }); $container.find('.swipe-container').css({ width: $swipeFrame.width() - value }); $(document).on('mouseup.diff-swipe', () => { $(document).off('.diff-swipe'); }); }); }); } function initOverlay(sizes) { let factor = 1; if (sizes.max.width > diffContainerWidth - 12) { factor = (diffContainerWidth - 12) / sizes.max.width; } sizes.image1.css({ width: sizes.size1.width * factor, height: sizes.size1.height * factor }); sizes.image2.css({ width: sizes.size2.width * factor, height: sizes.size2.height * factor }); sizes.image1.parent().css({ margin: `${sizes.ratio[1] * factor}px ${sizes.ratio[0] * factor}px`, width: sizes.size1.width * factor + 2, height: sizes.size1.height * factor + 2 }); 
sizes.image2.parent().css({ margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`, width: sizes.size2.width * factor + 2, height: sizes.size2.height * factor + 2 }); sizes.image2.parent().parent().css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 2 }); $container.find('.onion-skin').css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 4 }); const $range = $container.find("input[type='range'"); const onInput = () => sizes.image1.parent().css({ opacity: $range.val() / 100 }); $range.on('input', onInput); onInput(); } }); }
web_src/js/features/imagediff.js
import $ from 'jquery'; function getDefaultSvgBoundsIfUndefined(svgXml, src) { const DefaultSize = 300; const MaxSize = 99999; const svg = svgXml.documentElement; const width = svg?.width?.baseVal; const height = svg?.height?.baseVal; if (width === undefined || height === undefined) { return null; // in case some svg is invalid or doesn't have the width/height } if (width.unitType === SVGLength.SVG_LENGTHTYPE_PERCENTAGE || height.unitType === SVGLength.SVG_LENGTHTYPE_PERCENTAGE) { const img = new Image(); img.src = src; if (img.width > 1 && img.width < MaxSize && img.height > 1 && img.height < MaxSize) { return { width: img.width, height: img.height }; } if (svg.hasAttribute('viewBox')) { const viewBox = svg.viewBox.baseVal; return { width: DefaultSize, height: DefaultSize * viewBox.width / viewBox.height }; } return { width: DefaultSize, height: DefaultSize }; } return null; } export default function initImageDiff() { function createContext(image1, image2) { const size1 = { width: image1 && image1.width || 0, height: image1 && image1.height || 0 }; const size2 = { width: image2 && image2.width || 0, height: image2 && image2.height || 0 }; const max = { width: Math.max(size2.width, size1.width), height: Math.max(size2.height, size1.height) }; return { image1: $(image1), image2: $(image2), size1, size2, max, ratio: [ Math.floor(max.width - size1.width) / 2, Math.floor(max.height - size1.height) / 2, Math.floor(max.width - size2.width) / 2, Math.floor(max.height - size2.height) / 2 ] }; } $('.image-diff').each(function() { const $container = $(this); const diffContainerWidth = $container.width() - 300; const pathAfter = $container.data('path-after'); const pathBefore = $container.data('path-before'); const imageInfos = [{ loaded: false, path: pathAfter, $image: $container.find('img.image-after'), $boundsInfo: $container.find('.bounds-info-after') }, { loaded: false, path: pathBefore, $image: $container.find('img.image-before'), $boundsInfo: $container.find('.bounds-info-before') }]; for (const info of imageInfos) { if (info.$image.length > 0) { $.ajax({ url: info.path, success: (data, _, jqXHR) => { info.$image.on('load', () => { info.loaded = true; setReadyIfLoaded(); }).on('error', () => { info.loaded = true; setReadyIfLoaded(); info.$boundsInfo.text('(image error)'); }); info.$image.attr('src', info.path); if (jqXHR.getResponseHeader('Content-Type') === 'image/svg+xml') { const bounds = getDefaultSvgBoundsIfUndefined(data, info.path); if (bounds) { info.$image.attr('width', bounds.width); info.$image.attr('height', bounds.height); info.$boundsInfo.hide(); } } } }); } else { info.loaded = true; setReadyIfLoaded(); } } function setReadyIfLoaded() { if (imageInfos[0].loaded && imageInfos[1].loaded) { initViews(imageInfos[0].$image, imageInfos[1].$image); } } function initViews($imageAfter, $imageBefore) { initSideBySide(createContext($imageAfter[0], $imageBefore[0])); if ($imageAfter.length > 0 && $imageBefore.length > 0) { initSwipe(createContext($imageAfter[1], $imageBefore[1])); initOverlay(createContext($imageAfter[2], $imageBefore[2])); } $container.find('> .loader').hide(); $container.find('> .hide').removeClass('hide'); } function initSideBySide(sizes) { let factor = 1; if (sizes.max.width > (diffContainerWidth - 24) / 2) { factor = (diffContainerWidth - 24) / 2 / sizes.max.width; } const widthChanged = sizes.image1.length !== 0 && sizes.image2.length !== 0 && sizes.image1[0].naturalWidth !== sizes.image2[0].naturalWidth; const heightChanged = sizes.image1.length !== 0 && 
sizes.image2.length !== 0 && sizes.image1[0].naturalHeight !== sizes.image2[0].naturalHeight; if (sizes.image1.length !== 0) { $container.find('.bounds-info-after .bounds-info-width').text(`${sizes.image1[0].naturalWidth}px`).addClass(widthChanged ? 'green' : ''); $container.find('.bounds-info-after .bounds-info-height').text(`${sizes.image1[0].naturalHeight}px`).addClass(heightChanged ? 'green' : ''); } if (sizes.image2.length !== 0) { $container.find('.bounds-info-before .bounds-info-width').text(`${sizes.image2[0].naturalWidth}px`).addClass(widthChanged ? 'red' : ''); $container.find('.bounds-info-before .bounds-info-height').text(`${sizes.image2[0].naturalHeight}px`).addClass(heightChanged ? 'red' : ''); } sizes.image1.css({ width: sizes.size1.width * factor, height: sizes.size1.height * factor }); sizes.image1.parent().css({ margin: `${sizes.ratio[1] * factor + 15}px ${sizes.ratio[0] * factor}px ${sizes.ratio[1] * factor}px`, width: sizes.size1.width * factor + 2, height: sizes.size1.height * factor + 2 }); sizes.image2.css({ width: sizes.size2.width * factor, height: sizes.size2.height * factor }); sizes.image2.parent().css({ margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`, width: sizes.size2.width * factor + 2, height: sizes.size2.height * factor + 2 }); } function initSwipe(sizes) { let factor = 1; if (sizes.max.width > diffContainerWidth - 12) { factor = (diffContainerWidth - 12) / sizes.max.width; } sizes.image1.css({ width: sizes.size1.width * factor, height: sizes.size1.height * factor }); sizes.image1.parent().css({ margin: `0px ${sizes.ratio[0] * factor}px`, width: sizes.size1.width * factor + 2, height: sizes.size1.height * factor + 2 }); sizes.image1.parent().parent().css({ padding: `${sizes.ratio[1] * factor}px 0 0 0`, width: sizes.max.width * factor + 2 }); sizes.image2.css({ width: sizes.size2.width * factor, height: sizes.size2.height * factor }); sizes.image2.parent().css({ margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`, width: sizes.size2.width * factor + 2, height: sizes.size2.height * factor + 2 }); sizes.image2.parent().parent().css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 2 }); $container.find('.diff-swipe').css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 4 }); $container.find('.swipe-bar').on('mousedown', function(e) { e.preventDefault(); const $swipeBar = $(this); const $swipeFrame = $swipeBar.parent(); const width = $swipeFrame.width() - $swipeBar.width() - 2; $(document).on('mousemove.diff-swipe', (e2) => { e2.preventDefault(); const value = Math.max(0, Math.min(e2.clientX - $swipeFrame.offset().left, width)); $swipeBar.css({ left: value }); $container.find('.swipe-container').css({ width: $swipeFrame.width() - value }); $(document).on('mouseup.diff-swipe', () => { $(document).off('.diff-swipe'); }); }); }); } function initOverlay(sizes) { let factor = 1; if (sizes.max.width > diffContainerWidth - 12) { factor = (diffContainerWidth - 12) / sizes.max.width; } sizes.image1.css({ width: sizes.size1.width * factor, height: sizes.size1.height * factor }); sizes.image2.css({ width: sizes.size2.width * factor, height: sizes.size2.height * factor }); sizes.image1.parent().css({ margin: `${sizes.ratio[1] * factor}px ${sizes.ratio[0] * factor}px`, width: sizes.size1.width * factor + 2, height: sizes.size1.height * factor + 2 }); sizes.image2.parent().css({ margin: `${sizes.ratio[3] * factor}px ${sizes.ratio[2] * factor}px`, width: sizes.size2.width * factor + 2, 
height: sizes.size2.height * factor + 2 }); sizes.image2.parent().parent().css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 2 }); $container.find('.onion-skin').css({ width: sizes.max.width * factor + 2, height: sizes.max.height * factor + 4 }); const $range = $container.find("input[type='range'"); const onInput = () => sizes.image1.parent().css({ opacity: $range.val() / 100 }); $range.on('input', onInput); onInput(); } }); }
Fix viewed images not loading in a PR (#19919) Close #19651
web_src/js/features/imagediff.js
Fix viewed images not loading in a PR (#19919)
<ide><path>eb_src/js/features/imagediff.js <ide> $('.image-diff').each(function() { <ide> const $container = $(this); <ide> <del> const diffContainerWidth = $container.width() - 300; <add> // the container may be hidden by "viewed" checkbox, so use the parent's width for reference <add> const diffContainerWidth = Math.max($container.closest('.diff-file-box').width() - 300, 100); <ide> const pathAfter = $container.data('path-after'); <ide> const pathBefore = $container.data('path-before'); <ide>
Java
apache-2.0
a054dd4837135baf80e6be1d161a031976f1c8ee
0
rinde/RinLog,rinde/RinLog
package rinde.logistics.pdptw.solver; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Sets.newLinkedHashSet; import java.util.List; import java.util.Set; import rinde.sim.pdptw.central.GlobalStateObject; import rinde.sim.pdptw.central.GlobalStateObject.VehicleStateObject; import rinde.sim.pdptw.central.Solver; import rinde.sim.pdptw.central.Solvers; import rinde.sim.pdptw.common.ObjectiveFunction; import rinde.sim.pdptw.common.ParcelDTO; import rinde.sim.util.SupplierRng; import rinde.sim.util.SupplierRng.DefaultSupplierRng; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; public class CheapestInsertionHeuristic implements Solver { // TODO this state is not really necessary private final Set<ParcelDTO> knownParcels; private final ObjectiveFunction objectiveFunction; public CheapestInsertionHeuristic(ObjectiveFunction objFunc) { knownParcels = newLinkedHashSet(); objectiveFunction = objFunc; } @Override public ImmutableList<ImmutableList<ParcelDTO>> solve(GlobalStateObject state) { final ImmutableSet<ParcelDTO> newParcels = Sets.difference( state.availableParcels, knownParcels).immutableCopy(); knownParcels.addAll(newParcels); // construct schedule final ImmutableList.Builder<ImmutableList<ParcelDTO>> b = ImmutableList .builder(); for (final VehicleStateObject vso : state.vehicles) { if (vso.route.isPresent()) { b.add(vso.route.get()); } else { b.add(ImmutableList.<ParcelDTO> of()); } } ImmutableList<ImmutableList<ParcelDTO>> schedule = b.build(); // all new parcels need to be inserted in the plan for (final ParcelDTO p : newParcels) { double cheapestInsertion = Double.POSITIVE_INFINITY; ImmutableList<ImmutableList<ParcelDTO>> bestSchedule = schedule; for (int i = 0; i < state.vehicles.size(); i++) { final int startIndex = state.vehicles.get(i).destination == null ? 0 : 1; final List<ImmutableList<ParcelDTO>> insertions = Insertions .plusTwoInsertions(schedule.get(i), p, startIndex); for (int j = 0; j < insertions.size(); j++) { final ImmutableList<ParcelDTO> r = insertions.get(j); // compute cost using entire schedule final ImmutableList<ImmutableList<ParcelDTO>> newSchedule = modifySchedule( schedule, r, i); final double cost = objectiveFunction.computeCost(Solvers .computeStats(state, newSchedule)); if (cost < cheapestInsertion) { cheapestInsertion = cost; bestSchedule = newSchedule; } } } schedule = bestSchedule; } return schedule; } // replaces one route static <T> ImmutableList<ImmutableList<T>> modifySchedule( ImmutableList<ImmutableList<T>> originalSchedule, ImmutableList<T> vehicleSchedule, int vehicleIndex) { checkArgument(vehicleIndex >= 0 && vehicleIndex < originalSchedule.size(), "Vehicle index must be >= 0 && < %s, it is %s.", originalSchedule.size(), vehicleIndex); final ImmutableList.Builder<ImmutableList<T>> builder = ImmutableList .builder(); builder.addAll(originalSchedule.subList(0, vehicleIndex)); builder.add(vehicleSchedule); builder.addAll(originalSchedule.subList(vehicleIndex + 1, originalSchedule.size())); return builder.build(); } static <T> ImmutableList<ImmutableList<T>> createEmptySchedule(int numVehicles) { final ImmutableList.Builder<ImmutableList<T>> builder = ImmutableList .builder(); for (int i = 0; i < numVehicles; i++) { builder.add(ImmutableList.<T> of()); } return builder.build(); } /** * @param objFunc The objective function used to calculate the cost of a * schedule. 
* @return A {@link SupplierRng} that supplies * {@link CheapestInsertionHeuristic} instances. */ public static SupplierRng<Solver> supplier(final ObjectiveFunction objFunc) { return new DefaultSupplierRng<Solver>() { @Override public Solver get(long seed) { return new CheapestInsertionHeuristic(objFunc); } }; } }
src/main/java/rinde/logistics/pdptw/solver/CheapestInsertionHeuristic.java
package rinde.logistics.pdptw.solver; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Sets.newLinkedHashSet; import java.util.List; import java.util.Set; import rinde.sim.pdptw.central.GlobalStateObject; import rinde.sim.pdptw.central.GlobalStateObject.VehicleStateObject; import rinde.sim.pdptw.central.Solver; import rinde.sim.pdptw.central.Solvers; import rinde.sim.pdptw.common.ObjectiveFunction; import rinde.sim.pdptw.common.ParcelDTO; import rinde.sim.util.SupplierRng; import rinde.sim.util.SupplierRng.DefaultSupplierRng; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; public class CheapestInsertionHeuristic implements Solver { private final Set<ParcelDTO> knownParcels; private final ObjectiveFunction objectiveFunction; public CheapestInsertionHeuristic(ObjectiveFunction objFunc) { knownParcels = newLinkedHashSet(); objectiveFunction = objFunc; } @Override public ImmutableList<ImmutableList<ParcelDTO>> solve(GlobalStateObject state) { final ImmutableSet<ParcelDTO> newParcels = Sets.difference( state.availableParcels, knownParcels).immutableCopy(); knownParcels.addAll(newParcels); // construct schedule final ImmutableList.Builder<ImmutableList<ParcelDTO>> b = ImmutableList .builder(); for (final VehicleStateObject vso : state.vehicles) { if (vso.route.isPresent()) { b.add(vso.route.get()); } else { b.add(ImmutableList.<ParcelDTO> of()); } } ImmutableList<ImmutableList<ParcelDTO>> schedule = b.build(); // all new parcels need to be inserted in the plan for (final ParcelDTO p : newParcels) { double cheapestInsertion = Double.POSITIVE_INFINITY; ImmutableList<ImmutableList<ParcelDTO>> bestSchedule = schedule; for (int i = 0; i < state.vehicles.size(); i++) { final int startIndex = state.vehicles.get(i).destination == null ? 0 : 1; final List<ImmutableList<ParcelDTO>> insertions = Insertions .plusTwoInsertions(schedule.get(i), p, startIndex); for (int j = 0; j < insertions.size(); j++) { final ImmutableList<ParcelDTO> r = insertions.get(j); // compute cost using entire schedule final ImmutableList<ImmutableList<ParcelDTO>> newSchedule = modifySchedule( schedule, r, i); final double cost = objectiveFunction.computeCost(Solvers .computeStats(state, newSchedule)); if (cost < cheapestInsertion) { cheapestInsertion = cost; bestSchedule = newSchedule; } } } schedule = bestSchedule; } return schedule; } // replaces one route static <T> ImmutableList<ImmutableList<T>> modifySchedule( ImmutableList<ImmutableList<T>> originalSchedule, ImmutableList<T> vehicleSchedule, int vehicleIndex) { checkArgument(vehicleIndex >= 0 && vehicleIndex < originalSchedule.size(), "Vehicle index must be >= 0 && < %s, it is %s.", originalSchedule.size(), vehicleIndex); final ImmutableList.Builder<ImmutableList<T>> builder = ImmutableList .builder(); builder.addAll(originalSchedule.subList(0, vehicleIndex)); builder.add(vehicleSchedule); builder.addAll(originalSchedule.subList(vehicleIndex + 1, originalSchedule.size())); return builder.build(); } static <T> ImmutableList<ImmutableList<T>> createEmptySchedule(int numVehicles) { final ImmutableList.Builder<ImmutableList<T>> builder = ImmutableList .builder(); for (int i = 0; i < numVehicles; i++) { builder.add(ImmutableList.<T> of()); } return builder.build(); } /** * @param objFunc The objective function used to calculate the cost of a * schedule. 
* @return A {@link SupplierRng} that supplies * {@link CheapestInsertionHeuristic} instances. */ public static SupplierRng<Solver> supplier(final ObjectiveFunction objFunc) { return new DefaultSupplierRng<Solver>() { @Override public Solver get(long seed) { return new CheapestInsertionHeuristic(objFunc); } }; } }
added comment
src/main/java/rinde/logistics/pdptw/solver/CheapestInsertionHeuristic.java
added comment
<ide><path>src/main/java/rinde/logistics/pdptw/solver/CheapestInsertionHeuristic.java
<ide> 
<ide> public class CheapestInsertionHeuristic implements Solver {
<ide> 
<add> // TODO this state is not really necessary
<ide> private final Set<ParcelDTO> knownParcels;
<ide> private final ObjectiveFunction objectiveFunction;
<ide> 
Java
apache-2.0
8c37d82d0624a4003def4dd1692d7dee528415d8
0
folio-org/raml-module-builder,folio-org/raml-module-builder,folio-org/raml-module-builder
package org.folio.rest.persist; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import freemarker.template.TemplateException; import io.vertx.core.AsyncResult; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.Handler; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import io.vertx.core.logging.Logger; import io.vertx.core.logging.LoggerFactory; import io.vertx.pgclient.PgConnectOptions; import io.vertx.pgclient.PgConnection; import io.vertx.pgclient.PgPool; import io.vertx.sqlclient.PoolOptions; import io.vertx.sqlclient.PreparedStatement; import io.vertx.sqlclient.Row; import io.vertx.sqlclient.RowIterator; import io.vertx.sqlclient.RowSet; import io.vertx.sqlclient.RowStream; import io.vertx.sqlclient.Transaction; import io.vertx.sqlclient.Tuple; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.StringReader; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.crypto.SecretKey; import org.apache.commons.collections4.map.HashedMap; import org.apache.commons.collections4.map.MultiKeyMap; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.folio.cql2pgjson.util.Cql2PgUtil; import org.folio.rest.jaxrs.model.ResultInfo; import org.folio.rest.persist.Criteria.Criterion; import org.folio.rest.persist.Criteria.Limit; import org.folio.rest.persist.Criteria.Offset; import org.folio.rest.persist.Criteria.UpdateSection; import org.folio.rest.persist.cql.CQLWrapper; import org.folio.rest.persist.facets.FacetField; import org.folio.rest.persist.facets.FacetManager; import org.folio.rest.persist.helpers.LocalRowSet; import org.folio.rest.persist.interfaces.Results; import org.folio.rest.security.AES; import org.folio.rest.tools.PomReader; import org.folio.rest.tools.messages.MessageConsts; import org.folio.rest.tools.messages.Messages; import org.folio.rest.tools.monitor.StatsTracker; import org.folio.rest.tools.utils.Envs; import org.folio.rest.tools.utils.LogUtil; import org.folio.rest.tools.utils.NetworkUtils; import org.folio.rest.tools.utils.ObjectMapperTool; import org.folio.rest.tools.utils.ResourceUtils; import org.postgresql.copy.CopyManager; import org.postgresql.core.BaseConnection; import ru.yandex.qatools.embed.postgresql.EmbeddedPostgres; import ru.yandex.qatools.embed.postgresql.PostgresProcess; import ru.yandex.qatools.embed.postgresql.distribution.Version; /** * @author shale * * currently does not support binary data unless base64 encoded */ public class PostgresClient { public static final String DEFAULT_SCHEMA = "public"; public static final String DEFAULT_JSONB_FIELD_NAME = "jsonb"; static Logger log = LoggerFactory.getLogger(PostgresClient.class); /** default analyze threshold value in milliseconds */ static final long 
EXPLAIN_QUERY_THRESHOLD_DEFAULT = 1000; static final String COUNT_FIELD = "count"; private static final String ID_FIELD = "id"; private static final String RETURNING_ID = " RETURNING id "; private static final String CONNECTION_RELEASE_DELAY = "connectionReleaseDelay"; private static final String MAX_POOL_SIZE = "maxPoolSize"; /** default release delay in milliseconds; after this time an idle database connection is closed */ private static final int DEFAULT_CONNECTION_RELEASE_DELAY = 60000; private static final String POSTGRES_LOCALHOST_CONFIG = "/postgres-conf.json"; private static final int EMBEDDED_POSTGRES_PORT = 6000; private static final int STREAM_GET_DEFAULT_CHUNK_SIZE = 100; private static final String SELECT = "SELECT "; private static final String UPDATE = "UPDATE "; private static final String DELETE = "DELETE "; private static final String FROM = " FROM "; private static final String SET = " SET "; private static final String WHERE = " WHERE "; private static final String INSERT_CLAUSE = "INSERT INTO "; private static final String _PASSWORD = "password"; //NOSONAR private static final String _USERNAME = "username"; private static final String HOST = "host"; private static final String PORT = "port"; private static final String DATABASE = "database"; private static final String DEFAULT_IP = "127.0.0.1"; //NOSONAR private static final String STATS_KEY = PostgresClient.class.getName(); private static final String GET_STAT_METHOD = "get"; private static final String COUNT_STAT_METHOD = "count"; private static final String SAVE_STAT_METHOD = "save"; private static final String UPDATE_STAT_METHOD = "update"; private static final String DELETE_STAT_METHOD = "delete"; private static final String EXECUTE_STAT_METHOD = "execute"; private static final String PROCESS_RESULTS_STAT_METHOD = "processResults"; private static final String SPACE = " "; private static final String DOT = "."; private static final String COMMA = ","; private static final String SEMI_COLON = ";"; private static EmbeddedPostgres embeddedPostgres; private static boolean embeddedMode = false; private static String configPath = null; private static ObjectMapper mapper = ObjectMapperTool.getMapper(); private static MultiKeyMap<Object, PostgresClient> connectionPool = MultiKeyMap.multiKeyMap(new HashedMap<>()); private static final String MODULE_NAME = PomReader.INSTANCE.getModuleName(); private static final Pattern POSTGRES_IDENTIFIER = Pattern.compile("^[a-zA-Z_][0-9a-zA-Z_]{0,62}$"); private static final Pattern POSTGRES_DOLLAR_QUOTING = // \\B = a non-word boundary, the first $ must not be part of an identifier (foo$bar$baz) Pattern.compile("[^\\n\\r]*?\\B(\\$\\w*\\$).*?\\1[^\\n\\r]*", Pattern.DOTALL); private static final Pattern POSTGRES_COPY_FROM_STDIN = // \\b = a word boundary Pattern.compile("^\\s*COPY\\b.*\\bFROM\\s+STDIN\\b.*", Pattern.CASE_INSENSITIVE); private static int embeddedPort = -1; /** analyze threshold value in milliseconds */ private static long explainQueryThreshold = EXPLAIN_QUERY_THRESHOLD_DEFAULT; private final Vertx vertx; private JsonObject postgreSQLClientConfig = null; private final Messages messages = Messages.getInstance(); private PgPool client; private final String tenantId; private final String schemaName; protected PostgresClient(Vertx vertx, String tenantId) throws Exception { this.tenantId = tenantId; this.vertx = vertx; this.schemaName = convertToPsqlStandard(tenantId); init(); } /** * test constructor for unit testing */ private PostgresClient() { this.tenantId = "test"; 
this.vertx = null; this.schemaName = convertToPsqlStandard(tenantId); log.warn("Instantiating test Postgres client! Only use with tests!"); } static PostgresClient testClient() { explainQueryThreshold = 0; return new PostgresClient(); } /** * Log the duration since startNanoTime as a debug message. * @param description text for the log entry * @param sql additional text for the log entry * @param startNanoTime start time as returned by System.nanoTime() */ private void logTimer(String description, String sql, long startNanoTime) { if (! log.isDebugEnabled()) { return; } logTimer(description, sql, startNanoTime, System.nanoTime()); } /** * Log the duration between startNanoTime and endNanoTime as a debug message. * @param description text for the log entry * @param sql additional text for the log entry * @param startNanoTime start time in nanoseconds * @param endNanoTime end time in nanoseconds */ private void logTimer(String description, String sql, long startNanoTime, long endNanoTime) { log.debug(description + " timer: " + sql + " took " + ((endNanoTime - startNanoTime) / 1000000) + " ms"); } /** * Log the duration since startNanoTime at the StatsTracker and as a debug message. * @param descriptionKey key for StatsTracker and text for the log entry * @param sql additional text for the log entry * @param startNanoTime start time as returned by System.nanoTime() */ private void statsTracker(String descriptionKey, String sql, long startNanoTime) { long endNanoTime = System.nanoTime(); StatsTracker.addStatElement(STATS_KEY + DOT + descriptionKey, (endNanoTime - startNanoTime)); if (log.isDebugEnabled()) { logTimer(descriptionKey, sql, startNanoTime, endNanoTime); } } /** * Enable or disable using embedded specific defaults for the * PostgreSQL configuration. They are used if there is no * postgres json config file. * <p> * This function must be invoked before calling the constructor. * <p> * The embedded specific defaults are: * <ul> * <li><code>username = "username"</code></li> * <li><code>password = "password"</code></li> * <li><code>host = "127.0.0.1"</code></li> * <li><code>port = 6000</code></li> * <li><code>database = "postgres"</code></li> * </ul> * * @param embed - whether to use embedded specific defaults */ public static void setIsEmbedded(boolean embed){ embeddedMode = embed; } /** * Set the port that overwrites to port of the embedded PostgreSQL. * This port overwrites any default port and any port set in the * DB_PORT environment variable or the * PostgreSQL configuration file. It is only used when <code>isEmbedded() == true</code> * when invoking the constructor. * <p> * This function must be invoked before calling the constructor. * <p> * Use -1 to not overwrite the port. * * <p>-1 is the default. * * @param port the port for embedded PostgreSQL, or -1 to not overwrite the port */ public static void setEmbeddedPort(int port){ embeddedPort = port; } /** * @return the port number to use for embedded PostgreSQL, or -1 for not overwriting the * port number of the configuration. * @see #setEmbeddedPort(int) */ public static int getEmbeddedPort() { return embeddedPort; } /** * True if embedded specific defaults for the * PostgreSQL configuration should be used if there is no * postgres json config file. * @return true for using embedded specific defaults * @see #setIsEmbedded(boolean) */ public static boolean isEmbedded(){ return embeddedMode; } /** * Set the path to the PostgreSQL connection configuration, * must be called before getInstance() to take affect. 
* <p> * This function must be invoked before calling the constructor. * * @param path new path, or null to use the default path "/postgres-conf.json" */ public static void setConfigFilePath(String path){ configPath = path; } /** * @return the path to the PostgreSQL connection configuration file; * this is never null */ public static String getConfigFilePath(){ if(configPath == null){ configPath = POSTGRES_LOCALHOST_CONFIG; } return configPath; } static void setExplainQueryThreshold(long ms) { explainQueryThreshold = ms; } static Long getExplainQueryThreshold() { return explainQueryThreshold; } /** * Instance for the tenantId from connectionPool or created and * added to connectionPool. * @param vertx the Vertx to use * @param tenantId the tenantId the instance is for * @return the PostgresClient instance, or null on error */ private static PostgresClient getInstanceInternal(Vertx vertx, String tenantId) { // assumes a single thread vertx model so no sync needed PostgresClient postgresClient = connectionPool.get(vertx, tenantId); try { if (postgresClient == null) { postgresClient = new PostgresClient(vertx, tenantId); connectionPool.put(vertx, tenantId, postgresClient); } } catch (Exception e) { log.error(e.getMessage(), e); } return postgresClient; } /** * Instance for the Postgres' default schema public. * @param vertx the Vertx to use * @return the PostgresClient instance, or null on error */ public static PostgresClient getInstance(Vertx vertx) { return getInstanceInternal(vertx, DEFAULT_SCHEMA); } /** * Instance for the tenantId. * @param vertx the Vertx to use * @param tenantId the tenantId the instance is for * @return the PostgresClient instance, or null on error */ public static PostgresClient getInstance(Vertx vertx, String tenantId) { if (DEFAULT_SCHEMA.equals(tenantId)) { throw new IllegalArgumentException("tenantId must not be default schema " + DEFAULT_SCHEMA); } return getInstanceInternal(vertx, tenantId); } /* if the password in the config file is encrypted then use the secret key * that should have been set via the admin api to decode it and use that to connect * note that in embedded mode (such as unit tests) the postgres embedded is started before the * verticle is deployed*/ private static String decodePassword(String password) throws Exception { String key = AES.getSecretKey(); if(key != null){ SecretKey sk = AES.getSecretKeyObject(key); String decoded = AES.decryptPassword(password, sk); return decoded; } /* no key , so nothing to decode */ return password; } /** this function is intended to receive the tenant id as a password * encrypt the tenant id with the secret key and use the encrypted * password as the actual password for the tenant user in the DB. * In order to then know the password - you need to take the tenant id * and encrypt it with the secret key and then you have the tenant's password */ private static String createPassword(String password) throws Exception { String key = AES.getSecretKey(); if(key != null){ SecretKey sk = AES.getSecretKeyObject(key); String newPassword = AES.encryptPasswordAsBase64(password, sk); return newPassword; } /** no key , so nothing to encrypt, the password will be the tenant id */ return password; } /** * @return this instance's PgPool that allows connections to be made */ PgPool getClient() { return client; } /** * Set this instance's PgPool that can connect to Postgres. * @param client the new client */ void setClient(PgPool client) { this.client = client; } /** * Close the SQL client of this PostgresClient instance. 
* @param whenDone invoked with the close result; additional close invocations * are always successful. */ public void closeClient(Handler<AsyncResult<Void>> whenDone) { if (client == null) { whenDone.handle(Future.succeededFuture()); return; } PgPool clientToClose = client; client = null; connectionPool.removeMultiKey(vertx, tenantId); // remove (vertx, tenantId, this) entry clientToClose.close(); whenDone.handle(Future.succeededFuture()); } /** * Close all SQL clients stored in the connection pool. */ public static void closeAllClients() { @SuppressWarnings("rawtypes") List<Future> list = new ArrayList<>(connectionPool.size()); // copy of values() because closeClient will delete them from connectionPool for (PostgresClient client : connectionPool.values().toArray(new PostgresClient [0])) { Promise<Object> promise = Promise.promise(); list.add(promise.future()); client.closeClient(f -> promise.complete()); } CompositeFuture.join(list); } static PgConnectOptions createPgConnectOptions(JsonObject sqlConfig) { PgConnectOptions pgConnectOptions = new PgConnectOptions(); String host = sqlConfig.getString(HOST); if (host != null) { pgConnectOptions.setHost(host); } Integer port = sqlConfig.getInteger(PORT); if (port != null) { pgConnectOptions.setPort(port); } String username = sqlConfig.getString(_USERNAME); if (username != null) { pgConnectOptions.setUser(username); } String password = sqlConfig.getString(_PASSWORD); if (password != null) { pgConnectOptions.setPassword(password); } String database = sqlConfig.getString(DATABASE); if (database != null) { pgConnectOptions.setDatabase(database); } Integer connectionReleaseDelay = sqlConfig.getInteger(CONNECTION_RELEASE_DELAY, DEFAULT_CONNECTION_RELEASE_DELAY); pgConnectOptions.setIdleTimeout(connectionReleaseDelay); pgConnectOptions.setIdleTimeoutUnit(TimeUnit.MILLISECONDS); return pgConnectOptions; } private void init() throws Exception { /** check if in pom.xml this prop is declared in order to work with encrypted * passwords for postgres embedded - this is a dev mode only feature */ String secretKey = System.getProperty("postgres_secretkey_4_embeddedmode"); if (secretKey != null) { AES.setSecretKey(secretKey); } postgreSQLClientConfig = getPostgreSQLClientConfig(tenantId, schemaName, Envs.allDBConfs()); logPostgresConfig(); if (isEmbedded()) { startEmbeddedPostgres(); } client = createPgPool(vertx, postgreSQLClientConfig); } static PgPool createPgPool(Vertx vertx, JsonObject configuration) { PgConnectOptions connectOptions = createPgConnectOptions(configuration); PoolOptions poolOptions = new PoolOptions(); poolOptions.setMaxSize(configuration.getInteger(MAX_POOL_SIZE, 4)); return PgPool.pool(vertx, connectOptions, poolOptions); } /** * Get PostgreSQL configuration, invokes setIsEmbedded(true) if needed. * @return configuration for PostgreSQL * @throws Exception on password decryption or encryption failure */ @SuppressWarnings("squid:S2068") /* Suppress "Credentials should not be hard-coded" - The docker container does not expose the embedded postges port. Moving the hard-coded credentials into some default config file doesn't remove them from the build. 
*/ static JsonObject getPostgreSQLClientConfig(String tenantId, String schemaName, JsonObject environmentVariables) throws Exception { // static function for easy unit testing JsonObject config = environmentVariables; if (config.size() > 0) { log.info("DB config read from environment variables"); } else { //no env variables passed in, read for module's config file config = LoadConfs.loadConfig(getConfigFilePath()); // LoadConfs.loadConfig writes its own log message } if (config == null) { if (NetworkUtils.isLocalPortFree(EMBEDDED_POSTGRES_PORT)) { log.info("No DB configuration found, starting embedded postgres with default config"); setIsEmbedded(true); } else { log.info("No DB configuration found, using default config, port is already in use"); } config = new JsonObject(); config.put(_USERNAME, _USERNAME); config.put(_PASSWORD, _PASSWORD); config.put(HOST, DEFAULT_IP); config.put(PORT, EMBEDDED_POSTGRES_PORT); config.put(DATABASE, "postgres"); } Object v = config.remove(Envs.DB_EXPLAIN_QUERY_THRESHOLD.name()); if (v instanceof Long) { PostgresClient.setExplainQueryThreshold((Long) v); } if (tenantId.equals(DEFAULT_SCHEMA)) { config.put(_PASSWORD, decodePassword( config.getString(_PASSWORD) )); } else { log.info("Using schema: " + tenantId); config.put(_USERNAME, schemaName); config.put(_PASSWORD, createPassword(tenantId)); } if(embeddedPort != -1 && embeddedMode){ //over ride the declared default port - coming from the config file and use the //passed in port as well. useful when multiple modules start up an embedded postgres //in a single server. config.put(PORT, embeddedPort); } return config; } /** * Log postgreSQLClientConfig. */ @SuppressWarnings("squid:S2068") // Suppress "Credentials should not be hard-coded" // "'password' detected in this expression". // False positive: Password is configurable, here we remove it from the log. private void logPostgresConfig() { if (! log.isInfoEnabled()) { return; } JsonObject passwordRedacted = postgreSQLClientConfig.copy(); passwordRedacted.put(_PASSWORD, "..."); log.info("postgreSQLClientConfig = " + passwordRedacted.encode()); } /** * Get connection configuration. * The following properties are returned (some of which are optional): * username, password, host, port, database, connectionReleaseDelay, maxPoolSize. * Originally based on driver * <a href="https://vertx.io/docs/vertx-mysql-postgresql-client/java/#_configuration"> * Configuration * </a>. * which is no longer in actual use. * * @return */ public JsonObject getConnectionConfig(){ return postgreSQLClientConfig; } public static JsonObject pojo2JsonObject(Object entity) throws JsonProcessingException { if (entity == null) { throw new IllegalArgumentException("Entity can not be null"); } if (entity instanceof JsonObject) { return ((JsonObject) entity); } else { return new JsonObject(mapper.writeValueAsString(entity)); } } /** * Start a SQL transaction. * * <p>Use the AsyncResult<SQLConnection> result to invoke any of the * functions that take that result as first parameter for the commands * within the transaction. * * <p>To close the open connection invoke the END or ROLLBACK * function. Note that after a failing operation (for example some UPDATE) * both the connection and the transaction remain open to let the caller * decide what to do. 
* * @param done - the result is the current connection */ public void startTx(Handler<AsyncResult<SQLConnection>> done) { getConnection(res -> { if (res.failed()) { log.error(res.cause().getMessage(), res.cause()); done.handle(Future.failedFuture(res.cause())); return; } try { SQLConnection pgTransaction = new SQLConnection(res.result(), res.result().begin(), null); done.handle(Future.succeededFuture(pgTransaction)); } catch (Exception e) { log.error(e.getMessage(), e); done.handle(Future.failedFuture(e.getCause())); } }); } static void finalizeTx(AsyncResult<Void> txResult, PgConnection conn, Handler<AsyncResult<Void>> done ) { if (conn != null) { conn.close(); } if (txResult.failed() && !"Transaction already completed".equals(txResult.cause().getMessage())) { done.handle(Future.failedFuture(txResult.cause())); return; } done.handle(Future.succeededFuture()); } /** * Rollback a SQL transaction started on the connection. This closes the connection. * * @see #startTx(Handler) * @param trans the connection with an open transaction * @param done success or failure */ //@Timer public void rollbackTx(AsyncResult<SQLConnection> trans, Handler<AsyncResult<Void>> done) { try { if (trans.failed()) { done.handle(Future.failedFuture(trans.cause())); return; } trans.result().tx.rollback(res -> finalizeTx(res, trans.result().conn, done)); } catch (Exception e) { done.handle(Future.failedFuture(e)); } } /** * Ends a SQL transaction (commit) started on the connection. This closes the connection. * * @see #startTx(Handler) * @param trans the connection with an open transaction * @param done success or failure */ //@Timer public void endTx(AsyncResult<SQLConnection> trans, Handler<AsyncResult<Void>> done) { try { if (trans.failed()) { done.handle(Future.failedFuture(trans.cause())); return; } trans.result().tx.commit(res -> finalizeTx(res, trans.result().conn, done)); } catch (Exception e) { done.handle(Future.failedFuture(e)); } } /** * The returned handler first closes the SQLConnection and then passes on the AsyncResult to handler. * * <p>The returned Handler ignores (but logs) any failure when opening the connection (conn) or * closing the connection and always passes on the AsyncResult<T>. This is in contrast to * io.vertx.ext.sql.HandlerUtil.closeAndHandleResult where the connection * closing failure suppresses any result or failure of the AsyncResult<T> input. * * @param conn the SQLConnection to close * @param handler where to pass on the input AsyncResult * @return the Handler */ <T> Handler<AsyncResult<T>> closeAndHandleResult( AsyncResult<SQLConnection> conn, Handler<AsyncResult<T>> handler) { return ar -> { if (conn.failed()) { log.error("Opening SQLConnection failed: " + conn.cause().getMessage(), conn.cause()); handler.handle(ar); return; } SQLConnection sqlConnection = conn.result(); if (sqlConnection.conn != null) { sqlConnection.conn.close(); } cancelConnectionTimeoutTimer(sqlConnection); handler.handle(ar); }; } /** * Insert entity into table. Create a new id UUID and return it via replyHandler. * @param table database table (without schema) * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the result. */ public void save(String table, Object entity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, /* id */ null, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. 
* @param table database table (without schema) * @param entity a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param replyHandler returns any errors and the result. */ public void save(String table, Object entity, boolean returnId, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, /* id */ null, entity, returnId, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table and return the updated entity. * @param table database table (without schema) * @param id primary key for the record * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the entity after applying any database INSERT triggers */ <T> void saveAndReturnUpdatedEntity(String table, String id, T entity, Handler<AsyncResult<T>> replyHandler) { getSQLConnection(conn -> saveAndReturnUpdatedEntity(conn, table, id, entity, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, boolean returnId, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, returnId, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, boolean returnId, boolean upsert, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, returnId, upsert, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table, or update it if it already exists. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the id of the entity. 
*/ public void upsert(String table, String id, Object entity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, /* returnId */ true, /* upsert */ true, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert or update. * * <p>Needed if upserting binary data as base64 where converting it to a json will corrupt the data * otherwise this function is not needed as the default is true * example: * byte[] data = ......; * JsonArray jsonArray = new JsonArray().add(data); * .upsert(TABLE_NAME, id, jsonArray, false, replyHandler -> { * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity either a POJO, or a JsonArray containing a byte[] element, see convertEntity * @param convertEntity true if entity is a POJO, false if entity is a JsonArray * @param replyHandler returns any errors and the result (see returnId). */ public void upsert(String table, String id, Object entity, boolean convertEntity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, /* returnId */ true, /* upsert */ true, /* convertEntity */ convertEntity, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity either a POJO, or a JsonArray containing a byte[] element, see convertEntity * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param convertEntity true if entity is a POJO, false if entity is a JsonArray * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, boolean returnId, boolean upsert, boolean convertEntity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, returnId, upsert, convertEntity, closeAndHandleResult(conn, replyHandler))); } /** * Save entity in table using the sqlConnection. Return the * created id via the replyHandler. * * @param sqlConnection connection with transaction * @param table where to insert the entity record * @param entity the record to insert, a POJO (plain old java object) * @param replyHandler where to report success status and the created id */ public void save(AsyncResult<SQLConnection> sqlConnection, String table, Object entity, Handler<AsyncResult<String>> replyHandler) { save(sqlConnection, table, /* id */ null, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, replyHandler); } /** * Save entity in table. Use the transaction of sqlConnection. Return the id * of the id field (primary key) via the replyHandler. If id (primary key) and * the id of entity (jsonb field) are different you may need a trigger in the * database to sync them. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. 
* @param entity the record to insert, a POJO (plain old java object) * @param replyHandler where to report success status and the final id of the id field */ public void save(AsyncResult<SQLConnection> sqlConnection, String table, String id, Object entity, Handler<AsyncResult<String>> replyHandler) { save(sqlConnection, table, id, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, replyHandler); } /** * Save entity in table. Use the transaction of sqlConnection. Return the id * of the id field (primary key) via the replyHandler. If id (primary key) and * the id of entity (jsonb field) are different you may need a trigger in the * database to sync them. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. * @param entity the record to insert, a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param replyHandler where to report success status and the final id of the id field */ public void save(AsyncResult<SQLConnection> sqlConnection, String table, String id, Object entity, boolean returnId, boolean upsert, Handler<AsyncResult<String>> replyHandler) { save(sqlConnection, table, id, entity, returnId, upsert, /* convertEntity */ true, replyHandler); } /** * Save entity in table. Use the transaction of sqlConnection. Return the id * of the id field (primary key) via the replyHandler. If id (primary key) and * the id of entity (jsonb field) are different you may need a trigger in the * database to sync them. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. * @param entity the record to insert, either a POJO or a JsonArray, see convertEntity * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param convertEntity true if entity is a POJO, false if entity is a JsonArray * @param replyHandler where to report success status and the final id of the id field */ @SuppressWarnings({"squid:S00107"}) // Method has more than 7 parameters public void save(AsyncResult<SQLConnection> sqlConnection, String table, String id, Object entity, boolean returnId, boolean upsert, boolean convertEntity, Handler<AsyncResult<String>> replyHandler) { if (log.isDebugEnabled()) { log.debug("save (with connection and id) called on " + table); } try { if (sqlConnection.failed()) { replyHandler.handle(Future.failedFuture(sqlConnection.cause())); return; } long start = System.nanoTime(); String sql = INSERT_CLAUSE + schemaName + DOT + table + " (id, jsonb) VALUES ($1, " + (convertEntity ? "$2" : "$2::text") + ")" + (upsert ? " ON CONFLICT (id) DO UPDATE SET jsonb=EXCLUDED.jsonb" : "") + " RETURNING " + (returnId ? "id" : "''"); sqlConnection.result().conn.preparedQuery(sql).execute(Tuple.of( id == null ? UUID.randomUUID() : UUID.fromString(id), convertEntity ? 
pojo2JsonObject(entity) : ((JsonArray)entity).getString(0) ), query -> { statsTracker(SAVE_STAT_METHOD, table, start); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); } else { RowSet<Row> result = query.result(); String res = result.iterator().next().getValue(0).toString(); replyHandler.handle(Future.succeededFuture(res)); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Save entity in table and return the updated entity. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. * @param entity the record to insert, a POJO * @param replyHandler where to report success status and the entity after applying any database INSERT triggers */ private <T> void saveAndReturnUpdatedEntity(AsyncResult<SQLConnection> sqlConnection, String table, String id, T entity, Handler<AsyncResult<T>> replyHandler) { log.info("save (with connection and id) called on " + table); if (sqlConnection.failed()) { log.error(sqlConnection.cause().getMessage(), sqlConnection.cause()); replyHandler.handle(Future.failedFuture(sqlConnection.cause())); return; } try { long start = System.nanoTime(); String sql = INSERT_CLAUSE + schemaName + DOT + table + " (id, jsonb) VALUES ($1, $2) RETURNING jsonb"; sqlConnection.result().conn.preparedQuery(sql).execute( Tuple.of(id == null ? UUID.randomUUID() : UUID.fromString(id), pojo2JsonObject(entity)), query -> { statsTracker(SAVE_STAT_METHOD, table, start); if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); replyHandler.handle(Future.failedFuture(query.cause())); return; } try { RowSet<Row> result = query.result(); String updatedEntityString = result.iterator().next().getValue(0).toString(); @SuppressWarnings("unchecked") T updatedEntity = (T) mapper.readValue(updatedEntityString, entity.getClass()); replyHandler.handle(Future.succeededFuture(updatedEntity)); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Insert the entities into table using a single INSERT statement. * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void saveBatch(String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> saveBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /** * Upsert the entities into table using a single INSERT statement. * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void upsertBatch(String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> upsertBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /** * Insert the entities into table using a single INSERT statement. 
* @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void saveBatch(AsyncResult<SQLConnection> sqlConnection, String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ false, table, entities, replyHandler); } /** * Upsert the entities into table using a single INSERT statement. * @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void upsertBatch(AsyncResult<SQLConnection> sqlConnection, String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ true, table, entities, replyHandler); } /** * Insert or upsert the entities into table using a single INSERT statement. * @param sqlConnection the connection to run on, may be on a transaction * @param upsert true for upsert, false for insert with fail on duplicate id * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ private void saveBatch(AsyncResult<SQLConnection> sqlConnection, boolean upsert, String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { List<Tuple> list = new ArrayList<>(); if (entities != null) { for (int i = 0; i < entities.size(); i++) { String json = entities.getString(i); JsonObject jsonObject = new JsonObject(json); String id = jsonObject.getString("id"); list.add(Tuple.of(id == null ? UUID.randomUUID() : UUID.fromString(id), jsonObject)); } } saveBatchInternal(sqlConnection, upsert, table, list, replyHandler); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } private void saveBatchInternal(AsyncResult<SQLConnection> sqlConnection, boolean upsert, String table, List<Tuple> batch, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { long start = System.nanoTime(); log.info("starting: saveBatch size=" + batch.size()); String sql = INSERT_CLAUSE + schemaName + DOT + table + " (id, jsonb) VALUES ($1, $2)" + (upsert ? 
" ON CONFLICT (id) DO UPDATE SET jsonb = EXCLUDED.jsonb" : "") + RETURNING_ID; if (sqlConnection.failed()) { replyHandler.handle(Future.failedFuture(sqlConnection.cause())); return; } PgConnection connection = sqlConnection.result().conn; connection.preparedQuery(sql).executeBatch(batch, queryRes -> { if (queryRes.failed()) { log.error("saveBatch size=" + batch.size() + SPACE + queryRes.cause().getMessage(), queryRes.cause()); statsTracker("saveBatchFailed", table, start); replyHandler.handle(Future.failedFuture(queryRes.cause())); return; } statsTracker("saveBatch", table, start); if (queryRes.result() != null) { replyHandler.handle(Future.succeededFuture(queryRes.result())); } else { replyHandler.handle(Future.succeededFuture(new LocalRowSet(0))); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /*** * Save a list of POJOs. * POJOs are converted to a JSON String and saved in a single INSERT call. * A random id is generated if POJO's id is null. * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void saveBatch(String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> saveBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /*** * Upsert a list of POJOs. * POJOs are converted to a JSON String and saved or updated in a single INSERT call. * A random id is generated if POJO's id is null. * If a record with the id already exists it is updated (upsert). * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void upsertBatch(String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> upsertBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /*** * Save a list of POJOs. * POJOs are converted to a JSON String and saved in a single INSERT call. * A random id is generated if POJO's id is null. * @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void saveBatch(AsyncResult<SQLConnection> sqlConnection, String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ false, table, entities, replyHandler); } /*** * Upsert a list of POJOs. * POJOs are converted to a JSON String and saved or updated in a single INSERT call. * A random id is generated if POJO's id is null. * If a record with the id already exists it is updated (upsert). 
* @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void upsertBatch(AsyncResult<SQLConnection> sqlConnection, String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ true, table, entities, replyHandler); } private <T> void saveBatch(AsyncResult<SQLConnection> sqlConnection, boolean upsert, String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { List<Tuple> batch = new ArrayList<>(); if (entities == null || entities.isEmpty()) { RowSet<Row> rowSet = new LocalRowSet(0).withColumns(Arrays.asList("id")); replyHandler.handle(Future.succeededFuture(rowSet)); return; } // We must use reflection, the POJOs don't have a interface/superclass in common. Method getIdMethod = entities.get(0).getClass().getDeclaredMethod("getId"); for (Object entity : entities) { Object obj = getIdMethod.invoke(entity); UUID id = obj == null ? UUID.randomUUID() : UUID.fromString((String) obj); batch.add(Tuple.of(id, pojo2JsonObject(entity))); } saveBatchInternal(sqlConnection, upsert, table, batch, replyHandler); } catch (Exception e) { log.error("saveBatch error " + e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * update a specific record associated with the key passed in the id arg * @param table - table to save to (must exist) * @param entity - pojo to save * @param id - key of the entity being updated * @param replyHandler */ public void update(String table, Object entity, String id, Handler<AsyncResult<RowSet<Row>>> replyHandler) { StringBuilder where = new StringBuilder().append(WHERE).append(ID_FIELD).append('='); Cql2PgUtil.appendQuoted(id, where); // proper masking prevents SQL injection update(table, entity, DEFAULT_JSONB_FIELD_NAME, where.toString(), false, replyHandler); } /** * Update 1...n records matching the filter * <br> * Criterion Examples: * <br> * 1. can be mapped from a string in the following format [{"field":"''","value":"","op":""}] * <pre> * Criterion a = json2Criterion("[{\"field\":\"'fund_distributions'->[]->'amount'->>'sum'\",\"value\":120,\"op\":\"<\"}]"); //denotes funds_distribution is an array of objects * Criterion a = json2Criterion("[{"field":"'po_line_status'->>'value'","value":"SENT","op":"like"},{"field":"'owner'->>'value'","value":"MITLIBMATH","op":"="}, {"op":"AND"}]"); * (see postgres query syntax for more examples in the read.me * </pre> * 2. Simple Criterion * <pre> * Criteria b = new Criteria(); * b.field.add("'note'"); * b.operation = "="; * b.value = "a"; * b.isArray = true; //denotes that the queried field is an array with multiple values * Criterion a = new Criterion(b); * </pre> * 3. For a boolean field called rush = false OR note[] contains 'a' * <pre> * Criteria d = new Criteria(); * d.field.add("'rush'"); * d.operation = Criteria.OP_IS_FALSE; * d.value = null; * Criterion a = new Criterion(); * a.addCriterion(d, Criteria.OP_OR, b); * </pre> * 4. 
for the following json: * <pre> * "price": { * "sum": "150.0", * "po_currency": { * "value": "USD", * "desc": "US Dollar" * } * }, * * Criteria c = new Criteria(); * c.addField("'price'").addField("'po_currency'").addField("'value'"); * c.operation = Criteria.OP_LIKE; * c.value = "USD"; * * </pre> * @param table - table to update * @param entity - pojo to set for matching records * @param filter - see example below * @param returnUpdatedIds - return ids of updated records * @param replyHandler * */ public void update(String table, Object entity, Criterion filter, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { String where = null; if(filter != null){ where = filter.toString(); } update(table, entity, DEFAULT_JSONB_FIELD_NAME, where, returnUpdatedIds, replyHandler); } public void update(String table, Object entity, CQLWrapper filter, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { String where = ""; if(filter != null){ where = filter.toString(); } update(table, entity, DEFAULT_JSONB_FIELD_NAME, where, returnUpdatedIds, replyHandler); } public void update(AsyncResult<SQLConnection> conn, String table, Object entity, CQLWrapper filter, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { String where = ""; try { if (filter != null) { where = filter.toString(); } update(conn, table, entity, DEFAULT_JSONB_FIELD_NAME, where, returnUpdatedIds, replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } public void update(AsyncResult<SQLConnection> conn, String table, Object entity, String jsonbField, String whereClause, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } long start = System.nanoTime(); StringBuilder sb = new StringBuilder(); sb.append(whereClause); StringBuilder returning = new StringBuilder(); if (returnUpdatedIds) { returning.append(RETURNING_ID); } try { String q = UPDATE + schemaName + DOT + table + SET + jsonbField + " = $1::jsonb " + whereClause + SPACE + returning; log.debug("update query = " + q); conn.result().conn.preparedQuery(q).execute(Tuple.of(pojo2JsonObject(entity)), query -> { if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); } statsTracker(UPDATE_STAT_METHOD, table, start); replyHandler.handle(query); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } public void update(String table, Object entity, String jsonbField, String whereClause, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> update(conn, table, entity, jsonbField, whereClause, returnUpdatedIds, closeAndHandleResult(conn, replyHandler))); } /** * update a section / field / object in the pojo - * <br> * for example: * <br> if a json called po_line contains the following field * <pre> * "po_line_status": { * "value": "SENT", * "desc": "sent to vendor" * }, * </pre> * this translates into a po_line_status object within the po_line object - to update the entire object / section * create an updateSection object pushing into the section the po line status as the field and the value (string / json / etc...) 
to replace it with * <pre> * a = new UpdateSection(); * a.addField("po_line_status"); * a.setValue(new JsonObject("{\"value\":\"SOMETHING_NEW4\",\"desc\":\"sent to vendor again\"}")); * </pre> * Note that postgres does not update inplace the json but rather will create a new json with the * updated section and then reference the id to that newly created json * <br> * Queries generated will look something like this: * <pre> * * update test.po_line set jsonb = jsonb_set(jsonb, '{po_line_status}', '{"value":"SOMETHING_NEW4","desc":"sent to vendor"}') where _id = 19; * update test.po_line set jsonb = jsonb_set(jsonb, '{po_line_status, value}', '"SOMETHING_NEW5"', false) where _id = 15; * </pre> * * @param table - table to update * @param section - see UpdateSection class * @param when - Criterion object * @param replyHandler * */ public void update(String table, UpdateSection section, Criterion when, boolean returnUpdatedIdsCount, Handler<AsyncResult<RowSet<Row>>> replyHandler) { long start = System.nanoTime(); getConnection(res -> { if (res.succeeded()) { PgConnection connection = res.result(); try { String value = section.getValue().replace("'", "''"); String where = when == null ? "" : when.toString(); String returning = returnUpdatedIdsCount ? RETURNING_ID : ""; String q = UPDATE + schemaName + DOT + table + SET + DEFAULT_JSONB_FIELD_NAME + " = jsonb_set(" + DEFAULT_JSONB_FIELD_NAME + "," + section.getFieldsString() + ", '" + value + "', false) " + where + returning; log.debug("update query = " + q); connection.query(q).execute(query -> { connection.close(); statsTracker(UPDATE_STAT_METHOD, table, start); if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); replyHandler.handle(Future.failedFuture(query.cause())); } else { replyHandler.handle(Future.succeededFuture(query.result())); } }); } catch (Exception e) { if (connection != null){ connection.close(); } log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } else { log.error(res.cause().getMessage(), res.cause()); replyHandler.handle(Future.failedFuture(res.cause())); } }); } /** * Delete by id. * @param table table name without schema * @param id primary key value of the record to delete */ public void delete(String table, String id, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, id, closeAndHandleResult(conn, replyHandler))); } /** * Delete by id. * @param connection where to run, can be within a transaction * @param table table name without schema * @param id primary key value of the record to delete * @param replyHandler */ public void delete(AsyncResult<SQLConnection> connection, String table, String id, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (connection.failed()) { replyHandler.handle(Future.failedFuture(connection.cause())); return; } connection.result().conn.preparedQuery( "DELETE FROM " + schemaName + DOT + table + WHERE + ID_FIELD + "=$1") .execute(Tuple.of(UUID.fromString(id)), replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } /** * Delete by CQL wrapper. * @param table table name without schema * @param cql which records to delete */ public void delete(String table, CQLWrapper cql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, cql, closeAndHandleResult(conn, replyHandler))); } /** * Delete by CQL wrapper. 
* @param connection where to run, can be within a transaction * @param table table name without schema * @param cql which records to delete */ public void delete(AsyncResult<SQLConnection> connection, String table, CQLWrapper cql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { String where = cql == null ? "" : cql.toString(); doDelete(connection, table, where, replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } /** * Delete based on filter * @param table table name without schema * @param filter * @param replyHandler */ public void delete(String table, Criterion filter, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, filter, closeAndHandleResult(conn, replyHandler))); } /** * Delete as part of a transaction * @param conn where to run, can be within a transaction * @param table table name without schema * @param filter which records to delete */ public void delete(AsyncResult<SQLConnection> conn, String table, Criterion filter, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { String where = filter == null ? "" : filter.toString(); doDelete(conn, table, where, replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } /** * delete based on jsons matching the field/value pairs in the pojo (which is first converted to json and then similar jsons are searched) * --> do not use on large tables without checking as the @> will not use a btree * @param table * @param entity * @param replyHandler */ public void delete(String table, Object entity, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, entity, closeAndHandleResult(conn, replyHandler))); } public void delete(AsyncResult<SQLConnection> connection, String table, Object entity, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { long start = System.nanoTime(); if (connection.failed()) { replyHandler.handle(Future.failedFuture(connection.cause())); return; } String sql = DELETE + FROM + schemaName + DOT + table + WHERE + DEFAULT_JSONB_FIELD_NAME + "@>$1"; log.debug("delete by entity, query = " + sql + "; $1=" + entity); connection.result().conn.preparedQuery(sql).execute(Tuple.of(pojo2JsonObject(entity)), delete -> { statsTracker(DELETE_STAT_METHOD, table, start); if (delete.failed()) { log.error(delete.cause().getMessage(), delete.cause()); replyHandler.handle(Future.failedFuture(delete.cause())); return; } replyHandler.handle(Future.succeededFuture(delete.result())); }); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } private void doDelete(AsyncResult<SQLConnection> connection, String table, String where, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { long start = System.nanoTime(); String sql = DELETE + FROM + schemaName + DOT + table + " " + where; log.debug("doDelete query = " + sql); if (connection.failed()) { replyHandler.handle(Future.failedFuture(connection.cause())); return; } connection.result().conn.query(sql).execute(query -> { statsTracker(DELETE_STAT_METHOD, table, start); if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); replyHandler.handle(Future.failedFuture(query.cause())); return; } replyHandler.handle(Future.succeededFuture(query.result())); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * * @param <T> * @param table * @param clazz * @param fieldName * @param where * @param returnCount * @param returnIdField * @param setId - 
unused, the database trigger will always set jsonb->'id' automatically * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String fieldName, String where, boolean returnCount, boolean returnIdField, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fieldName, where, returnCount, returnIdField, setId, null /* facets */, replyHandler); } /** * * @param <T> * @param table * @param clazz * @param fieldName * @param where * @param returnCount * @param returnIdField * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param facets * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String fieldName, String where, boolean returnCount, boolean returnIdField, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fieldName, where, returnCount, returnIdField, setId, facets, null /*distinctOn*/, replyHandler); } /** * * @param <T> * @param table * @param clazz * @param fieldName * @param where * @param returnCount * @param returnIdField * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param facets * @param distinctOn * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String fieldName, String where, boolean returnCount, boolean returnIdField, boolean setId, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler) { CQLWrapper wrapper = new CQLWrapper().setWhereClause(where); getSQLConnection(conn -> doGet(conn, table, clazz, fieldName, wrapper, returnCount, returnIdField, facets, distinctOn, closeAndHandleResult(conn, replyHandler))); } static class QueryHelper { String table; List<FacetField> facets; String selectQuery; String countQuery; int offset; int limit; public QueryHelper(String table) { this.table = table; } } static class TotaledResults { final RowSet<Row> set; final Integer total; public TotaledResults(RowSet<Row> set, Integer total) { this.set = set; this.total = total; } } /** * low-level getter based on CQLWrapper * @param <T> * @param conn * @param table * @param clazz * @param fieldName * @param wrapper * @param returnCount * @param returnIdField * @param facets * @param distinctOn * @param replyHandler */ private <T> void doGet( AsyncResult<SQLConnection> conn, String table, Class<T> clazz, String fieldName, CQLWrapper wrapper, boolean returnCount, boolean returnIdField, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler ) { if (conn.failed()) { log.error(conn.cause().getMessage(), conn.cause()); replyHandler.handle(Future.failedFuture(conn.cause())); return; } PgConnection connection = conn.result().conn; try { QueryHelper queryHelper = buildQueryHelper(table, fieldName, wrapper, returnIdField, facets, distinctOn); if (returnCount) { processQueryWithCount(connection, queryHelper, GET_STAT_METHOD, totaledResults -> processResults(totaledResults.set, totaledResults.total, queryHelper.offset, queryHelper.limit, clazz), replyHandler); } else { processQuery(connection, queryHelper, null, GET_STAT_METHOD, totaledResults -> processResults(totaledResults.set, totaledResults.total, queryHelper.offset, queryHelper.limit, clazz), replyHandler); } } catch (Exception e) { 
log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Streamed GET with CQLWrapper (T variant, no facets) * @param <T> * @param table * @param entity * @param fieldName usually "jsonb" * @param filter usually CQL query * @param returnIdField * @param distinctOn may be null * @param streamHandler called for each record * @param replyHandler called when query is complete * @deprecated This function is deprecated because either you'll have to * buffer whole HTTP buffer in memory to produce HTTP status; or you'll have to * return a fake error. Furthermore, this API does not provide totalCount * Use streamGet with {@link PostgresClientStreamResult} instead. * {@link #streamGet(java.lang.String, java.lang.Object, java.lang.String, * org.folio.rest.persist.cql.CQLWrapper, boolean, java.lang.String, * io.vertx.core.Handler, io.vertx.core.Handler)} */ @Deprecated @SuppressWarnings({"squid:S00107"}) // has more than 7 parameters public <T> void streamGet(String table, T entity, String fieldName, CQLWrapper filter, boolean returnIdField, String distinctOn, Handler<T> streamHandler, Handler<AsyncResult<Void>> replyHandler) { Class<T> clazz = (Class<T>) entity.getClass(); streamGet(table, clazz, fieldName, filter, returnIdField, distinctOn, res -> { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } PostgresClientStreamResult<T> streamResult = res.result(); streamResult.handler(streamHandler); streamResult.endHandler(x -> replyHandler.handle(Future.succeededFuture())); streamResult.exceptionHandler(e -> replyHandler.handle(Future.failedFuture(e))); }); } /** * Stream GET with CQLWrapper, no facets {@link org.folio.rest.persist.PostgresClientStreamResult} * @param <T> * @param table * @param clazz * @param fieldName * @param filter * @param returnIdField * @param distinctOn may be null * @param replyHandler AsyncResult; on success with result {@link org.folio.rest.persist.PostgresClientStreamResult} */ public <T> void streamGet(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnIdField, String distinctOn, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { streamGet(table, clazz, fieldName, filter, returnIdField, distinctOn, null, 0, replyHandler); } /** * Stream GET with CQLWrapper, no facets {@link org.folio.rest.persist.PostgresClientStreamResult} * @param <T> * @param table * @param clazz * @param fieldName * @param filter * @param returnIdField * @param distinctOn may be null * @param queryTimeout query timeout in milliseconds, or 0 for no timeout * @param replyHandler AsyncResult; on success with result {@link PostgresClientStreamResult} */ @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters public <T> void streamGet(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnIdField, String distinctOn, int queryTimeout, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { streamGet(table, clazz, fieldName, filter, returnIdField, distinctOn, null, queryTimeout, replyHandler); } /** * Stream GET with CQLWrapper and facets {@link org.folio.rest.persist.PostgresClientStreamResult} * @param <T> * @param table * @param clazz * @param fieldName * @param filter * @param returnIdField must be true if facets are in passed * @param distinctOn may be null * @param facets for no facets: null or Collections.emptyList() * @param replyHandler AsyncResult; on success with result {@link org.folio.rest.persist.PostgresClientStreamResult} */ 
@SuppressWarnings({"squid:S00107"}) // Method has >7 parameters public <T> void streamGet(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnIdField, String distinctOn, List<FacetField> facets, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { getSQLConnection(0, conn -> streamGet(conn, table, clazz, fieldName, filter, returnIdField, distinctOn, facets, replyHandler)); } /** * Stream GET with CQLWrapper and facets {@link org.folio.rest.persist.PostgresClientStreamResult} * @param <T> * @param table * @param clazz * @param fieldName * @param filter * @param returnIdField must be true if facets are in passed * @param distinctOn may be null * @param facets for no facets: null or Collections.emptyList() * @param queryTimeout query timeout in milliseconds, or 0 for no timeout * @param replyHandler AsyncResult; on success with result {@link PostgresClientStreamResult} */ @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters public <T> void streamGet(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnIdField, String distinctOn, List<FacetField> facets, int queryTimeout, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { getSQLConnection(queryTimeout, conn -> streamGet(conn, table, clazz, fieldName, filter, returnIdField, distinctOn, facets, replyHandler)); } /** * streamGet with existing transaction/connection * @param <T> * @param connResult * @param table * @param clazz * @param fieldName * @param wrapper * @param returnIdField * @param distinctOn * @param facets * @param replyHandler */ @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters <T> void streamGet(AsyncResult<SQLConnection> connResult, String table, Class<T> clazz, String fieldName, CQLWrapper wrapper, boolean returnIdField, String distinctOn, List<FacetField> facets, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { if (connResult.failed()) { log.error(connResult.cause().getMessage(), connResult.cause()); replyHandler.handle(Future.failedFuture(connResult.cause())); return; } doStreamGetCount(connResult.result(), table, clazz, fieldName, wrapper, returnIdField, distinctOn, facets, replyHandler); } /** * private for now, might be public later (and renamed) * @param <T> * @param connection * @param table * @param clazz * @param fieldName * @param wrapper * @param returnIdField * @param distinctOn * @param facets * @param replyHandler */ @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters private <T> void doStreamGetCount(SQLConnection connection, String table, Class<T> clazz, String fieldName, CQLWrapper wrapper, boolean returnIdField, String distinctOn, List<FacetField> facets, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { try { QueryHelper queryHelper = buildQueryHelper(table, fieldName, wrapper, returnIdField, facets, distinctOn); connection.conn.query(queryHelper.countQuery).execute(countQueryResult -> { if (countQueryResult.failed()) { replyHandler.handle(Future.failedFuture(countQueryResult.cause())); return; } ResultInfo resultInfo = new ResultInfo(); resultInfo.setTotalRecords(countQueryResult.result().iterator().next().getInteger(0)); doStreamGetQuery(connection, queryHelper, resultInfo, clazz, replyHandler); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } <T> void doStreamGetQuery(SQLConnection connection, QueryHelper queryHelper, ResultInfo resultInfo, Class<T> clazz, 
Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { // decide if we need to close transaction+connection ourselves final PgConnection closeConnection = connection.tx == null ? connection.conn : null; if (closeConnection != null) { closeConnection.begin(); } connection.conn.prepare(queryHelper.selectQuery, prepareRes -> { if (prepareRes.failed()) { connection.conn.close(); log.error(prepareRes.cause().getMessage(), prepareRes.cause()); replyHandler.handle(Future.failedFuture(prepareRes.cause())); return; } PreparedStatement pq = prepareRes.result(); RowStream<Row> stream = pq.createStream(STREAM_GET_DEFAULT_CHUNK_SIZE, Tuple.tuple()); PostgresClientStreamResult<T> streamResult = new PostgresClientStreamResult(resultInfo); doStreamRowResults(stream, clazz, closeConnection, queryHelper, streamResult, replyHandler); }); } private static List<String> getColumnNames(Row row) { List<String> columnNames = new ArrayList<>(); for (int i = 0; row.getColumnName(i) != null; i++) { columnNames.add(row.getColumnName(i)); } return columnNames; } private void closeIfNonNull(PgConnection pgConnection) { if (pgConnection != null) { pgConnection.close(); } } <T> void doStreamRowResults(RowStream<Row> sqlRowStream, Class<T> clazz, PgConnection pgConnection, QueryHelper queryHelper, PostgresClientStreamResult<T> streamResult, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { ResultInfo resultInfo = streamResult.resultInto(); Promise<PostgresClientStreamResult<T>> promise = Promise.promise(); ResultsHelper<T> resultsHelper = new ResultsHelper<>(clazz); boolean isAuditFlavored = isAuditFlavored(resultsHelper.clazz); Map<String, Method> externalColumnSetters = new HashMap<>(); AtomicInteger resultCount = new AtomicInteger(); sqlRowStream.handler(r -> { try { // for first row, get column names if (resultsHelper.offset == 0) { List<String> columnNames = getColumnNames(r); collectExternalColumnSetters(columnNames, resultsHelper.clazz, isAuditFlavored, externalColumnSetters); } T objRow = (T) deserializeRow(resultsHelper, externalColumnSetters, isAuditFlavored, r); if (!resultsHelper.facet) { resultCount.incrementAndGet(); if (!promise.future().isComplete()) { // end of facets (if any) .. 
produce result resultsHelper.facets.forEach((k, v) -> resultInfo.getFacets().add(v)); promise.complete(streamResult); replyHandler.handle(promise.future()); } streamResult.fireHandler(objRow); } resultsHelper.offset++; } catch (Exception e) { if (!promise.future().isComplete()) { promise.complete(streamResult); replyHandler.handle(promise.future()); } sqlRowStream.close(); // does not really stop stream for vertx-pg-client closeIfNonNull(pgConnection); log.error(e.getMessage(), e); streamResult.fireExceptionHandler(e); } }).endHandler(v2 -> { closeIfNonNull(pgConnection); resultInfo.setTotalRecords( getTotalRecords(resultCount.get(), resultInfo.getTotalRecords(), queryHelper.offset, queryHelper.limit)); try { if (!promise.future().isComplete()) { promise.complete(streamResult); replyHandler.handle(promise.future()); } streamResult.fireEndHandler(); } catch (Exception ex) { streamResult.fireExceptionHandler(ex); } }).exceptionHandler(e -> { closeIfNonNull(pgConnection); if (!promise.future().isComplete()) { promise.complete(streamResult); replyHandler.handle(promise.future()); } streamResult.fireExceptionHandler(e); }); } QueryHelper buildQueryHelper( String table, String fieldName, CQLWrapper wrapper, boolean returnIdField, List<FacetField> facets, String distinctOn) throws IOException, TemplateException { if (wrapper == null) { wrapper = new CQLWrapper(); } String addIdField = ""; if (returnIdField) { addIdField = COMMA + ID_FIELD; } if (!"null".equals(fieldName) && fieldName.contains("*")) { // if we are requesting all fields (*) , then dont add the id field to the select // this will return two id columns which will create ambiguity in facet queries addIdField = ""; } QueryHelper queryHelper = new QueryHelper(table); String countOn = "*"; String distinctOnClause = ""; if (distinctOn != null && !distinctOn.isEmpty()) { distinctOnClause = String.format("DISTINCT ON(%s) ", distinctOn); countOn = String.format("DISTINCT(%s)", distinctOn); } queryHelper.selectQuery = SELECT + distinctOnClause + fieldName + addIdField + FROM + schemaName + DOT + table + SPACE + wrapper.toString(); queryHelper.countQuery = SELECT + "COUNT(" + countOn + ")" + FROM + schemaName + DOT + table + SPACE + wrapper.getWhereClause(); if (facets != null && !facets.isEmpty()) { String mainQuery = SELECT + distinctOnClause + fieldName + addIdField + FROM + schemaName + DOT + table + SPACE + wrapper.getWithoutLimOff(); FacetManager facetManager = buildFacetManager(wrapper, queryHelper, mainQuery, facets); // this method call invokes freemarker templating queryHelper.selectQuery = facetManager.generateFacetQuery(); } if (!wrapper.getWhereClause().isEmpty()) { // only do estimation when filter is in use (such as CQL). String estQuery = SELECT + distinctOnClause + fieldName + addIdField + FROM + schemaName + DOT + table + SPACE + wrapper.getWhereClause(); queryHelper.countQuery = SELECT + "count_estimate('" + org.apache.commons.lang.StringEscapeUtils.escapeSql(estQuery) + "')"; } int offset = wrapper.getOffset().get(); if (offset != -1) { queryHelper.offset = offset; } int limit = wrapper.getLimit().get(); queryHelper.limit = limit != -1 ? 
limit : Integer.MAX_VALUE; return queryHelper; } <T> void processQueryWithCount( PgConnection connection, QueryHelper queryHelper, String statMethod, Function<TotaledResults, T> resultSetMapper, Handler<AsyncResult<T>> replyHandler) { long start = System.nanoTime(); log.debug("Attempting count query: " + queryHelper.countQuery); connection.query(queryHelper.countQuery).execute(countQueryResult -> { try { if (countQueryResult.failed()) { log.error("query with count: " + countQueryResult.cause().getMessage() + " - " + queryHelper.countQuery, countQueryResult.cause()); replyHandler.handle(Future.failedFuture(countQueryResult.cause())); return; } int total = countQueryResult.result().iterator().next().getInteger(0); long countQueryTime = (System.nanoTime() - start); StatsTracker.addStatElement(STATS_KEY + COUNT_STAT_METHOD, countQueryTime); log.debug("timer: get " + queryHelper.countQuery + " (ns) " + countQueryTime); if (total <= queryHelper.offset) { log.debug("Skipping query due to no results expected!"); RowSet<Row> emptySet = null; replyHandler.handle(Future.succeededFuture(resultSetMapper.apply(new TotaledResults(emptySet, total)))); return; } processQuery(connection, queryHelper, total, statMethod, resultSetMapper, replyHandler); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } }); } <T> void processQuery( PgConnection connection, QueryHelper queryHelper, Integer total, String statMethod, Function<TotaledResults, T> resultSetMapper, Handler<AsyncResult<T>> replyHandler ) { try { queryAndAnalyze(connection, queryHelper.selectQuery, statMethod, query -> { if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } replyHandler.handle(Future.succeededFuture(resultSetMapper.apply(new TotaledResults(query.result(), total)))); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } private FacetManager buildFacetManager(CQLWrapper wrapper, QueryHelper queryHelper, String mainQuery, List<FacetField> facets) { FacetManager fm = new FacetManager(schemaName + DOT + queryHelper.table); if (wrapper.getWhereClause().isEmpty()) { fm.setWhere(" " + wrapper.getWhereClause()); } fm.setSupportFacets(facets); fm.setIdField(ID_FIELD); fm.setLimitClause(wrapper.getLimit().toString()); fm.setOffsetClause(wrapper.getOffset().toString()); fm.setMainQuery(mainQuery); fm.setSchema(schemaName); fm.setCountQuery(queryHelper.countQuery); return fm; } /** * pass in an entity that is fully / partially populated and the query will return all records matching the * populated fields in the entity - note that this queries the jsonb object, so should not be used to query external * fields * * @param <T> type of the query entity and the result entity * @param table database table to query * @param entity contains the fields to use for the query * @param replyHandler the result contains the entities found */ public <T> void get(String table, T entity, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, entity, returnCount, true /*returnIdField*/, replyHandler); } public <T> void get(String table, T entity, boolean returnCount, boolean returnIdField, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, entity, new String[]{DEFAULT_JSONB_FIELD_NAME}, returnCount, returnIdField, replyHandler); } public <T> void get(String table, T entity, String[] fields, boolean returnCount, boolean returnIdField, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, 
entity, fields, returnCount, returnIdField, -1, -1, replyHandler); } public <T> void get(String table, T entity, String[] fields, boolean returnCount, boolean returnIdField, int offset, int limit, Handler<AsyncResult<Results<T>>> replyHandler) { Criterion criterion = new Criterion(); if (offset != -1) { criterion.setOffset(new Offset(offset)); } if (limit != -1) { criterion.setLimit(new Limit(limit)); } String fieldsStr = Arrays.toString(fields); Class<T> clazz = (Class<T>) entity.getClass(); get(null, table, clazz, fieldsStr.substring(1, fieldsStr.length() - 1), criterion, returnCount, returnIdField, null, replyHandler); } /** * select query * @param table - table to query * @param clazz - class of objects to be returned * @param filter - see Criterion class * @param returnCount - whether to return the amount of records matching the query * @param replyHandler * @throws Exception */ public <T> void get(String table, Class<T> clazz, Criterion filter, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, filter, returnCount, false /*setId*/, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fields, filter, returnCount, setId, null /*facets*/, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { String distinctOn = null; boolean returnIdField = true; get(table, clazz, fields, filter, returnCount, returnIdField, facets, distinctOn, replyHandler); } <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, boolean returnIdField, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler) { String fieldsStr = Arrays.toString(fields); String fieldName = fieldsStr.substring(1, fieldsStr.length() - 1); get(table, clazz, fieldName, filter, returnCount, returnIdField, facets, distinctOn, replyHandler); } <T> void get(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnCount, boolean returnIdField, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler) { getSQLConnection(conn -> doGet(conn, table, clazz, fieldName, filter, returnCount, returnIdField, facets, distinctOn, closeAndHandleResult(conn, replyHandler))); } /** * * @param <T> * @param table * @param clazz * @param fields * @param filter * @param returnCount * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String[] fields, String filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { String where = ""; if(filter != null){ where = filter; } String fieldsStr = Arrays.toString(fields); get(table, clazz, fieldsStr.substring(1, fieldsStr.length()-1), where, returnCount, true, setId, replyHandler); } /** * * @param <T> * @param table * @param clazz * @param filter * @param returnCount * @param setId - unused, the database trigger will always set jsonb->'id' 
automatically * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { String where = ""; if(filter != null){ where = filter; } get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, where, returnCount, setId, replyHandler); } public <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fields, filter, returnCount, false /* setId */, replyHandler); } /* PGUTIL USED VERSION */ public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, filter, returnCount, false /*setId*/, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @deprecated use {@link #get(String, Class, CQLWrapper, boolean, Handler)} instead. */ @Deprecated public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, filter, returnCount, setId, replyHandler); } public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, filter, returnCount, false /* setId */, facets, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @deprecated use {@link #get(String, Class, CQLWrapper, boolean, List, Handler)} instead. 
*/ @Deprecated public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, filter, returnCount, facets, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, filter, returnCount, setId, null, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(AsyncResult<SQLConnection> conn, String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(conn, table, clazz, filter, returnCount, setId, null, replyHandler); } /** * select query * @param table - table to query * @param clazz - class of objects to be returned * @param filter - see Criterion class * @param returnCount - whether to return the amount of records matching the query * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param replyHandler * @throws Exception */ public <T> void get(String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(null, table, clazz, filter, returnCount, setId, facets, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ @SuppressWarnings({"squid:S00107"}) // Method has more than 7 parameters public <T> void get(AsyncResult<SQLConnection> conn, String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(conn, table, clazz, DEFAULT_JSONB_FIELD_NAME, filter, returnCount, false, facets, replyHandler); } @SuppressWarnings({"squid:S00107"}) // Method has more than 7 parameters <T> void get(AsyncResult<SQLConnection> conn, String table, Class<T> clazz, String fieldName, Criterion filter, boolean returnCount, boolean returnIdField, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { CQLWrapper cqlWrapper = new CQLWrapper(filter); if (conn == null) { get(table, clazz, fieldName, cqlWrapper, returnCount, returnIdField, facets, null, replyHandler); } else { doGet(conn, table, clazz, fieldName, cqlWrapper, returnCount, returnIdField, facets, null, replyHandler); } } /** * A FunctionalInterface that may throw an Exception. * * @param <T> input type * @param <R> output type * @param <E> the type of Exception */ @FunctionalInterface public interface FunctionWithException<T, R, E extends Exception> { /** * @param t some input * @return some output * @throws Exception of type E */ R apply(T t) throws E; } /** * Get the jsonb by id. 
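 * <p>The public {@code getById} and {@code getByIdAsString} variants below delegate here.
 * A minimal usage sketch; {@code postgresClient} and the table name are illustrative:
 * <pre>
 * postgresClient.getById("po_line", id, reply -> {
 *   if (reply.succeeded()) {
 *     JsonObject jsonb = reply.result(); // null when no record matches the id
 *   }
 * });
 * </pre>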
* @param table the table to search in * @param id the value of the id field * @param function how to convert the (String encoded) JSON * @param replyHandler the result after applying function */ private <R> void getById(String table, String id, FunctionWithException<String, R, Exception> function, Handler<AsyncResult<R>> replyHandler) { getConnection(res -> { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } PgConnection connection = res.result(); String sql = SELECT + DEFAULT_JSONB_FIELD_NAME + FROM + schemaName + DOT + table + WHERE + ID_FIELD + "= $1"; try { connection.preparedQuery(sql).execute(Tuple.of(UUID.fromString(id)), query -> { connection.close(); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } RowSet<Row> result = query.result(); if (result.size() == 0) { replyHandler.handle(Future.succeededFuture(null)); return; } try { String entity = result.iterator().next().getValue(0).toString(); R r = function.apply(entity); replyHandler.handle(Future.succeededFuture(r)); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); } /** * Get the jsonb by id and return it as a String. * @param table the table to search in * @param id the value of the id field * @param replyHandler the result; the JSON is encoded as a String */ public void getByIdAsString(String table, String id, Handler<AsyncResult<String>> replyHandler) { getById(table, id, string -> string, replyHandler); } /** * Get the jsonb by id and return it as a JsonObject. * @param table the table to search in * @param id the value of the id field * @param replyHandler the result; the JSON is encoded as a JsonObject */ public void getById(String table, String id, Handler<AsyncResult<JsonObject>> replyHandler) { getById(table, id, JsonObject::new, replyHandler); } /** * Get the jsonb by id and return it as a pojo of type T. * @param table the table to search in * @param id the value of the id field * @param clazz the type of the pojo * @param replyHandler the result; the JSON is converted into a T pojo. */ public <T> void getById(String table, String id, Class<T> clazz, Handler<AsyncResult<T>> replyHandler) { getById(table, id, json -> mapper.readValue(json, clazz), replyHandler); } /** * Get jsonb by id for a list of ids. * <p> * The result is a map of all found records where the key is the id * and the value is the jsonb. 
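 * <p>A usage sketch via the public wrapper; {@code postgresClient} and the names are illustrative:
 * <pre>
 * JsonArray ids = new JsonArray().add(id1).add(id2);
 * postgresClient.getById("po_line", ids, reply -> {
 *   if (reply.succeeded()) {
 *     // reply.result() is a Map keyed by id; ids that were not found are simply absent
 *   }
 * });
 * </pre>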
* * @param table the table to search in * @param ids the values of the id field * @param function how to convert the (String encoded) JSON * @param replyHandler the result after applying function */ private <R> void getById(String table, JsonArray ids, FunctionWithException<String, R, Exception> function, Handler<AsyncResult<Map<String,R>>> replyHandler) { if (ids == null || ids.isEmpty()) { replyHandler.handle(Future.succeededFuture(Collections.emptyMap())); return; } getConnection(res -> { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } Tuple list = Tuple.tuple(); for (int i = 0; i < ids.size(); i++) { list.addUUID(UUID.fromString(ids.getString(i))); } PgConnection connection = res.result(); StringBuilder sql = new StringBuilder() .append(SELECT).append(ID_FIELD).append(", ").append(DEFAULT_JSONB_FIELD_NAME) .append(FROM).append(schemaName).append(DOT).append(table) .append(WHERE).append(ID_FIELD).append(" IN ($1"); for (int i = 2; i <= ids.size(); i++) { sql.append(", $" + i); } sql.append(")"); connection.preparedQuery(sql.toString()).execute(list, query -> { connection.close(); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } try { Map<String,R> result = new HashMap<>(); Iterator<Row> iterator = query.result().iterator(); while (iterator.hasNext()) { Row row = iterator.next(); result.put(row.getValue(0).toString(), function.apply(row.getValue(1).toString())); } replyHandler.handle(Future.succeededFuture(result)); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); }); } /** * Get the jsonb by id for a list of ids and return each jsonb as a String. * @param table the table to search in * @param ids the values of the id field * @param replyHandler the result; the JSON is encoded as a String */ public void getByIdAsString(String table, JsonArray ids, Handler<AsyncResult<Map<String,String>>> replyHandler) { getById(table, ids, string -> string, replyHandler); } /** * Get the jsonb by id for a list of ids and return each jsonb as a JsonObject. * @param table the table to search in * @param ids the values of the id field * @param replyHandler the result; the JSON is encoded as a JsonObject */ public void getById(String table, JsonArray ids, Handler<AsyncResult<Map<String,JsonObject>>> replyHandler) { getById(table, ids, JsonObject::new, replyHandler); } /** * Get the jsonb by id for a list of ids and return each jsonb as pojo of type T. * @param table the table to search in * @param ids the values of the id field * @param clazz the type of the pojo * @param replyHandler the result; the JSON is encoded as a T pojo */ public <T> void getById(String table, JsonArray ids, Class<T> clazz, Handler<AsyncResult<Map<String,T>>> replyHandler) { getById(table, ids, json -> mapper.readValue(json, clazz), replyHandler); } static class ResultsHelper<T> { final List<T> list; final Map<String, org.folio.rest.jaxrs.model.Facet> facets; final RowSet<Row> resultSet; final Class<T> clazz; int total; int offset; boolean facet; public ResultsHelper(RowSet<Row> resultSet, int total, Class<T> clazz) { this.list = new ArrayList<>(); this.facets = new HashMap<>(); this.resultSet = resultSet; this.clazz= clazz; this.total = total; this.offset = 0; } public ResultsHelper(Class<T> clazz) { this.list = new ArrayList<>(); this.facets = new HashMap<>(); this.resultSet = null; this.clazz= clazz; this.offset = 0; } } /** * converts a result set into pojos - handles 3 types of queries: * 1. 
a regular query will return N rows, where each row contains Y columns. one of those columns is the jsonb * column which is mapped into a pojo. each row will also contain the count column (if count was requested for * the query), other fields , like updated date may also be returned if they were requested in the select. * 1a. note that there is an attempt to map external (non jsonb) columns to fields in the pojo. for example, * a column called update_date will attempt to map its value to a field called updateDate in the pojo. however, * for this to happen, the query must select the update_date -> select id,jsonb,update_date from .... * 2. a facet query returns 2 columns, a uuid and a jsonb column. the results of the query are returned as * id and json rows. facets are returned as jsonb values: * {"facetValues": [{"count": 542,"value": "11 ed."}], "type": "name"} * (along with a static '00000000-0000-0000-0000-000000000000' uuid) * the count for a facet query is returned in the following manner: * {"count": 501312} , with a static uuid as the facets * 3. audit queries - queries that query an audit table, meaning the clazz parameter passed in has a jsonb member. * * @param rs * @param total * @param clazz * @return */ <T> Results<T> processResults(RowSet<Row> rs, Integer total, int offset, int limit, Class<T> clazz) { long start = System.nanoTime(); if (total == null) { // NOTE: this may not be an accurate total, may be better for it to be 0 or null total = rs.rowCount(); } ResultsHelper<T> resultsHelper = new ResultsHelper<>(rs, total, clazz); deserializeResults(resultsHelper); ResultInfo resultInfo = new ResultInfo(); resultsHelper.facets.forEach((k , v) -> resultInfo.getFacets().add(v)); Integer totalRecords = getTotalRecords(resultsHelper.list.size(), resultsHelper.total, offset, limit); resultInfo.setTotalRecords(totalRecords); Results<T> results = new Results<>(); results.setResults(resultsHelper.list); results.setResultInfo(resultInfo); statsTracker(PROCESS_RESULTS_STAT_METHOD, clazz.getSimpleName(), start); return results; } /** * * @param resultsHelper */ <T> void deserializeResults(ResultsHelper<T> resultsHelper) { if (resultsHelper.resultSet == null) { return; } boolean isAuditFlavored = isAuditFlavored(resultsHelper.clazz); Map<String, Method> externalColumnSetters = new HashMap<>(); collectExternalColumnSetters( resultsHelper.resultSet.columnsNames(), resultsHelper.clazz, isAuditFlavored, externalColumnSetters ); RowIterator<Row> iterator = resultsHelper.resultSet.iterator(); while (iterator.hasNext()) { Row row = iterator.next(); try { T objRow = (T) deserializeRow(resultsHelper, externalColumnSetters, isAuditFlavored, row); if (!resultsHelper.facet) { resultsHelper.list.add(objRow); } } catch (Exception e) { log.error(e.getMessage(), e); resultsHelper.list.add(null); } } } /** * * @param resultsHelper * @param externalColumnSetters * @param isAuditFlavored * @param row */ <T> Object deserializeRow( ResultsHelper<T> resultsHelper, Map<String, Method> externalColumnSetters, boolean isAuditFlavored, Row row ) throws IOException, InstantiationException, IllegalAccessException, InvocationTargetException { Object jo = row.getValue(DEFAULT_JSONB_FIELD_NAME); Object o = null; resultsHelper.facet = false; if (!isAuditFlavored && jo != null) { try { // is this a facet entry - if so process it, otherwise will throw an exception // and continue trying to map to the pojos o = mapper.readValue(jo.toString(), org.folio.rest.jaxrs.model.Facet.class); org.folio.rest.jaxrs.model.Facet of = 
(org.folio.rest.jaxrs.model.Facet) o; org.folio.rest.jaxrs.model.Facet facet = resultsHelper.facets.get(of.getType()); if (facet == null) { resultsHelper.facets.put(of.getType(), of); } else { facet.getFacetValues().add(of.getFacetValues().get(0)); } resultsHelper.facet = true; return o; } catch (Exception e) { o = mapper.readValue(jo.toString(), resultsHelper.clazz); } } else { o = resultsHelper.clazz.newInstance(); } populateExternalColumns(externalColumnSetters, o, row); return o; } /** * an exception to having the jsonb column and the fields within the json * get mapped to the corresponding clazz is a case where the * clazz has a jsonb field (member), for example an audit class which contains a field called * jsonb - meaning it encapsulates the real object for example for auditing purposes * (contains the jsonb object as well as some other fields). In such a * case, do not map the clazz to the content of the jsonb - but rather set the jsonb named field of the clazz * with the jsonb column value * * @param clazz * @return */ <T> boolean isAuditFlavored(Class<T> clazz) { boolean isAuditFlavored = false; try { clazz.getDeclaredField(DEFAULT_JSONB_FIELD_NAME); isAuditFlavored = true; } catch (NoSuchFieldException nse) { if (log.isDebugEnabled()) { log.debug("non audit table, no " + DEFAULT_JSONB_FIELD_NAME + " found in json"); } } return isAuditFlavored; } /** * get the class methods in order to populate jsonb object from external columns * abiding to audit mode * * @param columnNames * @param clazz * @param isAuditFlavored * @param externalColumnSetters */ <T> void collectExternalColumnSetters(List<String> columnNames, Class<T> clazz, boolean isAuditFlavored, Map<String, Method> externalColumnSetters) { for (String columnName : columnNames) { if ((isAuditFlavored || !columnName.equals(DEFAULT_JSONB_FIELD_NAME)) && !columnName.equals(ID_FIELD)) { String methodName = databaseFieldToPojoSetter(columnName); for (Method method : clazz.getMethods()) { if (method.getName().equals(methodName)) { externalColumnSetters.put(columnName, method); } } } } } /** * populate jsonb object with values from external columns - for example: * if there is an update_date column in the record - try to populate a field updateDate in the * jsonb object - this allows to use the DB for things like triggers to populate the update_date * automatically, but still push them into the jsonb object - the json schema must declare this field * as well - also support the audit mode descrbed above. 
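 * <p>For example (illustrative schema and column names), a query of the form
 * <pre>
 * SELECT id, jsonb, update_date FROM diku_mod_example.po_line
 * </pre>
 * makes the {@code update_date} column value available here so that it can be pushed into the
 * pojo through its {@code setUpdateDate(..)} setter (see {@link #databaseFieldToPojoSetter(String)}).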
* NOTE: that the query must request any field it wants to get populated into the jsonb obj * * @param externalColumnSetters * @param o * @param row */ void populateExternalColumns(Map<String, Method> externalColumnSetters, Object o, Row row) throws InvocationTargetException, IllegalAccessException { for (Map.Entry<String, Method> entry : externalColumnSetters.entrySet()) { String columnName = entry.getKey(); Method method = entry.getValue(); String[] stringArray = row.getStringArray(columnName); if (stringArray != null) { method.invoke(o, Arrays.asList(stringArray)); } else { method.invoke(o, row.getValue(columnName)); } } } /** * assumes column names are all lower case with multi word column names * separated by an '_' * @param str * @return */ String databaseFieldToPojoSetter(String str) { StringBuilder sb = new StringBuilder(str); sb.replace(0, 1, String.valueOf(Character.toUpperCase(sb.charAt(0)))); for (int i = 0; i < sb.length(); i++) { if (sb.charAt(i) == '_') { sb.deleteCharAt(i); sb.replace(i, i + 1, String.valueOf(Character.toUpperCase(sb.charAt(i)))); } } return "set" + sb.toString(); } /** * Run a select query. * * <p>To update see {@link #execute(String, Handler)}. * * @param sql - the sql query to run * @param replyHandler the query result or the failure */ public void select(String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> select(conn, sql, closeAndHandleResult(conn, replyHandler))); } /** * Run a select query. * * <p>To update see {@link #execute(String, Handler)}. * @param sql - the sql query to run * @param queryTimeout query timeout in milliseconds, or 0 for no timeout * @param replyHandler the query result or the failure */ public void select(String sql, int queryTimeout, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(queryTimeout, conn -> select(conn, sql, closeAndHandleResult(conn, replyHandler)) ); } static void queryAndAnalyze(PgConnection conn, String sql, String statMethod, Handler<AsyncResult<RowSet<Row>>> replyHandler) { long start = System.nanoTime(); conn.query(sql).execute(res -> { long queryTime = (System.nanoTime() - start); StatsTracker.addStatElement(STATS_KEY + statMethod, queryTime); if (res.failed()) { log.error("queryAndAnalyze: " + res.cause().getMessage() + " - " + sql, res.cause()); replyHandler.handle(Future.failedFuture(res.cause())); return; } if (queryTime >= explainQueryThreshold * 1000000) { final String explainQuery = "EXPLAIN ANALYZE " + sql; conn.query(explainQuery).execute(explain -> { replyHandler.handle(res); // not before, so we have conn if it gets closed if (explain.failed()) { log.warn(explainQuery + ": ", explain.cause().getMessage(), explain.cause()); return; } StringBuilder e = new StringBuilder(explainQuery); RowIterator<Row> iterator = explain.result().iterator(); while (iterator.hasNext()) { Row row = iterator.next(); e.append('\n').append(row.getString(0)); } log.warn(e.toString()); }); } else { replyHandler.handle(res); } }); } /** * Run a select query. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param replyHandler The query result or the failure. 
*/ public void select(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } queryAndAnalyze(conn.result().conn, sql, GET_STAT_METHOD, replyHandler); } catch (Exception e) { log.error("select sql: " + e.getMessage() + " - " + sql, e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a parameterized/prepared select query. * * <p>To update see {@link #execute(String, Tuple, Handler)}. * * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void select(String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> select(conn, sql, params, closeAndHandleResult(conn, replyHandler))); } /** * Run a parameterized/prepared select query. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Tuple, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void select(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } conn.result().conn.preparedQuery(sql).execute(params, replyHandler); } catch (Exception e) { log.error("select sql: " + e.getMessage() + " - " + sql, e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a select query and return the first record, or null if there is no result. * * <p>To update see {@link #execute(String, Handler)}. * * @param sql The sql query to run. * @param replyHandler The query result or the failure. */ public void selectSingle(String sql, Handler<AsyncResult<Row>> replyHandler) { getSQLConnection(conn -> selectSingle(conn, sql, closeAndHandleResult(conn, replyHandler))); } /** * Run a select query and return the first record, or null if there is no result. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param replyHandler The query result or the failure. */ public void selectSingle(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<Row>> replyHandler) { selectSingle(conn, sql, Tuple.tuple(), replyHandler); } /** * Run a parameterized/prepared select query and return the first record, or null if there is no result. * * <p>To update see {@link #execute(String, Handler)}. * * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. 
*/ public void selectSingle(String sql, Tuple params, Handler<AsyncResult<Row>> replyHandler) { getSQLConnection(conn -> selectSingle(conn, sql, params, closeAndHandleResult(conn, replyHandler))); } static void selectReturn(AsyncResult<RowSet<Row>> res, Handler<AsyncResult<Row>> replyHandler) { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } try { if (!res.result().iterator().hasNext()) { replyHandler.handle(Future.succeededFuture(null)); return; } replyHandler.handle(Future.succeededFuture(res.result().iterator().next())); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a parameterized/prepared select query and return the first record, or null if there is no result. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void selectSingle(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<Row>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } if (params.size() == 0) { conn.result().conn.query(sql).execute(res -> selectReturn(res, replyHandler)); } else { conn.result().conn.preparedQuery(sql).execute(params, res -> selectReturn(res, replyHandler)); } } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a parameterized/prepared select query returning with an SQLRowStream. * * <p>This never closes the connection conn. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param replyHandler The query result or the failure. */ public void selectStream(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<RowStream<Row>>> replyHandler) { selectStream(conn, sql, Tuple.tuple(), replyHandler); } /** * Run a parameterized/prepared select query returning with an SQLRowStream. * * <p>This never closes the connection conn. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void selectStream(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<RowStream<Row>>> replyHandler) { selectStream(conn, sql, params, STREAM_GET_DEFAULT_CHUNK_SIZE, replyHandler); } void selectStream(AsyncResult<SQLConnection> conn, String sql, Tuple params, int chunkSize, Handler<AsyncResult<RowStream<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } final Transaction tx = conn.result().tx; tx.prepare(sql, res -> { if (res.failed()) { log.error(res.cause().getMessage(), res.cause()); replyHandler.handle(Future.failedFuture(res.cause())); return; } PreparedStatement pq = res.result(); RowStream<Row> rowStream = pq.createStream(chunkSize, params); replyHandler.handle(Future.succeededFuture(rowStream)); }); } catch (Exception e) { log.error("select stream sql: " + e.getMessage() + " - " + sql, e); replyHandler.handle(Future.failedFuture(e)); } } /** * Execute an INSERT, UPDATE or DELETE statement. 
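 * <p>A minimal sketch; {@code postgresClient}, the schema and the table are illustrative.
 * For parameterized statements prefer {@link #execute(String, Tuple, Handler)}.
 * <pre>
 * postgresClient.execute("UPDATE " + schema + ".po_line SET jsonb = jsonb - 'comment'",
 *     reply -> {
 *       if (reply.succeeded()) {
 *         int updated = reply.result().rowCount();
 *       }
 *     });
 * </pre>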
* @param sql - the sql to run * @param replyHandler - the result handler with UpdateResult */ public void execute(String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { execute(sql, Tuple.tuple(), replyHandler); } /** * Get vertx-pg-client connection * @param replyHandler */ public void getConnection(Handler<AsyncResult<PgConnection>> replyHandler) { getClient().getConnection(x -> { if (x.failed()) { replyHandler.handle(Future.failedFuture(x.cause())); return; } try { replyHandler.handle(Future.succeededFuture((PgConnection) x.result())); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); } void getSQLConnection(Handler<AsyncResult<SQLConnection>> handler) { getSQLConnection(0, handler); } void getSQLConnection(int queryTimeout, Handler<AsyncResult<SQLConnection>> handler) { getConnection(res -> { if (res.failed()) { handler.handle(Future.failedFuture(res.cause())); return; } PgConnection pgConnection = res.result(); if (queryTimeout == 0) { handler.handle(Future.succeededFuture(new SQLConnection(pgConnection, null, null))); return; } long timerId = vertx.setTimer(queryTimeout, id -> pgConnection.cancelRequest(ar -> { if (ar.succeeded()) { log.warn( String.format("Cancelling request due to timeout after : %d ms", queryTimeout)); } else { log.warn("Failed to send cancelling request", ar.cause()); } })); SQLConnection sqlConnection = new SQLConnection(pgConnection, null, timerId); handler.handle(Future.succeededFuture(sqlConnection)); }); } private void cancelConnectionTimeoutTimer(SQLConnection sqlConnection) { Long timeId = sqlConnection.timerId; if (timeId != null) { vertx.cancelTimer(timeId); } } /** * Execute a parameterized/prepared INSERT, UPDATE or DELETE statement. * @param sql The SQL statement to run. * @param params The parameters for the placeholders in sql. * @param replyHandler */ public void execute(String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> execute(conn, sql, params, closeAndHandleResult(conn, replyHandler))); } /** * Send an INSERT, UPDATE or DELETE statement within a transaction. * * <p>Example: * <pre> * postgresClient.startTx(beginTx -> { * try { * postgresClient.execute(beginTx, sql, reply -> {... * </pre> * @param conn - connection - see {@link #startTx(Handler)} * @param sql - the sql to run * @param replyHandler - reply handler with UpdateResult */ public void execute(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler){ execute(conn, sql, Tuple.tuple(), replyHandler); } /** * Send an INSERT, UPDATE or DELETE parameterized/prepared statement within a transaction. * * <p>Example: * <pre> * postgresClient.startTx(beginTx -> { * try { * postgresClient.execute(beginTx, sql, params, reply -> {... * </pre> * @param conn - connection - see {@link #startTx(Handler)} * @param sql - the sql to run * @param replyHandler - reply handler with UpdateResult */ public void execute(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } PgConnection connection = conn.result().conn; long start = System.nanoTime(); // more than optimization.. 
preparedQuery does not work for multiple SQL statements if (params.size() == 0) { connection.query(sql).execute(query -> { statsTracker(EXECUTE_STAT_METHOD, sql, start); replyHandler.handle(query); }); } else { connection.preparedQuery(sql).execute(params, query -> { statsTracker(EXECUTE_STAT_METHOD, sql, start); replyHandler.handle(query); }); } } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Create a parameterized/prepared INSERT, UPDATE or DELETE statement and * run it with a list of sets of parameters. * * <p>Example: * <pre> * postgresClient.startTx(beginTx -> { * try { * postgresClient.execute(beginTx, sql, params, reply -> {... * </pre> * @param conn - connection - see {@link #startTx(Handler)} * @param sql - the sql to run * @param params - there is one list entry for each sql invocation containing the parameters for the placeholders. * @param replyHandler - reply handler with one UpdateResult for each list entry of params. */ public void execute(AsyncResult<SQLConnection> conn, String sql, List<Tuple> params, Handler<AsyncResult<List<RowSet<Row>>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } PgConnection sqlConnection = conn.result().conn; List<RowSet<Row>> results = new ArrayList<>(params.size()); Iterator<Tuple> iterator = params.iterator(); Runnable task = new Runnable() { @Override public void run() { if (! iterator.hasNext()) { replyHandler.handle(Future.succeededFuture(results)); return; } Tuple params1 = iterator.next(); sqlConnection.preparedQuery(sql).execute(params1, query -> { if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } results.add(query.result()); this.run(); }); } }; task.run(); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Create a parameterized/prepared INSERT, UPDATE or DELETE statement and * run it with a list of sets of parameters. Wrap all in a transaction. * * @param sql - the sql to run * @param params - there is one list entry for each sql invocation containing the parameters for the placeholders. * @param replyHandler - reply handler with one UpdateResult for each list entry of params. 
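 * <p>A usage sketch; {@code postgresClient}, the schema and the ids are illustrative:
 * <pre>
 * String sql = "DELETE FROM " + schema + ".po_line WHERE id = $1";
 * postgresClient.execute(sql,
 *     Arrays.asList(Tuple.of(UUID.fromString(id1)), Tuple.of(UUID.fromString(id2))),
 *     reply -> {
 *       // on success reply.result() contains one RowSet per Tuple, in submission order
 *     });
 * </pre>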
*/ public void execute(String sql, List<Tuple> params, Handler<AsyncResult<List<RowSet<Row>>>> replyHandler) { startTx(res -> { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } execute(res, sql, params, result -> { if (result.failed()) { rollbackTx(res, rollback -> replyHandler.handle(result)); return; } endTx(res, end -> { if (end.failed()) { replyHandler.handle(Future.failedFuture(end.cause())); return; } replyHandler.handle(result); }); }); }); } /** * For queries where you only want to populate the where clause * <br/> * See {@link #persistentlyCacheResult(String, String, Handler) } * @param cacheName * @param tableName * @param filter * @param replyHandler */ public void persistentlyCacheResult(String cacheName, String tableName, CQLWrapper filter, Handler<AsyncResult<Integer>> replyHandler) { String where = ""; if (filter != null) { try { where = filter.toString(); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); return; } } String q = "SELECT * FROM " + schemaName + DOT + tableName + SPACE + where; persistentlyCacheResult(cacheName, q, replyHandler); } /** * For queries where you only want to populate the where clause * <br/> * See {@link #persistentlyCacheResult(String, String, Handler) } * @param cacheName * @param tableName * @param filter * @param replyHandler */ public void persistentlyCacheResult(String cacheName, String tableName, Criterion filter, Handler<AsyncResult<Integer>> replyHandler) { String where = ""; if (filter != null) { where = filter.toString(); } String q = "SELECT * FROM " + schemaName + DOT + tableName + SPACE + where; persistentlyCacheResult(cacheName, q, replyHandler); } /** * Create a table, a type of materialized view, with the results of a specific query. * This can be very helpful when the query is complex and the data is relatively static. * This will create a table populated with the results from the query (sql2cache). * Further queries can then be run on this table (cacheName) instead of re-executing the complex * sql query over and over again. * <br/> * 1. The table will not track subsequent changes to the source tables * <br/> * 2. The table should be DROPPED when not needed anymore * <br/> * 3. 
To Refresh the table, DROP and Re-call this function * <br/> * Use carefully, index support on created table to be added * @param cacheName - name of the table holding the results of the query * @param sql2cache - the sql query to use to populate the table * @param replyHandler */ public void persistentlyCacheResult(String cacheName, String sql2cache, Handler<AsyncResult<Integer>> replyHandler) { getSQLConnection(conn -> persistentlyCacheResult(conn, cacheName, sql2cache, closeAndHandleResult(conn, replyHandler))); } private void persistentlyCacheResult(AsyncResult<SQLConnection> conn, String cacheName, String sql2cache, Handler<AsyncResult<Integer>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } long start = System.nanoTime(); PgConnection connection = conn.result().conn; String q = "CREATE UNLOGGED TABLE IF NOT EXISTS " + schemaName + DOT + cacheName +" AS " + sql2cache; log.info(q); connection.query(q).execute( query -> { statsTracker("persistentlyCacheResult", "CREATE TABLE AS", start); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); } else { replyHandler.handle(Future.succeededFuture(query.result().rowCount())); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } public void removePersistentCacheResult(String cacheName, Handler<AsyncResult<Integer>> replyHandler) { getSQLConnection(conn -> removePersistentCacheResult(conn, cacheName, closeAndHandleResult(conn, replyHandler))); } private void removePersistentCacheResult(AsyncResult<SQLConnection> conn, String cacheName, Handler<AsyncResult<Integer>> replyHandler){ try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } long start = System.nanoTime(); PgConnection connection = conn.result().conn; connection.query("DROP TABLE " + schemaName + DOT + cacheName).execute(query -> { statsTracker("removePersistentCacheResult", "DROP TABLE " + cacheName, start); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); } else { replyHandler.handle(Future.succeededFuture(query.result().rowCount())); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * @param identifier the identifier to check * @return if the identifier is a valid Postgres identifier and does not contain * letters with diacritical marks or non-Latin letters */ public boolean isValidPostgresIdentifier(String identifier) { return POSTGRES_IDENTIFIER.matcher(identifier).matches(); } /** * Drop the database if it exists. * @param database database name * @throws SQLException on database error * @throws IllegalArgumentException if database name is too long, contains * illegal characters or letters with diacritical marks or non-Latin letters */ public void dropCreateDatabase(String database) throws SQLException { if (! isValidPostgresIdentifier(database)) { throw new IllegalArgumentException("Illegal character in database name: " + database); } try (Connection connection = getStandaloneConnection("postgres", true); Statement statement = connection.createStatement()) { statement.executeUpdate("DROP DATABASE IF EXISTS " + database); //NOSONAR statement.executeUpdate("CREATE DATABASE " + database); //NOSONAR } } /** * Split string into lines. */ private static List<String> lines(String string) { return Arrays.asList(string.split("\\r\\n|\\n|\\r")); } /** * Split the sqlFile into SQL statements. 
* * <a href="https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-DOLLAR-QUOTING"> * Dollar-quoted string constants</a> with $$ or $[0-9a-zA-Z_]+$ are preserved. */ static String [] splitSqlStatements(String sqlFile) { List<String> lines = new ArrayList<>(); Matcher matcher = POSTGRES_DOLLAR_QUOTING.matcher(sqlFile); int searchStart = 0; while (matcher.find()) { lines.addAll(lines(sqlFile.substring(searchStart, matcher.start()))); lines.add(matcher.group()); searchStart = matcher.end(); } lines.addAll(lines(sqlFile.substring(searchStart))); return lines.toArray(new String [0]); } @SuppressWarnings("checkstyle:EmptyBlock") static String [] preprocessSqlStatements(String sqlFile) throws Exception { StringBuilder singleStatement = new StringBuilder(); String[] allLines = splitSqlStatements(sqlFile); List<String> execStatements = new ArrayList<>(); boolean inCopy = false; for (int i = 0; i < allLines.length; i++) { if (allLines[i].toUpperCase().matches("^\\s*(CREATE USER|CREATE ROLE).*") && AES.getSecretKey() != null) { final Pattern pattern = Pattern.compile("PASSWORD\\s*'(.+?)'\\s*", Pattern.CASE_INSENSITIVE); final Matcher matcher = pattern.matcher(allLines[i]); if(matcher.find()){ /** password argument indicated in the create user / role statement */ String newPassword = createPassword(matcher.group(1)); allLines[i] = matcher.replaceFirst(" PASSWORD '" + newPassword +"' "); } } if (allLines[i].trim().startsWith("\ufeff--") || allLines[i].trim().length() == 0 || allLines[i].trim().startsWith("--")) { // this is an sql comment, skip } else if (POSTGRES_COPY_FROM_STDIN.matcher(allLines[i]).matches()) { singleStatement.append(allLines[i]); inCopy = true; } else if (inCopy && (allLines[i].trim().equals("\\."))) { inCopy = false; execStatements.add( singleStatement.toString() ); singleStatement = new StringBuilder(); } else if (allLines[i].trim().endsWith(SEMI_COLON) && !inCopy) { execStatements.add( singleStatement.append(SPACE + allLines[i]).toString() ); singleStatement = new StringBuilder(); } else { if (inCopy) { singleStatement.append("\n"); } else { singleStatement.append(SPACE); } singleStatement.append(allLines[i]); } } String lastStatement = singleStatement.toString(); if (! 
lastStatement.trim().isEmpty()) { execStatements.add(lastStatement); } return execStatements.toArray(new String[]{}); } /** * Will connect to a specific database and execute the commands in the .sql file * against that database.<p /> * NOTE: NOT tested on all types of statements - but on a lot * * @param sqlFile - string of sqls with executable statements * @param stopOnError - stop on first error * @return Future with list of statements that failed; the list may be empty */ public Future<List<String>> runSQLFile(String sqlFile, boolean stopOnError) { Promise<List<String>> promise = Promise.promise(); runSQLFile(sqlFile, stopOnError, promise.future()); return promise.future(); } /** * Will connect to a specific database and execute the commands in the .sql file * against that database.<p /> * NOTE: NOT tested on all types of statements - but on a lot * * @param sqlFile - string of sqls with executable statements * @param stopOnError - stop on first error * @param replyHandler - the handler's result is the list of statements that failed; the list may be empty */ public void runSQLFile(String sqlFile, boolean stopOnError, Handler<AsyncResult<List<String>>> replyHandler){ try { execute(preprocessSqlStatements(sqlFile), stopOnError, replyHandler); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } private Connection getStandaloneConnection(String newDB, boolean superUser) throws SQLException { String host = postgreSQLClientConfig.getString(HOST); int port = postgreSQLClientConfig.getInteger(PORT); String user = postgreSQLClientConfig.getString(_USERNAME); String pass = postgreSQLClientConfig.getString(_PASSWORD); String db = postgreSQLClientConfig.getString(DATABASE); if(newDB != null){ db = newDB; if(!superUser){ pass = newDB; user = newDB; } } return DriverManager.getConnection( "jdbc:postgresql://"+host+":"+port+"/"+db, user , pass); } /** * Copy files via the COPY FROM postgres syntax * Support 3 modes * 1. In line (STDIN) Notice the mandatory \. at the end of all entries to import * COPY config_data (jsonb) FROM STDIN ENCODING 'UTF8'; * {"module":"SETTINGS","config_name":"locale","update_date":"1.1.2017","code":"system.currency_symbol.dk","description":"currency code","default": false,"enabled": true,"value": "kr"} * \. * 2. Copy from a data file packaged in the jar * COPY config_data (jsonb) FROM 'data/locales.data' ENCODING 'UTF8'; * 3. 
Copy from a file on disk (absolute path) * COPY config_data (jsonb) FROM 'C:\\Git\\configuration\\mod-configuration-server\\src\\main\\resources\\data\\locales.data' ENCODING 'UTF8'; * @param copyInStatement * @param connection * @throws Exception */ private void copyIn(String copyInStatement, Connection connection) throws Exception { long totalInsertedRecords = 0; CopyManager copyManager = new CopyManager((BaseConnection) connection); if(copyInStatement.contains("STDIN")){ //run as is int sep = copyInStatement.indexOf("\n"); String copyIn = copyInStatement.substring(0, sep); String data = copyInStatement.substring(sep+1); totalInsertedRecords = copyManager.copyIn(copyIn, new StringReader(data)); } else{ //copy from a file, String[] valuesInQuotes = StringUtils.substringsBetween(copyInStatement , "'", "'"); if(valuesInQuotes.length == 0){ log.warn("SQL statement: COPY FROM, has no STDIN and no file path wrapped in ''"); throw new Exception("SQL statement: COPY FROM, has no STDIN and no file path wrapped in ''"); } //do not read from the file system for now as this needs to support data files packaged in //the jar, read files into memory and load - consider improvements to this String filePath = valuesInQuotes[0]; String data; if(new File(filePath).isAbsolute()){ data = FileUtils.readFileToString(new File(filePath), "UTF8"); } else{ try { //assume running from within a jar, data = ResourceUtils.resource2String(filePath); } catch (Exception e) { //from IDE data = ResourceUtils.resource2String("/"+filePath); } } copyInStatement = copyInStatement.replace("'"+filePath+"'", "STDIN"); totalInsertedRecords = copyManager.copyIn(copyInStatement, new StringReader(data)); } log.info("Inserted " + totalInsertedRecords + " via COPY IN. Tenant: " + tenantId); } private void execute(String[] sql, boolean stopOnError, Handler<AsyncResult<List<String>>> replyHandler){ long s = System.nanoTime(); log.info("Executing multiple statements with id " + Arrays.hashCode(sql)); List<String> results = new ArrayList<>(); vertx.executeBlocking(dothis -> { Connection connection = null; Statement statement = null; boolean error = false; try { /* this should be super user account that is in the config file */ connection = getStandaloneConnection(null, false); connection.setAutoCommit(false); statement = connection.createStatement(); for (int j = 0; j < sql.length; j++) { try { log.info("trying to execute: " + sql[j].substring(0, Math.min(sql[j].length()-1, 1000))); if(sql[j].trim().toUpperCase().startsWith("COPY ")){ copyIn(sql[j], connection); } else{ statement.executeUpdate(sql[j]); //NOSONAR } log.info("Successfully executed: " + sql[j].substring(0, Math.min(sql[j].length()-1, 400))); } catch (Exception e) { results.add(sql[j]); error = true; log.error(e.getMessage(),e); if(stopOnError){ break; } } } try { if(error){ connection.rollback(); log.error("Rollback for: " + Arrays.hashCode(sql)); } else{ connection.commit(); log.info("Successfully committed: " + Arrays.hashCode(sql)); } } catch (Exception e) { error = true; log.error("Commit failed " + Arrays.hashCode(sql) + SPACE + e.getMessage(), e); } } catch(Exception e){ log.error(e.getMessage(), e); error = true; } finally { try { if(statement != null) statement.close(); } catch (Exception e) { log.error(e.getMessage(), e); } try { if(connection != null) connection.close(); } catch (Exception e) { log.error(e.getMessage(), e); } if(error){ dothis.fail("error"); } else{ dothis.complete(); } } }, done -> { logTimer(EXECUTE_STAT_METHOD, "" + Arrays.hashCode(sql), s); 
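// note: the overall reply is always succeeded here; "results" lists the statements that failed
// (an empty list means every statement was executed and committed successfully)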
replyHandler.handle(Future.succeededFuture(results)); }); } private static void rememberEmbeddedPostgres() { embeddedPostgres = new EmbeddedPostgres(Version.Main.V10); } /** * Start an embedded PostgreSQL. * doesn't change the configuration. * * @throws IOException when starting embedded PostgreSQL fails */ public void startEmbeddedPostgres() throws IOException { // starting Postgres setIsEmbedded(true); if (embeddedPostgres == null) { int port = postgreSQLClientConfig.getInteger(PORT); String username = postgreSQLClientConfig.getString(_USERNAME); String password = postgreSQLClientConfig.getString(_PASSWORD); String database = postgreSQLClientConfig.getString(DATABASE); String locale = "en_US.UTF-8"; String operatingSystem = System.getProperty("os.name").toLowerCase(); if (operatingSystem.contains("win")) { locale = "american_usa"; } rememberEmbeddedPostgres(); embeddedPostgres.start("localhost", port, database, username, password, Arrays.asList("-E", "UTF-8", "--locale", locale)); Runtime.getRuntime().addShutdownHook(new Thread(PostgresClient::stopEmbeddedPostgres)); log.info("embedded postgres started on port " + port); } else { log.info("embedded postgres is already running..."); } } /** * .sql files * @param path */ public void importFileEmbedded(String path) { // starting Postgres if (embeddedMode) { if (embeddedPostgres != null) { Optional<PostgresProcess> optionalPostgresProcess = embeddedPostgres.getProcess(); if (optionalPostgresProcess.isPresent()) { log.info("embedded postgress import starting...."); PostgresProcess postgresProcess = optionalPostgresProcess.get(); postgresProcess.importFromFile(new File(path)); log.info("embedded postgress import complete...."); } else { log.warn("embedded postgress is not running..."); } } else { log.info("embedded postgress not enabled"); } } } /** * This is a blocking call - run in an execBlocking statement * import data in a tab delimited file into columns of an existing table * Using only default values of the COPY FROM STDIN Postgres command * @param path - path to the file * @param tableName - name of the table to import the content into */ public void importFile(String path, String tableName) { long recordsImported[] = new long[]{-1}; vertx.<String>executeBlocking(dothis -> { try { String host = postgreSQLClientConfig.getString(HOST); int port = postgreSQLClientConfig.getInteger(PORT); String user = postgreSQLClientConfig.getString(_USERNAME); String pass = postgreSQLClientConfig.getString(_PASSWORD); String db = postgreSQLClientConfig.getString(DATABASE); log.info("Connecting to " + db); Connection con = DriverManager.getConnection( "jdbc:postgresql://"+host+":"+port+"/"+db, user , pass); log.info("Copying text data rows from stdin"); CopyManager copyManager = new CopyManager((BaseConnection) con); FileReader fileReader = new FileReader(path); recordsImported[0] = copyManager.copyIn("COPY "+tableName+" FROM STDIN", fileReader ); } catch (Exception e) { log.error(messages.getMessage("en", MessageConsts.ImportFailed), e); dothis.fail(e); } dothis.complete("Done."); }, whendone -> { if(whendone.succeeded()){ log.info("Done importing file: " + path + ". 
Number of records imported: " + recordsImported[0]); } else{ log.info("Failed importing file: " + path); } }); } public static void stopEmbeddedPostgres() { if (embeddedPostgres != null) { closeAllClients(); LogUtil.formatLogMessage(PostgresClient.class.getName(), "stopEmbeddedPostgres", "called stop on embedded postgres ..."); embeddedPostgres.stop(); embeddedPostgres = null; embeddedMode = false; } } public static String convertToPsqlStandard(String tenantId){ return tenantId.toLowerCase() + "_" + MODULE_NAME; } public static String getModuleName(){ return MODULE_NAME; } /** * @return the tenantId of this PostgresClient */ String getTenantId() { return tenantId; } /** * @return the PostgreSQL schema name for the tenantId and the module name of this PostgresClient. * A PostgreSQL schema name is of the form tenant_module and is used to address tables: * "SELECT * FROM tenant_module.table" */ String getSchemaName() { return schemaName; } /** * Function to correct the estimated result count: * If resultsCount is 0, the result should be not more than offset. * If resultsCount equals limit, the result should be not less than offset + limit. * Otherwise it should be offset + resultsCount. * * @param resultsCount the number of rows returned from the database * @param estimateCount the estimated result count returned by the database * @param offset database offset * @param limit database limit * @return corrected results count */ static Integer getTotalRecords(int resultsCount, Integer estimateCount, int offset, int limit) { if (estimateCount == null) { return null; } if (limit == 0) { return estimateCount; } if (resultsCount == 0) { return Math.min(offset, estimateCount); } else if (resultsCount == limit) { return Math.max(offset + limit, estimateCount); } return offset + resultsCount; } }
domain-models-runtime/src/main/java/org/folio/rest/persist/PostgresClient.java
package org.folio.rest.persist; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import freemarker.template.TemplateException; import io.vertx.core.AsyncResult; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.Handler; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import io.vertx.core.logging.Logger; import io.vertx.core.logging.LoggerFactory; import io.vertx.pgclient.PgConnectOptions; import io.vertx.pgclient.PgConnection; import io.vertx.pgclient.PgPool; import io.vertx.sqlclient.PoolOptions; import io.vertx.sqlclient.PreparedStatement; import io.vertx.sqlclient.Row; import io.vertx.sqlclient.RowIterator; import io.vertx.sqlclient.RowSet; import io.vertx.sqlclient.RowStream; import io.vertx.sqlclient.Transaction; import io.vertx.sqlclient.Tuple; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.StringReader; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.crypto.SecretKey; import org.apache.commons.collections4.map.HashedMap; import org.apache.commons.collections4.map.MultiKeyMap; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.folio.cql2pgjson.util.Cql2PgUtil; import org.folio.rest.jaxrs.model.ResultInfo; import org.folio.rest.persist.Criteria.Criterion; import org.folio.rest.persist.Criteria.Limit; import org.folio.rest.persist.Criteria.Offset; import org.folio.rest.persist.Criteria.UpdateSection; import org.folio.rest.persist.cql.CQLWrapper; import org.folio.rest.persist.facets.FacetField; import org.folio.rest.persist.facets.FacetManager; import org.folio.rest.persist.helpers.LocalRowSet; import org.folio.rest.persist.interfaces.Results; import org.folio.rest.security.AES; import org.folio.rest.tools.PomReader; import org.folio.rest.tools.messages.MessageConsts; import org.folio.rest.tools.messages.Messages; import org.folio.rest.tools.monitor.StatsTracker; import org.folio.rest.tools.utils.Envs; import org.folio.rest.tools.utils.LogUtil; import org.folio.rest.tools.utils.NetworkUtils; import org.folio.rest.tools.utils.ObjectMapperTool; import org.folio.rest.tools.utils.ResourceUtils; import org.postgresql.copy.CopyManager; import org.postgresql.core.BaseConnection; import ru.yandex.qatools.embed.postgresql.EmbeddedPostgres; import ru.yandex.qatools.embed.postgresql.PostgresProcess; import ru.yandex.qatools.embed.postgresql.distribution.Version; /** * @author shale * * currently does not support binary data unless base64 encoded */ public class PostgresClient { public static final String DEFAULT_SCHEMA = "public"; public static final String DEFAULT_JSONB_FIELD_NAME = "jsonb"; static Logger log = LoggerFactory.getLogger(PostgresClient.class); /** default analyze threshold value in milliseconds */ static final long 
EXPLAIN_QUERY_THRESHOLD_DEFAULT = 1000; static final String COUNT_FIELD = "count"; private static final String ID_FIELD = "id"; private static final String RETURNING_ID = " RETURNING id "; private static final String CONNECTION_RELEASE_DELAY = "connectionReleaseDelay"; private static final String MAX_POOL_SIZE = "maxPoolSize"; /** default release delay in milliseconds; after this time an idle database connection is closed */ private static final int DEFAULT_CONNECTION_RELEASE_DELAY = 60000; private static final String POSTGRES_LOCALHOST_CONFIG = "/postgres-conf.json"; private static final int EMBEDDED_POSTGRES_PORT = 6000; private static final int STREAM_GET_DEFAULT_CHUNK_SIZE = 100; private static final String SELECT = "SELECT "; private static final String UPDATE = "UPDATE "; private static final String DELETE = "DELETE "; private static final String FROM = " FROM "; private static final String SET = " SET "; private static final String WHERE = " WHERE "; private static final String INSERT_CLAUSE = "INSERT INTO "; private static final String _PASSWORD = "password"; //NOSONAR private static final String _USERNAME = "username"; private static final String HOST = "host"; private static final String PORT = "port"; private static final String DATABASE = "database"; private static final String DEFAULT_IP = "127.0.0.1"; //NOSONAR private static final String STATS_KEY = PostgresClient.class.getName(); private static final String GET_STAT_METHOD = "get"; private static final String COUNT_STAT_METHOD = "count"; private static final String SAVE_STAT_METHOD = "save"; private static final String UPDATE_STAT_METHOD = "update"; private static final String DELETE_STAT_METHOD = "delete"; private static final String EXECUTE_STAT_METHOD = "execute"; private static final String PROCESS_RESULTS_STAT_METHOD = "processResults"; private static final String SPACE = " "; private static final String DOT = "."; private static final String COMMA = ","; private static final String SEMI_COLON = ";"; private static EmbeddedPostgres embeddedPostgres; private static boolean embeddedMode = false; private static String configPath = null; private static ObjectMapper mapper = ObjectMapperTool.getMapper(); private static MultiKeyMap<Object, PostgresClient> connectionPool = MultiKeyMap.multiKeyMap(new HashedMap<>()); private static final String MODULE_NAME = PomReader.INSTANCE.getModuleName(); private static final Pattern POSTGRES_IDENTIFIER = Pattern.compile("^[a-zA-Z_][0-9a-zA-Z_]{0,62}$"); private static final Pattern POSTGRES_DOLLAR_QUOTING = // \\B = a non-word boundary, the first $ must not be part of an identifier (foo$bar$baz) Pattern.compile("[^\\n\\r]*?\\B(\\$\\w*\\$).*?\\1[^\\n\\r]*", Pattern.DOTALL); private static final Pattern POSTGRES_COPY_FROM_STDIN = // \\b = a word boundary Pattern.compile("^\\s*COPY\\b.*\\bFROM\\s+STDIN\\b.*", Pattern.CASE_INSENSITIVE); private static int embeddedPort = -1; /** analyze threshold value in milliseconds */ private static long explainQueryThreshold = EXPLAIN_QUERY_THRESHOLD_DEFAULT; private final Vertx vertx; private JsonObject postgreSQLClientConfig = null; private final Messages messages = Messages.getInstance(); private PgPool client; private final String tenantId; private final String schemaName; protected PostgresClient(Vertx vertx, String tenantId) throws Exception { this.tenantId = tenantId; this.vertx = vertx; this.schemaName = convertToPsqlStandard(tenantId); init(); } /** * test constructor for unit testing */ private PostgresClient() { this.tenantId = "test"; 
this.vertx = null; this.schemaName = convertToPsqlStandard(tenantId); log.warn("Instantiating test Postgres client! Only use with tests!"); } static PostgresClient testClient() { explainQueryThreshold = 0; return new PostgresClient(); } /** * Log the duration since startNanoTime as a debug message. * @param description text for the log entry * @param sql additional text for the log entry * @param startNanoTime start time as returned by System.nanoTime() */ private void logTimer(String description, String sql, long startNanoTime) { if (! log.isDebugEnabled()) { return; } logTimer(description, sql, startNanoTime, System.nanoTime()); } /** * Log the duration between startNanoTime and endNanoTime as a debug message. * @param description text for the log entry * @param sql additional text for the log entry * @param startNanoTime start time in nanoseconds * @param endNanoTime end time in nanoseconds */ private void logTimer(String description, String sql, long startNanoTime, long endNanoTime) { log.debug(description + " timer: " + sql + " took " + ((endNanoTime - startNanoTime) / 1000000) + " ms"); } /** * Log the duration since startNanoTime at the StatsTracker and as a debug message. * @param descriptionKey key for StatsTracker and text for the log entry * @param sql additional text for the log entry * @param startNanoTime start time as returned by System.nanoTime() */ private void statsTracker(String descriptionKey, String sql, long startNanoTime) { long endNanoTime = System.nanoTime(); StatsTracker.addStatElement(STATS_KEY + DOT + descriptionKey, (endNanoTime - startNanoTime)); if (log.isDebugEnabled()) { logTimer(descriptionKey, sql, startNanoTime, endNanoTime); } } /** * Enable or disable using embedded specific defaults for the * PostgreSQL configuration. They are used if there is no * postgres json config file. * <p> * This function must be invoked before calling the constructor. * <p> * The embedded specific defaults are: * <ul> * <li><code>username = "username"</code></li> * <li><code>password = "password"</code></li> * <li><code>host = "127.0.0.1"</code></li> * <li><code>port = 6000</code></li> * <li><code>database = "postgres"</code></li> * </ul> * * @param embed - whether to use embedded specific defaults */ public static void setIsEmbedded(boolean embed){ embeddedMode = embed; } /** * Set the port that overwrites the port of the embedded PostgreSQL. * This port overwrites any default port and any port set in the * DB_PORT environment variable or the * PostgreSQL configuration file. It is only used when <code>isEmbedded() == true</code> * when invoking the constructor. * <p> * This function must be invoked before calling the constructor. * <p> * Use -1 to not overwrite the port. * * <p>-1 is the default. * * @param port the port for embedded PostgreSQL, or -1 to not overwrite the port */ public static void setEmbeddedPort(int port){ embeddedPort = port; } /** * @return the port number to use for embedded PostgreSQL, or -1 for not overwriting the * port number of the configuration. * @see #setEmbeddedPort(int) */ public static int getEmbeddedPort() { return embeddedPort; } /** * True if embedded specific defaults for the * PostgreSQL configuration should be used if there is no * postgres json config file. * @return true for using embedded specific defaults * @see #setIsEmbedded(boolean) */ public static boolean isEmbedded(){ return embeddedMode; } /** * Set the path to the PostgreSQL connection configuration, * must be called before getInstance() to take effect.
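* <p>Illustrative call (the path shown is an example only):
* <pre>
*  PostgresClient.setConfigFilePath("/my-postgres-conf.json");
* </pre>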
* <p> * This function must be invoked before calling the constructor. * * @param path new path, or null to use the default path "/postgres-conf.json" */ public static void setConfigFilePath(String path){ configPath = path; } /** * @return the path to the PostgreSQL connection configuration file; * this is never null */ public static String getConfigFilePath(){ if(configPath == null){ configPath = POSTGRES_LOCALHOST_CONFIG; } return configPath; } static void setExplainQueryThreshold(long ms) { explainQueryThreshold = ms; } static Long getExplainQueryThreshold() { return explainQueryThreshold; } /** * Instance for the tenantId from connectionPool or created and * added to connectionPool. * @param vertx the Vertx to use * @param tenantId the tenantId the instance is for * @return the PostgresClient instance, or null on error */ private static PostgresClient getInstanceInternal(Vertx vertx, String tenantId) { // assumes a single thread vertx model so no sync needed PostgresClient postgresClient = connectionPool.get(vertx, tenantId); try { if (postgresClient == null) { postgresClient = new PostgresClient(vertx, tenantId); connectionPool.put(vertx, tenantId, postgresClient); } } catch (Exception e) { log.error(e.getMessage(), e); } return postgresClient; } /** * Instance for the Postgres' default schema public. * @param vertx the Vertx to use * @return the PostgresClient instance, or null on error */ public static PostgresClient getInstance(Vertx vertx) { return getInstanceInternal(vertx, DEFAULT_SCHEMA); } /** * Instance for the tenantId. * @param vertx the Vertx to use * @param tenantId the tenantId the instance is for * @return the PostgresClient instance, or null on error */ public static PostgresClient getInstance(Vertx vertx, String tenantId) { if (DEFAULT_SCHEMA.equals(tenantId)) { throw new IllegalArgumentException("tenantId must not be default schema " + DEFAULT_SCHEMA); } return getInstanceInternal(vertx, tenantId); } /* if the password in the config file is encrypted then use the secret key * that should have been set via the admin api to decode it and use that to connect * note that in embedded mode (such as unit tests) the postgres embedded is started before the * verticle is deployed*/ private static String decodePassword(String password) throws Exception { String key = AES.getSecretKey(); if(key != null){ SecretKey sk = AES.getSecretKeyObject(key); String decoded = AES.decryptPassword(password, sk); return decoded; } /* no key , so nothing to decode */ return password; } /** this function is intended to receive the tenant id as a password * encrypt the tenant id with the secret key and use the encrypted * password as the actual password for the tenant user in the DB. * In order to then know the password - you need to take the tenant id * and encrypt it with the secret key and then you have the tenant's password */ private static String createPassword(String password) throws Exception { String key = AES.getSecretKey(); if(key != null){ SecretKey sk = AES.getSecretKeyObject(key); String newPassword = AES.encryptPasswordAsBase64(password, sk); return newPassword; } /** no key , so nothing to encrypt, the password will be the tenant id */ return password; } /** * @return this instance's PgPool that allows connections to be made */ PgPool getClient() { return client; } /** * Set this instance's PgPool that can connect to Postgres. * @param client the new client */ void setClient(PgPool client) { this.client = client; } /** * Close the SQL client of this PostgresClient instance. 
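* <p>Illustrative shutdown sketch:
* <pre>
*  PostgresClient.getInstance(vertx, tenantId).closeClient(res -> {
*    // the pool entry for (vertx, tenantId) has been removed; res is always succeeded
*  });
* </pre>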
* @param whenDone invoked with the close result; additional close invocations * are always successful. */ public void closeClient(Handler<AsyncResult<Void>> whenDone) { if (client == null) { whenDone.handle(Future.succeededFuture()); return; } PgPool clientToClose = client; client = null; connectionPool.removeMultiKey(vertx, tenantId); // remove (vertx, tenantId, this) entry clientToClose.close(); whenDone.handle(Future.succeededFuture()); } /** * Close all SQL clients stored in the connection pool. */ public static void closeAllClients() { @SuppressWarnings("rawtypes") List<Future> list = new ArrayList<>(connectionPool.size()); // copy of values() because closeClient will delete them from connectionPool for (PostgresClient client : connectionPool.values().toArray(new PostgresClient [0])) { Promise<Object> promise = Promise.promise(); list.add(promise.future()); client.closeClient(f -> promise.complete()); } CompositeFuture.join(list); } static PgConnectOptions createPgConnectOptions(JsonObject sqlConfig) { PgConnectOptions pgConnectOptions = new PgConnectOptions(); String host = sqlConfig.getString(HOST); if (host != null) { pgConnectOptions.setHost(host); } Integer port = sqlConfig.getInteger(PORT); if (port != null) { pgConnectOptions.setPort(port); } String username = sqlConfig.getString(_USERNAME); if (username != null) { pgConnectOptions.setUser(username); } String password = sqlConfig.getString(_PASSWORD); if (password != null) { pgConnectOptions.setPassword(password); } String database = sqlConfig.getString(DATABASE); if (database != null) { pgConnectOptions.setDatabase(database); } Integer connectionReleaseDelay = sqlConfig.getInteger(CONNECTION_RELEASE_DELAY, DEFAULT_CONNECTION_RELEASE_DELAY); pgConnectOptions.setIdleTimeout(connectionReleaseDelay); pgConnectOptions.setIdleTimeoutUnit(TimeUnit.MILLISECONDS); return pgConnectOptions; } private void init() throws Exception { /** check if in pom.xml this prop is declared in order to work with encrypted * passwords for postgres embedded - this is a dev mode only feature */ String secretKey = System.getProperty("postgres_secretkey_4_embeddedmode"); if (secretKey != null) { AES.setSecretKey(secretKey); } postgreSQLClientConfig = getPostgreSQLClientConfig(tenantId, schemaName, Envs.allDBConfs()); logPostgresConfig(); if (isEmbedded()) { startEmbeddedPostgres(); } client = createPgPool(vertx, postgreSQLClientConfig); } static PgPool createPgPool(Vertx vertx, JsonObject configuration) { PgConnectOptions connectOptions = createPgConnectOptions(configuration); PoolOptions poolOptions = new PoolOptions(); poolOptions.setMaxSize(configuration.getInteger(MAX_POOL_SIZE, 4)); return PgPool.pool(vertx, connectOptions, poolOptions); } /** * Get PostgreSQL configuration, invokes setIsEmbedded(true) if needed. * @return configuration for PostgreSQL * @throws Exception on password decryption or encryption failure */ @SuppressWarnings("squid:S2068") /* Suppress "Credentials should not be hard-coded" - The docker container does not expose the embedded postges port. Moving the hard-coded credentials into some default config file doesn't remove them from the build. 
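For orientation only (an illustrative sketch, not an authoritative schema): the configuration
object read below typically carries keys such as
{"host":"127.0.0.1", "port":5432, "username":"...", "password":"...", "database":"...",
"maxPoolSize":4, "connectionReleaseDelay":60000}; see createPgConnectOptions and createPgPool
for how each key is consumed.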
*/ static JsonObject getPostgreSQLClientConfig(String tenantId, String schemaName, JsonObject environmentVariables) throws Exception { // static function for easy unit testing JsonObject config = environmentVariables; if (config.size() > 0) { log.info("DB config read from environment variables"); } else { //no env variables passed in, read for module's config file config = LoadConfs.loadConfig(getConfigFilePath()); // LoadConfs.loadConfig writes its own log message } if (config == null) { if (NetworkUtils.isLocalPortFree(EMBEDDED_POSTGRES_PORT)) { log.info("No DB configuration found, starting embedded postgres with default config"); setIsEmbedded(true); } else { log.info("No DB configuration found, using default config, port is already in use"); } config = new JsonObject(); config.put(_USERNAME, _USERNAME); config.put(_PASSWORD, _PASSWORD); config.put(HOST, DEFAULT_IP); config.put(PORT, EMBEDDED_POSTGRES_PORT); config.put(DATABASE, "postgres"); } Object v = config.remove(Envs.DB_EXPLAIN_QUERY_THRESHOLD.name()); if (v instanceof Long) { PostgresClient.setExplainQueryThreshold((Long) v); } if (tenantId.equals(DEFAULT_SCHEMA)) { config.put(_PASSWORD, decodePassword( config.getString(_PASSWORD) )); } else { log.info("Using schema: " + tenantId); config.put(_USERNAME, schemaName); config.put(_PASSWORD, createPassword(tenantId)); } if(embeddedPort != -1 && embeddedMode){ //over ride the declared default port - coming from the config file and use the //passed in port as well. useful when multiple modules start up an embedded postgres //in a single server. config.put(PORT, embeddedPort); } return config; } /** * Log postgreSQLClientConfig. */ @SuppressWarnings("squid:S2068") // Suppress "Credentials should not be hard-coded" // "'password' detected in this expression". // False positive: Password is configurable, here we remove it from the log. private void logPostgresConfig() { if (! log.isInfoEnabled()) { return; } JsonObject passwordRedacted = postgreSQLClientConfig.copy(); passwordRedacted.put(_PASSWORD, "..."); log.info("postgreSQLClientConfig = " + passwordRedacted.encode()); } /** * Get connection configuration. * The following properties are returned (some of which are optional): * username, password, host, port, database, connectionReleaseDelay, maxPoolSize. * Originally based on driver * <a href="https://vertx.io/docs/vertx-mysql-postgresql-client/java/#_configuration"> * Configuration * </a>. * which is no longer in actual use. * * @return */ public JsonObject getConnectionConfig(){ return postgreSQLClientConfig; } public static JsonObject pojo2JsonObject(Object entity) throws JsonProcessingException { if (entity == null) { throw new IllegalArgumentException("Entity can not be null"); } if (entity instanceof JsonObject) { return ((JsonObject) entity); } else { return new JsonObject(mapper.writeValueAsString(entity)); } } /** * Start a SQL transaction. * * <p>Use the AsyncResult<SQLConnection> result to invoke any of the * functions that take that result as first parameter for the commands * within the transaction. * * <p>To close the open connection invoke the END or ROLLBACK * function. Note that after a failing operation (for example some UPDATE) * both the connection and the transaction remain open to let the caller * decide what to do. 
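* <p>Sketch of a typical transaction; "mytable" and the entity are placeholders:
* <pre>
*  postgresClient.startTx(tx -> {
*    postgresClient.save(tx, "mytable", entity, saveReply -> {
*      if (saveReply.failed()) {
*        postgresClient.rollbackTx(tx, rollback -> { });  // then report the failure
*      } else {
*        postgresClient.endTx(tx, end -> { });            // then report success
*      }
*    });
*  });
* </pre>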
* * @param done - the result is the current connection */ public void startTx(Handler<AsyncResult<SQLConnection>> done) { getConnection(res -> { if (res.failed()) { log.error(res.cause().getMessage(), res.cause()); done.handle(Future.failedFuture(res.cause())); return; } try { SQLConnection pgTransaction = new SQLConnection(res.result(), res.result().begin(), null); done.handle(Future.succeededFuture(pgTransaction)); } catch (Exception e) { log.error(e.getMessage(), e); done.handle(Future.failedFuture(e.getCause())); } }); } static void finalizeTx(AsyncResult<Void> txResult, PgConnection conn, Handler<AsyncResult<Void>> done ) { if (conn != null) { conn.close(); } if (txResult.failed() && !"Transaction already completed".equals(txResult.cause().getMessage())) { done.handle(Future.failedFuture(txResult.cause())); return; } done.handle(Future.succeededFuture()); } /** * Rollback a SQL transaction started on the connection. This closes the connection. * * @see #startTx(Handler) * @param trans the connection with an open transaction * @param done success or failure */ //@Timer public void rollbackTx(AsyncResult<SQLConnection> trans, Handler<AsyncResult<Void>> done) { try { if (trans.failed()) { done.handle(Future.failedFuture(trans.cause())); return; } trans.result().tx.rollback(res -> finalizeTx(res, trans.result().conn, done)); } catch (Exception e) { done.handle(Future.failedFuture(e)); } } /** * Ends a SQL transaction (commit) started on the connection. This closes the connection. * * @see #startTx(Handler) * @param trans the connection with an open transaction * @param done success or failure */ //@Timer public void endTx(AsyncResult<SQLConnection> trans, Handler<AsyncResult<Void>> done) { try { if (trans.failed()) { done.handle(Future.failedFuture(trans.cause())); return; } trans.result().tx.commit(res -> finalizeTx(res, trans.result().conn, done)); } catch (Exception e) { done.handle(Future.failedFuture(e)); } } /** * The returned handler first closes the SQLConnection and then passes on the AsyncResult to handler. * * <p>The returned Handler ignores (but logs) any failure when opening the connection (conn) or * closing the connection and always passes on the AsyncResult<T>. This is in contrast to * io.vertx.ext.sql.HandlerUtil.closeAndHandleResult where the connection * closing failure suppresses any result or failure of the AsyncResult<T> input. * * @param conn the SQLConnection to close * @param handler where to pass on the input AsyncResult * @return the Handler */ <T> Handler<AsyncResult<T>> closeAndHandleResult( AsyncResult<SQLConnection> conn, Handler<AsyncResult<T>> handler) { return ar -> { if (conn.failed()) { log.error("Opening SQLConnection failed: " + conn.cause().getMessage(), conn.cause()); handler.handle(ar); return; } SQLConnection sqlConnection = conn.result(); if (sqlConnection.conn != null) { sqlConnection.conn.close(); } cancelConnectionTimeoutTimer(sqlConnection); handler.handle(ar); }; } /** * Insert entity into table. Create a new id UUID and return it via replyHandler. * @param table database table (without schema) * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the result. */ public void save(String table, Object entity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, /* id */ null, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. 
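* <p>Illustrative call ("mytable" is a placeholder):
* <pre>
*  postgresClient.save("mytable", entity, true, reply -> {
*    // on success reply.result() is the generated id (or an empty string when returnId is false)
*  });
* </pre>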
* @param table database table (without schema) * @param entity a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param replyHandler returns any errors and the result. */ public void save(String table, Object entity, boolean returnId, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, /* id */ null, entity, returnId, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table and return the updated entity. * @param table database table (without schema) * @param id primary key for the record * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the entity after applying any database INSERT triggers */ <T> void saveAndReturnUpdatedEntity(String table, String id, T entity, Handler<AsyncResult<T>> replyHandler) { getSQLConnection(conn -> saveAndReturnUpdatedEntity(conn, table, id, entity, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, boolean returnId, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, returnId, /* upsert */ false, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, boolean returnId, boolean upsert, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, returnId, upsert, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table, or update it if it already exists. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity a POJO (plain old java object) * @param replyHandler returns any errors and the id of the entity. 
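* <p>Illustrative call ("mytable" is a placeholder; id is an existing or new UUID string):
* <pre>
*  postgresClient.upsert("mytable", id, entity, reply -> {
*    // reply.result() is the id that was inserted or updated
*  });
* </pre>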
*/ public void upsert(String table, String id, Object entity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, /* returnId */ true, /* upsert */ true, /* convertEntity */ true, closeAndHandleResult(conn, replyHandler))); } /** * Insert or update. * * <p>Needed if upserting binary data as base64 where converting it to a json will corrupt the data * otherwise this function is not needed as the default is true * example: * byte[] data = ......; * JsonArray jsonArray = new JsonArray().add(data); * .upsert(TABLE_NAME, id, jsonArray, false, replyHandler -> { * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity either a POJO, or a JsonArray containing a byte[] element, see convertEntity * @param convertEntity true if entity is a POJO, false if entity is a JsonArray * @param replyHandler returns any errors and the result (see returnId). */ public void upsert(String table, String id, Object entity, boolean convertEntity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, /* returnId */ true, /* upsert */ true, /* convertEntity */ convertEntity, closeAndHandleResult(conn, replyHandler))); } /** * Insert entity into table. * @param table database table (without schema) * @param id primary key for the record, or null if one should be created * @param entity either a POJO, or a JsonArray containing a byte[] element, see convertEntity * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param convertEntity true if entity is a POJO, false if entity is a JsonArray * @param replyHandler returns any errors and the result (see returnId). */ public void save(String table, String id, Object entity, boolean returnId, boolean upsert, boolean convertEntity, Handler<AsyncResult<String>> replyHandler) { getSQLConnection(conn -> save(conn, table, id, entity, returnId, upsert, convertEntity, closeAndHandleResult(conn, replyHandler))); } /** * Save entity in table using the sqlConnection. Return the * created id via the replyHandler. * * @param sqlConnection connection with transaction * @param table where to insert the entity record * @param entity the record to insert, a POJO (plain old java object) * @param replyHandler where to report success status and the created id */ public void save(AsyncResult<SQLConnection> sqlConnection, String table, Object entity, Handler<AsyncResult<String>> replyHandler) { save(sqlConnection, table, /* id */ null, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, replyHandler); } /** * Save entity in table. Use the transaction of sqlConnection. Return the id * of the id field (primary key) via the replyHandler. If id (primary key) and * the id of entity (jsonb field) are different you may need a trigger in the * database to sync them. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. 
* @param entity the record to insert, a POJO (plain old java object) * @param replyHandler where to report success status and the final id of the id field */ public void save(AsyncResult<SQLConnection> sqlConnection, String table, String id, Object entity, Handler<AsyncResult<String>> replyHandler) { save(sqlConnection, table, id, entity, /* returnId */ true, /* upsert */ false, /* convertEntity */ true, replyHandler); } /** * Save entity in table. Use the transaction of sqlConnection. Return the id * of the id field (primary key) via the replyHandler. If id (primary key) and * the id of entity (jsonb field) are different you may need a trigger in the * database to sync them. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. * @param entity the record to insert, a POJO (plain old java object) * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param replyHandler where to report success status and the final id of the id field */ public void save(AsyncResult<SQLConnection> sqlConnection, String table, String id, Object entity, boolean returnId, boolean upsert, Handler<AsyncResult<String>> replyHandler) { save(sqlConnection, table, id, entity, returnId, upsert, /* convertEntity */ true, replyHandler); } /** * Save entity in table. Use the transaction of sqlConnection. Return the id * of the id field (primary key) via the replyHandler. If id (primary key) and * the id of entity (jsonb field) are different you may need a trigger in the * database to sync them. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. * @param entity the record to insert, either a POJO or a JsonArray, see convertEntity * @param returnId true to return the id of the inserted record, false to return an empty string * @param upsert whether to update if the record with that id already exists (INSERT or UPDATE) * @param convertEntity true if entity is a POJO, false if entity is a JsonArray * @param replyHandler where to report success status and the final id of the id field */ @SuppressWarnings({"squid:S00107"}) // Method has more than 7 parameters public void save(AsyncResult<SQLConnection> sqlConnection, String table, String id, Object entity, boolean returnId, boolean upsert, boolean convertEntity, Handler<AsyncResult<String>> replyHandler) { if (log.isDebugEnabled()) { log.debug("save (with connection and id) called on " + table); } try { if (sqlConnection.failed()) { replyHandler.handle(Future.failedFuture(sqlConnection.cause())); return; } long start = System.nanoTime(); String sql = INSERT_CLAUSE + schemaName + DOT + table + " (id, jsonb) VALUES ($1, " + (convertEntity ? "$2" : "$2::text") + ")" + (upsert ? " ON CONFLICT (id) DO UPDATE SET jsonb=EXCLUDED.jsonb" : "") + " RETURNING " + (returnId ? "id" : "''"); sqlConnection.result().conn.preparedQuery(sql).execute(Tuple.of( id == null ? UUID.randomUUID() : UUID.fromString(id), convertEntity ? 
pojo2JsonObject(entity) : ((JsonArray)entity).getString(0) ), query -> { statsTracker(SAVE_STAT_METHOD, table, start); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); } else { RowSet<Row> result = query.result(); String res = result.iterator().next().getValue(0).toString(); replyHandler.handle(Future.succeededFuture(res)); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Save entity in table and return the updated entity. * * @param sqlConnection connection (for example with transaction) * @param table where to insert the entity record * @param id the value for the id field (primary key); if null a new random UUID is created for it. * @param entity the record to insert, a POJO * @param replyHandler where to report success status and the entity after applying any database INSERT triggers */ private <T> void saveAndReturnUpdatedEntity(AsyncResult<SQLConnection> sqlConnection, String table, String id, T entity, Handler<AsyncResult<T>> replyHandler) { log.info("save (with connection and id) called on " + table); if (sqlConnection.failed()) { log.error(sqlConnection.cause().getMessage(), sqlConnection.cause()); replyHandler.handle(Future.failedFuture(sqlConnection.cause())); return; } try { long start = System.nanoTime(); String sql = INSERT_CLAUSE + schemaName + DOT + table + " (id, jsonb) VALUES ($1, $2) RETURNING jsonb"; sqlConnection.result().conn.preparedQuery(sql).execute( Tuple.of(id == null ? UUID.randomUUID() : UUID.fromString(id), pojo2JsonObject(entity)), query -> { statsTracker(SAVE_STAT_METHOD, table, start); if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); replyHandler.handle(Future.failedFuture(query.cause())); return; } try { RowSet<Row> result = query.result(); String updatedEntityString = result.iterator().next().getValue(0).toString(); @SuppressWarnings("unchecked") T updatedEntity = (T) mapper.readValue(updatedEntityString, entity.getClass()); replyHandler.handle(Future.succeededFuture(updatedEntity)); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Insert the entities into table using a single INSERT statement. * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void saveBatch(String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> saveBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /** * Upsert the entities into table using a single INSERT statement. * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void upsertBatch(String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> upsertBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /** * Insert the entities into table using a single INSERT statement. 
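* <p>Sketch (each entity is a JSON string; "mytable" is a placeholder):
* <pre>
*  JsonArray entities = new JsonArray()
*    .add("{\"name\":\"a\"}")
*    .add("{\"name\":\"b\"}");
*  postgresClient.saveBatch(conn, "mytable", entities, reply -> {
*    // reply.result() contains the id generated or used for each entity
*  });
* </pre>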
* @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void saveBatch(AsyncResult<SQLConnection> sqlConnection, String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ false, table, entities, replyHandler); } /** * Upsert the entities into table using a single INSERT statement. * @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ public void upsertBatch(AsyncResult<SQLConnection> sqlConnection, String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ true, table, entities, replyHandler); } /** * Insert or upsert the entities into table using a single INSERT statement. * @param sqlConnection the connection to run on, may be on a transaction * @param upsert true for upsert, false for insert with fail on duplicate id * @param table destination table to insert into * @param entities each array element is a String with the content for the JSONB field of table; if id is missing a random id is generated * @param replyHandler result, containing the id field for each inserted element of entities */ private void saveBatch(AsyncResult<SQLConnection> sqlConnection, boolean upsert, String table, JsonArray entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { List<Tuple> list = new ArrayList<>(); if (entities != null) { for (int i = 0; i < entities.size(); i++) { String json = entities.getString(i); JsonObject jsonObject = new JsonObject(json); String id = jsonObject.getString("id"); list.add(Tuple.of(id == null ? UUID.randomUUID() : UUID.fromString(id), jsonObject)); } } saveBatchInternal(sqlConnection, upsert, table, list, replyHandler); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } private void saveBatchInternal(AsyncResult<SQLConnection> sqlConnection, boolean upsert, String table, List<Tuple> batch, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { long start = System.nanoTime(); log.info("starting: saveBatch size=" + batch.size()); String sql = INSERT_CLAUSE + schemaName + DOT + table + " (id, jsonb) VALUES ($1, $2)" + (upsert ? 
" ON CONFLICT (id) DO UPDATE SET jsonb = EXCLUDED.jsonb" : "") + RETURNING_ID; if (sqlConnection.failed()) { replyHandler.handle(Future.failedFuture(sqlConnection.cause())); return; } PgConnection connection = sqlConnection.result().conn; connection.preparedQuery(sql).executeBatch(batch, queryRes -> { if (queryRes.failed()) { log.error("saveBatch size=" + batch.size() + SPACE + queryRes.cause().getMessage(), queryRes.cause()); statsTracker("saveBatchFailed", table, start); replyHandler.handle(Future.failedFuture(queryRes.cause())); return; } statsTracker("saveBatch", table, start); if (queryRes.result() != null) { replyHandler.handle(Future.succeededFuture(queryRes.result())); } else { replyHandler.handle(Future.succeededFuture(new LocalRowSet(0))); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /*** * Save a list of POJOs. * POJOs are converted to a JSON String and saved in a single INSERT call. * A random id is generated if POJO's id is null. * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void saveBatch(String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> saveBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /*** * Upsert a list of POJOs. * POJOs are converted to a JSON String and saved or updated in a single INSERT call. * A random id is generated if POJO's id is null. * If a record with the id already exists it is updated (upsert). * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void upsertBatch(String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> upsertBatch(conn, table, entities, closeAndHandleResult(conn, replyHandler))); } /*** * Save a list of POJOs. * POJOs are converted to a JSON String and saved in a single INSERT call. * A random id is generated if POJO's id is null. * @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void saveBatch(AsyncResult<SQLConnection> sqlConnection, String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ false, table, entities, replyHandler); } /*** * Upsert a list of POJOs. * POJOs are converted to a JSON String and saved or updated in a single INSERT call. * A random id is generated if POJO's id is null. * If a record with the id already exists it is updated (upsert). 
* @param sqlConnection the connection to run on, may be on a transaction * @param table destination table to insert into * @param entities each list element is a POJO * @param replyHandler result, containing the id field for each inserted POJO */ public <T> void upsertBatch(AsyncResult<SQLConnection> sqlConnection, String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { saveBatch(sqlConnection, /* upsert */ true, table, entities, replyHandler); } private <T> void saveBatch(AsyncResult<SQLConnection> sqlConnection, boolean upsert, String table, List<T> entities, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { List<Tuple> batch = new ArrayList<>(); if (entities == null || entities.isEmpty()) { RowSet<Row> rowSet = new LocalRowSet(0).withColumns(Arrays.asList("id")); replyHandler.handle(Future.succeededFuture(rowSet)); return; } // We must use reflection, the POJOs don't have a interface/superclass in common. Method getIdMethod = entities.get(0).getClass().getDeclaredMethod("getId"); for (Object entity : entities) { Object obj = getIdMethod.invoke(entity); UUID id = obj == null ? UUID.randomUUID() : UUID.fromString((String) obj); batch.add(Tuple.of(id, pojo2JsonObject(entity))); } saveBatchInternal(sqlConnection, upsert, table, batch, replyHandler); } catch (Exception e) { log.error("saveBatch error " + e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * update a specific record associated with the key passed in the id arg * @param table - table to save to (must exist) * @param entity - pojo to save * @param id - key of the entity being updated * @param replyHandler */ public void update(String table, Object entity, String id, Handler<AsyncResult<RowSet<Row>>> replyHandler) { StringBuilder where = new StringBuilder().append(WHERE).append(ID_FIELD).append('='); Cql2PgUtil.appendQuoted(id, where); // proper masking prevents SQL injection update(table, entity, DEFAULT_JSONB_FIELD_NAME, where.toString(), false, replyHandler); } /** * Update 1...n records matching the filter * <br> * Criterion Examples: * <br> * 1. can be mapped from a string in the following format [{"field":"''","value":"","op":""}] * <pre> * Criterion a = json2Criterion("[{\"field\":\"'fund_distributions'->[]->'amount'->>'sum'\",\"value\":120,\"op\":\"<\"}]"); //denotes funds_distribution is an array of objects * Criterion a = json2Criterion("[{"field":"'po_line_status'->>'value'","value":"SENT","op":"like"},{"field":"'owner'->>'value'","value":"MITLIBMATH","op":"="}, {"op":"AND"}]"); * (see postgres query syntax for more examples in the read.me * </pre> * 2. Simple Criterion * <pre> * Criteria b = new Criteria(); * b.field.add("'note'"); * b.operation = "="; * b.value = "a"; * b.isArray = true; //denotes that the queried field is an array with multiple values * Criterion a = new Criterion(b); * </pre> * 3. For a boolean field called rush = false OR note[] contains 'a' * <pre> * Criteria d = new Criteria(); * d.field.add("'rush'"); * d.operation = Criteria.OP_IS_FALSE; * d.value = null; * Criterion a = new Criterion(); * a.addCriterion(d, Criteria.OP_OR, b); * </pre> * 4. 
for the following json: * <pre> * "price": { * "sum": "150.0", * "po_currency": { * "value": "USD", * "desc": "US Dollar" * } * }, * * Criteria c = new Criteria(); * c.addField("'price'").addField("'po_currency'").addField("'value'"); * c.operation = Criteria.OP_LIKE; * c.value = "USD"; * * </pre> * @param table - table to update * @param entity - pojo to set for matching records * @param filter - see example below * @param returnUpdatedIds - return ids of updated records * @param replyHandler * */ public void update(String table, Object entity, Criterion filter, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { String where = null; if(filter != null){ where = filter.toString(); } update(table, entity, DEFAULT_JSONB_FIELD_NAME, where, returnUpdatedIds, replyHandler); } public void update(String table, Object entity, CQLWrapper filter, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { String where = ""; if(filter != null){ where = filter.toString(); } update(table, entity, DEFAULT_JSONB_FIELD_NAME, where, returnUpdatedIds, replyHandler); } public void update(AsyncResult<SQLConnection> conn, String table, Object entity, CQLWrapper filter, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { String where = ""; try { if (filter != null) { where = filter.toString(); } update(conn, table, entity, DEFAULT_JSONB_FIELD_NAME, where, returnUpdatedIds, replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } public void update(AsyncResult<SQLConnection> conn, String table, Object entity, String jsonbField, String whereClause, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } long start = System.nanoTime(); StringBuilder sb = new StringBuilder(); sb.append(whereClause); StringBuilder returning = new StringBuilder(); if (returnUpdatedIds) { returning.append(RETURNING_ID); } try { String q = UPDATE + schemaName + DOT + table + SET + jsonbField + " = $1::jsonb " + whereClause + SPACE + returning; log.debug("update query = " + q); conn.result().conn.preparedQuery(q).execute(Tuple.of(pojo2JsonObject(entity)), query -> { if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); } statsTracker(UPDATE_STAT_METHOD, table, start); replyHandler.handle(query); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } public void update(String table, Object entity, String jsonbField, String whereClause, boolean returnUpdatedIds, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> update(conn, table, entity, jsonbField, whereClause, returnUpdatedIds, closeAndHandleResult(conn, replyHandler))); } /** * update a section / field / object in the pojo - * <br> * for example: * <br> if a json called po_line contains the following field * <pre> * "po_line_status": { * "value": "SENT", * "desc": "sent to vendor" * }, * </pre> * this translates into a po_line_status object within the po_line object - to update the entire object / section * create an updateSection object pushing into the section the po line status as the field and the value (string / json / etc...) 
to replace it with * <pre> * a = new UpdateSection(); * a.addField("po_line_status"); * a.setValue(new JsonObject("{\"value\":\"SOMETHING_NEW4\",\"desc\":\"sent to vendor again\"}")); * </pre> * Note that postgres does not update inplace the json but rather will create a new json with the * updated section and then reference the id to that newly created json * <br> * Queries generated will look something like this: * <pre> * * update test.po_line set jsonb = jsonb_set(jsonb, '{po_line_status}', '{"value":"SOMETHING_NEW4","desc":"sent to vendor"}') where _id = 19; * update test.po_line set jsonb = jsonb_set(jsonb, '{po_line_status, value}', '"SOMETHING_NEW5"', false) where _id = 15; * </pre> * * @param table - table to update * @param section - see UpdateSection class * @param when - Criterion object * @param replyHandler * */ public void update(String table, UpdateSection section, Criterion when, boolean returnUpdatedIdsCount, Handler<AsyncResult<RowSet<Row>>> replyHandler) { long start = System.nanoTime(); getConnection(res -> { if (res.succeeded()) { PgConnection connection = res.result(); try { String value = section.getValue().replace("'", "''"); String where = when == null ? "" : when.toString(); String returning = returnUpdatedIdsCount ? RETURNING_ID : ""; String q = UPDATE + schemaName + DOT + table + SET + DEFAULT_JSONB_FIELD_NAME + " = jsonb_set(" + DEFAULT_JSONB_FIELD_NAME + "," + section.getFieldsString() + ", '" + value + "', false) " + where + returning; log.debug("update query = " + q); connection.query(q).execute(query -> { connection.close(); statsTracker(UPDATE_STAT_METHOD, table, start); if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); replyHandler.handle(Future.failedFuture(query.cause())); } else { replyHandler.handle(Future.succeededFuture(query.result())); } }); } catch (Exception e) { if (connection != null){ connection.close(); } log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } else { log.error(res.cause().getMessage(), res.cause()); replyHandler.handle(Future.failedFuture(res.cause())); } }); } /** * Delete by id. * @param table table name without schema * @param id primary key value of the record to delete */ public void delete(String table, String id, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, id, closeAndHandleResult(conn, replyHandler))); } /** * Delete by id. * @param connection where to run, can be within a transaction * @param table table name without schema * @param id primary key value of the record to delete * @param replyHandler */ public void delete(AsyncResult<SQLConnection> connection, String table, String id, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (connection.failed()) { replyHandler.handle(Future.failedFuture(connection.cause())); return; } connection.result().conn.preparedQuery( "DELETE FROM " + schemaName + DOT + table + WHERE + ID_FIELD + "=$1") .execute(Tuple.of(UUID.fromString(id)), replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } /** * Delete by CQL wrapper. * @param table table name without schema * @param cql which records to delete */ public void delete(String table, CQLWrapper cql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, cql, closeAndHandleResult(conn, replyHandler))); } /** * Delete by CQL wrapper. 
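   * <p>Illustrative sketch only (the {@code "item"} table, the CQL expression, the
   * {@code CQL2PgJSON} field name and the open transaction {@code conn} are assumptions;
   * exception handling is omitted):
   * <pre>
   * CQLWrapper cql = new CQLWrapper(new CQL2PgJSON("item.jsonb"), "status == \"DISCARDED\"");
   * postgresClient.delete(conn, "item", cql, reply -> {
   *   // reply.result().rowCount() is the number of deleted records
   * });
   * </pre>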
* @param connection where to run, can be within a transaction * @param table table name without schema * @param cql which records to delete */ public void delete(AsyncResult<SQLConnection> connection, String table, CQLWrapper cql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { String where = cql == null ? "" : cql.toString(); doDelete(connection, table, where, replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } /** * Delete based on filter * @param table table name without schema * @param filter * @param replyHandler */ public void delete(String table, Criterion filter, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, filter, closeAndHandleResult(conn, replyHandler))); } /** * Delete as part of a transaction * @param conn where to run, can be within a transaction * @param table table name without schema * @param filter which records to delete */ public void delete(AsyncResult<SQLConnection> conn, String table, Criterion filter, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { String where = filter == null ? "" : filter.toString(); doDelete(conn, table, where, replyHandler); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } /** * delete based on jsons matching the field/value pairs in the pojo (which is first converted to json and then similar jsons are searched) * --> do not use on large tables without checking as the @> will not use a btree * @param table * @param entity * @param replyHandler */ public void delete(String table, Object entity, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> delete(conn, table, entity, closeAndHandleResult(conn, replyHandler))); } public void delete(AsyncResult<SQLConnection> connection, String table, Object entity, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { long start = System.nanoTime(); if (connection.failed()) { replyHandler.handle(Future.failedFuture(connection.cause())); return; } String sql = DELETE + FROM + schemaName + DOT + table + WHERE + DEFAULT_JSONB_FIELD_NAME + "@>$1"; log.debug("delete by entity, query = " + sql + "; $1=" + entity); connection.result().conn.preparedQuery(sql).execute(Tuple.of(pojo2JsonObject(entity)), delete -> { statsTracker(DELETE_STAT_METHOD, table, start); if (delete.failed()) { log.error(delete.cause().getMessage(), delete.cause()); replyHandler.handle(Future.failedFuture(delete.cause())); return; } replyHandler.handle(Future.succeededFuture(delete.result())); }); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } } private void doDelete(AsyncResult<SQLConnection> connection, String table, String where, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { long start = System.nanoTime(); String sql = DELETE + FROM + schemaName + DOT + table + " " + where; log.debug("doDelete query = " + sql); if (connection.failed()) { replyHandler.handle(Future.failedFuture(connection.cause())); return; } connection.result().conn.query(sql).execute(query -> { statsTracker(DELETE_STAT_METHOD, table, start); if (query.failed()) { log.error(query.cause().getMessage(), query.cause()); replyHandler.handle(Future.failedFuture(query.cause())); return; } replyHandler.handle(Future.succeededFuture(query.result())); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * * @param <T> * @param table * @param clazz * @param fieldName * @param where * @param returnCount * @param returnIdField * @param setId - 
unused, the database trigger will always set jsonb->'id' automatically * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String fieldName, String where, boolean returnCount, boolean returnIdField, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fieldName, where, returnCount, returnIdField, setId, null /* facets */, replyHandler); } /** * * @param <T> * @param table * @param clazz * @param fieldName * @param where * @param returnCount * @param returnIdField * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param facets * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String fieldName, String where, boolean returnCount, boolean returnIdField, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fieldName, where, returnCount, returnIdField, setId, facets, null /*distinctOn*/, replyHandler); } /** * * @param <T> * @param table * @param clazz * @param fieldName * @param where * @param returnCount * @param returnIdField * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param facets * @param distinctOn * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String fieldName, String where, boolean returnCount, boolean returnIdField, boolean setId, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler) { CQLWrapper wrapper = new CQLWrapper().setWhereClause(where); getSQLConnection(conn -> doGet(conn, table, clazz, fieldName, wrapper, returnCount, returnIdField, facets, distinctOn, closeAndHandleResult(conn, replyHandler))); } static class QueryHelper { String table; List<FacetField> facets; String selectQuery; String countQuery; int offset; int limit; public QueryHelper(String table) { this.table = table; } } static class TotaledResults { final RowSet<Row> set; final Integer total; public TotaledResults(RowSet<Row> set, Integer total) { this.set = set; this.total = total; } } /** * low-level getter based on CQLWrapper * @param <T> * @param conn * @param table * @param clazz * @param fieldName * @param wrapper * @param returnCount * @param returnIdField * @param facets * @param distinctOn * @param replyHandler */ private <T> void doGet( AsyncResult<SQLConnection> conn, String table, Class<T> clazz, String fieldName, CQLWrapper wrapper, boolean returnCount, boolean returnIdField, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler ) { if (conn.failed()) { log.error(conn.cause().getMessage(), conn.cause()); replyHandler.handle(Future.failedFuture(conn.cause())); return; } PgConnection connection = conn.result().conn; try { QueryHelper queryHelper = buildQueryHelper(table, fieldName, wrapper, returnIdField, facets, distinctOn); if (returnCount) { processQueryWithCount(connection, queryHelper, GET_STAT_METHOD, totaledResults -> processResults(totaledResults.set, totaledResults.total, queryHelper.offset, queryHelper.limit, clazz), replyHandler); } else { processQuery(connection, queryHelper, null, GET_STAT_METHOD, totaledResults -> processResults(totaledResults.set, totaledResults.total, queryHelper.offset, queryHelper.limit, clazz), replyHandler); } } catch (Exception e) { 
      log.error(e.getMessage(), e);
      replyHandler.handle(Future.failedFuture(e));
    }
  }

  /**
   * Streamed GET with CQLWrapper (T variant, no facets)
   * @param <T>
   * @param table
   * @param entity
   * @param fieldName usually "jsonb"
   * @param filter usually CQL query
   * @param returnIdField
   * @param distinctOn may be null
   * @param streamHandler called for each record
   * @param replyHandler called when query is complete
   * @deprecated This method is deprecated because you either have to buffer the whole
   * HTTP response in memory before an HTTP status can be produced, or you have to
   * return a fake error. Furthermore, this API does not provide totalCount.
   * Use streamGet with {@link PostgresClientStreamResult} instead.
   * {@link #streamGet(java.lang.String, java.lang.Object, java.lang.String,
   * org.folio.rest.persist.cql.CQLWrapper, boolean, java.lang.String,
   * io.vertx.core.Handler, io.vertx.core.Handler)}
   */
  @Deprecated
  @SuppressWarnings({"squid:S00107"}) // has more than 7 parameters
  public <T> void streamGet(String table, T entity, String fieldName,
    CQLWrapper filter, boolean returnIdField, String distinctOn,
    Handler<T> streamHandler, Handler<AsyncResult<Void>> replyHandler) {

    Class<T> clazz = (Class<T>) entity.getClass();
    streamGet(table, clazz, fieldName, filter, returnIdField, distinctOn,
      res -> {
        if (res.failed()) {
          replyHandler.handle(Future.failedFuture(res.cause()));
          return;
        }
        PostgresClientStreamResult<T> streamResult = res.result();
        streamResult.handler(streamHandler);
        streamResult.endHandler(x -> replyHandler.handle(Future.succeededFuture()));
        streamResult.exceptionHandler(e -> replyHandler.handle(Future.failedFuture(e)));
      });
  }

  /**
   * Stream GET with CQLWrapper, no facets {@link org.folio.rest.persist.PostgresClientStreamResult}
   * @param <T>
   * @param table
   * @param clazz
   * @param fieldName
   * @param filter
   * @param returnIdField
   * @param distinctOn may be null
   * @param replyHandler AsyncResult; on success with result {@link org.folio.rest.persist.PostgresClientStreamResult}
   */
  public <T> void streamGet(String table, Class<T> clazz, String fieldName,
    CQLWrapper filter, boolean returnIdField, String distinctOn,
    Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) {

    streamGet(table, clazz, fieldName, filter, returnIdField, distinctOn,
      null, 0, replyHandler);
  }

  /**
   * Stream GET with CQLWrapper, no facets {@link org.folio.rest.persist.PostgresClientStreamResult}
   * @param <T>
   * @param table
   * @param clazz
   * @param fieldName
   * @param filter
   * @param returnIdField
   * @param distinctOn may be null
   * @param queryTimeout query timeout in milliseconds, or 0 for no timeout
   * @param replyHandler AsyncResult; on success with result {@link PostgresClientStreamResult}
   */
  @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters
  public <T> void streamGet(String table, Class<T> clazz, String fieldName,
    CQLWrapper filter, boolean returnIdField, String distinctOn, int queryTimeout,
    Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) {

    streamGet(table, clazz, fieldName, filter, returnIdField, distinctOn,
      null, queryTimeout, replyHandler);
  }

  /**
   * Stream GET with CQLWrapper and facets {@link org.folio.rest.persist.PostgresClientStreamResult}
   * @param <T>
   * @param table
   * @param clazz
   * @param fieldName
   * @param filter
   * @param returnIdField must be true if facets are passed in
   * @param distinctOn may be null
   * @param facets for no facets: null or Collections.emptyList()
   * @param replyHandler AsyncResult; on success with result {@link org.folio.rest.persist.PostgresClientStreamResult}
   */
@SuppressWarnings({"squid:S00107"}) // Method has >7 parameters public <T> void streamGet(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnIdField, String distinctOn, List<FacetField> facets, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { getSQLConnection(0, conn -> streamGet(conn, table, clazz, fieldName, filter, returnIdField, distinctOn, facets, replyHandler)); } /** * Stream GET with CQLWrapper and facets {@link org.folio.rest.persist.PostgresClientStreamResult} * @param <T> * @param table * @param clazz * @param fieldName * @param filter * @param returnIdField must be true if facets are in passed * @param distinctOn may be null * @param facets for no facets: null or Collections.emptyList() * @param queryTimeout query timeout in milliseconds, or 0 for no timeout * @param replyHandler AsyncResult; on success with result {@link PostgresClientStreamResult} */ @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters public <T> void streamGet(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnIdField, String distinctOn, List<FacetField> facets, int queryTimeout, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { getSQLConnection(queryTimeout, conn -> streamGet(conn, table, clazz, fieldName, filter, returnIdField, distinctOn, facets, replyHandler)); } /** * streamGet with existing transaction/connection * @param <T> * @param connResult * @param table * @param clazz * @param fieldName * @param wrapper * @param returnIdField * @param distinctOn * @param facets * @param replyHandler */ @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters <T> void streamGet(AsyncResult<SQLConnection> connResult, String table, Class<T> clazz, String fieldName, CQLWrapper wrapper, boolean returnIdField, String distinctOn, List<FacetField> facets, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { if (connResult.failed()) { log.error(connResult.cause().getMessage(), connResult.cause()); replyHandler.handle(Future.failedFuture(connResult.cause())); return; } doStreamGetCount(connResult.result(), table, clazz, fieldName, wrapper, returnIdField, distinctOn, facets, replyHandler); } /** * private for now, might be public later (and renamed) * @param <T> * @param connection * @param table * @param clazz * @param fieldName * @param wrapper * @param returnIdField * @param distinctOn * @param facets * @param replyHandler */ @SuppressWarnings({"squid:S00107"}) // Method has >7 parameters private <T> void doStreamGetCount(SQLConnection connection, String table, Class<T> clazz, String fieldName, CQLWrapper wrapper, boolean returnIdField, String distinctOn, List<FacetField> facets, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { try { QueryHelper queryHelper = buildQueryHelper(table, fieldName, wrapper, returnIdField, facets, distinctOn); connection.conn.query(queryHelper.countQuery).execute(countQueryResult -> { if (countQueryResult.failed()) { replyHandler.handle(Future.failedFuture(countQueryResult.cause())); return; } ResultInfo resultInfo = new ResultInfo(); resultInfo.setTotalRecords(countQueryResult.result().iterator().next().getInteger(0)); doStreamGetQuery(connection, queryHelper, resultInfo, clazz, replyHandler); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } <T> void doStreamGetQuery(SQLConnection connection, QueryHelper queryHelper, ResultInfo resultInfo, Class<T> clazz, 
Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { // decide if we need to close transaction+connection ourselves final PgConnection closeConnection = connection.tx == null ? connection.conn : null; if (closeConnection != null) { closeConnection.begin(); } connection.conn.prepare(queryHelper.selectQuery, prepareRes -> { if (prepareRes.failed()) { connection.conn.close(); log.error(prepareRes.cause().getMessage(), prepareRes.cause()); replyHandler.handle(Future.failedFuture(prepareRes.cause())); return; } PreparedStatement pq = prepareRes.result(); RowStream<Row> stream = pq.createStream(STREAM_GET_DEFAULT_CHUNK_SIZE, Tuple.tuple()); PostgresClientStreamResult<T> streamResult = new PostgresClientStreamResult(resultInfo); doStreamRowResults(stream, clazz, closeConnection, queryHelper, streamResult, replyHandler); }); } private static List<String> getColumnNames(Row row) { List<String> columnNames = new ArrayList<>(); for (int i = 0; row.getColumnName(i) != null; i++) { columnNames.add(row.getColumnName(i)); } return columnNames; } private void closeIfNonNull(PgConnection pgConnection) { if (pgConnection != null) { pgConnection.close(); } } <T> void doStreamRowResults(RowStream<Row> sqlRowStream, Class<T> clazz, PgConnection pgConnection, QueryHelper queryHelper, PostgresClientStreamResult<T> streamResult, Handler<AsyncResult<PostgresClientStreamResult<T>>> replyHandler) { ResultInfo resultInfo = streamResult.resultInto(); Promise<PostgresClientStreamResult<T>> promise = Promise.promise(); ResultsHelper<T> resultsHelper = new ResultsHelper<>(clazz); boolean isAuditFlavored = isAuditFlavored(resultsHelper.clazz); Map<String, Method> externalColumnSetters = new HashMap<>(); AtomicInteger resultCount = new AtomicInteger(); sqlRowStream.handler(r -> { try { // for first row, get column names if (resultsHelper.offset == 0) { List<String> columnNames = getColumnNames(r); collectExternalColumnSetters(columnNames, resultsHelper.clazz, isAuditFlavored, externalColumnSetters); } T objRow = (T) deserializeRow(resultsHelper, externalColumnSetters, isAuditFlavored, r); if (!resultsHelper.facet) { resultCount.incrementAndGet(); if (!promise.future().isComplete()) { // end of facets (if any) .. 
produce result resultsHelper.facets.forEach((k, v) -> resultInfo.getFacets().add(v)); promise.complete(streamResult); replyHandler.handle(promise.future()); } streamResult.fireHandler(objRow); } resultsHelper.offset++; } catch (Exception e) { if (!promise.future().isComplete()) { promise.complete(streamResult); replyHandler.handle(promise.future()); } sqlRowStream.close(); // does not really stop stream for vertx-pg-client closeIfNonNull(pgConnection); log.error(e.getMessage(), e); streamResult.fireExceptionHandler(e); } }).endHandler(v2 -> { closeIfNonNull(pgConnection); resultInfo.setTotalRecords( getTotalRecords(resultCount.get(), resultInfo.getTotalRecords(), queryHelper.offset, queryHelper.limit)); try { if (!promise.future().isComplete()) { promise.complete(streamResult); replyHandler.handle(promise.future()); } streamResult.fireEndHandler(); } catch (Exception ex) { streamResult.fireExceptionHandler(ex); } }).exceptionHandler(e -> { closeIfNonNull(pgConnection); if (!promise.future().isComplete()) { promise.complete(streamResult); replyHandler.handle(promise.future()); } streamResult.fireExceptionHandler(e); }); } QueryHelper buildQueryHelper( String table, String fieldName, CQLWrapper wrapper, boolean returnIdField, List<FacetField> facets, String distinctOn) throws IOException, TemplateException { if (wrapper == null) { wrapper = new CQLWrapper(); } String addIdField = ""; if (returnIdField) { addIdField = COMMA + ID_FIELD; } if (!"null".equals(fieldName) && fieldName.contains("*")) { // if we are requesting all fields (*) , then dont add the id field to the select // this will return two id columns which will create ambiguity in facet queries addIdField = ""; } QueryHelper queryHelper = new QueryHelper(table); String countOn = "*"; String distinctOnClause = ""; if (distinctOn != null && !distinctOn.isEmpty()) { distinctOnClause = String.format("DISTINCT ON(%s) ", distinctOn); countOn = String.format("DISTINCT(%s)", distinctOn); } queryHelper.selectQuery = SELECT + distinctOnClause + fieldName + addIdField + FROM + schemaName + DOT + table + SPACE + wrapper.toString(); queryHelper.countQuery = SELECT + "COUNT(" + countOn + ")" + FROM + schemaName + DOT + table + SPACE + wrapper.getWhereClause(); String mainQuery = SELECT + distinctOnClause + fieldName + addIdField + FROM + schemaName + DOT + table + SPACE + wrapper.getWithoutLimOff(); if (facets != null && !facets.isEmpty()) { FacetManager facetManager = buildFacetManager(wrapper, queryHelper, mainQuery, facets); // this method call invokes freemarker templating queryHelper.selectQuery = facetManager.generateFacetQuery(); } if (!wrapper.getWhereClause().isEmpty()) { // only do estimation when filter is in use (such as CQL). queryHelper.countQuery = SELECT + "count_estimate('" + org.apache.commons.lang.StringEscapeUtils.escapeSql(mainQuery) + "')"; } int offset = wrapper.getOffset().get(); if (offset != -1) { queryHelper.offset = offset; } int limit = wrapper.getLimit().get(); queryHelper.limit = limit != -1 ? 
limit : Integer.MAX_VALUE; return queryHelper; } <T> void processQueryWithCount( PgConnection connection, QueryHelper queryHelper, String statMethod, Function<TotaledResults, T> resultSetMapper, Handler<AsyncResult<T>> replyHandler) { long start = System.nanoTime(); log.debug("Attempting count query: " + queryHelper.countQuery); connection.query(queryHelper.countQuery).execute(countQueryResult -> { try { if (countQueryResult.failed()) { log.error("query with count: " + countQueryResult.cause().getMessage() + " - " + queryHelper.countQuery, countQueryResult.cause()); replyHandler.handle(Future.failedFuture(countQueryResult.cause())); return; } int total = countQueryResult.result().iterator().next().getInteger(0); long countQueryTime = (System.nanoTime() - start); StatsTracker.addStatElement(STATS_KEY + COUNT_STAT_METHOD, countQueryTime); log.debug("timer: get " + queryHelper.countQuery + " (ns) " + countQueryTime); if (total <= queryHelper.offset) { log.debug("Skipping query due to no results expected!"); RowSet<Row> emptySet = null; replyHandler.handle(Future.succeededFuture(resultSetMapper.apply(new TotaledResults(emptySet, total)))); return; } processQuery(connection, queryHelper, total, statMethod, resultSetMapper, replyHandler); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } }); } <T> void processQuery( PgConnection connection, QueryHelper queryHelper, Integer total, String statMethod, Function<TotaledResults, T> resultSetMapper, Handler<AsyncResult<T>> replyHandler ) { try { queryAndAnalyze(connection, queryHelper.selectQuery, statMethod, query -> { if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } replyHandler.handle(Future.succeededFuture(resultSetMapper.apply(new TotaledResults(query.result(), total)))); }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } private FacetManager buildFacetManager(CQLWrapper wrapper, QueryHelper queryHelper, String mainQuery, List<FacetField> facets) { FacetManager fm = new FacetManager(schemaName + DOT + queryHelper.table); if (wrapper.getWhereClause().isEmpty()) { fm.setWhere(" " + wrapper.getWhereClause()); } fm.setSupportFacets(facets); fm.setIdField(ID_FIELD); fm.setLimitClause(wrapper.getLimit().toString()); fm.setOffsetClause(wrapper.getOffset().toString()); fm.setMainQuery(mainQuery); fm.setSchema(schemaName); fm.setCountQuery(queryHelper.countQuery); return fm; } /** * pass in an entity that is fully / partially populated and the query will return all records matching the * populated fields in the entity - note that this queries the jsonb object, so should not be used to query external * fields * * @param <T> type of the query entity and the result entity * @param table database table to query * @param entity contains the fields to use for the query * @param replyHandler the result contains the entities found */ public <T> void get(String table, T entity, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, entity, returnCount, true /*returnIdField*/, replyHandler); } public <T> void get(String table, T entity, boolean returnCount, boolean returnIdField, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, entity, new String[]{DEFAULT_JSONB_FIELD_NAME}, returnCount, returnIdField, replyHandler); } public <T> void get(String table, T entity, String[] fields, boolean returnCount, boolean returnIdField, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, 
entity, fields, returnCount, returnIdField, -1, -1, replyHandler); } public <T> void get(String table, T entity, String[] fields, boolean returnCount, boolean returnIdField, int offset, int limit, Handler<AsyncResult<Results<T>>> replyHandler) { Criterion criterion = new Criterion(); if (offset != -1) { criterion.setOffset(new Offset(offset)); } if (limit != -1) { criterion.setLimit(new Limit(limit)); } String fieldsStr = Arrays.toString(fields); Class<T> clazz = (Class<T>) entity.getClass(); get(null, table, clazz, fieldsStr.substring(1, fieldsStr.length() - 1), criterion, returnCount, returnIdField, null, replyHandler); } /** * select query * @param table - table to query * @param clazz - class of objects to be returned * @param filter - see Criterion class * @param returnCount - whether to return the amount of records matching the query * @param replyHandler * @throws Exception */ public <T> void get(String table, Class<T> clazz, Criterion filter, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, filter, returnCount, false /*setId*/, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fields, filter, returnCount, setId, null /*facets*/, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { String distinctOn = null; boolean returnIdField = true; get(table, clazz, fields, filter, returnCount, returnIdField, facets, distinctOn, replyHandler); } <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, boolean returnIdField, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler) { String fieldsStr = Arrays.toString(fields); String fieldName = fieldsStr.substring(1, fieldsStr.length() - 1); get(table, clazz, fieldName, filter, returnCount, returnIdField, facets, distinctOn, replyHandler); } <T> void get(String table, Class<T> clazz, String fieldName, CQLWrapper filter, boolean returnCount, boolean returnIdField, List<FacetField> facets, String distinctOn, Handler<AsyncResult<Results<T>>> replyHandler) { getSQLConnection(conn -> doGet(conn, table, clazz, fieldName, filter, returnCount, returnIdField, facets, distinctOn, closeAndHandleResult(conn, replyHandler))); } /** * * @param <T> * @param table * @param clazz * @param fields * @param filter * @param returnCount * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String[] fields, String filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { String where = ""; if(filter != null){ where = filter; } String fieldsStr = Arrays.toString(fields); get(table, clazz, fieldsStr.substring(1, fieldsStr.length()-1), where, returnCount, true, setId, replyHandler); } /** * * @param <T> * @param table * @param clazz * @param filter * @param returnCount * @param setId - unused, the database trigger will always set jsonb->'id' 
automatically * @param replyHandler * @deprecated use get with CQLWrapper or Criterion instead */ @Deprecated public <T> void get(String table, Class<T> clazz, String filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { String where = ""; if(filter != null){ where = filter; } get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, where, returnCount, setId, replyHandler); } public <T> void get(String table, Class<T> clazz, String[] fields, CQLWrapper filter, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, fields, filter, returnCount, false /* setId */, replyHandler); } /* PGUTIL USED VERSION */ public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, filter, returnCount, false /*setId*/, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @deprecated use {@link #get(String, Class, CQLWrapper, boolean, Handler)} instead. */ @Deprecated public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, filter, returnCount, setId, replyHandler); } public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, new String[]{DEFAULT_JSONB_FIELD_NAME}, filter, returnCount, false /* setId */, facets, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @deprecated use {@link #get(String, Class, CQLWrapper, boolean, List, Handler)} instead. 
*/ @Deprecated public <T> void get(String table, Class<T> clazz, CQLWrapper filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, filter, returnCount, facets, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(table, clazz, filter, returnCount, setId, null, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ public <T> void get(AsyncResult<SQLConnection> conn, String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, Handler<AsyncResult<Results<T>>> replyHandler) { get(conn, table, clazz, filter, returnCount, setId, null, replyHandler); } /** * select query * @param table - table to query * @param clazz - class of objects to be returned * @param filter - see Criterion class * @param returnCount - whether to return the amount of records matching the query * @param setId - unused, the database trigger will always set jsonb->'id' automatically * @param replyHandler * @throws Exception */ public <T> void get(String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(null, table, clazz, filter, returnCount, setId, facets, replyHandler); } /** * @param setId - unused, the database trigger will always set jsonb->'id' automatically */ @SuppressWarnings({"squid:S00107"}) // Method has more than 7 parameters public <T> void get(AsyncResult<SQLConnection> conn, String table, Class<T> clazz, Criterion filter, boolean returnCount, boolean setId, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { get(conn, table, clazz, DEFAULT_JSONB_FIELD_NAME, filter, returnCount, false, facets, replyHandler); } @SuppressWarnings({"squid:S00107"}) // Method has more than 7 parameters <T> void get(AsyncResult<SQLConnection> conn, String table, Class<T> clazz, String fieldName, Criterion filter, boolean returnCount, boolean returnIdField, List<FacetField> facets, Handler<AsyncResult<Results<T>>> replyHandler) { CQLWrapper cqlWrapper = new CQLWrapper(filter); if (conn == null) { get(table, clazz, fieldName, cqlWrapper, returnCount, returnIdField, facets, null, replyHandler); } else { doGet(conn, table, clazz, fieldName, cqlWrapper, returnCount, returnIdField, facets, null, replyHandler); } } /** * A FunctionalInterface that may throw an Exception. * * @param <T> input type * @param <R> output type * @param <E> the type of Exception */ @FunctionalInterface public interface FunctionWithException<T, R, E extends Exception> { /** * @param t some input * @return some output * @throws Exception of type E */ R apply(T t) throws E; } /** * Get the jsonb by id. 
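   * <p>Callers normally use the public variants below; a hypothetical sketch with a
   * generated {@code User} POJO and a {@code "users"} table:
   * <pre>
   * postgresClient.getById("users", id, User.class, reply -> {
   *   User user = reply.result();   // null when no record matches the id
   * });
   * </pre>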
* @param table the table to search in * @param id the value of the id field * @param function how to convert the (String encoded) JSON * @param replyHandler the result after applying function */ private <R> void getById(String table, String id, FunctionWithException<String, R, Exception> function, Handler<AsyncResult<R>> replyHandler) { getConnection(res -> { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } PgConnection connection = res.result(); String sql = SELECT + DEFAULT_JSONB_FIELD_NAME + FROM + schemaName + DOT + table + WHERE + ID_FIELD + "= $1"; try { connection.preparedQuery(sql).execute(Tuple.of(UUID.fromString(id)), query -> { connection.close(); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } RowSet<Row> result = query.result(); if (result.size() == 0) { replyHandler.handle(Future.succeededFuture(null)); return; } try { String entity = result.iterator().next().getValue(0).toString(); R r = function.apply(entity); replyHandler.handle(Future.succeededFuture(r)); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); } /** * Get the jsonb by id and return it as a String. * @param table the table to search in * @param id the value of the id field * @param replyHandler the result; the JSON is encoded as a String */ public void getByIdAsString(String table, String id, Handler<AsyncResult<String>> replyHandler) { getById(table, id, string -> string, replyHandler); } /** * Get the jsonb by id and return it as a JsonObject. * @param table the table to search in * @param id the value of the id field * @param replyHandler the result; the JSON is encoded as a JsonObject */ public void getById(String table, String id, Handler<AsyncResult<JsonObject>> replyHandler) { getById(table, id, JsonObject::new, replyHandler); } /** * Get the jsonb by id and return it as a pojo of type T. * @param table the table to search in * @param id the value of the id field * @param clazz the type of the pojo * @param replyHandler the result; the JSON is converted into a T pojo. */ public <T> void getById(String table, String id, Class<T> clazz, Handler<AsyncResult<T>> replyHandler) { getById(table, id, json -> mapper.readValue(json, clazz), replyHandler); } /** * Get jsonb by id for a list of ids. * <p> * The result is a map of all found records where the key is the id * and the value is the jsonb. 
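   * <p>Illustrative sketch (the {@code "users"} table and {@code User} POJO are hypothetical):
   * <pre>
   * JsonArray ids = new JsonArray().add(id1).add(id2);
   * postgresClient.getById("users", ids, User.class, reply -> {
   *   Map found = reply.result();   // key: id, value: User; ids without a record are simply absent
   * });
   * </pre>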
* * @param table the table to search in * @param ids the values of the id field * @param function how to convert the (String encoded) JSON * @param replyHandler the result after applying function */ private <R> void getById(String table, JsonArray ids, FunctionWithException<String, R, Exception> function, Handler<AsyncResult<Map<String,R>>> replyHandler) { if (ids == null || ids.isEmpty()) { replyHandler.handle(Future.succeededFuture(Collections.emptyMap())); return; } getConnection(res -> { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } Tuple list = Tuple.tuple(); for (int i = 0; i < ids.size(); i++) { list.addUUID(UUID.fromString(ids.getString(i))); } PgConnection connection = res.result(); StringBuilder sql = new StringBuilder() .append(SELECT).append(ID_FIELD).append(", ").append(DEFAULT_JSONB_FIELD_NAME) .append(FROM).append(schemaName).append(DOT).append(table) .append(WHERE).append(ID_FIELD).append(" IN ($1"); for (int i = 2; i <= ids.size(); i++) { sql.append(", $" + i); } sql.append(")"); connection.preparedQuery(sql.toString()).execute(list, query -> { connection.close(); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } try { Map<String,R> result = new HashMap<>(); Iterator<Row> iterator = query.result().iterator(); while (iterator.hasNext()) { Row row = iterator.next(); result.put(row.getValue(0).toString(), function.apply(row.getValue(1).toString())); } replyHandler.handle(Future.succeededFuture(result)); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); }); } /** * Get the jsonb by id for a list of ids and return each jsonb as a String. * @param table the table to search in * @param ids the values of the id field * @param replyHandler the result; the JSON is encoded as a String */ public void getByIdAsString(String table, JsonArray ids, Handler<AsyncResult<Map<String,String>>> replyHandler) { getById(table, ids, string -> string, replyHandler); } /** * Get the jsonb by id for a list of ids and return each jsonb as a JsonObject. * @param table the table to search in * @param ids the values of the id field * @param replyHandler the result; the JSON is encoded as a JsonObject */ public void getById(String table, JsonArray ids, Handler<AsyncResult<Map<String,JsonObject>>> replyHandler) { getById(table, ids, JsonObject::new, replyHandler); } /** * Get the jsonb by id for a list of ids and return each jsonb as pojo of type T. * @param table the table to search in * @param ids the values of the id field * @param clazz the type of the pojo * @param replyHandler the result; the JSON is encoded as a T pojo */ public <T> void getById(String table, JsonArray ids, Class<T> clazz, Handler<AsyncResult<Map<String,T>>> replyHandler) { getById(table, ids, json -> mapper.readValue(json, clazz), replyHandler); } static class ResultsHelper<T> { final List<T> list; final Map<String, org.folio.rest.jaxrs.model.Facet> facets; final RowSet<Row> resultSet; final Class<T> clazz; int total; int offset; boolean facet; public ResultsHelper(RowSet<Row> resultSet, int total, Class<T> clazz) { this.list = new ArrayList<>(); this.facets = new HashMap<>(); this.resultSet = resultSet; this.clazz= clazz; this.total = total; this.offset = 0; } public ResultsHelper(Class<T> clazz) { this.list = new ArrayList<>(); this.facets = new HashMap<>(); this.resultSet = null; this.clazz= clazz; this.offset = 0; } } /** * converts a result set into pojos - handles 3 types of queries: * 1. 
a regular query will return N rows, where each row contains Y columns. one of those columns is the jsonb * column which is mapped into a pojo. each row will also contain the count column (if count was requested for * the query), other fields , like updated date may also be returned if they were requested in the select. * 1a. note that there is an attempt to map external (non jsonb) columns to fields in the pojo. for example, * a column called update_date will attempt to map its value to a field called updateDate in the pojo. however, * for this to happen, the query must select the update_date -> select id,jsonb,update_date from .... * 2. a facet query returns 2 columns, a uuid and a jsonb column. the results of the query are returned as * id and json rows. facets are returned as jsonb values: * {"facetValues": [{"count": 542,"value": "11 ed."}], "type": "name"} * (along with a static '00000000-0000-0000-0000-000000000000' uuid) * the count for a facet query is returned in the following manner: * {"count": 501312} , with a static uuid as the facets * 3. audit queries - queries that query an audit table, meaning the clazz parameter passed in has a jsonb member. * * @param rs * @param total * @param clazz * @return */ <T> Results<T> processResults(RowSet<Row> rs, Integer total, int offset, int limit, Class<T> clazz) { long start = System.nanoTime(); if (total == null) { // NOTE: this may not be an accurate total, may be better for it to be 0 or null total = rs.rowCount(); } ResultsHelper<T> resultsHelper = new ResultsHelper<>(rs, total, clazz); deserializeResults(resultsHelper); ResultInfo resultInfo = new ResultInfo(); resultsHelper.facets.forEach((k , v) -> resultInfo.getFacets().add(v)); Integer totalRecords = getTotalRecords(resultsHelper.list.size(), resultsHelper.total, offset, limit); resultInfo.setTotalRecords(totalRecords); Results<T> results = new Results<>(); results.setResults(resultsHelper.list); results.setResultInfo(resultInfo); statsTracker(PROCESS_RESULTS_STAT_METHOD, clazz.getSimpleName(), start); return results; } /** * * @param resultsHelper */ <T> void deserializeResults(ResultsHelper<T> resultsHelper) { if (resultsHelper.resultSet == null) { return; } boolean isAuditFlavored = isAuditFlavored(resultsHelper.clazz); Map<String, Method> externalColumnSetters = new HashMap<>(); collectExternalColumnSetters( resultsHelper.resultSet.columnsNames(), resultsHelper.clazz, isAuditFlavored, externalColumnSetters ); RowIterator<Row> iterator = resultsHelper.resultSet.iterator(); while (iterator.hasNext()) { Row row = iterator.next(); try { T objRow = (T) deserializeRow(resultsHelper, externalColumnSetters, isAuditFlavored, row); if (!resultsHelper.facet) { resultsHelper.list.add(objRow); } } catch (Exception e) { log.error(e.getMessage(), e); resultsHelper.list.add(null); } } } /** * * @param resultsHelper * @param externalColumnSetters * @param isAuditFlavored * @param row */ <T> Object deserializeRow( ResultsHelper<T> resultsHelper, Map<String, Method> externalColumnSetters, boolean isAuditFlavored, Row row ) throws IOException, InstantiationException, IllegalAccessException, InvocationTargetException { Object jo = row.getValue(DEFAULT_JSONB_FIELD_NAME); Object o = null; resultsHelper.facet = false; if (!isAuditFlavored && jo != null) { try { // is this a facet entry - if so process it, otherwise will throw an exception // and continue trying to map to the pojos o = mapper.readValue(jo.toString(), org.folio.rest.jaxrs.model.Facet.class); org.folio.rest.jaxrs.model.Facet of = 
        (org.folio.rest.jaxrs.model.Facet) o;
        org.folio.rest.jaxrs.model.Facet facet = resultsHelper.facets.get(of.getType());
        if (facet == null) {
          resultsHelper.facets.put(of.getType(), of);
        } else {
          facet.getFacetValues().add(of.getFacetValues().get(0));
        }
        resultsHelper.facet = true;
        return o;
      } catch (Exception e) {
        o = mapper.readValue(jo.toString(), resultsHelper.clazz);
      }
    } else {
      o = resultsHelper.clazz.newInstance();
    }

    populateExternalColumns(externalColumnSetters, o, row);

    return o;
  }

  /**
   * An exception to mapping the jsonb column and the fields within the json
   * to the corresponding clazz is the case where the
   * clazz itself has a jsonb field (member), for example an audit class which contains a field called
   * jsonb - meaning it encapsulates the real object, for example for auditing purposes
   * (it contains the jsonb object as well as some other fields). In such a
   * case, do not map the clazz to the content of the jsonb - but rather set the jsonb named field of the clazz
   * to the jsonb column value.
   *
   * @param clazz
   * @return
   */
  <T> boolean isAuditFlavored(Class<T> clazz) {
    boolean isAuditFlavored = false;
    try {
      clazz.getDeclaredField(DEFAULT_JSONB_FIELD_NAME);
      isAuditFlavored = true;
    } catch (NoSuchFieldException nse) {
      if (log.isDebugEnabled()) {
        log.debug("non audit table, no " + DEFAULT_JSONB_FIELD_NAME + " found in json");
      }
    }
    return isAuditFlavored;
  }

  /**
   * Collect the class setter methods needed to populate the jsonb object from external columns,
   * honoring the audit mode.
   *
   * @param columnNames
   * @param clazz
   * @param isAuditFlavored
   * @param externalColumnSetters
   */
  <T> void collectExternalColumnSetters(List<String> columnNames, Class<T> clazz, boolean isAuditFlavored,
      Map<String, Method> externalColumnSetters) {
    for (String columnName : columnNames) {
      if ((isAuditFlavored || !columnName.equals(DEFAULT_JSONB_FIELD_NAME))
          && !columnName.equals(ID_FIELD)) {
        String methodName = databaseFieldToPojoSetter(columnName);
        for (Method method : clazz.getMethods()) {
          if (method.getName().equals(methodName)) {
            externalColumnSetters.put(columnName, method);
          }
        }
      }
    }
  }

  /**
   * Populate the jsonb object with values from external columns - for example:
   * if there is an update_date column in the record - try to populate a field updateDate in the
   * jsonb object - this allows using the DB for things like triggers to populate the update_date
   * automatically, but still push them into the jsonb object - the json schema must declare this field
   * as well - also supports the audit mode described above.
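   * <p>Hypothetical sketch: a trigger-maintained {@code creation_date} column only reaches the
   * pojo's {@code setCreationDate(..)} setter when the column is selected explicitly, e.g.
   * <pre>
   *   SELECT id, jsonb, creation_date FROM diku_mod_inventory.item ...
   * </pre>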
* NOTE: that the query must request any field it wants to get populated into the jsonb obj * * @param externalColumnSetters * @param o * @param row */ void populateExternalColumns(Map<String, Method> externalColumnSetters, Object o, Row row) throws InvocationTargetException, IllegalAccessException { for (Map.Entry<String, Method> entry : externalColumnSetters.entrySet()) { String columnName = entry.getKey(); Method method = entry.getValue(); String[] stringArray = row.getStringArray(columnName); if (stringArray != null) { method.invoke(o, Arrays.asList(stringArray)); } else { method.invoke(o, row.getValue(columnName)); } } } /** * assumes column names are all lower case with multi word column names * separated by an '_' * @param str * @return */ String databaseFieldToPojoSetter(String str) { StringBuilder sb = new StringBuilder(str); sb.replace(0, 1, String.valueOf(Character.toUpperCase(sb.charAt(0)))); for (int i = 0; i < sb.length(); i++) { if (sb.charAt(i) == '_') { sb.deleteCharAt(i); sb.replace(i, i + 1, String.valueOf(Character.toUpperCase(sb.charAt(i)))); } } return "set" + sb.toString(); } /** * Run a select query. * * <p>To update see {@link #execute(String, Handler)}. * * @param sql - the sql query to run * @param replyHandler the query result or the failure */ public void select(String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> select(conn, sql, closeAndHandleResult(conn, replyHandler))); } /** * Run a select query. * * <p>To update see {@link #execute(String, Handler)}. * @param sql - the sql query to run * @param queryTimeout query timeout in milliseconds, or 0 for no timeout * @param replyHandler the query result or the failure */ public void select(String sql, int queryTimeout, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(queryTimeout, conn -> select(conn, sql, closeAndHandleResult(conn, replyHandler)) ); } static void queryAndAnalyze(PgConnection conn, String sql, String statMethod, Handler<AsyncResult<RowSet<Row>>> replyHandler) { long start = System.nanoTime(); conn.query(sql).execute(res -> { long queryTime = (System.nanoTime() - start); StatsTracker.addStatElement(STATS_KEY + statMethod, queryTime); if (res.failed()) { log.error("queryAndAnalyze: " + res.cause().getMessage() + " - " + sql, res.cause()); replyHandler.handle(Future.failedFuture(res.cause())); return; } if (queryTime >= explainQueryThreshold * 1000000) { final String explainQuery = "EXPLAIN ANALYZE " + sql; conn.query(explainQuery).execute(explain -> { replyHandler.handle(res); // not before, so we have conn if it gets closed if (explain.failed()) { log.warn(explainQuery + ": ", explain.cause().getMessage(), explain.cause()); return; } StringBuilder e = new StringBuilder(explainQuery); RowIterator<Row> iterator = explain.result().iterator(); while (iterator.hasNext()) { Row row = iterator.next(); e.append('\n').append(row.getString(0)); } log.warn(e.toString()); }); } else { replyHandler.handle(res); } }); } /** * Run a select query. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param replyHandler The query result or the failure. 
*/ public void select(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } queryAndAnalyze(conn.result().conn, sql, GET_STAT_METHOD, replyHandler); } catch (Exception e) { log.error("select sql: " + e.getMessage() + " - " + sql, e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a parameterized/prepared select query. * * <p>To update see {@link #execute(String, Tuple, Handler)}. * * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void select(String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> select(conn, sql, params, closeAndHandleResult(conn, replyHandler))); } /** * Run a parameterized/prepared select query. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Tuple, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void select(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } conn.result().conn.preparedQuery(sql).execute(params, replyHandler); } catch (Exception e) { log.error("select sql: " + e.getMessage() + " - " + sql, e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a select query and return the first record, or null if there is no result. * * <p>To update see {@link #execute(String, Handler)}. * * @param sql The sql query to run. * @param replyHandler The query result or the failure. */ public void selectSingle(String sql, Handler<AsyncResult<Row>> replyHandler) { getSQLConnection(conn -> selectSingle(conn, sql, closeAndHandleResult(conn, replyHandler))); } /** * Run a select query and return the first record, or null if there is no result. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param replyHandler The query result or the failure. */ public void selectSingle(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<Row>> replyHandler) { selectSingle(conn, sql, Tuple.tuple(), replyHandler); } /** * Run a parameterized/prepared select query and return the first record, or null if there is no result. * * <p>To update see {@link #execute(String, Handler)}. * * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. 
*/ public void selectSingle(String sql, Tuple params, Handler<AsyncResult<Row>> replyHandler) { getSQLConnection(conn -> selectSingle(conn, sql, params, closeAndHandleResult(conn, replyHandler))); } static void selectReturn(AsyncResult<RowSet<Row>> res, Handler<AsyncResult<Row>> replyHandler) { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } try { if (!res.result().iterator().hasNext()) { replyHandler.handle(Future.succeededFuture(null)); return; } replyHandler.handle(Future.succeededFuture(res.result().iterator().next())); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a parameterized/prepared select query and return the first record, or null if there is no result. * * <p>This never closes the connection conn. * * <p>To update see {@link #execute(AsyncResult, String, Handler)}. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void selectSingle(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<Row>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } if (params.size() == 0) { conn.result().conn.query(sql).execute(res -> selectReturn(res, replyHandler)); } else { conn.result().conn.preparedQuery(sql).execute(params, res -> selectReturn(res, replyHandler)); } } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Run a parameterized/prepared select query returning with an SQLRowStream. * * <p>This never closes the connection conn. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param replyHandler The query result or the failure. */ public void selectStream(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<RowStream<Row>>> replyHandler) { selectStream(conn, sql, Tuple.tuple(), replyHandler); } /** * Run a parameterized/prepared select query returning with an SQLRowStream. * * <p>This never closes the connection conn. * * @param conn The connection on which to execute the query on. * @param sql The sql query to run. * @param params The parameters for the placeholders in sql. * @param replyHandler The query result or the failure. */ public void selectStream(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<RowStream<Row>>> replyHandler) { selectStream(conn, sql, params, STREAM_GET_DEFAULT_CHUNK_SIZE, replyHandler); } void selectStream(AsyncResult<SQLConnection> conn, String sql, Tuple params, int chunkSize, Handler<AsyncResult<RowStream<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } final Transaction tx = conn.result().tx; tx.prepare(sql, res -> { if (res.failed()) { log.error(res.cause().getMessage(), res.cause()); replyHandler.handle(Future.failedFuture(res.cause())); return; } PreparedStatement pq = res.result(); RowStream<Row> rowStream = pq.createStream(chunkSize, params); replyHandler.handle(Future.succeededFuture(rowStream)); }); } catch (Exception e) { log.error("select stream sql: " + e.getMessage() + " - " + sql, e); replyHandler.handle(Future.failedFuture(e)); } } /** * Execute an INSERT, UPDATE or DELETE statement. 
* @param sql - the sql to run * @param replyHandler - the result handler with UpdateResult */ public void execute(String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler) { execute(sql, Tuple.tuple(), replyHandler); } /** * Get vertx-pg-client connection * @param replyHandler */ public void getConnection(Handler<AsyncResult<PgConnection>> replyHandler) { getClient().getConnection(x -> { if (x.failed()) { replyHandler.handle(Future.failedFuture(x.cause())); return; } try { replyHandler.handle(Future.succeededFuture((PgConnection) x.result())); } catch (Exception e) { replyHandler.handle(Future.failedFuture(e)); } }); } void getSQLConnection(Handler<AsyncResult<SQLConnection>> handler) { getSQLConnection(0, handler); } void getSQLConnection(int queryTimeout, Handler<AsyncResult<SQLConnection>> handler) { getConnection(res -> { if (res.failed()) { handler.handle(Future.failedFuture(res.cause())); return; } PgConnection pgConnection = res.result(); if (queryTimeout == 0) { handler.handle(Future.succeededFuture(new SQLConnection(pgConnection, null, null))); return; } long timerId = vertx.setTimer(queryTimeout, id -> pgConnection.cancelRequest(ar -> { if (ar.succeeded()) { log.warn( String.format("Cancelling request due to timeout after : %d ms", queryTimeout)); } else { log.warn("Failed to send cancelling request", ar.cause()); } })); SQLConnection sqlConnection = new SQLConnection(pgConnection, null, timerId); handler.handle(Future.succeededFuture(sqlConnection)); }); } private void cancelConnectionTimeoutTimer(SQLConnection sqlConnection) { Long timeId = sqlConnection.timerId; if (timeId != null) { vertx.cancelTimer(timeId); } } /** * Execute a parameterized/prepared INSERT, UPDATE or DELETE statement. * @param sql The SQL statement to run. * @param params The parameters for the placeholders in sql. * @param replyHandler */ public void execute(String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { getSQLConnection(conn -> execute(conn, sql, params, closeAndHandleResult(conn, replyHandler))); } /** * Send an INSERT, UPDATE or DELETE statement within a transaction. * * <p>Example: * <pre> * postgresClient.startTx(beginTx -> { * try { * postgresClient.execute(beginTx, sql, reply -> {... * </pre> * @param conn - connection - see {@link #startTx(Handler)} * @param sql - the sql to run * @param replyHandler - reply handler with UpdateResult */ public void execute(AsyncResult<SQLConnection> conn, String sql, Handler<AsyncResult<RowSet<Row>>> replyHandler){ execute(conn, sql, Tuple.tuple(), replyHandler); } /** * Send an INSERT, UPDATE or DELETE parameterized/prepared statement within a transaction. * * <p>Example: * <pre> * postgresClient.startTx(beginTx -> { * try { * postgresClient.execute(beginTx, sql, params, reply -> {... * </pre> * @param conn - connection - see {@link #startTx(Handler)} * @param sql - the sql to run * @param replyHandler - reply handler with UpdateResult */ public void execute(AsyncResult<SQLConnection> conn, String sql, Tuple params, Handler<AsyncResult<RowSet<Row>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } PgConnection connection = conn.result().conn; long start = System.nanoTime(); // more than optimization.. 
preparedQuery does not work for multiple SQL statements if (params.size() == 0) { connection.query(sql).execute(query -> { statsTracker(EXECUTE_STAT_METHOD, sql, start); replyHandler.handle(query); }); } else { connection.preparedQuery(sql).execute(params, query -> { statsTracker(EXECUTE_STAT_METHOD, sql, start); replyHandler.handle(query); }); } } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Create a parameterized/prepared INSERT, UPDATE or DELETE statement and * run it with a list of sets of parameters. * * <p>Example: * <pre> * postgresClient.startTx(beginTx -> { * try { * postgresClient.execute(beginTx, sql, params, reply -> {... * </pre> * @param conn - connection - see {@link #startTx(Handler)} * @param sql - the sql to run * @param params - there is one list entry for each sql invocation containing the parameters for the placeholders. * @param replyHandler - reply handler with one UpdateResult for each list entry of params. */ public void execute(AsyncResult<SQLConnection> conn, String sql, List<Tuple> params, Handler<AsyncResult<List<RowSet<Row>>>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } PgConnection sqlConnection = conn.result().conn; List<RowSet<Row>> results = new ArrayList<>(params.size()); Iterator<Tuple> iterator = params.iterator(); Runnable task = new Runnable() { @Override public void run() { if (! iterator.hasNext()) { replyHandler.handle(Future.succeededFuture(results)); return; } Tuple params1 = iterator.next(); sqlConnection.preparedQuery(sql).execute(params1, query -> { if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); return; } results.add(query.result()); this.run(); }); } }; task.run(); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * Create a parameterized/prepared INSERT, UPDATE or DELETE statement and * run it with a list of sets of parameters. Wrap all in a transaction. * * @param sql - the sql to run * @param params - there is one list entry for each sql invocation containing the parameters for the placeholders. * @param replyHandler - reply handler with one UpdateResult for each list entry of params. 
*/ public void execute(String sql, List<Tuple> params, Handler<AsyncResult<List<RowSet<Row>>>> replyHandler) { startTx(res -> { if (res.failed()) { replyHandler.handle(Future.failedFuture(res.cause())); return; } execute(res, sql, params, result -> { if (result.failed()) { rollbackTx(res, rollback -> replyHandler.handle(result)); return; } endTx(res, end -> { if (end.failed()) { replyHandler.handle(Future.failedFuture(end.cause())); return; } replyHandler.handle(result); }); }); }); } /** * For queries where you only want to populate the where clause * <br/> * See {@link #persistentlyCacheResult(String, String, Handler) } * @param cacheName * @param tableName * @param filter * @param replyHandler */ public void persistentlyCacheResult(String cacheName, String tableName, CQLWrapper filter, Handler<AsyncResult<Integer>> replyHandler) { String where = ""; if (filter != null) { try { where = filter.toString(); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); return; } } String q = "SELECT * FROM " + schemaName + DOT + tableName + SPACE + where; persistentlyCacheResult(cacheName, q, replyHandler); } /** * For queries where you only want to populate the where clause * <br/> * See {@link #persistentlyCacheResult(String, String, Handler) } * @param cacheName * @param tableName * @param filter * @param replyHandler */ public void persistentlyCacheResult(String cacheName, String tableName, Criterion filter, Handler<AsyncResult<Integer>> replyHandler) { String where = ""; if (filter != null) { where = filter.toString(); } String q = "SELECT * FROM " + schemaName + DOT + tableName + SPACE + where; persistentlyCacheResult(cacheName, q, replyHandler); } /** * Create a table, a type of materialized view, with the results of a specific query. * This can be very helpful when the query is complex and the data is relatively static. * This will create a table populated with the results from the query (sql2cache). * Further queries can then be run on this table (cacheName) instead of re-executing the complex * sql query over and over again. * <br/> * 1. The table will not track subsequent changes to the source tables * <br/> * 2. The table should be DROPPED when not needed anymore * <br/> * 3. 
To Refresh the table, DROP and Re-call this function * <br/> * Use carefully, index support on created table to be added * @param cacheName - name of the table holding the results of the query * @param sql2cache - the sql query to use to populate the table * @param replyHandler */ public void persistentlyCacheResult(String cacheName, String sql2cache, Handler<AsyncResult<Integer>> replyHandler) { getSQLConnection(conn -> persistentlyCacheResult(conn, cacheName, sql2cache, closeAndHandleResult(conn, replyHandler))); } private void persistentlyCacheResult(AsyncResult<SQLConnection> conn, String cacheName, String sql2cache, Handler<AsyncResult<Integer>> replyHandler) { try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } long start = System.nanoTime(); PgConnection connection = conn.result().conn; String q = "CREATE UNLOGGED TABLE IF NOT EXISTS " + schemaName + DOT + cacheName +" AS " + sql2cache; log.info(q); connection.query(q).execute( query -> { statsTracker("persistentlyCacheResult", "CREATE TABLE AS", start); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); } else { replyHandler.handle(Future.succeededFuture(query.result().rowCount())); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } public void removePersistentCacheResult(String cacheName, Handler<AsyncResult<Integer>> replyHandler) { getSQLConnection(conn -> removePersistentCacheResult(conn, cacheName, closeAndHandleResult(conn, replyHandler))); } private void removePersistentCacheResult(AsyncResult<SQLConnection> conn, String cacheName, Handler<AsyncResult<Integer>> replyHandler){ try { if (conn.failed()) { replyHandler.handle(Future.failedFuture(conn.cause())); return; } long start = System.nanoTime(); PgConnection connection = conn.result().conn; connection.query("DROP TABLE " + schemaName + DOT + cacheName).execute(query -> { statsTracker("removePersistentCacheResult", "DROP TABLE " + cacheName, start); if (query.failed()) { replyHandler.handle(Future.failedFuture(query.cause())); } else { replyHandler.handle(Future.succeededFuture(query.result().rowCount())); } }); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } /** * @param identifier the identifier to check * @return if the identifier is a valid Postgres identifier and does not contain * letters with diacritical marks or non-Latin letters */ public boolean isValidPostgresIdentifier(String identifier) { return POSTGRES_IDENTIFIER.matcher(identifier).matches(); } /** * Drop the database if it exists. * @param database database name * @throws SQLException on database error * @throws IllegalArgumentException if database name is too long, contains * illegal characters or letters with diacritical marks or non-Latin letters */ public void dropCreateDatabase(String database) throws SQLException { if (! isValidPostgresIdentifier(database)) { throw new IllegalArgumentException("Illegal character in database name: " + database); } try (Connection connection = getStandaloneConnection("postgres", true); Statement statement = connection.createStatement()) { statement.executeUpdate("DROP DATABASE IF EXISTS " + database); //NOSONAR statement.executeUpdate("CREATE DATABASE " + database); //NOSONAR } } /** * Split string into lines. */ private static List<String> lines(String string) { return Arrays.asList(string.split("\\r\\n|\\n|\\r")); } /** * Split the sqlFile into SQL statements. 
* * <a href="https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-DOLLAR-QUOTING"> * Dollar-quoted string constants</a> with $$ or $[0-9a-zA-Z_]+$ are preserved. */ static String [] splitSqlStatements(String sqlFile) { List<String> lines = new ArrayList<>(); Matcher matcher = POSTGRES_DOLLAR_QUOTING.matcher(sqlFile); int searchStart = 0; while (matcher.find()) { lines.addAll(lines(sqlFile.substring(searchStart, matcher.start()))); lines.add(matcher.group()); searchStart = matcher.end(); } lines.addAll(lines(sqlFile.substring(searchStart))); return lines.toArray(new String [0]); } @SuppressWarnings("checkstyle:EmptyBlock") static String [] preprocessSqlStatements(String sqlFile) throws Exception { StringBuilder singleStatement = new StringBuilder(); String[] allLines = splitSqlStatements(sqlFile); List<String> execStatements = new ArrayList<>(); boolean inCopy = false; for (int i = 0; i < allLines.length; i++) { if (allLines[i].toUpperCase().matches("^\\s*(CREATE USER|CREATE ROLE).*") && AES.getSecretKey() != null) { final Pattern pattern = Pattern.compile("PASSWORD\\s*'(.+?)'\\s*", Pattern.CASE_INSENSITIVE); final Matcher matcher = pattern.matcher(allLines[i]); if(matcher.find()){ /** password argument indicated in the create user / role statement */ String newPassword = createPassword(matcher.group(1)); allLines[i] = matcher.replaceFirst(" PASSWORD '" + newPassword +"' "); } } if (allLines[i].trim().startsWith("\ufeff--") || allLines[i].trim().length() == 0 || allLines[i].trim().startsWith("--")) { // this is an sql comment, skip } else if (POSTGRES_COPY_FROM_STDIN.matcher(allLines[i]).matches()) { singleStatement.append(allLines[i]); inCopy = true; } else if (inCopy && (allLines[i].trim().equals("\\."))) { inCopy = false; execStatements.add( singleStatement.toString() ); singleStatement = new StringBuilder(); } else if (allLines[i].trim().endsWith(SEMI_COLON) && !inCopy) { execStatements.add( singleStatement.append(SPACE + allLines[i]).toString() ); singleStatement = new StringBuilder(); } else { if (inCopy) { singleStatement.append("\n"); } else { singleStatement.append(SPACE); } singleStatement.append(allLines[i]); } } String lastStatement = singleStatement.toString(); if (! 
lastStatement.trim().isEmpty()) { execStatements.add(lastStatement); } return execStatements.toArray(new String[]{}); } /** * Will connect to a specific database and execute the commands in the .sql file * against that database.<p /> * NOTE: NOT tested on all types of statements - but on a lot * * @param sqlFile - string of sqls with executable statements * @param stopOnError - stop on first error * @return Future with list of statements that failed; the list may be empty */ public Future<List<String>> runSQLFile(String sqlFile, boolean stopOnError) { Promise<List<String>> promise = Promise.promise(); runSQLFile(sqlFile, stopOnError, promise.future()); return promise.future(); } /** * Will connect to a specific database and execute the commands in the .sql file * against that database.<p /> * NOTE: NOT tested on all types of statements - but on a lot * * @param sqlFile - string of sqls with executable statements * @param stopOnError - stop on first error * @param replyHandler - the handler's result is the list of statements that failed; the list may be empty */ public void runSQLFile(String sqlFile, boolean stopOnError, Handler<AsyncResult<List<String>>> replyHandler){ try { execute(preprocessSqlStatements(sqlFile), stopOnError, replyHandler); } catch (Exception e) { log.error(e.getMessage(), e); replyHandler.handle(Future.failedFuture(e)); } } private Connection getStandaloneConnection(String newDB, boolean superUser) throws SQLException { String host = postgreSQLClientConfig.getString(HOST); int port = postgreSQLClientConfig.getInteger(PORT); String user = postgreSQLClientConfig.getString(_USERNAME); String pass = postgreSQLClientConfig.getString(_PASSWORD); String db = postgreSQLClientConfig.getString(DATABASE); if(newDB != null){ db = newDB; if(!superUser){ pass = newDB; user = newDB; } } return DriverManager.getConnection( "jdbc:postgresql://"+host+":"+port+"/"+db, user , pass); } /** * Copy files via the COPY FROM postgres syntax * Support 3 modes * 1. In line (STDIN) Notice the mandatory \. at the end of all entries to import * COPY config_data (jsonb) FROM STDIN ENCODING 'UTF8'; * {"module":"SETTINGS","config_name":"locale","update_date":"1.1.2017","code":"system.currency_symbol.dk","description":"currency code","default": false,"enabled": true,"value": "kr"} * \. * 2. Copy from a data file packaged in the jar * COPY config_data (jsonb) FROM 'data/locales.data' ENCODING 'UTF8'; * 3. 
Copy from a file on disk (absolute path) * COPY config_data (jsonb) FROM 'C:\\Git\\configuration\\mod-configuration-server\\src\\main\\resources\\data\\locales.data' ENCODING 'UTF8'; * @param copyInStatement * @param connection * @throws Exception */ private void copyIn(String copyInStatement, Connection connection) throws Exception { long totalInsertedRecords = 0; CopyManager copyManager = new CopyManager((BaseConnection) connection); if(copyInStatement.contains("STDIN")){ //run as is int sep = copyInStatement.indexOf("\n"); String copyIn = copyInStatement.substring(0, sep); String data = copyInStatement.substring(sep+1); totalInsertedRecords = copyManager.copyIn(copyIn, new StringReader(data)); } else{ //copy from a file, String[] valuesInQuotes = StringUtils.substringsBetween(copyInStatement , "'", "'"); if(valuesInQuotes.length == 0){ log.warn("SQL statement: COPY FROM, has no STDIN and no file path wrapped in ''"); throw new Exception("SQL statement: COPY FROM, has no STDIN and no file path wrapped in ''"); } //do not read from the file system for now as this needs to support data files packaged in //the jar, read files into memory and load - consider improvements to this String filePath = valuesInQuotes[0]; String data; if(new File(filePath).isAbsolute()){ data = FileUtils.readFileToString(new File(filePath), "UTF8"); } else{ try { //assume running from within a jar, data = ResourceUtils.resource2String(filePath); } catch (Exception e) { //from IDE data = ResourceUtils.resource2String("/"+filePath); } } copyInStatement = copyInStatement.replace("'"+filePath+"'", "STDIN"); totalInsertedRecords = copyManager.copyIn(copyInStatement, new StringReader(data)); } log.info("Inserted " + totalInsertedRecords + " via COPY IN. Tenant: " + tenantId); } private void execute(String[] sql, boolean stopOnError, Handler<AsyncResult<List<String>>> replyHandler){ long s = System.nanoTime(); log.info("Executing multiple statements with id " + Arrays.hashCode(sql)); List<String> results = new ArrayList<>(); vertx.executeBlocking(dothis -> { Connection connection = null; Statement statement = null; boolean error = false; try { /* this should be super user account that is in the config file */ connection = getStandaloneConnection(null, false); connection.setAutoCommit(false); statement = connection.createStatement(); for (int j = 0; j < sql.length; j++) { try { log.info("trying to execute: " + sql[j].substring(0, Math.min(sql[j].length()-1, 1000))); if(sql[j].trim().toUpperCase().startsWith("COPY ")){ copyIn(sql[j], connection); } else{ statement.executeUpdate(sql[j]); //NOSONAR } log.info("Successfully executed: " + sql[j].substring(0, Math.min(sql[j].length()-1, 400))); } catch (Exception e) { results.add(sql[j]); error = true; log.error(e.getMessage(),e); if(stopOnError){ break; } } } try { if(error){ connection.rollback(); log.error("Rollback for: " + Arrays.hashCode(sql)); } else{ connection.commit(); log.info("Successfully committed: " + Arrays.hashCode(sql)); } } catch (Exception e) { error = true; log.error("Commit failed " + Arrays.hashCode(sql) + SPACE + e.getMessage(), e); } } catch(Exception e){ log.error(e.getMessage(), e); error = true; } finally { try { if(statement != null) statement.close(); } catch (Exception e) { log.error(e.getMessage(), e); } try { if(connection != null) connection.close(); } catch (Exception e) { log.error(e.getMessage(), e); } if(error){ dothis.fail("error"); } else{ dothis.complete(); } } }, done -> { logTimer(EXECUTE_STAT_METHOD, "" + Arrays.hashCode(sql), s); 
replyHandler.handle(Future.succeededFuture(results)); }); } private static void rememberEmbeddedPostgres() { embeddedPostgres = new EmbeddedPostgres(Version.Main.V10); } /** * Start an embedded PostgreSQL. * doesn't change the configuration. * * @throws IOException when starting embedded PostgreSQL fails */ public void startEmbeddedPostgres() throws IOException { // starting Postgres setIsEmbedded(true); if (embeddedPostgres == null) { int port = postgreSQLClientConfig.getInteger(PORT); String username = postgreSQLClientConfig.getString(_USERNAME); String password = postgreSQLClientConfig.getString(_PASSWORD); String database = postgreSQLClientConfig.getString(DATABASE); String locale = "en_US.UTF-8"; String operatingSystem = System.getProperty("os.name").toLowerCase(); if (operatingSystem.contains("win")) { locale = "american_usa"; } rememberEmbeddedPostgres(); embeddedPostgres.start("localhost", port, database, username, password, Arrays.asList("-E", "UTF-8", "--locale", locale)); Runtime.getRuntime().addShutdownHook(new Thread(PostgresClient::stopEmbeddedPostgres)); log.info("embedded postgres started on port " + port); } else { log.info("embedded postgres is already running..."); } } /** * .sql files * @param path */ public void importFileEmbedded(String path) { // starting Postgres if (embeddedMode) { if (embeddedPostgres != null) { Optional<PostgresProcess> optionalPostgresProcess = embeddedPostgres.getProcess(); if (optionalPostgresProcess.isPresent()) { log.info("embedded postgress import starting...."); PostgresProcess postgresProcess = optionalPostgresProcess.get(); postgresProcess.importFromFile(new File(path)); log.info("embedded postgress import complete...."); } else { log.warn("embedded postgress is not running..."); } } else { log.info("embedded postgress not enabled"); } } } /** * This is a blocking call - run in an execBlocking statement * import data in a tab delimited file into columns of an existing table * Using only default values of the COPY FROM STDIN Postgres command * @param path - path to the file * @param tableName - name of the table to import the content into */ public void importFile(String path, String tableName) { long recordsImported[] = new long[]{-1}; vertx.<String>executeBlocking(dothis -> { try { String host = postgreSQLClientConfig.getString(HOST); int port = postgreSQLClientConfig.getInteger(PORT); String user = postgreSQLClientConfig.getString(_USERNAME); String pass = postgreSQLClientConfig.getString(_PASSWORD); String db = postgreSQLClientConfig.getString(DATABASE); log.info("Connecting to " + db); Connection con = DriverManager.getConnection( "jdbc:postgresql://"+host+":"+port+"/"+db, user , pass); log.info("Copying text data rows from stdin"); CopyManager copyManager = new CopyManager((BaseConnection) con); FileReader fileReader = new FileReader(path); recordsImported[0] = copyManager.copyIn("COPY "+tableName+" FROM STDIN", fileReader ); } catch (Exception e) { log.error(messages.getMessage("en", MessageConsts.ImportFailed), e); dothis.fail(e); } dothis.complete("Done."); }, whendone -> { if(whendone.succeeded()){ log.info("Done importing file: " + path + ". 
Number of records imported: " + recordsImported[0]); } else{ log.info("Failed importing file: " + path); } }); } public static void stopEmbeddedPostgres() { if (embeddedPostgres != null) { closeAllClients(); LogUtil.formatLogMessage(PostgresClient.class.getName(), "stopEmbeddedPostgres", "called stop on embedded postgres ..."); embeddedPostgres.stop(); embeddedPostgres = null; embeddedMode = false; } } public static String convertToPsqlStandard(String tenantId){ return tenantId.toLowerCase() + "_" + MODULE_NAME; } public static String getModuleName(){ return MODULE_NAME; } /** * @return the tenantId of this PostgresClient */ String getTenantId() { return tenantId; } /** * @return the PostgreSQL schema name for the tenantId and the module name of this PostgresClient. * A PostgreSQL schema name is of the form tenant_module and is used to address tables: * "SELECT * FROM tenant_module.table" */ String getSchemaName() { return schemaName; } /** * Function to correct estimated result count: * If the resultsCount is equal to 0, the result should be not more than offset * If the resultsCount is equal to limit, the result should be not less than offset + limit * Otherwise it should be equal to offset + resultsCount * * @param resultsCount the count of rows that are returned from the database * @param estimateCount the estimated result count returned by the database * @param offset database offset * @param limit database limit * @return corrected results count */ static Integer getTotalRecords(int resultsCount, Integer estimateCount, int offset, int limit) { if (estimateCount == null) { return null; } if (limit == 0) { return estimateCount; } if (resultsCount == 0) { return Math.min(offset, estimateCount); } else if (resultsCount == limit) { return Math.max(offset + limit, estimateCount); } return offset + resultsCount; } }
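The getTotalRecords method that closes the file above corrects an estimated hit count against what a page actually returned: an empty page caps the total at the offset, a full page raises it to at least offset + limit, and a short page fixes the total exactly. A minimal standalone restatement of that rule (the TotalRecordsExample class name is invented for illustration and is not part of the archived file):

public class TotalRecordsExample {
    // Mirrors the clamping rule described above: 0 hits => at most offset;
    // a full page => at least offset + limit; otherwise offset + resultsCount.
    static Integer totalRecords(int resultsCount, Integer estimateCount, int offset, int limit) {
        if (estimateCount == null) {
            return null;
        }
        if (limit == 0) {
            return estimateCount;
        }
        if (resultsCount == 0) {
            return Math.min(offset, estimateCount);
        } else if (resultsCount == limit) {
            return Math.max(offset + limit, estimateCount);
        }
        return offset + resultsCount;
    }

    public static void main(String[] args) {
        // estimate says 50, but the page at offset 20 came back empty => total is at most 20
        System.out.println(totalRecords(0, 50, 20, 10));   // 20
        // a full page at offset 90 => total is at least 100 even if the estimate said 95
        System.out.println(totalRecords(10, 95, 90, 10));  // 100
        // a short page pins the exact total
        System.out.println(totalRecords(3, 95, 90, 10));   // 93
    }
}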
Use WHERE clause only for count query RMB-645 (#701)
domain-models-runtime/src/main/java/org/folio/rest/persist/PostgresClient.java
Use WHERE clause only for count query RMB-645 (#701)
<ide><path>omain-models-runtime/src/main/java/org/folio/rest/persist/PostgresClient.java <ide> + FROM + schemaName + DOT + table + SPACE + wrapper.toString(); <ide> queryHelper.countQuery = SELECT + "COUNT(" + countOn + ")" <ide> + FROM + schemaName + DOT + table + SPACE + wrapper.getWhereClause(); <del> String mainQuery = SELECT + distinctOnClause + fieldName + addIdField <del> + FROM + schemaName + DOT + table + SPACE + wrapper.getWithoutLimOff(); <ide> <ide> if (facets != null && !facets.isEmpty()) { <add> String mainQuery = SELECT + distinctOnClause + fieldName + addIdField <add> + FROM + schemaName + DOT + table + SPACE + wrapper.getWithoutLimOff(); <add> <ide> FacetManager facetManager = buildFacetManager(wrapper, queryHelper, mainQuery, facets); <ide> // this method call invokes freemarker templating <ide> queryHelper.selectQuery = facetManager.generateFacetQuery(); <ide> } <ide> if (!wrapper.getWhereClause().isEmpty()) { <ide> // only do estimation when filter is in use (such as CQL). <add> String estQuery = SELECT + distinctOnClause + fieldName + addIdField <add> + FROM + schemaName + DOT + table + SPACE + wrapper.getWhereClause(); <ide> queryHelper.countQuery = SELECT + "count_estimate('" <del> + org.apache.commons.lang.StringEscapeUtils.escapeSql(mainQuery) <add> + org.apache.commons.lang.StringEscapeUtils.escapeSql(estQuery) <ide> + "')"; <ide> } <ide> int offset = wrapper.getOffset().get();
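The diff above builds the count_estimate() argument from the WHERE clause alone instead of the full main query, so ORDER BY and paging never reach the estimator. A rough sketch of the two strings involved, using invented schema, table and clause values rather than the CQLWrapper from the record, and a simplified quote-escaping step in place of escapeSql:

public class CountQuerySketch {
    public static void main(String[] args) {
        String schemaName = "diku_mod_example";   // invented tenant schema
        String table = "instance";                // invented table
        String fieldName = "jsonb";
        String whereClause = "WHERE jsonb->>'title' LIKE '%abc%'";          // filter only
        String withoutLimOff = whereClause + " ORDER BY jsonb->>'title'";   // filter + sort, no paging

        // full result query keeps ordering (paging is appended elsewhere)
        String selectQuery = "SELECT " + fieldName + " FROM " + schemaName + "." + table + " " + withoutLimOff;
        // estimation query is built from the WHERE clause alone, as in the diff above
        String estQuery = "SELECT " + fieldName + " FROM " + schemaName + "." + table + " " + whereClause;
        String countQuery = "SELECT count_estimate('" + estQuery.replace("'", "''") + "')";

        System.out.println(selectQuery);
        System.out.println(countQuery);
    }
}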
JavaScript
mit
34a40758676f1e19a821ab978118b657f85aced9
0
davidblurton/pianograms,davidblurton/pianograms
define(['jquery', 'underscore', 'backbone', 'models/Chord', 'views/KeyboardView', 'views/LinkView' ], function ($, _, Backbone, Chord, KeyboardView, LinkView) { var AppView = Backbone.View.extend({ el: $('#page'), render: function () { var keyboardView = new KeyboardView({ model: this.model }); this.$el.append(keyboardView.render().$el); var linkView = new LinkView({ model: this.model }); this.$el.append('<p>Link to this diagram:</p>'); // Hack: use a template this.$el.append(linkView.render().$el); } }); return AppView; });
app/js/views/AppView.js
define(['jquery', 'underscore', 'backbone', 'models/Chord', 'views/KeyboardView' ], function ($, _, Backbone, Chord, KeyboardView) { var AppView = Backbone.View.extend({ el: $('#page'), render: function () { var keyboardView = new KeyboardView({ model: this.model }); this.$el.append(keyboardView.render().$el); } }); return AppView; });
Render the link view
app/js/views/AppView.js
Render the link view
<ide><path>pp/js/views/AppView.js <ide> 'underscore', <ide> 'backbone', <ide> 'models/Chord', <del> 'views/KeyboardView' <del> ], function ($, _, Backbone, Chord, KeyboardView) { <add> 'views/KeyboardView', <add> 'views/LinkView' <add> ], function ($, _, Backbone, Chord, KeyboardView, LinkView) { <ide> <ide> var AppView = Backbone.View.extend({ <ide> el: $('#page'), <ide> }); <ide> <ide> this.$el.append(keyboardView.render().$el); <add> <add> var linkView = new LinkView({ <add> model: this.model <add> }); <add> <add> this.$el.append('<p>Link to this diagram:</p>'); // Hack: use a template <add> this.$el.append(linkView.render().$el); <ide> } <ide> }); <ide>
Java
apache-2.0
84c6d060cba0c1c84c077d8085e27a681be5b005
0
seven332/Image,seven332/Image,seven332/Image
/* * Copyright 2016 Hippo Seven * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hippo.image; import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.drawable.Animatable; import android.graphics.drawable.Drawable; /** * A drawable to draw {@link ImageBitmap} */ public class ImageDrawable extends Drawable implements Animatable, ImageBitmap.Callback { private final ImageBitmap mImageBitmap; private final Paint mPaint; public ImageDrawable(ImageBitmap imageBitmap) throws RecycledException { if (!imageBitmap.obtain()) { throw new RecycledException(); } mImageBitmap = imageBitmap; mPaint = new Paint(Paint.FILTER_BITMAP_FLAG); // Add callback imageBitmap.addCallback(this); } public void recycle() { mImageBitmap.removeCallback(this); mImageBitmap.release(); } @Override public void draw(Canvas canvas) { mImageBitmap.draw(canvas, null, getBounds(), mPaint); } @Override public void setAlpha(int alpha) { mPaint.setAlpha(alpha); } @Override public void setColorFilter(ColorFilter colorFilter) { mPaint.setColorFilter(colorFilter); } @Override public int getOpacity() { return mImageBitmap.isOpaque() ? PixelFormat.OPAQUE : PixelFormat.UNKNOWN; } @Override public int getIntrinsicWidth() { return mImageBitmap.getWidth(); } @Override public int getIntrinsicHeight() { return mImageBitmap.getHeight(); } @Override public void start() { mImageBitmap.start(); } @Override public void stop() { mImageBitmap.stop(); } @Override public boolean isRunning() { return mImageBitmap.isRunning(); } @Override public void invalidateImage(ImageBitmap who) { invalidateSelf(); } }
library/src/main/java/com/hippo/image/ImageDrawable.java
/* * Copyright 2016 Hippo Seven * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hippo.image; import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.drawable.Animatable; import android.graphics.drawable.Drawable; /** * A drawable to draw {@link ImageBitmap} */ public class ImageDrawable extends Drawable implements Animatable, ImageBitmap.Callback { private ImageBitmap mImageBitmap; private Paint mPaint; public ImageDrawable(ImageBitmap imageBitmap) throws RecycledException { if (!imageBitmap.obtain()) { throw new RecycledException(); } mImageBitmap = imageBitmap; mPaint = new Paint(Paint.FILTER_BITMAP_FLAG); // Add callback imageBitmap.addCallback(this); } public void recycle() { mImageBitmap.removeCallback(this); mImageBitmap.release(); } @Override public void draw(Canvas canvas) { mImageBitmap.draw(canvas, null, getBounds(), mPaint); } @Override public void setAlpha(int alpha) { mPaint.setAlpha(alpha); } @Override public void setColorFilter(ColorFilter colorFilter) { mPaint.setColorFilter(colorFilter); } @Override public int getOpacity() { return mImageBitmap.isOpaque() ? PixelFormat.OPAQUE : PixelFormat.UNKNOWN; } @Override public int getIntrinsicWidth() { return mImageBitmap.getWidth(); } @Override public int getIntrinsicHeight() { return mImageBitmap.getHeight(); } @Override public void start() { mImageBitmap.start(); } @Override public void stop() { mImageBitmap.stop(); } @Override public boolean isRunning() { return mImageBitmap.isRunning(); } @Override public void invalidateImage(ImageBitmap who) { invalidateSelf(); } }
Clean up
library/src/main/java/com/hippo/image/ImageDrawable.java
Clean up
<ide><path>ibrary/src/main/java/com/hippo/image/ImageDrawable.java <ide> */ <ide> public class ImageDrawable extends Drawable implements Animatable, ImageBitmap.Callback { <ide> <del> private ImageBitmap mImageBitmap; <del> private Paint mPaint; <add> private final ImageBitmap mImageBitmap; <add> private final Paint mPaint; <ide> <ide> public ImageDrawable(ImageBitmap imageBitmap) throws RecycledException { <ide> if (!imageBitmap.obtain()) {
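ImageDrawable above pins its ImageBitmap with obtain() in the constructor, throws RecycledException if the bitmap is already gone, and hands the reference back in recycle(). The obtain/release contract it relies on is plain reference counting; a small runnable stand-in for that contract (CountedResource is invented here, not the real ImageBitmap API):

public class RefCountSketch {
    // Minimal stand-in for the obtain()/release() contract ImageDrawable relies on.
    static class CountedResource {
        private int refCount = 1;       // the owner's initial reference
        private boolean recycled;

        synchronized boolean obtain() { // returns false once the resource is gone
            if (recycled) {
                return false;
            }
            refCount++;
            return true;
        }

        synchronized void release() {
            if (--refCount <= 0) {
                recycled = true;        // the real class would free the bitmap here
            }
        }
    }

    public static void main(String[] args) {
        CountedResource bitmap = new CountedResource();
        if (!bitmap.obtain()) {              // what ImageDrawable's constructor does
            throw new IllegalStateException("already recycled");
        }
        bitmap.release();                    // what recycle() does
        bitmap.release();                    // the owner drops its reference; resource is freed
        System.out.println(bitmap.obtain()); // false: constructing another drawable would throw
    }
}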
Java
bsd-3-clause
11e994b6aa7cc81424d3ef26a46dbcf7ca6462c3
0
frc-88/2014-Robot
package edu.wpi.first.wpilibj.templates.commands; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import edu.wpi.first.wpilibj.templates.subsystems.Drive; /** * * @author David */ /** * Command for driving the robot */ public class DrivewithControllerOpen extends CommandBase { private static int TANK = 1; private static int TANK_AVG = 2; private static int ARCADE_SINGLE = 3; private static int ARCADE_SPLIT = 4; private static double AVG_RANGE = 0.1; private int controllerMode; public DrivewithControllerOpen() { super("DriveWithController"); requires(drive); } protected void initialize() { drive.disableClosedLoop(); controllerMode = SmartDashboard.getInt("controller mode", TANK); } /** * Part that drives it */ protected void execute() { double left; double right; double speed; double direction; double average; if (controllerMode==TANK) { // drive the robot based on driver sticks left = oi.getDriveLeftVerticalAxis(); right = oi.getDriveRightVerticalAxis(); drive.driveTankOpenLoop(left, right); } else if (controllerMode == TANK_AVG) { // drive the robot based on driver sticks left = oi.getDriveLeftVerticalAxis(); right = oi.getDriveRightVerticalAxis(); if ( Math.abs(left - right) < AVG_RANGE) { average = left + right / 2.0; drive.driveTankOpenLoop(average, average); } else { drive.driveTankOpenLoop(left, right); } } else if (controllerMode == ARCADE_SINGLE) { speed = oi.getDriveLeftVerticalAxis(); direction = oi.getDriveLeftHorizontalAxis(); arcade(speed, direction); } else if (controllerMode == ARCADE_SPLIT) { speed = oi.getDriveLeftVerticalAxis(); direction = oi.getDriveRightHorizontalAxis(); arcade(speed, direction); } } private void arcade (double speed, double direction) { double left; double right; // set left and right to speed adjusted for direction left = (2.0 * speed + direction) / 3.0; right = (2.0 * speed - direction) / 3.0; drive.driveTankOpenLoop(left, right); } protected boolean isFinished() { return false; } protected void end() { } protected void interrupted() { } }
src/edu/wpi/first/wpilibj/templates/commands/DrivewithControllerOpen.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.wpi.first.wpilibj.templates.commands; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import edu.wpi.first.wpilibj.templates.subsystems.Drive; /** * * @author David */ /** * Command for driving the robot */ public class DrivewithControllerOpen extends CommandBase { private static int TANK = 1; private static int TANK_AVG = 2; private static int ARCADE_SINGLE = 3; private static int ARCADE_SPLIT = 4; private static double AVG_RANGE = 0.1; private int controllerMode; public DrivewithControllerOpen() { super("DriveWithController"); requires(drive); // Use requires() here to declare subsystem dependencies // eg. requires(chassis); } // Called just before this Command runs the first time protected void initialize() { drive.disableClosedLoop(); controllerMode = SmartDashboard.getInt("controller mode", TANK); } // Called repeatedly when this Command is scheduled to run /** * Part that drives it */ protected void execute() { double left; double right; double speed; double direction; double average; if (controllerMode==TANK) { // drive the robot based on driver sticks left = oi.getDriveLeftVerticalAxis(); right = oi.getDriveRightVerticalAxis(); drive.driveTankOpenLoop(left, right); } else if (controllerMode == TANK_AVG) { // drive the robot based on driver sticks left = oi.getDriveLeftVerticalAxis(); right = oi.getDriveRightVerticalAxis(); if ( Math.abs(left - right) < AVG_RANGE) { average = left + right / 2.0; drive.driveTankOpenLoop(average, average); } else { drive.driveTankOpenLoop(left, right); } } else if (controllerMode == ARCADE_SINGLE) { speed = oi.getDriveLeftVerticalAxis(); direction = oi.getDriveLeftHorizontalAxis(); arcade(speed, direction); } else if (controllerMode == ARCADE_SPLIT) { speed = oi.getDriveLeftVerticalAxis(); direction = oi.getDriveRightHorizontalAxis(); arcade(speed, direction); } // drive the robot based on driver sticks //drive.driveTankOpenLoop(left, right); } private void arcade (double speed, double direction) { double left; double right; // set left and right to speed adjusted for direction left = (2.0 * speed + direction) / 3.0; right = (2.0 * speed - direction) / 3.0; drive.driveTankOpenLoop(left, right); } // Make this return true when this Command no longer needs to run execute() protected boolean isFinished() { return false; } // Called once after isFinished returns true protected void end() { } // Called when another command which requires one or more of the same // subsystems is scheduled to run protected void interrupted() { } }
Clean DrivewithControllerOpen
src/edu/wpi/first/wpilibj/templates/commands/DrivewithControllerOpen.java
Clean DrivewithControllerOpen
<ide><path>rc/edu/wpi/first/wpilibj/templates/commands/DrivewithControllerOpen.java <del>/* <del> * To change this template, choose Tools | Templates <del> * and open the template in the editor. <del> */ <ide> package edu.wpi.first.wpilibj.templates.commands; <ide> <ide> import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; <ide> private static int TANK_AVG = 2; <ide> private static int ARCADE_SINGLE = 3; <ide> private static int ARCADE_SPLIT = 4; <del> <ide> private static double AVG_RANGE = 0.1; <del> <ide> private int controllerMode; <ide> <ide> public DrivewithControllerOpen() { <ide> super("DriveWithController"); <ide> requires(drive); <del> // Use requires() here to declare subsystem dependencies <del> // eg. requires(chassis); <ide> } <ide> <del> // Called just before this Command runs the first time <ide> protected void initialize() { <ide> drive.disableClosedLoop(); <ide> controllerMode = SmartDashboard.getInt("controller mode", TANK); <del> <ide> } <ide> <del> // Called repeatedly when this Command is scheduled to run <del> <ide> /** <ide> * Part that drives it <ide> */ <ide> else { <ide> drive.driveTankOpenLoop(left, right); <ide> } <del> <del> <ide> <ide> } else if (controllerMode == ARCADE_SINGLE) { <ide> speed = oi.getDriveLeftVerticalAxis(); <ide> direction = oi.getDriveRightHorizontalAxis(); <ide> <ide> arcade(speed, direction); <del> <ide> } <del> <del> // drive the robot based on driver sticks <del> <del> //drive.driveTankOpenLoop(left, right); <del> <ide> } <ide> <ide> private void arcade (double speed, double direction) { <ide> right = (2.0 * speed - direction) / 3.0; <ide> <ide> drive.driveTankOpenLoop(left, right); <del> <ide> } <ide> <del> // Make this return true when this Command no longer needs to run execute() <ide> protected boolean isFinished() { <ide> return false; <ide> } <ide> <del> // Called once after isFinished returns true <ide> protected void end() { <ide> } <ide> <del> // Called when another command which requires one or more of the same <del> // subsystems is scheduled to run <ide> protected void interrupted() { <ide> } <ide> }
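The arcade() helper in DrivewithControllerOpen blends a turn command into a common speed term: left = (2 * speed + direction) / 3 and right = (2 * speed - direction) / 3, so a positive direction speeds up the left side and slows the right. A tiny runnable sketch of just that mix, with made-up stick values:

public class ArcadeMixSketch {
    // Same mix as the arcade() helper above.
    static double[] mix(double speed, double direction) {
        double left = (2.0 * speed + direction) / 3.0;
        double right = (2.0 * speed - direction) / 3.0;
        return new double[]{left, right};
    }

    public static void main(String[] args) {
        double[] straight = mix(0.9, 0.0);   // no turn -> both sides get 0.6
        double[] turning = mix(0.6, 0.3);    // gentle right turn -> left 0.5, right 0.3
        System.out.printf("straight: %.2f / %.2f%n", straight[0], straight[1]);
        System.out.printf("turning:  %.2f / %.2f%n", turning[0], turning[1]);
    }
}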
Java
apache-2.0
af4510eb9082492fb47415c5fa8196bcb565be71
0
uaihebert/uaiMockServer,uaihebert/uaiMockServer,uaihebert/uaiMockServer
/* * Copyright 2015 uaiHebert * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.uaihebert.uaimockserver.model; import com.typesafe.config.Config; import com.uaihebert.uaimockserver.constants.RootConstants; import com.uaihebert.uaimockserver.factory.TypeSafeConfigFactory; import com.uaihebert.uaimockserver.log.Log; import com.uaihebert.uaimockserver.log.LogBuilder; import com.uaihebert.uaimockserver.util.FileUtil; import com.uaihebert.uaimockserver.util.RouteMapKeyUtil; import com.uaihebert.uaimockserver.util.RouteUtil; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * Class that will hold all the project configurations */ public final class UaiMockServerConfig { private static final Map<String, UaiRoute> ROUTE_MAP_BY_ID = new HashMap<String, UaiRoute>(); private static final Map<String, Set<UaiRoute>> ROUTE_MAP_BY_PATH = new HashMap<String, Set<UaiRoute>>(); private static final String CONFIGURATION_FILE_NAME = "uaiMockServer.config"; private UaiMockServerConfig() { } public static void createInstance() { createInstance(CONFIGURATION_FILE_NAME); } // todo create a test with another context // todo in the dialog page, send to the dialog a clone of the object, and not the real one public static void createInstance(final String fileName) { final File file = FileUtil.findFile(fileName); final Config config = TypeSafeConfigFactory.loadConfiguration(file); createLog(config); UaiBasicServerConfiguration.createInstance(config); RouteUtil.configureRouteMap(config, file); Log.info(String.format("Configurations of the file [%s] was read with success", fileName)); } private static void createLog(final Config config) { final boolean fileLog = config.getBoolean(RootConstants.FILE_LOG.path); final boolean consoleLog = config.getBoolean(RootConstants.CONSOLE_LOG.path); LogBuilder.createInstance(fileLog, consoleLog); } public static Set<UaiRoute> findRouteListByKey(final String requestKey) { return getRouteList(requestKey); } public static void addRoute(final String key, final UaiRoute uaiRoute) { setInMapById(uaiRoute); setInMapByPath(key, uaiRoute); } private static void setInMapById(final UaiRoute uaiRoute) { ROUTE_MAP_BY_ID.put(uaiRoute.id, uaiRoute); } public static void editRoute(final UaiRoute uaiRoute) { final String key = RouteMapKeyUtil.createKey(uaiRoute.uaiRequest.method, uaiRoute.uaiRequest.path); deleteOldRoute(uaiRoute); addRoute(key, uaiRoute); } private static void deleteOldRoute(final UaiRoute uaiRoute) { final UaiRoute oldRoute = ROUTE_MAP_BY_ID.get(uaiRoute.id); final String key = RouteMapKeyUtil.createKey(oldRoute.uaiRequest.method, oldRoute.uaiRequest.path); final Set<UaiRoute> uaiRouteList = getRouteList(key); uaiRouteList.remove(oldRoute); ROUTE_MAP_BY_ID.remove(oldRoute.id); } private static void setInMapByPath(final String key, final UaiRoute uaiRoute) { final Set<UaiRoute> uaiRouteList = getRouteList(key); uaiRouteList.add(uaiRoute); } private static 
Set<UaiRoute> getRouteList(final String key) { if (ROUTE_MAP_BY_PATH.containsKey(key)) { return ROUTE_MAP_BY_PATH.get(key); } final Set<UaiRoute> uaiRouteList = new HashSet<UaiRoute>(); ROUTE_MAP_BY_PATH.put(key, uaiRouteList); return uaiRouteList; } public static List<UaiRoute> listAllRoutes() { final List<UaiRoute> resultList = new ArrayList<UaiRoute>(); for (Set<UaiRoute> uaiRouteList : ROUTE_MAP_BY_PATH.values()) { resultList.addAll(uaiRouteList); } return resultList; } public static void resetRouteMap() { ROUTE_MAP_BY_PATH.clear(); } }
src/main/java/com/uaihebert/uaimockserver/model/UaiMockServerConfig.java
/* * Copyright 2015 uaiHebert * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.uaihebert.uaimockserver.model; import com.typesafe.config.Config; import com.uaihebert.uaimockserver.constants.RootConstants; import com.uaihebert.uaimockserver.factory.TypeSafeConfigFactory; import com.uaihebert.uaimockserver.log.Log; import com.uaihebert.uaimockserver.log.LogBuilder; import com.uaihebert.uaimockserver.util.FileUtil; import com.uaihebert.uaimockserver.util.RouteMapKeyUtil; import com.uaihebert.uaimockserver.util.RouteUtil; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * Class that will hold all the project configurations */ public final class UaiMockServerConfig { private static final Map<String, Set<UaiRoute>> ROUTE_MAP_BY_PATH = new HashMap<String, Set<UaiRoute>>(); private static final String CONFIGURATION_FILE_NAME = "uaiMockServer.config"; private UaiMockServerConfig() { } public static void createInstance() { createInstance(CONFIGURATION_FILE_NAME); } // todo tratar caso quando usuário trocar o metodo do request GET -> POST // todo tratar caso quando usuário o PATH // todo create a test with another context // todo in the dialog page, send to the dialog a clone of the object, and not the real one public static void createInstance(final String fileName) { final File file = FileUtil.findFile(fileName); final Config config = TypeSafeConfigFactory.loadConfiguration(file); createLog(config); UaiBasicServerConfiguration.createInstance(config); RouteUtil.configureRouteMap(config, file); Log.info(String.format("Configurations of the file [%s] was read with success", fileName)); } private static void createLog(final Config config) { final boolean fileLog = config.getBoolean(RootConstants.FILE_LOG.path); final boolean consoleLog = config.getBoolean(RootConstants.CONSOLE_LOG.path); LogBuilder.createInstance(fileLog, consoleLog); } public static Set<UaiRoute> findRouteListByKey(final String requestKey) { return getRouteList(requestKey); } public static void addRoute(final String key, final UaiRoute uaiRoute) { setInMapByPath(key, uaiRoute); } public static void editRoute(final UaiRoute uaiRoute) { final String key = RouteMapKeyUtil.createKey(uaiRoute.uaiRequest.method, uaiRoute.uaiRequest.path); deleteOldRoute(key, uaiRoute); addRoute(key, uaiRoute); } private static void deleteOldRoute(final String key, final UaiRoute uaiRoute) { final Set<UaiRoute> uaiRouteList = getRouteList(key); uaiRouteList.remove(uaiRoute); } private static void setInMapByPath(final String key, final UaiRoute uaiRoute) { final Set<UaiRoute> uaiRouteList = getRouteList(key); uaiRouteList.add(uaiRoute); } private static Set<UaiRoute> getRouteList(final String key) { if (ROUTE_MAP_BY_PATH.containsKey(key)) { return ROUTE_MAP_BY_PATH.get(key); } final Set<UaiRoute> uaiRouteList = new HashSet<UaiRoute>(); ROUTE_MAP_BY_PATH.put(key, uaiRouteList); return uaiRouteList; } public static List<UaiRoute> 
listAllRoutes() { final List<UaiRoute> resultList = new ArrayList<UaiRoute>(); for (Set<UaiRoute> uaiRouteList : ROUTE_MAP_BY_PATH.values()) { resultList.addAll(uaiRouteList); } return resultList; } public static void resetRouteMap() { ROUTE_MAP_BY_PATH.clear(); } }
allowing the route edit to change the method and path url
src/main/java/com/uaihebert/uaimockserver/model/UaiMockServerConfig.java
allowing the route edit to change the method and path url
<ide><path>rc/main/java/com/uaihebert/uaimockserver/model/UaiMockServerConfig.java <ide> * Class that will hold all the project configurations <ide> */ <ide> public final class UaiMockServerConfig { <add> private static final Map<String, UaiRoute> ROUTE_MAP_BY_ID = new HashMap<String, UaiRoute>(); <ide> private static final Map<String, Set<UaiRoute>> ROUTE_MAP_BY_PATH = new HashMap<String, Set<UaiRoute>>(); <ide> <ide> private static final String CONFIGURATION_FILE_NAME = "uaiMockServer.config"; <ide> createInstance(CONFIGURATION_FILE_NAME); <ide> } <ide> <del> // todo tratar caso quando usuário trocar o metodo do request GET -> POST <del> // todo tratar caso quando usuário o PATH <ide> // todo create a test with another context <ide> // todo in the dialog page, send to the dialog a clone of the object, and not the real one <ide> public static void createInstance(final String fileName) { <ide> } <ide> <ide> public static void addRoute(final String key, final UaiRoute uaiRoute) { <add> setInMapById(uaiRoute); <ide> setInMapByPath(key, uaiRoute); <add> } <add> <add> private static void setInMapById(final UaiRoute uaiRoute) { <add> ROUTE_MAP_BY_ID.put(uaiRoute.id, uaiRoute); <ide> } <ide> <ide> public static void editRoute(final UaiRoute uaiRoute) { <ide> final String key = RouteMapKeyUtil.createKey(uaiRoute.uaiRequest.method, uaiRoute.uaiRequest.path); <ide> <del> deleteOldRoute(key, uaiRoute); <add> deleteOldRoute(uaiRoute); <ide> addRoute(key, uaiRoute); <ide> } <ide> <del> private static void deleteOldRoute(final String key, final UaiRoute uaiRoute) { <add> private static void deleteOldRoute(final UaiRoute uaiRoute) { <add> final UaiRoute oldRoute = ROUTE_MAP_BY_ID.get(uaiRoute.id); <add> <add> final String key = RouteMapKeyUtil.createKey(oldRoute.uaiRequest.method, oldRoute.uaiRequest.path); <add> <ide> final Set<UaiRoute> uaiRouteList = getRouteList(key); <del> uaiRouteList.remove(uaiRoute); <add> uaiRouteList.remove(oldRoute); <add> <add> ROUTE_MAP_BY_ID.remove(oldRoute.id); <ide> } <ide> <ide> private static void setInMapByPath(final String key, final UaiRoute uaiRoute) {
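The UaiMockServerConfig change above adds ROUTE_MAP_BY_ID next to ROUTE_MAP_BY_PATH so that editing a route whose method or path changed can still locate and evict the old entry before re-filing the new one. A compact sketch of that two-map registry pattern (the Route type, key format and bucket cleanup are invented for the example, not the project's real types):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class TwoMapRegistrySketch {
    static class Route {
        final String id; String method; String path;
        Route(String id, String method, String path) { this.id = id; this.method = method; this.path = path; }
    }

    static final Map<String, Route> BY_ID = new HashMap<>();
    static final Map<String, Set<Route>> BY_PATH = new HashMap<>();

    static String key(Route r) { return r.method + " " + r.path; }

    static void add(Route r) {
        BY_ID.put(r.id, r);
        BY_PATH.computeIfAbsent(key(r), k -> new HashSet<>()).add(r);
    }

    // Edit looks the *old* route up by id, removes it from its old method/path bucket,
    // then re-files the new version - the point of the ROUTE_MAP_BY_ID map in the diff above.
    static void edit(Route updated) {
        Route old = BY_ID.remove(updated.id);
        if (old != null) {
            Set<Route> bucket = BY_PATH.get(key(old));
            if (bucket != null) {
                bucket.remove(old);
                if (bucket.isEmpty()) {
                    BY_PATH.remove(key(old));
                }
            }
        }
        add(updated);
    }

    public static void main(String[] args) {
        add(new Route("42", "GET", "/user"));
        edit(new Route("42", "POST", "/user/create"));   // method and path both change
        System.out.println(BY_PATH.keySet());            // [POST /user/create]
    }
}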
JavaScript
mit
f5b40e59e6513eb430378d703271e93994c659b3
0
axetroy/gpm,gpmer/gpm.js,gpmer/gpm.js
const process = require('process'); const path = require('path'); // 3th lib const program = require('commander'); const co = require('co'); const colors = require('colors'); const axios = require('axios'); const semver = require('semver'); const log4js = require('log4js'); const logger = log4js.getLogger('ADD'); const pkg = require(path.join(__dirname, '../package.json')); function bootstrapWrapper(func) { return function () { program.__bootstrap__ = true; func.apply(this, arguments); } } // check the update axios.get(`https://registry.npmjs.org/${pkg.name}/latest`) .then(function (resp) { const remotePkg = resp.data; if (semver.gt(remotePkg.version, pkg.version)) { logger.warn(`The current version ${remotePkg} is not the latest, please run [npm install -g ${pkg.name}] update to ${remotePkg.version}`); } }) .catch(function (err) { logger.error(err); }); process.on('uncaughtException', function (err) { console.log(err); }); program .version(pkg.version) .description(pkg.description) .usage('<command> [options]'); program .command('add <repo>') .alias('a') .description('clone repo into local dir') .action(bootstrapWrapper(function (repo, options) { co(require('./command/add')(repo, options)) .catch(function (err) { console.error('ERROR:', err); }); })); program .command('remove') .alias('rm') .description('remove a repo') .action(bootstrapWrapper(function (options) { co(require('./command/remove')(options)) .catch(function (err) { console.error(err); }); })); program .command('init') .alias('i') .description('init the GPM config, when you install or update, run this command first') .action(bootstrapWrapper(function (options) { co(require('./command/init')(options)) .catch(function (err) { console.error(err); }); })); program .command('list') .alias('ls') .description('display the all repo') .action(bootstrapWrapper(function (options) { co(require('./command/list')(options)) .catch(function (err) { console.error(err); }); })); program .command('reset') .alias('rs') .description('reset the GPM config to default') .action(bootstrapWrapper(function (options) { co(require('./command/reset')(options)) .catch(function (err) { console.error(err); }); })); program .command('clean') .alias('cl') .description('clear the cache') .action(bootstrapWrapper(function (options) { co(require('./command/clean')(options)) .catch(function (err) { console.error(err); }); })); program .command('search <key>') .alias('sr') .description('search repository witch add by gpm') .action(bootstrapWrapper(function (key, options) { co(require('./command/search')(key, options)) .catch(function (err) { console.error(err); }); })); program.parse(process.argv); if (!program.args.length || !program.__bootstrap__) program.help();
lib/gpmx.js
const process = require('process'); const path = require('path'); // 3th lib const program = require('commander'); const co = require('co'); const colors = require('colors'); const axios = require('axios'); const semver = require('semver'); const log4js = require('log4js'); const logger = log4js.getLogger('ADD'); const pkg = require(path.join(__dirname, '../package.json')); function bootstrapWrapper(func) { return function () { program.__bootstrap__ = true; func.apply(this, arguments); } } // check the update axios.get(`https://registry.npm.taobao.org/${pkg.name}/latest`) .then(function (resp) { const remotePkg = resp.data; if (semver.gt(remotePkg.version, pkg.version)) { logger.warn(`The current version ${remotePkg} is not the latest, please run [npm install -g ${pkg.name}] update to ${remotePkg.version}`); } }) .catch(function (err) { logger.error(err); }); process.on('uncaughtException', function (err) { console.log(err); }); program .version(pkg.version) .description(pkg.description) .usage('<command> [options]'); program .command('add <repo>') .alias('a') .description('clone repo into local dir') .action(bootstrapWrapper(function (repo, options) { co(require('./command/add')(repo, options)) .catch(function (err) { console.error('ERROR:', err); }); })); program .command('remove') .alias('rm') .description('remove a repo') .action(bootstrapWrapper(function (options) { co(require('./command/remove')(options)) .catch(function (err) { console.error(err); }); })); program .command('init') .alias('i') .description('init the GPM config, when you install or update, run this command first') .action(bootstrapWrapper(function (options) { co(require('./command/init')(options)) .catch(function (err) { console.error(err); }); })); program .command('list') .alias('ls') .description('display the all repo') .action(bootstrapWrapper(function (options) { co(require('./command/list')(options)) .catch(function (err) { console.error(err); }); })); program .command('reset') .alias('rs') .description('reset the GPM config to default') .action(bootstrapWrapper(function (options) { co(require('./command/reset')(options)) .catch(function (err) { console.error(err); }); })); program .command('clean') .alias('cl') .description('clear the cache') .action(bootstrapWrapper(function (options) { co(require('./command/clean')(options)) .catch(function (err) { console.error(err); }); })); program .command('search <key>') .alias('sr') .description('search repository witch add by gpm') .action(bootstrapWrapper(function (key, options) { co(require('./command/search')(key, options)) .catch(function (err) { console.error(err); }); })); program.parse(process.argv); if (!program.args.length || !program.__bootstrap__) program.help();
refactor: replace taobao registry with npm registry
lib/gpmx.js
refactor: replace taobao registry with npm registry
<ide><path>ib/gpmx.js <ide> } <ide> <ide> // check the update <del>axios.get(`https://registry.npm.taobao.org/${pkg.name}/latest`) <add>axios.get(`https://registry.npmjs.org/${pkg.name}/latest`) <ide> .then(function (resp) { <ide> const remotePkg = resp.data; <ide> if (semver.gt(remotePkg.version, pkg.version)) {
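The change above only repoints the CLI's startup update check from the Taobao mirror to the public npm registry. For illustration, a rough Java equivalent of that check is sketched below (Java is used for all sketches added to this document, even though this record is JavaScript); the package name, installed version, and the naive version comparison are assumptions, and the sketch presumes the org.json library that other records in this document already use.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

import org.json.JSONObject;

// Sketch of an update check against the public npm registry: fetch the
// "latest" dist-tag metadata and warn when it is newer than the local version.
public final class UpdateCheck {
    public static void main(String[] args) throws Exception {
        String pkg = "gpm";        // hypothetical package name
        String local = "1.0.0";    // hypothetical installed version

        HttpRequest request = HttpRequest.newBuilder(
                URI.create("https://registry.npmjs.org/" + pkg + "/latest")).build();
        String body = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString()).body();
        String latest = new JSONObject(body).getString("version");

        if (isNewer(latest, local)) {
            System.out.println("Update available: " + latest + " (installed " + local
                    + "); run: npm install -g " + pkg);
        }
    }

    // Naive numeric comparison of dot-separated version fields; real code
    // would use a proper semver comparison instead.
    static boolean isNewer(String a, String b) {
        String[] x = a.split("\\.");
        String[] y = b.split("\\.");
        for (int i = 0; i < Math.min(x.length, y.length); i++) {
            int d = Integer.parseInt(x[i]) - Integer.parseInt(y[i]);
            if (d != 0) {
                return d > 0;
            }
        }
        return x.length > y.length;
    }
}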
Java
mit
97debb6c5881b884183d6ad45d09e0be7913f288
0
frc2503/r2016
package org.usfirst.frc.team2503.r2016; import java.net.InetSocketAddress; import java.net.UnknownHostException; import org.json.JSONObject; import org.usfirst.frc.team2503.lib.util.WarriorMath; import org.usfirst.frc.team2503.r2016.component.CameraMount.CameraMountMode; import org.usfirst.frc.team2503.r2016.component.Intake.IntakeMode; import org.usfirst.frc.team2503.r2016.debug.Logger; import org.usfirst.frc.team2503.r2016.debug.Logger.LoggerPrintStream; import org.usfirst.frc.team2503.r2016.input.Data; import org.usfirst.frc.team2503.r2016.input.gamepad.LogitechF310Gamepad; import org.usfirst.frc.team2503.r2016.input.joystick.MadCatzV1Joystick; import org.usfirst.frc.team2503.r2016.server.DataServer; import org.usfirst.frc.team2503.r2016.server.MessageServer; import edu.wpi.first.wpilibj.DoubleSolenoid; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Relay; public class Robot extends IterativeRobot { public class RobotDataServer extends DataServer implements Tickable { public void tick(Data data) { { JSONObject encoders = new JSONObject(); encoders.put("hooker", R.hooker.encoder.get()); this.serverData.put("encoders", encoders); } { JSONObject switches = new JSONObject(); switches.put("intake", R.intake.limitSwitch.get()); switches.put("hooker", R.hooker.limitSwitch.get()); this.serverData.put("switches", switches); } { JSONObject pneumatics = new JSONObject(); pneumatics.put("charged", R.pneumaticsSubsystem.compressor.getPressureSwitchValue()); pneumatics.put("enabled", R.pneumaticsSubsystem.compressor.enabled()); pneumatics.put("closed", R.pneumaticsSubsystem.compressor.getClosedLoopControl()); this.serverData.put("pneumatics", pneumatics); } { JSONObject joystickInputs = new JSONObject(); joystickInputs.put("left", leftValue); joystickInputs.put("right", rightValue); joystickInputs.put("winch", winchValue); joystickInputs.put("hooker", hookerValue); joystickInputs.put("shooter", shooterValue); this.serverData.put("joysticks", joystickInputs); } } public RobotDataServer() throws UnknownHostException { super(); } public RobotDataServer(InetSocketAddress address) { super(address); } } public RobotDataServer robotDataServer; public MessageServer messageServer; public Thread dataServerThread, messageServerThread; public MadCatzV1Joystick leftJoystick; public MadCatzV1Joystick rightJoystick; public LogitechF310Gamepad gamepad; public JSONObject modeObject; public final MainRobotMap R; public double leftValue; public double rightValue; public double winchValue; public double hookerValue; public double shooterValue; public Robot() { Logger.addPrintStream("main", new LoggerPrintStream(System.out)); Logger.addPrintStream("error", new LoggerPrintStream(System.err)); Logger.addPrintStream("warning", new LoggerPrintStream(System.err)); Logger.addPrintStream("debug", new LoggerPrintStream(System.out)); Logger.addPrintStream("information", new LoggerPrintStream(System.out)); Logger.addPrintStream("data", new LoggerPrintStream(robotDataServer.new WebSocketByteArrayOutputStream())); Logger.println("main", "[Robot] Starting... 
Version '" + Constants.VERSION + "'"); R = new MainRobotMap(); robotDataServer = new RobotDataServer(new InetSocketAddress(5800)); messageServer = new MessageServer(new InetSocketAddress(5801)); dataServerThread = new Thread(robotDataServer); messageServerThread = new Thread(messageServer); Logger.println("information", "Starting DataServer..."); dataServerThread.start(); Logger.println("information", "Starting MessageServer..."); messageServerThread.start(); leftJoystick = new MadCatzV1Joystick(0); rightJoystick = new MadCatzV1Joystick(1); gamepad = new LogitechF310Gamepad(2); modeObject = new JSONObject(); } public void robotInit() { } public void disabledInit() { } public void disabledPeriodic() { } public void autonomousInit() { } public void autonomousPeriodic() { int ticks = R.hooker.encoder.get(); if(ticks < 175) { R.hooker.set(-0.5); } else if(ticks >= 175 && ticks < 185) { R.hooker.set(0.0); } else { R.hooker.set(0.5); } } public void teleopInit() { } public void teleopPeriodic() { // TODO: Move All of this into ControlLayouts leftValue = (leftJoystick.y.get()); rightValue = (rightJoystick.y.get()); winchValue = gamepad.rightY.get(); hookerValue = (gamepad.leftY.get() * 0.5); shooterValue = (gamepad.rightTrigger.get()); // TODO: Move all of this into leftValue *= leftJoystick.throttle.get(); rightValue *= rightJoystick.throttle.get(); // Inverted mode. Flip values, and swap sides to do mirrored driving. if(leftJoystick.button5.get()) { double oldLeft = leftValue; double oldRight = rightValue; leftValue = -oldRight; rightValue = -oldLeft; } if(gamepad.b.get()) { R.intake.setMode(IntakeMode.OUTBOUND); } else if(gamepad.a.get()) { R.intake.setMode(IntakeMode.INBOUND); } else if(gamepad.leftBumper.get()) { R.intake.setMode(IntakeMode.FIRE); } else { R.intake.setMode(IntakeMode.STOPPED); } R.winch.set(winchValue); R.hooker.set(hookerValue); R.shooter.set(shooterValue); R.intake.tick(null); R.winch.tick(null); R.hooker.tick(null); R.shooter.tick(null); if(rightJoystick.button2.get() && !rightJoystick.trigger.get()) { R.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kForward); } else if(rightJoystick.trigger.get() && !rightJoystick.button2.get()) { R.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kReverse); } else { R.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kOff); } R.driveBase.drive(leftValue, rightValue); double povAngle = (double) gamepad.pov.get(); if(povAngle >= 0) { R.cameraMount.setMode(CameraMountMode.LOOKING); R.cameraMount.tweak(Math.cos(WarriorMath.degreesToRadians(90.0d - povAngle)), Math.sin(90.0d - povAngle)); } else { R.cameraMount.setMode(CameraMountMode.TARGETING); } R.cameraMount.tick(null); if(R.intake.limitSwitch.get()) { R.indicatorRelay.set(Relay.Value.kOn); } else { R.indicatorRelay.set(Relay.Value.kOff); } robotDataServer.tick(null); robotDataServer.send(); } public void testInit() { } public void testPeriodic() { } }
src/org/usfirst/frc/team2503/r2016/Robot.java
package org.usfirst.frc.team2503.r2016; import java.net.InetSocketAddress; import java.net.UnknownHostException; import org.json.JSONObject; import org.usfirst.frc.team2503.lib.util.WarriorMath; import org.usfirst.frc.team2503.r2016.component.CameraMount.CameraMountMode; import org.usfirst.frc.team2503.r2016.component.Intake.IntakeMode; import org.usfirst.frc.team2503.r2016.debug.Logger; import org.usfirst.frc.team2503.r2016.debug.Logger.LoggerPrintStream; import org.usfirst.frc.team2503.r2016.input.gamepad.LogitechF310Gamepad; import org.usfirst.frc.team2503.r2016.input.joystick.MadCatzV1Joystick; import org.usfirst.frc.team2503.r2016.server.DataServer; import org.usfirst.frc.team2503.r2016.server.MessageServer; import edu.wpi.first.wpilibj.DoubleSolenoid; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.Relay; public class Robot extends IterativeRobot { public class RobotDataServer extends DataServer { public void update() { { JSONObject encoders = new JSONObject(); encoders.put("hooker", robotMap.hooker.encoder.get()); this.serverData.put("encoders", encoders); } { JSONObject switches = new JSONObject(); switches.put("intake", robotMap.intake.limitSwitch.get()); switches.put("hooker", robotMap.hooker.limitSwitch.get()); this.serverData.put("switches", switches); } { JSONObject pneumatics = new JSONObject(); pneumatics.put("charged", robotMap.pneumaticsSubsystem.compressor.getPressureSwitchValue()); pneumatics.put("enabled", robotMap.pneumaticsSubsystem.compressor.enabled()); pneumatics.put("closed", robotMap.pneumaticsSubsystem.compressor.getClosedLoopControl()); this.serverData.put("pneumatics", pneumatics); } { JSONObject joystickInputs = new JSONObject(); joystickInputs.put("left", leftValue); joystickInputs.put("right", rightValue); joystickInputs.put("winch", winchValue); joystickInputs.put("hooker", hookerValue); joystickInputs.put("shooter", shooterValue); this.serverData.put("joysticks", joystickInputs); } } public RobotDataServer() throws UnknownHostException { super(); } public RobotDataServer(InetSocketAddress address) { super(address); } } public RobotDataServer robotDataServer; public MessageServer messageServer; public Thread dataServerThread, messageServerThread; public MadCatzV1Joystick leftJoystick; public MadCatzV1Joystick rightJoystick; public LogitechF310Gamepad gamepad; public JSONObject modeObject; public MainRobotMap robotMap; public double leftValue; public double rightValue; public double winchValue; public double hookerValue; public double shooterValue; public Robot() { Logger.addPrintStream("main", new LoggerPrintStream(System.out)); Logger.addPrintStream("error", new LoggerPrintStream(System.err)); Logger.addPrintStream("warning", new LoggerPrintStream(System.err)); Logger.addPrintStream("debug", new LoggerPrintStream(System.out)); Logger.addPrintStream("information", new LoggerPrintStream(System.out)); Logger.addPrintStream("data", new LoggerPrintStream(robotDataServer.new WebSocketByteArrayOutputStream())); robotMap = new MainRobotMap(); robotDataServer = new RobotDataServer(new InetSocketAddress(5800)); messageServer = new MessageServer(new InetSocketAddress(5801)); dataServerThread = new Thread(robotDataServer); messageServerThread = new Thread(messageServer); Logger.println("information", "Starting DataServer..."); dataServerThread.start(); Logger.println("information", "Starting MessageServer..."); messageServerThread.start(); leftJoystick = new MadCatzV1Joystick(0); rightJoystick = new MadCatzV1Joystick(1); gamepad = new 
LogitechF310Gamepad(2); modeObject = new JSONObject(); } public void robotInit() { } public void disabledInit() { } public void disabledPeriodic() { } public void autonomousInit() { robotMap.hooker.encoder.reset(); } public void autonomousPeriodic() { int ticks = robotMap.hooker.encoder.get(); if(ticks < 175) { robotMap.hooker.set(-0.5); } else if(ticks >= 175 && ticks < 185) { robotMap.hooker.set(0.0); } else { robotMap.hooker.set(0.5); } } public void teleopInit() { } public void teleopPeriodic() { leftValue = (leftJoystick.y.get()); rightValue = (rightJoystick.y.get()); winchValue = gamepad.rightY.get(); hookerValue = (gamepad.leftY.get() * 0.5); shooterValue = (gamepad.rightTrigger.get()); leftValue *= leftJoystick.throttle.get(); rightValue *= rightJoystick.throttle.get(); // Inverted mode. Flip values, and swap sides to do mirrored driving. if(leftJoystick.button5.get()) { double oldLeft = leftValue; double oldRight = rightValue; leftValue = -oldRight; rightValue = -oldLeft; } if(gamepad.b.get()) { robotMap.intake.setMode(IntakeMode.OUTBOUND); } else if(gamepad.a.get()) { robotMap.intake.setMode(IntakeMode.INBOUND); } else if(gamepad.leftBumper.get()) { robotMap.intake.setMode(IntakeMode.FIRE); } else { robotMap.intake.setMode(IntakeMode.STOPPED); } robotMap.winch.set(winchValue); robotMap.hooker.set(hookerValue); robotMap.shooter.set(shooterValue); robotMap.intake.tick(null); robotMap.winch.tick(null); robotMap.hooker.tick(null); robotMap.shooter.tick(null); if(rightJoystick.button2.get() && !rightJoystick.trigger.get()) { robotMap.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kForward); } else if(rightJoystick.trigger.get() && !rightJoystick.button2.get()) { robotMap.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kReverse); } else { robotMap.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kOff); } robotMap.driveBase.drive(leftValue, rightValue); double povAngle = (double) gamepad.pov.get(); if(povAngle >= 0) { robotMap.cameraMount.setMode(CameraMountMode.LOOKING); robotMap.cameraMount.tweak(Math.cos(WarriorMath.degreesToRadians(90.0d - povAngle)), Math.sin(90.0d - povAngle)); } else { robotMap.cameraMount.setMode(CameraMountMode.TARGETING); } robotMap.cameraMount.tick(null); if(robotMap.intake.limitSwitch.get()) { robotMap.indicatorRelay.set(Relay.Value.kOn); } else { robotMap.indicatorRelay.set(Relay.Value.kOff); } robotDataServer.update(); robotDataServer.send(); } public void testInit() { } public void testPeriodic() { } }
RobotDataServer: Use Tickable; Robot: Shorten robotMap name
src/org/usfirst/frc/team2503/r2016/Robot.java
RobotDataServer: Use Tickable; Robot: Shorten robotMap name
<ide><path>rc/org/usfirst/frc/team2503/r2016/Robot.java <ide> import org.usfirst.frc.team2503.r2016.component.Intake.IntakeMode; <ide> import org.usfirst.frc.team2503.r2016.debug.Logger; <ide> import org.usfirst.frc.team2503.r2016.debug.Logger.LoggerPrintStream; <add>import org.usfirst.frc.team2503.r2016.input.Data; <ide> import org.usfirst.frc.team2503.r2016.input.gamepad.LogitechF310Gamepad; <ide> import org.usfirst.frc.team2503.r2016.input.joystick.MadCatzV1Joystick; <ide> import org.usfirst.frc.team2503.r2016.server.DataServer; <ide> <ide> public class Robot extends IterativeRobot { <ide> <del> public class RobotDataServer extends DataServer { <del> <del> public void update() { <add> public class RobotDataServer extends DataServer implements Tickable { <add> <add> public void tick(Data data) { <ide> { <ide> JSONObject encoders = new JSONObject(); <ide> <del> encoders.put("hooker", robotMap.hooker.encoder.get()); <add> encoders.put("hooker", R.hooker.encoder.get()); <ide> <ide> this.serverData.put("encoders", encoders); <ide> } <ide> { <ide> JSONObject switches = new JSONObject(); <ide> <del> switches.put("intake", robotMap.intake.limitSwitch.get()); <del> switches.put("hooker", robotMap.hooker.limitSwitch.get()); <add> switches.put("intake", R.intake.limitSwitch.get()); <add> switches.put("hooker", R.hooker.limitSwitch.get()); <ide> <ide> this.serverData.put("switches", switches); <ide> } <ide> { <ide> JSONObject pneumatics = new JSONObject(); <ide> <del> pneumatics.put("charged", robotMap.pneumaticsSubsystem.compressor.getPressureSwitchValue()); <del> pneumatics.put("enabled", robotMap.pneumaticsSubsystem.compressor.enabled()); <del> pneumatics.put("closed", robotMap.pneumaticsSubsystem.compressor.getClosedLoopControl()); <add> pneumatics.put("charged", R.pneumaticsSubsystem.compressor.getPressureSwitchValue()); <add> pneumatics.put("enabled", R.pneumaticsSubsystem.compressor.enabled()); <add> pneumatics.put("closed", R.pneumaticsSubsystem.compressor.getClosedLoopControl()); <ide> <ide> this.serverData.put("pneumatics", pneumatics); <ide> } <ide> super(address); <ide> } <ide> <del> <ide> } <ide> <ide> public RobotDataServer robotDataServer; <ide> <ide> public JSONObject modeObject; <ide> <del> public MainRobotMap robotMap; <add> public final MainRobotMap R; <ide> <ide> public double leftValue; <ide> public double rightValue; <ide> Logger.addPrintStream("information", new LoggerPrintStream(System.out)); <ide> Logger.addPrintStream("data", new LoggerPrintStream(robotDataServer.new WebSocketByteArrayOutputStream())); <ide> <del> robotMap = new MainRobotMap(); <add> Logger.println("main", "[Robot] Starting... 
Version '" + Constants.VERSION + "'"); <add> <add> R = new MainRobotMap(); <ide> <ide> robotDataServer = new RobotDataServer(new InetSocketAddress(5800)); <ide> messageServer = new MessageServer(new InetSocketAddress(5801)); <ide> } <ide> <ide> public void autonomousInit() { <del> robotMap.hooker.encoder.reset(); <ide> } <ide> <ide> public void autonomousPeriodic() { <del> int ticks = robotMap.hooker.encoder.get(); <add> int ticks = R.hooker.encoder.get(); <ide> <ide> if(ticks < 175) { <del> robotMap.hooker.set(-0.5); <add> R.hooker.set(-0.5); <ide> } else if(ticks >= 175 && ticks < 185) { <del> robotMap.hooker.set(0.0); <del> } else { <del> robotMap.hooker.set(0.5); <add> R.hooker.set(0.0); <add> } else { <add> R.hooker.set(0.5); <ide> } <ide> } <ide> <ide> } <ide> <ide> public void teleopPeriodic() { <add> // TODO: Move All of this into ControlLayouts <ide> leftValue = (leftJoystick.y.get()); <ide> rightValue = (rightJoystick.y.get()); <ide> winchValue = gamepad.rightY.get(); <ide> hookerValue = (gamepad.leftY.get() * 0.5); <ide> shooterValue = (gamepad.rightTrigger.get()); <ide> <add> // TODO: Move all of this into <ide> leftValue *= leftJoystick.throttle.get(); <ide> rightValue *= rightJoystick.throttle.get(); <ide> <ide> } <ide> <ide> if(gamepad.b.get()) { <del> robotMap.intake.setMode(IntakeMode.OUTBOUND); <add> R.intake.setMode(IntakeMode.OUTBOUND); <ide> } else if(gamepad.a.get()) { <del> robotMap.intake.setMode(IntakeMode.INBOUND); <add> R.intake.setMode(IntakeMode.INBOUND); <ide> } else if(gamepad.leftBumper.get()) { <del> robotMap.intake.setMode(IntakeMode.FIRE); <del> } else { <del> robotMap.intake.setMode(IntakeMode.STOPPED); <del> } <del> <del> robotMap.winch.set(winchValue); <del> robotMap.hooker.set(hookerValue); <del> robotMap.shooter.set(shooterValue); <del> <del> robotMap.intake.tick(null); <del> robotMap.winch.tick(null); <del> robotMap.hooker.tick(null); <del> robotMap.shooter.tick(null); <add> R.intake.setMode(IntakeMode.FIRE); <add> } else { <add> R.intake.setMode(IntakeMode.STOPPED); <add> } <add> <add> R.winch.set(winchValue); <add> R.hooker.set(hookerValue); <add> R.shooter.set(shooterValue); <add> <add> R.intake.tick(null); <add> R.winch.tick(null); <add> R.hooker.tick(null); <add> R.shooter.tick(null); <ide> <ide> if(rightJoystick.button2.get() && !rightJoystick.trigger.get()) { <del> robotMap.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kForward); <add> R.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kForward); <ide> } else if(rightJoystick.trigger.get() && !rightJoystick.button2.get()) { <del> robotMap.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kReverse); <del> } else { <del> robotMap.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kOff); <del> } <del> <del> robotMap.driveBase.drive(leftValue, rightValue); <add> R.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kReverse); <add> } else { <add> R.pneumaticsSubsystem.lift.set(DoubleSolenoid.Value.kOff); <add> } <add> <add> R.driveBase.drive(leftValue, rightValue); <ide> <ide> double povAngle = (double) gamepad.pov.get(); <ide> <ide> if(povAngle >= 0) { <del> robotMap.cameraMount.setMode(CameraMountMode.LOOKING); <del> robotMap.cameraMount.tweak(Math.cos(WarriorMath.degreesToRadians(90.0d - povAngle)), Math.sin(90.0d - povAngle)); <del> } else { <del> robotMap.cameraMount.setMode(CameraMountMode.TARGETING); <del> } <del> <del> robotMap.cameraMount.tick(null); <del> <del> if(robotMap.intake.limitSwitch.get()) { <del> robotMap.indicatorRelay.set(Relay.Value.kOn); <del> } else { <del> 
robotMap.indicatorRelay.set(Relay.Value.kOff); <del> } <del> <del> robotDataServer.update(); <add> R.cameraMount.setMode(CameraMountMode.LOOKING); <add> R.cameraMount.tweak(Math.cos(WarriorMath.degreesToRadians(90.0d - povAngle)), Math.sin(90.0d - povAngle)); <add> } else { <add> R.cameraMount.setMode(CameraMountMode.TARGETING); <add> } <add> <add> R.cameraMount.tick(null); <add> <add> if(R.intake.limitSwitch.get()) { <add> R.indicatorRelay.set(Relay.Value.kOn); <add> } else { <add> R.indicatorRelay.set(Relay.Value.kOff); <add> } <add> <add> robotDataServer.tick(null); <ide> robotDataServer.send(); <ide> } <ide>
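The diff above replaces RobotDataServer.update() with a tick(Data) callback and imports a Tickable type whose source is not included in this record. The sketch below is a hedged reconstruction of what such an interface and the loop that drives it plausibly look like; the names Tickable, Data and TickLoop, and the shape of Data, are assumptions rather than the team's actual code.

// Hypothetical reconstruction -- the record imports Tickable and Data but does
// not include their sources, so these shapes are assumptions.
interface Tickable {
    void tick(Data data); // invoked once per periodic robot loop iteration
}

final class Data {
    // placeholder for whatever per-iteration input snapshot gets passed around
}

// With every component behind the same callback, the periodic method can drive
// them uniformly instead of calling one-off update() methods.
final class TickLoop {
    private final java.util.List<Tickable> components = new java.util.ArrayList<Tickable>();

    void register(Tickable component) {
        components.add(component);
    }

    void runOnce(Data data) {
        for (Tickable component : components) {
            component.tick(data); // e.g. intake, winch, hooker, shooter, cameraMount, data server
        }
    }
}

In the record itself this shows up as robotDataServer.tick(null) replacing robotDataServer.update() at the end of teleopPeriodic().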
Java
apache-2.0
f6e782c25e392542ff56643c62c1ca8a00b7c81d
0
SidneyXu/libgdx,MetSystem/libgdx,basherone/libgdxcn,fiesensee/libgdx,realitix/libgdx,Zomby2D/libgdx,kotcrab/libgdx,snovak/libgdx,sarkanyi/libgdx,nrallakis/libgdx,lordjone/libgdx,KrisLee/libgdx,thepullman/libgdx,nooone/libgdx,bsmr-java/libgdx,kotcrab/libgdx,stickyd/libgdx,nelsonsilva/libgdx,GreenLightning/libgdx,saqsun/libgdx,alireza-hosseini/libgdx,tommycli/libgdx,del-sol/libgdx,Dzamir/libgdx,nelsonsilva/libgdx,nooone/libgdx,ricardorigodon/libgdx,del-sol/libgdx,ttencate/libgdx,josephknight/libgdx,bladecoder/libgdx,ztv/libgdx,xoppa/libgdx,xranby/libgdx,azakhary/libgdx,saltares/libgdx,gouessej/libgdx,srwonka/libGdx,saqsun/libgdx,stickyd/libgdx,Zomby2D/libgdx,jsjolund/libgdx,FyiurAmron/libgdx,MetSystem/libgdx,srwonka/libGdx,anserran/libgdx,libgdx/libgdx,bgroenks96/libgdx,noelsison2/libgdx,JDReutt/libgdx,Arcnor/libgdx,Gliby/libgdx,firefly2442/libgdx,gdos/libgdx,srwonka/libGdx,stinsonga/libgdx,zhimaijoy/libgdx,hyvas/libgdx,fwolff/libgdx,haedri/libgdx-1,yangweigbh/libgdx,MovingBlocks/libgdx,gouessej/libgdx,zommuter/libgdx,hyvas/libgdx,gdos/libgdx,basherone/libgdxcn,nudelchef/libgdx,fiesensee/libgdx,youprofit/libgdx,jberberick/libgdx,tell10glu/libgdx,Deftwun/libgdx,gdos/libgdx,nave966/libgdx,Heart2009/libgdx,MetSystem/libgdx,EsikAntony/libgdx,thepullman/libgdx,ThiagoGarciaAlves/libgdx,Badazdz/libgdx,toloudis/libgdx,tommyettinger/libgdx,SidneyXu/libgdx,sjosegarcia/libgdx,nudelchef/libgdx,Thotep/libgdx,shiweihappy/libgdx,EsikAntony/libgdx,tell10glu/libgdx,curtiszimmerman/libgdx,Senth/libgdx,designcrumble/libgdx,xoppa/libgdx,fiesensee/libgdx,nave966/libgdx,SidneyXu/libgdx,hyvas/libgdx,ttencate/libgdx,shiweihappy/libgdx,kagehak/libgdx,gdos/libgdx,js78/libgdx,Badazdz/libgdx,djom20/libgdx,stickyd/libgdx,fiesensee/libgdx,xpenatan/libgdx-LWJGL3,designcrumble/libgdx,samskivert/libgdx,stinsonga/libgdx,Xhanim/libgdx,Zonglin-Li6565/libgdx,shiweihappy/libgdx,youprofit/libgdx,djom20/libgdx,FyiurAmron/libgdx,stickyd/libgdx,kzganesan/libgdx,ThiagoGarciaAlves/libgdx,snovak/libgdx,revo09/libgdx,collinsmith/libgdx,EsikAntony/libgdx,snovak/libgdx,NathanSweet/libgdx,bgroenks96/libgdx,JDReutt/libgdx,EsikAntony/libgdx,katiepino/libgdx,toa5/libgdx,alex-dorokhov/libgdx,Arcnor/libgdx,collinsmith/libgdx,flaiker/libgdx,samskivert/libgdx,sinistersnare/libgdx,BlueRiverInteractive/libgdx,FredGithub/libgdx,UnluckyNinja/libgdx,PedroRomanoBarbosa/libgdx,MovingBlocks/libgdx,petugez/libgdx,js78/libgdx,junkdog/libgdx,flaiker/libgdx,FyiurAmron/libgdx,xranby/libgdx,stinsonga/libgdx,katiepino/libgdx,nudelchef/libgdx,Gliby/libgdx,xpenatan/libgdx-LWJGL3,ztv/libgdx,FredGithub/libgdx,del-sol/libgdx,mumer92/libgdx,codepoke/libgdx,MadcowD/libgdx,andyvand/libgdx,kotcrab/libgdx,revo09/libgdx,GreenLightning/libgdx,nave966/libgdx,thepullman/libgdx,antag99/libgdx,ttencate/libgdx,toa5/libgdx,nooone/libgdx,stickyd/libgdx,js78/libgdx,ricardorigodon/libgdx,gouessej/libgdx,flaiker/libgdx,js78/libgdx,del-sol/libgdx,gouessej/libgdx,gf11speed/libgdx,realitix/libgdx,djom20/libgdx,MikkelTAndersen/libgdx,ttencate/libgdx,tell10glu/libgdx,sarkanyi/libgdx,NathanSweet/libgdx,fwolff/libgdx,EsikAntony/libgdx,Xhanim/libgdx,MikkelTAndersen/libgdx,ninoalma/libgdx,Zonglin-Li6565/libgdx,Xhanim/libgdx,GreenLightning/libgdx,gouessej/libgdx,BlueRiverInteractive/libgdx,alireza-hosseini/libgdx,titovmaxim/libgdx,ThiagoGarciaAlves/libgdx,zommuter/libgdx,ryoenji/libgdx,djom20/libgdx,JFixby/libgdx,xranby/libgdx,Senth/libgdx,toloudis/libgdx,haedri/libgdx-1,anserran/libgdx,Thotep/libgdx,revo09/libgdx,haedri/libgdx-1,1yvT0s/libgdx,MetSystem/libgdx,JDReutt/libgdx,Senth/libgdx,zommu
ter/libgdx,czyzby/libgdx,zommuter/libgdx,js78/libgdx,nave966/libgdx,ThiagoGarciaAlves/libgdx,1yvT0s/libgdx,basherone/libgdxcn,nrallakis/libgdx,davebaol/libgdx,saqsun/libgdx,snovak/libgdx,flaiker/libgdx,Senth/libgdx,jsjolund/libgdx,saqsun/libgdx,Badazdz/libgdx,gouessej/libgdx,UnluckyNinja/libgdx,Thotep/libgdx,TheAks999/libgdx,andyvand/libgdx,saltares/libgdx,mumer92/libgdx,luischavez/libgdx,junkdog/libgdx,bsmr-java/libgdx,del-sol/libgdx,UnluckyNinja/libgdx,ThiagoGarciaAlves/libgdx,youprofit/libgdx,firefly2442/libgdx,noelsison2/libgdx,Thotep/libgdx,katiepino/libgdx,bgroenks96/libgdx,jasonwee/libgdx,hyvas/libgdx,tell10glu/libgdx,titovmaxim/libgdx,samskivert/libgdx,FredGithub/libgdx,Zonglin-Li6565/libgdx,srwonka/libGdx,collinsmith/libgdx,Wisienkas/libgdx,sjosegarcia/libgdx,UnluckyNinja/libgdx,fiesensee/libgdx,Zonglin-Li6565/libgdx,flaiker/libgdx,collinsmith/libgdx,lordjone/libgdx,xpenatan/libgdx-LWJGL3,mumer92/libgdx,MikkelTAndersen/libgdx,MetSystem/libgdx,Zonglin-Li6565/libgdx,JFixby/libgdx,billgame/libgdx,MadcowD/libgdx,fiesensee/libgdx,MovingBlocks/libgdx,luischavez/libgdx,MadcowD/libgdx,samskivert/libgdx,youprofit/libgdx,firefly2442/libgdx,anserran/libgdx,KrisLee/libgdx,SidneyXu/libgdx,js78/libgdx,kzganesan/libgdx,UnluckyNinja/libgdx,cypherdare/libgdx,gouessej/libgdx,jasonwee/libgdx,tommycli/libgdx,309746069/libgdx,ya7lelkom/libgdx,realitix/libgdx,Thotep/libgdx,toloudis/libgdx,ryoenji/libgdx,thepullman/libgdx,PedroRomanoBarbosa/libgdx,katiepino/libgdx,ryoenji/libgdx,xoppa/libgdx,xpenatan/libgdx-LWJGL3,lordjone/libgdx,MetSystem/libgdx,azakhary/libgdx,zommuter/libgdx,copystudy/libgdx,fwolff/libgdx,copystudy/libgdx,designcrumble/libgdx,sinistersnare/libgdx,saltares/libgdx,codepoke/libgdx,hyvas/libgdx,Deftwun/libgdx,kotcrab/libgdx,Thotep/libgdx,luischavez/libgdx,nrallakis/libgdx,Wisienkas/libgdx,Dzamir/libgdx,srwonka/libGdx,ya7lelkom/libgdx,jberberick/libgdx,gf11speed/libgdx,petugez/libgdx,FredGithub/libgdx,copystudy/libgdx,junkdog/libgdx,cypherdare/libgdx,Dzamir/libgdx,SidneyXu/libgdx,ya7lelkom/libgdx,Heart2009/libgdx,titovmaxim/libgdx,Gliby/libgdx,srwonka/libGdx,luischavez/libgdx,kzganesan/libgdx,xoppa/libgdx,davebaol/libgdx,EsikAntony/libgdx,nudelchef/libgdx,nooone/libgdx,libgdx/libgdx,sarkanyi/libgdx,Thotep/libgdx,codepoke/libgdx,FyiurAmron/libgdx,MetSystem/libgdx,nelsonsilva/libgdx,UnluckyNinja/libgdx,codepoke/libgdx,xoppa/libgdx,andyvand/libgdx,ttencate/libgdx,andyvand/libgdx,KrisLee/libgdx,curtiszimmerman/libgdx,codepoke/libgdx,Deftwun/libgdx,Heart2009/libgdx,TheAks999/libgdx,revo09/libgdx,snovak/libgdx,firefly2442/libgdx,billgame/libgdx,sjosegarcia/libgdx,alex-dorokhov/libgdx,firefly2442/libgdx,309746069/libgdx,tommyettinger/libgdx,Xhanim/libgdx,yangweigbh/libgdx,designcrumble/libgdx,gf11speed/libgdx,yangweigbh/libgdx,FyiurAmron/libgdx,FyiurAmron/libgdx,alex-dorokhov/libgdx,bgroenks96/libgdx,nrallakis/libgdx,titovmaxim/libgdx,kzganesan/libgdx,noelsison2/libgdx,1yvT0s/libgdx,billgame/libgdx,gf11speed/libgdx,Thotep/libgdx,jsjolund/libgdx,zhimaijoy/libgdx,czyzby/libgdx,ninoalma/libgdx,realitix/libgdx,josephknight/libgdx,zhimaijoy/libgdx,azakhary/libgdx,BlueRiverInteractive/libgdx,alex-dorokhov/libgdx,sarkanyi/libgdx,bsmr-java/libgdx,del-sol/libgdx,firefly2442/libgdx,czyzby/libgdx,copystudy/libgdx,sjosegarcia/libgdx,sjosegarcia/libgdx,FredGithub/libgdx,ztv/libgdx,thepullman/libgdx,nooone/libgdx,billgame/libgdx,samskivert/libgdx,petugez/libgdx,nrallakis/libgdx,curtiszimmerman/libgdx,MikkelTAndersen/libgdx,MadcowD/libgdx,Gliby/libgdx,ninoalma/libgdx,titovmaxim/libgdx,bgroenks96/libgdx,saltares
/libgdx,ztv/libgdx,xpenatan/libgdx-LWJGL3,Gliby/libgdx,codepoke/libgdx,tell10glu/libgdx,Wisienkas/libgdx,gdos/libgdx,MadcowD/libgdx,youprofit/libgdx,ztv/libgdx,JFixby/libgdx,tommyettinger/libgdx,sjosegarcia/libgdx,MadcowD/libgdx,GreenLightning/libgdx,MikkelTAndersen/libgdx,petugez/libgdx,azakhary/libgdx,snovak/libgdx,lordjone/libgdx,antag99/libgdx,ninoalma/libgdx,luischavez/libgdx,gdos/libgdx,thepullman/libgdx,JDReutt/libgdx,zommuter/libgdx,Zonglin-Li6565/libgdx,gf11speed/libgdx,bsmr-java/libgdx,kagehak/libgdx,MovingBlocks/libgdx,TheAks999/libgdx,ztv/libgdx,djom20/libgdx,shiweihappy/libgdx,realitix/libgdx,MikkelTAndersen/libgdx,jasonwee/libgdx,Xhanim/libgdx,ztv/libgdx,stickyd/libgdx,lordjone/libgdx,titovmaxim/libgdx,PedroRomanoBarbosa/libgdx,Dzamir/libgdx,jberberick/libgdx,hyvas/libgdx,1yvT0s/libgdx,kzganesan/libgdx,hyvas/libgdx,noelsison2/libgdx,309746069/libgdx,FredGithub/libgdx,nudelchef/libgdx,codepoke/libgdx,curtiszimmerman/libgdx,saqsun/libgdx,youprofit/libgdx,PedroRomanoBarbosa/libgdx,ztv/libgdx,Deftwun/libgdx,toloudis/libgdx,haedri/libgdx-1,bladecoder/libgdx,nrallakis/libgdx,gdos/libgdx,Deftwun/libgdx,nave966/libgdx,alireza-hosseini/libgdx,samskivert/libgdx,katiepino/libgdx,JFixby/libgdx,stickyd/libgdx,ricardorigodon/libgdx,copystudy/libgdx,saltares/libgdx,kzganesan/libgdx,antag99/libgdx,kagehak/libgdx,nrallakis/libgdx,MadcowD/libgdx,curtiszimmerman/libgdx,MovingBlocks/libgdx,ya7lelkom/libgdx,KrisLee/libgdx,Heart2009/libgdx,Zomby2D/libgdx,UnluckyNinja/libgdx,MovingBlocks/libgdx,toloudis/libgdx,bgroenks96/libgdx,ninoalma/libgdx,hyvas/libgdx,cypherdare/libgdx,andyvand/libgdx,saqsun/libgdx,stinsonga/libgdx,gf11speed/libgdx,collinsmith/libgdx,kotcrab/libgdx,basherone/libgdxcn,alireza-hosseini/libgdx,basherone/libgdxcn,stinsonga/libgdx,MovingBlocks/libgdx,BlueRiverInteractive/libgdx,haedri/libgdx-1,Deftwun/libgdx,revo09/libgdx,KrisLee/libgdx,UnluckyNinja/libgdx,billgame/libgdx,JFixby/libgdx,petugez/libgdx,bladecoder/libgdx,del-sol/libgdx,davebaol/libgdx,nave966/libgdx,kagehak/libgdx,luischavez/libgdx,sjosegarcia/libgdx,Heart2009/libgdx,JDReutt/libgdx,309746069/libgdx,GreenLightning/libgdx,katiepino/libgdx,collinsmith/libgdx,designcrumble/libgdx,luischavez/libgdx,Senth/libgdx,tell10glu/libgdx,jsjolund/libgdx,jsjolund/libgdx,ricardorigodon/libgdx,antag99/libgdx,yangweigbh/libgdx,Dzamir/libgdx,sinistersnare/libgdx,flaiker/libgdx,saltares/libgdx,Arcnor/libgdx,xoppa/libgdx,curtiszimmerman/libgdx,js78/libgdx,xpenatan/libgdx-LWJGL3,Senth/libgdx,designcrumble/libgdx,ttencate/libgdx,JFixby/libgdx,JFixby/libgdx,zommuter/libgdx,yangweigbh/libgdx,BlueRiverInteractive/libgdx,copystudy/libgdx,andyvand/libgdx,NathanSweet/libgdx,zhimaijoy/libgdx,toloudis/libgdx,js78/libgdx,alireza-hosseini/libgdx,Deftwun/libgdx,samskivert/libgdx,davebaol/libgdx,bsmr-java/libgdx,nave966/libgdx,noelsison2/libgdx,kotcrab/libgdx,djom20/libgdx,toa5/libgdx,Wisienkas/libgdx,stickyd/libgdx,EsikAntony/libgdx,gdos/libgdx,kagehak/libgdx,davebaol/libgdx,jberberick/libgdx,ricardorigodon/libgdx,toa5/libgdx,Senth/libgdx,Badazdz/libgdx,antag99/libgdx,ninoalma/libgdx,Dzamir/libgdx,copystudy/libgdx,xranby/libgdx,xoppa/libgdx,Wisienkas/libgdx,tommycli/libgdx,ricardorigodon/libgdx,ryoenji/libgdx,gf11speed/libgdx,titovmaxim/libgdx,MikkelTAndersen/libgdx,1yvT0s/libgdx,junkdog/libgdx,Xhanim/libgdx,del-sol/libgdx,thepullman/libgdx,fiesensee/libgdx,309746069/libgdx,zhimaijoy/libgdx,Zomby2D/libgdx,jasonwee/libgdx,yangweigbh/libgdx,revo09/libgdx,realitix/libgdx,Xhanim/libgdx,309746069/libgdx,libgdx/libgdx,Badazdz/libgdx,josephknight/libgdx,sinist
ersnare/libgdx,andyvand/libgdx,anserran/libgdx,jsjolund/libgdx,JDReutt/libgdx,nudelchef/libgdx,Badazdz/libgdx,MovingBlocks/libgdx,toa5/libgdx,basherone/libgdxcn,jasonwee/libgdx,libgdx/libgdx,haedri/libgdx-1,TheAks999/libgdx,curtiszimmerman/libgdx,collinsmith/libgdx,designcrumble/libgdx,fwolff/libgdx,katiepino/libgdx,fwolff/libgdx,czyzby/libgdx,junkdog/libgdx,josephknight/libgdx,Heart2009/libgdx,FredGithub/libgdx,Arcnor/libgdx,thepullman/libgdx,czyzby/libgdx,MadcowD/libgdx,billgame/libgdx,SidneyXu/libgdx,EsikAntony/libgdx,bgroenks96/libgdx,katiepino/libgdx,anserran/libgdx,alex-dorokhov/libgdx,yangweigbh/libgdx,sinistersnare/libgdx,mumer92/libgdx,saltares/libgdx,petugez/libgdx,jsjolund/libgdx,ttencate/libgdx,cypherdare/libgdx,Badazdz/libgdx,alireza-hosseini/libgdx,lordjone/libgdx,youprofit/libgdx,djom20/libgdx,ya7lelkom/libgdx,BlueRiverInteractive/libgdx,xranby/libgdx,designcrumble/libgdx,alex-dorokhov/libgdx,billgame/libgdx,copystudy/libgdx,anserran/libgdx,bgroenks96/libgdx,zhimaijoy/libgdx,309746069/libgdx,Zonglin-Li6565/libgdx,realitix/libgdx,shiweihappy/libgdx,KrisLee/libgdx,flaiker/libgdx,PedroRomanoBarbosa/libgdx,lordjone/libgdx,Dzamir/libgdx,Wisienkas/libgdx,bladecoder/libgdx,firefly2442/libgdx,mumer92/libgdx,sjosegarcia/libgdx,KrisLee/libgdx,JFixby/libgdx,ThiagoGarciaAlves/libgdx,jberberick/libgdx,FyiurAmron/libgdx,Wisienkas/libgdx,nudelchef/libgdx,Zomby2D/libgdx,jberberick/libgdx,xpenatan/libgdx-LWJGL3,fwolff/libgdx,ya7lelkom/libgdx,haedri/libgdx-1,toloudis/libgdx,czyzby/libgdx,cypherdare/libgdx,saqsun/libgdx,xranby/libgdx,yangweigbh/libgdx,BlueRiverInteractive/libgdx,MikkelTAndersen/libgdx,ttencate/libgdx,realitix/libgdx,jberberick/libgdx,309746069/libgdx,NathanSweet/libgdx,FredGithub/libgdx,jberberick/libgdx,czyzby/libgdx,lordjone/libgdx,revo09/libgdx,TheAks999/libgdx,sinistersnare/libgdx,noelsison2/libgdx,titovmaxim/libgdx,Heart2009/libgdx,ryoenji/libgdx,1yvT0s/libgdx,bsmr-java/libgdx,gouessej/libgdx,srwonka/libGdx,GreenLightning/libgdx,josephknight/libgdx,kzganesan/libgdx,junkdog/libgdx,Dzamir/libgdx,alireza-hosseini/libgdx,saltares/libgdx,Gliby/libgdx,sarkanyi/libgdx,petugez/libgdx,JDReutt/libgdx,junkdog/libgdx,shiweihappy/libgdx,azakhary/libgdx,mumer92/libgdx,alex-dorokhov/libgdx,JDReutt/libgdx,jsjolund/libgdx,zhimaijoy/libgdx,noelsison2/libgdx,1yvT0s/libgdx,toa5/libgdx,Zonglin-Li6565/libgdx,zhimaijoy/libgdx,nelsonsilva/libgdx,SidneyXu/libgdx,anserran/libgdx,nelsonsilva/libgdx,ryoenji/libgdx,nooone/libgdx,bsmr-java/libgdx,tommycli/libgdx,czyzby/libgdx,josephknight/libgdx,kagehak/libgdx,mumer92/libgdx,kotcrab/libgdx,josephknight/libgdx,davebaol/libgdx,Wisienkas/libgdx,PedroRomanoBarbosa/libgdx,nelsonsilva/libgdx,NathanSweet/libgdx,tell10glu/libgdx,mumer92/libgdx,jasonwee/libgdx,revo09/libgdx,SidneyXu/libgdx,tommycli/libgdx,xranby/libgdx,sarkanyi/libgdx,snovak/libgdx,GreenLightning/libgdx,junkdog/libgdx,bladecoder/libgdx,haedri/libgdx-1,kotcrab/libgdx,ricardorigodon/libgdx,antag99/libgdx,antag99/libgdx,kagehak/libgdx,KrisLee/libgdx,xoppa/libgdx,shiweihappy/libgdx,codepoke/libgdx,xpenatan/libgdx-LWJGL3,nrallakis/libgdx,noelsison2/libgdx,curtiszimmerman/libgdx,TheAks999/libgdx,libgdx/libgdx,petugez/libgdx,FyiurAmron/libgdx,Heart2009/libgdx,tell10glu/libgdx,kagehak/libgdx,Arcnor/libgdx,Arcnor/libgdx,anserran/libgdx,Xhanim/libgdx,Deftwun/libgdx,samskivert/libgdx,Senth/libgdx,ThiagoGarciaAlves/libgdx,ninoalma/libgdx,tommycli/libgdx,ya7lelkom/libgdx,nave966/libgdx,sarkanyi/libgdx,alex-dorokhov/libgdx,tommyettinger/libgdx,flaiker/libgdx,1yvT0s/libgdx,Gliby/libgdx,bsmr-java/libgdx,snovak
/libgdx,zommuter/libgdx,xranby/libgdx,josephknight/libgdx,toloudis/libgdx,GreenLightning/libgdx,gf11speed/libgdx,sarkanyi/libgdx,antag99/libgdx,TheAks999/libgdx,TheAks999/libgdx,jasonwee/libgdx,Badazdz/libgdx,fwolff/libgdx,MetSystem/libgdx,tommycli/libgdx,tommyettinger/libgdx,nudelchef/libgdx,billgame/libgdx,djom20/libgdx,saqsun/libgdx,luischavez/libgdx,ninoalma/libgdx,Gliby/libgdx,ya7lelkom/libgdx,ricardorigodon/libgdx,PedroRomanoBarbosa/libgdx,firefly2442/libgdx,tommycli/libgdx,andyvand/libgdx,jasonwee/libgdx,BlueRiverInteractive/libgdx,srwonka/libGdx,youprofit/libgdx,ryoenji/libgdx,PedroRomanoBarbosa/libgdx,azakhary/libgdx,collinsmith/libgdx,fiesensee/libgdx,fwolff/libgdx,ThiagoGarciaAlves/libgdx,shiweihappy/libgdx,toa5/libgdx,alireza-hosseini/libgdx,toa5/libgdx
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.graphics.g2d; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.math.MathUtils; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.utils.NumberUtils; import static com.badlogic.gdx.graphics.g2d.SpriteBatch.*; /** Holds the geometry, color, and texture information for drawing 2D sprites using {@link SpriteBatch}. A Sprite has a position * and a size given as width and height. The position is relative to the origin of the coordinate system specified via * {@link SpriteBatch#begin()} and the respective matrices. A Sprite is always rectangular and its position (x, y) are located in * the bottom left corner of that rectangle. A Sprite also has an origin around which rotations and scaling are performed (that * is, the origin is not modified by rotation and scaling). The origin is given relative to the bottom left corner of the Sprite, * its position. * @author mzechner * @author Nathan Sweet */ public class Sprite extends TextureRegion { static final int VERTEX_SIZE = 2 + 1 + 2; static final int SPRITE_SIZE = 4 * VERTEX_SIZE; final float[] vertices = new float[SPRITE_SIZE]; private final Color color = new Color(1, 1, 1, 1); private float x, y; float width, height; private float originX, originY; private float rotation; private float scaleX = 1, scaleY = 1; private boolean dirty = true; private Rectangle bounds; /** Creates an uninitialized sprite. The sprite will need a texture region and bounds set before it can be drawn. */ public Sprite () { setColor(1, 1, 1, 1); } /** Creates a sprite with width, height, and texture region equal to the size of the texture. */ public Sprite (Texture texture) { this(texture, 0, 0, texture.getWidth(), texture.getHeight()); } /** Creates a sprite with width, height, and texture region equal to the specified size. The texture region's upper left corner * will be 0,0. * @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn. * @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. */ public Sprite (Texture texture, int srcWidth, int srcHeight) { this(texture, 0, 0, srcWidth, srcHeight); } /** Creates a sprite with width, height, and texture region equal to the specified size. * @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn. * @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. 
*/ public Sprite (Texture texture, int srcX, int srcY, int srcWidth, int srcHeight) { if (texture == null) throw new IllegalArgumentException("texture cannot be null."); this.texture = texture; setRegion(srcX, srcY, srcWidth, srcHeight); setColor(1, 1, 1, 1); setSize(Math.abs(srcWidth), Math.abs(srcHeight)); setOrigin(width / 2, height / 2); } // Note the region is copied. /** Creates a sprite based on a specific TextureRegion, the new sprite's region is a copy of the parameter region - altering one * does not affect the other */ public Sprite (TextureRegion region) { setRegion(region); setColor(1, 1, 1, 1); setSize(region.getRegionWidth(), region.getRegionHeight()); setOrigin(width / 2, height / 2); } /** Creates a sprite with width, height, and texture region equal to the specified size, relative to specified sprite's texture * region. * @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn. * @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. */ public Sprite (TextureRegion region, int srcX, int srcY, int srcWidth, int srcHeight) { setRegion(region, srcX, srcY, srcWidth, srcHeight); setColor(1, 1, 1, 1); setSize(Math.abs(srcWidth), Math.abs(srcHeight)); setOrigin(width / 2, height / 2); } /** Creates a sprite that is a copy in every way of the specified sprite. */ public Sprite (Sprite sprite) { set(sprite); } /** Make this sprite a copy in every way of the specified sprite */ public void set (Sprite sprite) { if (sprite == null) throw new IllegalArgumentException("sprite cannot be null."); System.arraycopy(sprite.vertices, 0, vertices, 0, SPRITE_SIZE); texture = sprite.texture; u = sprite.u; v = sprite.v; u2 = sprite.u2; v2 = sprite.v2; x = sprite.x; y = sprite.y; width = sprite.width; height = sprite.height; regionWidth = sprite.regionWidth; regionHeight = sprite.regionHeight; originX = sprite.originX; originY = sprite.originY; rotation = sprite.rotation; scaleX = sprite.scaleX; scaleY = sprite.scaleY; color.set(sprite.color); dirty = sprite.dirty; } /** Sets the position and size of the sprite when drawn, before scaling and rotation are applied. If origin, rotation, or scale * are changed, it is slightly more efficient to set the bounds after those operations. */ public void setBounds (float x, float y, float width, float height) { this.x = x; this.y = y; this.width = width; this.height = height; if (dirty) return; float x2 = x + width; float y2 = y + height; float[] vertices = this.vertices; vertices[X1] = x; vertices[Y1] = y; vertices[X2] = x; vertices[Y2] = y2; vertices[X3] = x2; vertices[Y3] = y2; vertices[X4] = x2; vertices[Y4] = y; if (rotation != 0 || scaleX != 1 || scaleY != 1) dirty = true; } /** Sets the size of the sprite when drawn, before scaling and rotation are applied. If origin, rotation, or scale are changed, * it is slightly more efficient to set the size after those operations. If both position and size are to be changed, it is * better to use {@link #setBounds(float, float, float, float)}. */ public void setSize (float width, float height) { this.width = width; this.height = height; if (dirty) return; float x2 = x + width; float y2 = y + height; float[] vertices = this.vertices; vertices[X1] = x; vertices[Y1] = y; vertices[X2] = x; vertices[Y2] = y2; vertices[X3] = x2; vertices[Y3] = y2; vertices[X4] = x2; vertices[Y4] = y; if (rotation != 0 || scaleX != 1 || scaleY != 1) dirty = true; } /** Sets the position where the sprite will be drawn. 
If origin, rotation, or scale are changed, it is slightly more efficient * to set the position after those operations. If both position and size are to be changed, it is better to use * {@link #setBounds(float, float, float, float)}. */ public void setPosition (float x, float y) { translate(x - this.x, y - this.y); } /** Sets the x position where the sprite will be drawn. If origin, rotation, or scale are changed, it is slightly more efficient * to set the position after those operations. If both position and size are to be changed, it is better to use * {@link #setBounds(float, float, float, float)}. */ public void setX (float x) { translateX(x - this.x); } /** Sets the y position where the sprite will be drawn. If origin, rotation, or scale are changed, it is slightly more efficient * to set the position after those operations. If both position and size are to be changed, it is better to use * {@link #setBounds(float, float, float, float)}. */ public void setY (float y) { translateY(y - this.y); } /** Sets the x position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are * changed, it is slightly more efficient to translate after those operations. */ public void translateX (float xAmount) { this.x += xAmount; if (dirty) return; float[] vertices = this.vertices; vertices[X1] += xAmount; vertices[X2] += xAmount; vertices[X3] += xAmount; vertices[X4] += xAmount; } /** Sets the y position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are * changed, it is slightly more efficient to translate after those operations. */ public void translateY (float yAmount) { y += yAmount; if (dirty) return; float[] vertices = this.vertices; vertices[Y1] += yAmount; vertices[Y2] += yAmount; vertices[Y3] += yAmount; vertices[Y4] += yAmount; } /** Sets the position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are * changed, it is slightly more efficient to translate after those operations. */ public void translate (float xAmount, float yAmount) { x += xAmount; y += yAmount; if (dirty) return; float[] vertices = this.vertices; vertices[X1] += xAmount; vertices[Y1] += yAmount; vertices[X2] += xAmount; vertices[Y2] += yAmount; vertices[X3] += xAmount; vertices[Y3] += yAmount; vertices[X4] += xAmount; vertices[Y4] += yAmount; } /** Sets the color used to tint this sprite. Default is {@link Color#WHITE}. */ public void setColor (Color tint) { float color = tint.toFloatBits(); float[] vertices = this.vertices; vertices[C1] = color; vertices[C2] = color; vertices[C3] = color; vertices[C4] = color; } /** @see #setColor(Color) */ public void setColor (float r, float g, float b, float a) { int intBits = ((int)(255 * a) << 24) | ((int)(255 * b) << 16) | ((int)(255 * g) << 8) | ((int)(255 * r)); float color = NumberUtils.intToFloatColor(intBits); float[] vertices = this.vertices; vertices[C1] = color; vertices[C2] = color; vertices[C3] = color; vertices[C4] = color; } /** @see #setColor(Color) * @see Color#toFloatBits() */ public void setColor (float color) { float[] vertices = this.vertices; vertices[C1] = color; vertices[C2] = color; vertices[C3] = color; vertices[C4] = color; } /** Sets the origin in relation to the sprite's position for scaling and rotation. */ public void setOrigin (float originX, float originY) { this.originX = originX; this.originY = originY; dirty = true; } /** Sets the rotation of the sprite in degrees. 
Rotation is centered on the origin set in {@link #setOrigin(float, float)} */ public void setRotation (float degrees) { this.rotation = degrees; dirty = true; } /** @return the rotation of the sprite in degrees */ public float getRotation () { return rotation; } /** Sets the sprite's rotation in degrees relative to the current rotation. Rotation is centered on the origin set in * {@link #setOrigin(float, float)} */ public void rotate (float degrees) { rotation += degrees; dirty = true; } /** Rotates this sprite 90 degrees in-place by rotating the texture coordinates. This rotation is unaffected by * {@link #setRotation(float)} and {@link #rotate(float)}. */ public void rotate90 (boolean clockwise) { float[] vertices = this.vertices; if (clockwise) { float temp = vertices[V1]; vertices[V1] = vertices[V4]; vertices[V4] = vertices[V3]; vertices[V3] = vertices[V2]; vertices[V2] = temp; temp = vertices[U1]; vertices[U1] = vertices[U4]; vertices[U4] = vertices[U3]; vertices[U3] = vertices[U2]; vertices[U2] = temp; } else { float temp = vertices[V1]; vertices[V1] = vertices[V2]; vertices[V2] = vertices[V3]; vertices[V3] = vertices[V4]; vertices[V4] = temp; temp = vertices[U1]; vertices[U1] = vertices[U2]; vertices[U2] = vertices[U3]; vertices[U3] = vertices[U4]; vertices[U4] = temp; } } /** Sets the sprite's scale for both X and Y uniformly. The sprite scales out from the origin. This will not affect the values * returned by {@link #getWidth()} and {@link #getHeight()} */ public void setScale (float scaleXY) { this.scaleX = scaleXY; this.scaleY = scaleXY; dirty = true; } /** Sets the sprite's scale for both X and Y. The sprite scales out from the origin. This will not affect the values returned by * {@link #getWidth()} and {@link #getHeight()} */ public void setScale (float scaleX, float scaleY) { this.scaleX = scaleX; this.scaleY = scaleY; dirty = true; } /** Sets the sprite's scale relative to the current scale. for example: original scale 2 -> sprite.scale(4) -> final scale 6. * The sprite scales out from the origin. This will not affect the values returned by {@link #getWidth()} and * {@link #getHeight()} */ public void scale (float amount) { this.scaleX += amount; this.scaleY += amount; dirty = true; } /** Returns the packed vertices, colors, and texture coordinates for this sprite. 
*/ public float[] getVertices () { if (dirty) { dirty = false; float[] vertices = this.vertices; float localX = -originX; float localY = -originY; float localX2 = localX + width; float localY2 = localY + height; float worldOriginX = this.x - localX; float worldOriginY = this.y - localY; if (scaleX != 1 || scaleY != 1) { localX *= scaleX; localY *= scaleY; localX2 *= scaleX; localY2 *= scaleY; } if (rotation != 0) { final float cos = MathUtils.cosDeg(rotation); final float sin = MathUtils.sinDeg(rotation); final float localXCos = localX * cos; final float localXSin = localX * sin; final float localYCos = localY * cos; final float localYSin = localY * sin; final float localX2Cos = localX2 * cos; final float localX2Sin = localX2 * sin; final float localY2Cos = localY2 * cos; final float localY2Sin = localY2 * sin; final float x1 = localXCos - localYSin + worldOriginX; final float y1 = localYCos + localXSin + worldOriginY; vertices[X1] = x1; vertices[Y1] = y1; final float x2 = localXCos - localY2Sin + worldOriginX; final float y2 = localY2Cos + localXSin + worldOriginY; vertices[X2] = x2; vertices[Y2] = y2; final float x3 = localX2Cos - localY2Sin + worldOriginX; final float y3 = localY2Cos + localX2Sin + worldOriginY; vertices[X3] = x3; vertices[Y3] = y3; vertices[X4] = x1 + (x3 - x2); vertices[Y4] = y3 - (y2 - y1); } else { final float x1 = localX + worldOriginX; final float y1 = localY + worldOriginY; final float x2 = localX2 + worldOriginX; final float y2 = localY2 + worldOriginY; vertices[X1] = x1; vertices[Y1] = y1; vertices[X2] = x1; vertices[Y2] = y2; vertices[X3] = x2; vertices[Y3] = y2; vertices[X4] = x2; vertices[Y4] = y1; } } return vertices; } /** Returns the bounding axis aligned {@link Rectangle} that bounds this sprite. The rectangles x and y coordinates describe its * bottom left corner. If you change the position or size of the sprite, you have to fetch the triangle again for it to be * recomputed. * * @return the bounding Rectangle */ public Rectangle getBoundingRectangle () { final float[] vertices = getVertices(); float minx = vertices[X1]; float miny = vertices[Y1]; float maxx = vertices[X1]; float maxy = vertices[Y1]; minx = minx > vertices[X2] ? vertices[X2] : minx; minx = minx > vertices[X3] ? vertices[X3] : minx; minx = minx > vertices[X4] ? vertices[X4] : minx; maxx = maxx < vertices[X2] ? vertices[X2] : maxx; maxx = maxx < vertices[X3] ? vertices[X3] : maxx; maxx = maxx < vertices[X4] ? vertices[X4] : maxx; miny = miny > vertices[Y2] ? vertices[Y2] : miny; miny = miny > vertices[Y3] ? vertices[Y3] : miny; miny = miny > vertices[Y4] ? vertices[Y4] : miny; maxy = maxy < vertices[Y2] ? vertices[Y2] : maxy; maxy = maxy < vertices[Y3] ? vertices[Y3] : maxy; maxy = maxy < vertices[Y4] ? vertices[Y4] : maxy; if (bounds == null) bounds = new Rectangle(); bounds.x = minx; bounds.y = miny; bounds.width = maxx - minx; bounds.height = maxy - miny; return bounds; } public void draw (SpriteBatch spriteBatch) { spriteBatch.draw(texture, getVertices(), 0, SPRITE_SIZE); } public void draw (SpriteBatch spriteBatch, float alphaModulation) { Color color = getColor(); float oldAlpha = color.a; color.a *= alphaModulation; setColor(color); draw(spriteBatch); color.a = oldAlpha; setColor(color); } public float getX () { return x; } public float getY () { return y; } /** @return the width of the sprite, not accounting for scale. */ public float getWidth () { return width; } /** @return the height of the sprite, not accounting for scale. 
*/ public float getHeight () { return height; } /** The origin influences {@link #setPosition(float, float)}, {@link #setRotation(float)} and the expansion direction of scaling * {@link #setScale(float, float)} */ public float getOriginX () { return originX; } /** The origin influences {@link #setPosition(float, float)}, {@link #setRotation(float)} and the expansion direction of scaling * {@link #setScale(float, float)} */ public float getOriginY () { return originY; } /** X scale of the sprite, independent of size set by {@link #setSize(float, float)} */ public float getScaleX () { return scaleX; } /** Y scale of the sprite, independent of size set by {@link #setSize(float, float)} */ public float getScaleY () { return scaleY; } /** Returns the color of this sprite. Changing the returned color will have no affect, {@link #setColor(Color)} or * {@link #setColor(float, float, float, float)} must be used. */ public Color getColor () { int intBits = NumberUtils.floatToIntColor(vertices[C1]); Color color = this.color; color.r = (intBits & 0xff) / 255f; color.g = ((intBits >>> 8) & 0xff) / 255f; color.b = ((intBits >>> 16) & 0xff) / 255f; color.a = ((intBits >>> 24) & 0xff) / 255f; return color; } public void setRegion (float u, float v, float u2, float v2) { super.setRegion(u, v, u2, v2); float[] vertices = Sprite.this.vertices; vertices[U1] = u; vertices[V1] = v2; vertices[U2] = u; vertices[V2] = v; vertices[U3] = u2; vertices[V3] = v; vertices[U4] = u2; vertices[V4] = v2; } public void setU (float u) { super.setU(u); vertices[U1] = u; vertices[U2] = u; } public void setV (float v) { super.setV(v); vertices[V2] = v; vertices[V3] = v; } public void setU2 (float u2) { super.setU2(u2); vertices[U3] = u2; vertices[U4] = u2; } public void setV2 (float v2) { super.setV2(v2); vertices[V1] = v2; vertices[V4] = v2; } /** boolean parameters are not setting a state, but performing a flip */ public void flip (boolean x, boolean y) { super.flip(x, y); float[] vertices = Sprite.this.vertices; if (x) { float temp = vertices[U1]; vertices[U1] = vertices[U3]; vertices[U3] = temp; temp = vertices[U2]; vertices[U2] = vertices[U4]; vertices[U4] = temp; } if (y) { float temp = vertices[V1]; vertices[V1] = vertices[V3]; vertices[V3] = temp; temp = vertices[V2]; vertices[V2] = vertices[V4]; vertices[V4] = temp; } } public void scroll (float xAmount, float yAmount) { float[] vertices = Sprite.this.vertices; if (xAmount != 0) { float u = (vertices[U1] + xAmount) % 1; float u2 = u + width / texture.getWidth(); this.u = u; this.u2 = u2; vertices[U1] = u; vertices[U2] = u; vertices[U3] = u2; vertices[U4] = u2; } if (yAmount != 0) { float v = (vertices[V2] + yAmount) % 1; float v2 = v + height / texture.getHeight(); this.v = v; this.v2 = v2; vertices[V1] = v2; vertices[V2] = v; vertices[V3] = v; vertices[V4] = v2; } } }
gdx/src/com/badlogic/gdx/graphics/g2d/Sprite.java
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.graphics.g2d; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.Texture; import com.badlogic.gdx.math.MathUtils; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.utils.NumberUtils; import static com.badlogic.gdx.graphics.g2d.SpriteBatch.*; /** Holds the geometry, color, and texture information for drawing 2D sprites using {@link SpriteBatch}. A Sprite has a position * and a size given as width and height. The position is relative to the origin of the coordinate system specified via * {@link SpriteBatch#begin()} and the respective matrices. A Sprite is always rectangular and its position (x, y) are located in * the bottom left corner of that rectangle. A Sprite also has an origin around which rotations and scaling are performed (that * is, the origin is not modified by rotation and scaling). The origin is given relative to the bottom left corner of the Sprite, * its position. * @author mzechner * @author Nathan Sweet */ public class Sprite extends TextureRegion { static final int VERTEX_SIZE = 2 + 1 + 2; static final int SPRITE_SIZE = 4 * VERTEX_SIZE; final float[] vertices = new float[SPRITE_SIZE]; private final Color color = new Color(1, 1, 1, 1); private float x, y; float width, height; private float originX, originY; private float rotation; private float scaleX = 1, scaleY = 1; private boolean dirty = true; private Rectangle bounds; /** Creates an uninitialized sprite. The sprite will need a texture region and bounds set before it can be drawn. */ public Sprite () { setColor(1, 1, 1, 1); } /** Creates a sprite with width, height, and texture region equal to the size of the texture. */ public Sprite (Texture texture) { this(texture, 0, 0, texture.getWidth(), texture.getHeight()); } /** Creates a sprite with width, height, and texture region equal to the specified size. The texture region's upper left corner * will be 0,0. * @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn. * @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. */ public Sprite (Texture texture, int srcWidth, int srcHeight) { this(texture, 0, 0, srcWidth, srcHeight); } /** Creates a sprite with width, height, and texture region equal to the specified size. * @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn. * @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. 
*/ public Sprite (Texture texture, int srcX, int srcY, int srcWidth, int srcHeight) { if (texture == null) throw new IllegalArgumentException("texture cannot be null."); this.texture = texture; setRegion(srcX, srcY, srcWidth, srcHeight); setColor(1, 1, 1, 1); setSize(Math.abs(srcWidth), Math.abs(srcHeight)); setOrigin(width / 2, height / 2); } // Note the region is copied. /** Creates a sprite based on a specific TextureRegion, the new sprite's region is a copy of the parameter region - altering one * does not affect the other */ public Sprite (TextureRegion region) { setRegion(region); setColor(1, 1, 1, 1); setSize(region.getRegionWidth(), region.getRegionHeight()); setOrigin(width / 2, height / 2); } /** Creates a sprite with width, height, and texture region equal to the specified size, relative to specified sprite's texture * region. * @param srcWidth The width of the texture region. May be negative to flip the sprite when drawn. * @param srcHeight The height of the texture region. May be negative to flip the sprite when drawn. */ public Sprite (TextureRegion region, int srcX, int srcY, int srcWidth, int srcHeight) { setRegion(region, srcX, srcY, srcWidth, srcHeight); setColor(1, 1, 1, 1); setSize(Math.abs(srcWidth), Math.abs(srcHeight)); setOrigin(width / 2, height / 2); } /** Creates a sprite that is a copy in every way of the specified sprite. */ public Sprite (Sprite sprite) { set(sprite); } /** Make this sprite a copy in every way of the specified sprite */ public void set (Sprite sprite) { if (sprite == null) throw new IllegalArgumentException("sprite cannot be null."); System.arraycopy(sprite.vertices, 0, vertices, 0, SPRITE_SIZE); texture = sprite.texture; u = sprite.u; v = sprite.v; u2 = sprite.u2; v2 = sprite.v2; x = sprite.x; y = sprite.y; width = sprite.width; height = sprite.height; originX = sprite.originX; originY = sprite.originY; rotation = sprite.rotation; scaleX = sprite.scaleX; scaleY = sprite.scaleY; color.set(sprite.color); dirty = sprite.dirty; } /** Sets the position and size of the sprite when drawn, before scaling and rotation are applied. If origin, rotation, or scale * are changed, it is slightly more efficient to set the bounds after those operations. */ public void setBounds (float x, float y, float width, float height) { this.x = x; this.y = y; this.width = width; this.height = height; if (dirty) return; float x2 = x + width; float y2 = y + height; float[] vertices = this.vertices; vertices[X1] = x; vertices[Y1] = y; vertices[X2] = x; vertices[Y2] = y2; vertices[X3] = x2; vertices[Y3] = y2; vertices[X4] = x2; vertices[Y4] = y; if (rotation != 0 || scaleX != 1 || scaleY != 1) dirty = true; } /** Sets the size of the sprite when drawn, before scaling and rotation are applied. If origin, rotation, or scale are changed, * it is slightly more efficient to set the size after those operations. If both position and size are to be changed, it is * better to use {@link #setBounds(float, float, float, float)}. */ public void setSize (float width, float height) { this.width = width; this.height = height; if (dirty) return; float x2 = x + width; float y2 = y + height; float[] vertices = this.vertices; vertices[X1] = x; vertices[Y1] = y; vertices[X2] = x; vertices[Y2] = y2; vertices[X3] = x2; vertices[Y3] = y2; vertices[X4] = x2; vertices[Y4] = y; if (rotation != 0 || scaleX != 1 || scaleY != 1) dirty = true; } /** Sets the position where the sprite will be drawn. 
If origin, rotation, or scale are changed, it is slightly more efficient * to set the position after those operations. If both position and size are to be changed, it is better to use * {@link #setBounds(float, float, float, float)}. */ public void setPosition (float x, float y) { translate(x - this.x, y - this.y); } /** Sets the x position where the sprite will be drawn. If origin, rotation, or scale are changed, it is slightly more efficient * to set the position after those operations. If both position and size are to be changed, it is better to use * {@link #setBounds(float, float, float, float)}. */ public void setX (float x) { translateX(x - this.x); } /** Sets the y position where the sprite will be drawn. If origin, rotation, or scale are changed, it is slightly more efficient * to set the position after those operations. If both position and size are to be changed, it is better to use * {@link #setBounds(float, float, float, float)}. */ public void setY (float y) { translateY(y - this.y); } /** Sets the x position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are * changed, it is slightly more efficient to translate after those operations. */ public void translateX (float xAmount) { this.x += xAmount; if (dirty) return; float[] vertices = this.vertices; vertices[X1] += xAmount; vertices[X2] += xAmount; vertices[X3] += xAmount; vertices[X4] += xAmount; } /** Sets the y position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are * changed, it is slightly more efficient to translate after those operations. */ public void translateY (float yAmount) { y += yAmount; if (dirty) return; float[] vertices = this.vertices; vertices[Y1] += yAmount; vertices[Y2] += yAmount; vertices[Y3] += yAmount; vertices[Y4] += yAmount; } /** Sets the position relative to the current position where the sprite will be drawn. If origin, rotation, or scale are * changed, it is slightly more efficient to translate after those operations. */ public void translate (float xAmount, float yAmount) { x += xAmount; y += yAmount; if (dirty) return; float[] vertices = this.vertices; vertices[X1] += xAmount; vertices[Y1] += yAmount; vertices[X2] += xAmount; vertices[Y2] += yAmount; vertices[X3] += xAmount; vertices[Y3] += yAmount; vertices[X4] += xAmount; vertices[Y4] += yAmount; } /** Sets the color used to tint this sprite. Default is {@link Color#WHITE}. */ public void setColor (Color tint) { float color = tint.toFloatBits(); float[] vertices = this.vertices; vertices[C1] = color; vertices[C2] = color; vertices[C3] = color; vertices[C4] = color; } /** @see #setColor(Color) */ public void setColor (float r, float g, float b, float a) { int intBits = ((int)(255 * a) << 24) | ((int)(255 * b) << 16) | ((int)(255 * g) << 8) | ((int)(255 * r)); float color = NumberUtils.intToFloatColor(intBits); float[] vertices = this.vertices; vertices[C1] = color; vertices[C2] = color; vertices[C3] = color; vertices[C4] = color; } /** @see #setColor(Color) * @see Color#toFloatBits() */ public void setColor (float color) { float[] vertices = this.vertices; vertices[C1] = color; vertices[C2] = color; vertices[C3] = color; vertices[C4] = color; } /** Sets the origin in relation to the sprite's position for scaling and rotation. */ public void setOrigin (float originX, float originY) { this.originX = originX; this.originY = originY; dirty = true; } /** Sets the rotation of the sprite in degrees. 
Rotation is centered on the origin set in {@link #setOrigin(float, float)} */ public void setRotation (float degrees) { this.rotation = degrees; dirty = true; } /** @return the rotation of the sprite in degrees */ public float getRotation () { return rotation; } /** Sets the sprite's rotation in degrees relative to the current rotation. Rotation is centered on the origin set in * {@link #setOrigin(float, float)} */ public void rotate (float degrees) { rotation += degrees; dirty = true; } /** Rotates this sprite 90 degrees in-place by rotating the texture coordinates. This rotation is unaffected by * {@link #setRotation(float)} and {@link #rotate(float)}. */ public void rotate90 (boolean clockwise) { float[] vertices = this.vertices; if (clockwise) { float temp = vertices[V1]; vertices[V1] = vertices[V4]; vertices[V4] = vertices[V3]; vertices[V3] = vertices[V2]; vertices[V2] = temp; temp = vertices[U1]; vertices[U1] = vertices[U4]; vertices[U4] = vertices[U3]; vertices[U3] = vertices[U2]; vertices[U2] = temp; } else { float temp = vertices[V1]; vertices[V1] = vertices[V2]; vertices[V2] = vertices[V3]; vertices[V3] = vertices[V4]; vertices[V4] = temp; temp = vertices[U1]; vertices[U1] = vertices[U2]; vertices[U2] = vertices[U3]; vertices[U3] = vertices[U4]; vertices[U4] = temp; } } /** Sets the sprite's scale for both X and Y uniformly. The sprite scales out from the origin. This will not affect the values * returned by {@link #getWidth()} and {@link #getHeight()} */ public void setScale (float scaleXY) { this.scaleX = scaleXY; this.scaleY = scaleXY; dirty = true; } /** Sets the sprite's scale for both X and Y. The sprite scales out from the origin. This will not affect the values returned by * {@link #getWidth()} and {@link #getHeight()} */ public void setScale (float scaleX, float scaleY) { this.scaleX = scaleX; this.scaleY = scaleY; dirty = true; } /** Sets the sprite's scale relative to the current scale. for example: original scale 2 -> sprite.scale(4) -> final scale 6. * The sprite scales out from the origin. This will not affect the values returned by {@link #getWidth()} and * {@link #getHeight()} */ public void scale (float amount) { this.scaleX += amount; this.scaleY += amount; dirty = true; } /** Returns the packed vertices, colors, and texture coordinates for this sprite. 
*/ public float[] getVertices () { if (dirty) { dirty = false; float[] vertices = this.vertices; float localX = -originX; float localY = -originY; float localX2 = localX + width; float localY2 = localY + height; float worldOriginX = this.x - localX; float worldOriginY = this.y - localY; if (scaleX != 1 || scaleY != 1) { localX *= scaleX; localY *= scaleY; localX2 *= scaleX; localY2 *= scaleY; } if (rotation != 0) { final float cos = MathUtils.cosDeg(rotation); final float sin = MathUtils.sinDeg(rotation); final float localXCos = localX * cos; final float localXSin = localX * sin; final float localYCos = localY * cos; final float localYSin = localY * sin; final float localX2Cos = localX2 * cos; final float localX2Sin = localX2 * sin; final float localY2Cos = localY2 * cos; final float localY2Sin = localY2 * sin; final float x1 = localXCos - localYSin + worldOriginX; final float y1 = localYCos + localXSin + worldOriginY; vertices[X1] = x1; vertices[Y1] = y1; final float x2 = localXCos - localY2Sin + worldOriginX; final float y2 = localY2Cos + localXSin + worldOriginY; vertices[X2] = x2; vertices[Y2] = y2; final float x3 = localX2Cos - localY2Sin + worldOriginX; final float y3 = localY2Cos + localX2Sin + worldOriginY; vertices[X3] = x3; vertices[Y3] = y3; vertices[X4] = x1 + (x3 - x2); vertices[Y4] = y3 - (y2 - y1); } else { final float x1 = localX + worldOriginX; final float y1 = localY + worldOriginY; final float x2 = localX2 + worldOriginX; final float y2 = localY2 + worldOriginY; vertices[X1] = x1; vertices[Y1] = y1; vertices[X2] = x1; vertices[Y2] = y2; vertices[X3] = x2; vertices[Y3] = y2; vertices[X4] = x2; vertices[Y4] = y1; } } return vertices; } /** Returns the bounding axis aligned {@link Rectangle} that bounds this sprite. The rectangles x and y coordinates describe its * bottom left corner. If you change the position or size of the sprite, you have to fetch the triangle again for it to be * recomputed. * * @return the bounding Rectangle */ public Rectangle getBoundingRectangle () { final float[] vertices = getVertices(); float minx = vertices[X1]; float miny = vertices[Y1]; float maxx = vertices[X1]; float maxy = vertices[Y1]; minx = minx > vertices[X2] ? vertices[X2] : minx; minx = minx > vertices[X3] ? vertices[X3] : minx; minx = minx > vertices[X4] ? vertices[X4] : minx; maxx = maxx < vertices[X2] ? vertices[X2] : maxx; maxx = maxx < vertices[X3] ? vertices[X3] : maxx; maxx = maxx < vertices[X4] ? vertices[X4] : maxx; miny = miny > vertices[Y2] ? vertices[Y2] : miny; miny = miny > vertices[Y3] ? vertices[Y3] : miny; miny = miny > vertices[Y4] ? vertices[Y4] : miny; maxy = maxy < vertices[Y2] ? vertices[Y2] : maxy; maxy = maxy < vertices[Y3] ? vertices[Y3] : maxy; maxy = maxy < vertices[Y4] ? vertices[Y4] : maxy; if (bounds == null) bounds = new Rectangle(); bounds.x = minx; bounds.y = miny; bounds.width = maxx - minx; bounds.height = maxy - miny; return bounds; } public void draw (SpriteBatch spriteBatch) { spriteBatch.draw(texture, getVertices(), 0, SPRITE_SIZE); } public void draw (SpriteBatch spriteBatch, float alphaModulation) { Color color = getColor(); float oldAlpha = color.a; color.a *= alphaModulation; setColor(color); draw(spriteBatch); color.a = oldAlpha; setColor(color); } public float getX () { return x; } public float getY () { return y; } /** @return the width of the sprite, not accounting for scale. */ public float getWidth () { return width; } /** @return the height of the sprite, not accounting for scale. 
*/ public float getHeight () { return height; } /** The origin influences {@link #setPosition(float, float)}, {@link #setRotation(float)} and the expansion direction of scaling * {@link #setScale(float, float)} */ public float getOriginX () { return originX; } /** The origin influences {@link #setPosition(float, float)}, {@link #setRotation(float)} and the expansion direction of scaling * {@link #setScale(float, float)} */ public float getOriginY () { return originY; } /** X scale of the sprite, independent of size set by {@link #setSize(float, float)} */ public float getScaleX () { return scaleX; } /** Y scale of the sprite, independent of size set by {@link #setSize(float, float)} */ public float getScaleY () { return scaleY; } /** Returns the color of this sprite. Changing the returned color will have no affect, {@link #setColor(Color)} or * {@link #setColor(float, float, float, float)} must be used. */ public Color getColor () { int intBits = NumberUtils.floatToIntColor(vertices[C1]); Color color = this.color; color.r = (intBits & 0xff) / 255f; color.g = ((intBits >>> 8) & 0xff) / 255f; color.b = ((intBits >>> 16) & 0xff) / 255f; color.a = ((intBits >>> 24) & 0xff) / 255f; return color; } public void setRegion (float u, float v, float u2, float v2) { super.setRegion(u, v, u2, v2); float[] vertices = Sprite.this.vertices; vertices[U1] = u; vertices[V1] = v2; vertices[U2] = u; vertices[V2] = v; vertices[U3] = u2; vertices[V3] = v; vertices[U4] = u2; vertices[V4] = v2; } public void setU (float u) { super.setU(u); vertices[U1] = u; vertices[U2] = u; } public void setV (float v) { super.setV(v); vertices[V2] = v; vertices[V3] = v; } public void setU2 (float u2) { super.setU2(u2); vertices[U3] = u2; vertices[U4] = u2; } public void setV2 (float v2) { super.setV2(v2); vertices[V1] = v2; vertices[V4] = v2; } /** boolean parameters are not setting a state, but performing a flip */ public void flip (boolean x, boolean y) { super.flip(x, y); float[] vertices = Sprite.this.vertices; if (x) { float temp = vertices[U1]; vertices[U1] = vertices[U3]; vertices[U3] = temp; temp = vertices[U2]; vertices[U2] = vertices[U4]; vertices[U4] = temp; } if (y) { float temp = vertices[V1]; vertices[V1] = vertices[V3]; vertices[V3] = temp; temp = vertices[V2]; vertices[V2] = vertices[V4]; vertices[V4] = temp; } } public void scroll (float xAmount, float yAmount) { float[] vertices = Sprite.this.vertices; if (xAmount != 0) { float u = (vertices[U1] + xAmount) % 1; float u2 = u + width / texture.getWidth(); this.u = u; this.u2 = u2; vertices[U1] = u; vertices[U2] = u; vertices[U3] = u2; vertices[U4] = u2; } if (yAmount != 0) { float v = (vertices[V2] + yAmount) % 1; float v2 = v + height / texture.getHeight(); this.v = v; this.v2 = v2; vertices[V1] = v2; vertices[V2] = v; vertices[V3] = v; vertices[V4] = v2; } } }
fixed Sprite.set(Sprite) so that it now also copies regionWidth and regionHeight. Since the method's documentation promises to "Make this sprite a copy in every way of the specified sprite", the critical fields regionWidth and regionHeight must be copied from the incoming sprite as well. Without them, Sprite's copy constructor (which delegates to set) produces zero-width, zero-height sprites that are invisible when rendered (see the illustrative sketch after this record's diff).
gdx/src/com/badlogic/gdx/graphics/g2d/Sprite.java
fixed Sprite.set(Sprite) so that it now also copies regionWidth and regionHeight.
<ide><path>dx/src/com/badlogic/gdx/graphics/g2d/Sprite.java <ide> y = sprite.y; <ide> width = sprite.width; <ide> height = sprite.height; <add> regionWidth = sprite.regionWidth; <add> regionHeight = sprite.regionHeight; <ide> originX = sprite.originX; <ide> originY = sprite.originY; <ide> rotation = sprite.rotation;
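To make the failure mode described in the commit message above concrete, here is a minimal, self-contained Java sketch. MiniSprite and its fields are hypothetical stand-ins invented purely for illustration; this is not the real com.badlogic.gdx.graphics.g2d.Sprite class, it only reproduces the copy-via-set pattern that the commit repairs.

// Minimal illustration of the bug fixed in the commit above.
// MiniSprite is a hypothetical stand-in for the libgdx Sprite class;
// only the copy-constructor-delegates-to-set() pattern is reproduced.
public class MiniSprite {
    float width, height;           // drawn size
    int regionWidth, regionHeight; // texture region size used when rendering

    MiniSprite(float width, float height, int regionWidth, int regionHeight) {
        this.width = width;
        this.height = height;
        this.regionWidth = regionWidth;
        this.regionHeight = regionHeight;
    }

    /** Copy constructor, delegates to set() just as Sprite(Sprite) does. */
    MiniSprite(MiniSprite other) {
        set(other);
    }

    /** Pre-fix behaviour: copies the drawn size but forgets the region fields. */
    void set(MiniSprite other) {
        width = other.width;
        height = other.height;
        // regionWidth / regionHeight are NOT copied, so they stay 0 and the
        // copy effectively has an empty texture region.
    }

    public static void main(String[] args) {
        MiniSprite original = new MiniSprite(64, 64, 64, 64);
        MiniSprite copy = new MiniSprite(original);
        // Prints "region of copy: 0 x 0" - the copy would draw as invisible.
        System.out.println("region of copy: " + copy.regionWidth + " x " + copy.regionHeight);
        // The commit's fix is the two missing assignments inside set():
        //   regionWidth = other.regionWidth;
        //   regionHeight = other.regionHeight;
    }
}

As the diff above shows, the actual change is simply those two added assignments of regionWidth and regionHeight inside Sprite.set(Sprite).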
JavaScript
mit
3202e7e62ee7bc09249a8ce91e858f46269f2c04
0
webrtcHacks/WebRTC-Camera-Resolution
/* * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. */ /* More information about these options at jshint.com/docs/options */ /* jshint browser: true, camelcase: true, curly: true, devel: true, eqeqeq: true, forin: false, globalstrict: true, node: true, quotmark: single, undef: true, unused: strict */ /* global mozRTCIceCandidate, mozRTCPeerConnection, Promise, mozRTCSessionDescription, webkitRTCPeerConnection, MediaStreamTrack */ /* exported trace,requestUserMedia */ 'use strict'; var getUserMedia = null; var attachMediaStream = null; var reattachMediaStream = null; var webrtcDetectedBrowser = null; var webrtcDetectedVersion = null; var webrtcMinimumVersion = null; var webrtcUtils = { log: function() { // suppress console.log output when being included as a module. if (typeof module !== 'undefined' || typeof require === 'function' && typeof define === 'function') { return; } console.log.apply(console, arguments); }, extractVersion: function(uastring, expr, pos) { var match = uastring.match(expr); return match && match.length >= pos && parseInt(match[pos], 10); } }; function trace(text) { // This function is used for logging. if (text[text.length - 1] === '\n') { text = text.substring(0, text.length - 1); } if (window.performance) { var now = (window.performance.now() / 1000).toFixed(3); webrtcUtils.log(now + ': ' + text); } else { webrtcUtils.log(text); } } if (typeof window === 'object') { if (window.HTMLMediaElement && !('srcObject' in window.HTMLMediaElement.prototype)) { // Shim the srcObject property, once, when HTMLMediaElement is found. Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', { get: function() { // If prefixed srcObject property exists, return it. // Otherwise use the shimmed property, _srcObject return 'mozSrcObject' in this ? this.mozSrcObject : this._srcObject; }, set: function(stream) { if ('mozSrcObject' in this) { this.mozSrcObject = stream; } else { // Use _srcObject as a private property for this shim this._srcObject = stream; // TODO: revokeObjectUrl(this.src) when !stream to release resources? this.src = URL.createObjectURL(stream); } } }); } // Proxy existing globals getUserMedia = window.navigator && window.navigator.getUserMedia; } // Attach a media stream to an element. attachMediaStream = function(element, stream) { element.srcObject = stream; }; reattachMediaStream = function(to, from) { to.srcObject = from.srcObject; }; if (typeof window === 'undefined' || !window.navigator) { webrtcUtils.log('This does not appear to be a browser'); webrtcDetectedBrowser = 'not a browser'; } else if (navigator.mozGetUserMedia && window.mozRTCPeerConnection) { webrtcUtils.log('This appears to be Firefox'); webrtcDetectedBrowser = 'firefox'; // the detected firefox version. webrtcDetectedVersion = webrtcUtils.extractVersion(navigator.userAgent, /Firefox\/([0-9]+)\./, 1); // the minimum firefox version still supported by adapter. webrtcMinimumVersion = 31; // The RTCPeerConnection object. window.RTCPeerConnection = function(pcConfig, pcConstraints) { if (webrtcDetectedVersion < 38) { // .urls is not supported in FF < 38. // create RTCIceServers with a single url. 
if (pcConfig && pcConfig.iceServers) { var newIceServers = []; for (var i = 0; i < pcConfig.iceServers.length; i++) { var server = pcConfig.iceServers[i]; if (server.hasOwnProperty('urls')) { for (var j = 0; j < server.urls.length; j++) { var newServer = { url: server.urls[j] }; if (server.urls[j].indexOf('turn') === 0) { newServer.username = server.username; newServer.credential = server.credential; } newIceServers.push(newServer); } } else { newIceServers.push(pcConfig.iceServers[i]); } } pcConfig.iceServers = newIceServers; } } return new mozRTCPeerConnection(pcConfig, pcConstraints); // jscs:ignore requireCapitalizedConstructors }; // The RTCSessionDescription object. if (!window.RTCSessionDescription) { window.RTCSessionDescription = mozRTCSessionDescription; } // The RTCIceCandidate object. if (!window.RTCIceCandidate) { window.RTCIceCandidate = mozRTCIceCandidate; } // getUserMedia constraints shim. getUserMedia = function(constraints, onSuccess, onError) { var constraintsToFF37 = function(c) { if (typeof c !== 'object' || c.require) { return c; } var require = []; Object.keys(c).forEach(function(key) { if (key === 'require' || key === 'advanced' || key === 'mediaSource') { return; } var r = c[key] = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]}; if (r.min !== undefined || r.max !== undefined || r.exact !== undefined) { require.push(key); } if (r.exact !== undefined) { if (typeof r.exact === 'number') { r.min = r.max = r.exact; } else { c[key] = r.exact; } delete r.exact; } if (r.ideal !== undefined) { c.advanced = c.advanced || []; var oc = {}; if (typeof r.ideal === 'number') { oc[key] = {min: r.ideal, max: r.ideal}; } else { oc[key] = r.ideal; } c.advanced.push(oc); delete r.ideal; if (!Object.keys(r).length) { delete c[key]; } } }); if (require.length) { c.require = require; } return c; }; if (webrtcDetectedVersion < 38) { webrtcUtils.log('spec: ' + JSON.stringify(constraints)); if (constraints.audio) { constraints.audio = constraintsToFF37(constraints.audio); } if (constraints.video) { constraints.video = constraintsToFF37(constraints.video); } webrtcUtils.log('ff37: ' + JSON.stringify(constraints)); } return navigator.mozGetUserMedia(constraints, onSuccess, onError); }; navigator.getUserMedia = getUserMedia; // Shim for mediaDevices on older versions. if (!navigator.mediaDevices) { navigator.mediaDevices = {getUserMedia: requestUserMedia, addEventListener: function() { }, removeEventListener: function() { } }; } navigator.mediaDevices.enumerateDevices = navigator.mediaDevices.enumerateDevices || function() { return new Promise(function(resolve) { var infos = [ {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''}, {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''} ]; resolve(infos); }); }; if (webrtcDetectedVersion < 41) { // Work around http://bugzil.la/1169665 var orgEnumerateDevices = navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices); navigator.mediaDevices.enumerateDevices = function() { return orgEnumerateDevices().then(undefined, function(e) { if (e.name === 'NotFoundError') { return []; } throw e; }); }; } } else if (navigator.webkitGetUserMedia && window.webkitRTCPeerConnection) { webrtcUtils.log('This appears to be Chrome'); webrtcDetectedBrowser = 'chrome'; // the detected chrome version. webrtcDetectedVersion = webrtcUtils.extractVersion(navigator.userAgent, /Chrom(e|ium)\/([0-9]+)\./, 2); // the minimum chrome version still supported by adapter. webrtcMinimumVersion = 38; // The RTCPeerConnection object. 
window.RTCPeerConnection = function(pcConfig, pcConstraints) { // Translate iceTransportPolicy to iceTransports, // see https://code.google.com/p/webrtc/issues/detail?id=4869 if (pcConfig && pcConfig.iceTransportPolicy) { pcConfig.iceTransports = pcConfig.iceTransportPolicy; } var pc = new webkitRTCPeerConnection(pcConfig, pcConstraints); // jscs:ignore requireCapitalizedConstructors var origGetStats = pc.getStats.bind(pc); pc.getStats = function(selector, successCallback, errorCallback) { // jshint ignore: line var self = this; var args = arguments; // If selector is a function then we are in the old style stats so just // pass back the original getStats format to avoid breaking old users. if (arguments.length > 0 && typeof selector === 'function') { return origGetStats(selector, successCallback); } var fixChromeStats = function(response) { var standardReport = {}; var reports = response.result(); reports.forEach(function(report) { var standardStats = { id: report.id, timestamp: report.timestamp, type: report.type }; report.names().forEach(function(name) { standardStats[name] = report.stat(name); }); standardReport[standardStats.id] = standardStats; }); return standardReport; }; if (arguments.length >= 2) { var successCallbackWrapper = function(response) { args[1](fixChromeStats(response)); }; return origGetStats.apply(this, [successCallbackWrapper, arguments[0]]); } // promise-support return new Promise(function(resolve, reject) { if (args.length === 1 && selector === null) { origGetStats.apply(self, [ function(response) { resolve.apply(null, [fixChromeStats(response)]); }, reject]); } else { origGetStats.apply(self, [resolve, reject]); } }); }; return pc; }; // add promise support ['createOffer', 'createAnswer'].forEach(function(method) { var nativeMethod = webkitRTCPeerConnection.prototype[method]; webkitRTCPeerConnection.prototype[method] = function() { var self = this; if (arguments.length < 1 || (arguments.length === 1 && typeof(arguments[0]) === 'object')) { var opts = arguments.length === 1 ? arguments[0] : undefined; return new Promise(function(resolve, reject) { nativeMethod.apply(self, [resolve, reject, opts]); }); } else { return nativeMethod.apply(this, arguments); } }; }); ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate'].forEach(function(method) { var nativeMethod = webkitRTCPeerConnection.prototype[method]; webkitRTCPeerConnection.prototype[method] = function() { var args = arguments; var self = this; return new Promise(function(resolve, reject) { nativeMethod.apply(self, [args[0], function() { resolve(); if (args.length >= 2) { args[1].apply(null, []); } }, function(err) { reject(err); if (args.length >= 3) { args[2].apply(null, [err]); } }] ); }); }; }); // getUserMedia constraints shim. var constraintsToChrome = function(c) { if (typeof c !== 'object' || c.mandatory || c.optional) { return c; } var cc = {}; Object.keys(c).forEach(function(key) { if (key === 'require' || key === 'advanced' || key === 'mediaSource') { return; } var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]}; if (r.exact !== undefined && typeof r.exact === 'number') { r.min = r.max = r.exact; } var oldname = function(prefix, name) { if (prefix) { return prefix + name.charAt(0).toUpperCase() + name.slice(1); } return (name === 'deviceId') ? 
'sourceId' : name; }; if (r.ideal !== undefined) { cc.optional = cc.optional || []; var oc = {}; if (typeof r.ideal === 'number') { oc[oldname('min', key)] = r.ideal; cc.optional.push(oc); oc = {}; oc[oldname('max', key)] = r.ideal; cc.optional.push(oc); } else { oc[oldname('', key)] = r.ideal; cc.optional.push(oc); } } if (r.exact !== undefined && typeof r.exact !== 'number') { cc.mandatory = cc.mandatory || {}; cc.mandatory[oldname('', key)] = r.exact; } else { ['min', 'max'].forEach(function(mix) { if (r[mix] !== undefined) { cc.mandatory = cc.mandatory || {}; cc.mandatory[oldname(mix, key)] = r[mix]; } }); } }); if (c.advanced) { cc.optional = (cc.optional || []).concat(c.advanced); } return cc; }; getUserMedia = function(constraints, onSuccess, onError) { if (constraints.audio) { constraints.audio = constraintsToChrome(constraints.audio); } if (constraints.video) { constraints.video = constraintsToChrome(constraints.video); } webrtcUtils.log('chrome: ' + JSON.stringify(constraints)); return navigator.webkitGetUserMedia(constraints, onSuccess, onError); }; navigator.getUserMedia = getUserMedia; if (!navigator.mediaDevices) { navigator.mediaDevices = {getUserMedia: requestUserMedia, enumerateDevices: function() { return new Promise(function(resolve) { var kinds = {audio: 'audioinput', video: 'videoinput'}; return MediaStreamTrack.getSources(function(devices) { resolve(devices.map(function(device) { return {label: device.label, kind: kinds[device.kind], deviceId: device.id, groupId: ''}; })); }); }); }}; } // A shim for getUserMedia method on the mediaDevices object. // TODO(KaptenJansson) remove once implemented in Chrome stable. if (!navigator.mediaDevices.getUserMedia) { navigator.mediaDevices.getUserMedia = function(constraints) { return requestUserMedia(constraints); }; } else { // Even though Chrome 45 has navigator.mediaDevices and a getUserMedia // function which returns a Promise, it does not accept spec-style // constraints. var origGetUserMedia = navigator.mediaDevices.getUserMedia. bind(navigator.mediaDevices); navigator.mediaDevices.getUserMedia = function(c) { webrtcUtils.log('spec: ' + JSON.stringify(c)); // whitespace for alignment c.audio = constraintsToChrome(c.audio); c.video = constraintsToChrome(c.video); webrtcUtils.log('chrome: ' + JSON.stringify(c)); return origGetUserMedia(c); }; } // Dummy devicechange event methods. // TODO(KaptenJansson) remove once implemented in Chrome stable. if (typeof navigator.mediaDevices.addEventListener === 'undefined') { navigator.mediaDevices.addEventListener = function() { webrtcUtils.log('Dummy mediaDevices.addEventListener called.'); }; } if (typeof navigator.mediaDevices.removeEventListener === 'undefined') { navigator.mediaDevices.removeEventListener = function() { webrtcUtils.log('Dummy mediaDevices.removeEventListener called.'); }; } // Attach a media stream to an element. 
attachMediaStream = function(element, stream) { if (webrtcDetectedVersion >= 43) { element.srcObject = stream; } else if (typeof element.src !== 'undefined') { element.src = URL.createObjectURL(stream); } else { webrtcUtils.log('Error attaching stream to element.'); } }; reattachMediaStream = function(to, from) { if (webrtcDetectedVersion >= 43) { to.srcObject = from.srcObject; } else { to.src = from.src; } }; } else if (navigator.mediaDevices && navigator.userAgent.match( /Edge\/(\d+).(\d+)$/)) { webrtcUtils.log('This appears to be Edge'); webrtcDetectedBrowser = 'edge'; webrtcDetectedVersion = webrtcUtils.extractVersion(navigator.userAgent, /Edge\/(\d+).(\d+)$/, 2); // the minimum version still supported by adapter. webrtcMinimumVersion = 12; } else { webrtcUtils.log('Browser does not appear to be WebRTC-capable'); } // Returns the result of getUserMedia as a Promise. function requestUserMedia(constraints) { return new Promise(function(resolve, reject) { getUserMedia(constraints, resolve, reject); }); } var webrtcTesting = {}; try { Object.defineProperty(webrtcTesting, 'version', { set: function(version) { webrtcDetectedVersion = version; } }); } catch (e) {} if (typeof module !== 'undefined') { var RTCPeerConnection; var RTCIceCandidate; var RTCSessionDescription; if (typeof window !== 'undefined') { RTCPeerConnection = window.RTCPeerConnection; RTCIceCandidate = window.RTCIceCandidate; RTCSessionDescription = window.RTCSessionDescription; } module.exports = { RTCPeerConnection: RTCPeerConnection, RTCIceCandidate: RTCIceCandidate, RTCSessionDescription: RTCSessionDescription, getUserMedia: getUserMedia, attachMediaStream: attachMediaStream, reattachMediaStream: reattachMediaStream, webrtcDetectedBrowser: webrtcDetectedBrowser, webrtcDetectedVersion: webrtcDetectedVersion, webrtcMinimumVersion: webrtcMinimumVersion, webrtcTesting: webrtcTesting, webrtcUtils: webrtcUtils //requestUserMedia: not exposed on purpose. //trace: not exposed on purpose. }; } else if ((typeof require === 'function') && (typeof define === 'function')) { // Expose objects and functions when RequireJS is doing the loading. define([], function() { return { RTCPeerConnection: window.RTCPeerConnection, RTCIceCandidate: window.RTCIceCandidate, RTCSessionDescription: window.RTCSessionDescription, getUserMedia: getUserMedia, attachMediaStream: attachMediaStream, reattachMediaStream: reattachMediaStream, webrtcDetectedBrowser: webrtcDetectedBrowser, webrtcDetectedVersion: webrtcDetectedVersion, webrtcMinimumVersion: webrtcMinimumVersion, webrtcTesting: webrtcTesting, webrtcUtils: webrtcUtils //requestUserMedia: not exposed on purpose. //trace: not exposed on purpose. }; }); }
js/adapter.js
/* * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. */ /* More information about these options at jshint.com/docs/options */ /* jshint browser: true, camelcase: true, curly: true, devel: true, eqeqeq: true, forin: false, globalstrict: true, quotmark: single, undef: true, unused: strict */ /* global mozRTCIceCandidate, mozRTCPeerConnection, mozRTCSessionDescription, webkitRTCPeerConnection */ /* exported trace */ 'use strict'; var RTCPeerConnection = null; var getUserMedia = null; var attachMediaStream = null; var reattachMediaStream = null; var webrtcDetectedBrowser = null; var webrtcDetectedVersion = null; function trace(text) { // This function is used for logging. if (text[text.length - 1] === '\n') { text = text.substring(0, text.length - 1); } console.log((window.performance.now() / 1000).toFixed(3) + ': ' + text); } function maybeFixConfiguration(pcConfig) { if (!pcConfig) { return; } for (var i = 0; i < pcConfig.iceServers.length; i++) { if (pcConfig.iceServers[i].hasOwnProperty('urls')) { pcConfig.iceServers[i].url = pcConfig.iceServers[i].urls; delete pcConfig.iceServers[i].urls; } } } if (navigator.mozGetUserMedia) { console.log('This appears to be Firefox'); webrtcDetectedBrowser = 'firefox'; webrtcDetectedVersion = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10); // The RTCPeerConnection object. RTCPeerConnection = function(pcConfig, pcConstraints) { // .urls is not supported in FF yet. maybeFixConfiguration(pcConfig); return new mozRTCPeerConnection(pcConfig, pcConstraints); }; // The RTCSessionDescription object. window.RTCSessionDescription = mozRTCSessionDescription; // The RTCIceCandidate object. window.RTCIceCandidate = mozRTCIceCandidate; // getUserMedia shim (only difference is the prefix). // Code from Adam Barth. getUserMedia = navigator.mozGetUserMedia.bind(navigator); navigator.getUserMedia = getUserMedia; // Creates ICE server from the URL for FF. window.createIceServer = function(url, username, password) { var iceServer = null; var urlParts = url.split(':'); if (urlParts[0].indexOf('stun') === 0) { // Create ICE server with STUN URL. iceServer = { 'url': url }; } else if (urlParts[0].indexOf('turn') === 0) { if (webrtcDetectedVersion < 27) { // Create iceServer with turn url. // Ignore the transport parameter from TURN url for FF version <=27. var turnUrlParts = url.split('?'); // Return null for createIceServer if transport=tcp. if (turnUrlParts.length === 1 || turnUrlParts[1].indexOf('transport=udp') === 0) { iceServer = { 'url': turnUrlParts[0], 'credential': password, 'username': username }; } } else { // FF 27 and above supports transport parameters in TURN url, // So passing in the full url to create iceServer. iceServer = { 'url': url, 'credential': password, 'username': username }; } } return iceServer; }; window.createIceServers = function(urls, username, password) { var iceServers = []; // Use .url for FireFox. for (var i = 0; i < urls.length; i++) { var iceServer = window.createIceServer(urls[i], username, password); if (iceServer !== null) { iceServers.push(iceServer); } } return iceServers; }; // Attach a media stream to an element. 
attachMediaStream = function(element, stream) { console.log('Attaching media stream'); element.mozSrcObject = stream; element.play(); }; reattachMediaStream = function(to, from) { console.log('Reattaching media stream'); to.mozSrcObject = from.mozSrcObject; to.play(); }; } else if (navigator.webkitGetUserMedia) { console.log('This appears to be Chrome'); webrtcDetectedBrowser = 'chrome'; // Temporary fix until crbug/374263 is fixed. // Setting Chrome version to 999, if version is unavailable. var result = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./); if (result !== null) { webrtcDetectedVersion = parseInt(result[2], 10); } else { webrtcDetectedVersion = 999; } // Creates iceServer from the url for Chrome M33 and earlier. window.createIceServer = function(url, username, password) { var iceServer = null; var urlParts = url.split(':'); if (urlParts[0].indexOf('stun') === 0) { // Create iceServer with stun url. iceServer = { 'url': url }; } else if (urlParts[0].indexOf('turn') === 0) { // Chrome M28 & above uses below TURN format. iceServer = { 'url': url, 'credential': password, 'username': username }; } return iceServer; }; // Creates iceServers from the urls for Chrome M34 and above. window.createIceServers = function(urls, username, password) { var iceServers = []; if (webrtcDetectedVersion >= 34) { // .urls is supported since Chrome M34. iceServers = { 'urls': urls, 'credential': password, 'username': username }; } else { for (var i = 0; i < urls.length; i++) { var iceServer = window.createIceServer(urls[i], username, password); if (iceServer !== null) { iceServers.push(iceServer); } } } return iceServers; }; // The RTCPeerConnection object. RTCPeerConnection = function(pcConfig, pcConstraints) { // .urls is supported since Chrome M34. if (webrtcDetectedVersion < 34) { maybeFixConfiguration(pcConfig); } return new webkitRTCPeerConnection(pcConfig, pcConstraints); }; // Get UserMedia (only difference is the prefix). // Code from Adam Barth. getUserMedia = navigator.webkitGetUserMedia.bind(navigator); navigator.getUserMedia = getUserMedia; // Attach a media stream to an element. attachMediaStream = function(element, stream) { if (typeof element.srcObject !== 'undefined') { element.srcObject = stream; } else if (typeof element.mozSrcObject !== 'undefined') { element.mozSrcObject = stream; } else if (typeof element.src !== 'undefined') { element.src = URL.createObjectURL(stream); } else { console.log('Error attaching stream to element.'); } }; reattachMediaStream = function(to, from) { to.src = from.src; }; } else { console.log('Browser does not appear to be WebRTC-capable'); }
updated adapter.js
js/adapter.js
updated adapter.js
<ide><path>s/adapter.js <ide> */ <ide> <ide> /* More information about these options at jshint.com/docs/options */ <del> <ide> /* jshint browser: true, camelcase: true, curly: true, devel: true, <del> eqeqeq: true, forin: false, globalstrict: true, quotmark: single, <del> undef: true, unused: strict */ <del> <del>/* global mozRTCIceCandidate, mozRTCPeerConnection, <del> mozRTCSessionDescription, webkitRTCPeerConnection */ <del> <del>/* exported trace */ <add> eqeqeq: true, forin: false, globalstrict: true, node: true, <add> quotmark: single, undef: true, unused: strict */ <add>/* global mozRTCIceCandidate, mozRTCPeerConnection, Promise, <add> mozRTCSessionDescription, webkitRTCPeerConnection, MediaStreamTrack */ <add>/* exported trace,requestUserMedia */ <ide> <ide> 'use strict'; <ide> <del>var RTCPeerConnection = null; <ide> var getUserMedia = null; <ide> var attachMediaStream = null; <ide> var reattachMediaStream = null; <ide> var webrtcDetectedBrowser = null; <ide> var webrtcDetectedVersion = null; <add>var webrtcMinimumVersion = null; <add>var webrtcUtils = { <add> log: function() { <add> // suppress console.log output when being included as a module. <add> if (typeof module !== 'undefined' || <add> typeof require === 'function' && typeof define === 'function') { <add> return; <add> } <add> console.log.apply(console, arguments); <add> }, <add> extractVersion: function(uastring, expr, pos) { <add> var match = uastring.match(expr); <add> return match && match.length >= pos && parseInt(match[pos], 10); <add> } <add>}; <ide> <ide> function trace(text) { <ide> // This function is used for logging. <ide> if (text[text.length - 1] === '\n') { <ide> text = text.substring(0, text.length - 1); <ide> } <del> console.log((window.performance.now() / 1000).toFixed(3) + ': ' + text); <add> if (window.performance) { <add> var now = (window.performance.now() / 1000).toFixed(3); <add> webrtcUtils.log(now + ': ' + text); <add> } else { <add> webrtcUtils.log(text); <add> } <ide> } <ide> <del>function maybeFixConfiguration(pcConfig) { <del> if (!pcConfig) { <del> return; <del> } <del> for (var i = 0; i < pcConfig.iceServers.length; i++) { <del> if (pcConfig.iceServers[i].hasOwnProperty('urls')) { <del> pcConfig.iceServers[i].url = pcConfig.iceServers[i].urls; <del> delete pcConfig.iceServers[i].urls; <del> } <del> } <add>if (typeof window === 'object') { <add> if (window.HTMLMediaElement && <add> !('srcObject' in window.HTMLMediaElement.prototype)) { <add> // Shim the srcObject property, once, when HTMLMediaElement is found. <add> Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', { <add> get: function() { <add> // If prefixed srcObject property exists, return it. <add> // Otherwise use the shimmed property, _srcObject <add> return 'mozSrcObject' in this ? this.mozSrcObject : this._srcObject; <add> }, <add> set: function(stream) { <add> if ('mozSrcObject' in this) { <add> this.mozSrcObject = stream; <add> } else { <add> // Use _srcObject as a private property for this shim <add> this._srcObject = stream; <add> // TODO: revokeObjectUrl(this.src) when !stream to release resources? <add> this.src = URL.createObjectURL(stream); <add> } <add> } <add> }); <add> } <add> // Proxy existing globals <add> getUserMedia = window.navigator && window.navigator.getUserMedia; <ide> } <ide> <del>if (navigator.mozGetUserMedia) { <del> console.log('This appears to be Firefox'); <add>// Attach a media stream to an element. 
<add>attachMediaStream = function(element, stream) { <add> element.srcObject = stream; <add>}; <add> <add>reattachMediaStream = function(to, from) { <add> to.srcObject = from.srcObject; <add>}; <add> <add>if (typeof window === 'undefined' || !window.navigator) { <add> webrtcUtils.log('This does not appear to be a browser'); <add> webrtcDetectedBrowser = 'not a browser'; <add>} else if (navigator.mozGetUserMedia && window.mozRTCPeerConnection) { <add> webrtcUtils.log('This appears to be Firefox'); <ide> <ide> webrtcDetectedBrowser = 'firefox'; <ide> <del> webrtcDetectedVersion = <del> parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10); <add> // the detected firefox version. <add> webrtcDetectedVersion = webrtcUtils.extractVersion(navigator.userAgent, <add> /Firefox\/([0-9]+)\./, 1); <add> <add> // the minimum firefox version still supported by adapter. <add> webrtcMinimumVersion = 31; <ide> <ide> // The RTCPeerConnection object. <del> RTCPeerConnection = function(pcConfig, pcConstraints) { <del> // .urls is not supported in FF yet. <del> maybeFixConfiguration(pcConfig); <del> return new mozRTCPeerConnection(pcConfig, pcConstraints); <add> window.RTCPeerConnection = function(pcConfig, pcConstraints) { <add> if (webrtcDetectedVersion < 38) { <add> // .urls is not supported in FF < 38. <add> // create RTCIceServers with a single url. <add> if (pcConfig && pcConfig.iceServers) { <add> var newIceServers = []; <add> for (var i = 0; i < pcConfig.iceServers.length; i++) { <add> var server = pcConfig.iceServers[i]; <add> if (server.hasOwnProperty('urls')) { <add> for (var j = 0; j < server.urls.length; j++) { <add> var newServer = { <add> url: server.urls[j] <add> }; <add> if (server.urls[j].indexOf('turn') === 0) { <add> newServer.username = server.username; <add> newServer.credential = server.credential; <add> } <add> newIceServers.push(newServer); <add> } <add> } else { <add> newIceServers.push(pcConfig.iceServers[i]); <add> } <add> } <add> pcConfig.iceServers = newIceServers; <add> } <add> } <add> return new mozRTCPeerConnection(pcConfig, pcConstraints); // jscs:ignore requireCapitalizedConstructors <ide> }; <ide> <ide> // The RTCSessionDescription object. <del> window.RTCSessionDescription = mozRTCSessionDescription; <add> if (!window.RTCSessionDescription) { <add> window.RTCSessionDescription = mozRTCSessionDescription; <add> } <ide> <ide> // The RTCIceCandidate object. <del> window.RTCIceCandidate = mozRTCIceCandidate; <del> <del> // getUserMedia shim (only difference is the prefix). <del> // Code from Adam Barth. <del> getUserMedia = navigator.mozGetUserMedia.bind(navigator); <add> if (!window.RTCIceCandidate) { <add> window.RTCIceCandidate = mozRTCIceCandidate; <add> } <add> <add> // getUserMedia constraints shim. <add> getUserMedia = function(constraints, onSuccess, onError) { <add> var constraintsToFF37 = function(c) { <add> if (typeof c !== 'object' || c.require) { <add> return c; <add> } <add> var require = []; <add> Object.keys(c).forEach(function(key) { <add> if (key === 'require' || key === 'advanced' || key === 'mediaSource') { <add> return; <add> } <add> var r = c[key] = (typeof c[key] === 'object') ? 
<add> c[key] : {ideal: c[key]}; <add> if (r.min !== undefined || <add> r.max !== undefined || r.exact !== undefined) { <add> require.push(key); <add> } <add> if (r.exact !== undefined) { <add> if (typeof r.exact === 'number') { <add> r.min = r.max = r.exact; <add> } else { <add> c[key] = r.exact; <add> } <add> delete r.exact; <add> } <add> if (r.ideal !== undefined) { <add> c.advanced = c.advanced || []; <add> var oc = {}; <add> if (typeof r.ideal === 'number') { <add> oc[key] = {min: r.ideal, max: r.ideal}; <add> } else { <add> oc[key] = r.ideal; <add> } <add> c.advanced.push(oc); <add> delete r.ideal; <add> if (!Object.keys(r).length) { <add> delete c[key]; <add> } <add> } <add> }); <add> if (require.length) { <add> c.require = require; <add> } <add> return c; <add> }; <add> if (webrtcDetectedVersion < 38) { <add> webrtcUtils.log('spec: ' + JSON.stringify(constraints)); <add> if (constraints.audio) { <add> constraints.audio = constraintsToFF37(constraints.audio); <add> } <add> if (constraints.video) { <add> constraints.video = constraintsToFF37(constraints.video); <add> } <add> webrtcUtils.log('ff37: ' + JSON.stringify(constraints)); <add> } <add> return navigator.mozGetUserMedia(constraints, onSuccess, onError); <add> }; <add> <ide> navigator.getUserMedia = getUserMedia; <ide> <del> // Creates ICE server from the URL for FF. <del> window.createIceServer = function(url, username, password) { <del> var iceServer = null; <del> var urlParts = url.split(':'); <del> if (urlParts[0].indexOf('stun') === 0) { <del> // Create ICE server with STUN URL. <del> iceServer = { <del> 'url': url <add> // Shim for mediaDevices on older versions. <add> if (!navigator.mediaDevices) { <add> navigator.mediaDevices = {getUserMedia: requestUserMedia, <add> addEventListener: function() { }, <add> removeEventListener: function() { } <add> }; <add> } <add> navigator.mediaDevices.enumerateDevices = <add> navigator.mediaDevices.enumerateDevices || function() { <add> return new Promise(function(resolve) { <add> var infos = [ <add> {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''}, <add> {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''} <add> ]; <add> resolve(infos); <add> }); <add> }; <add> <add> if (webrtcDetectedVersion < 41) { <add> // Work around http://bugzil.la/1169665 <add> var orgEnumerateDevices = <add> navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices); <add> navigator.mediaDevices.enumerateDevices = function() { <add> return orgEnumerateDevices().then(undefined, function(e) { <add> if (e.name === 'NotFoundError') { <add> return []; <add> } <add> throw e; <add> }); <add> }; <add> } <add>} else if (navigator.webkitGetUserMedia && window.webkitRTCPeerConnection) { <add> webrtcUtils.log('This appears to be Chrome'); <add> <add> webrtcDetectedBrowser = 'chrome'; <add> <add> // the detected chrome version. <add> webrtcDetectedVersion = webrtcUtils.extractVersion(navigator.userAgent, <add> /Chrom(e|ium)\/([0-9]+)\./, 2); <add> <add> // the minimum chrome version still supported by adapter. <add> webrtcMinimumVersion = 38; <add> <add> // The RTCPeerConnection object. 
<add> window.RTCPeerConnection = function(pcConfig, pcConstraints) { <add> // Translate iceTransportPolicy to iceTransports, <add> // see https://code.google.com/p/webrtc/issues/detail?id=4869 <add> if (pcConfig && pcConfig.iceTransportPolicy) { <add> pcConfig.iceTransports = pcConfig.iceTransportPolicy; <add> } <add> <add> var pc = new webkitRTCPeerConnection(pcConfig, pcConstraints); // jscs:ignore requireCapitalizedConstructors <add> var origGetStats = pc.getStats.bind(pc); <add> pc.getStats = function(selector, successCallback, errorCallback) { // jshint ignore: line <add> var self = this; <add> var args = arguments; <add> <add> // If selector is a function then we are in the old style stats so just <add> // pass back the original getStats format to avoid breaking old users. <add> if (arguments.length > 0 && typeof selector === 'function') { <add> return origGetStats(selector, successCallback); <add> } <add> <add> var fixChromeStats = function(response) { <add> var standardReport = {}; <add> var reports = response.result(); <add> reports.forEach(function(report) { <add> var standardStats = { <add> id: report.id, <add> timestamp: report.timestamp, <add> type: report.type <add> }; <add> report.names().forEach(function(name) { <add> standardStats[name] = report.stat(name); <add> }); <add> standardReport[standardStats.id] = standardStats; <add> }); <add> <add> return standardReport; <ide> }; <del> } else if (urlParts[0].indexOf('turn') === 0) { <del> if (webrtcDetectedVersion < 27) { <del> // Create iceServer with turn url. <del> // Ignore the transport parameter from TURN url for FF version <=27. <del> var turnUrlParts = url.split('?'); <del> // Return null for createIceServer if transport=tcp. <del> if (turnUrlParts.length === 1 || <del> turnUrlParts[1].indexOf('transport=udp') === 0) { <del> iceServer = { <del> 'url': turnUrlParts[0], <del> 'credential': password, <del> 'username': username <del> }; <del> } <add> <add> if (arguments.length >= 2) { <add> var successCallbackWrapper = function(response) { <add> args[1](fixChromeStats(response)); <add> }; <add> <add> return origGetStats.apply(this, [successCallbackWrapper, arguments[0]]); <add> } <add> <add> // promise-support <add> return new Promise(function(resolve, reject) { <add> if (args.length === 1 && selector === null) { <add> origGetStats.apply(self, [ <add> function(response) { <add> resolve.apply(null, [fixChromeStats(response)]); <add> }, reject]); <add> } else { <add> origGetStats.apply(self, [resolve, reject]); <add> } <add> }); <add> }; <add> <add> return pc; <add> }; <add> <add> // add promise support <add> ['createOffer', 'createAnswer'].forEach(function(method) { <add> var nativeMethod = webkitRTCPeerConnection.prototype[method]; <add> webkitRTCPeerConnection.prototype[method] = function() { <add> var self = this; <add> if (arguments.length < 1 || (arguments.length === 1 && <add> typeof(arguments[0]) === 'object')) { <add> var opts = arguments.length === 1 ? arguments[0] : undefined; <add> return new Promise(function(resolve, reject) { <add> nativeMethod.apply(self, [resolve, reject, opts]); <add> }); <ide> } else { <del> // FF 27 and above supports transport parameters in TURN url, <del> // So passing in the full url to create iceServer. <del> iceServer = { <del> 'url': url, <del> 'credential': password, <del> 'username': username <del> }; <del> } <del> } <del> return iceServer; <del> }; <del> <del> window.createIceServers = function(urls, username, password) { <del> var iceServers = []; <del> // Use .url for FireFox. 
<del> for (var i = 0; i < urls.length; i++) { <del> var iceServer = <del> window.createIceServer(urls[i], username, password); <del> if (iceServer !== null) { <del> iceServers.push(iceServer); <del> } <del> } <del> return iceServers; <del> }; <add> return nativeMethod.apply(this, arguments); <add> } <add> }; <add> }); <add> <add> ['setLocalDescription', 'setRemoteDescription', <add> 'addIceCandidate'].forEach(function(method) { <add> var nativeMethod = webkitRTCPeerConnection.prototype[method]; <add> webkitRTCPeerConnection.prototype[method] = function() { <add> var args = arguments; <add> var self = this; <add> return new Promise(function(resolve, reject) { <add> nativeMethod.apply(self, [args[0], <add> function() { <add> resolve(); <add> if (args.length >= 2) { <add> args[1].apply(null, []); <add> } <add> }, <add> function(err) { <add> reject(err); <add> if (args.length >= 3) { <add> args[2].apply(null, [err]); <add> } <add> }] <add> ); <add> }); <add> }; <add> }); <add> <add> // getUserMedia constraints shim. <add> var constraintsToChrome = function(c) { <add> if (typeof c !== 'object' || c.mandatory || c.optional) { <add> return c; <add> } <add> var cc = {}; <add> Object.keys(c).forEach(function(key) { <add> if (key === 'require' || key === 'advanced' || key === 'mediaSource') { <add> return; <add> } <add> var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]}; <add> if (r.exact !== undefined && typeof r.exact === 'number') { <add> r.min = r.max = r.exact; <add> } <add> var oldname = function(prefix, name) { <add> if (prefix) { <add> return prefix + name.charAt(0).toUpperCase() + name.slice(1); <add> } <add> return (name === 'deviceId') ? 'sourceId' : name; <add> }; <add> if (r.ideal !== undefined) { <add> cc.optional = cc.optional || []; <add> var oc = {}; <add> if (typeof r.ideal === 'number') { <add> oc[oldname('min', key)] = r.ideal; <add> cc.optional.push(oc); <add> oc = {}; <add> oc[oldname('max', key)] = r.ideal; <add> cc.optional.push(oc); <add> } else { <add> oc[oldname('', key)] = r.ideal; <add> cc.optional.push(oc); <add> } <add> } <add> if (r.exact !== undefined && typeof r.exact !== 'number') { <add> cc.mandatory = cc.mandatory || {}; <add> cc.mandatory[oldname('', key)] = r.exact; <add> } else { <add> ['min', 'max'].forEach(function(mix) { <add> if (r[mix] !== undefined) { <add> cc.mandatory = cc.mandatory || {}; <add> cc.mandatory[oldname(mix, key)] = r[mix]; <add> } <add> }); <add> } <add> }); <add> if (c.advanced) { <add> cc.optional = (cc.optional || []).concat(c.advanced); <add> } <add> return cc; <add> }; <add> <add> getUserMedia = function(constraints, onSuccess, onError) { <add> if (constraints.audio) { <add> constraints.audio = constraintsToChrome(constraints.audio); <add> } <add> if (constraints.video) { <add> constraints.video = constraintsToChrome(constraints.video); <add> } <add> webrtcUtils.log('chrome: ' + JSON.stringify(constraints)); <add> return navigator.webkitGetUserMedia(constraints, onSuccess, onError); <add> }; <add> navigator.getUserMedia = getUserMedia; <add> <add> if (!navigator.mediaDevices) { <add> navigator.mediaDevices = {getUserMedia: requestUserMedia, <add> enumerateDevices: function() { <add> return new Promise(function(resolve) { <add> var kinds = {audio: 'audioinput', video: 'videoinput'}; <add> return MediaStreamTrack.getSources(function(devices) { <add> resolve(devices.map(function(device) { <add> return {label: device.label, <add> kind: kinds[device.kind], <add> deviceId: device.id, <add> groupId: ''}; <add> })); <add> }); 
<add> }); <add> }}; <add> } <add> <add> // A shim for getUserMedia method on the mediaDevices object. <add> // TODO(KaptenJansson) remove once implemented in Chrome stable. <add> if (!navigator.mediaDevices.getUserMedia) { <add> navigator.mediaDevices.getUserMedia = function(constraints) { <add> return requestUserMedia(constraints); <add> }; <add> } else { <add> // Even though Chrome 45 has navigator.mediaDevices and a getUserMedia <add> // function which returns a Promise, it does not accept spec-style <add> // constraints. <add> var origGetUserMedia = navigator.mediaDevices.getUserMedia. <add> bind(navigator.mediaDevices); <add> navigator.mediaDevices.getUserMedia = function(c) { <add> webrtcUtils.log('spec: ' + JSON.stringify(c)); // whitespace for alignment <add> c.audio = constraintsToChrome(c.audio); <add> c.video = constraintsToChrome(c.video); <add> webrtcUtils.log('chrome: ' + JSON.stringify(c)); <add> return origGetUserMedia(c); <add> }; <add> } <add> <add> // Dummy devicechange event methods. <add> // TODO(KaptenJansson) remove once implemented in Chrome stable. <add> if (typeof navigator.mediaDevices.addEventListener === 'undefined') { <add> navigator.mediaDevices.addEventListener = function() { <add> webrtcUtils.log('Dummy mediaDevices.addEventListener called.'); <add> }; <add> } <add> if (typeof navigator.mediaDevices.removeEventListener === 'undefined') { <add> navigator.mediaDevices.removeEventListener = function() { <add> webrtcUtils.log('Dummy mediaDevices.removeEventListener called.'); <add> }; <add> } <ide> <ide> // Attach a media stream to an element. <ide> attachMediaStream = function(element, stream) { <del> console.log('Attaching media stream'); <del> element.mozSrcObject = stream; <del> element.play(); <del> }; <del> <del> reattachMediaStream = function(to, from) { <del> console.log('Reattaching media stream'); <del> to.mozSrcObject = from.mozSrcObject; <del> to.play(); <del> }; <del> <del>} else if (navigator.webkitGetUserMedia) { <del> console.log('This appears to be Chrome'); <del> <del> webrtcDetectedBrowser = 'chrome'; <del> // Temporary fix until crbug/374263 is fixed. <del> // Setting Chrome version to 999, if version is unavailable. <del> var result = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./); <del> if (result !== null) { <del> webrtcDetectedVersion = parseInt(result[2], 10); <del> } else { <del> webrtcDetectedVersion = 999; <del> } <del> <del> // Creates iceServer from the url for Chrome M33 and earlier. <del> window.createIceServer = function(url, username, password) { <del> var iceServer = null; <del> var urlParts = url.split(':'); <del> if (urlParts[0].indexOf('stun') === 0) { <del> // Create iceServer with stun url. <del> iceServer = { <del> 'url': url <del> }; <del> } else if (urlParts[0].indexOf('turn') === 0) { <del> // Chrome M28 & above uses below TURN format. <del> iceServer = { <del> 'url': url, <del> 'credential': password, <del> 'username': username <del> }; <del> } <del> return iceServer; <del> }; <del> <del> // Creates iceServers from the urls for Chrome M34 and above. <del> window.createIceServers = function(urls, username, password) { <del> var iceServers = []; <del> if (webrtcDetectedVersion >= 34) { <del> // .urls is supported since Chrome M34. 
<del> iceServers = { <del> 'urls': urls, <del> 'credential': password, <del> 'username': username <del> }; <del> } else { <del> for (var i = 0; i < urls.length; i++) { <del> var iceServer = <del> window.createIceServer(urls[i], username, password); <del> if (iceServer !== null) { <del> iceServers.push(iceServer); <del> } <del> } <del> } <del> return iceServers; <del> }; <del> <del> // The RTCPeerConnection object. <del> RTCPeerConnection = function(pcConfig, pcConstraints) { <del> // .urls is supported since Chrome M34. <del> if (webrtcDetectedVersion < 34) { <del> maybeFixConfiguration(pcConfig); <del> } <del> return new webkitRTCPeerConnection(pcConfig, pcConstraints); <del> }; <del> <del> // Get UserMedia (only difference is the prefix). <del> // Code from Adam Barth. <del> getUserMedia = navigator.webkitGetUserMedia.bind(navigator); <del> navigator.getUserMedia = getUserMedia; <del> <del> // Attach a media stream to an element. <del> attachMediaStream = function(element, stream) { <del> if (typeof element.srcObject !== 'undefined') { <add> if (webrtcDetectedVersion >= 43) { <ide> element.srcObject = stream; <del> } else if (typeof element.mozSrcObject !== 'undefined') { <del> element.mozSrcObject = stream; <ide> } else if (typeof element.src !== 'undefined') { <ide> element.src = URL.createObjectURL(stream); <ide> } else { <del> console.log('Error attaching stream to element.'); <del> } <del> }; <del> <add> webrtcUtils.log('Error attaching stream to element.'); <add> } <add> }; <ide> reattachMediaStream = function(to, from) { <del> to.src = from.src; <del> }; <add> if (webrtcDetectedVersion >= 43) { <add> to.srcObject = from.srcObject; <add> } else { <add> to.src = from.src; <add> } <add> }; <add> <add>} else if (navigator.mediaDevices && navigator.userAgent.match( <add> /Edge\/(\d+).(\d+)$/)) { <add> webrtcUtils.log('This appears to be Edge'); <add> webrtcDetectedBrowser = 'edge'; <add> <add> webrtcDetectedVersion = webrtcUtils.extractVersion(navigator.userAgent, <add> /Edge\/(\d+).(\d+)$/, 2); <add> <add> // the minimum version still supported by adapter. <add> webrtcMinimumVersion = 12; <ide> } else { <del> console.log('Browser does not appear to be WebRTC-capable'); <add> webrtcUtils.log('Browser does not appear to be WebRTC-capable'); <ide> } <add> <add>// Returns the result of getUserMedia as a Promise. 
<add>function requestUserMedia(constraints) { <add> return new Promise(function(resolve, reject) { <add> getUserMedia(constraints, resolve, reject); <add> }); <add>} <add> <add>var webrtcTesting = {}; <add>try { <add> Object.defineProperty(webrtcTesting, 'version', { <add> set: function(version) { <add> webrtcDetectedVersion = version; <add> } <add> }); <add>} catch (e) {} <add> <add>if (typeof module !== 'undefined') { <add> var RTCPeerConnection; <add> var RTCIceCandidate; <add> var RTCSessionDescription; <add> if (typeof window !== 'undefined') { <add> RTCPeerConnection = window.RTCPeerConnection; <add> RTCIceCandidate = window.RTCIceCandidate; <add> RTCSessionDescription = window.RTCSessionDescription; <add> } <add> module.exports = { <add> RTCPeerConnection: RTCPeerConnection, <add> RTCIceCandidate: RTCIceCandidate, <add> RTCSessionDescription: RTCSessionDescription, <add> getUserMedia: getUserMedia, <add> attachMediaStream: attachMediaStream, <add> reattachMediaStream: reattachMediaStream, <add> webrtcDetectedBrowser: webrtcDetectedBrowser, <add> webrtcDetectedVersion: webrtcDetectedVersion, <add> webrtcMinimumVersion: webrtcMinimumVersion, <add> webrtcTesting: webrtcTesting, <add> webrtcUtils: webrtcUtils <add> //requestUserMedia: not exposed on purpose. <add> //trace: not exposed on purpose. <add> }; <add>} else if ((typeof require === 'function') && (typeof define === 'function')) { <add> // Expose objects and functions when RequireJS is doing the loading. <add> define([], function() { <add> return { <add> RTCPeerConnection: window.RTCPeerConnection, <add> RTCIceCandidate: window.RTCIceCandidate, <add> RTCSessionDescription: window.RTCSessionDescription, <add> getUserMedia: getUserMedia, <add> attachMediaStream: attachMediaStream, <add> reattachMediaStream: reattachMediaStream, <add> webrtcDetectedBrowser: webrtcDetectedBrowser, <add> webrtcDetectedVersion: webrtcDetectedVersion, <add> webrtcMinimumVersion: webrtcMinimumVersion, <add> webrtcTesting: webrtcTesting, <add> webrtcUtils: webrtcUtils <add> //requestUserMedia: not exposed on purpose. <add> //trace: not exposed on purpose. <add> }; <add> }); <add>}
Java
apache-2.0
8f92fc9ae2612234fe39cea43b60ad7023cc14ec
0
realityforge/replicant,realityforge/replicant
package org.realityforge.replicant.client; import java.util.Collections; import java.util.LinkedList; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.inject.Inject; import org.realityforge.replicant.client.transport.ClientSession; import org.realityforge.replicant.client.transport.RequestEntry; import org.realityforge.replicant.client.transport.RequestManager; import org.realityforge.replicant.client.transport.SessionContext; /** * Class from which to extend to implement a service that loads data from a change set. * Data can be loaded by bulk or incrementally and the load can be broken up into several * steps to avoid locking a thread such as in GWT. */ public abstract class AbstractDataLoaderService<T extends ClientSession> { private static final int DEFAULT_CHANGES_TO_PROCESS_PER_TICK = 100; private static final int DEFAULT_LINKS_TO_PROCESS_PER_TICK = 100; protected static final Logger LOG = Logger.getLogger( AbstractDataLoaderService.class.getName() ); @Inject private ChangeMapper _changeMapper; @Inject private EntityChangeBroker _changeBroker; @Inject private EntityRepository _repository; private int _lastKnownChangeSet; /** * The set of data load actions that still need to have the json parsed. */ private final LinkedList<DataLoadAction> _pendingActions = new LinkedList<>(); /** * The set of data load actions that have their json parsed. They are inserted into * this list according to their sequence. */ private final LinkedList<DataLoadAction> _parsedActions = new LinkedList<>(); /** * Sometimes a data load action occurs that is not initiated by the server. These do not * typically need to be sequenced and are prioritized above other actions. */ private final LinkedList<DataLoadAction> _oobActions = new LinkedList<>(); private DataLoadAction _currentAction; private int _updateCount; private int _removeCount; private int _linkCount; private int _changesToProcessPerTick = DEFAULT_CHANGES_TO_PROCESS_PER_TICK; private int _linksToProcessPerTick = DEFAULT_LINKS_TO_PROCESS_PER_TICK; private T _session; protected void setSession( final T session ) { _session = session; // This should probably be moved elsewhere ... but where? 
SessionContext.setSession( session ); } protected final EntityChangeBroker getChangeBroker() { return _changeBroker; } protected final ChangeMapper getChangeMapper() { return _changeMapper; } public final T getSession() { return _session; } protected abstract void scheduleDataLoad(); protected final void setChangesToProcessPerTick( final int changesToProcessPerTick ) { _changesToProcessPerTick = changesToProcessPerTick; } protected final void setLinksToProcessPerTick( final int linksToProcessPerTick ) { _linksToProcessPerTick = linksToProcessPerTick; } protected final int getLastKnownChangeSet() { return _lastKnownChangeSet; } protected abstract ChangeSet parseChangeSet( String rawJsonData ); @SuppressWarnings( "ConstantConditions" ) protected final void enqueueDataLoad( @Nonnull final String rawJsonData ) { if ( null == rawJsonData ) { throw new IllegalStateException( "null == rawJsonData" ); } _pendingActions.add( new DataLoadAction( rawJsonData, false ) ); scheduleDataLoad(); } @SuppressWarnings( "ConstantConditions" ) protected final void enqueueOOB( @Nonnull final String rawJsonData, @Nullable final Runnable runnable, final boolean bulkLoad ) { if ( null == rawJsonData ) { throw new IllegalStateException( "null == rawJsonData" ); } final DataLoadAction action = new DataLoadAction( rawJsonData, true ); action.setRunnable( runnable ); action.setBulkLoad( bulkLoad ); _oobActions.add( action ); scheduleDataLoad(); } protected final boolean progressDataLoad() { // Step: Retrieve any out of band actions if ( null == _currentAction && !_oobActions.isEmpty() ) { _currentAction = _oobActions.removeFirst(); return true; } //Step: Retrieve the action from the parsed queue if it is the next in the sequence if ( null == _currentAction && !_parsedActions.isEmpty() ) { final DataLoadAction action = _parsedActions.get( 0 ); final ChangeSet changeSet = action.getChangeSet(); assert null != changeSet; if ( action.isOob() || _lastKnownChangeSet + 1 == changeSet.getSequence() ) { _currentAction = _parsedActions.remove(); if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Parsed Action Selected: " + _currentAction ); } return true; } } // Abort if there is no pending data load actions to take if ( null == _currentAction && _pendingActions.isEmpty() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "No data to load. Terminating incremental load process." ); } return false; } //Step: Retrieve the action from the un-parsed queue if ( null == _currentAction ) { _currentAction = _pendingActions.remove(); if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Un-parsed Action Selected: " + _currentAction ); } return true; } //Step: Parse the json final String rawJsonData = _currentAction.getRawJsonData(); if ( null != rawJsonData ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Parsing JSON: " + _currentAction ); } final ChangeSet changeSet = parseChangeSet( _currentAction.getRawJsonData() ); final RequestManager requestManager = getSession().getRequestManager(); final String requestID = changeSet.getRequestID(); final RequestEntry request = null != requestID ? 
requestManager.getRequest( requestID ) : null; _currentAction.setChangeSet( changeSet, request ); _parsedActions.add( _currentAction ); Collections.sort( _parsedActions ); _currentAction = null; return true; } //Step: Setup the change recording state if ( _currentAction.needsBrokerPause() ) { _currentAction.markBrokerPaused(); if ( _currentAction.isBulkLoad() ) { getChangeBroker().disable(); } else { getChangeBroker().pause(); } if ( LOG.isLoggable( Level.INFO ) ) { _updateCount = 0; _removeCount = 0; _linkCount = 0; } } //Step: Process a chunk of changes if ( _currentAction.areChangesPending() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Processing ChangeSet: " + _currentAction ); } Change change; for ( int i = 0; i < _changesToProcessPerTick && null != ( change = _currentAction.nextChange() ); i++ ) { final Object entity = getChangeMapper().applyChange( change ); if ( LOG.isLoggable( Level.INFO ) ) { if ( change.isUpdate() ) { _updateCount++; } else { _removeCount++; } } _currentAction.changeProcessed( change.isUpdate(), entity ); } return true; } //Step: Calculate the entities that need to be linked if ( !_currentAction.areEntityLinksCalculated() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Calculating Link list: " + _currentAction ); } _currentAction.calculateEntitiesToLink(); return true; } //Step: Process a chunk of links if ( _currentAction.areEntityLinksPending() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Linking Entities: " + _currentAction ); } Linkable linkable; for ( int i = 0; i < _linksToProcessPerTick && null != ( linkable = _currentAction.nextEntityToLink() ); i++ ) { linkable.link(); if ( LOG.isLoggable( Level.INFO ) ) { _linkCount++; } } return true; } final ChangeSet set = _currentAction.getChangeSet(); assert null != set; //Step: Finalize the change set if ( !_currentAction.hasWorldBeenNotified() ) { _currentAction.markWorldAsNotified(); if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Finalizing action: " + _currentAction ); } _lastKnownChangeSet = set.getSequence(); if ( _currentAction.isBulkLoad() ) { if ( _currentAction.hasBrokerBeenPaused() ) { getChangeBroker().enable(); } } else { if ( _currentAction.hasBrokerBeenPaused() ) { getChangeBroker().resume(); } } if ( shouldValidateOnLoad() ) { validateRepository(); } return true; } if ( LOG.isLoggable( Level.INFO ) ) { LOG.info( "ChangeSet " + set.getSequence() + " involved " + _updateCount + " updates, " + _removeCount + " removes and " + _linkCount + " links." ); } //Step: Run the post actions if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Running post action and cleaning action: " + _currentAction ); } final RequestEntry request = _currentAction.getRequest(); if ( null != request ) { request.markResultsAsArrived(); } final Runnable runnable = _currentAction.getRunnable(); if ( null != runnable ) { runnable.run(); //Request can be null for an out of band action with runnable if ( null != request ) { // We can remove the request because this side ran second and the // RPC channel has already returned. getSession().getRequestManager().removeRequest( request.getRequestID() ); } } onDataLoadComplete( _currentAction.isBulkLoad(), set.getRequestID() ); _currentAction = null; return true; } /** * Invoked when a change set has been completely processed. * * @param bulkLoad true if the change set was processed as a bulk load, false otherwise. * @param requestID the local request id that initiated the changes. 
*/ protected void onDataLoadComplete( final boolean bulkLoad, @Nullable final String requestID ) { } protected Level getLogLevel() { return Level.FINEST; } protected final EntityRepository getRepository() { return _repository; } /** * @return true if a load action should result in the EntityRepository being validated. */ protected boolean shouldValidateOnLoad() { return false; } /** * Perform a validation of the EntityRepository. */ protected final void validateRepository() { try { _repository.validate(); } catch ( final Exception e ) { throw new IllegalStateException( e.getMessage(), e ); } } }
src/main/java/org/realityforge/replicant/client/AbstractDataLoaderService.java
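As a point of orientation (this sketch is not part of the repository), a concrete loader built on the AbstractDataLoaderService above only has to supply a scheduling strategy and a ChangeSet parser; everything else, including the incremental progressDataLoad() loop, is inherited. The class name, session type argument and parser helper below are placeholders.

package org.realityforge.replicant.client;

import org.realityforge.replicant.client.transport.ClientSession;

// Hypothetical example: ExampleDataLoaderService and ExampleChangeSetParser are
// illustrative names only and do not exist in this code base.
public class ExampleDataLoaderService
  extends AbstractDataLoaderService<ClientSession>
{
  @Override
  protected void scheduleDataLoad()
  {
    // Drain all queued actions right away. A GWT client would instead schedule an
    // incremental command so that each progressDataLoad() step runs in its own tick.
    while ( progressDataLoad() )
    {
      // keep stepping until progressDataLoad() reports that no work remains
    }
  }

  @Override
  protected ChangeSet parseChangeSet( final String rawJsonData )
  {
    // Delegate to whatever JSON binding the application uses (placeholder call).
    return ExampleChangeSetParser.parse( rawJsonData );
  }
}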
package org.realityforge.replicant.client; import java.util.Collections; import java.util.LinkedList; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.inject.Inject; import org.realityforge.replicant.client.transport.ClientSession; import org.realityforge.replicant.client.transport.RequestEntry; import org.realityforge.replicant.client.transport.RequestManager; import org.realityforge.replicant.client.transport.SessionContext; /** * Class from which to extend to implement a service that loads data from a change set. * Data can be loaded by bulk or incrementally and the load can be broken up into several * steps to avoid locking a thread such as in GWT. */ public abstract class AbstractDataLoaderService<T extends ClientSession> { private static final int DEFAULT_CHANGES_TO_PROCESS_PER_TICK = 100; private static final int DEFAULT_LINKS_TO_PROCESS_PER_TICK = 100; protected static final Logger LOG = Logger.getLogger( AbstractDataLoaderService.class.getName() ); @Inject private ChangeMapper _changeMapper; @Inject private EntityChangeBroker _changeBroker; @Inject private EntityRepository _repository; private int _lastKnownChangeSet; /** * The set of data load actions that still need to have the json parsed. */ private final LinkedList<DataLoadAction> _pendingActions = new LinkedList<>(); /** * The set of data load actions that have their json parsed. They are inserted into * this list according to their sequence. */ private final LinkedList<DataLoadAction> _parsedActions = new LinkedList<>(); /** * Sometimes a data load action occurs that is not initiated by the server. These do not * typically need to be sequenced and are prioritized above other actions. */ private final LinkedList<DataLoadAction> _oobActions = new LinkedList<>(); private DataLoadAction _currentAction; private int _updateCount; private int _removeCount; private int _linkCount; private int _changesToProcessPerTick = DEFAULT_CHANGES_TO_PROCESS_PER_TICK; private int _linksToProcessPerTick = DEFAULT_LINKS_TO_PROCESS_PER_TICK; private T _session; protected void setSession( final T session ) { _session = session; // This should probably be moved elsewhere ... but where? 
SessionContext.setSession( session ); } public final T getSession() { return _session; } protected abstract void scheduleDataLoad(); protected final void setChangesToProcessPerTick( final int changesToProcessPerTick ) { _changesToProcessPerTick = changesToProcessPerTick; } protected final void setLinksToProcessPerTick( final int linksToProcessPerTick ) { _linksToProcessPerTick = linksToProcessPerTick; } protected final int getLastKnownChangeSet() { return _lastKnownChangeSet; } protected abstract ChangeSet parseChangeSet( String rawJsonData ); @SuppressWarnings( "ConstantConditions" ) protected final void enqueueDataLoad( @Nonnull final String rawJsonData ) { if ( null == rawJsonData ) { throw new IllegalStateException( "null == rawJsonData" ); } _pendingActions.add( new DataLoadAction( rawJsonData, false ) ); scheduleDataLoad(); } @SuppressWarnings( "ConstantConditions" ) protected final void enqueueOOB( @Nonnull final String rawJsonData, @Nullable final Runnable runnable, final boolean bulkLoad ) { if ( null == rawJsonData ) { throw new IllegalStateException( "null == rawJsonData" ); } final DataLoadAction action = new DataLoadAction( rawJsonData, true ); action.setRunnable( runnable ); action.setBulkLoad( bulkLoad ); _oobActions.add( action ); scheduleDataLoad(); } protected final boolean progressDataLoad() { // Step: Retrieve any out of band actions if ( null == _currentAction && !_oobActions.isEmpty() ) { _currentAction = _oobActions.removeFirst(); return true; } //Step: Retrieve the action from the parsed queue if it is the next in the sequence if ( null == _currentAction && !_parsedActions.isEmpty() ) { final DataLoadAction action = _parsedActions.get( 0 ); final ChangeSet changeSet = action.getChangeSet(); assert null != changeSet; if ( action.isOob() || _lastKnownChangeSet + 1 == changeSet.getSequence() ) { _currentAction = _parsedActions.remove(); if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Parsed Action Selected: " + _currentAction ); } return true; } } // Abort if there is no pending data load actions to take if ( null == _currentAction && _pendingActions.isEmpty() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "No data to load. Terminating incremental load process." ); } return false; } //Step: Retrieve the action from the un-parsed queue if ( null == _currentAction ) { _currentAction = _pendingActions.remove(); if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Un-parsed Action Selected: " + _currentAction ); } return true; } //Step: Parse the json final String rawJsonData = _currentAction.getRawJsonData(); if ( null != rawJsonData ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Parsing JSON: " + _currentAction ); } final ChangeSet changeSet = parseChangeSet( _currentAction.getRawJsonData() ); final RequestManager requestManager = getSession().getRequestManager(); final String requestID = changeSet.getRequestID(); final RequestEntry request = null != requestID ? 
requestManager.getRequest( requestID ) : null; _currentAction.setChangeSet( changeSet, request ); _parsedActions.add( _currentAction ); Collections.sort( _parsedActions ); _currentAction = null; return true; } //Step: Setup the change recording state if ( _currentAction.needsBrokerPause() ) { _currentAction.markBrokerPaused(); if ( _currentAction.isBulkLoad() ) { _changeBroker.disable(); } else { _changeBroker.pause(); } if ( LOG.isLoggable( Level.INFO ) ) { _updateCount = 0; _removeCount = 0; _linkCount = 0; } } //Step: Process a chunk of changes if ( _currentAction.areChangesPending() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Processing ChangeSet: " + _currentAction ); } Change change; for ( int i = 0; i < _changesToProcessPerTick && null != ( change = _currentAction.nextChange() ); i++ ) { final Object entity = _changeMapper.applyChange( change ); if ( LOG.isLoggable( Level.INFO ) ) { if ( change.isUpdate() ) { _updateCount++; } else { _removeCount++; } } _currentAction.changeProcessed( change.isUpdate(), entity ); } return true; } //Step: Calculate the entities that need to be linked if ( !_currentAction.areEntityLinksCalculated() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Calculating Link list: " + _currentAction ); } _currentAction.calculateEntitiesToLink(); return true; } //Step: Process a chunk of links if ( _currentAction.areEntityLinksPending() ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Linking Entities: " + _currentAction ); } Linkable linkable; for ( int i = 0; i < _linksToProcessPerTick && null != ( linkable = _currentAction.nextEntityToLink() ); i++ ) { linkable.link(); if ( LOG.isLoggable( Level.INFO ) ) { _linkCount++; } } return true; } final ChangeSet set = _currentAction.getChangeSet(); assert null != set; //Step: Finalize the change set if ( !_currentAction.hasWorldBeenNotified() ) { _currentAction.markWorldAsNotified(); if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Finalizing action: " + _currentAction ); } _lastKnownChangeSet = set.getSequence(); if ( _currentAction.isBulkLoad() ) { if ( _currentAction.hasBrokerBeenPaused() ) { _changeBroker.enable(); } } else { if ( _currentAction.hasBrokerBeenPaused() ) { _changeBroker.resume(); } } if ( shouldValidateOnLoad() ) { validateRepository(); } return true; } if ( LOG.isLoggable( Level.INFO ) ) { LOG.info( "ChangeSet " + set.getSequence() + " involved " + _updateCount + " updates, " + _removeCount + " removes and " + _linkCount + " links." ); } //Step: Run the post actions if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), "Running post action and cleaning action: " + _currentAction ); } final RequestEntry request = _currentAction.getRequest(); if ( null != request ) { request.markResultsAsArrived(); } final Runnable runnable = _currentAction.getRunnable(); if ( null != runnable ) { runnable.run(); //Request can be null for an out of band action with runnable if ( null != request ) { // We can remove the request because this side ran second and the // RPC channel has already returned. getSession().getRequestManager().removeRequest( request.getRequestID() ); } } onDataLoadComplete( _currentAction.isBulkLoad(), set.getRequestID() ); _currentAction = null; return true; } /** * Invoked when a change set has been completely processed. * * @param bulkLoad true if the change set was processed as a bulk load, false otherwise. * @param requestID the local request id that initiated the changes. 
*/ protected void onDataLoadComplete( final boolean bulkLoad, @Nullable final String requestID ) { } protected Level getLogLevel() { return Level.FINEST; } protected final EntityRepository getRepository() { return _repository; } /** * @return true if a load action should result in the EntityRepository being validated. */ protected boolean shouldValidateOnLoad() { return false; } /** * Perform a validation of the EntityRepository. */ protected final void validateRepository() { try { _repository.validate(); } catch ( final Exception e ) { throw new IllegalStateException( e.getMessage(), e ); } } }
Access some fields via accessors
src/main/java/org/realityforge/replicant/client/AbstractDataLoaderService.java
Access some fields via accessors
<ide><path>rc/main/java/org/realityforge/replicant/client/AbstractDataLoaderService.java <ide> SessionContext.setSession( session ); <ide> } <ide> <add> protected final EntityChangeBroker getChangeBroker() <add> { <add> return _changeBroker; <add> } <add> <add> protected final ChangeMapper getChangeMapper() <add> { <add> return _changeMapper; <add> } <add> <ide> public final T getSession() <ide> { <ide> return _session; <ide> _currentAction.markBrokerPaused(); <ide> if ( _currentAction.isBulkLoad() ) <ide> { <del> _changeBroker.disable(); <add> getChangeBroker().disable(); <ide> } <ide> else <ide> { <del> _changeBroker.pause(); <add> getChangeBroker().pause(); <ide> } <ide> if ( LOG.isLoggable( Level.INFO ) ) <ide> { <ide> Change change; <ide> for ( int i = 0; i < _changesToProcessPerTick && null != ( change = _currentAction.nextChange() ); i++ ) <ide> { <del> final Object entity = _changeMapper.applyChange( change ); <add> final Object entity = getChangeMapper().applyChange( change ); <ide> if ( LOG.isLoggable( Level.INFO ) ) <ide> { <ide> if ( change.isUpdate() ) <ide> { <ide> if ( _currentAction.hasBrokerBeenPaused() ) <ide> { <del> _changeBroker.enable(); <add> getChangeBroker().enable(); <ide> } <ide> } <ide> else <ide> { <ide> if ( _currentAction.hasBrokerBeenPaused() ) <ide> { <del> _changeBroker.resume(); <add> getChangeBroker().resume(); <ide> } <ide> } <ide> if ( shouldValidateOnLoad() )
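The diff above is small but it changes what subclasses can do: _changeBroker and _changeMapper are private, so the new protected accessors are the only way a concrete loader can reach the injected collaborators. A hypothetical sketch of a subclass making use of that:

package org.realityforge.replicant.client;

import org.realityforge.replicant.client.transport.ClientSession;

// Illustrative only: BulkImportingLoader is a hypothetical subclass showing the
// accessor introduced by this commit; it is not part of the repository.
public abstract class BulkImportingLoader
  extends AbstractDataLoaderService<ClientSession>
{
  protected final void runWithoutChangeEvents( final Runnable work )
  {
    // Same pattern the base class uses for bulk loads: disable event delivery
    // while the work runs, then turn it back on.
    getChangeBroker().disable();
    try
    {
      work.run();
    }
    finally
    {
      getChangeBroker().enable();
    }
  }
}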
Java
apache-2.0
e76cd9a0163cd5e903f5bfa00cfdc7822b3ddf28
0
jvalkeal/spring-cloud-data,trisberg/spring-cloud-dataflow,donovanmuller/spring-cloud-dataflow,mminella/spring-cloud-data,mbogoevici/spring-cloud-data,trisberg/spring-cloud-dataflow,ericbottard/spring-cloud-dataflow,markfisher/spring-cloud-data,jvalkeal/spring-cloud-dataflow,markfisher/spring-cloud-data,mbogoevici/spring-cloud-data,markfisher/spring-cloud-dataflow,markfisher/spring-cloud-dataflow,markfisher/spring-cloud-data,jvalkeal/spring-cloud-dataflow,mbogoevici/spring-cloud-data,pperalta/spring-cloud-dataflow,sabbyanandan/spring-cloud-dataflow,markfisher/spring-cloud-dataflow,ilayaperumalg/spring-cloud-dataflow,spring-cloud/spring-cloud-dataflow,markfisher/spring-cloud-dataflow,pperalta/spring-cloud-dataflow,jvalkeal/spring-cloud-dataflow,sabbyanandan/spring-cloud-dataflow,ilayaperumalg/spring-cloud-dataflow,jvalkeal/spring-cloud-data,jvalkeal/spring-cloud-dataflow,ericbottard/spring-cloud-dataflow,jvalkeal/spring-cloud-data,pperalta/spring-cloud-dataflow,trisberg/spring-cloud-dataflow,jvalkeal/spring-cloud-dataflow,cppwfs/spring-cloud-dataflow,spring-cloud/spring-cloud-data,spring-cloud/spring-cloud-data,cppwfs/spring-cloud-dataflow,markpollack/spring-cloud-dataflow,spring-cloud/spring-cloud-dataflow,donovanmuller/spring-cloud-dataflow,ilayaperumalg/spring-cloud-dataflow,markpollack/spring-cloud-dataflow,spring-cloud/spring-cloud-dataflow,trisberg/spring-cloud-dataflow,donovanmuller/spring-cloud-dataflow,ericbottard/spring-cloud-dataflow,cppwfs/spring-cloud-dataflow,ghillert/spring-cloud-dataflow,mminella/spring-cloud-data,spring-cloud/spring-cloud-dataflow,markpollack/spring-cloud-dataflow,sabbyanandan/spring-cloud-dataflow,spring-cloud/spring-cloud-data,spring-cloud/spring-cloud-dataflow,ghillert/spring-cloud-dataflow
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.dataflow.admin.config; import static org.springframework.cloud.dataflow.core.ArtifactType.processor; import static org.springframework.cloud.dataflow.core.ArtifactType.sink; import static org.springframework.cloud.dataflow.core.ArtifactType.source; import static org.springframework.cloud.dataflow.core.ArtifactType.task; import javax.annotation.PostConstruct; import org.springframework.cloud.dataflow.artifact.registry.ArtifactRegistration; import org.springframework.cloud.dataflow.artifact.registry.ArtifactRegistry; import org.springframework.cloud.dataflow.core.ArtifactCoordinates; import org.springframework.cloud.dataflow.core.ArtifactType; import org.springframework.util.Assert; /** * Populates a {@link ArtifactRegistry} with default modules. * * @author Patrick Peralta * @author Mark Fisher */ public class ArtifactRegistryPopulator { /** * Group ID for default stream modules. */ private static final String DEFAULT_STREAM_GROUP_ID = "org.springframework.cloud.stream.module"; /** * Group ID for default task modules. */ private static final String DEFAULT_TASK_GROUP_ID = "org.springframework.cloud.task.module"; /** * Version number for default modules. */ private static final String DEFAULT_VERSION = "1.0.0.BUILD-SNAPSHOT"; /** * Default classifier for default modules. */ private static final String DEFAULT_CLASSIFIER = "exec"; /** * Default extension for default modules. */ private static final String DEFAULT_EXTENSION = "jar"; /** * The {@link ArtifactRegistry} to populate. */ private final ArtifactRegistry artifactRegistry; /** * Construct a {@code ArtifactRegistryPopulator} with the provided {@link ArtifactRegistry}. * * @param artifactRegistry the {@link ArtifactRegistry} to populate. */ public ArtifactRegistryPopulator(ArtifactRegistry artifactRegistry) { Assert.notNull(artifactRegistry, "ArtifactRegistry must not be null"); this.artifactRegistry = artifactRegistry; } /** * Populate the registry with default module coordinates; * will not overwrite existing values. 
*/ @PostConstruct public void populateDefaults() { populateDefault("file", source); populateDefault("ftp", source); populateDefault("http", source); populateDefault("load-generator", source); populateDefault("sftp", source); populateDefault("tcp", source); populateDefault("time", source); populateDefault("twitterstream", source); populateDefault("filter", processor); populateDefault("groovy-filter", processor); populateDefault("groovy-transform", processor); populateDefault("httpclient", processor); populateDefault("noop", processor); populateDefault("pmml", processor); populateDefault("transform", processor); populateDefault("cassandra", sink); populateDefault("counter", sink); populateDefault("field-value-counter", sink); populateDefault("file", sink); populateDefault("ftp", sink); populateDefault("gemfire", sink); populateDefault("hdfs", sink); populateDefault("jdbc", sink); populateDefault("log", sink); populateDefault("redis", sink); populateDefault("tcp", sink); populateDefault("throughput", sink); populateDefault("websocket", sink); populateDefault("timestamp", task); } /** * Populate the registry with default values for the provided * module name and type; will not overwrite existing values. * * @param name module name * @param type module type */ private void populateDefault(String name, ArtifactType type) { if (this.artifactRegistry.find(name, type) == null) { this.artifactRegistry.save(new ArtifactRegistration(name, type, (type == task) ? defaultTaskCoordinatesFor(name + '-' + type) : defaultStreamCoordinatesFor(name + '-' + type))); } } /** * Return the default task coordinates for the provided module name. * * @param moduleName module name for which to provide default coordinates * @return default coordinates for the provided module */ private ArtifactCoordinates defaultTaskCoordinatesFor(String moduleName) { return ArtifactCoordinates.parse(String.format("%s:%s:%s:%s:%s", DEFAULT_TASK_GROUP_ID, moduleName, DEFAULT_EXTENSION, DEFAULT_CLASSIFIER, DEFAULT_VERSION)); } /** * Return the default stream coordinates for the provided module name. * * @param moduleName module name for which to provide default coordinates * @return default coordinates for the provided module */ private ArtifactCoordinates defaultStreamCoordinatesFor(String moduleName) { return ArtifactCoordinates.parse(String.format("%s:%s:%s:%s:%s", DEFAULT_STREAM_GROUP_ID, moduleName, DEFAULT_EXTENSION, DEFAULT_CLASSIFIER, DEFAULT_VERSION)); } }
spring-cloud-dataflow-admin-starter/src/main/java/org/springframework/cloud/dataflow/admin/config/ArtifactRegistryPopulator.java
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.dataflow.admin.config; import static org.springframework.cloud.dataflow.core.ArtifactType.processor; import static org.springframework.cloud.dataflow.core.ArtifactType.sink; import static org.springframework.cloud.dataflow.core.ArtifactType.source; import static org.springframework.cloud.dataflow.core.ArtifactType.task; import javax.annotation.PostConstruct; import org.springframework.cloud.dataflow.artifact.registry.ArtifactRegistration; import org.springframework.cloud.dataflow.artifact.registry.ArtifactRegistry; import org.springframework.cloud.dataflow.core.ArtifactCoordinates; import org.springframework.cloud.dataflow.core.ArtifactType; import org.springframework.util.Assert; /** * Populates a {@link ArtifactRegistry} with default modules. * * @author Patrick Peralta * @author Mark Fisher */ public class ArtifactRegistryPopulator { /** * Group ID for default stream modules. */ private static final String DEFAULT_STREAM_GROUP_ID = "org.springframework.cloud.stream.module"; /** * Group ID for default task modules. */ private static final String DEFAULT_TASK_GROUP_ID = "org.springframework.cloud.task.module"; /** * Version number for default modules. */ private static final String DEFAULT_VERSION = "1.0.0.BUILD-SNAPSHOT"; /** * Default classifier for default modules. */ private static final String DEFAULT_CLASSIFIER = "exec"; /** * Default extension for default modules. */ private static final String DEFAULT_EXTENSION = "jar"; /** * The {@link ArtifactRegistry} to populate. */ private final ArtifactRegistry artifactRegistry; /** * Construct a {@code ArtifactRegistryPopulator} with the provided {@link ArtifactRegistry}. * * @param artifactRegistry the {@link ArtifactRegistry} to populate. */ public ArtifactRegistryPopulator(ArtifactRegistry artifactRegistry) { Assert.notNull(artifactRegistry, "ArtifactRegistry must not be null"); this.artifactRegistry = artifactRegistry; } /** * Populate the registry with default module coordinates; * will not overwrite existing values. 
*/ @PostConstruct public void populateDefaults() { populateDefault("file", source); populateDefault("ftp", source); populateDefault("http", source); populateDefault("load-generator", source); populateDefault("sftp", source); populateDefault("tcp", source); populateDefault("time", source); populateDefault("twitterstream", source); populateDefault("filter", processor); populateDefault("groovy-filter", processor); populateDefault("groovy-transform", processor); populateDefault("httpclient", processor); populateDefault("noop", processor); populateDefault("pmml", processor); populateDefault("transform", processor); populateDefault("cassandra", sink); populateDefault("counter", sink); populateDefault("field-value-counter", sink); populateDefault("file", sink); populateDefault("ftp", sink); populateDefault("gemfire", sink); populateDefault("hdfs", sink); populateDefault("jdbc", sink); populateDefault("log", sink); populateDefault("redis", sink); populateDefault("throughput", sink); populateDefault("websocket", sink); populateDefault("timestamp", task); } /** * Populate the registry with default values for the provided * module name and type; will not overwrite existing values. * * @param name module name * @param type module type */ private void populateDefault(String name, ArtifactType type) { if (this.artifactRegistry.find(name, type) == null) { this.artifactRegistry.save(new ArtifactRegistration(name, type, (type == task) ? defaultTaskCoordinatesFor(name + '-' + type) : defaultStreamCoordinatesFor(name + '-' + type))); } } /** * Return the default task coordinates for the provided module name. * * @param moduleName module name for which to provide default coordinates * @return default coordinates for the provided module */ private ArtifactCoordinates defaultTaskCoordinatesFor(String moduleName) { return ArtifactCoordinates.parse(String.format("%s:%s:%s:%s:%s", DEFAULT_TASK_GROUP_ID, moduleName, DEFAULT_EXTENSION, DEFAULT_CLASSIFIER, DEFAULT_VERSION)); } /** * Return the default stream coordinates for the provided module name. * * @param moduleName module name for which to provide default coordinates * @return default coordinates for the provided module */ private ArtifactCoordinates defaultStreamCoordinatesFor(String moduleName) { return ArtifactCoordinates.parse(String.format("%s:%s:%s:%s:%s", DEFAULT_STREAM_GROUP_ID, moduleName, DEFAULT_EXTENSION, DEFAULT_CLASSIFIER, DEFAULT_VERSION)); } }
Add TCP Sink
spring-cloud-dataflow-admin-starter/src/main/java/org/springframework/cloud/dataflow/admin/config/ArtifactRegistryPopulator.java
Add TCP Sink
<ide><path>spring-cloud-dataflow-admin-starter/src/main/java/org/springframework/cloud/dataflow/admin/config/ArtifactRegistryPopulator.java
<ide> populateDefault("jdbc", sink);
<ide> populateDefault("log", sink);
<ide> populateDefault("redis", sink);
<add> populateDefault("tcp", sink);
<ide> populateDefault("throughput", sink);
<ide> populateDefault("websocket", sink);
<ide> populateDefault("timestamp", task);
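For reference, the one-line addition above is all that is needed because populateDefault derives the Maven coordinates from the constants declared in the class: the registration name "tcp" combined with the sink type yields the module name tcp-sink, which defaultStreamCoordinatesFor turns into a coordinate string. The small snippet below reproduces that string outside the populator; the class name is hypothetical and the field order simply follows the format string shown above.

// Illustrative only: reproduces the coordinate string that the populator builds
// for the newly registered "tcp" sink, using the constants shown above.
public class TcpSinkCoordinatesExample {

	public static void main(String[] args) {
		String groupId = "org.springframework.cloud.stream.module"; // DEFAULT_STREAM_GROUP_ID
		String moduleName = "tcp" + '-' + "sink";                   // name + '-' + type
		String coordinates = String.format("%s:%s:%s:%s:%s",
				groupId, moduleName, "jar", "exec", "1.0.0.BUILD-SNAPSHOT");
		// prints org.springframework.cloud.stream.module:tcp-sink:jar:exec:1.0.0.BUILD-SNAPSHOT
		System.out.println(coordinates);
	}
}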
Java
apache-2.0
5af93de5a1b5486ab43d4bdaa5acc75f6b4104ad
0
soggier/compiler,akalberer/deep-compiler,deepjava/compiler,ursgraf/compiler
package ch.ntb.inf.deep.classItems; import java.io.DataInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import ch.ntb.inf.deep.config.Segment; import ch.ntb.inf.deep.debug.Dbg; import ch.ntb.inf.deep.host.ClassFileAdmin; import ch.ntb.inf.deep.config.SystemClass; import ch.ntb.inf.deep.config.SystemMethod; import ch.ntb.inf.deep.strings.HString; import ch.ntb.inf.deep.strings.StringTable; public class Class extends Type implements ICclassFileConsts, ICdescAndTypeConsts, ICjvmInstructionOpcs { //--- static fields private static final int fieldListArrayLength = 9; private static final Item[] instFieldLists = new Item[fieldListArrayLength]; // instance field lists, used by method readFields private static final Item[] classFieldLists = new Item[fieldListArrayLength]; // class field lists, used by method readFields // private static Item constFields; // constant field list (unsorted) static{ assert fieldSizeUnit >= 4 && (fieldSizeUnit & (fieldSizeUnit-1)) == 0; } //--- instance fields public Item[] constPool; // reduced constant pool public Item methods; // list with all methods public int nOfMethods; // number of methods public int nOfClassMethods, nOfInstanceMethods; // number of methods public Item fields; // list with all fields public int nOfClassFields, nOfInstanceFields; // number of fields public Class[] interfaces; public int nOfInterfaces; // number of interfaces public int nOfBaseClasses; // number of base classes public int nOfReferences; // number of class fields which are references public int[] constantBlock; // the constant block for this class public int constantBlockSize; // size of the constant block public int constantPoolSize; // size of this pool on the target (in byte) public int stringPoolSize; // size of this pool on the target (in byte) public int classDescriptorSize; // size of the class descriptor on the target (in byte) public int machineCodeSize; // size of the machine code on the target (in byte) public Segment codeSegment, varSegment, constSegment; // references to the memory segments for this class public int codeOffset, varOffset, constOffset; // the offset of the code/class fields/constant block in the dedicated segment Class[] imports; HString srcFileName; // file ident + ".java", e.g. String.java for java/lang/String.java //--- debug fields int magic, version; //--- instance methods public Class(HString registeredCpClassName){ super(registeredCpClassName, null); name = registeredCpClassName; category = tcRef; sizeInBits = 32; if(classList == null) classList = this; nofClasses++; } /** * @param newByteCode one of {new, newarray, anewarray, multianewarray} * @return the reference to the new-method, null for invalid byteCodes */ public static Method getNewMemoryMethod(int newByteCode){ int methIndex; switch(newByteCode){ case bCnew: methIndex = 0; break; case bCnewarray: methIndex = 1; break; case bCanewarray: methIndex = 2; break; case bCmultianewarray: methIndex = 3; break; default: return null; } return (Method)newMethods[methIndex]; } /** * Select field by name and delete it in the fields list (if found). 
* @param fieldName * @return the selected field or null if not found */ // private Item getAndExtractField(HString fieldName){ // assert false; // Item item = fields, pred = null; // while(item != null && item.name != fieldName) { // pred = item; // item = item.next; // } // if(item != null){ // if(pred == null) fields = item.next; else pred.next = item.next; // item.next = null; // } // return item; // } Item getField(HString fieldName){ Item item = fields; while(item != null && item.name != fieldName) item = item.next; if(item == null && type != null) item = ((Class)type).getField(fieldName); return item; } /** * Select method by name and descriptor and delete it in the methods list if found. * @param methName * @param methDescriptor * @return the selected method or null if not found */ // private Method getAndExtractMethod(HString methName, HString methDescriptor){ // assert false; // Item item = methods, pred = null; // while(item != null && (item.name != methName || ((Method)item).methDescriptor != methDescriptor) ) { // pred = item; // item = item.next; // } // if(item != null){ // if(pred == null) methods = item.next; else pred.next = item.next; // item.next = null; // } // return (Method)item; // } Item getMethod(HString name, HString descriptor){ Item item = methods; while(item != null && item.name != name) item = item.next; if(item == null && type != null) item = ((Class)type).getMethod(name, descriptor); return item; } /** * The field with fieldName is selected and returned. * If the field is not found, an new Field is created and insert, if the field is found it is checked for the correct descriptor. * @param fieldName a registered string * @param fieldType a registred string * @return the selected field or newly created one */ DataItem insertCondAndGetField(HString fieldName, Type fieldType){ //pre: all strings in the const are already registered in the proper hash table. Item item = fields; while(item != null && item.name != fieldName) item = item.next; DataItem field; if(item != null){ field = (DataItem)item; assert field.type == fieldType; }else{// create Field and update field list field = new DataItem(fieldName, fieldType); field.next = fields; fields = field; } return field; } /** * The method with methodName is selected and returned. * If the method is not found, an new Method is created and inserted, if the method is found it is checked for the correct descriptor. * @param methName a registered string * @param methDescriptor a registered string * @return the selected method or newly created one */ Method insertCondAndGetMethod(HString methName, HString methDescriptor){ //pre: all strings in the const are already registered in the proper hash table. Item item = methods; while(item != null && (item.name != methName || ((Method)item).methDescriptor != methDescriptor) ) item = item.next; Method method; if(item != null){ method = (Method)item; }else{// create Method and update method list Type retrunType = getReturnType(methDescriptor); method = new Method(methName, retrunType, methDescriptor); method.next = methods; methods = method; } return method; } /** * Check ClassInfo entry in const pool and update it accordingly if necessary. * <br>That is: if there is not yet a direct reference to an object of type Class, then such an object is created and registered. 
* @param cpClassInfoIndex index of ClassInfo entry * @return object of this type Class */ Item updateAndGetCpClassEntry(int cpClassInfoIndex){ //pre: all strings in the const pool are already registered in the proper hash table. Item cls = cpItems[cpClassInfoIndex]; if(cls == null){ HString registeredClassName = cpStrings[cpIndices[cpClassInfoIndex]]; if(registeredClassName.charAt(0) == '[') cls = getTypeByNameAndUpdate(tcArray, registeredClassName, wktObject); else cls = getTypeByNameAndUpdate(tcRef, registeredClassName, null); cpItems[cpClassInfoIndex] = cls; } return cls; } /** * Check FieldRefInfo entry in constant pool and update it accordingly if necessary. * <br>That is: if there is not yet a direct reference to an object of type Field, then such an object is created and registered. * @param cpFieldInfoIndex index of FieldRefInfo entry * @return object of this type Class */ // private Item updateAndGetCpFieldEntry(int cpFieldInfoIndex){ // //pre: all strings in the const are already registered in the proper hash table. // Item field = cpItems[cpFieldInfoIndex]; // if(field == null){ // int csx = cpIndices[cpFieldInfoIndex]; // get class and signature indices // Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); // int sx = cpIndices[csx & 0xFFFF]; // HString fieldName = cpStrings[sx>>>16]; // HString fieldDesc = cpStrings[sx & 0xFFFF]; // Type fieldType = getTypeByDescriptor(fieldDesc); // field = cls.insertCondAndGetField( fieldName, fieldType); // cpItems[cpFieldInfoIndex] = field; // } // return field; // } private Item getFieldOrStub(HString fieldName, Type fieldType){ Item field = getField(fieldName); if( field == null ) field = new ItemStub(this, fieldName, fieldType); return field; } private Item getFieldOrStub(int cpFieldInfoIndex){ //pre: all strings in the const are already registered in the proper hash table. Item field = cpItems[cpFieldInfoIndex]; if(field == null){ int csx = cpIndices[cpFieldInfoIndex]; // get class and signature indices Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); int sx = cpIndices[csx & 0xFFFF]; HString fieldName = cpStrings[sx>>>16]; HString fieldDesc = cpStrings[sx & 0xFFFF]; Type fieldType = getTypeByDescriptor(fieldDesc); field = cls.getFieldOrStub(fieldName, fieldType); } return field; } /** * Check MethRefInfo entry in constant pool and update it accordingly if necessary. * <br>That is: if there is not yet a direct reference to an object of type Method, then such an object is created and registered. * @param cpMethInfoIndex index of MethRefInfo entry * @return object of this type Class */ // private Item updateAndGetCpInterfaceMethodEntry(int cpMethInfoIndex){ // //pre: all strings in the const are already registered in the proper hash table. // Method method = null; // if(cpItems[cpMethInfoIndex] == null){ // int csx = cpIndices[cpMethInfoIndex]; // get class and signature indices // Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); // int sx = cpIndices[csx & 0xFFFF]; // // HString methName = cpStrings[sx>>>16]; // HString methDesc = cpStrings[sx & 0xFFFF]; // method = cls.insertCondAndGetMethod( methName, methDesc); // method.owner = cls; // cpItems[cpMethInfoIndex] = method; // } // return method; // } private Item getMethodOrStub(HString name, HString descriptor){ Item meth = getMethod(name, descriptor); if( meth == null ) meth = new ItemStub(this, name, descriptor); return meth; } private Item getMethodOrStub(int cpMethInfoIndex){ //pre: all strings in the const are already registered in the proper hash table. 
Item method = null; if(cpItems[cpMethInfoIndex] == null){ int csx = cpIndices[cpMethInfoIndex]; // get class and signature indices Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); int sx = cpIndices[csx & 0xFFFF]; HString methName = cpStrings[sx>>>16]; HString methDesc = cpStrings[sx & 0xFFFF]; method = cls.getMethodOrStub( methName, methDesc); } return method; } private void loadConstPool(DataInputStream clfInStrm) throws IOException{ if(verbose) vrb.println(">loadConstPool:"); magic = clfInStrm.readInt(); if(magic != 0xcafeBabe) throw new IOException("illegal class file"); if(verbose) vrb.printf("magic=0x%1$4x\n", magic); version = clfInStrm.readInt(); if(verbose) vrb.printf("version=%1$d.%2$d\n", (version&0xFFFF), (version>>>16) ); constPoolCnt = clfInStrm.readUnsignedShort(); if(verbose) vrb.printf("constPoolCnt=%1$d\n", constPoolCnt ); allocatePoolArray(constPoolCnt); for(int pEntry = 1; pEntry < constPoolCnt; pEntry++){ int tag = clfInStrm.readUnsignedByte(); cpTags[pEntry] = (byte)tag; cpIndices[pEntry] = 0; cpItems[pEntry] = null; cpStrings[pEntry] = null; switch(tag){ case cptUtf8: cpStrings[pEntry] = HString.readUTFandRegister(clfInStrm); break; case cptInteger: cpIndices[pEntry] = clfInStrm.readInt(); break; // integer value case cptFloat: cpIndices[pEntry] = clfInStrm.readInt(); break; // float pattern case cptLong: case cptDouble: cpIndices[pEntry++] = clfInStrm.readInt(); cpIndices[pEntry] = clfInStrm.readInt(); cpTags[pEntry] = cptExtSlot; cpItems[pEntry] = null; cpStrings[pEntry] = null; break; case cptClass: cpIndices[pEntry] = clfInStrm.readUnsignedShort(); break; // class index case cptString: cpIndices[pEntry] = clfInStrm.readUnsignedShort(); break; // string index case cptFieldRef: cpIndices[pEntry] = clfInStrm.readInt(); break; // (class index) <<16, nameAndType index case cptMethRef: cpIndices[pEntry] = clfInStrm.readInt(); break; // (class index) <<16, nameAndType index case cptIntfMethRef: cpIndices[pEntry] = clfInStrm.readInt(); break; // (class index) <<16, nameAndType index case cptNameAndType: cpIndices[pEntry] = clfInStrm.readInt(); break;// (name index) <<16, descriptor index default: throw new IOException("illegal tag in const pool"); } } if(verbose) vrb.println("<loadConstPool"); } private void updateConstPool() throws IOException{ if(verbose) vrb.println(">updateConstPool:"); //pre: all strings in the const are already registered in the proper hash table. 
int nofItems = 0; int pEntry; for(pEntry = 1; pEntry < constPoolCnt; pEntry++){// constPoolCnt int tag = cpTags[pEntry]; switch(tag){ case cptExtSlot: case cptUtf8: // cptExtSlot, Utf8 string break; case cptInteger: // integer literal cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txInt], cpIndices[pEntry], 0); nofItems++; break; case cptFloat: // float literal cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txFloat], cpIndices[pEntry], 0); nofItems++; break; // float pattern case cptLong: cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txLong], cpIndices[pEntry], cpIndices[pEntry+1]); nofItems++; pEntry++; break; case cptDouble: cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txDouble], cpIndices[pEntry], cpIndices[pEntry+1]); nofItems++; pEntry++; break; case cptClass: // class index updateAndGetCpClassEntry(pEntry); nofItems++; break; case cptString: cpItems[pEntry] = new StringLiteral(hsString, cpStrings[cpIndices[pEntry]]); nofItems++; break; case cptFieldRef: // updateAndGetCpFieldEntry(pEntry); cpItems[pEntry] = getFieldOrStub(pEntry); nofItems++; break; case cptMethRef: // updateAndGetCpMethodEntry(pEntry); cpItems[pEntry] = getMethodOrStub(pEntry); nofItems++; break; case cptIntfMethRef: // Item meth = updateAndGetCpInterfaceMethodEntry(pEntry); // meth.accAndPropFlags |= (1<<dpfInterfCall); cpItems[pEntry] = getMethodOrStub(pEntry); nofItems++; break; case cptNameAndType: break;// (name index) <<16, descriptor index default: throw new IOException("illegal tag in const pool"); } } assert pEntry == constPoolCnt; constPool = new Item[nofItems]; while(--pEntry > 0){ Item item = cpItems[pEntry]; if(item != null){ constPool[--nofItems] = item; // cpItems[pEntry] = null; cpIndices[pEntry] = nofItems; }else{ cpIndices[pEntry] = 0; } } assert nofItems == 0; if(verbose) vrb.println("<updateConstPool"); } private void readInterfaces(DataInputStream clfInStrm) throws IOException{ int cnt = clfInStrm.readUnsignedShort(); nOfInterfaces = cnt; if(cnt > 0){ interfaces = new Class[cnt]; for (int intf = 0; intf < cnt; intf++){ int intfInx = clfInStrm.readUnsignedShort(); interfaces[intf] = (Class)cpItems[intfInx]; } } } private void addItemToFieldList(Item item){ if(verbose) vrb.println(">addItemToFieldList"); Type type = (Type)item.type; int sizeInBits = type.sizeInBits; int fieldList = sizeInBits>>3; // fieldList={0, 1, 2, 4, 8} if( (item.accAndPropFlags & (1<<apfStatic)) == 0){// instance field item.next = instFieldLists[fieldList]; instFieldLists[fieldList] = item; nOfInstanceFields++; }else{// class field item.next = classFieldLists[fieldList]; classFieldLists[fieldList] = item; nOfClassFields++; } if(verbose) vrb.println("<addItemToFieldList"); } private void clearFieldLists(){ if(verbose) vrb.println(">clearFieldLists"); nOfClassFields = 0; nOfInstanceFields = 0; instanceFieldsSize = 0; classFieldsSize = 0; for(int n = fieldListArrayLength-1; n >= 0; n--){ instFieldLists[n] = null; classFieldLists[n] = null; } if(verbose) vrb.println("<clearFieldLists"); } private Item getFieldListAndUpdate(Item[] fieldLists){ // final boolean verbose = true; if(verbose) vrb.printf(">getFieldListAndUpdate: class: %1$s\n", name); Item head = null, tail = null; Item rest = null; //--- select and extract reference fields int fieldsSize = 0; Item list = fieldLists[4]; Item item = list; while(item != null){ list = item.next; Type type = (Type)item.type; if( type.category != tcPrimitive ){// reference (tcRef || tcArray) fieldsSize += 4; item.next = head; head = item; 
if(tail == null) tail = item; }else{ item.next = rest; rest = item; } item = list; } fieldLists[4] = rest; // vrb.printf(">getF1: fieldsSize=%1$d\n", fieldsSize); //--- select and extract any other fields Item consts = null; for(int category = 8; category >= 0; category--){ list = fieldLists[category]; fieldLists[category] = null; item = list; while(item != null){ list = item.next; if( (item.accAndPropFlags & (1<<dpfConst)) != 0){ item.next = consts; consts = item; // vrb.printf(">getF1a: const.name=%1$s\n", consts.name); }else{ fieldsSize += Math.max(category, 1); // reserve for boolean etc to 1 B if(tail == null) head = item; else tail.next = item; tail = item; } item = list; } } // vrb.printf(">getF2: fieldsSize=%1$d\n", fieldsSize); if( fieldLists == instFieldLists) instanceFieldsSize = (fieldsSize + (fieldSizeUnit-1)) & -fieldSizeUnit; else classFieldsSize = (fieldsSize + (fieldSizeUnit-1)) & -fieldSizeUnit; // vrb.printf(">getF3: objectSizeOrDim=%1$d, classFieldsSize=%2$d\n", instanceFieldsSize, classFieldsSize); if( tail == null ) head = consts; else tail.next = consts; if(verbose) vrb.println("<getFieldListAndUpdate"); return head; } private Item appendItem(Item head1, Item tail1, Item head2){ if(tail1 == null) head1 = head2; else tail1.next = head2; return head1; } private void readFields(DataInputStream clfInStrm) throws IOException{ final boolean verbose = false; if(verbose) vrb.println(">readFields: "+name); clearFieldLists(); assert fields == null; int fieldCnt = clfInStrm.readUnsignedShort(); while(fieldCnt > 0){ int flags; HString name, descriptor; flags = clfInStrm.readUnsignedShort(); //read access and property flags //--- read name and descriptor int index = clfInStrm.readUnsignedShort(); name = cpStrings[index]; // Item field = getAndExtractField(name); index = clfInStrm.readUnsignedShort(); descriptor = cpStrings[index]; DataItem field = null; if(verbose) vrb.printf(" readFields: cls=%1$s, desc=%2$s\n", name, descriptor); //--- read field attributes {ConstantValue, Deprecated, Synthetic} int attrCnt = clfInStrm.readUnsignedShort(); while(attrCnt-- > 0){ index = clfInStrm.readUnsignedShort(); int attr = selectAttribute(index); int attrLength = clfInStrm.readInt(); switch(attr){ case atxConstantValue: index = clfInStrm.readUnsignedShort(); int rcpIndex = cpIndices[index]; Item cpField = constPool[rcpIndex]; Type type = getTypeByDescriptor(descriptor); if(verbose) vrb.printf(" readFields: field.desc=%1$s, const: name=%2$s, type=%3$s\n", descriptor, cpField.name, cpField.type.name); assert cpField instanceof Constant; field = new NamedConst(name, type, (Constant)cpField); if( (flags & (1<<apfStatic) ) != 0) flags |= (1<<dpfConst); break; case atxDeprecated: flags |= (1<<dpfDeprecated); break; case atxSynthetic: flags |= (1<<dpfSynthetic); break; default: skipAttributeAndLogCond(clfInStrm, attrLength, index); } } if(field == null){ Type type = getTypeByDescriptor(descriptor); field = new DataItem(name, type); } flags |= field.accAndPropFlags; field.accAndPropFlags = flags; addItemToFieldList(field); ((ClassMember)field).owner = this; fieldCnt--; } assert fields == null; // if(fields != null){ // Item item = fields; // while(item != null){ // vrb.print("("+item.name +") "); // item.println(1); // item = item.next; // } // vrb.println(" Class.readFieldsE-end"); // } Item clsFields = getFieldListAndUpdate(classFieldLists); Item instFields = getFieldListAndUpdate(instFieldLists); if(clsFields == null) clsFields = instFields; else{ Item tail = null, item = clsFields; while(item 
!= null){ tail = item; item = item.next; } tail.next = instFields; } fields = clsFields; if(verbose) vrb.println("<readFields"); } private void readMethods(DataInputStream clfInStrm, int userReqAttributes) throws IOException{ int methodCnt = clfInStrm.readUnsignedShort(); nOfMethods = methodCnt; assert methods == null; int nofClsMeths = 0, nofInstMeths = 0; Item clsMethHead = null, clsMethTail = null; Item instMethHead = null, instMethTail = null; while(methodCnt-- > 0){ int flags; HString name, descriptor; flags = clfInStrm.readUnsignedShort(); //read access and property flags //--- read name and descriptor int index = clfInStrm.readUnsignedShort(); name = cpStrings[index]; index = clfInStrm.readUnsignedShort(); descriptor = cpStrings[index]; Type returnType = getReturnType(descriptor); // Method method = getAndExtractMethod(name, descriptor); // if(method == null){ // method = new Method(name, returnType, descriptor); // }else{ // method.type = returnType; // } // method.owner = this; // assert method.type == returnType; Method method = new Method(name, returnType, descriptor); method.owner = this; //--- read method attributes {Code, Deprecated, Synthetic} int attrCnt = clfInStrm.readUnsignedShort(); while(attrCnt-- > 0){ int cpInxOfAttr = clfInStrm.readUnsignedShort(); int attr = selectAttribute( cpInxOfAttr ); int attrLength = clfInStrm.readInt(); switch(attr){ case atxCode: if( (userReqAttributes&(1<<atxCode)) == 0){ skipAttributeAndLogCond(clfInStrm, attrLength, 0); // skip without logging break; } method.maxStackSlots = clfInStrm.readUnsignedShort(); method.maxLocals = clfInStrm.readUnsignedShort(); int codeLen = clfInStrm.readInt(); method.code = new byte[codeLen]; clfInStrm.read(method.code); //--- read exception table int excTabLen = clfInStrm.readUnsignedShort(); if(excTabLen > 0){ method.exceptionTab = new ExceptionTabEntry[excTabLen]; for(int exc = 0; exc < excTabLen; exc++){ ExceptionTabEntry entry = new ExceptionTabEntry(); method.exceptionTab[exc] = entry; entry.startPc = clfInStrm.readUnsignedShort(); entry.endPc = clfInStrm.readUnsignedShort(); entry.handlerPc = clfInStrm.readUnsignedShort(); int catchTypeInx = clfInStrm.readUnsignedShort(); entry.catchType = (Class)cpItems[catchTypeInx]; } } //--- read attributes of the code attribute {LineNumberTable, LocalVariableTable} int codAttrCnt = clfInStrm.readUnsignedShort(); while(codAttrCnt-- > 0){ int codAttrIndex = clfInStrm.readUnsignedShort(); int codeAttr = selectAttribute( codAttrIndex ); int codAttrLen = clfInStrm.readInt(); if(codeAttr == atxLocalVariableTable){ if( (userReqAttributes&(1<<atxLocalVariableTable)) == 0){ skipAttributeAndLogCond(clfInStrm, codAttrLen, 0); // skip without logging }else{ int locVarTabLength = clfInStrm.readUnsignedShort(); if(locVarTabLength > 0){ method.localVars = new LocalVar[method.maxLocals]; while(locVarTabLength-- > 0){ LocalVar locVar = new LocalVar(); locVar.startPc = clfInStrm.readUnsignedShort(); locVar.length = clfInStrm.readUnsignedShort(); locVar.name = cpStrings[ clfInStrm.readUnsignedShort() ]; locVar.type = getTypeByDescriptor( cpStrings[ clfInStrm.readUnsignedShort() ] ); locVar.index = clfInStrm.readUnsignedShort(); method.insertLocalVar(locVar); } } } }else if(codeAttr == atxLineNumberTable){ if( (userReqAttributes&(1<<atxLineNumberTable)) == 0){ skipAttributeAndLogCond(clfInStrm, codAttrLen, 0); // skip without logging }else{ int lineNrTabLength = clfInStrm.readUnsignedShort(); int[] lineNrTab = new int[lineNrTabLength]; method.lineNrTab = lineNrTab; for(int lnp = 0; 
lnp < lineNrTabLength; lnp++) lineNrTab[lnp] = clfInStrm.readInt(); } }else{// skip skipAttributeAndLogCond(clfInStrm, codAttrLen, codAttrIndex); } } break; case atxDeprecated: flags |= (1<<dpfDeprecated); break; case atxSynthetic: flags |= (1<<dpfSynthetic); break; default: skipAttributeAndLogCond(clfInStrm, attrLength, index); } } flags |= method.accAndPropFlags; method.accAndPropFlags = flags; //--- append method if( (flags & (1<<apfStatic)) != 0 ){ // class method nofClsMeths++; if(clsMethHead == null) clsMethHead = method; else clsMethTail.next = method; clsMethTail = method; }else{// instance method nofInstMeths++; if(instMethHead == null) instMethHead = method; else instMethTail.next = method; instMethTail = method; } } assert methods == null; methods = appendItem(clsMethHead, clsMethTail, instMethHead); // methods = clsMethHead; clsMethTail.next = instMethHead; nOfClassMethods = nofClsMeths; nOfInstanceMethods = nofInstMeths; } private void readClassAttributes(DataInputStream clfInStrm, int userReqAttributes) throws IOException{ int attrCnt = clfInStrm.readUnsignedShort(); while(attrCnt-- > 0){ int index = clfInStrm.readUnsignedShort(); int attr = selectAttribute(index); int attrLength = clfInStrm.readInt(); switch(attr){ case atxSourceFile: index = clfInStrm.readUnsignedShort(); srcFileName = cpStrings[index]; break; case atxDeprecated: accAndPropFlags |= (1<<dpfDeprecated); break; case atxInnerClasses: // 4.7.5, p125 if( (userReqAttributes&(1<<atxInnerClasses)) == 0) skipAttributeAndLogCond(clfInStrm, attrLength, index); else{ // TODO Auto-generated method stub assert false: "TODO"; } break; default: skipAttributeAndLogCond(clfInStrm, attrLength, index); } } } private void analyseByteCode(){ if(verbose) vrb.println(">analyseByteCode:"); Item item = methods; while(item != null){ Method meth = (Method)item; if(verbose){ vrb.print("\nmethod: "); meth.printHeader(); vrb.print(" (owner="); meth.owner.printName(); vrb.println(')'); } ByteCodePreProc.analyseCodeAndFixCpRefs(cpIndices, constPool, meth.code); item = item.next; } if(verbose) vrb.println("<analyseByteCode"); } private void loadClass(int userReqAttributes) throws IOException{ if(verbose) vrb.println(">loadClass:"); if( (accAndPropFlags & ((1<<dpfClassLoaded)|(1<<dpfSynthetic)) ) == 0 ){// if not yet loaded try{ File classFile = ClassFileAdmin.getClassFile(name); log.println("opening class file of class: "+name ); if(classFile == null) throw new FileNotFoundException(); InputStream inStrm = new FileInputStream(classFile); // new FileInputStream DataInputStream clfInStrm = new DataInputStream(inStrm); // new DataInputStream loadConstPool(clfInStrm); accAndPropFlags |= clfInStrm.readUnsignedShort(); if(verbose){ printOrigConstPool("state: 0"); // stab.print("String Table 0"); printClassList("state: 0"); print(0); } updateConstPool(); if(verbose){ printOrigConstPool("state: 1"); print(0); } clfInStrm.readUnsignedShort(); // read this class index int thisSupClassCpInx = clfInStrm.readUnsignedShort(); if(verbose) vrb.println("thisSupClassCpInx="+thisSupClassCpInx); if(thisSupClassCpInx > 0){ int constPoolInx = cpIndices[thisSupClassCpInx]; type = (Class)constPool[constPoolInx]; if(verbose){ vrb.print("superClassName="); type.printName(); } } readInterfaces(clfInStrm); readFields(clfInStrm); readMethods(clfInStrm, userReqAttributes); readClassAttributes(clfInStrm, userReqAttributes); // if(verbose){ // vrb.println("\nstate: 2"); // printOrigConstPool("state: 2"); // stab.print("String Table in state: 2"); // printClassList("state: 
2"); // print(0); // } if( (accAndPropFlags & ((1<<apfInterface)|(1<<apfEnum))) == 0){ analyseByteCode(); this.accAndPropFlags |= (1<<dpfClassLoaded); }else if( (accAndPropFlags & (1<<apfInterface)) != 0) this.accAndPropFlags |= (1<<dpfClassLoaded); else if( (accAndPropFlags & (1<<apfInterface)) != 0){ this.accAndPropFlags |= (1<<dpfClassLoaded); } if(verbose){ vrb.println("\n>dump of class: "+name); vrb.println("\nstate: 3"); // stab.print("String Table in state: 3"); printOrigConstPool("state: 3"); printReducedConstPool("state: 3"); // printClassList("state: 3"); print(0); vrb.println("\n<end of dump: "+name); } // printReducedConstPool("reduced cp, state: 3"); clfInStrm.close(); }catch (FileNotFoundException fnfE){ errRep.error("class file not found"); errRep.println(); fnfE.getCause(); } } //--- load referenced classes if( (accAndPropFlags & (1<<dpfClassLoaded)) != 0){ for(int cpx = constPool.length-1; cpx >= 0; cpx--){ Item item = constPool[cpx]; if(item instanceof Class){ Class refClass = (Class) item; if( (refClass.accAndPropFlags & ((1<<dpfClassLoaded)|(1<<dpfSynthetic)) ) == 0) { refClass.loadClass(userReqAttributes); } } } } if(verbose) vrb.println("<loadClass"); } public static void startLoading(int nofRootClasses){ if(verbose) vrb.println(">startLoading:"); rootClasses = new Class[nofRootClasses]; nofRootClasses = 0; classList = null; classListTail = null; nofClasses = 0; prevCpLenth = 0; constPoolCnt = 0; if(StringTable.getInstance() != null) StringTable.resetTable(); else{ StringTable.createSingleton(1000, "??"); stab = StringTable.getInstance(); HString.setStringTable(stab); } hsNumber = stab.insertCondAndGetEntry("#"); hsString = stab.insertCondAndGetEntry("\"\""); setUpBaseTypeTable(); setClassFileAttributeTable(stab); if(verbose) vrb.println("<startLoading"); } public static void releaseLoadingResources(){ cpItems = null; cpStrings = null; cpIndices = null; cpTags = null; prevCpLenth = 0; constPoolCnt = 0; HString.releaseBuffers(); ClassFileAdmin.clear(); // StringTable.resetTable(); // hsNumber = null; // wellKnownTypes = null; // classFileAttributeTable = null; } public static void loadRootClass(String rootClassName, int userReqAttributes) throws IOException{ if(verbose) vrb.println(">loadRootClass: "+rootClassName); HString hRootClassName = stab.insertCondAndGetEntry(rootClassName); Class root = new Class(hRootClassName); appendRootClass(root); root.accAndPropFlags |= (1<<dpfRootClass); assert root.next == null; root.loadClass(userReqAttributes); if(verbose) vrb.println("<loadRootClass"); } private static int unitedSysMethodFlags(SystemClass systemClass){ SystemMethod systemMeth = systemClass.methods; int unitedFlags = 0; if(systemMeth != null) unitedFlags = 1<<dpfSysPrimitive; while(systemMeth != null){ unitedFlags |= (systemMeth.attributes & dpfSetSysMethProperties); systemMeth = systemMeth.next; } return unitedFlags; } private static void loadSystemClass(SystemClass systemClass, int userReqAttributes) throws IOException{ final boolean verbose = false; String systemClassName = systemClass.name; int systemClassAttributes = systemClass.attributes | unitedSysMethodFlags(systemClass); if(verbose) vrb.println(">loadSystemClass: "+systemClassName); if(verbose) vrb.printf(" sysClsAttributes1=0x%1$x\n", systemClassAttributes); HString hSysClassName = stab.insertCondAndGetEntry(systemClassName); Class cls = (Class)getClassByName(hSysClassName); if(cls == null){ cls = new Class(hSysClassName); appendClass(cls); } cls.loadClass(userReqAttributes); cls.accAndPropFlags |= 
systemClassAttributes & dpfSetSysClassProperties; if( (systemClassAttributes & (1<<dpfNew)) != 0 ){// set up new memory method table SystemMethod systemMeth = systemClass.methods; while(systemMeth != null){ Item method = cls.methods.getItemByName(systemMeth.name); if(method == null){ errRep.error("method "+systemMeth.name +" in system class "+systemClass.name + " not found"); }else{ if(verbose)vrb.printf("lsc: method=%1$s, attr=0x%2$x\n", (cls.name + "." + method.name), systemMeth.attributes); int methIndex = (systemMeth.attributes-1)&0xFF; if( methIndex >= nofNewMethods ){ errRep.error("method id of"+systemMeth.name +" in system class "+systemClass.name + " out of range"); }else{ if(verbose) vrb.println(" ldSysCls: newMethInx="+methIndex); systemClassAttributes |= method.accAndPropFlags & dpfSetSysMethProperties; newMethods[methIndex] = method; if(verbose)vrb.printf("lsc: newMethods[%1$d]: %2$s\n", methIndex, method.name); } } systemMeth = systemMeth.next; } } //--- update method attributes (with system method attributes) SystemMethod systemMeth = systemClass.methods; Item method = null; while(systemMeth != null){ method = cls.methods.getItemByName(systemMeth.name); if(method != null){ method.offset = systemMeth.offset; int sysMethAttr = systemMeth.attributes & (dpfSetSysMethProperties | sysMethCodeMask); method.accAndPropFlags = (method.accAndPropFlags & ~(dpfSetSysMethProperties | sysMethCodeMask) ) |(1<<dpfSysPrimitive) | sysMethAttr; if( (sysMethAttr & (1<<dpfSynthetic)) != 0) ((Method)method).clearCodeAndAssociatedFields(); } systemMeth = systemMeth.next; } if(verbose) vrb.println("<loadSystemClass"); } private static void loadSystemClasses(SystemClass sysClasses, int userReqAttributes) throws IOException{ while(sysClasses != null){ loadSystemClass(sysClasses, userReqAttributes); if(verbose){ // vrb.println(" *system class: "+sysCls.name); // sysCls.print(0); // printClassList(" *** class list:"); // vrb.println(" end of *system class: "+sysCls.name); // sysCls.print(0); } sysClasses = sysClasses.next; } } private static void printConstPools(){ Item type = classList; while(type != null){ if(type instanceof Class){ Class cls = (Class)type; if( cls.constPool != null) cls.printReducedConstPool(cls.name.toString()); } type = type.next; } } private static void repalceConstPoolStubs(){ final boolean verbose = true; if(verbose) vrb.println(">repalceConstPoolStubs:"); Item type = classList; while(type != null){ if(type instanceof Class){ Class cls = (Class)type; if( cls.constPool != null) { Item[] cp = cls.constPool; for(int cpx = cp.length-1; cpx >= 0; cpx--) cp[cpx] = cp[cpx].getReplacedStub(); } } type = type.next; } if(verbose) vrb.println("<repalceConstPoolStubs"); } public static void buildSystem(String[] rootClassNames, String[] parentDirsOfClassFiles, SystemClass sysClasses, int userReqAttributes) throws IOException{ errRep.nofErrors = 0; Type.nofRootClasses = 0; ClassFileAdmin.registerParentDirs(parentDirsOfClassFiles); int nofRootClasses = rootClassNames.length; startLoading(nofRootClasses); Class clsObject = (Class)wellKnownTypes[txObject]; clsObject.loadClass(userReqAttributes); loadSystemClasses(sysClasses, userReqAttributes); if(verbose) printClassList("state: sysClasses loaded, class list:"); for (int rc = 0; rc < nofRootClasses && errRep.nofErrors == 0; rc++){ String sname = rootClassNames[rc]; vrb.println("\n\nRootClass["+rc +"] = "+ sname); loadRootClass( sname, userReqAttributes); } // printClassList("DbG: state 4, class list:"); // printConstPools(); 
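// Field and method references whose owner class was not yet loaded were entered into the constant
// pools as ItemStub placeholders (see getFieldOrStub/getMethodOrStub). Now that every reachable class
// has been loaded, each reduced pool entry replaces itself with the real item via getReplacedStub().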
repalceConstPoolStubs(); // printClassList("DbG: state 4, class list:"); // assert false; completeLoading(); if(verbose) printClassList("end state, class list:"); releaseLoadingResources(); log.printf("number of errors %1$d\n", errRep.nofErrors); log.print("system building "); if(errRep.nofErrors == 0) log.println("successfully done"); else log.println("terminated with errors"); } public static void buildSystem(String[] rootClassNames, int userReqAttributes) throws IOException{ buildSystem(rootClassNames, new String[] {"bin"}, null, userReqAttributes); } public Method getClassConstructor() { if(this.methods != null) return (Method)this.methods.getItemByName("<clinit>"); return null; } //--- debug primitives public void printItemCategory(){ vrb.print("class"); } public static void printClassList(String title){ vrb.println("\nclass list: (nofClasses="+nofClasses +')'); vrb.println(title); if(verbose) vrb.println("\n<class list:"); Item cls = classList; while(cls != null){ Dbg.indent(1); Dbg.printJavaAccAndPropertyFlags(cls.accAndPropFlags); vrb.print(cls.name); vrb.print(" //dFlags"); Dbg.printDeepAccAndPropertyFlags(cls.accAndPropFlags); vrb.println(); cls.printFields(2); cls.printMethods(2); cls = cls.next; vrb.println(); } if(verbose) vrb.println("end of class list>"); } public void printFields(int indentLevel){ indent(indentLevel); vrb.printf("fields: (clsFields: #=%1$d, size=%2$d B; instFields: #=%3$d, size=%4$d B)\n", nOfClassFields, classFieldsSize, nOfInstanceFields, instanceFieldsSize); Item item = fields; while(item != null){ item.println(indentLevel+1); item = item.next; } } public void printMethods(int indentLevel){ indent(indentLevel); vrb.println("methods: (#clsMeths="+ nOfClassMethods + ",#InstMeths=" + nOfInstanceMethods +')' ); Item item = methods; while(item != null){ item.println(indentLevel+1); item = item.next; } } private void printRedCpEntry(int redCpInd){ Item item = constPool[redCpInd]; item.printShort(0); Dbg.printSpace(); Dbg.printJavaAccAndPropertyFlags(item.accAndPropFlags); Dbg.print('+'); Dbg.printDeepAccAndPropertyFlags(item.accAndPropFlags); } private void printRedCpEntryCond(int cpIndex, int tag){ // if(tag < 0){// has entry in the reduced const pool if(constPool != null && tag != cptUtf8 && tag != cptNameAndType){ int redCpInd = cpIndices[cpIndex]; indent(1); vrb.printf(" \t=> [%1$3d] ", redCpInd); printRedCpEntry(redCpInd); } // } } private void printCpEntry(int cpIndex, int tag, int indentLevel){ indent(indentLevel+1); int cpIntValue = cpIndices[cpIndex]; int cpIndH = cpIntValue >>> 16; int cpIndL = cpIntValue & 0xFFFF; vrb.printf("[%1$4d]%2$4d ", cpIndex, tag); Dbg.printCpTagIdent(tag, 12); vrb.printf(" <%1$5d,%2$5d>", cpIndH, cpIndL); switch(tag){ case cptExtSlot: vrb.printf("=0x%1$x", cpIntValue); break; case cptUtf8: vrb.printf("=%1$s", cpStrings[cpIndex]); printRedCpEntryCond(cpIndex, tag); break; case cptInteger: vrb.printf("=0x%1$x", cpIntValue); printRedCpEntryCond(cpIndex, tag); break; case cptFloat: vrb.printf("=0x%1$x", cpIntValue); printRedCpEntryCond(cpIndex, tag); break; // float pattern case cptLong: case cptDouble: printRedCpEntryCond(cpIndex, tag); break; case cptClass: printRedCpEntryCond(cpIndex, tag); break; case cptString: printRedCpEntryCond(cpIndex, tag); break; // string index case cptFieldRef: case cptMethRef: case cptIntfMethRef: case cptNameAndType:// (class index) <<16, nameAndType index printRedCpEntryCond(cpIndex, tag); break; default: assert false; } vrb.println(); } public void printImports(int indentLevel){ if(imports != 
null){ indent(indentLevel); vrb.print("imports: "); vrb.print(imports[0].name); int nofImp = imports.length; for(int imp = 1; imp < nofImp; imp++){ vrb.print(", "); vrb.print(imports[imp].name); } } } public void printInterfaces(int indentLevel){ if(interfaces != null){ indent(indentLevel); vrb.print("implements "); vrb.print(interfaces[0].name); int nofIntf = interfaces.length; for(int inf = 1; inf < nofIntf; inf++){ vrb.print(", "); vrb.print(interfaces[inf].name); } } } void printOrigConstPool(String title){ vrb.println("\nconstant pool:"); vrb.println(title); for(int pe = 1; pe < constPoolCnt; pe++){ printCpEntry(pe, cpTags[pe], 1); } } private void printReducedConstPool(String title){ vrb.println("\nreduced constant pool:"); vrb.println(title); for(int pe = 0; pe < constPool.length; pe++){ vrb.printf(" [%1$3d] ", pe); printRedCpEntry(pe); vrb.println(); } } public void printShort(int indentLevel){ indent(indentLevel); vrb.print("class "); vrb.print(name); if(type != null) { vrb.print(" extends "); vrb.print(type.name); } } public void print(int indentLevel){ indent(indentLevel); Dbg.printJavaAccAndPropertyFlags(accAndPropFlags); vrb.print("class "); vrb.print(name); if(type != null) { vrb.print(" extends "); vrb.print(type.name); } vrb.print("\n\t// dFlags"); Dbg.printDeepAccAndPropertyFlags(accAndPropFlags); vrb.print("\n\t// category: "); vrb.print((char)category); vrb.print("\n\t// source file: "); vrb.println(srcFileName); printInterfaces(indentLevel+1); vrb.println('{'); printImports(indentLevel+1); vrb.println(); printFields(indentLevel+1); printMethods(indentLevel+1); } public void printConstantBlock() { printConstantBlock(0); } public void printConstantBlock(int indentLevel) { int i = 0; if(this.constantBlock != null) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] constBlockSize\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] codeBase\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] codeSize\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] varBase\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] varSize\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] clinitAddr\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] nofPtrs\n"); i++; for(int j = 0; j < this.nOfReferences; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] ptr" + j + "\n"); i++; } for(int j = 0; j < this.classDescriptorSize / 4; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] CD[" + j + "]\n"); i++; } for(int j = 0; j < this.stringPoolSize / 4; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] SP[" + j + "]\n"); i++; } for(int j = 0; j < this.constantPoolSize / 4; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] CP[" + j + "]\n"); i++; } indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" 
["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] fcs\n"); } else { indent(indentLevel); vrb.print("<null>\n"); } } }
src/ch/ntb/inf/deep/classItems/Class.java
package ch.ntb.inf.deep.classItems; import java.io.DataInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import ch.ntb.inf.deep.config.Segment; import ch.ntb.inf.deep.debug.Dbg; import ch.ntb.inf.deep.host.ClassFileAdmin; import ch.ntb.inf.deep.config.SystemClass; import ch.ntb.inf.deep.config.SystemMethod; import ch.ntb.inf.deep.strings.HString; import ch.ntb.inf.deep.strings.StringTable; public class Class extends Type implements ICclassFileConsts, ICdescAndTypeConsts, ICjvmInstructionOpcs { //--- static fields private static final int fieldListArrayLength = 9; private static final Item[] instFieldLists = new Item[fieldListArrayLength]; // instance field lists, used by method readFields private static final Item[] classFieldLists = new Item[fieldListArrayLength]; // class field lists, used by method readFields // private static Item constFields; // constant field list (unsorted) static{ assert fieldSizeUnit >= 4 && (fieldSizeUnit & (fieldSizeUnit-1)) == 0; } //--- instance fields public Item[] constPool; // reduced constant pool public Item methods; // list with all methods public int nOfMethods; // number of methods public int nOfClassMethods, nOfInstanceMethods; // number of methods public Item fields; // list with all fields public int nOfClassFields, nOfInstanceFields; // number of fields public Class[] interfaces; public int nOfInterfaces; // number of interfaces public int nOfBaseClasses; // number of base classes public int nOfReferences; // number of class fields which are references public int[] constantBlock; // the constant block for this class public int constantBlockSize; // size of the constant block public int constantPoolSize; // size of this pool on the target (in byte) public int stringPoolSize; // size of this pool on the target (in byte) public int classDescriptorSize; // size of the class descriptor on the target (in byte) public int machineCodeSize; // size of the machine code on the target (in byte) public Segment codeSegment, varSegment, constSegment; // references to the memory segments for this class public int codeOffset, varOffset, constOffset; // the offset of the code/class fields/constant block in the dedicated segment Class[] imports; HString srcFileName; // file ident + ".java", e.g. String.java for java/lang/String.java //--- debug fields int magic, version; //--- instance methods public Class(HString registeredCpClassName){ super(registeredCpClassName, null); name = registeredCpClassName; category = tcRef; sizeInBits = 32; if(classList == null) classList = this; nofClasses++; } /** * @param newByteCode one of {new, newarray, anewarray, multianewarray} * @return the reference to the new-method, null for invalid byteCodes */ public static Method getNewMemoryMethod(int newByteCode){ int methIndex; switch(newByteCode){ case bCnew: methIndex = 0; break; case bCnewarray: methIndex = 1; break; case bCanewarray: methIndex = 2; break; case bCmultianewarray: methIndex = 3; break; default: return null; } return (Method)newMethods[methIndex]; } /** * Select field by name and delete it in the fields list (if found). 
* @param fieldName * @return the selected field or null if not found */ // private Item getAndExtractField(HString fieldName){ // assert false; // Item item = fields, pred = null; // while(item != null && item.name != fieldName) { // pred = item; // item = item.next; // } // if(item != null){ // if(pred == null) fields = item.next; else pred.next = item.next; // item.next = null; // } // return item; // } Item getField(HString fieldName){ Item item = fields; while(item != null && item.name != fieldName) item = item.next; if(item == null && type != null) item = ((Class)type).getField(fieldName); return item; } /** * Select method by name and descriptor and delete it in the methods list if found. * @param methName * @param methDescriptor * @return the selected method or null if not found */ // private Method getAndExtractMethod(HString methName, HString methDescriptor){ // assert false; // Item item = methods, pred = null; // while(item != null && (item.name != methName || ((Method)item).methDescriptor != methDescriptor) ) { // pred = item; // item = item.next; // } // if(item != null){ // if(pred == null) methods = item.next; else pred.next = item.next; // item.next = null; // } // return (Method)item; // } Item getMethod(HString name, HString descriptor){ Item item = methods; while(item != null && item.name != name) item = item.next; if(item == null && type != null) item = ((Class)type).getMethod(name, descriptor); return item; } /** * The field with fieldName is selected and returned. * If the field is not found, an new Field is created and insert, if the field is found it is checked for the correct descriptor. * @param fieldName a registered string * @param fieldType a registred string * @return the selected field or newly created one */ DataItem insertCondAndGetField(HString fieldName, Type fieldType){ //pre: all strings in the const are already registered in the proper hash table. Item item = fields; while(item != null && item.name != fieldName) item = item.next; DataItem field; if(item != null){ field = (DataItem)item; assert field.type == fieldType; }else{// create Field and update field list field = new DataItem(fieldName, fieldType); field.next = fields; fields = field; } return field; } /** * The method with methodName is selected and returned. * If the method is not found, an new Method is created and inserted, if the method is found it is checked for the correct descriptor. * @param methName a registered string * @param methDescriptor a registered string * @return the selected method or newly created one */ Method insertCondAndGetMethod(HString methName, HString methDescriptor){ //pre: all strings in the const are already registered in the proper hash table. Item item = methods; while(item != null && (item.name != methName || ((Method)item).methDescriptor != methDescriptor) ) item = item.next; Method method; if(item != null){ method = (Method)item; }else{// create Method and update method list Type retrunType = getReturnType(methDescriptor); method = new Method(methName, retrunType, methDescriptor); method.next = methods; methods = method; } return method; } /** * Check ClassInfo entry in const pool and update it accordingly if necessary. * <br>That is: if there is not yet a direct reference to an object of type Class, then such an object is created and registered. 
* @param cpClassInfoIndex index of ClassInfo entry * @return object of this type Class */ Item updateAndGetCpClassEntry(int cpClassInfoIndex){ //pre: all strings in the const pool are already registered in the proper hash table. Item cls = cpItems[cpClassInfoIndex]; if(cls == null){ HString registeredClassName = cpStrings[cpIndices[cpClassInfoIndex]]; if(registeredClassName.charAt(0) == '[') cls = getTypeByNameAndUpdate(tcArray, registeredClassName, wktObject); else cls = getTypeByNameAndUpdate(tcRef, registeredClassName, null); cpItems[cpClassInfoIndex] = cls; } return cls; } /** * Check FieldRefInfo entry in constant pool and update it accordingly if necessary. * <br>That is: if there is not yet a direct reference to an object of type Field, then such an object is created and registered. * @param cpFieldInfoIndex index of FieldRefInfo entry * @return object of this type Class */ // private Item updateAndGetCpFieldEntry(int cpFieldInfoIndex){ // //pre: all strings in the const are already registered in the proper hash table. // Item field = cpItems[cpFieldInfoIndex]; // if(field == null){ // int csx = cpIndices[cpFieldInfoIndex]; // get class and signature indices // Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); // int sx = cpIndices[csx & 0xFFFF]; // HString fieldName = cpStrings[sx>>>16]; // HString fieldDesc = cpStrings[sx & 0xFFFF]; // Type fieldType = getTypeByDescriptor(fieldDesc); // field = cls.insertCondAndGetField( fieldName, fieldType); // cpItems[cpFieldInfoIndex] = field; // } // return field; // } private Item getFieldOrStub(HString fieldName, Type fieldType){ Item field = getField(fieldName); if( field == null ) field = new ItemStub(this, fieldName, fieldType); return field; } private Item getFieldOrStub(int cpFieldInfoIndex){ //pre: all strings in the const are already registered in the proper hash table. Item field = cpItems[cpFieldInfoIndex]; if(field == null){ int csx = cpIndices[cpFieldInfoIndex]; // get class and signature indices Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); int sx = cpIndices[csx & 0xFFFF]; HString fieldName = cpStrings[sx>>>16]; HString fieldDesc = cpStrings[sx & 0xFFFF]; Type fieldType = getTypeByDescriptor(fieldDesc); field = cls.getFieldOrStub(fieldName, fieldType); } return field; } /** * Check MethRefInfo entry in constant pool and update it accordingly if necessary. * <br>That is: if there is not yet a direct reference to an object of type Method, then such an object is created and registered. * @param cpMethInfoIndex index of MethRefInfo entry * @return object of this type Class */ // private Item updateAndGetCpInterfaceMethodEntry(int cpMethInfoIndex){ // //pre: all strings in the const are already registered in the proper hash table. // Method method = null; // if(cpItems[cpMethInfoIndex] == null){ // int csx = cpIndices[cpMethInfoIndex]; // get class and signature indices // Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); // int sx = cpIndices[csx & 0xFFFF]; // // HString methName = cpStrings[sx>>>16]; // HString methDesc = cpStrings[sx & 0xFFFF]; // method = cls.insertCondAndGetMethod( methName, methDesc); // method.owner = cls; // cpItems[cpMethInfoIndex] = method; // } // return method; // } private Item getMethodOrStub(HString name, HString descriptor){ Item meth = getMethod(name, descriptor); if( meth == null ) meth = new ItemStub(this, name, descriptor); return meth; } private Item getMethodOrStub(int cpMethInfoIndex){ //pre: all strings in the const are already registered in the proper hash table. 
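// A MethodRef/InterfaceMethodRef constant packs two indices: the high 16 bits of cpIndices[cpMethInfoIndex]
// select the owning class entry, the low 16 bits the NameAndType entry, which in turn packs the name index
// (high) and the descriptor index (low). The owner class is resolved (or created) first and then asked for
// the method, falling back to an ItemStub when that class has not been loaded yet.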
Item method = null; if(cpItems[cpMethInfoIndex] == null){ int csx = cpIndices[cpMethInfoIndex]; // get class and signature indices Class cls = (Class)updateAndGetCpClassEntry(csx>>>16); int sx = cpIndices[csx & 0xFFFF]; HString methName = cpStrings[sx>>>16]; HString methDesc = cpStrings[sx & 0xFFFF]; method = cls.getMethodOrStub( methName, methDesc); } return method; } private void loadConstPool(DataInputStream clfInStrm) throws IOException{ if(verbose) vrb.println(">loadConstPool:"); magic = clfInStrm.readInt(); if(magic != 0xcafeBabe) throw new IOException("illegal class file"); if(verbose) vrb.printf("magic=0x%1$4x\n", magic); version = clfInStrm.readInt(); if(verbose) vrb.printf("version=%1$d.%2$d\n", (version&0xFFFF), (version>>>16) ); constPoolCnt = clfInStrm.readUnsignedShort(); if(verbose) vrb.printf("constPoolCnt=%1$d\n", constPoolCnt ); allocatePoolArray(constPoolCnt); for(int pEntry = 1; pEntry < constPoolCnt; pEntry++){ int tag = clfInStrm.readUnsignedByte(); cpTags[pEntry] = (byte)tag; cpIndices[pEntry] = 0; cpItems[pEntry] = null; cpStrings[pEntry] = null; switch(tag){ case cptUtf8: cpStrings[pEntry] = HString.readUTFandRegister(clfInStrm); break; case cptInteger: cpIndices[pEntry] = clfInStrm.readInt(); break; // integer value case cptFloat: cpIndices[pEntry] = clfInStrm.readInt(); break; // float pattern case cptLong: case cptDouble: cpIndices[pEntry++] = clfInStrm.readInt(); cpIndices[pEntry] = clfInStrm.readInt(); cpTags[pEntry] = cptExtSlot; cpItems[pEntry] = null; cpStrings[pEntry] = null; break; case cptClass: cpIndices[pEntry] = clfInStrm.readUnsignedShort(); break; // class index case cptString: cpIndices[pEntry] = clfInStrm.readUnsignedShort(); break; // string index case cptFieldRef: cpIndices[pEntry] = clfInStrm.readInt(); break; // (class index) <<16, nameAndType index case cptMethRef: cpIndices[pEntry] = clfInStrm.readInt(); break; // (class index) <<16, nameAndType index case cptIntfMethRef: cpIndices[pEntry] = clfInStrm.readInt(); break; // (class index) <<16, nameAndType index case cptNameAndType: cpIndices[pEntry] = clfInStrm.readInt(); break;// (name index) <<16, descriptor index default: throw new IOException("illegal tag in const pool"); } } if(verbose) vrb.println("<loadConstPool"); } private void updateConstPool() throws IOException{ if(verbose) vrb.println(">updateConstPool:"); //pre: all strings in the const are already registered in the proper hash table. 
int nofItems = 0; int pEntry; for(pEntry = 1; pEntry < constPoolCnt; pEntry++){// constPoolCnt int tag = cpTags[pEntry]; switch(tag){ case cptExtSlot: case cptUtf8: // cptExtSlot, Utf8 string break; case cptInteger: // integer literal cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txInt], cpIndices[pEntry], 0); nofItems++; break; case cptFloat: // float literal cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txFloat], cpIndices[pEntry], 0); nofItems++; break; // float pattern case cptLong: cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txLong], cpIndices[pEntry], cpIndices[pEntry+1]); nofItems++; pEntry++; break; case cptDouble: cpItems[pEntry] = new StdConstant(hsNumber, wellKnownTypes[txDouble], cpIndices[pEntry], cpIndices[pEntry+1]); nofItems++; pEntry++; break; case cptClass: // class index updateAndGetCpClassEntry(pEntry); nofItems++; break; case cptString: cpItems[pEntry] = new StringLiteral(hsString, cpStrings[cpIndices[pEntry]]); nofItems++; break; case cptFieldRef: // updateAndGetCpFieldEntry(pEntry); cpItems[pEntry] = getFieldOrStub(pEntry); nofItems++; break; case cptMethRef: // updateAndGetCpMethodEntry(pEntry); cpItems[pEntry] = getMethodOrStub(pEntry); nofItems++; break; case cptIntfMethRef: // Item meth = updateAndGetCpInterfaceMethodEntry(pEntry); // meth.accAndPropFlags |= (1<<dpfInterfCall); cpItems[pEntry] = getMethodOrStub(pEntry); nofItems++; break; case cptNameAndType: break;// (name index) <<16, descriptor index default: throw new IOException("illegal tag in const pool"); } } assert pEntry == constPoolCnt; constPool = new Item[nofItems]; while(--pEntry > 0){ Item item = cpItems[pEntry]; if(item != null){ constPool[--nofItems] = item; // cpItems[pEntry] = null; cpIndices[pEntry] = nofItems; }else{ cpIndices[pEntry] = 0; } } assert nofItems == 0; if(verbose) vrb.println("<updateConstPool"); } private void readInterfaces(DataInputStream clfInStrm) throws IOException{ int cnt = clfInStrm.readUnsignedShort(); nOfInterfaces = cnt; if(cnt > 0){ interfaces = new Class[cnt]; for (int intf = 0; intf < cnt; intf++){ int intfInx = clfInStrm.readUnsignedShort(); interfaces[intf] = (Class)cpItems[intfInx]; } } } private void addItemToFieldList(Item item){ if(verbose) vrb.println(">addItemToFieldList"); Type type = (Type)item.type; int sizeInBits = type.sizeInBits; int fieldList = sizeInBits>>3; // fieldList={0, 1, 2, 4, 8} if( (item.accAndPropFlags & (1<<apfStatic)) == 0){// instance field item.next = instFieldLists[fieldList]; instFieldLists[fieldList] = item; nOfInstanceFields++; }else{// class field item.next = classFieldLists[fieldList]; classFieldLists[fieldList] = item; nOfClassFields++; } if(verbose) vrb.println("<addItemToFieldList"); } private void clearFieldLists(){ if(verbose) vrb.println(">clearFieldLists"); nOfClassFields = 0; nOfInstanceFields = 0; instanceFieldsSize = 0; classFieldsSize = 0; for(int n = fieldListArrayLength-1; n >= 0; n--){ instFieldLists[n] = null; classFieldLists[n] = null; } if(verbose) vrb.println("<clearFieldLists"); } private Item getFieldListAndUpdate(Item[] fieldLists){ // final boolean verbose = true; if(verbose) vrb.printf(">getFieldListAndUpdate: class: %1$s\n", name); Item head = null, tail = null; Item rest = null; //--- select and extract reference fields int fieldsSize = 0; Item list = fieldLists[4]; Item item = list; while(item != null){ list = item.next; Type type = (Type)item.type; if( type.category != tcPrimitive ){// reference (tcRef || tcArray) fieldsSize += 4; item.next = head; head = item; 
if(tail == null) tail = item; }else{ item.next = rest; rest = item; } item = list; } fieldLists[4] = rest; // vrb.printf(">getF1: fieldsSize=%1$d\n", fieldsSize); //--- select and extract any other fields Item consts = null; for(int category = 8; category >= 0; category--){ list = fieldLists[category]; fieldLists[category] = null; item = list; while(item != null){ list = item.next; if( (item.accAndPropFlags & (1<<dpfConst)) != 0){ item.next = consts; consts = item; // vrb.printf(">getF1a: const.name=%1$s\n", consts.name); }else{ fieldsSize += Math.max(category, 1); // reserve for boolean etc to 1 B if(tail == null) head = item; else tail.next = item; tail = item; } item = list; } } // vrb.printf(">getF2: fieldsSize=%1$d\n", fieldsSize); if( fieldLists == instFieldLists) instanceFieldsSize = (fieldsSize + (fieldSizeUnit-1)) & -fieldSizeUnit; else classFieldsSize = (fieldsSize + (fieldSizeUnit-1)) & -fieldSizeUnit; // vrb.printf(">getF3: objectSizeOrDim=%1$d, classFieldsSize=%2$d\n", instanceFieldsSize, classFieldsSize); if( tail == null ) head = consts; else tail.next = consts; if(verbose) vrb.println("<getFieldListAndUpdate"); return head; } private Item appendItem(Item head1, Item tail1, Item head2){ if(tail1 == null) head1 = head2; else tail1.next = head2; return head1; } private void readFields(DataInputStream clfInStrm) throws IOException{ final boolean verbose = false; if(verbose) vrb.println(">readFields: "+name); clearFieldLists(); // assert fields == null; fields = null; int fieldCnt = clfInStrm.readUnsignedShort(); while(fieldCnt > 0){ int flags; HString name, descriptor; flags = clfInStrm.readUnsignedShort(); //read access and property flags //--- read name and descriptor int index = clfInStrm.readUnsignedShort(); name = cpStrings[index]; // Item field = getAndExtractField(name); index = clfInStrm.readUnsignedShort(); descriptor = cpStrings[index]; DataItem field = null; if(verbose) vrb.printf(" readFields: cls=%1$s, desc=%2$s\n", name, descriptor); //--- read field attributes {ConstantValue, Deprecated, Synthetic} int attrCnt = clfInStrm.readUnsignedShort(); while(attrCnt-- > 0){ index = clfInStrm.readUnsignedShort(); int attr = selectAttribute(index); int attrLength = clfInStrm.readInt(); switch(attr){ case atxConstantValue: index = clfInStrm.readUnsignedShort(); int rcpIndex = cpIndices[index]; Item cpField = constPool[rcpIndex]; Type type = getTypeByDescriptor(descriptor); if(verbose) vrb.printf(" readFields: field.desc=%1$s, const: name=%2$s, type=%3$s\n", descriptor, cpField.name, cpField.type.name); assert cpField instanceof Constant; field = new NamedConst(name, type, (Constant)cpField); if( (flags & (1<<apfStatic) ) != 0) flags |= (1<<dpfConst); break; case atxDeprecated: flags |= (1<<dpfDeprecated); break; case atxSynthetic: flags |= (1<<dpfSynthetic); break; default: skipAttributeAndLogCond(clfInStrm, attrLength, index); } } if(field == null){ Type type = getTypeByDescriptor(descriptor); field = new DataItem(name, type); } flags |= field.accAndPropFlags; field.accAndPropFlags = flags; addItemToFieldList(field); ((ClassMember)field).owner = this; fieldCnt--; } assert fields == null; // if(fields != null){ // Item item = fields; // while(item != null){ // vrb.print("("+item.name +") "); // item.println(1); // item = item.next; // } // vrb.println(" Class.readFieldsE-end"); // } Item clsFields = getFieldListAndUpdate(classFieldLists); Item instFields = getFieldListAndUpdate(instFieldLists); if(clsFields == null) clsFields = instFields; else{ Item tail = null, item = 
clsFields; while(item != null){ tail = item; item = item.next; } tail.next = instFields; } fields = clsFields; if(verbose) vrb.println("<readFields"); } private void readMethods(DataInputStream clfInStrm, int userReqAttributes) throws IOException{ int methodCnt = clfInStrm.readUnsignedShort(); nOfMethods = methodCnt; assert methods == null; int nofClsMeths = 0, nofInstMeths = 0; Item clsMethHead = null, clsMethTail = null; Item instMethHead = null, instMethTail = null; while(methodCnt-- > 0){ int flags; HString name, descriptor; flags = clfInStrm.readUnsignedShort(); //read access and property flags //--- read name and descriptor int index = clfInStrm.readUnsignedShort(); name = cpStrings[index]; index = clfInStrm.readUnsignedShort(); descriptor = cpStrings[index]; Type returnType = getReturnType(descriptor); // Method method = getAndExtractMethod(name, descriptor); // if(method == null){ // method = new Method(name, returnType, descriptor); // }else{ // method.type = returnType; // } // method.owner = this; // assert method.type == returnType; Method method = new Method(name, returnType, descriptor); method.owner = this; //--- read method attributes {Code, Deprecated, Synthetic} int attrCnt = clfInStrm.readUnsignedShort(); while(attrCnt-- > 0){ int cpInxOfAttr = clfInStrm.readUnsignedShort(); int attr = selectAttribute( cpInxOfAttr ); int attrLength = clfInStrm.readInt(); switch(attr){ case atxCode: if( (userReqAttributes&(1<<atxCode)) == 0){ skipAttributeAndLogCond(clfInStrm, attrLength, 0); // skip without logging break; } method.maxStackSlots = clfInStrm.readUnsignedShort(); method.maxLocals = clfInStrm.readUnsignedShort(); int codeLen = clfInStrm.readInt(); method.code = new byte[codeLen]; clfInStrm.read(method.code); //--- read exception table int excTabLen = clfInStrm.readUnsignedShort(); if(excTabLen > 0){ method.exceptionTab = new ExceptionTabEntry[excTabLen]; for(int exc = 0; exc < excTabLen; exc++){ ExceptionTabEntry entry = new ExceptionTabEntry(); method.exceptionTab[exc] = entry; entry.startPc = clfInStrm.readUnsignedShort(); entry.endPc = clfInStrm.readUnsignedShort(); entry.handlerPc = clfInStrm.readUnsignedShort(); int catchTypeInx = clfInStrm.readUnsignedShort(); entry.catchType = (Class)cpItems[catchTypeInx]; } } //--- read attributes of the code attribute {LineNumberTable, LocalVariableTable} int codAttrCnt = clfInStrm.readUnsignedShort(); while(codAttrCnt-- > 0){ int codAttrIndex = clfInStrm.readUnsignedShort(); int codeAttr = selectAttribute( codAttrIndex ); int codAttrLen = clfInStrm.readInt(); if(codeAttr == atxLocalVariableTable){ if( (userReqAttributes&(1<<atxLocalVariableTable)) == 0){ skipAttributeAndLogCond(clfInStrm, codAttrLen, 0); // skip without logging }else{ int locVarTabLength = clfInStrm.readUnsignedShort(); if(locVarTabLength > 0){ method.localVars = new LocalVar[method.maxLocals]; while(locVarTabLength-- > 0){ LocalVar locVar = new LocalVar(); locVar.startPc = clfInStrm.readUnsignedShort(); locVar.length = clfInStrm.readUnsignedShort(); locVar.name = cpStrings[ clfInStrm.readUnsignedShort() ]; locVar.type = getTypeByDescriptor( cpStrings[ clfInStrm.readUnsignedShort() ] ); locVar.index = clfInStrm.readUnsignedShort(); method.insertLocalVar(locVar); } } } }else if(codeAttr == atxLineNumberTable){ if( (userReqAttributes&(1<<atxLineNumberTable)) == 0){ skipAttributeAndLogCond(clfInStrm, codAttrLen, 0); // skip without logging }else{ int lineNrTabLength = clfInStrm.readUnsignedShort(); int[] lineNrTab = new int[lineNrTabLength]; method.lineNrTab = 
lineNrTab; for(int lnp = 0; lnp < lineNrTabLength; lnp++) lineNrTab[lnp] = clfInStrm.readInt(); } }else{// skip skipAttributeAndLogCond(clfInStrm, codAttrLen, codAttrIndex); } } break; case atxDeprecated: flags |= (1<<dpfDeprecated); break; case atxSynthetic: flags |= (1<<dpfSynthetic); break; default: skipAttributeAndLogCond(clfInStrm, attrLength, index); } } flags |= method.accAndPropFlags; method.accAndPropFlags = flags; //--- append method if( (flags & (1<<apfStatic)) != 0 ){ // class method nofClsMeths++; if(clsMethHead == null) clsMethHead = method; else clsMethTail.next = method; clsMethTail = method; }else{// instance method nofInstMeths++; if(instMethHead == null) instMethHead = method; else instMethTail.next = method; instMethTail = method; } } assert methods == null; methods = appendItem(clsMethHead, clsMethTail, instMethHead); // methods = clsMethHead; clsMethTail.next = instMethHead; nOfClassMethods = nofClsMeths; nOfInstanceMethods = nofInstMeths; } private void readClassAttributes(DataInputStream clfInStrm, int userReqAttributes) throws IOException{ int attrCnt = clfInStrm.readUnsignedShort(); while(attrCnt-- > 0){ int index = clfInStrm.readUnsignedShort(); int attr = selectAttribute(index); int attrLength = clfInStrm.readInt(); switch(attr){ case atxSourceFile: index = clfInStrm.readUnsignedShort(); srcFileName = cpStrings[index]; break; case atxDeprecated: accAndPropFlags |= (1<<dpfDeprecated); break; case atxInnerClasses: // 4.7.5, p125 if( (userReqAttributes&(1<<atxInnerClasses)) == 0) skipAttributeAndLogCond(clfInStrm, attrLength, index); else{ // TODO Auto-generated method stub assert false: "TODO"; } break; default: skipAttributeAndLogCond(clfInStrm, attrLength, index); } } } private void analyseByteCode(){ if(verbose) vrb.println(">analyseByteCode:"); Item item = methods; while(item != null){ Method meth = (Method)item; if(verbose){ vrb.print("\nmethod: "); meth.printHeader(); vrb.print(" (owner="); meth.owner.printName(); vrb.println(')'); } ByteCodePreProc.analyseCodeAndFixCpRefs(cpIndices, constPool, meth.code); item = item.next; } if(verbose) vrb.println("<analyseByteCode"); } private void loadClass(int userReqAttributes) throws IOException{ if(verbose) vrb.println(">loadClass:"); if( (accAndPropFlags & ((1<<dpfClassLoaded)|(1<<dpfSynthetic)) ) == 0 ){// if not yet loaded try{ File classFile = ClassFileAdmin.getClassFile(name); log.println("opening class file of class: "+name ); if(classFile == null) throw new FileNotFoundException(); InputStream inStrm = new FileInputStream(classFile); // new FileInputStream DataInputStream clfInStrm = new DataInputStream(inStrm); // new DataInputStream loadConstPool(clfInStrm); accAndPropFlags |= clfInStrm.readUnsignedShort(); if(verbose){ printOrigConstPool("state: 0"); // stab.print("String Table 0"); printClassList("state: 0"); print(0); } updateConstPool(); if(verbose){ printOrigConstPool("state: 1"); print(0); } clfInStrm.readUnsignedShort(); // read this class index int thisSupClassCpInx = clfInStrm.readUnsignedShort(); if(verbose) vrb.println("thisSupClassCpInx="+thisSupClassCpInx); if(thisSupClassCpInx > 0){ int constPoolInx = cpIndices[thisSupClassCpInx]; type = (Class)constPool[constPoolInx]; if(verbose){ vrb.print("superClassName="); type.printName(); } } readInterfaces(clfInStrm); readFields(clfInStrm); readMethods(clfInStrm, userReqAttributes); readClassAttributes(clfInStrm, userReqAttributes); // if(verbose){ // vrb.println("\nstate: 2"); // printOrigConstPool("state: 2"); // stab.print("String Table in state: 
2"); // printClassList("state: 2"); // print(0); // } if( (accAndPropFlags & ((1<<apfInterface)|(1<<apfEnum))) == 0){ analyseByteCode(); this.accAndPropFlags |= (1<<dpfClassLoaded); }else if( (accAndPropFlags & (1<<apfInterface)) != 0) this.accAndPropFlags |= (1<<dpfClassLoaded); if(verbose){ vrb.println("\n>dump of class: "+name); vrb.println("\nstate: 3"); // stab.print("String Table in state: 3"); printOrigConstPool("state: 3"); printReducedConstPool("state: 3"); // printClassList("state: 3"); print(0); vrb.println("\n<end of dump: "+name); } // printReducedConstPool("reduced cp, state: 3"); clfInStrm.close(); }catch (FileNotFoundException fnfE){ errRep.error("class file not found"); errRep.println(); fnfE.getCause(); } } //--- load referenced classes if( (accAndPropFlags & (1<<dpfClassLoaded)) != 0){ for(int cpx = constPool.length-1; cpx >= 0; cpx--){ Item item = constPool[cpx]; if(item instanceof Class){ Class refClass = (Class) item; if( (refClass.accAndPropFlags & ((1<<dpfClassLoaded)|(1<<dpfSynthetic)) ) == 0) { refClass.loadClass(userReqAttributes); } } } } if(verbose) vrb.println("<loadClass"); } public static void startLoading(int nofRootClasses){ if(verbose) vrb.println(">startLoading:"); rootClasses = new Class[nofRootClasses]; nofRootClasses = 0; classList = null; classListTail = null; nofClasses = 0; prevCpLenth = 0; constPoolCnt = 0; if(StringTable.getInstance() != null) StringTable.resetTable(); else{ StringTable.createSingleton(1000, "??"); stab = StringTable.getInstance(); HString.setStringTable(stab); } hsNumber = stab.insertCondAndGetEntry("#"); hsString = stab.insertCondAndGetEntry("\"\""); setUpBaseTypeTable(); setClassFileAttributeTable(stab); if(verbose) vrb.println("<startLoading"); } public static void releaseLoadingResources(){ cpItems = null; cpStrings = null; cpIndices = null; cpTags = null; prevCpLenth = 0; constPoolCnt = 0; HString.releaseBuffers(); ClassFileAdmin.clear(); // StringTable.resetTable(); // hsNumber = null; // wellKnownTypes = null; // classFileAttributeTable = null; } public static void loadRootClass(String rootClassName, int userReqAttributes) throws IOException{ if(verbose) vrb.println(">loadRootClass: "+rootClassName); HString hRootClassName = stab.insertCondAndGetEntry(rootClassName); Class root = new Class(hRootClassName); appendRootClass(root); root.accAndPropFlags |= (1<<dpfRootClass); assert root.next == null; root.loadClass(userReqAttributes); if(verbose) vrb.println("<loadRootClass"); } private static int unitedSysMethodFlags(SystemClass systemClass){ SystemMethod systemMeth = systemClass.methods; int unitedFlags = 0; if(systemMeth != null) unitedFlags = 1<<dpfSysPrimitive; while(systemMeth != null){ unitedFlags |= (systemMeth.attributes & dpfSetSysMethProperties); systemMeth = systemMeth.next; } return unitedFlags; } private static void loadSystemClass(SystemClass systemClass, int userReqAttributes) throws IOException{ final boolean verbose = false; String systemClassName = systemClass.name; int systemClassAttributes = systemClass.attributes | unitedSysMethodFlags(systemClass); if(verbose) vrb.println(">loadSystemClass: "+systemClassName); if(verbose) vrb.printf(" sysClsAttributes1=0x%1$x\n", systemClassAttributes); HString hSysClassName = stab.insertCondAndGetEntry(systemClassName); Class cls = (Class)getClassByName(hSysClassName); if(cls == null){ cls = new Class(hSysClassName); appendClass(cls); } cls.loadClass(userReqAttributes); cls.accAndPropFlags |= systemClassAttributes & dpfSetSysClassProperties; if( (systemClassAttributes 
& (1<<dpfNew)) != 0 ){// set up new memory method table SystemMethod systemMeth = systemClass.methods; while(systemMeth != null){ Item method = cls.methods.getItemByName(systemMeth.name); if(method == null){ errRep.error("method "+systemMeth.name +" in system class "+systemClass.name + " not found"); }else{ if(verbose)vrb.printf("lsc: method=%1$s, attr=0x%2$x\n", (cls.name + "." + method.name), systemMeth.attributes); int methIndex = (systemMeth.attributes-1)&0xFF; if( methIndex >= nofNewMethods ){ errRep.error("method id of"+systemMeth.name +" in system class "+systemClass.name + " out of range"); }else{ if(verbose) vrb.println(" ldSysCls: newMethInx="+methIndex); systemClassAttributes |= method.accAndPropFlags & dpfSetSysMethProperties; newMethods[methIndex] = method; if(verbose)vrb.printf("lsc: newMethods[%1$d]: %2$s\n", methIndex, method.name); } } systemMeth = systemMeth.next; } } //--- update method attributes (with system method attributes) SystemMethod systemMeth = systemClass.methods; Item method = null; while(systemMeth != null){ method = cls.methods.getItemByName(systemMeth.name); if(method != null){ method.offset = systemMeth.offset; int sysMethAttr = systemMeth.attributes & (dpfSetSysMethProperties | sysMethCodeMask); method.accAndPropFlags = (method.accAndPropFlags & ~(dpfSetSysMethProperties | sysMethCodeMask) ) |(1<<dpfSysPrimitive) | sysMethAttr; if( (sysMethAttr & (1<<dpfSynthetic)) != 0) ((Method)method).clearCodeAndAssociatedFields(); } systemMeth = systemMeth.next; } if(verbose) vrb.println("<loadSystemClass"); } private static void loadSystemClasses(SystemClass sysClasses, int userReqAttributes) throws IOException{ while(sysClasses != null){ loadSystemClass(sysClasses, userReqAttributes); if(verbose){ // vrb.println(" *system class: "+sysCls.name); // sysCls.print(0); // printClassList(" *** class list:"); // vrb.println(" end of *system class: "+sysCls.name); // sysCls.print(0); } sysClasses = sysClasses.next; } } private static void printConstPools(){ Item type = classList; while(type != null){ if(type instanceof Class){ Class cls = (Class)type; if( cls.constPool != null) cls.printReducedConstPool(cls.name.toString()); } type = type.next; } } private static void repalceConstPoolStubs(){ final boolean verbose = true; if(verbose) vrb.println(">repalceConstPoolStubs:"); Item type = classList; while(type != null){ if(type instanceof Class){ Class cls = (Class)type; if( cls.constPool != null) { Item[] cp = cls.constPool; for(int cpx = cp.length-1; cpx >= 0; cpx--) cp[cpx] = cp[cpx].getReplacedStub(); } } type = type.next; } if(verbose) vrb.println("<repalceConstPoolStubs"); } public static void buildSystem(String[] rootClassNames, String[] parentDirsOfClassFiles, SystemClass sysClasses, int userReqAttributes) throws IOException{ errRep.nofErrors = 0; Type.nofRootClasses = 0; ClassFileAdmin.registerParentDirs(parentDirsOfClassFiles); int nofRootClasses = rootClassNames.length; startLoading(nofRootClasses); Class clsObject = (Class)wellKnownTypes[txObject]; clsObject.loadClass(userReqAttributes); loadSystemClasses(sysClasses, userReqAttributes); if(verbose) printClassList("state: sysClasses loaded, class list:"); for (int rc = 0; rc < nofRootClasses && errRep.nofErrors == 0; rc++){ String sname = rootClassNames[rc]; vrb.println("\n\nRootClass["+rc +"] = "+ sname); loadRootClass( sname, userReqAttributes); } // printClassList("DbG: state 4, class list:"); // printConstPools(); repalceConstPoolStubs(); // printClassList("DbG: state 4, class list:"); // assert false; 
completeLoading(); if(verbose) printClassList("end state, class list:"); releaseLoadingResources(); log.printf("number of errors %1$d\n", errRep.nofErrors); log.print("system building "); if(errRep.nofErrors == 0) log.println("successfully done"); else log.println("terminated with errors"); } public static void buildSystem(String[] rootClassNames, int userReqAttributes) throws IOException{ buildSystem(rootClassNames, new String[] {"bin"}, null, userReqAttributes); } public Method getClassConstructor() { if(this.methods != null) return (Method)this.methods.getItemByName("<clinit>"); return null; } //--- debug primitives public void printItemCategory(){ vrb.print("class"); } public static void printClassList(String title){ vrb.println("\nclass list: (nofClasses="+nofClasses +')'); vrb.println(title); if(verbose) vrb.println("\n<class list:"); Item cls = classList; while(cls != null){ Dbg.indent(1); Dbg.printJavaAccAndPropertyFlags(cls.accAndPropFlags); vrb.print(cls.name); vrb.print(" //dFlags"); Dbg.printDeepAccAndPropertyFlags(cls.accAndPropFlags); vrb.println(); cls.printFields(2); cls.printMethods(2); cls = cls.next; vrb.println(); } if(verbose) vrb.println("end of class list>"); } public void printFields(int indentLevel){ indent(indentLevel); vrb.printf("fields: (clsFields: #=%1$d, size=%2$d B; instFields: #=%3$d, size=%4$d B)\n", nOfClassFields, classFieldsSize, nOfInstanceFields, instanceFieldsSize); Item item = fields; while(item != null){ item.println(indentLevel+1); item = item.next; } } public void printMethods(int indentLevel){ indent(indentLevel); vrb.println("methods: (#clsMeths="+ nOfClassMethods + ",#InstMeths=" + nOfInstanceMethods +')' ); Item item = methods; while(item != null){ item.println(indentLevel+1); item = item.next; } } private void printRedCpEntry(int redCpInd){ Item item = constPool[redCpInd]; item.printShort(0); Dbg.printSpace(); Dbg.printJavaAccAndPropertyFlags(item.accAndPropFlags); Dbg.print('+'); Dbg.printDeepAccAndPropertyFlags(item.accAndPropFlags); } private void printRedCpEntryCond(int cpIndex, int tag){ // if(tag < 0){// has entry in the reduced const pool if(constPool != null && tag != cptUtf8 && tag != cptNameAndType){ int redCpInd = cpIndices[cpIndex]; indent(1); vrb.printf(" \t=> [%1$3d] ", redCpInd); printRedCpEntry(redCpInd); } // } } private void printCpEntry(int cpIndex, int tag, int indentLevel){ indent(indentLevel+1); int cpIntValue = cpIndices[cpIndex]; int cpIndH = cpIntValue >>> 16; int cpIndL = cpIntValue & 0xFFFF; vrb.printf("[%1$4d]%2$4d ", cpIndex, tag); Dbg.printCpTagIdent(tag, 12); vrb.printf(" <%1$5d,%2$5d>", cpIndH, cpIndL); switch(tag){ case cptExtSlot: vrb.printf("=0x%1$x", cpIntValue); break; case cptUtf8: vrb.printf("=%1$s", cpStrings[cpIndex]); printRedCpEntryCond(cpIndex, tag); break; case cptInteger: vrb.printf("=0x%1$x", cpIntValue); printRedCpEntryCond(cpIndex, tag); break; case cptFloat: vrb.printf("=0x%1$x", cpIntValue); printRedCpEntryCond(cpIndex, tag); break; // float pattern case cptLong: case cptDouble: printRedCpEntryCond(cpIndex, tag); break; case cptClass: printRedCpEntryCond(cpIndex, tag); break; case cptString: printRedCpEntryCond(cpIndex, tag); break; // string index case cptFieldRef: case cptMethRef: case cptIntfMethRef: case cptNameAndType:// (class index) <<16, nameAndType index printRedCpEntryCond(cpIndex, tag); break; default: assert false; } vrb.println(); } public void printImports(int indentLevel){ if(imports != null){ indent(indentLevel); vrb.print("imports: "); vrb.print(imports[0].name); int nofImp 
= imports.length; for(int imp = 1; imp < nofImp; imp++){ vrb.print(", "); vrb.print(imports[imp].name); } } } public void printInterfaces(int indentLevel){ if(interfaces != null){ indent(indentLevel); vrb.print("implements "); vrb.print(interfaces[0].name); int nofIntf = interfaces.length; for(int inf = 1; inf < nofIntf; inf++){ vrb.print(", "); vrb.print(interfaces[inf].name); } } } void printOrigConstPool(String title){ vrb.println("\nconstant pool:"); vrb.println(title); for(int pe = 1; pe < constPoolCnt; pe++){ printCpEntry(pe, cpTags[pe], 1); } } private void printReducedConstPool(String title){ vrb.println("\nreduced constant pool:"); vrb.println(title); for(int pe = 0; pe < constPool.length; pe++){ vrb.printf(" [%1$3d] ", pe); printRedCpEntry(pe); vrb.println(); } } public void printShort(int indentLevel){ indent(indentLevel); vrb.print("class "); vrb.print(name); if(type != null) { vrb.print(" extends "); vrb.print(type.name); } } public void print(int indentLevel){ indent(indentLevel); Dbg.printJavaAccAndPropertyFlags(accAndPropFlags); vrb.print("class "); vrb.print(name); if(type != null) { vrb.print(" extends "); vrb.print(type.name); } vrb.print("\n\t// dFlags"); Dbg.printDeepAccAndPropertyFlags(accAndPropFlags); vrb.print("\n\t// category: "); vrb.print((char)category); vrb.print("\n\t// source file: "); vrb.println(srcFileName); printInterfaces(indentLevel+1); vrb.println('{'); printImports(indentLevel+1); vrb.println(); printFields(indentLevel+1); printMethods(indentLevel+1); } public void printConstantBlock() { printConstantBlock(0); } public void printConstantBlock(int indentLevel) { int i = 0; if(this.constantBlock != null) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] constBlockSize\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] codeBase\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] codeSize\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] varBase\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] varSize\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] clinitAddr\n"); i++; indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] nofPtrs\n"); i++; for(int j = 0; j < this.nOfReferences; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] ptr" + j + "\n"); i++; } for(int j = 0; j < this.classDescriptorSize / 4; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] CD[" + j + "]\n"); i++; } for(int j = 0; j < this.stringPoolSize / 4; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] SP[" + j + "]\n"); i++; } for(int j = 0; j < this.constantPoolSize / 4; j++) { indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] CP[" + j + "]\n"); i++; } indent(indentLevel); vrb.printf("> %4d", i); vrb.print(" ["); vrb.printf("%8x", this.constantBlock[i]); vrb.print("] fcs\n"); } else { 
indent(indentLevel); vrb.print("<null>\n"); } } }
Class: problem with double loading of some classes fixed -> solution by Ernst
src/ch/ntb/inf/deep/classItems/Class.java
Class: problem with double loading of some classes fixed -> solution by Ernst
<ide><path>rc/ch/ntb/inf/deep/classItems/Class.java <ide> <ide> if(verbose) vrb.println(">readFields: "+name); <ide> clearFieldLists(); <del>// assert fields == null; <del> fields = null; <add> assert fields == null; <ide> <ide> int fieldCnt = clfInStrm.readUnsignedShort(); <ide> while(fieldCnt > 0){ <ide> analyseByteCode(); <ide> this.accAndPropFlags |= (1<<dpfClassLoaded); <ide> }else if( (accAndPropFlags & (1<<apfInterface)) != 0) this.accAndPropFlags |= (1<<dpfClassLoaded); <add> else if( (accAndPropFlags & (1<<apfInterface)) != 0){ <add> this.accAndPropFlags |= (1<<dpfClassLoaded); <add> } <ide> <ide> if(verbose){ <ide> vrb.println("\n>dump of class: "+name);
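The commit above fixes a double-loading problem; the surrounding loader already marks classes with a loaded flag (dpfClassLoaded) before following references. Below is a minimal sketch of that guard idea only, with a plain boolean flag and hypothetical names throughout — it is not the project's actual loader.

import java.util.HashMap;
import java.util.Map;

// Hypothetical illustration: a loader that records what it has already loaded,
// so a repeated request for the same class name cannot load (and re-append) it twice.
class SimpleClassRegistry {

    private final Map<String, Boolean> loaded = new HashMap<>();

    void loadClass(String name) {
        // Guard: if this class was loaded before, return instead of loading it again.
        if (Boolean.TRUE.equals(loaded.get(name))) {
            return;
        }
        // ... read the class file, build the constant pool, fields and methods ...
        loaded.put(name, Boolean.TRUE);
    }

    public static void main(String[] args) {
        SimpleClassRegistry registry = new SimpleClassRegistry();
        registry.loadClass("java/lang/Object");
        registry.loadClass("java/lang/Object"); // second call is a no-op
    }
}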
Java
lgpl-2.1
31c69bca18d5e5c66f66fac7d7ca972480151657
0
jstourac/wildfly,iweiss/wildfly,jstourac/wildfly,iweiss/wildfly,iweiss/wildfly,wildfly/wildfly,wildfly/wildfly,jstourac/wildfly,rhusar/wildfly,pferraro/wildfly,rhusar/wildfly,rhusar/wildfly,iweiss/wildfly,pferraro/wildfly,pferraro/wildfly,rhusar/wildfly,jstourac/wildfly,wildfly/wildfly,wildfly/wildfly,pferraro/wildfly
/* * JBoss, Home of Professional Open Source. * Copyright 2015, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.test.integration.domain; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.*; import java.io.IOException; import java.util.Iterator; import java.util.List; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.client.helpers.domain.DomainClient; import org.jboss.as.controller.operations.common.Util; import org.jboss.as.test.integration.domain.management.util.DomainLifecycleUtil; import org.jboss.as.test.integration.domain.management.util.DomainTestUtils; import org.jboss.as.test.integration.domain.management.util.WildFlyManagedConfiguration; import org.jboss.as.test.integration.management.util.MgmtOperationException; import org.jboss.as.test.shared.TimeoutUtil; import org.jboss.dmr.ModelNode; import org.jboss.dmr.Property; import org.junit.Assert; import org.junit.Test; /** * Checks that the child resources that should be ordered are in fact so on a slave reconnect. * At the moment this is only jgroups protocols. Although we have good tests for the indexed adds * working on reconnect in core, this is here as a sanity that no special describe handler is used * overriding the default mechanism. 
* * @author Kabir Khan */ public class OrderedChildResourcesTestCase extends BuildConfigurationTestBase { public static final String slaveAddress = System.getProperty("jboss.test.host.slave.address", "127.0.0.1"); private static final String SECONDARY_HOST_NAME = "secondary"; private static final int ADJUSTED_SECOND = TimeoutUtil.adjust(1000); private static final String TARGET_PROTOCOL = "pbcast.STABLE"; @Test public void testOrderedChildResources() throws Exception { final WildFlyManagedConfiguration masterConfig = createConfiguration("domain.xml", "host-primary.xml", getClass().getSimpleName()); final WildFlyManagedConfiguration slaveConfig = createConfiguration("domain.xml", "host-secondary.xml", getClass().getSimpleName(), SECONDARY_HOST_NAME, slaveAddress, 19990); try (DomainLifecycleUtil masterUtils = new DomainLifecycleUtil(masterConfig); DomainLifecycleUtil slaveUtils = new DomainLifecycleUtil(slaveConfig)) { masterUtils.start(); slaveUtils.start(); PathAddress stackAddress = PathAddress.pathAddress(PROFILE, "full-ha") .append(SUBSYSTEM, "jgroups") .append("stack", "tcp"); final ModelNode originalMasterStack = readResource(masterUtils.getDomainClient(), stackAddress); originalMasterStack.protect(); final ModelNode originalSlaveStack = readResource(slaveUtils.getDomainClient(), stackAddress); originalSlaveStack.protect(); Assert.assertEquals(originalMasterStack, originalSlaveStack); int index = -1; ModelNode value = null; Iterator<Property> it = originalMasterStack.get(PROTOCOL).asPropertyList().iterator(); for (int i = 0; it.hasNext(); i++) { Property property = it.next(); if (property.getName().equals(TARGET_PROTOCOL)) { value = property.getValue(); index = i; break; } } //Make sure that we found the protocol and that it is not at the end Assert.assertTrue(0 <= index); Assert.assertTrue(index < originalMasterStack.get(PROTOCOL).keys().size() - 2); PathAddress targetProtocolAddress = stackAddress.append(PROTOCOL, TARGET_PROTOCOL); //Remove the protocol DomainTestUtils.executeForResult(Util.createRemoveOperation(targetProtocolAddress), masterUtils.getDomainClient()); //Reload the master into admin-only and re-add the protocol reloadMaster(masterUtils, true); ModelNode add = value.clone(); add.get(OP).set(ADD); add.get(OP_ADDR).set(targetProtocolAddress.toModelNode()); add.get(ADD_INDEX).set(index); DomainTestUtils.executeForResult(add, masterUtils.getDomainClient()); //Reload the master into normal mode and check the protocol is in the right place on the slave reloadMaster(masterUtils, false); ModelNode slaveStack = readResource(slaveUtils.getDomainClient(), stackAddress); Assert.assertEquals(originalMasterStack, slaveStack); //Check that :read-operation-description has add-index defined; WFLY-6782 ModelNode rodOp = Util.createOperation(READ_OPERATION_DESCRIPTION_OPERATION, targetProtocolAddress); rodOp.get(NAME).set(ADD); ModelNode result = DomainTestUtils.executeForResult(rodOp, masterUtils.getDomainClient()); Assert.assertTrue(result.get(REQUEST_PROPERTIES).hasDefined(ADD_INDEX)); } } private ModelNode readResource(DomainClient client, PathAddress pathAddress) throws IOException, MgmtOperationException { ModelNode rr = Util.createEmptyOperation(READ_RESOURCE_OPERATION, pathAddress); return DomainTestUtils.executeForResult(rr, client); } private void reloadMaster(DomainLifecycleUtil domainMasterLifecycleUtil, boolean adminOnly) throws Exception{ ModelNode restartAdminOnly = Util.createEmptyOperation("reload", PathAddress.pathAddress(HOST, "master")); 
restartAdminOnly.get("admin-only").set(adminOnly); domainMasterLifecycleUtil.executeAwaitConnectionClosed(restartAdminOnly); domainMasterLifecycleUtil.connect(); domainMasterLifecycleUtil.awaitHostController(System.currentTimeMillis()); if (!adminOnly) { //Wait for the slave to reconnect, look for the slave in the list of hosts long end = System.currentTimeMillis() + 20 * ADJUSTED_SECOND; boolean slaveReconnected = false; do { Thread.sleep(1 * ADJUSTED_SECOND); slaveReconnected = checkSlaveReconnected(domainMasterLifecycleUtil.getDomainClient()); } while (!slaveReconnected && System.currentTimeMillis() < end); } } private boolean checkSlaveReconnected(DomainClient masterClient) throws Exception { ModelNode op = Util.createEmptyOperation(READ_CHILDREN_NAMES_OPERATION, PathAddress.EMPTY_ADDRESS); op.get(CHILD_TYPE).set(HOST); try { ModelNode ret = DomainTestUtils.executeForResult(op, masterClient); List<ModelNode> list = ret.asList(); if (list.size() == 2) { for (ModelNode entry : list) { if (SECONDARY_HOST_NAME.equals(entry.asString())){ return true; } } } } catch (Exception e) { } return false; } }
testsuite/domain/src/test/java/org/jboss/as/test/integration/domain/OrderedChildResourcesTestCase.java
/* * JBoss, Home of Professional Open Source. * Copyright 2015, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.test.integration.domain; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.*; import java.io.IOException; import java.util.Iterator; import java.util.List; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.client.helpers.domain.DomainClient; import org.jboss.as.controller.operations.common.Util; import org.jboss.as.test.integration.domain.management.util.DomainLifecycleUtil; import org.jboss.as.test.integration.domain.management.util.DomainTestUtils; import org.jboss.as.test.integration.domain.management.util.WildFlyManagedConfiguration; import org.jboss.as.test.integration.management.util.MgmtOperationException; import org.jboss.as.test.shared.TimeoutUtil; import org.jboss.dmr.ModelNode; import org.jboss.dmr.Property; import org.junit.Assert; import org.junit.Test; /** * Checks that the child resources that should be ordered are in fact so on a slave reconnect. * At the moment this is only jgroups protocols. Although we have good tests for the indexed adds * working on reconnect in core, this is here as a sanity that no special describe handler is used * overriding the default mechanism. 
* * @author Kabir Khan */ public class OrderedChildResourcesTestCase extends BuildConfigurationTestBase { public static final String slaveAddress = System.getProperty("jboss.test.host.slave.address", "127.0.0.1"); private static final String SECONDARY_HOST_NAME = "secondary"; private static final int ADJUSTED_SECOND = TimeoutUtil.adjust(1000); private static final String TARGET_PROTOCOL = "pbcast.STABLE"; @Test public void testOrderedChildResources() throws Exception { final WildFlyManagedConfiguration masterConfig = createConfiguration("domain.xml", "host-primary.xml", getClass().getSimpleName()); final DomainLifecycleUtil masterUtils = new DomainLifecycleUtil(masterConfig); final WildFlyManagedConfiguration slaveConfig = createConfiguration("domain.xml", "host-secondary.xml", getClass().getSimpleName(), SECONDARY_HOST_NAME, slaveAddress, 19990); final DomainLifecycleUtil slaveUtils = new DomainLifecycleUtil(slaveConfig); try { masterUtils.start(); slaveUtils.start(); PathAddress stackAddress = PathAddress.pathAddress(PROFILE, "full-ha") .append(SUBSYSTEM, "jgroups") .append("stack", "tcp"); final ModelNode originalMasterStack = readResource(masterUtils.getDomainClient(), stackAddress); originalMasterStack.protect(); final ModelNode originalSlaveStack = readResource(slaveUtils.getDomainClient(), stackAddress); originalSlaveStack.protect(); Assert.assertEquals(originalMasterStack, originalSlaveStack); int index = -1; ModelNode value = null; Iterator<Property> it = originalMasterStack.get(PROTOCOL).asPropertyList().iterator(); for (int i = 0; it.hasNext(); i++) { Property property = it.next(); if (property.getName().equals(TARGET_PROTOCOL)) { value = property.getValue(); index = i; break; } } //Make sure that we found the protocol and that it is not at the end Assert.assertTrue(0 <= index); Assert.assertTrue(index < originalMasterStack.get(PROTOCOL).keys().size() - 2); PathAddress targetProtocolAddress = stackAddress.append(PROTOCOL, TARGET_PROTOCOL); //Remove the protocol DomainTestUtils.executeForResult(Util.createRemoveOperation(targetProtocolAddress), masterUtils.getDomainClient()); //Reload the master into admin-only and re-add the protocol reloadMaster(masterUtils, true); ModelNode add = value.clone(); add.get(OP).set(ADD); add.get(OP_ADDR).set(targetProtocolAddress.toModelNode()); add.get(ADD_INDEX).set(index); DomainTestUtils.executeForResult(add, masterUtils.getDomainClient()); //Reload the master into normal mode and check the protocol is in the right place on the slave reloadMaster(masterUtils, false); ModelNode slaveStack = readResource(slaveUtils.getDomainClient(), stackAddress); Assert.assertEquals(originalMasterStack, slaveStack); //Check that :read-operation-description has add-index defined; WFLY-6782 ModelNode rodOp = Util.createOperation(READ_OPERATION_DESCRIPTION_OPERATION, targetProtocolAddress); rodOp.get(NAME).set(ADD); ModelNode result = DomainTestUtils.executeForResult(rodOp, masterUtils.getDomainClient()); Assert.assertTrue(result.get(REQUEST_PROPERTIES).hasDefined(ADD_INDEX)); } finally { try { slaveUtils.stop(); } finally { masterUtils.stop(); } } } private ModelNode readResource(DomainClient client, PathAddress pathAddress) throws IOException, MgmtOperationException { ModelNode rr = Util.createEmptyOperation(READ_RESOURCE_OPERATION, pathAddress); return DomainTestUtils.executeForResult(rr, client); } private void reloadMaster(DomainLifecycleUtil domainMasterLifecycleUtil, boolean adminOnly) throws Exception{ ModelNode restartAdminOnly = 
Util.createEmptyOperation("reload", PathAddress.pathAddress(HOST, "master")); restartAdminOnly.get("admin-only").set(adminOnly); domainMasterLifecycleUtil.executeAwaitConnectionClosed(restartAdminOnly); domainMasterLifecycleUtil.connect(); domainMasterLifecycleUtil.awaitHostController(System.currentTimeMillis()); if (!adminOnly) { //Wait for the slave to reconnect, look for the slave in the list of hosts long end = System.currentTimeMillis() + 20 * ADJUSTED_SECOND; boolean slaveReconnected = false; do { Thread.sleep(1 * ADJUSTED_SECOND); slaveReconnected = checkSlaveReconnected(domainMasterLifecycleUtil.getDomainClient()); } while (!slaveReconnected && System.currentTimeMillis() < end); } } private boolean checkSlaveReconnected(DomainClient masterClient) throws Exception { ModelNode op = Util.createEmptyOperation(READ_CHILDREN_NAMES_OPERATION, PathAddress.EMPTY_ADDRESS); op.get(CHILD_TYPE).set(HOST); try { ModelNode ret = DomainTestUtils.executeForResult(op, masterClient); List<ModelNode> list = ret.asList(); if (list.size() == 2) { for (ModelNode entry : list) { if (SECONDARY_HOST_NAME.equals(entry.asString())){ return true; } } } } catch (Exception e) { } return false; } }
Utilize try-with-resources.
testsuite/domain/src/test/java/org/jboss/as/test/integration/domain/OrderedChildResourcesTestCase.java
Utilize try-with-resources.
<ide><path>estsuite/domain/src/test/java/org/jboss/as/test/integration/domain/OrderedChildResourcesTestCase.java <ide> @Test <ide> public void testOrderedChildResources() throws Exception { <ide> final WildFlyManagedConfiguration masterConfig = createConfiguration("domain.xml", "host-primary.xml", getClass().getSimpleName()); <del> final DomainLifecycleUtil masterUtils = new DomainLifecycleUtil(masterConfig); <ide> final WildFlyManagedConfiguration slaveConfig = createConfiguration("domain.xml", "host-secondary.xml", getClass().getSimpleName(), <ide> SECONDARY_HOST_NAME, slaveAddress, 19990); <del> final DomainLifecycleUtil slaveUtils = new DomainLifecycleUtil(slaveConfig); <del> try { <add> try (DomainLifecycleUtil masterUtils = new DomainLifecycleUtil(masterConfig); <add> DomainLifecycleUtil slaveUtils = new DomainLifecycleUtil(slaveConfig)) { <ide> masterUtils.start(); <ide> slaveUtils.start(); <ide> <ide> rodOp.get(NAME).set(ADD); <ide> ModelNode result = DomainTestUtils.executeForResult(rodOp, masterUtils.getDomainClient()); <ide> Assert.assertTrue(result.get(REQUEST_PROPERTIES).hasDefined(ADD_INDEX)); <del> } finally { <del> try { <del> slaveUtils.stop(); <del> } finally { <del> masterUtils.stop(); <del> } <ide> } <ide> } <ide>
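The commit above replaces the manual stop() calls in nested finally blocks with try-with-resources. A minimal sketch of that pattern, using hypothetical AutoCloseable resources in place of the DomainLifecycleUtil instances:

// Hypothetical resource standing in for DomainLifecycleUtil; any AutoCloseable works.
class ManagedHost implements AutoCloseable {
    private final String name;

    ManagedHost(String name) {
        this.name = name;
    }

    void start() {
        System.out.println("starting " + name);
    }

    @Override
    public void close() {
        // Called automatically, in reverse declaration order, when the try block exits.
        System.out.println("stopping " + name);
    }
}

public class TryWithResourcesExample {
    public static void main(String[] args) {
        // Both resources are closed even if start() or the body throws,
        // replacing the nested try/finally from the old version of the test.
        try (ManagedHost master = new ManagedHost("master");
             ManagedHost slave = new ManagedHost("slave")) {
            master.start();
            slave.start();
        }
    }
}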
JavaScript
mit
90f367ed8cf70a295aa17ee0dbdaa0190ad2328c
0
dvajs/dva
import warning from 'warning'; import { isArray } from './utils'; import { NAMESPACE_SEP } from './constants'; function prefix(obj, namespace, type) { return Object.keys(obj).reduce((memo, key) => { warning( key.indexOf(`${namespace}${NAMESPACE_SEP}`) !== 0, `[prefixNamespace]: ${type} ${key} should not be prefixed with namespace ${namespace}`, ); const newKey = `${namespace}${NAMESPACE_SEP}${key}`; memo[newKey] = obj[key]; return memo; }, {}); } export default function prefixNamespace(model) { const { namespace, reducers, effects } = model; if (reducers) { if (isArray(reducers)) { // Make a copy; do not modify model.reducers[0] in place, or the prefix is added again on remount in micro-frontend scenarios const [reducer, ...rest] = reducers; model.reducers = [prefix(reducer, namespace, 'reducer'), ...rest]; } else { model.reducers = prefix(reducers, namespace, 'reducer'); } } if (effects) { model.effects = prefix(effects, namespace, 'effect'); } return model; }
packages/dva-core/src/prefixNamespace.js
import warning from 'warning'; import { isArray } from './utils'; import { NAMESPACE_SEP } from './constants'; function prefix(obj, namespace, type) { return Object.keys(obj).reduce((memo, key) => { warning( key.indexOf(`${namespace}${NAMESPACE_SEP}`) !== 0, `[prefixNamespace]: ${type} ${key} should not be prefixed with namespace ${namespace}`, ); const newKey = `${namespace}${NAMESPACE_SEP}${key}`; memo[newKey] = obj[key]; return memo; }, {}); } export default function prefixNamespace(model) { const { namespace, reducers, effects } = model; if (reducers) { if (isArray(reducers)) { model.reducers[0] = prefix(reducers[0], namespace, 'reducer'); } else { model.reducers = prefix(reducers, namespace, 'reducer'); } } if (effects) { model.effects = prefix(effects, namespace, 'effect'); } return model; }
fix: qiankun remount dva model (#2408) * fix: qiankun remount dva model fix: qiankun remount for array type reducers * chore: add comments about changes
packages/dva-core/src/prefixNamespace.js
fix: qiankun remount dva model (#2408)
<ide><path>ackages/dva-core/src/prefixNamespace.js <ide> <ide> if (reducers) { <ide> if (isArray(reducers)) { <del> model.reducers[0] = prefix(reducers[0], namespace, 'reducer'); <add> // Make a copy; do not modify model.reducers[0] in place, or the prefix is added again on remount in micro-frontend scenarios <add> const [reducer, ...rest] = reducers; <add> model.reducers = [prefix(reducer, namespace, 'reducer'), ...rest]; <ide> } else { <ide> model.reducers = prefix(reducers, namespace, 'reducer'); <ide> }
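The fix above copies the reducers array instead of writing back into model.reducers[0], so prefixing stays idempotent when the model is mounted again under qiankun. The original is JavaScript; below is a minimal Java sketch of the same copy-before-transform idea, using a hypothetical map-based model rather than dva's actual structures.

import java.util.LinkedHashMap;
import java.util.Map;

public class PrefixExample {

    // Returns a new map with every key prefixed; the input map is left untouched,
    // so calling this twice with the same input cannot double-prefix anything.
    static Map<String, Runnable> prefixKeys(Map<String, Runnable> reducers, String namespace) {
        Map<String, Runnable> prefixed = new LinkedHashMap<>();
        for (Map.Entry<String, Runnable> entry : reducers.entrySet()) {
            prefixed.put(namespace + "/" + entry.getKey(), entry.getValue());
        }
        return prefixed;
    }

    public static void main(String[] args) {
        Map<String, Runnable> reducers = new LinkedHashMap<>();
        reducers.put("save", () -> { });

        Map<String, Runnable> first = prefixKeys(reducers, "count");
        Map<String, Runnable> second = prefixKeys(reducers, "count"); // remount: same result

        System.out.println(first.keySet());  // [count/save]
        System.out.println(second.keySet()); // [count/save], not [count/count/save]
    }
}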
Java
mit
e2887b4cc9c0f550a836ab9e0672e14684620586
0
jirkapinkas/jsitemapgenerator
package cz.jiripinkas.jsitemapgenerator; import java.io.*; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.util.zip.GZIPOutputStream; import cz.jiripinkas.jsitemapgenerator.exception.GWTException; public abstract class AbstractSitemapGenerator extends AbstractGenerator { protected W3CDateFormat dateFormat = new W3CDateFormat(); public AbstractSitemapGenerator(String baseUrl) { super(baseUrl); } public abstract String[] constructSitemap(); /** * Construct sitemap into single String * * @return sitemap */ public String constructSitemapString() { String[] sitemapArray = constructSitemap(); StringBuilder result = new StringBuilder(); for (String line : sitemapArray) { result.append(line); } return result.toString(); } private ByteArrayOutputStream gzipIt(InputStream inputStream) { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; try { try(GZIPOutputStream gzos = new GZIPOutputStream(outputStream); InputStream in = inputStream) { int len; while ((len = in.read(buffer)) > 0) { gzos.write(buffer, 0, len); } } } catch (IOException ex) { throw new RuntimeException("Cannot perform gzip", ex); } return outputStream; } /** * Construct sitemap into gzipped file * * @return byte array */ public byte[] constructSitemapGzip() { String sitemap = constructSitemapString(); ByteArrayInputStream inputStream = new ByteArrayInputStream(sitemap.getBytes(StandardCharsets.UTF_8)); ByteArrayOutputStream outputStream = gzipIt(inputStream); return outputStream.toByteArray(); } /** * Save sitemap to output file * * @param file * Output file * @param sitemap * Sitemap as array of Strings (created by constructSitemap() * method) * @throws IOException * when error */ public void saveSitemap(File file, String[] sitemap) throws IOException { try(BufferedWriter writer = new BufferedWriter(new FileWriter(file))) { for (String string : sitemap) { writer.write(string); } } } /** * Construct and save sitemap to output file * * @param file * Output file * @throws IOException * when error */ public void constructAndSaveSitemap(File file) throws IOException { String[] sitemap = constructSitemap(); saveSitemap(file, sitemap); } /** * Ping Google that sitemap has changed. Will call this URL: * http://www.google * .com/webmasters/tools/ping?sitemap=URL_Encoded_sitemapUrl * * @param sitemapUrl * sitemap url */ public void pingGoogle(String sitemapUrl) { ping("http://www.google.com/webmasters/tools/ping?sitemap=", sitemapUrl); } /** * Ping Bing that sitemap has changed. Will call this URL: * http://www.bing.com/ping?sitemap=URL_Encoded_sitemapUrl * * @param sitemapUrl * sitemap url * */ public void pingBing(String sitemapUrl) { ping("http://www.bing.com/ping?sitemap=", sitemapUrl); } private void ping(String resourceUrl, String sitemapUrl) { try { String pingUrl = resourceUrl + URLEncoder.encode(sitemapUrl, "UTF-8"); // ping Bing int returnCode = HttpClientUtil.get(pingUrl); if (returnCode != 200) { throw new GWTException("Google could not be informed about new sitemap!"); } } catch (Exception ex) { throw new GWTException("Google could not be informed about new sitemap!"); } } /** * Ping Google that sitemap has changed. Sitemap must be on this location: * baseUrl/sitemap.xml (for example http://www.javavids.com/sitemap.xml) */ public void pingGoogle() { pingGoogle(baseUrl + "sitemap.xml"); } /** * Ping Google that sitemap has changed. 
Sitemap must be on this location: * baseUrl/sitemap.xml (for example http://www.javavids.com/sitemap.xml) */ public void pingBing() { pingBing(baseUrl + "sitemap.xml"); } /** * Escape special characters in XML * @param url Url to be escaped * @return Escaped url */ protected String escapeXmlSpecialCharacters(String url) { // https://stackoverflow.com/questions/1091945/what-characters-do-i-need-to-escape-in-xml-documents return url .replace("&", "&amp;") // must be escaped first!!! .replace("\"", "&quot;") .replace("'", "&apos;") .replace("<", "&lt;") .replace(">", "&gt;"); } }
src/main/java/cz/jiripinkas/jsitemapgenerator/AbstractSitemapGenerator.java
package cz.jiripinkas.jsitemapgenerator; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.net.URLEncoder; import cz.jiripinkas.jsitemapgenerator.exception.GWTException; public abstract class AbstractSitemapGenerator extends AbstractGenerator { protected W3CDateFormat dateFormat = new W3CDateFormat(); public AbstractSitemapGenerator(String baseUrl) { super(baseUrl); } public abstract String[] constructSitemap(); /** * Construct sitemap into single String * * @return sitemap */ public String constructSitemapString() { String[] sitemapArray = constructSitemap(); StringBuilder result = new StringBuilder(); for (String line : sitemapArray) { result.append(line); } return result.toString(); } /** * Save sitemap to output file * * @param file * Output file * @param sitemap * Sitemap as array of Strings (created by constructSitemap() * method) * @throws IOException * when error */ public void saveSitemap(File file, String[] sitemap) throws IOException { try(BufferedWriter writer = new BufferedWriter(new FileWriter(file))) { for (String string : sitemap) { writer.write(string); } } } /** * Construct and save sitemap to output file * * @param file * Output file * @throws IOException * when error */ public void constructAndSaveSitemap(File file) throws IOException { String[] sitemap = constructSitemap(); saveSitemap(file, sitemap); } /** * Ping Google that sitemap has changed. Will call this URL: * http://www.google * .com/webmasters/tools/ping?sitemap=URL_Encoded_sitemapUrl * * @param sitemapUrl * sitemap url */ public void pingGoogle(String sitemapUrl) { ping("http://www.google.com/webmasters/tools/ping?sitemap=", sitemapUrl); } /** * Ping Bing that sitemap has changed. Will call this URL: * http://www.bing.com/ping?sitemap=URL_Encoded_sitemapUrl * * @param sitemapUrl * sitemap url * */ public void pingBing(String sitemapUrl) { ping("http://www.bing.com/ping?sitemap=", sitemapUrl); } private void ping(String resourceUrl, String sitemapUrl) { try { String pingUrl = resourceUrl + URLEncoder.encode(sitemapUrl, "UTF-8"); // ping Bing int returnCode = HttpClientUtil.get(pingUrl); if (returnCode != 200) { throw new GWTException("Google could not be informed about new sitemap!"); } } catch (Exception ex) { throw new GWTException("Google could not be informed about new sitemap!"); } } /** * Ping Google that sitemap has changed. Sitemap must be on this location: * baseUrl/sitemap.xml (for example http://www.javavids.com/sitemap.xml) */ public void pingGoogle() { pingGoogle(baseUrl + "sitemap.xml"); } /** * Ping Google that sitemap has changed. Sitemap must be on this location: * baseUrl/sitemap.xml (for example http://www.javavids.com/sitemap.xml) */ public void pingBing() { pingBing(baseUrl + "sitemap.xml"); } /** * Escape special characters in XML * @param url Url to be escaped * @return Escaped url */ protected String escapeXmlSpecialCharacters(String url) { // https://stackoverflow.com/questions/1091945/what-characters-do-i-need-to-escape-in-xml-documents return url .replace("&", "&amp;") // must be escaped first!!! .replace("\"", "&quot;") .replace("'", "&apos;") .replace("<", "&lt;") .replace(">", "&gt;"); } }
export sitemap to gzip
src/main/java/cz/jiripinkas/jsitemapgenerator/AbstractSitemapGenerator.java
export sitemap to gzip
<ide><path>rc/main/java/cz/jiripinkas/jsitemapgenerator/AbstractSitemapGenerator.java <ide> package cz.jiripinkas.jsitemapgenerator; <ide> <del>import java.io.BufferedWriter; <del>import java.io.File; <del>import java.io.FileWriter; <del>import java.io.IOException; <add>import java.io.*; <ide> import java.net.URLEncoder; <add>import java.nio.charset.StandardCharsets; <add>import java.util.zip.GZIPOutputStream; <ide> <ide> import cz.jiripinkas.jsitemapgenerator.exception.GWTException; <ide> <ide> result.append(line); <ide> } <ide> return result.toString(); <add> } <add> <add> private ByteArrayOutputStream gzipIt(InputStream inputStream) { <add> ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); <add> byte[] buffer = new byte[1024]; <add> try { <add> try(GZIPOutputStream gzos = new GZIPOutputStream(outputStream); <add> InputStream in = inputStream) { <add> int len; <add> while ((len = in.read(buffer)) > 0) { <add> gzos.write(buffer, 0, len); <add> } <add> } <add> } catch (IOException ex) { <add> throw new RuntimeException("Cannot perform gzip", ex); <add> } <add> return outputStream; <add> } <add> <add> /** <add> * Construct sitemap into gzipped file <add> * <add> * @return byte array <add> */ <add> public byte[] constructSitemapGzip() { <add> String sitemap = constructSitemapString(); <add> ByteArrayInputStream inputStream = new ByteArrayInputStream(sitemap.getBytes(StandardCharsets.UTF_8)); <add> ByteArrayOutputStream outputStream = gzipIt(inputStream); <add> return outputStream.toByteArray(); <ide> } <ide> <ide> /**
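The new constructSitemapGzip() above compresses the sitemap string with GZIPOutputStream. A minimal standalone sketch of that compression step, assuming the sitemap XML already exists as a String; the class and method names here are illustrative, not part of the library:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public class GzipSitemapExample {

    // Compresses a String to gzip bytes, e.g. for writing sitemap.xml.gz.
    static byte[] gzip(String text) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (GZIPOutputStream gzipStream = new GZIPOutputStream(out)) {
            gzipStream.write(text.getBytes(StandardCharsets.UTF_8));
        } catch (IOException ex) {
            throw new UncheckedIOException(ex);
        }
        // The stream is closed (and fully flushed) before the bytes are read back.
        return out.toByteArray();
    }

    public static void main(String[] args) {
        byte[] compressed = gzip("<?xml version=\"1.0\" encoding=\"UTF-8\"?><urlset></urlset>");
        System.out.println("gzipped size: " + compressed.length + " bytes");
    }
}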
Java
agpl-3.0
a82d390cb6f0f3db1dac9c1f989ceb8ac0cb328c
0
SICTIAM/ozwillo-portal,ozwillo/ozwillo-portal,ozwillo/ozwillo-portal,SICTIAM/ozwillo-portal,ozwillo/ozwillo-portal,ozwillo/ozwillo-portal,SICTIAM/ozwillo-portal,SICTIAM/ozwillo-portal
package org.oasis_eu.portal.services; import com.google.common.base.Strings; import org.joda.time.format.DateTimeFormat; import org.markdown4j.Markdown4jProcessor; import org.oasis_eu.portal.core.dao.CatalogStore; import org.oasis_eu.portal.core.model.catalog.ApplicationInstance; import org.oasis_eu.portal.core.model.catalog.CatalogEntry; import org.oasis_eu.portal.model.notifications.NotifApp; import org.oasis_eu.portal.model.notifications.UserNotification; import org.oasis_eu.portal.model.notifications.UserNotificationResponse; import org.oasis_eu.spring.kernel.model.InboundNotification; import org.oasis_eu.spring.kernel.model.NotificationStatus; import org.oasis_eu.spring.kernel.service.NotificationService; import org.oasis_eu.spring.kernel.service.UserInfoService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.MessageSource; import org.springframework.stereotype.Service; import org.springframework.web.servlet.support.RequestContextUtils; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; /** * User: schambon * Date: 6/13/14 */ @Service public class PortalNotificationService { @SuppressWarnings("unused") private static final Logger logger = LoggerFactory.getLogger(PortalNotificationService.class); @Autowired private NotificationService notificationService; @Autowired private CatalogStore catalogStore; @Autowired private UserInfoService userInfoHelper; @Autowired private HttpServletRequest request; @Value("${application.notificationsEnabled:true}") private boolean notificationsEnabled; @Value("${application.devmode:false}") private boolean devmode; @Autowired private MessageSource messageSource; public int countNotifications() { if (!notificationsEnabled) { return 0; } // TODO NB. 
In case the user has +300 notifications, it will be fetch ALL the notification content each time, // this to be filtered and counted at the end, which implies use of unnecessary networking/processing tasks return (int) notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), NotificationStatus.UNREAD) .stream() .count(); } public UserNotificationResponse getNotifications(NotificationStatus status) { return getNotifications(RequestContextUtils.getLocale(request), status); } private UserNotificationResponse getNotifications(Locale locale, NotificationStatus status) { if (!notificationsEnabled) { return new UserNotificationResponse(); } List<InboundNotification> notifications = notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), status); List<UserNotification> notifs = extractNotifications(locale, status, notifications); List<NotifApp> notifApps = notifs.stream() .filter(userNotification -> userNotification.getApplicationId() != null) .map(userNotification -> new NotifApp(userNotification.getApplicationId(), userNotification.getAppName())) .distinct() .sorted() .collect(Collectors.toList()); return new UserNotificationResponse(notifs, notifApps); } private List<UserNotification> extractNotifications(Locale locale, NotificationStatus status, List<InboundNotification> notifications) { return notifications .stream() .filter(n -> NotificationStatus.ANY.equals(status) || status.equals(n.getStatus())) .map(n -> { UserNotification notif = new UserNotification(); CatalogEntry catalogEntry = null; if (n.getServiceId() != null) { catalogEntry = catalogStore.findService(n.getServiceId()); if (catalogEntry == null) { return null; // skip deleted service, probable (?) companion case to #179 Bug with notifications referring destroyed app instances // TODO LATER keep service but with "deleted" flag so it doesn't happen (rather than auto deleting this portal data) } notif.setAppName(catalogEntry.getName(locale)); notif.setServiceId(n.getServiceId()); notif.setApplicationId(catalogEntry.getId()); } else if (n.getInstanceId() != null) { ApplicationInstance instance = catalogStore.findApplicationInstance(n.getInstanceId()); if (instance == null) { // case of #179 Bug with notifications referring destroyed app instances or #206 500 on portal notification api // LATER we could keep app instance with a "deleted" flag so it doesn't happen (rather than auto deleting this portal data), // but this wouldn't address the Forbidden case) if (devmode) { notif.setAppName("Application with deleted or forbidden instance"); // to help debug notif.setServiceId(""); } else { return null; // skip deleted or (newly) Forbidden app instance (rather than displaying no name) } } else { CatalogEntry application = catalogStore.findApplication(instance.getApplicationId()); notif.setAppName(application.getName(locale)); notif.setServiceId(n.getInstanceId()); notif.setApplicationId(application.getId()); } } notif.setDate(n.getTime()); notif.setDateText(DateTimeFormat.forPattern(DateTimeFormat.patternForStyle("MS", locale)).print(n.getTime())); notif.setFormattedText(getFormattedText(n, locale)); notif.setId(n.getId()); if (Strings.isNullOrEmpty(n.getActionUri())) { if (catalogEntry != null) { notif.setUrl(catalogEntry.getNotificationUrl()); } } else { notif.setUrl(n.getActionUri()); } if (Strings.isNullOrEmpty(n.getActionLabel())) { notif.setActionText(messageSource.getMessage("notif.manage", new Object[0], locale)); } else { notif.setActionText(n.getActionLabel(locale)); } 
notif.setStatus(n.getStatus()); return notif; }) .filter(n -> n != null) // case of deleted or Forbidden app instance, see above .sorted((n1, n2) -> n1.getDate() != null && (n2.getDate() == null // some old notif, but would mean "now" for joda time || n1.getDate().isAfter(n2.getDate())) ? -1 : 1) .collect(Collectors.toList()); } public Map<String, Integer> getAppNotificationCounts() { List<InboundNotification> inboundNotifications = notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), NotificationStatus.UNREAD) .stream() .filter(inboundNotification -> inboundNotification.getServiceId() != null || inboundNotification.getInstanceId() != null) .collect(Collectors.toList()); List<UserNotification> userNotifications = extractNotifications(RequestContextUtils.getLocale(request), NotificationStatus.UNREAD, inboundNotifications); return userNotifications.stream() .filter(userNotification -> !Strings.isNullOrEmpty(userNotification.getServiceId())) .collect(Collectors.groupingBy(UserNotification::getServiceId, Collectors.reducing(0, n -> 1, Integer::sum))); } private static String getFormattedText(InboundNotification notification, Locale locale) { String formattedText; String message = notification.getMessage(locale).replaceAll("[<>]", ""); try { formattedText = new Markdown4jProcessor().process(message); } catch (IOException ignore) { formattedText = message; } return formattedText; } public void archive(String notificationId) { if (!notificationsEnabled) { return; } notificationService.setMessageStatus(userInfoHelper.currentUser().getUserId(), Collections.singletonList(notificationId), NotificationStatus.READ); } }
src/main/java/org/oasis_eu/portal/services/PortalNotificationService.java
package org.oasis_eu.portal.services; import com.google.common.base.Strings; import org.joda.time.format.DateTimeFormat; import org.markdown4j.Markdown4jProcessor; import org.oasis_eu.portal.core.dao.CatalogStore; import org.oasis_eu.portal.core.model.catalog.ApplicationInstance; import org.oasis_eu.portal.core.model.catalog.CatalogEntry; import org.oasis_eu.portal.model.notifications.NotifApp; import org.oasis_eu.portal.model.notifications.UserNotification; import org.oasis_eu.portal.model.notifications.UserNotificationResponse; import org.oasis_eu.spring.kernel.model.InboundNotification; import org.oasis_eu.spring.kernel.model.NotificationStatus; import org.oasis_eu.spring.kernel.service.NotificationService; import org.oasis_eu.spring.kernel.service.UserInfoService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.MessageSource; import org.springframework.stereotype.Service; import org.springframework.web.servlet.support.RequestContextUtils; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; /** * User: schambon * Date: 6/13/14 */ @Service public class PortalNotificationService { @SuppressWarnings("unused") private static final Logger logger = LoggerFactory.getLogger(PortalNotificationService.class); @Autowired private NotificationService notificationService; @Autowired private CatalogStore catalogStore; @Autowired private UserInfoService userInfoHelper; @Autowired private HttpServletRequest request; @Value("${application.notificationsEnabled:true}") private boolean notificationsEnabled; @Value("${application.devmode:false}") private boolean devmode; @Autowired private MessageSource messageSource; public int countNotifications() { if (!notificationsEnabled) { return 0; } // TODO NB. 
In case the user has +300 notifications, it will be fetch ALL the notification content each time, // this to be filtered and counted at the end, which implies use of unnecessary networking/processing tasks return (int) notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), NotificationStatus.UNREAD) .stream() .count(); } public UserNotificationResponse getNotifications(NotificationStatus status) { return getNotifications(RequestContextUtils.getLocale(request), status); } private UserNotificationResponse getNotifications(Locale locale, NotificationStatus status) { if (!notificationsEnabled) { return new UserNotificationResponse(); } List<InboundNotification> notifications = notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), NotificationStatus.ANY); List<UserNotification> notifs = extractNotifications(locale, status, notifications); List<NotifApp> notifApps = notifs.stream() .filter(userNotification -> userNotification.getApplicationId() != null) .map(userNotification -> new NotifApp(userNotification.getApplicationId(), userNotification.getAppName())) .distinct() .sorted() .collect(Collectors.toList()); return new UserNotificationResponse(notifs, notifApps); } private List<UserNotification> extractNotifications(Locale locale, NotificationStatus status, List<InboundNotification> notifications) { return notifications .stream() .filter(n -> NotificationStatus.ANY.equals(status) || status.equals(n.getStatus())) .map(n -> { UserNotification notif = new UserNotification(); CatalogEntry catalogEntry = null; if (n.getServiceId() != null) { catalogEntry = catalogStore.findService(n.getServiceId()); if (catalogEntry == null) { return null; // skip deleted service, probable (?) companion case to #179 Bug with notifications referring destroyed app instances // TODO LATER keep service but with "deleted" flag so it doesn't happen (rather than auto deleting this portal data) } notif.setAppName(catalogEntry.getName(locale)); notif.setServiceId(n.getServiceId()); notif.setApplicationId(catalogEntry.getId()); } else if (n.getInstanceId() != null) { ApplicationInstance instance = catalogStore.findApplicationInstance(n.getInstanceId()); if (instance == null) { // case of #179 Bug with notifications referring destroyed app instances or #206 500 on portal notification api // LATER we could keep app instance with a "deleted" flag so it doesn't happen (rather than auto deleting this portal data), // but this wouldn't address the Forbidden case) if (devmode) { notif.setAppName("Application with deleted or forbidden instance"); // to help debug notif.setServiceId(""); } else { return null; // skip deleted or (newly) Forbidden app instance (rather than displaying no name) } } else { CatalogEntry application = catalogStore.findApplication(instance.getApplicationId()); notif.setAppName(application.getName(locale)); notif.setServiceId(n.getInstanceId()); notif.setApplicationId(application.getId()); } } notif.setDate(n.getTime()); notif.setDateText(DateTimeFormat.forPattern(DateTimeFormat.patternForStyle("MS", locale)).print(n.getTime())); notif.setFormattedText(getFormattedText(n, locale)); notif.setId(n.getId()); if (Strings.isNullOrEmpty(n.getActionUri())) { if (catalogEntry != null) { notif.setUrl(catalogEntry.getNotificationUrl()); } } else { notif.setUrl(n.getActionUri()); } if (Strings.isNullOrEmpty(n.getActionLabel())) { notif.setActionText(messageSource.getMessage("notif.manage", new Object[0], locale)); } else { notif.setActionText(n.getActionLabel(locale)); } 
notif.setStatus(n.getStatus()); return notif; }) .filter(n -> n != null) // case of deleted or Forbidden app instance, see above .sorted((n1, n2) -> n1.getDate() != null && (n2.getDate() == null // some old notif, but would mean "now" for joda time || n1.getDate().isAfter(n2.getDate())) ? -1 : 1) .collect(Collectors.toList()); } public Map<String, Integer> getAppNotificationCounts() { List<InboundNotification> inboundNotifications = notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), NotificationStatus.UNREAD) .stream() .filter(inboundNotification -> inboundNotification.getServiceId() != null || inboundNotification.getInstanceId() != null) .collect(Collectors.toList()); List<UserNotification> userNotifications = extractNotifications(RequestContextUtils.getLocale(request), NotificationStatus.UNREAD, inboundNotifications); return userNotifications.stream() .filter(userNotification -> !Strings.isNullOrEmpty(userNotification.getServiceId())) .collect(Collectors.groupingBy(UserNotification::getServiceId, Collectors.reducing(0, n -> 1, Integer::sum))); } private static String getFormattedText(InboundNotification notification, Locale locale) { String formattedText; String message = notification.getMessage(locale).replaceAll("[<>]", ""); try { formattedText = new Markdown4jProcessor().process(message); } catch (IOException ignore) { formattedText = message; } return formattedText; } public void archive(String notificationId) { if (!notificationsEnabled) { return; } notificationService.setMessageStatus(userInfoHelper.currentUser().getUserId(), Collections.singletonList(notificationId), NotificationStatus.READ); } }
Use provided status when asking for notifications
src/main/java/org/oasis_eu/portal/services/PortalNotificationService.java
Use provided status when asking for notifications
<ide><path>rc/main/java/org/oasis_eu/portal/services/PortalNotificationService.java <ide> } <ide> <ide> List<InboundNotification> notifications = <del> notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), NotificationStatus.ANY); <add> notificationService.getNotifications(userInfoHelper.currentUser().getUserId(), status); <ide> <ide> List<UserNotification> notifs = extractNotifications(locale, status, notifications); <ide>
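The fix above forwards the caller's status to the kernel query instead of always requesting NotificationStatus.ANY and relying on the later stream filter. Below is a minimal sketch of that filter step on its own, with a hypothetical enum and notification type standing in for the kernel classes:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class StatusFilterExample {

    enum Status { READ, UNREAD, ANY }

    static class Notification {
        final String id;
        final Status status;

        Notification(String id, Status status) {
            this.id = id;
            this.status = status;
        }
    }

    // Keeps everything when ANY is requested, otherwise only exact matches,
    // mirroring the status check done per notification in the service.
    static List<Notification> filterByStatus(List<Notification> notifications, Status requested) {
        List<Notification> result = new ArrayList<>();
        for (Notification n : notifications) {
            if (requested == Status.ANY || n.status == requested) {
                result.add(n);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        List<Notification> all = Arrays.asList(
                new Notification("a", Status.UNREAD),
                new Notification("b", Status.READ));
        System.out.println(filterByStatus(all, Status.UNREAD).size()); // 1
    }
}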
Java
apache-2.0
b8f57bc60a3819e67aa9b203ce695cea14ff407f
0
square/leakcanary,square/leakcanary,square/leakcanary
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.leakcanary; import android.support.annotation.NonNull; import java.lang.ref.PhantomReference; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.util.EnumSet; import static android.os.Build.MANUFACTURER; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1; import static android.os.Build.VERSION_CODES.JELLY_BEAN; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR2; import static android.os.Build.VERSION_CODES.KITKAT; import static android.os.Build.VERSION_CODES.LOLLIPOP; import static android.os.Build.VERSION_CODES.LOLLIPOP_MR1; import static android.os.Build.VERSION_CODES.M; import static android.os.Build.VERSION_CODES.N; import static android.os.Build.VERSION_CODES.N_MR1; import static android.os.Build.VERSION_CODES.O; import static android.os.Build.VERSION_CODES.O_MR1; import static android.os.Build.VERSION_CODES.P; import static com.squareup.leakcanary.AndroidWatchExecutor.LEAK_CANARY_THREAD_NAME; import static com.squareup.leakcanary.internal.LeakCanaryInternals.HUAWEI; import static com.squareup.leakcanary.internal.LeakCanaryInternals.LENOVO; import static com.squareup.leakcanary.internal.LeakCanaryInternals.LG; import static com.squareup.leakcanary.internal.LeakCanaryInternals.MEIZU; import static com.squareup.leakcanary.internal.LeakCanaryInternals.MOTOROLA; import static com.squareup.leakcanary.internal.LeakCanaryInternals.NVIDIA; import static com.squareup.leakcanary.internal.LeakCanaryInternals.SAMSUNG; import static com.squareup.leakcanary.internal.LeakCanaryInternals.VIVO; /** * This class is a work in progress. You can help by reporting leak traces that seem to be caused * by the Android SDK, here: https://github.com/square/leakcanary/issues/new * * We filter on SDK versions and Manufacturers because many of those leaks are specific to a given * manufacturer implementation, they usually share their builds across multiple models, and the * leaks eventually get fixed in newer versions. * * Most app developers should use {@link #createAppDefaults()}. However, you can also pick the * leaks you want to ignore by creating an {@link EnumSet} that matches your needs and calling * {@link #createBuilder(EnumSet)} */ @SuppressWarnings({ "unused", "WeakerAccess" }) // Public API. public enum AndroidExcludedRefs { // ######## Android SDK Excluded refs ######## ACTIVITY_CLIENT_RECORD__NEXT_IDLE(SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.app.ActivityThread$ActivityClientRecord", "nextIdle") .reason("Android AOSP sometimes keeps a reference to a destroyed activity as a" + " nextIdle client record in the android.app.ActivityThread.mActivities map." 
+ " Not sure what's going on there, input welcome."); } }, SPAN_CONTROLLER(SDK_INT <= KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "Editor inserts a special span, which has a reference to the EditText. That span is a" + " NoCopySpan, which makes sure it gets dropped when creating a new" + " SpannableStringBuilder from a given CharSequence." + " TextView.onSaveInstanceState() does a copy of its mText before saving it in the" + " bundle. Prior to KitKat, that copy was done using the SpannableString" + " constructor, instead of SpannableStringBuilder. The SpannableString constructor" + " does not drop NoCopySpan spans. So we end up with a saved state that holds a" + " reference to the textview and therefore the entire view hierarchy & activity" + " context. Fix: https://github.com/android/platform_frameworks_base/commit" + "/af7dcdf35a37d7a7dbaad7d9869c1c91bce2272b ." + " To fix this, you could override TextView.onSaveInstanceState(), and then use" + " reflection to access TextView.SavedState.mText and clear the NoCopySpan spans."; excluded.instanceField("android.widget.Editor$EasyEditSpanController", "this$0") .reason(reason); excluded.instanceField("android.widget.Editor$SpanController", "this$0").reason(reason); } }, MEDIA_SESSION_LEGACY_HELPER__SINSTANCE(SDK_INT == LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.media.session.MediaSessionLegacyHelper", "sInstance") .reason("MediaSessionLegacyHelper is a static singleton that is lazily instantiated and" + " keeps a reference to the context it's given the first time" + " MediaSessionLegacyHelper.getHelper() is called." + " This leak was introduced in android-5.0.1_r1 and fixed in Android 5.1.0_r1 by" + " calling context.getApplicationContext()." + " Fix: https://github.com/android/platform_frameworks_base/commit" + "/9b5257c9c99c4cb541d8e8e78fb04f008b1a9091" + " To fix this, you could call MediaSessionLegacyHelper.getHelper() early" + " in Application.onCreate() and pass it the application context."); } }, TEXT_LINE__SCACHED(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.text.TextLine", "sCached") .reason("TextLine.sCached is a pool of 3 TextLine instances. TextLine.recycle() has had" + " at least two bugs that created memory leaks by not correctly clearing the" + " recycled TextLine instances. The first was fixed in android-5.1.0_r1:" + " https://github.com/android/platform_frameworks_base/commit" + "/893d6fe48d37f71e683f722457bea646994a10" + " The second was fixed, not released yet:" + " https://github.com/android/platform_frameworks_base/commit" + "/b3a9bc038d3a218b1dbdf7b5668e3d6c12be5e" + " To fix this, you could access TextLine.sCached and clear the pool every now" + " and then (e.g. on activity destroy)."); } }, BLOCKING_QUEUE() { @Override void add(ExcludedRefs.Builder excluded) { String reason = "A thread waiting on a blocking queue will leak the last" + " dequeued object as a stack local reference. So when a HandlerThread becomes idle, it" + " keeps a local reference to the last message it received. That message then gets" + " recycled and can be used again. As long as all messages are recycled after being" + "used, this won't be a problem, because these references are cleared when being" + "recycled. However, dialogs create template Message instances to be copied when a" + "message needs to be sent. 
These Message templates holds references to the dialog" + "listeners, which most likely leads to holding a reference onto the activity in some" + "way. Dialogs never recycle their template Message, assuming these Message instances" + " will get GCed when the dialog is GCed." + " The combination of these two things creates a high potential for memory leaks as soon" + " as you use dialogs. These memory leaks might be temporary, but some handler threads" + " sleep for a long time." + " To fix this, you could post empty messages to the idle handler threads from time to" + " time. This won't be easy because you cannot access all handler threads, but a library" + "that is widely used should consider doing this for its own handler threads. This leaks" + "has been shown to happen in both Dalvik and ART."; excluded.instanceField("android.os.Message", "obj").reason(reason); excluded.instanceField("android.os.Message", "next").reason(reason); excluded.instanceField("android.os.Message", "target").reason(reason); } }, INPUT_METHOD_MANAGER__SERVED_VIEW(SDK_INT >= ICE_CREAM_SANDWICH_MR1 && SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "When we detach a view that receives keyboard input, the InputMethodManager" + " leaks a reference to it until a new view asks for keyboard input." + " Tracked here: https://code.google.com/p/android/issues/detail?id=171190" + " Hack: https://gist.github.com/pyricau/4df64341cc978a7de414"; excluded.instanceField("android.view.inputmethod.InputMethodManager", "mNextServedView") .reason(reason); excluded.instanceField("android.view.inputmethod.InputMethodManager", "mServedView") .reason(reason); excluded.instanceField("android.view.inputmethod.InputMethodManager", "mServedInputConnection").reason(reason); } }, INPUT_METHOD_MANAGER__ROOT_VIEW(SDK_INT >= ICE_CREAM_SANDWICH_MR1 && SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.inputmethod.InputMethodManager", "mCurRootView") .reason("The singleton InputMethodManager is holding a reference to mCurRootView long" + " after the activity has been destroyed." + " Observed on ICS MR1: https://github.com/square/leakcanary/issues/1" + "#issuecomment-100579429" + " Hack: https://gist.github.com/pyricau/4df64341cc978a7de414"); } }, LAYOUT_TRANSITION(SDK_INT >= ICE_CREAM_SANDWICH && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.animation.LayoutTransition$1", "val$parent") .reason("LayoutTransition leaks parent ViewGroup through" + " ViewTreeObserver.OnPreDrawListener When triggered, this leaks stays until the" + " window is destroyed. Tracked here:" + " https://code.google.com/p/android/issues/detail?id=171830"); } }, SPELL_CHECKER_SESSION(SDK_INT >= JELLY_BEAN && SDK_INT <= N) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.textservice.SpellCheckerSession$1", "this$0") .reason("SpellCheckerSessionListenerImpl.mHandler is leaking destroyed Activity when the" + " SpellCheckerSession is closed before the service is connected." + " Tracked here: https://code.google.com/p/android/issues/detail?id=172542"); } }, SPELL_CHECKER(SDK_INT == LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.widget.SpellChecker$1", "this$0") .reason("SpellChecker holds on to a detached view that points to a destroyed activity." 
+ "mSpellRunnable is being enqueued, and that callback should be removed when " + "closeSession() is called. Maybe closeSession() wasn't called, or maybe it was " + "called after the view was detached."); } }, ACTIVITY_CHOOSE_MODEL(SDK_INT > ICE_CREAM_SANDWICH && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "ActivityChooserModel holds a static reference to the last set" + " ActivityChooserModelPolicy which can be an activity context." + " Tracked here: https://code.google.com/p/android/issues/detail?id=172659" + " Hack: https://gist.github.com/andaag/b05ab66ed0f06167d6e0"; excluded.instanceField("android.support.v7.internal.widget.ActivityChooserModel", "mActivityChoserModelPolicy").reason(reason); excluded.instanceField("android.widget.ActivityChooserModel", "mActivityChoserModelPolicy") .reason(reason); } }, SPEECH_RECOGNIZER(SDK_INT < LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.speech.SpeechRecognizer$InternalListener", "this$0") .reason("Prior to Android 5, SpeechRecognizer.InternalListener was a non static inner" + " class and leaked the SpeechRecognizer which leaked an activity context." + " Fixed in AOSP: https://github.com/android/platform_frameworks_base/commit" + " /b37866db469e81aca534ff6186bdafd44352329b"); } }, ACCOUNT_MANAGER(SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.accounts.AccountManager$AmsTask$Response", "this$1") .reason("AccountManager$AmsTask$Response is a stub and is held in memory by native code," + " probably because the reference to the response in the other process hasn't been" + " cleared." + " AccountManager$AmsTask is holding on to the activity reference to use for" + " launching a new sub- Activity." + " Tracked here: https://code.google.com/p/android/issues/detail?id=173689" + " Fix: Pass a null activity reference to the AccountManager methods and then deal" + " with the returned future to to get the result and correctly start an activity" + " when it's available."); } }, MEDIA_SCANNER_CONNECTION(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.media.MediaScannerConnection", "mContext") .reason("The static method MediaScannerConnection.scanFile() takes an activity context" + " but the service might not disconnect after the activity has been destroyed." + " Tracked here: https://code.google.com/p/android/issues/detail?id=173788" + " Fix: Create an instance of MediaScannerConnection yourself and pass in the" + " application context. Call connect() and disconnect() manually."); } }, USER_MANAGER__SINSTANCE(SDK_INT >= JELLY_BEAN_MR2 && SDK_INT < O) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.os.UserManager", "mContext") .reason("UserManager has a static sInstance field that creates an instance and caches it" + " the first time UserManager.get() is called. This instance is created with the" + " outer context (which is an activity base context)." 
+ " Tracked here: https://code.google.com/p/android/issues/detail?id=173789" + " Introduced by: https://github.com/android/platform_frameworks_base/commit" + "/27db46850b708070452c0ce49daf5f79503fbde6" + " Fix: trigger a call to UserManager.get() in Application.onCreate(), so that the" + " UserManager instance gets cached with a reference to the application context."); } }, APP_WIDGET_HOST_CALLBACKS(SDK_INT < LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.appwidget.AppWidgetHost$Callbacks", "this$0") .reason("android.appwidget.AppWidgetHost$Callbacks is a stub and is held in memory native" + " code. The reference to the `mContext` was not being cleared, which caused the" + " Callbacks instance to retain this reference" + " Fixed in AOSP: https://github.com/android/platform_frameworks_base/commit" + "/7a96f3c917e0001ee739b65da37b2fadec7d7765"); } }, AUDIO_MANAGER(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.media.AudioManager$1", "this$0") .reason("Prior to Android M, VideoView required audio focus from AudioManager and" + " never abandoned it, which leaks the Activity context through the AudioManager." + " The root of the problem is that AudioManager uses whichever" + " context it receives, which in the case of the VideoView example is an Activity," + " even though it only needs the application's context. The issue is fixed in" + " Android M, and the AudioManager now uses the application's context." + " Tracked here: https://code.google.com/p/android/issues/detail?id=152173" + " Fix: https://gist.github.com/jankovd/891d96f476f7a9ce24e2"); } }, EDITTEXT_BLINK_MESSAGEQUEUE(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.widget.Editor$Blink", "this$0") .reason("The EditText Blink of the Cursor is implemented using a callback and Messages," + " which trigger the display of the Cursor. If an AlertDialog or DialogFragment that" + " contains a blinking cursor is detached, a message is posted with a delay after the" + " dialog has been closed and as a result leaks the Activity." + " This can be fixed manually by calling TextView.setCursorVisible(false) in the" + " dismiss() method of the dialog." + " Tracked here: https://code.google.com/p/android/issues/detail?id=188551" + " Fixed in AOSP: https://android.googlesource.com/platform/frameworks/base/+" + "/5b734f2430e9f26c769d6af8ea5645e390fcf5af%5E%21/"); } }, CONNECTIVITY_MANAGER__SINSTANCE(SDK_INT <= M) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.net.ConnectivityManager", "sInstance") .reason("ConnectivityManager has a sInstance field that is set when the first" + "ConnectivityManager instance is created. ConnectivityManager has a mContext field." + "When calling activity.getSystemService(Context.CONNECTIVITY_SERVICE) , the first" + "ConnectivityManager instance is created with the activity context and stored in" + "sInstance. That activity context then leaks forever." + "Until this is fixed, app developers can prevent this leak by making sure the" + " ConnectivityManager is first created with an App Context. E.g. 
in some static" + " init do: context.getApplicationContext()" + ".getSystemService(Context.CONNECTIVITY_SERVICE)" + " Tracked here: https://code.google.com/p/android/issues/detail?id=198852" + " Introduced here: https://github.com/android/platform_frameworks_base/commit/" + "e0bef71662d81caaaa0d7214fb0bef5d39996a69"); } }, ACCESSIBILITY_NODE_INFO__MORIGINALTEXT(SDK_INT >= O && SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.accessibility.AccessibilityNodeInfo", "mOriginalText") .reason("AccessibilityNodeInfo has a static sPool of AccessibilityNodeInfo. When " + "AccessibilityNodeInfo instances are released back in the pool, " + "AccessibilityNodeInfo.clear() does not clear the mOriginalText field, which " + "causes spans to leak which in turns causes TextView.ChangeWatcher to leak and the " + "whole view hierarchy. Introduced here: https://android.googlesource.com/platform/" + "frameworks/base/+/193520e3dff5248ddcf8435203bf99d2ba667219%5E%21/core/java/" + "android/view/accessibility/AccessibilityNodeInfo.java"); } }, BACKDROP_FRAME_RENDERER__MDECORVIEW(SDK_INT >= N && SDK_INT <= O) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.android.internal.policy.BackdropFrameRenderer", "mDecorView") .reason("When BackdropFrameRenderer.releaseRenderer() is called, there's an unknown case " + "where mRenderer becomes null but mChoreographer doesn't and the thread doesn't" + "stop and ends up leaking mDecorView which itself holds on to a destroyed" + "activity"); } }, // ######## Manufacturer specific Excluded refs ######## INSTRUMENTATION_RECOMMEND_ACTIVITY( MEIZU.equals(MANUFACTURER) && SDK_INT >= LOLLIPOP && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.app.Instrumentation", "mRecommendActivity") .reason("Instrumentation would leak com.android.internal.app.RecommendActivity (in " + "framework.jar) in Meizu FlymeOS 4.5 and above, which is based on Android 5.0 and " + "above"); } }, DEVICE_POLICY_MANAGER__SETTINGS_OBSERVER( MOTOROLA.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { if (MOTOROLA.equals(MANUFACTURER) && SDK_INT == KITKAT) { excluded.instanceField("android.app.admin.DevicePolicyManager$SettingsObserver", "this$0") .reason("DevicePolicyManager keeps a reference to the context it has been created with" + " instead of extracting the application context. In this Motorola build," + " DevicePolicyManager has an inner SettingsObserver class that is a content" + " observer, which is held into memory by a binder transport object."); } } }, SPEN_GESTURE_MANAGER(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("com.samsung.android.smartclip.SpenGestureManager", "mContext") .reason("SpenGestureManager has a static mContext field that leaks a reference to the" + " activity. Yes, a STATIC mContext field."); } }, GESTURE_BOOST_MANAGER(HUAWEI.equals(MANUFACTURER) && SDK_INT >= N && SDK_INT <= N_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.gestureboost.GestureBoostManager", "mContext") .reason("GestureBoostManager is a static singleton that leaks an activity context." 
+ "Fix: https://github.com/square/leakcanary/issues/696#issuecomment-296420756"); } }, INPUT_METHOD_MANAGER__LAST_SERVED_VIEW( HUAWEI.equals(MANUFACTURER) && SDK_INT >= M && SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "HUAWEI added a mLastSrvView field to InputMethodManager" + " that leaks a reference to the last served view."; excluded.instanceField("android.view.inputmethod.InputMethodManager", "mLastSrvView") .reason(reason); } }, CLIPBOARD_UI_MANAGER__SINSTANCE( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.sec.clipboard.ClipboardUIManager", "mContext") .reason("ClipboardUIManager is a static singleton that leaks an activity context." + " Fix: trigger a call to ClipboardUIManager.getInstance() in Application.onCreate()" + " , so that the ClipboardUIManager instance gets cached with a reference to the" + " application context. Example: https://gist.github.com/cypressious/" + "91c4fb1455470d803a602838dfcd5774"); } }, SEM_CLIPBOARD_MANAGER__MCONTEXT( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= N) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.samsung.android.content.clipboard.SemClipboardManager", "mContext") .reason("SemClipboardManager is held in memory by an anonymous inner class " + "implementation of android.os.Binder, thereby leaking an activity context."); } }, SEM_EMERGENCY_MANAGER__MCONTEXT( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= N) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.samsung.android.emergencymode.SemEmergencyManager", "mContext") .reason("SemEmergencyManager is a static singleton that leaks a DecorContext. " + "Fix: https://gist.github.com/jankovd/a210460b814c04d500eb12025902d60d"); } }, BUBBLE_POPUP_HELPER__SHELPER( LG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.widget.BubblePopupHelper", "sHelper") .reason("A static helper for EditText bubble popups leaks a reference to the latest" + "focused view."); } }, LGCONTEXT__MCONTEXT(LG.equals(MANUFACTURER) && SDK_INT == LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.lge.systemservice.core.LGContext", "mContext") .reason("LGContext is a static singleton that leaks an activity context."); } }, AW_RESOURCE__SRESOURCES(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { // AwResource#setResources() is called with resources that hold a reference to the // activity context (instead of the application context) and doesn't clear it. // Not sure what's going on there, input welcome. 
excluded.staticField("com.android.org.chromium.android_webview.AwResource", "sResources"); } }, MAPPER_CLIENT(NVIDIA.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.nvidia.ControllerMapper.MapperClient$ServiceClient", "this$0") .reason("Not sure exactly what ControllerMapper is about, but there is an anonymous" + " Handler in ControllerMapper.MapperClient.ServiceClient, which leaks" + " ControllerMapper.MapperClient which leaks the activity context."); } }, TEXT_VIEW__MLAST_HOVERED_VIEW( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= O) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.widget.TextView", "mLastHoveredView") .reason("mLastHoveredView is a static field in TextView that leaks the last hovered" + " view."); } }, PERSONA_MANAGER(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.os.PersonaManager", "mContext") .reason("android.app.LoadedApk.mResources has a reference to" + " android.content.res.Resources.mPersonaManager which has a reference to" + " android.os.PersonaManager.mContext which is an activity."); } }, RESOURCES__MCONTEXT(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.content.res.Resources", "mContext") .reason("In AOSP the Resources class does not have a context." + " Here we have ZygoteInit.mResources (static field) holding on to a Resources" + " instance that has a context that is the activity." + " Observed here: https://github.com/square/leakcanary/issues/1#issue-74450184"); } }, VIEW_CONFIGURATION__MCONTEXT(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.ViewConfiguration", "mContext") .reason("In AOSP the ViewConfiguration class does not have a context." + " Here we have ViewConfiguration.sConfigurations (static field) holding on to a" + " ViewConfiguration instance that has a context that is the activity." + " Observed here: https://github.com/square/leakcanary/issues" + "/1#issuecomment-100324683"); } }, SYSTEM_SENSOR_MANAGER__MAPPCONTEXTIMPL((LENOVO.equals(MANUFACTURER) && SDK_INT == KITKAT) // || (VIVO.equals(MANUFACTURER) && SDK_INT == LOLLIPOP_MR1)) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.hardware.SystemSensorManager", "mAppContextImpl") .reason("SystemSensorManager stores a reference to context " + "in a static field in its constructor." + "Fix: use application context to get SensorManager"); } }, AUDIO_MANAGER__MCONTEXT_STATIC(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.media.AudioManager", "mContext_static") .reason("Samsung added a static mContext_static field to AudioManager, holds a reference" + " to the activity." + " Observed here: https://github.com/square/leakcanary/issues/32"); } }, ACTIVITY_MANAGER_MCONTEXT(SAMSUNG.equals(MANUFACTURER) && SDK_INT == LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.app.ActivityManager", "mContext") .reason("Samsung added a static mContext field to ActivityManager, holds a reference" + " to the activity." 
+ " Observed here: https://github.com/square/leakcanary/issues/177 Fix in comment:" + " https://github.com/square/leakcanary/issues/177#issuecomment-222724283"); } }, // ######## General Excluded refs ######## SOFT_REFERENCES { @Override void add(ExcludedRefs.Builder excluded) { excluded.clazz(WeakReference.class.getName()).alwaysExclude(); excluded.clazz(SoftReference.class.getName()).alwaysExclude(); excluded.clazz(PhantomReference.class.getName()).alwaysExclude(); excluded.clazz("java.lang.ref.Finalizer").alwaysExclude(); excluded.clazz("java.lang.ref.FinalizerReference").alwaysExclude(); } }, FINALIZER_WATCHDOG_DAEMON { @Override void add(ExcludedRefs.Builder excluded) { // If the FinalizerWatchdogDaemon thread is on the shortest path, then there was no other // reference to the object and it was about to be GCed. excluded.thread("FinalizerWatchdogDaemon").alwaysExclude(); } }, MAIN { @Override void add(ExcludedRefs.Builder excluded) { // The main thread stack is ever changing so local variables aren't likely to hold references // for long. If this is on the shortest path, it's probably that there's a longer path with // a real leak. excluded.thread("main").alwaysExclude(); } }, LEAK_CANARY_THREAD { @Override void add(ExcludedRefs.Builder excluded) { excluded.thread(LEAK_CANARY_THREAD_NAME).alwaysExclude(); } }, EVENT_RECEIVER__MMESSAGE_QUEUE { @Override void add(ExcludedRefs.Builder excluded) { // DisplayEventReceiver keeps a reference message queue object so that it is not GC'd while // the native peer of the receiver is using them. // The main thread message queue is held on by the main Looper, but that might be a longer // path. Let's not confuse people with a shorter path that is less meaningful. excluded.instanceField("android.view.Choreographer$FrameDisplayEventReceiver", "mMessageQueue").alwaysExclude(); } }, VIEWLOCATIONHOLDER_ROOT(SDK_INT == P) { @Override void add(ExcludedRefs.Builder excluded) { // In Android P, ViewLocationHolder has an mRoot field that is not cleared in its clear() // method. // Introduced in https://github.com/aosp-mirror/platform_frameworks_base/commit/86b326012813f09d8f1de7d6d26c986a909de894 // Bug report: https://issuetracker.google.com/issues/112792715 excluded.instanceField("android.view.ViewGroup$ViewLocationHolder", "mRoot"); } }; /** * This returns the references in the leak path that should be ignored by all on Android. */ public static @NonNull ExcludedRefs.Builder createAndroidDefaults() { return createBuilder( EnumSet.of(SOFT_REFERENCES, FINALIZER_WATCHDOG_DAEMON, MAIN, LEAK_CANARY_THREAD, EVENT_RECEIVER__MMESSAGE_QUEUE)); } /** * This returns the references in the leak path that can be ignored for app developers. This * doesn't mean there is no memory leak, to the contrary. However, some leaks are caused by bugs * in AOSP or manufacturer forks of AOSP. In such cases, there is very little we can do as app * developers except by resorting to serious hacks, so we remove the noise caused by those leaks. 
*/ public static @NonNull ExcludedRefs.Builder createAppDefaults() { return createBuilder(EnumSet.allOf(AndroidExcludedRefs.class)); } public static @NonNull ExcludedRefs.Builder createBuilder(EnumSet<AndroidExcludedRefs> refs) { ExcludedRefs.Builder excluded = ExcludedRefs.builder(); for (AndroidExcludedRefs ref : refs) { if (ref.applies) { ref.add(excluded); ((ExcludedRefs.BuilderWithParams) excluded).named(ref.name()); } } return excluded; } final boolean applies; AndroidExcludedRefs() { this(true); } AndroidExcludedRefs(boolean applies) { this.applies = applies; } abstract void add(ExcludedRefs.Builder excluded); }
leakcanary-android/src/main/java/com/squareup/leakcanary/AndroidExcludedRefs.java
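The class javadoc above points app developers at createAppDefaults(), or at createBuilder(EnumSet) to opt into a custom subset of exclusions. A minimal wiring sketch, not part of the commit, assuming the LeakCanary 1.x builder API (LeakCanary.refWatcher(...).excludedRefs(...).buildAndInstall()) and a hypothetical Application subclass; the method names outside AndroidExcludedRefs should be checked against the library version in use:

import android.app.Application;
import com.squareup.leakcanary.AndroidExcludedRefs;
import com.squareup.leakcanary.ExcludedRefs;
import com.squareup.leakcanary.LeakCanary;
import java.util.EnumSet;

public class ExampleApplication extends Application {
  @Override public void onCreate() {
    super.onCreate();
    // Every constant in AndroidExcludedRefs, filtered at build time by its `applies` flag.
    ExcludedRefs appDefaults = AndroidExcludedRefs.createAppDefaults().build();
    // Alternative (unused below, shown only to illustrate createBuilder): cherry-pick the
    // general exclusions that apply to every Android app.
    ExcludedRefs generalOnly = AndroidExcludedRefs.createBuilder(EnumSet.of(
        AndroidExcludedRefs.SOFT_REFERENCES,
        AndroidExcludedRefs.FINALIZER_WATCHDOG_DAEMON,
        AndroidExcludedRefs.MAIN,
        AndroidExcludedRefs.LEAK_CANARY_THREAD,
        AndroidExcludedRefs.EVENT_RECEIVER__MMESSAGE_QUEUE)).build();
    // Install a RefWatcher that ignores the excluded references when computing leak traces.
    LeakCanary.refWatcher(this)
        .excludedRefs(appDefaults)
        .buildAndInstall();
  }
}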
/* * Copyright (C) 2015 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.leakcanary; import android.support.annotation.NonNull; import java.lang.ref.PhantomReference; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.util.EnumSet; import static android.os.Build.MANUFACTURER; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1; import static android.os.Build.VERSION_CODES.JELLY_BEAN; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR2; import static android.os.Build.VERSION_CODES.KITKAT; import static android.os.Build.VERSION_CODES.LOLLIPOP; import static android.os.Build.VERSION_CODES.LOLLIPOP_MR1; import static android.os.Build.VERSION_CODES.M; import static android.os.Build.VERSION_CODES.N; import static android.os.Build.VERSION_CODES.N_MR1; import static android.os.Build.VERSION_CODES.O; import static android.os.Build.VERSION_CODES.O_MR1; import static android.os.Build.VERSION_CODES.P; import static com.squareup.leakcanary.AndroidWatchExecutor.LEAK_CANARY_THREAD_NAME; import static com.squareup.leakcanary.internal.LeakCanaryInternals.HUAWEI; import static com.squareup.leakcanary.internal.LeakCanaryInternals.LENOVO; import static com.squareup.leakcanary.internal.LeakCanaryInternals.LG; import static com.squareup.leakcanary.internal.LeakCanaryInternals.MEIZU; import static com.squareup.leakcanary.internal.LeakCanaryInternals.MOTOROLA; import static com.squareup.leakcanary.internal.LeakCanaryInternals.NVIDIA; import static com.squareup.leakcanary.internal.LeakCanaryInternals.SAMSUNG; import static com.squareup.leakcanary.internal.LeakCanaryInternals.VIVO; /** * This class is a work in progress. You can help by reporting leak traces that seem to be caused * by the Android SDK, here: https://github.com/square/leakcanary/issues/new * * We filter on SDK versions and Manufacturers because many of those leaks are specific to a given * manufacturer implementation, they usually share their builds across multiple models, and the * leaks eventually get fixed in newer versions. * * Most app developers should use {@link #createAppDefaults()}. However, you can also pick the * leaks you want to ignore by creating an {@link EnumSet} that matches your needs and calling * {@link #createBuilder(EnumSet)} */ @SuppressWarnings({ "unused", "WeakerAccess" }) // Public API. public enum AndroidExcludedRefs { // ######## Android SDK Excluded refs ######## ACTIVITY_CLIENT_RECORD__NEXT_IDLE(SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.app.ActivityThread$ActivityClientRecord", "nextIdle") .reason("Android AOSP sometimes keeps a reference to a destroyed activity as a" + " nextIdle client record in the android.app.ActivityThread.mActivities map." 
+ " Not sure what's going on there, input welcome."); } }, SPAN_CONTROLLER(SDK_INT <= KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "Editor inserts a special span, which has a reference to the EditText. That span is a" + " NoCopySpan, which makes sure it gets dropped when creating a new" + " SpannableStringBuilder from a given CharSequence." + " TextView.onSaveInstanceState() does a copy of its mText before saving it in the" + " bundle. Prior to KitKat, that copy was done using the SpannableString" + " constructor, instead of SpannableStringBuilder. The SpannableString constructor" + " does not drop NoCopySpan spans. So we end up with a saved state that holds a" + " reference to the textview and therefore the entire view hierarchy & activity" + " context. Fix: https://github.com/android/platform_frameworks_base/commit" + "/af7dcdf35a37d7a7dbaad7d9869c1c91bce2272b ." + " To fix this, you could override TextView.onSaveInstanceState(), and then use" + " reflection to access TextView.SavedState.mText and clear the NoCopySpan spans."; excluded.instanceField("android.widget.Editor$EasyEditSpanController", "this$0") .reason(reason); excluded.instanceField("android.widget.Editor$SpanController", "this$0").reason(reason); } }, MEDIA_SESSION_LEGACY_HELPER__SINSTANCE(SDK_INT == LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.media.session.MediaSessionLegacyHelper", "sInstance") .reason("MediaSessionLegacyHelper is a static singleton that is lazily instantiated and" + " keeps a reference to the context it's given the first time" + " MediaSessionLegacyHelper.getHelper() is called." + " This leak was introduced in android-5.0.1_r1 and fixed in Android 5.1.0_r1 by" + " calling context.getApplicationContext()." + " Fix: https://github.com/android/platform_frameworks_base/commit" + "/9b5257c9c99c4cb541d8e8e78fb04f008b1a9091" + " To fix this, you could call MediaSessionLegacyHelper.getHelper() early" + " in Application.onCreate() and pass it the application context."); } }, TEXT_LINE__SCACHED(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.text.TextLine", "sCached") .reason("TextLine.sCached is a pool of 3 TextLine instances. TextLine.recycle() has had" + " at least two bugs that created memory leaks by not correctly clearing the" + " recycled TextLine instances. The first was fixed in android-5.1.0_r1:" + " https://github.com/android/platform_frameworks_base/commit" + "/893d6fe48d37f71e683f722457bea646994a10" + " The second was fixed, not released yet:" + " https://github.com/android/platform_frameworks_base/commit" + "/b3a9bc038d3a218b1dbdf7b5668e3d6c12be5e" + " To fix this, you could access TextLine.sCached and clear the pool every now" + " and then (e.g. on activity destroy)."); } }, BLOCKING_QUEUE() { @Override void add(ExcludedRefs.Builder excluded) { String reason = "A thread waiting on a blocking queue will leak the last" + " dequeued object as a stack local reference. So when a HandlerThread becomes idle, it" + " keeps a local reference to the last message it received. That message then gets" + " recycled and can be used again. As long as all messages are recycled after being" + "used, this won't be a problem, because these references are cleared when being" + "recycled. However, dialogs create template Message instances to be copied when a" + "message needs to be sent. 
These Message templates holds references to the dialog" + "listeners, which most likely leads to holding a reference onto the activity in some" + "way. Dialogs never recycle their template Message, assuming these Message instances" + " will get GCed when the dialog is GCed." + " The combination of these two things creates a high potential for memory leaks as soon" + " as you use dialogs. These memory leaks might be temporary, but some handler threads" + " sleep for a long time." + " To fix this, you could post empty messages to the idle handler threads from time to" + " time. This won't be easy because you cannot access all handler threads, but a library" + "that is widely used should consider doing this for its own handler threads. This leaks" + "has been shown to happen in both Dalvik and ART."; excluded.instanceField("android.os.Message", "obj").reason(reason); excluded.instanceField("android.os.Message", "next").reason(reason); excluded.instanceField("android.os.Message", "target").reason(reason); } }, INPUT_METHOD_MANAGER__SERVED_VIEW(SDK_INT >= ICE_CREAM_SANDWICH_MR1 && SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "When we detach a view that receives keyboard input, the InputMethodManager" + " leaks a reference to it until a new view asks for keyboard input." + " Tracked here: https://code.google.com/p/android/issues/detail?id=171190" + " Hack: https://gist.github.com/pyricau/4df64341cc978a7de414"; excluded.instanceField("android.view.inputmethod.InputMethodManager", "mNextServedView") .reason(reason); excluded.instanceField("android.view.inputmethod.InputMethodManager", "mServedView") .reason(reason); excluded.instanceField("android.view.inputmethod.InputMethodManager", "mServedInputConnection").reason(reason); } }, INPUT_METHOD_MANAGER__ROOT_VIEW(SDK_INT >= ICE_CREAM_SANDWICH_MR1 && SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.inputmethod.InputMethodManager", "mCurRootView") .reason("The singleton InputMethodManager is holding a reference to mCurRootView long" + " after the activity has been destroyed." + " Observed on ICS MR1: https://github.com/square/leakcanary/issues/1" + "#issuecomment-100579429" + " Hack: https://gist.github.com/pyricau/4df64341cc978a7de414"); } }, LAYOUT_TRANSITION(SDK_INT >= ICE_CREAM_SANDWICH && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.animation.LayoutTransition$1", "val$parent") .reason("LayoutTransition leaks parent ViewGroup through" + " ViewTreeObserver.OnPreDrawListener When triggered, this leaks stays until the" + " window is destroyed. Tracked here:" + " https://code.google.com/p/android/issues/detail?id=171830"); } }, SPELL_CHECKER_SESSION(SDK_INT >= JELLY_BEAN && SDK_INT <= N) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.textservice.SpellCheckerSession$1", "this$0") .reason("SpellCheckerSessionListenerImpl.mHandler is leaking destroyed Activity when the" + " SpellCheckerSession is closed before the service is connected." + " Tracked here: https://code.google.com/p/android/issues/detail?id=172542"); } }, SPELL_CHECKER(SDK_INT == LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.widget.SpellChecker$1", "this$0") .reason("SpellChecker holds on to a detached view that points to a destroyed activity." 
+ "mSpellRunnable is being enqueued, and that callback should be removed when " + "closeSession() is called. Maybe closeSession() wasn't called, or maybe it was " + "called after the view was detached."); } }, ACTIVITY_CHOOSE_MODEL(SDK_INT > ICE_CREAM_SANDWICH && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "ActivityChooserModel holds a static reference to the last set" + " ActivityChooserModelPolicy which can be an activity context." + " Tracked here: https://code.google.com/p/android/issues/detail?id=172659" + " Hack: https://gist.github.com/andaag/b05ab66ed0f06167d6e0"; excluded.instanceField("android.support.v7.internal.widget.ActivityChooserModel", "mActivityChoserModelPolicy").reason(reason); excluded.instanceField("android.widget.ActivityChooserModel", "mActivityChoserModelPolicy") .reason(reason); } }, SPEECH_RECOGNIZER(SDK_INT < LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.speech.SpeechRecognizer$InternalListener", "this$0") .reason("Prior to Android 5, SpeechRecognizer.InternalListener was a non static inner" + " class and leaked the SpeechRecognizer which leaked an activity context." + " Fixed in AOSP: https://github.com/android/platform_frameworks_base/commit" + " /b37866db469e81aca534ff6186bdafd44352329b"); } }, ACCOUNT_MANAGER(SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.accounts.AccountManager$AmsTask$Response", "this$1") .reason("AccountManager$AmsTask$Response is a stub and is held in memory by native code," + " probably because the reference to the response in the other process hasn't been" + " cleared." + " AccountManager$AmsTask is holding on to the activity reference to use for" + " launching a new sub- Activity." + " Tracked here: https://code.google.com/p/android/issues/detail?id=173689" + " Fix: Pass a null activity reference to the AccountManager methods and then deal" + " with the returned future to to get the result and correctly start an activity" + " when it's available."); } }, MEDIA_SCANNER_CONNECTION(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.media.MediaScannerConnection", "mContext") .reason("The static method MediaScannerConnection.scanFile() takes an activity context" + " but the service might not disconnect after the activity has been destroyed." + " Tracked here: https://code.google.com/p/android/issues/detail?id=173788" + " Fix: Create an instance of MediaScannerConnection yourself and pass in the" + " application context. Call connect() and disconnect() manually."); } }, USER_MANAGER__SINSTANCE(SDK_INT >= JELLY_BEAN_MR2 && SDK_INT < O) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.os.UserManager", "mContext") .reason("UserManager has a static sInstance field that creates an instance and caches it" + " the first time UserManager.get() is called. This instance is created with the" + " outer context (which is an activity base context)." 
+ " Tracked here: https://code.google.com/p/android/issues/detail?id=173789" + " Introduced by: https://github.com/android/platform_frameworks_base/commit" + "/27db46850b708070452c0ce49daf5f79503fbde6" + " Fix: trigger a call to UserManager.get() in Application.onCreate(), so that the" + " UserManager instance gets cached with a reference to the application context."); } }, APP_WIDGET_HOST_CALLBACKS(SDK_INT < LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.appwidget.AppWidgetHost$Callbacks", "this$0") .reason("android.appwidget.AppWidgetHost$Callbacks is a stub and is held in memory native" + " code. The reference to the `mContext` was not being cleared, which caused the" + " Callbacks instance to retain this reference" + " Fixed in AOSP: https://github.com/android/platform_frameworks_base/commit" + "/7a96f3c917e0001ee739b65da37b2fadec7d7765"); } }, AUDIO_MANAGER(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.media.AudioManager$1", "this$0") .reason("Prior to Android M, VideoView required audio focus from AudioManager and" + " never abandoned it, which leaks the Activity context through the AudioManager." + " The root of the problem is that AudioManager uses whichever" + " context it receives, which in the case of the VideoView example is an Activity," + " even though it only needs the application's context. The issue is fixed in" + " Android M, and the AudioManager now uses the application's context." + " Tracked here: https://code.google.com/p/android/issues/detail?id=152173" + " Fix: https://gist.github.com/jankovd/891d96f476f7a9ce24e2"); } }, EDITTEXT_BLINK_MESSAGEQUEUE(SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.widget.Editor$Blink", "this$0") .reason("The EditText Blink of the Cursor is implemented using a callback and Messages," + " which trigger the display of the Cursor. If an AlertDialog or DialogFragment that" + " contains a blinking cursor is detached, a message is posted with a delay after the" + " dialog has been closed and as a result leaks the Activity." + " This can be fixed manually by calling TextView.setCursorVisible(false) in the" + " dismiss() method of the dialog." + " Tracked here: https://code.google.com/p/android/issues/detail?id=188551" + " Fixed in AOSP: https://android.googlesource.com/platform/frameworks/base/+" + "/5b734f2430e9f26c769d6af8ea5645e390fcf5af%5E%21/"); } }, CONNECTIVITY_MANAGER__SINSTANCE(SDK_INT <= M) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.net.ConnectivityManager", "sInstance") .reason("ConnectivityManager has a sInstance field that is set when the first" + "ConnectivityManager instance is created. ConnectivityManager has a mContext field." + "When calling activity.getSystemService(Context.CONNECTIVITY_SERVICE) , the first" + "ConnectivityManager instance is created with the activity context and stored in" + "sInstance. That activity context then leaks forever." + "Until this is fixed, app developers can prevent this leak by making sure the" + " ConnectivityManager is first created with an App Context. E.g. 
in some static" + " init do: context.getApplicationContext()" + ".getSystemService(Context.CONNECTIVITY_SERVICE)" + " Tracked here: https://code.google.com/p/android/issues/detail?id=198852" + " Introduced here: https://github.com/android/platform_frameworks_base/commit/" + "e0bef71662d81caaaa0d7214fb0bef5d39996a69"); } }, ACCESSIBILITY_NODE_INFO__MORIGINALTEXT(SDK_INT >= O && SDK_INT <= O_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.accessibility.AccessibilityNodeInfo", "mOriginalText") .reason("AccessibilityNodeInfo has a static sPool of AccessibilityNodeInfo. When " + "AccessibilityNodeInfo instances are released back in the pool, " + "AccessibilityNodeInfo.clear() does not clear the mOriginalText field, which " + "causes spans to leak which in turns causes TextView.ChangeWatcher to leak and the " + "whole view hierarchy. Introduced here: https://android.googlesource.com/platform/" + "frameworks/base/+/193520e3dff5248ddcf8435203bf99d2ba667219%5E%21/core/java/" + "android/view/accessibility/AccessibilityNodeInfo.java"); } }, BACKDROP_FRAME_RENDERER__MDECORVIEW(SDK_INT >= N && SDK_INT <= O) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.android.internal.policy.BackdropFrameRenderer", "mDecorView") .reason("When BackdropFrameRenderer.releaseRenderer() is called, there's an unknown case " + "where mRenderer becomes null but mChoreographer doesn't and the thread doesn't" + "stop and ends up leaking mDecorView which itself holds on to a destroyed" + "activity"); } }, // ######## Manufacturer specific Excluded refs ######## INSTRUMENTATION_RECOMMEND_ACTIVITY( MEIZU.equals(MANUFACTURER) && SDK_INT >= LOLLIPOP && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.app.Instrumentation", "mRecommendActivity") .reason("Instrumentation would leak com.android.internal.app.RecommendActivity (in " + "framework.jar) in Meizu FlymeOS 4.5 and above, which is based on Android 5.0 and " + "above"); } }, DEVICE_POLICY_MANAGER__SETTINGS_OBSERVER( MOTOROLA.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { if (MOTOROLA.equals(MANUFACTURER) && SDK_INT == KITKAT) { excluded.instanceField("android.app.admin.DevicePolicyManager$SettingsObserver", "this$0") .reason("DevicePolicyManager keeps a reference to the context it has been created with" + " instead of extracting the application context. In this Motorola build," + " DevicePolicyManager has an inner SettingsObserver class that is a content" + " observer, which is held into memory by a binder transport object."); } } }, SPEN_GESTURE_MANAGER(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("com.samsung.android.smartclip.SpenGestureManager", "mContext") .reason("SpenGestureManager has a static mContext field that leaks a reference to the" + " activity. Yes, a STATIC mContext field."); } }, GESTURE_BOOST_MANAGER(HUAWEI.equals(MANUFACTURER) && SDK_INT >= N && SDK_INT <= N_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.gestureboost.GestureBoostManager", "mContext") .reason("GestureBoostManager is a static singleton that leaks an activity context." 
+ "Fix: https://github.com/square/leakcanary/issues/696#issuecomment-296420756"); } }, INPUT_METHOD_MANAGER__LAST_SERVED_VIEW( HUAWEI.equals(MANUFACTURER) && SDK_INT >= M && SDK_INT <= N) { @Override void add(ExcludedRefs.Builder excluded) { String reason = "HUAWEI added a mLastSrvView field to InputMethodManager" + " that leaks a reference to the last served view."; excluded.instanceField("android.view.inputmethod.InputMethodManager", "mLastSrvView") .reason(reason); } }, CLIPBOARD_UI_MANAGER__SINSTANCE( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.sec.clipboard.ClipboardUIManager", "mContext") .reason("ClipboardUIManager is a static singleton that leaks an activity context." + " Fix: trigger a call to ClipboardUIManager.getInstance() in Application.onCreate()" + " , so that the ClipboardUIManager instance gets cached with a reference to the" + " application context. Example: https://gist.github.com/cypressious/" + "91c4fb1455470d803a602838dfcd5774"); } }, SEM_CLIPBOARD_MANAGER__MCONTEXT( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= N) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.samsung.android.content.clipboard.SemClipboardManager", "mContext") .reason("SemClipboardManager is held in memory by an anonymous inner class " + "implementation of android.os.Binder, thereby leaking an activity context."); } }, SEM_EMERGENCY_MANAGER__MCONTEXT( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= N) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.samsung.android.emergencymode.SemEmergencyManager", "mContext") .reason("SemEmergencyManager is a static singleton that leaks a DecorContext. " + "Fix: https://gist.github.com/jankovd/a210460b814c04d500eb12025902d60d"); } }, BUBBLE_POPUP_HELPER__SHELPER( LG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.widget.BubblePopupHelper", "sHelper") .reason("A static helper for EditText bubble popups leaks a reference to the latest" + "focused view."); } }, LGCONTEXT__MCONTEXT(LG.equals(MANUFACTURER) && SDK_INT == LOLLIPOP) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.lge.systemservice.core.LGContext", "mContext") .reason("LGContext is a static singleton that leaks an activity context."); } }, AW_RESOURCE__SRESOURCES(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { // AwResource#setResources() is called with resources that hold a reference to the // activity context (instead of the application context) and doesn't clear it. // Not sure what's going on there, input welcome. 
excluded.staticField("com.android.org.chromium.android_webview.AwResource", "sResources"); } }, MAPPER_CLIENT(NVIDIA.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("com.nvidia.ControllerMapper.MapperClient$ServiceClient", "this$0") .reason("Not sure exactly what ControllerMapper is about, but there is an anonymous" + " Handler in ControllerMapper.MapperClient.ServiceClient, which leaks" + " ControllerMapper.MapperClient which leaks the activity context."); } }, TEXT_VIEW__MLAST_HOVERED_VIEW( SAMSUNG.equals(MANUFACTURER) && SDK_INT >= KITKAT && SDK_INT <= O) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.widget.TextView", "mLastHoveredView") .reason("mLastHoveredView is a static field in TextView that leaks the last hovered" + " view."); } }, PERSONA_MANAGER(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.os.PersonaManager", "mContext") .reason("android.app.LoadedApk.mResources has a reference to" + " android.content.res.Resources.mPersonaManager which has a reference to" + " android.os.PersonaManager.mContext which is an activity."); } }, RESOURCES__MCONTEXT(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.content.res.Resources", "mContext") .reason("In AOSP the Resources class does not have a context." + " Here we have ZygoteInit.mResources (static field) holding on to a Resources" + " instance that has a context that is the activity." + " Observed here: https://github.com/square/leakcanary/issues/1#issue-74450184"); } }, VIEW_CONFIGURATION__MCONTEXT(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.instanceField("android.view.ViewConfiguration", "mContext") .reason("In AOSP the ViewConfiguration class does not have a context." + " Here we have ViewConfiguration.sConfigurations (static field) holding on to a" + " ViewConfiguration instance that has a context that is the activity." + " Observed here: https://github.com/square/leakcanary/issues" + "/1#issuecomment-100324683"); } }, SYSTEM_SENSOR_MANAGER__MAPPCONTEXTIMPL((LENOVO.equals(MANUFACTURER) && SDK_INT == KITKAT) // || (VIVO.equals(MANUFACTURER) && SDK_INT == LOLLIPOP_MR1)) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.hardware.SystemSensorManager", "mAppContextImpl") .reason("SystemSensorManager stores a reference to context " + "in a static field in its constructor." + "Fix: use application context to get SensorManager"); } }, AUDIO_MANAGER__MCONTEXT_STATIC(SAMSUNG.equals(MANUFACTURER) && SDK_INT == KITKAT) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.media.AudioManager", "mContext_static") .reason("Samsung added a static mContext_static field to AudioManager, holds a reference" + " to the activity." + " Observed here: https://github.com/square/leakcanary/issues/32"); } }, ACTIVITY_MANAGER_MCONTEXT(SAMSUNG.equals(MANUFACTURER) && SDK_INT == LOLLIPOP_MR1) { @Override void add(ExcludedRefs.Builder excluded) { excluded.staticField("android.app.ActivityManager", "mContext") .reason("Samsung added a static mContext field to ActivityManager, holds a reference" + " to the activity." 
+ " Observed here: https://github.com/square/leakcanary/issues/177 Fix in comment:" + " https://github.com/square/leakcanary/issues/177#issuecomment-222724283"); } }, // ######## General Excluded refs ######## SOFT_REFERENCES { @Override void add(ExcludedRefs.Builder excluded) { excluded.clazz(WeakReference.class.getName()).alwaysExclude(); excluded.clazz(SoftReference.class.getName()).alwaysExclude(); excluded.clazz(PhantomReference.class.getName()).alwaysExclude(); excluded.clazz("java.lang.ref.Finalizer").alwaysExclude(); excluded.clazz("java.lang.ref.FinalizerReference").alwaysExclude(); } }, FINALIZER_WATCHDOG_DAEMON { @Override void add(ExcludedRefs.Builder excluded) { // If the FinalizerWatchdogDaemon thread is on the shortest path, then there was no other // reference to the object and it was about to be GCed. excluded.thread("FinalizerWatchdogDaemon").alwaysExclude(); } }, MAIN { @Override void add(ExcludedRefs.Builder excluded) { // The main thread stack is ever changing so local variables aren't likely to hold references // for long. If this is on the shortest path, it's probably that there's a longer path with // a real leak. excluded.thread("main").alwaysExclude(); } }, LEAK_CANARY_THREAD { @Override void add(ExcludedRefs.Builder excluded) { excluded.thread(LEAK_CANARY_THREAD_NAME).alwaysExclude(); } }, EVENT_RECEIVER__MMESSAGE_QUEUE { @Override void add(ExcludedRefs.Builder excluded) { // DisplayEventReceiver keeps a reference message queue object so that it is not GC'd while // the native peer of the receiver is using them. // The main thread message queue is held on by the main Looper, but that might be a longer // path. Let's not confuse people with a shorter path that is less meaningful. excluded.instanceField("android.view.Choreographer$FrameDisplayEventReceiver", "mMessageQueue").alwaysExclude(); } }, VIEWLOCATIONHOLDER_ROOT(SDK_INT == P) { @Override void add(ExcludedRefs.Builder excluded) { // In Android P, ViewLocationHolder has an mRoot field that is not cleared in its clear() // method. // Introduced in https://github.com/aosp-mirror/platform_frameworks_base/commit/86b326012813f09d8f1de7d6d26c986a909de894 // Bug report: https://issuetracker.google.com/issues/112792715 excluded.instanceField("android.view.ViewGroup$ViewLocationHolder", "mRoot"); } }; /** * This returns the references in the leak path that should be ignored by all on Android. */ public static @NonNull ExcludedRefs.Builder createAndroidDefaults() { return createBuilder( EnumSet.of(SOFT_REFERENCES, FINALIZER_WATCHDOG_DAEMON, MAIN, LEAK_CANARY_THREAD, EVENT_RECEIVER__MMESSAGE_QUEUE)); } /** * This returns the references in the leak path that can be ignored for app developers. This * doesn't mean there is no memory leak, to the contrary. However, some leaks are caused by bugs * in AOSP or manufacturer forks of AOSP. In such cases, there is very little we can do as app * developers except by resorting to serious hacks, so we remove the noise caused by those leaks. 
*/ public static @NonNull ExcludedRefs.Builder createAppDefaults() { return createBuilder(EnumSet.allOf(AndroidExcludedRefs.class)); } public static @NonNull ExcludedRefs.Builder createBuilder(EnumSet<AndroidExcludedRefs> refs) { ExcludedRefs.Builder excluded = ExcludedRefs.builder(); for (AndroidExcludedRefs ref : refs) { if (ref.applies) { ref.add(excluded); ((ExcludedRefs.BuilderWithParams) excluded).named(ref.name()); } } return excluded; } final boolean applies; AndroidExcludedRefs() { this(true); } AndroidExcludedRefs(boolean applies) { this.applies = applies; } abstract void add(ExcludedRefs.Builder excluded); }
HUAWEI mLastSrvView leak happens on O (#1134) https://github.com/square/leakcanary/pull/932#issuecomment-431732347
leakcanary-android/src/main/java/com/squareup/leakcanary/AndroidExcludedRefs.java
HUAWEI mLastSrvView leak happens on O (#1134)
<ide><path>leakcanary-android/src/main/java/com/squareup/leakcanary/AndroidExcludedRefs.java <ide> }, <ide> <ide> INPUT_METHOD_MANAGER__LAST_SERVED_VIEW( <del> HUAWEI.equals(MANUFACTURER) && SDK_INT >= M && SDK_INT <= N) { <add> HUAWEI.equals(MANUFACTURER) && SDK_INT >= M && SDK_INT <= O_MR1) { <ide> @Override void add(ExcludedRefs.Builder excluded) { <ide> String reason = "HUAWEI added a mLastSrvView field to InputMethodManager" <ide> + " that leaks a reference to the last served view.";
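The single hunk above widens the guard of INPUT_METHOD_MANAGER__LAST_SERVED_VIEW from M..N to M..O_MR1, so the HUAWEI-specific mLastSrvView exclusion also applies on Android O and O MR1, matching the report linked in the commit message. A small sketch of the guard in isolation (constant values taken from android.os.Build.VERSION_CODES; the "HUAWEI" literal mirrors LeakCanaryInternals.HUAWEI):

import static android.os.Build.MANUFACTURER;
import static android.os.Build.VERSION.SDK_INT;
import static android.os.Build.VERSION_CODES.M;      // 23
import static android.os.Build.VERSION_CODES.N;      // 24
import static android.os.Build.VERSION_CODES.O_MR1;  // 27

final class HuaweiGuardSketch {
  // Before the change: stops applying above N, so HUAWEI devices on O (26) or O_MR1 (27) were not covered.
  static boolean appliesBefore() {
    return "HUAWEI".equals(MANUFACTURER) && SDK_INT >= M && SDK_INT <= N;
  }
  // After the change: the exclusion also applies on O and O_MR1.
  static boolean appliesAfter() {
    return "HUAWEI".equals(MANUFACTURER) && SDK_INT >= M && SDK_INT <= O_MR1;
  }
}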
Java
apache-2.0
701360bbe515e336d042acb805fde54e4e1d757a
0
spadgett/hawkular-metrics,jotak/hawkular-metrics,burmanm/hawkular-metrics,hawkular/hawkular-metrics,hawkular/hawkular-metrics,spadgett/hawkular-metrics,ppalaga/hawkular-metrics,mwringe/hawkular-metrics,hawkular/hawkular-metrics,tsegismont/hawkular-metrics,spadgett/hawkular-metrics,spadgett/hawkular-metrics,jotak/hawkular-metrics,pilhuhn/rhq-metrics,pilhuhn/rhq-metrics,jotak/hawkular-metrics,ppalaga/hawkular-metrics,pilhuhn/rhq-metrics,tsegismont/hawkular-metrics,mwringe/hawkular-metrics,ppalaga/hawkular-metrics,tsegismont/hawkular-metrics,tsegismont/hawkular-metrics,ppalaga/hawkular-metrics,mwringe/hawkular-metrics,pilhuhn/rhq-metrics,burmanm/hawkular-metrics,spadgett/hawkular-metrics,mwringe/hawkular-metrics,burmanm/hawkular-metrics,burmanm/hawkular-metrics,jotak/hawkular-metrics,hawkular/hawkular-metrics
/* * Copyright 2014-2015 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.metrics.clients.ptrans.backend; import static org.hawkular.metrics.clients.ptrans.backend.Constants.METRIC_ADDRESS; import java.net.URI; import java.util.List; import org.hawkular.metrics.client.common.Batcher; import org.hawkular.metrics.client.common.MetricBuffer; import org.hawkular.metrics.client.common.SingleMetric; import org.hawkular.metrics.clients.ptrans.Configuration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.vertx.core.AbstractVerticle; import io.vertx.core.Future; import io.vertx.core.buffer.Buffer; import io.vertx.core.eventbus.Message; import io.vertx.core.eventbus.MessageCodec; import io.vertx.core.http.HttpClient; import io.vertx.core.http.HttpClientOptions; import io.vertx.core.http.HttpClientRequest; import io.vertx.core.http.HttpHeaders; /** * Forwards metrics to the REST backend. This verticle consumes metrics published on the bus and inserts them in a * buffer. Metrics are sent as soon as the buffer is larger than the batch size. If servers are idle then the buffer is * flushed, regardless of its size. * <p> * When batches fail, the corresponding metrics are re-inserted in the buffer. 
* * @author Thomas Segismont */ public class MetricsSender extends AbstractVerticle { private static final Logger LOG = LoggerFactory.getLogger(MetricsSender.class); private final String host; private final int port; private final String postUri; private final CharSequence hostHeader; private final CharSequence tenant; private final MetricBuffer buffer; private final int batchSize; private final int maxConnections; private HttpClient httpClient; private int connectionsUsed; private boolean flushScheduled; private long flushScheduleId; public MetricsSender(Configuration configuration) { URI restUrl = configuration.getRestUrl(); URI httpProxy = configuration.getHttpProxy(); if (httpProxy == null) { host = restUrl.getHost(); port = restUrl.getPort(); postUri = restUrl.getPath(); } else { host = httpProxy.getHost(); port = httpProxy.getPort(); postUri = restUrl.toString(); } hostHeader = HttpHeaders.createOptimized(restUrl.getHost()); tenant = HttpHeaders.createOptimized(configuration.getTenant()); buffer = new MetricBuffer(configuration.getBufferCapacity()); batchSize = configuration.getBatchSize(); maxConnections = configuration.getRestMaxConnections(); } @Override public void start(Future<Void> startFuture) throws Exception { HttpClientOptions httpClientOptions = new HttpClientOptions() .setDefaultHost(host) .setDefaultPort(port) .setKeepAlive(true) .setTryUseCompression(true) .setMaxPoolSize(maxConnections); httpClient = vertx.createHttpClient(httpClientOptions); connectionsUsed = 0; flushScheduled = false; vertx.eventBus().registerDefaultCodec(SingleMetric.class, new SingleMetricCodec()); vertx.eventBus().localConsumer(METRIC_ADDRESS, this::handleMetric) .completionHandler(v -> startFuture.complete()); } private void handleMetric(Message<SingleMetric> metricMessage) { buffer.insert(metricMessage.body()); metricInserted(); } private void metricInserted() { sendBatches(false); scheduleFlush(); } private void sendBatches(boolean force) { for (int bufferSize = buffer.size(); ; bufferSize = buffer.size()) { if ((!force && bufferSize < batchSize) || bufferSize < 1) { break; } if (connectionsUsed >= maxConnections) { break; } List<SingleMetric> metrics = buffer.remove(Math.min(bufferSize, batchSize)); send(metrics); } } private void scheduleFlush() { if (flushScheduled) { vertx.cancelTimer(flushScheduleId); } else { flushScheduled = true; } flushScheduleId = vertx.setTimer(10, h -> { flushScheduled = false; sendBatches(true); }); } private void send(List<SingleMetric> metrics) { connectionsUsed++; String json = Batcher.metricListToJson(metrics); Buffer data = Buffer.buffer(json); HttpClientRequest req = httpClient.post(postUri, response -> { connectionsUsed--; if (response.statusCode() != 200) { if (LOG.isTraceEnabled()) { response.bodyHandler(msg -> { LOG.trace("Could not send metrics: " + response.statusCode() + " : " + msg.toString()); }); } buffer.reInsert(metrics); metricInserted(); } }); req.putHeader(HttpHeaders.HOST, hostHeader); req.putHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(data.length())); req.putHeader(HttpHeaders.CONTENT_TYPE, Constants.APPLICATION_JSON); req.putHeader(Constants.TENANT_HEADER_NAME, tenant); req.exceptionHandler(err -> { connectionsUsed--; LOG.trace("Could not send metrics", err); buffer.reInsert(metrics); metricInserted(); }); req.write(data); req.end(); } @Override public void stop() throws Exception { if (flushScheduled) { vertx.cancelTimer(flushScheduleId); } } private static class SingleMetricCodec implements MessageCodec<SingleMetric, SingleMetric> { 
@Override public void encodeToWire(Buffer buffer, SingleMetric singleMetric) { } @Override public SingleMetric decodeFromWire(int pos, Buffer buffer) { return null; } @Override public SingleMetric transform(SingleMetric singleMetric) { return singleMetric; } @Override public String name() { return SingleMetricCodec.class.getCanonicalName(); } @Override public byte systemCodecID() { return -1; } } }
clients/ptranslator/src/main/java/org/hawkular/metrics/clients/ptrans/backend/MetricsSender.java
/* * Copyright 2014-2015 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.metrics.clients.ptrans.backend; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.NANOSECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static org.hawkular.metrics.clients.ptrans.backend.Constants.METRIC_ADDRESS; import java.net.URI; import java.util.ArrayList; import java.util.List; import org.hawkular.metrics.client.common.Batcher; import org.hawkular.metrics.client.common.SingleMetric; import org.hawkular.metrics.clients.ptrans.Configuration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.vertx.core.AbstractVerticle; import io.vertx.core.Future; import io.vertx.core.buffer.Buffer; import io.vertx.core.eventbus.Message; import io.vertx.core.eventbus.MessageCodec; import io.vertx.core.http.HttpClient; import io.vertx.core.http.HttpClientOptions; import io.vertx.core.http.HttpClientRequest; import io.vertx.core.http.HttpHeaders; /** * @author Thomas Segismont */ public class MetricsSender extends AbstractVerticle { private static final Logger LOG = LoggerFactory.getLogger(MetricsSender.class); private final String host; private final int port; private final String postUri; private final CharSequence hostHeader; private final CharSequence tenant; private final int batchSize; private final long batchDelay; private final List<SingleMetric> queue; private HttpClient httpClient; private long sendTime; public MetricsSender(Configuration configuration) { URI restUrl = configuration.getRestUrl(); URI httpProxy = configuration.getHttpProxy(); if (httpProxy == null) { host = restUrl.getHost(); port = restUrl.getPort(); postUri = restUrl.getPath(); } else { host = httpProxy.getHost(); port = httpProxy.getPort(); postUri = restUrl.toString(); } hostHeader = HttpHeaders.createOptimized(restUrl.getHost()); tenant = HttpHeaders.createOptimized(configuration.getTenant()); batchSize = configuration.getMinimumBatchSize(); batchDelay = configuration.getMaximumBatchDelay(); queue = new ArrayList<>(batchSize); } @Override public void start(Future<Void> startFuture) throws Exception { HttpClientOptions httpClientOptions = new HttpClientOptions().setDefaultHost(host) .setDefaultPort(port) .setKeepAlive(true) .setTryUseCompression(true); httpClient = vertx.createHttpClient(httpClientOptions); vertx.setPeriodic(MILLISECONDS.convert(batchDelay, SECONDS), this::flushIfIdle); sendTime = System.nanoTime(); vertx.eventBus().registerDefaultCodec(SingleMetric.class, new SingleMetricCodec()); vertx.eventBus().localConsumer(METRIC_ADDRESS, this::handleMetric) .completionHandler(v -> startFuture.complete()); } private void handleMetric(Message<SingleMetric> metricMessage) { queue.add(metricMessage.body()); if (queue.size() < batchSize) { return; } List<SingleMetric> metrics = new ArrayList<>(queue); queue.clear(); do { List<SingleMetric> subList = 
metrics.subList(0, batchSize); send(subList); subList.clear(); } while (metrics.size() >= batchSize); queue.addAll(metrics); } private void send(List<SingleMetric> metrics) { String json = Batcher.metricListToJson(metrics); Buffer buffer = Buffer.buffer(json); HttpClientRequest req = httpClient.post( postUri, response -> { if (response.statusCode() != 200 && LOG.isTraceEnabled()) { response.bodyHandler( msg -> LOG.trace( "Could not send metrics: " + response.statusCode() + " : " + msg.toString() ) ); } } ); req.putHeader(HttpHeaders.HOST, hostHeader); req.putHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(buffer.length())); req.putHeader(HttpHeaders.CONTENT_TYPE, Constants.APPLICATION_JSON); req.putHeader(Constants.TENANT_HEADER_NAME, tenant); req.exceptionHandler(err -> LOG.trace("Could not send metrics", err)); req.write(buffer); req.end(); sendTime = System.nanoTime(); } private void flushIfIdle(Long timerId) { if (System.nanoTime() - sendTime > NANOSECONDS.convert(batchDelay, SECONDS) && queue.size() > 0) { List<SingleMetric> metrics = new ArrayList<>(queue); queue.clear(); send(metrics); } } public static class SingleMetricCodec implements MessageCodec<SingleMetric, SingleMetric> { @Override public void encodeToWire(Buffer buffer, SingleMetric singleMetric) { } @Override public SingleMetric decodeFromWire(int pos, Buffer buffer) { return null; } @Override public SingleMetric transform(SingleMetric singleMetric) { return singleMetric; } @Override public String name() { return SingleMetricCodec.class.getCanonicalName(); } @Override public byte systemCodecID() { return -1; } } }
Updated MetricsSender to use the buffer from clients/commons. This verticle consumes metrics published on the bus and inserts them in a buffer. Metrics are sent as soon as the buffer is larger than the batch size. If servers are idle then the buffer is flushed, regardless of its size. When batches fail, the corresponding metrics are re-inserted in the buffer (see the sketch after this record).
clients/ptranslator/src/main/java/org/hawkular/metrics/clients/ptrans/backend/MetricsSender.java
Updated MetricsSender to use the buffer from clients/commons
<ide><path>lients/ptranslator/src/main/java/org/hawkular/metrics/clients/ptrans/backend/MetricsSender.java <ide> */ <ide> package org.hawkular.metrics.clients.ptrans.backend; <ide> <del>import static java.util.concurrent.TimeUnit.MILLISECONDS; <del>import static java.util.concurrent.TimeUnit.NANOSECONDS; <del>import static java.util.concurrent.TimeUnit.SECONDS; <del> <ide> import static org.hawkular.metrics.clients.ptrans.backend.Constants.METRIC_ADDRESS; <ide> <ide> import java.net.URI; <del>import java.util.ArrayList; <ide> import java.util.List; <ide> <ide> import org.hawkular.metrics.client.common.Batcher; <add>import org.hawkular.metrics.client.common.MetricBuffer; <ide> import org.hawkular.metrics.client.common.SingleMetric; <ide> import org.hawkular.metrics.clients.ptrans.Configuration; <ide> import org.slf4j.Logger; <ide> import io.vertx.core.http.HttpHeaders; <ide> <ide> /** <add> * Forwards metrics to the REST backend. This verticle consumes metrics published on the bus and inserts them in a <add> * buffer. Metrics are sent as soon as the buffer is larger than the batch size. If servers are idle then the buffer is <add> * flushed, regardless of its size. <add> * <p> <add> * When batches fail, the corresponding metrics are re-inserted in the buffer. <add> * <ide> * @author Thomas Segismont <ide> */ <ide> public class MetricsSender extends AbstractVerticle { <ide> <ide> private final CharSequence tenant; <ide> <add> private final MetricBuffer buffer; <ide> private final int batchSize; <del> private final long batchDelay; <del> private final List<SingleMetric> queue; <add> private final int maxConnections; <ide> <ide> private HttpClient httpClient; <del> private long sendTime; <add> <add> private int connectionsUsed; <add> <add> private boolean flushScheduled; <add> private long flushScheduleId; <ide> <ide> public MetricsSender(Configuration configuration) { <ide> URI restUrl = configuration.getRestUrl(); <ide> <ide> tenant = HttpHeaders.createOptimized(configuration.getTenant()); <ide> <del> batchSize = configuration.getMinimumBatchSize(); <del> batchDelay = configuration.getMaximumBatchDelay(); <del> queue = new ArrayList<>(batchSize); <add> buffer = new MetricBuffer(configuration.getBufferCapacity()); <add> batchSize = configuration.getBatchSize(); <add> maxConnections = configuration.getRestMaxConnections(); <ide> } <ide> <ide> @Override <ide> public void start(Future<Void> startFuture) throws Exception { <del> HttpClientOptions httpClientOptions = new HttpClientOptions().setDefaultHost(host) <del> .setDefaultPort(port) <del> .setKeepAlive(true) <del> .setTryUseCompression(true); <add> HttpClientOptions httpClientOptions = new HttpClientOptions() <add> .setDefaultHost(host) <add> .setDefaultPort(port) <add> .setKeepAlive(true) <add> .setTryUseCompression(true) <add> .setMaxPoolSize(maxConnections); <ide> httpClient = vertx.createHttpClient(httpClientOptions); <del> vertx.setPeriodic(MILLISECONDS.convert(batchDelay, SECONDS), this::flushIfIdle); <del> sendTime = System.nanoTime(); <add> <add> connectionsUsed = 0; <add> <add> flushScheduled = false; <add> <ide> vertx.eventBus().registerDefaultCodec(SingleMetric.class, new SingleMetricCodec()); <ide> vertx.eventBus().localConsumer(METRIC_ADDRESS, this::handleMetric) <del> .completionHandler(v -> startFuture.complete()); <add> .completionHandler(v -> startFuture.complete()); <ide> } <ide> <ide> private void handleMetric(Message<SingleMetric> metricMessage) { <del> queue.add(metricMessage.body()); <del> if (queue.size() < batchSize) 
{ <del> return; <del> } <del> List<SingleMetric> metrics = new ArrayList<>(queue); <del> queue.clear(); <del> do { <del> List<SingleMetric> subList = metrics.subList(0, batchSize); <del> send(subList); <del> subList.clear(); <del> } while (metrics.size() >= batchSize); <del> queue.addAll(metrics); <add> buffer.insert(metricMessage.body()); <add> metricInserted(); <add> } <add> <add> private void metricInserted() { <add> sendBatches(false); <add> scheduleFlush(); <add> } <add> <add> private void sendBatches(boolean force) { <add> for (int bufferSize = buffer.size(); ; bufferSize = buffer.size()) { <add> if ((!force && bufferSize < batchSize) || bufferSize < 1) { <add> break; <add> } <add> if (connectionsUsed >= maxConnections) { <add> break; <add> } <add> List<SingleMetric> metrics = buffer.remove(Math.min(bufferSize, batchSize)); <add> send(metrics); <add> } <add> } <add> <add> private void scheduleFlush() { <add> if (flushScheduled) { <add> vertx.cancelTimer(flushScheduleId); <add> } else { <add> flushScheduled = true; <add> } <add> flushScheduleId = vertx.setTimer(10, h -> { <add> flushScheduled = false; <add> sendBatches(true); <add> }); <ide> } <ide> <ide> private void send(List<SingleMetric> metrics) { <add> connectionsUsed++; <ide> String json = Batcher.metricListToJson(metrics); <del> Buffer buffer = Buffer.buffer(json); <del> HttpClientRequest req = httpClient.post( <del> postUri, <del> response -> { <del> if (response.statusCode() != 200 && LOG.isTraceEnabled()) { <del> response.bodyHandler( <del> msg -> LOG.trace( <del> "Could not send metrics: " + response.statusCode() + " : " <del> + msg.toString() <del> ) <del> ); <del> } <add> Buffer data = Buffer.buffer(json); <add> HttpClientRequest req = httpClient.post(postUri, response -> { <add> connectionsUsed--; <add> if (response.statusCode() != 200) { <add> if (LOG.isTraceEnabled()) { <add> response.bodyHandler(msg -> { <add> LOG.trace("Could not send metrics: " + response.statusCode() + " : " + msg.toString()); <add> }); <ide> } <del> ); <add> buffer.reInsert(metrics); <add> metricInserted(); <add> } <add> }); <ide> req.putHeader(HttpHeaders.HOST, hostHeader); <del> req.putHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(buffer.length())); <add> req.putHeader(HttpHeaders.CONTENT_LENGTH, String.valueOf(data.length())); <ide> req.putHeader(HttpHeaders.CONTENT_TYPE, Constants.APPLICATION_JSON); <ide> req.putHeader(Constants.TENANT_HEADER_NAME, tenant); <del> req.exceptionHandler(err -> LOG.trace("Could not send metrics", err)); <del> req.write(buffer); <add> req.exceptionHandler(err -> { <add> connectionsUsed--; <add> LOG.trace("Could not send metrics", err); <add> buffer.reInsert(metrics); <add> metricInserted(); <add> }); <add> req.write(data); <ide> req.end(); <del> sendTime = System.nanoTime(); <del> } <del> <del> private void flushIfIdle(Long timerId) { <del> if (System.nanoTime() - sendTime > NANOSECONDS.convert(batchDelay, SECONDS) <del> && queue.size() > 0) { <del> List<SingleMetric> metrics = new ArrayList<>(queue); <del> queue.clear(); <del> send(metrics); <del> } <del> } <del> <del> public static class SingleMetricCodec implements MessageCodec<SingleMetric, SingleMetric> { <add> } <add> <add> @Override <add> public void stop() throws Exception { <add> if (flushScheduled) { <add> vertx.cancelTimer(flushScheduleId); <add> } <add> } <add> <add> private static class SingleMetricCodec implements MessageCodec<SingleMetric, SingleMetric> { <ide> @Override <ide> public void encodeToWire(Buffer buffer, SingleMetric singleMetric) { 
<ide> }
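The commit record above describes a simple batching pattern: buffer incoming metrics, send a full batch as soon as enough have accumulated, flush whatever is left when nothing more arrives, and put a failed batch back into the buffer. The following is a minimal, framework-free sketch of that pattern, assuming nothing beyond the JDK; the class and method names (SimpleBatchingSender, post) are illustrative and are not part of the Hawkular code, and the Vert.x event bus, HTTP client, connection accounting and idle-flush timer of the real MetricsSender are deliberately left out.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Minimal sketch of the batching/re-insert pattern; not the Hawkular implementation.
public class SimpleBatchingSender {

    private final Deque<String> buffer = new ArrayDeque<>(); // stands in for MetricBuffer
    private final int batchSize;

    public SimpleBatchingSender(int batchSize) {
        this.batchSize = batchSize;
    }

    // Called for every incoming metric; mirrors handleMetric() + metricInserted().
    public void insert(String metric) {
        buffer.addLast(metric);
        sendBatches(false);
    }

    // Called by an idle timer in the real verticle; forces out a partial batch.
    public void flush() {
        sendBatches(true);
    }

    private void sendBatches(boolean force) {
        while (!buffer.isEmpty() && (force || buffer.size() >= batchSize)) {
            List<String> batch = new ArrayList<>();
            while (batch.size() < batchSize && !buffer.isEmpty()) {
                batch.add(buffer.pollFirst());
            }
            if (!post(batch)) {
                // Failed batch goes back to the front of the buffer, oldest first.
                for (int i = batch.size() - 1; i >= 0; i--) {
                    buffer.addFirst(batch.get(i));
                }
                return; // stop draining until the next insert or flush
            }
        }
    }

    // Placeholder for the HTTP POST to the REST backend; always "succeeds" here.
    private boolean post(List<String> batch) {
        System.out.println("POST " + batch.size() + " metrics: " + batch);
        return true;
    }

    public static void main(String[] args) {
        SimpleBatchingSender sender = new SimpleBatchingSender(3);
        for (int i = 1; i <= 7; i++) {
            sender.insert("metric-" + i); // two full batches of 3 go out as they fill up
        }
        sender.flush(); // the remaining single metric is forced out
    }
}

Re-inserting a failed batch at the head of the deque keeps metric order roughly stable, which mirrors what MetricBuffer.reInsert is used for in the record above.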
JavaScript
apache-2.0
32f56bf549f953ecb6a7bedde7fc807995fed8a3
0
dtom90/sonny-weather-bot,dtom90/sonny-weather-bot
/* * Copyright © 2016 I.B.M. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the “License”); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an “AS IS” BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* The Conversation module handles the display and behavior of the chat section * of the application, including the messages to and from Watson and the input box */ /* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^Conversation$" }] */ /* global Api: true, Common: true */ var Conversation = (function() { 'use strict'; var ids = { userInput: 'user-input', chatFlow: 'chat-flow', chatScrollWrapper: 'chat-scroll-wrapper' }; var classes = { messageWrapper: 'message-wrapper', preBar: 'pre-bar', underline: 'underline' }; var authorTypes = { user: 'user', watson: 'watson' }; // Publicly accessible methods defined return { init: init, setMessage: setMessage, sendMessage: sendMessage, focusInput: focusInput }; // Initialize Conversation module function init() { chatSetup(); initEnterSubmit(); // setupInputBox(); Api.initConversation(); // Load initial Watson greeting } // Hide chat box until there are messages, // set up messages to display when user or Watson sends message function chatSetup() { document.getElementById(ids.chatScrollWrapper).style.display = 'none'; var search = location.search.substring(1); console.log(search); if(search){ var context = JSON.parse('{"' + decodeURI(search).replace(/"/g, '\\"').replace(/&/g, '","').replace(/=/g,'":"') + '"}'); Api.setContext(context); } var currentRequestPayloadSetter = Api.setUserPayload; Api.setUserPayload = function(payload) { currentRequestPayloadSetter.call(Api, payload); displayMessage(payload, authorTypes.user); }; var currentResponsePayloadSetter = Api.setWatsonPayload; Api.setWatsonPayload = function(payload) { currentResponsePayloadSetter.call(Api, payload); displayMessage(payload, authorTypes.watson); }; } // Set up the input box to submit a message when enter is pressed function initEnterSubmit() { document.getElementById(ids.userInput) .addEventListener('keypress', function(event) { if (event.keyCode === 13) { sendMessage(); event.preventDefault(); } }, false); } // Set up the input box to underline text as it is typed // This is done by creating a hidden dummy version of the input box that // is used to determine what the width of the input text should be. // This value is then used to set the new width of the visible input box. 
function setupInputBox() { var input = document.getElementById(ids.userInput); var dummy = document.getElementById(ids.userInputDummy); var minFontSize = 9; var maxFontSize = 16; var minPadding = 5; var maxPadding = 9; // If no dummy input box exists, create one if (dummy === null) { var dummyJson = { 'tagName': 'div', 'attributes': [{ 'name': 'id', 'value': (ids.userInputDummy) }] }; dummy = Common.buildDomElement(dummyJson); document.body.appendChild(dummy); } function adjustInput() { if (input.value === '') { // If the input box is empty, remove the underline Common.removeClass(input, 'underline'); input.setAttribute('style', 'width:' + '100%'); input.style.width = '100%'; } else { // otherwise, adjust the dummy text to match, and then set the width of // the visible input box to match it (thus extending the underline) Common.addClass(input, classes.underline); var txtNode = document.createTextNode(input.value); ['font-size', 'font-style', 'font-weight', 'font-family', 'line-height', 'text-transform', 'letter-spacing'].forEach(function(index) { dummy.style[index] = window.getComputedStyle(input, null).getPropertyValue(index); }); dummy.textContent = txtNode.textContent; var padding = 0; var htmlElem = document.getElementsByTagName('html')[0]; var currentFontSize = parseInt(window.getComputedStyle(htmlElem, null).getPropertyValue('font-size'), 10); if (currentFontSize) { padding = Math.floor((currentFontSize - minFontSize) / (maxFontSize - minFontSize) * (maxPadding - minPadding) + minPadding); } else { padding = maxPadding; } var widthValue = ( dummy.offsetWidth + padding) + 'px'; input.setAttribute('style', 'width:' + widthValue); input.style.width = widthValue; } } // Any time the input changes, or the window resizes, adjust the size of the input box input.addEventListener('input', adjustInput); window.addEventListener('resize', adjustInput); // Trigger the input event once to set up the input box and dummy element Common.fireEvent(input, 'input'); } // Retrieve the value of the input box function getMessage() { var userInput = document.getElementById(ids.userInput); return userInput.value; } // Set the value of the input box function setMessage(text) { var userInput = document.getElementById(ids.userInput); userInput.value = text; userInput.focus(); Common.fireEvent(userInput, 'input'); } // Send the message from the input box function sendMessage(newText) { var text; if (newText) { text = newText; } else { text = getMessage(); } if (!text) { return; } setMessage(''); Api.postConversationMessage(text); } function addMessage(chatBoxElement, messageDiv) { chatBoxElement.appendChild(messageDiv); updateChat(); } function delayMessagePost(chatBoxElement, messageDiv, i, delay) { setTimeout(function() { addMessage(chatBoxElement, messageDiv); }, i*delay*1000); } // Display a message, given a message payload and a message type (user or Watson) // TODO: Make sure that newline characters at the end don't mess with the question mark detection function displayMessage(newPayload, typeValue) { var isUser = isUserMessage(typeValue); var textExists = (newPayload.input && newPayload.input.text) || (newPayload.output && newPayload.output.text); if (isUser !== null && textExists) { // if (newPayload.output && Object.prototype.toString.call( newPayload.output.text ) === '[object Array]') { // newPayload.output.text = newPayload.output.text.filter(function(item) { // return item && item.length > 0; // }).join(' '); // } var dataObj = isUser ? 
newPayload.input : newPayload.output; var text = dataObj.text; if (!String(text).trim()) { return; } var chatBoxElement = document.getElementById(ids.chatFlow); //TODO: updateChat after images have been loaded if(Array.isArray(text)){ for(var i in text){ var messageDiv = buildMessageDomElement(text[i], isUser); delayMessagePost(chatBoxElement, messageDiv, i, dataObj.delay); } } else { var messageDiv = buildMessageDomElement(text, isUser); addMessage(chatBoxElement, messageDiv); } } } // Determine whether a given message type is user or Watson function isUserMessage(typeValue) { if (typeValue === authorTypes.user) { return true; } else if (typeValue === authorTypes.watson) { return false; } return null; } // Builds the message DOM element (using auxiliary function Common.buildDomElement) function buildMessageDomElement(text, isUser) { // var dataObj = isUser ? newPayload.input : newPayload.output; var content = []; if(isUser) content += '<img class=\'message-icon user-icon\' src=\'/images/head.svg\' />'; content += { 'tagName': 'p', 'html': text }; if(!isUser) content += '<img class=\'message-icon watson-icon\' src=\'/images/watson-logo-round.png\' />'; var messageJson = { // <div class='user / watson'> 'tagName': 'div', 'classNames': ['message-wrapper', (isUser ? authorTypes.user : authorTypes.watson)], 'children': [{ // <div class='user-message / watson-message'> 'tagName': 'div', 'classNames': (isUser ? [authorTypes.user + '-message'] : [authorTypes.watson + '-message']),//, classes.preBar // 'children': content 'html': (isUser ? '<img class=\'message-icon user-icon\' src=\'/images/head.svg\' />' + text : text + '<img class=\'message-icon watson-icon\' src=\'/images/watson-logo-round.png\' />') }] }; return Common.buildDomElement(messageJson); } // Display the chat box if it's currently hidden // (i.e. if this is the first message), scroll to the bottom of the chat function updateChat() { document.getElementById(ids.chatScrollWrapper).style.display = ''; var messages = document.getElementById(ids.chatFlow).getElementsByClassName(classes.messageWrapper); document.getElementById(ids.chatFlow).scrollTop = messages[messages.length - 1].offsetTop; } // Set browser focus on the input box function focusInput() { document.getElementById(ids.userInput).focus(); } }());
ui/js/conversation.js
/* * Copyright © 2016 I.B.M. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the “License”); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an “AS IS” BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* The Conversation module handles the display and behavior of the chat section * of the application, including the messages to and from Watson and the input box */ /* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^Conversation$" }] */ /* global Api: true, Common: true */ var Conversation = (function() { 'use strict'; var ids = { userInput: 'user-input', chatFlow: 'chat-flow', chatScrollWrapper: 'chat-scroll-wrapper' }; var classes = { messageWrapper: 'message-wrapper', preBar: 'pre-bar', underline: 'underline' }; var authorTypes = { user: 'user', watson: 'watson' }; // Publicly accessible methods defined return { init: init, setMessage: setMessage, sendMessage: sendMessage, focusInput: focusInput }; // Initialize Conversation module function init() { chatSetup(); initEnterSubmit(); // setupInputBox(); Api.initConversation(); // Load initial Watson greeting } // Hide chat box until there are messages, // set up messages to display when user or Watson sends message function chatSetup() { document.getElementById(ids.chatScrollWrapper).style.display = 'none'; var search = location.search.substring(1); console.log(search); if(search){ var context = JSON.parse('{"' + decodeURI(search).replace(/"/g, '\\"').replace(/&/g, '","').replace(/=/g,'":"') + '"}'); Api.setContext(context); } var currentRequestPayloadSetter = Api.setUserPayload; Api.setUserPayload = function(payload) { currentRequestPayloadSetter.call(Api, payload); displayMessage(payload, authorTypes.user); }; var currentResponsePayloadSetter = Api.setWatsonPayload; Api.setWatsonPayload = function(payload) { currentResponsePayloadSetter.call(Api, payload); displayMessage(payload, authorTypes.watson); }; } // Set up the input box to submit a message when enter is pressed function initEnterSubmit() { document.getElementById(ids.userInput) .addEventListener('keypress', function(event) { if (event.keyCode === 13) { sendMessage(); event.preventDefault(); } }, false); } // Set up the input box to underline text as it is typed // This is done by creating a hidden dummy version of the input box that // is used to determine what the width of the input text should be. // This value is then used to set the new width of the visible input box. 
function setupInputBox() { var input = document.getElementById(ids.userInput); var dummy = document.getElementById(ids.userInputDummy); var minFontSize = 9; var maxFontSize = 16; var minPadding = 5; var maxPadding = 9; // If no dummy input box exists, create one if (dummy === null) { var dummyJson = { 'tagName': 'div', 'attributes': [{ 'name': 'id', 'value': (ids.userInputDummy) }] }; dummy = Common.buildDomElement(dummyJson); document.body.appendChild(dummy); } function adjustInput() { if (input.value === '') { // If the input box is empty, remove the underline Common.removeClass(input, 'underline'); input.setAttribute('style', 'width:' + '100%'); input.style.width = '100%'; } else { // otherwise, adjust the dummy text to match, and then set the width of // the visible input box to match it (thus extending the underline) Common.addClass(input, classes.underline); var txtNode = document.createTextNode(input.value); ['font-size', 'font-style', 'font-weight', 'font-family', 'line-height', 'text-transform', 'letter-spacing'].forEach(function(index) { dummy.style[index] = window.getComputedStyle(input, null).getPropertyValue(index); }); dummy.textContent = txtNode.textContent; var padding = 0; var htmlElem = document.getElementsByTagName('html')[0]; var currentFontSize = parseInt(window.getComputedStyle(htmlElem, null).getPropertyValue('font-size'), 10); if (currentFontSize) { padding = Math.floor((currentFontSize - minFontSize) / (maxFontSize - minFontSize) * (maxPadding - minPadding) + minPadding); } else { padding = maxPadding; } var widthValue = ( dummy.offsetWidth + padding) + 'px'; input.setAttribute('style', 'width:' + widthValue); input.style.width = widthValue; } } // Any time the input changes, or the window resizes, adjust the size of the input box input.addEventListener('input', adjustInput); window.addEventListener('resize', adjustInput); // Trigger the input event once to set up the input box and dummy element Common.fireEvent(input, 'input'); } // Retrieve the value of the input box function getMessage() { var userInput = document.getElementById(ids.userInput); return userInput.value; } // Set the value of the input box function setMessage(text) { var userInput = document.getElementById(ids.userInput); userInput.value = text; userInput.focus(); Common.fireEvent(userInput, 'input'); } // Send the message from the input box function sendMessage(newText) { var text; if (newText) { text = newText; } else { text = getMessage(); } if (!text) { return; } setMessage(''); Api.postConversationMessage(text); } // Display a message, given a message payload and a message type (user or Watson) // TODO: Make sure that newline characters at the end don't mess with the question mark detection function displayMessage(newPayload, typeValue) { var isUser = isUserMessage(typeValue); var textExists = (newPayload.input && newPayload.input.text) || (newPayload.output && newPayload.output.text); if (isUser !== null && textExists) { // if (newPayload.output && Object.prototype.toString.call( newPayload.output.text ) === '[object Array]') { // newPayload.output.text = newPayload.output.text.filter(function(item) { // return item && item.length > 0; // }).join(' '); // } var dataObj = isUser ? 
newPayload.input : newPayload.output; var text = dataObj.text; if (!String(text).trim()) { return; } var chatBoxElement = document.getElementById(ids.chatFlow); //TODO: updateChat after images have been loaded if(Array.isArray(text)){ for(var i in text){ var messageDiv = buildMessageDomElement(text[i], isUser); chatBoxElement.appendChild(messageDiv); } } else { var messageDiv = buildMessageDomElement(text, isUser); chatBoxElement.appendChild(messageDiv); } updateChat(); } } // Determine whether a given message type is user or Watson function isUserMessage(typeValue) { if (typeValue === authorTypes.user) { return true; } else if (typeValue === authorTypes.watson) { return false; } return null; } // Builds the message DOM element (using auxiliary function Common.buildDomElement) function buildMessageDomElement(text, isUser) { // var dataObj = isUser ? newPayload.input : newPayload.output; var content = []; if(isUser) content += '<img class=\'message-icon user-icon\' src=\'/images/head.svg\' />'; content += { 'tagName': 'p', 'html': text }; if(!isUser) content += '<img class=\'message-icon watson-icon\' src=\'/images/watson-logo-round.png\' />'; var messageJson = { // <div class='user / watson'> 'tagName': 'div', 'classNames': ['message-wrapper', (isUser ? authorTypes.user : authorTypes.watson)], 'children': [{ // <div class='user-message / watson-message'> 'tagName': 'div', 'classNames': (isUser ? [authorTypes.user + '-message'] : [authorTypes.watson + '-message']),//, classes.preBar // 'children': content 'html': (isUser ? '<img class=\'message-icon user-icon\' src=\'/images/head.svg\' />' + text : text + '<img class=\'message-icon watson-icon\' src=\'/images/watson-logo-round.png\' />') }] }; return Common.buildDomElement(messageJson); } // Display the chat box if it's currently hidden // (i.e. if this is the first message), scroll to the bottom of the chat function updateChat() { document.getElementById(ids.chatScrollWrapper).style.display = ''; var messages = document.getElementById(ids.chatFlow).getElementsByClassName(classes.messageWrapper); document.getElementById(ids.chatFlow).scrollTop = messages[messages.length - 1].offsetTop; } // Set browser focus on the input box function focusInput() { document.getElementById(ids.userInput).focus(); } }());
Message delay when output:delay is set to a number (see the sketch after this record)
ui/js/conversation.js
Message delay when output:delay is set to a number
<ide><path>i/js/conversation.js <ide> setMessage(''); <ide> <ide> Api.postConversationMessage(text); <add> } <add> <add> function addMessage(chatBoxElement, messageDiv) { <add> chatBoxElement.appendChild(messageDiv); <add> updateChat(); <add> } <add> <add> function delayMessagePost(chatBoxElement, messageDiv, i, delay) { <add> setTimeout(function() { addMessage(chatBoxElement, messageDiv); }, i*delay*1000); <ide> } <ide> <ide> // Display a message, given a message payload and a message type (user or Watson) <ide> if(Array.isArray(text)){ <ide> for(var i in text){ <ide> var messageDiv = buildMessageDomElement(text[i], isUser); <del> chatBoxElement.appendChild(messageDiv); <add> delayMessagePost(chatBoxElement, messageDiv, i, dataObj.delay); <ide> } <ide> } else { <ide> var messageDiv = buildMessageDomElement(text, isUser); <del> chatBoxElement.appendChild(messageDiv); <add> addMessage(chatBoxElement, messageDiv); <ide> } <del> updateChat(); <ide> } <ide> } <ide>
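The JavaScript change above staggers chat messages with setTimeout(..., i * delay * 1000) so that each entry in an array of replies appears delay seconds after the previous one. Purely as an illustration, and keeping to Java like the other code in this document, the sketch below shows the same staggering idea with a ScheduledExecutorService; the class name StaggeredMessages and the delaySeconds value are made up for the example and do not come from the sonny-weather-bot code (where the value arrives as output.delay, written output:delay in the commit message).

import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Illustrative only: schedules message i at i * delaySeconds, so replies appear evenly spaced.
public class StaggeredMessages {

    public static void main(String[] args) throws InterruptedException {
        List<String> replies = List.of("First reply", "Second reply", "Third reply");
        double delaySeconds = 1.0; // plays the role of output.delay in the conversation payload

        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        for (int i = 0; i < replies.size(); i++) {
            String text = replies.get(i);
            long delayMillis = (long) (i * delaySeconds * 1000); // same arithmetic as i * delay * 1000 in the diff
            scheduler.schedule(() -> System.out.println(text), delayMillis, TimeUnit.MILLISECONDS);
        }

        scheduler.shutdown(); // already-scheduled delayed tasks still run by default
        scheduler.awaitTermination(10, TimeUnit.SECONDS);
    }
}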
Java
agpl-3.0
598c14c54ad644fa5e33952be2905735c03650a3
0
relateiq/sql-layer,qiuyesuifeng/sql-layer,jaytaylor/sql-layer,qiuyesuifeng/sql-layer,wfxiang08/sql-layer-1,ngaut/sql-layer,relateiq/sql-layer,relateiq/sql-layer,qiuyesuifeng/sql-layer,shunwang/sql-layer-1,jaytaylor/sql-layer,wfxiang08/sql-layer-1,ngaut/sql-layer,wfxiang08/sql-layer-1,shunwang/sql-layer-1,shunwang/sql-layer-1,jaytaylor/sql-layer,wfxiang08/sql-layer-1,qiuyesuifeng/sql-layer,ngaut/sql-layer,jaytaylor/sql-layer,relateiq/sql-layer,ngaut/sql-layer,shunwang/sql-layer-1
/** * Copyright (C) 2011 Akiban Technologies Inc. * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see http://www.gnu.org/licenses. */ package com.akiban.server.api.dml.scan; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.List; import java.util.TreeMap; import com.akiban.server.rowdata.RowData; import com.akiban.server.rowdata.RowDef; import com.akiban.server.rowdata.RowDefCache; import com.akiban.server.rowdata.SchemaFactory; import org.junit.Test; public final class NiceRowTest { @Test public void toRowDataBasic() throws Exception { RowDef rowDef = createRowDef(2); Object[] objects = new Object[2]; objects[0] = 5; objects[1] = "Bob"; RowData rowData = create(rowDef, objects); NewRow newRow = NiceRow.fromRowData(rowData, rowDef); // Why -1: because an __akiban_pk column gets added assertEquals("fields count", 2, newRow.getFields().size() - 1); assertEquals("field[0]", 5L, newRow.get(0)); assertEquals("field[1]", "Bob", newRow.get(1)); compareRowDatas(rowData, newRow.toRowData()); } @Test public void toRowDataLarge() throws Exception { final int NUM = 30; RowDef rowDef = createRowDef(NUM); Object[] objects = new Object[NUM]; objects[0] = 15; objects[1] = "Robert"; for (int i=2; i < NUM; ++i) { objects[i] = i + 1000; } RowData rowData = create(rowDef, objects); NewRow newRow = NiceRow.fromRowData(rowData, rowDef); // Why -1: because an __akiban_pk column gets added assertEquals("fields count", NUM, newRow.getFields().size() - 1); assertEquals("field[0]", 15L, newRow.get(0)); assertEquals("field[1]", "Robert", newRow.get(1)); for (int i=2; i < NUM; ++i) { long expected = i + 1000; assertEquals("field[1]", expected, newRow.get(i)); } compareRowDatas(rowData, newRow.toRowData()); } @Test public void toRowDataSparse() throws Exception { final int NUM = 30; RowDef rowDef = createRowDef(NUM); Object[] objects = new Object[NUM]; objects[0] = 15; objects[1] = "Robert"; int nulls = 0; for (int i=2; i < NUM; ++i) { if ( (i % 3) == 0) { ++nulls; } else { objects[i] = i + 1000; } } assertTrue("nulls==0", nulls > 0); RowData rowData = create(rowDef, objects); NewRow newRow = NiceRow.fromRowData(rowData, rowDef); // Why -1: because an __akiban_pk column gets added assertEquals("fields count", NUM, newRow.getFields().size() - 1); assertEquals("field[0]", 15L, newRow.get(0)); assertEquals("field[1]", "Robert", newRow.get(1)); for (int i=2; i < NUM; ++i) { Long expected = (i % 3) == 0 ? 
null : i + 1000L; assertEquals("field[1]", expected, newRow.get(i)); } compareRowDatas(rowData, newRow.toRowData()); } @Test public void testEquality() { TreeMap<Integer,NiceRow> mapOne = new TreeMap<Integer, NiceRow>(); TreeMap<Integer,NiceRow> mapTwo = new TreeMap<Integer, NiceRow>(); NiceRow rowOne = new NiceRow(1, (RowDef)null); rowOne.put(0, Long.valueOf(0l)); rowOne.put(1, "hello world"); mapOne.put(0, rowOne); NiceRow rowTwo = new NiceRow(1, (RowDef)null); rowTwo.put(0, Long.valueOf(0l)); rowTwo.put(1, "hello world"); mapTwo.put(0, rowTwo); assertEquals("rows", rowOne, rowTwo); assertEquals("maps", mapOne, mapTwo); } private static byte[] bytes() { return new byte[1024]; } private static RowDef createRowDef(int totalColumns) throws Exception { assertTrue("bad totalColumns=" + totalColumns, totalColumns >= 2); String[] ddl = new String[totalColumns + 2]; int i = 0; ddl[i++] = "create table test_table("; ddl[i++] = "id int"; ddl[i++] = ", name varchar(128)"; for (int c = 2; c < totalColumns; c++) { ddl[i++] = String.format(", field_%s int", c); } ddl[i] = ");"; RowDefCache rowDefCache = SCHEMA_FACTORY.rowDefCache(ddl); return rowDefCache.getRowDef("test_schema", "test_table"); } private RowData create(RowDef rowDef, Object[] objects) { RowData rowData = new RowData(bytes()); rowData.createRow(rowDef, objects); assertEquals("start", 0, rowData.getBufferStart()); assertEquals("end and length", rowData.getBufferEnd(), rowData.getBufferLength()); return rowData; } private void compareRowDatas(RowData expected, RowData actual) { if (expected == actual) { return; } List<Byte> expectedBytes = byteListFor(expected); List<Byte> actualBytes = byteListFor(actual); assertEquals("bytes", expectedBytes, actualBytes); } private List<Byte> byteListFor(RowData rowData) { byte[] bytes = rowData.getBytes(); assertNotNull("RowData bytes[] null", bytes); assertTrue("start < 0: " + rowData.getRowStart(), rowData.getRowStart() >= 0); assertTrue("end out of range: " + rowData.getRowEnd(), rowData.getRowEnd() <= bytes.length); List<Byte> bytesList = new ArrayList<Byte>(); for (int i=rowData.getBufferStart(), MAX=rowData.getRowEnd(); i < MAX; ++i) { bytesList.add(bytes[i]); } return bytesList; } private static final SchemaFactory SCHEMA_FACTORY = new SchemaFactory("test_schema"); }
src/test/java/com/akiban/server/api/dml/scan/NiceRowTest.java
/** * Copyright (C) 2011 Akiban Technologies Inc. * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see http://www.gnu.org/licenses. */ package com.akiban.server.api.dml.scan; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.List; import java.util.TreeMap; import com.akiban.server.rowdata.RowData; import com.akiban.server.rowdata.RowDef; import com.akiban.server.rowdata.RowDefCache; import com.akiban.server.rowdata.SchemaFactory; import org.junit.Test; public final class NiceRowTest { @Test public void toRowDataBasic() throws Exception { RowDef rowDef = createRowDef(2); Object[] objects = new Object[2]; objects[0] = 5; objects[1] = "Bob"; RowData rowData = create(rowDef, objects); NewRow newRow = NiceRow.fromRowData(rowData, rowDef); // Why -1: because an __akiban_pk column gets added assertEquals("fields count", 2, newRow.getFields().size() - 1); assertEquals("field[0]", 5L, newRow.get(0)); assertEquals("field[1]", "Bob", newRow.get(1)); compareRowDatas(rowData, newRow.toRowData()); } @Test public void toRowDataLarge() throws Exception { final int NUM = 30; RowDef rowDef = createRowDef(NUM); Object[] objects = new Object[NUM]; objects[0] = 15; objects[1] = "Robert"; for (int i=2; i < NUM; ++i) { objects[i] = i + 1000; } RowData rowData = create(rowDef, objects); NewRow newRow = NiceRow.fromRowData(rowData, rowDef); // Why -1: because an __akiban_pk column gets added assertEquals("fields count", NUM, newRow.getFields().size() - 1); assertEquals("field[0]", 15L, newRow.get(0)); assertEquals("field[1]", "Robert", newRow.get(1)); for (int i=2; i < NUM; ++i) { long expected = i + 1000; assertEquals("field[1]", expected, newRow.get(i)); } compareRowDatas(rowData, newRow.toRowData()); } @Test public void toRowDataSparse() throws Exception { final int NUM = 30; RowDef rowDef = createRowDef(NUM); Object[] objects = new Object[NUM]; objects[0] = 15; objects[1] = "Robert"; int nulls = 0; for (int i=2; i < NUM; ++i) { if ( (i % 3) == 0) { ++nulls; } else { objects[i] = i + 1000; } } assertTrue("nulls==0", nulls > 0); RowData rowData = create(rowDef, objects); NewRow newRow = NiceRow.fromRowData(rowData, rowDef); // Why -1: because an __akiban_pk column gets added assertEquals("fields count", NUM, newRow.getFields().size() - 1); assertEquals("field[0]", 15L, newRow.get(0)); assertEquals("field[1]", "Robert", newRow.get(1)); for (int i=2; i < NUM; ++i) { Long expected = (i % 3) == 0 ? 
null : i + 1000L; assertEquals("field[1]", expected, newRow.get(i)); } compareRowDatas(rowData, newRow.toRowData()); } @Test public void testEquality() { TreeMap<Integer,NiceRow> mapOne = new TreeMap<Integer, NiceRow>(); TreeMap<Integer,NiceRow> mapTwo = new TreeMap<Integer, NiceRow>(); NiceRow rowOne = new NiceRow(1, (RowDef)null); rowOne.put(0, Long.valueOf(0l)); rowOne.put(1, "hello world"); mapOne.put(0, rowOne); NiceRow rowTwo = new NiceRow(1, (RowDef)null); rowTwo.put(0, Long.valueOf(0l)); rowTwo.put(1, "hello world"); mapTwo.put(0, rowTwo); assertEquals("rows", rowOne, rowTwo); assertEquals("maps", mapOne, mapTwo); } private static byte[] bytes() { return new byte[1024]; } private static RowDef createRowDef(int totalColumns) throws Exception { assertTrue("bad totalColumns=" + totalColumns, totalColumns >= 2); String[] ddl = new String[totalColumns + 3]; int i = 0; ddl[i++] = "use test_schema; "; ddl[i++] = "create table test_table("; ddl[i++] = "id int"; ddl[i++] = ", name varchar(128)"; for (int c = 2; c < totalColumns; c++) { ddl[i++] = String.format(", field_%s int", c); } ddl[i] = ") engine = akibandb;"; RowDefCache rowDefCache = SCHEMA_FACTORY.rowDefCache(ddl); return rowDefCache.getRowDef("test_schema", "test_table"); } private RowData create(RowDef rowDef, Object[] objects) { RowData rowData = new RowData(bytes()); rowData.createRow(rowDef, objects); assertEquals("start", 0, rowData.getBufferStart()); assertEquals("end and length", rowData.getBufferEnd(), rowData.getBufferLength()); return rowData; } private void compareRowDatas(RowData expected, RowData actual) { if (expected == actual) { return; } List<Byte> expectedBytes = byteListFor(expected); List<Byte> actualBytes = byteListFor(actual); assertEquals("bytes", expectedBytes, actualBytes); } private List<Byte> byteListFor(RowData rowData) { byte[] bytes = rowData.getBytes(); assertNotNull("RowData bytes[] null", bytes); assertTrue("start < 0: " + rowData.getRowStart(), rowData.getRowStart() >= 0); assertTrue("end out of range: " + rowData.getRowEnd(), rowData.getRowEnd() <= bytes.length); List<Byte> bytesList = new ArrayList<Byte>(); for (int i=rowData.getBufferStart(), MAX=rowData.getRowEnd(); i < MAX; ++i) { bytesList.add(bytes[i]); } return bytesList; } private static final SchemaFactory SCHEMA_FACTORY = new SchemaFactory(); }
Fix NiceRowTest ddl
src/test/java/com/akiban/server/api/dml/scan/NiceRowTest.java
Fix NiceRowTest ddl
<ide><path>rc/test/java/com/akiban/server/api/dml/scan/NiceRowTest.java <ide> <ide> private static RowDef createRowDef(int totalColumns) throws Exception { <ide> assertTrue("bad totalColumns=" + totalColumns, totalColumns >= 2); <del> String[] ddl = new String[totalColumns + 3]; <add> String[] ddl = new String[totalColumns + 2]; <ide> int i = 0; <del> ddl[i++] = "use test_schema; "; <ide> ddl[i++] = "create table test_table("; <ide> ddl[i++] = "id int"; <ide> ddl[i++] = ", name varchar(128)"; <ide> for (int c = 2; c < totalColumns; c++) { <ide> ddl[i++] = String.format(", field_%s int", c); <ide> } <del> ddl[i] = ") engine = akibandb;"; <add> ddl[i] = ");"; <ide> RowDefCache rowDefCache = SCHEMA_FACTORY.rowDefCache(ddl); <ide> return rowDefCache.getRowDef("test_schema", "test_table"); <ide> } <ide> return bytesList; <ide> } <ide> <del> private static final SchemaFactory SCHEMA_FACTORY = new SchemaFactory(); <add> private static final SchemaFactory SCHEMA_FACTORY = new SchemaFactory("test_schema"); <ide> }
Java
apache-2.0
706f3330302221ce52e551da6814f576ef26ed71
0
TeamCanjica/android_packages_apps_DeviceSettings,Carbonite12/android_packages_apps_DeviceSettings,andi34/android_packages_apps_DeviceSettings,Epirex/android_packages_apps_NovathorSettings
/* * Copyright (C) 2014 TeamCanjica https://github.com/TeamCanjica * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.teamcanjica.settings.device; import android.content.Context; import android.content.SharedPreferences; import android.content.res.TypedArray; import android.os.Parcel; import android.os.Parcelable; import android.preference.DialogPreference; import android.preference.Preference; import android.preference.Preference.OnPreferenceChangeListener; import android.preference.PreferenceManager; import android.util.AttributeSet; import android.view.View; import android.widget.SeekBar; import android.widget.Toast; import android.widget.SeekBar.OnSeekBarChangeListener; import android.widget.TextView; import com.teamcanjica.settings.device.fragments.AudioFragmentActivity; public class MasterSeekBarDialogPreference extends DialogPreference implements OnPreferenceChangeListener { private static final int DEFAULT_MIN_PROGRESS = 0; private static final int DEFAULT_MAX_PROGRESS = 100; private static final int DEFAULT_PROGRESS = 0; private int mMinProgress; private int mMaxProgress; private int mProgress; private int stepSize = 0; private CharSequence mProgressTextSuffix; private TextView mProgressText; private SeekBar mSeekBar; private boolean isFloat = false; private static Context mCtx; private static final String FILE_READAHEADKB = "/sys/block/mmcblk0/queue/read_ahead_kb"; private static final String FILE_CPU_VOLTAGE = "/sys/kernel/liveopp/arm_step"; private static final String FILE_CYCLE_CHARGING = "/sys/kernel/abb-fg/fg_cyc"; private static final String FILE_GPU_VOLTAGE = "/sys/kernel/mali/mali_dvfs_config"; private static final int defaultGPUVoltValues[] = {0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2f, 0x30, 0x32, 0x33, 0x34, 0x3f, 0x3f, 0x3f, 0x3f, 0x3f, 0x3f}; private static final int defaultCPUVoltValues[] = {0x18, 0x1a, 0x20, 0x24, 0x2f, 0x32, 0x3f, 0x3f, 0x3f, 0x3f}; private static final double voltSteps[] = {0, 12.5, 25, 37.5, 50, 62.5, 75, 87.5, 100}; public MasterSeekBarDialogPreference(Context context) { this(context, null); } public MasterSeekBarDialogPreference(Context context, AttributeSet attrs) { super(context, attrs); this.setOnPreferenceChangeListener(this); // Get attributes specified in XML TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.MasterSeekBarDialogPreference, 0, 0); try { setMinProgress(a.getInteger(R.styleable.MasterSeekBarDialogPreference_min, DEFAULT_MIN_PROGRESS)); setMaxProgress(a.getInteger(R.styleable.MasterSeekBarDialogPreference_android_max, DEFAULT_MAX_PROGRESS)); setProgressTextSuffix(a.getString(R.styleable.MasterSeekBarDialogPreference_progressTextSuffix)); stepSize = a.getInteger(R.styleable.MasterSeekBarDialogPreference_stepSize, 1); isFloat = a.getBoolean(R.styleable.MasterSeekBarDialogPreference_isFloat, false); } finally { a.recycle(); } // Set layout setDialogLayoutResource(R.layout.preference_seek_bar_dialog); setPositiveButtonText(android.R.string.ok); 
setNegativeButtonText(android.R.string.cancel); setDialogIcon(null); mCtx = context; } @Override protected void onSetInitialValue(boolean restore, Object defaultValue) { setProgress(restore ? getPersistedInt(DEFAULT_PROGRESS) : (Integer) defaultValue); } @Override protected Object onGetDefaultValue(TypedArray a, int index) { return a.getInt(index, DEFAULT_PROGRESS); } @Override protected void onBindDialogView(View view) { super.onBindDialogView(view); TextView dialogMessageText = (TextView) view.findViewById(R.id.text_dialog_message); dialogMessageText.setText(getDialogMessage()); mProgressText = (TextView) view.findViewById(R.id.text_progress); mSeekBar = (SeekBar) view.findViewById(R.id.seek_bar); mSeekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onStopTrackingTouch(SeekBar seekBar) { } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { // Update text that displays the current SeekBar progress value // Note: This does not persist the progress value. that is only ever done in setProgress() String progressStr; double mStepSize = stepSize; if (isFloat) { mStepSize = (double) stepSize / 10; } if (mStepSize >= 1) { progressStr = String.valueOf(Math.round((progress + mMinProgress) / mStepSize) * mStepSize); if (!isFloat) { progressStr = progressStr.substring(0, progressStr.length()-2); } } else { progressStr = String.valueOf(progress + mMinProgress); } mProgressText.setText(mProgressTextSuffix == null ? progressStr : progressStr.concat(mProgressTextSuffix.toString())); } }); mSeekBar.setMax(mMaxProgress - mMinProgress); mSeekBar.setProgress(mProgress - mMinProgress); // mSeekBar.setKeyProgressIncrement(stepSize); } public int getMinProgress() { return mMinProgress; } public void setMinProgress(int minProgress) { mMinProgress = minProgress; setProgress(Math.max(mProgress, mMinProgress)); } public int getMaxProgress() { return mMaxProgress; } public void setMaxProgress(int maxProgress) { mMaxProgress = maxProgress; setProgress(Math.min(mProgress, mMaxProgress)); } public int getProgress() { return mProgress; } public void setProgress(int progress) { progress = Math.max(Math.min(progress, mMaxProgress), mMinProgress); double mStepSize = stepSize; if (isFloat) { mStepSize = (double) stepSize / 10; } if (progress != mProgress) { if (mStepSize >= 1) { progress = (int) (Math.round(progress / mStepSize) * mStepSize); } mProgress = progress; persistInt(progress); notifyChanged(); } } public CharSequence getProgressTextSuffix() { return mProgressTextSuffix; } public void setProgressTextSuffix(CharSequence progressTextSuffix) { mProgressTextSuffix = progressTextSuffix; } @Override protected void onDialogClosed(boolean positiveResult) { super.onDialogClosed(positiveResult); // When the user selects "OK", persist the new value if (positiveResult) { int seekBarProgress = mSeekBar.getProgress() + mMinProgress; if (callChangeListener(seekBarProgress)) { setProgress(seekBarProgress); } } } @Override protected Parcelable onSaveInstanceState() { // Save the instance state so that it will survive screen orientation changes and other events that may temporarily destroy it final Parcelable superState = super.onSaveInstanceState(); // Set the state's value with the class member that holds current setting value final SavedState myState = new SavedState(superState); myState.minProgress = getMinProgress(); myState.maxProgress = getMaxProgress(); myState.progress = 
getProgress(); return myState; } @Override protected void onRestoreInstanceState(Parcelable state) { // Check whether we saved the state in onSaveInstanceState() if (state == null || !state.getClass().equals(SavedState.class)) { // Didn't save the state, so call superclass super.onRestoreInstanceState(state); return; } // Restore the state SavedState myState = (SavedState) state; setMinProgress(myState.minProgress); setMaxProgress(myState.maxProgress); setProgress(myState.progress); super.onRestoreInstanceState(myState.getSuperState()); } private static class SavedState extends BaseSavedState { int minProgress; int maxProgress; int progress; public SavedState(Parcelable superState) { super(superState); } public SavedState(Parcel source) { super(source); minProgress = source.readInt(); maxProgress = source.readInt(); progress = source.readInt(); } @Override public void writeToParcel(Parcel dest, int flags) { super.writeToParcel(dest, flags); dest.writeInt(minProgress); dest.writeInt(maxProgress); dest.writeInt(progress); } @SuppressWarnings("unused") public static final Parcelable.Creator<SavedState> CREATOR = new Parcelable.Creator<SavedState>() { @Override public SavedState createFromParcel(Parcel in) { return new SavedState(in); } @Override public SavedState[] newArray(int size) { return new SavedState[size]; } }; } // PREFERENCE STUFF STARTS HERE @Override public boolean onPreferenceChange(Preference preference, Object newValue) { String key = preference.getKey(); // ReadAheadKB if (key.equals(DeviceSettings.KEY_READAHEADKB)) { Utils.writeValue(FILE_READAHEADKB, String.valueOf((Integer) (Math.round((Integer) newValue / 128 + 1) * 128))); } // CPU Voltage else if (key.equals(DeviceSettings.KEY_CPU_VOLTAGE)) { double currentCPUVolt = Math.round((Integer) newValue / 12.5) * 12.5; int i; for (i = 0; voltSteps[i] != Math.abs(currentCPUVolt); i++) { } if (currentCPUVolt < 0) { i *= -1; } for (int j = 0; j <= defaultCPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_CPU_VOLTAGE + String.valueOf(j), "varm=0x" + Integer.toHexString(defaultCPUVoltValues[j] + i)); } } // GPU Voltage else if (key.equals(DeviceSettings.KEY_GPU_VOLTAGE)) { double currentGPUVolt = Math.round((Integer) newValue / 12.5) * 12.5; int i; for (i = 0; voltSteps[i] != Math.abs(currentGPUVolt); i++) { } for (int j = 0; j <= defaultGPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_GPU_VOLTAGE, j + " vape=0x" + Integer.toHexString(defaultGPUVoltValues[j] - i)); } } // Discharging Threshold else if (key.equals(DeviceSettings.KEY_DISCHARGING_THRESHOLD)) { // Check if discharging threshold value is less than or equal to recharging threshold if ((Integer) newValue <= PreferenceManager.getDefaultSharedPreferences(mCtx). getInt(DeviceSettings.KEY_RECHARGING_THRESHOLD, 5)) { Toast.makeText(mCtx, R.string.invalid_value, Toast.LENGTH_SHORT).show(); return true; } Utils.writeValue(FILE_CYCLE_CHARGING, "dischar=" + String.valueOf((Integer) newValue)); } // Recharging Threshold else if (key.equals(DeviceSettings.KEY_RECHARGING_THRESHOLD)) { // Check if recharging threshold value is greater than or equal to discharging threshold if ((Integer) newValue >= PreferenceManager.getDefaultSharedPreferences(mCtx). 
getInt(DeviceSettings.KEY_DISCHARGING_THRESHOLD, 100)) { Toast.makeText(mCtx, R.string.invalid_value, Toast.LENGTH_SHORT).show(); return true; } Utils.writeValue(FILE_CYCLE_CHARGING, "rechar=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - ADDigGain2 Control else if (key.equals(DeviceSettings.KEY_ADDIGGAIN2_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_ADDIGGAIN2, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - Anagain3 Control else if (key.equals(DeviceSettings.KEY_ANAGAIN3_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_ANAGAIN3, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - ClassDHPG Control else if (key.equals(DeviceSettings.KEY_CLASSDHPG_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_CLASSDHPG, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - ClassDWG Control else if (key.equals(DeviceSettings.KEY_CLASSDWG_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_CLASSDWG, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - EarDigGain Control else if (key.equals(DeviceSettings.KEY_EARDIGGAIN_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_EARDIGGAIN, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - HsLDigGain Control else if (key.equals(DeviceSettings.KEY_HSLDIGGAIN_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_HSLDIGGAIN, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - HsRDigGain Control else if (key.equals(DeviceSettings.KEY_HSRDIGGAIN_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_HSRDIGGAIN, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - LPA Mode Control else if (key.equals(DeviceSettings.KEY_LPA_MODE_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_LPA_MODE, "vape=0x" + Integer.toHexString((Integer) newValue)); } return true; } public static void restore(Context context) { SharedPreferences sharedPrefs = PreferenceManager .getDefaultSharedPreferences(context); // Readahead kB control Utils.writeValue(FILE_READAHEADKB, String.valueOf((Math.round(sharedPrefs. getInt(DeviceSettings.KEY_READAHEADKB, 512) / 128) + 1) * 128)); // ABBamp Audio - ADDigGain2 Control Utils.writeValue(AudioFragmentActivity.FILE_ADDIGGAIN2, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_ADDIGGAIN2_CONTROL, 25)); // ABBamp Audio - Anagain3 Control Utils.writeValue(AudioFragmentActivity.FILE_ANAGAIN3, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_ANAGAIN3_CONTROL, 0)); // ABBamp Audio - ClassDHPG Control Utils.writeValue(AudioFragmentActivity.FILE_CLASSDHPG, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_CLASSDHPG_CONTROL, 10)); // ABBamp Audio - ClassDWG Control Utils.writeValue(AudioFragmentActivity.FILE_CLASSDWG, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_CLASSDWG_CONTROL, 10)); // ABBamp Audio - EarDigGain Control Utils.writeValue(AudioFragmentActivity.FILE_EARDIGGAIN, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_EARDIGGAIN_CONTROL, 4)); // ABBamp Audio - HsLDigGain Control Utils.writeValue(AudioFragmentActivity.FILE_HSLDIGGAIN, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_HSLDIGGAIN_CONTROL, 4)); // ABBamp Audio - HsRDigGain Control Utils.writeValue(AudioFragmentActivity.FILE_HSRDIGGAIN, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_HSRDIGGAIN_CONTROL, 4)); // ABBamp Audio - LPA Mode Control Utils.writeValue(AudioFragmentActivity.FILE_LPA_MODE, "vape=0x" + sharedPrefs. 
getInt(DeviceSettings.KEY_LPA_MODE_CONTROL, 16)); // Cycle Charging - Discharging threshold Utils.writeValue(FILE_CYCLE_CHARGING, "dischar=" + sharedPrefs. getInt(DeviceSettings.KEY_DISCHARGING_THRESHOLD, 100)); // Cycle Charging - Recharging threshold Utils.writeValue(FILE_CYCLE_CHARGING, "rechar=" + sharedPrefs. getInt(DeviceSettings.KEY_RECHARGING_THRESHOLD, 5)); // CPU Voltage int i; double currentCPUVolt = Math.round(sharedPrefs. getInt(DeviceSettings.KEY_CPU_VOLTAGE, 0) / 12.5) * 12.5; for (i = 0; voltSteps[i] != Math.abs(currentCPUVolt); i++) { } if (currentCPUVolt < 0) { i *= -1; } for (int j = 0; j <= defaultCPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_CPU_VOLTAGE + String.valueOf(j), "varm=0x" + Integer.toHexString(defaultCPUVoltValues[j] + i)); } // GPU Voltage double currentGPUVolt = Math.round(sharedPrefs. getInt(DeviceSettings.KEY_GPU_VOLTAGE, 0) / 12.5) * 12.5; for (i = 0; voltSteps[i] != Math.abs(currentGPUVolt); i++) { } for (int j = 0; j <= defaultGPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_GPU_VOLTAGE, j + " vape=0x" + Integer.toHexString(defaultGPUVoltValues[j] - i)); } } }
src/com/teamcanjica/settings/device/MasterSeekBarDialogPreference.java
/* * Copyright (C) 2014 TeamCanjica https://github.com/TeamCanjica * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.teamcanjica.settings.device; import android.content.Context; import android.content.SharedPreferences; import android.content.res.TypedArray; import android.os.Parcel; import android.os.Parcelable; import android.preference.DialogPreference; import android.preference.Preference; import android.preference.Preference.OnPreferenceChangeListener; import android.preference.PreferenceManager; import android.util.AttributeSet; import android.view.View; import android.widget.SeekBar; import android.widget.Toast; import android.widget.SeekBar.OnSeekBarChangeListener; import android.widget.TextView; import com.teamcanjica.settings.device.fragments.AudioFragmentActivity; public class MasterSeekBarDialogPreference extends DialogPreference implements OnPreferenceChangeListener { private static final int DEFAULT_MIN_PROGRESS = 0; private static final int DEFAULT_MAX_PROGRESS = 100; private static final int DEFAULT_PROGRESS = 0; private int mMinProgress; private int mMaxProgress; private int mProgress; private int stepSize = 0; private CharSequence mProgressTextSuffix; private TextView mProgressText; private SeekBar mSeekBar; private boolean isFloat = false; private static Context mCtx; private static final String FILE_READAHEADKB = "/sys/block/mmcblk0/queue/read_ahead_kb"; private static final String FILE_CPU_VOLTAGE = "/sys/kernel/liveopp/arm_step"; private static final String FILE_CYCLE_CHARGING = "/sys/kernel/abb-fg/fg_cyc"; private static final String FILE_GPU_VOLTAGE = "/sys/kernel/mali/mali_dvfs_config"; private static final int defaultGPUVoltValues[] = {0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x26, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2f, 0x30, 0x32, 0x33, 0x34, 0x3f, 0x3f, 0x3f, 0x3f, 0x3f, 0x3f}; private static final int defaultCPUVoltValues[] = {0x18, 0x1a, 0x20, 0x24, 0x2f, 0x32, 0x3f, 0x3f, 0x3f, 0x3f}; private static final double voltSteps[] = {0, 12.5, 25, 37.5, 50, 62.5, 75, 87.5, 100}; public MasterSeekBarDialogPreference(Context context) { this(context, null); } public MasterSeekBarDialogPreference(Context context, AttributeSet attrs) { super(context, attrs); this.setOnPreferenceChangeListener(this); // Get attributes specified in XML TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.MasterSeekBarDialogPreference, 0, 0); try { setMinProgress(a.getInteger(R.styleable.MasterSeekBarDialogPreference_min, DEFAULT_MIN_PROGRESS)); setMaxProgress(a.getInteger(R.styleable.MasterSeekBarDialogPreference_android_max, DEFAULT_MAX_PROGRESS)); setProgressTextSuffix(a.getString(R.styleable.MasterSeekBarDialogPreference_progressTextSuffix)); stepSize = a.getInteger(R.styleable.MasterSeekBarDialogPreference_stepSize, 1); isFloat = a.getBoolean(R.styleable.MasterSeekBarDialogPreference_isFloat, false); } finally { a.recycle(); } // Set layout setDialogLayoutResource(R.layout.preference_seek_bar_dialog); setPositiveButtonText(android.R.string.ok); 
setNegativeButtonText(android.R.string.cancel); setDialogIcon(null); mCtx = context; } @Override protected void onSetInitialValue(boolean restore, Object defaultValue) { setProgress(restore ? getPersistedInt(DEFAULT_PROGRESS) : (Integer) defaultValue); } @Override protected Object onGetDefaultValue(TypedArray a, int index) { return a.getInt(index, DEFAULT_PROGRESS); } @Override protected void onBindDialogView(View view) { super.onBindDialogView(view); TextView dialogMessageText = (TextView) view.findViewById(R.id.text_dialog_message); dialogMessageText.setText(getDialogMessage()); mProgressText = (TextView) view.findViewById(R.id.text_progress); mSeekBar = (SeekBar) view.findViewById(R.id.seek_bar); mSeekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() { @Override public void onStopTrackingTouch(SeekBar seekBar) { } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { // Update text that displays the current SeekBar progress value // Note: This does not persist the progress value. that is only ever done in setProgress() String progressStr; double mStepSize = stepSize; if (isFloat) { mStepSize = (double) stepSize / 10; } if (mStepSize >= 1) { progressStr = String.valueOf(Math.round((progress + mMinProgress) / mStepSize) * mStepSize); if (!isFloat) { progressStr = progressStr.substring(0, progressStr.length()-2); } } else { progressStr = String.valueOf(progress + mMinProgress); } mProgressText.setText(mProgressTextSuffix == null ? progressStr : progressStr.concat(mProgressTextSuffix.toString())); } }); mSeekBar.setMax(mMaxProgress - mMinProgress); mSeekBar.setProgress(mProgress - mMinProgress); // mSeekBar.setKeyProgressIncrement(stepSize); } public int getMinProgress() { return mMinProgress; } public void setMinProgress(int minProgress) { mMinProgress = minProgress; setProgress(Math.max(mProgress, mMinProgress)); } public int getMaxProgress() { return mMaxProgress; } public void setMaxProgress(int maxProgress) { mMaxProgress = maxProgress; setProgress(Math.min(mProgress, mMaxProgress)); } public int getProgress() { return mProgress; } public void setProgress(int progress) { progress = Math.max(Math.min(progress, mMaxProgress), mMinProgress); double mStepSize = stepSize; if (isFloat) { mStepSize = (double) stepSize / 10; } if (progress != mProgress) { if (mStepSize >= 1) { progress = (int) (Math.round(progress / mStepSize) * mStepSize); } mProgress = progress; persistInt(progress); notifyChanged(); } } public CharSequence getProgressTextSuffix() { return mProgressTextSuffix; } public void setProgressTextSuffix(CharSequence progressTextSuffix) { mProgressTextSuffix = progressTextSuffix; } @Override protected void onDialogClosed(boolean positiveResult) { super.onDialogClosed(positiveResult); // When the user selects "OK", persist the new value if (positiveResult) { int seekBarProgress = mSeekBar.getProgress() + mMinProgress; if (callChangeListener(seekBarProgress)) { setProgress(seekBarProgress); } } } @Override protected Parcelable onSaveInstanceState() { // Save the instance state so that it will survive screen orientation changes and other events that may temporarily destroy it final Parcelable superState = super.onSaveInstanceState(); // Set the state's value with the class member that holds current setting value final SavedState myState = new SavedState(superState); myState.minProgress = getMinProgress(); myState.maxProgress = getMaxProgress(); myState.progress = 
getProgress(); return myState; } @Override protected void onRestoreInstanceState(Parcelable state) { // Check whether we saved the state in onSaveInstanceState() if (state == null || !state.getClass().equals(SavedState.class)) { // Didn't save the state, so call superclass super.onRestoreInstanceState(state); return; } // Restore the state SavedState myState = (SavedState) state; setMinProgress(myState.minProgress); setMaxProgress(myState.maxProgress); setProgress(myState.progress); super.onRestoreInstanceState(myState.getSuperState()); } private static class SavedState extends BaseSavedState { int minProgress; int maxProgress; int progress; public SavedState(Parcelable superState) { super(superState); } public SavedState(Parcel source) { super(source); minProgress = source.readInt(); maxProgress = source.readInt(); progress = source.readInt(); } @Override public void writeToParcel(Parcel dest, int flags) { super.writeToParcel(dest, flags); dest.writeInt(minProgress); dest.writeInt(maxProgress); dest.writeInt(progress); } @SuppressWarnings("unused") public static final Parcelable.Creator<SavedState> CREATOR = new Parcelable.Creator<SavedState>() { @Override public SavedState createFromParcel(Parcel in) { return new SavedState(in); } @Override public SavedState[] newArray(int size) { return new SavedState[size]; } }; } // PREFERENCE STUFF STARTS HERE @Override public boolean onPreferenceChange(Preference preference, Object newValue) { String key = preference.getKey(); // ReadAheadKB if (key.equals(DeviceSettings.KEY_READAHEADKB)) { Utils.writeValue(FILE_READAHEADKB, String.valueOf((Integer) (Math.round((Integer) newValue / 128 + 1) * 128))); } // CPU Voltage else if (key.equals(DeviceSettings.KEY_CPU_VOLTAGE)) { double currentCPUVolt = Math.round((Integer) newValue / 12.5) * 12.5; int i; for (i = 0; voltSteps[i] != Math.abs(currentCPUVolt); i++) { } if (currentCPUVolt < 0) { i *= -1; } for (int j = 0; j <= defaultCPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_CPU_VOLTAGE + String.valueOf(j), "varm=0x" + Integer.toHexString(defaultCPUVoltValues[j] + i)); } } // GPU Voltage else if (key.equals(DeviceSettings.KEY_GPU_VOLTAGE)) { double currentGPUVolt = Math.round((Integer) newValue / 12.5) * 12.5; int i; for (i = 0; voltSteps[i] != Math.abs(currentGPUVolt); i++) { } for (int j = 0; j <= defaultGPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_GPU_VOLTAGE, j + " vape=0x" + Integer.toHexString(defaultGPUVoltValues[j] - i)); } } // Discharging Threshold else if (key.equals(DeviceSettings.KEY_DISCHARGING_THRESHOLD)) { // Check if discharging threshold value is less than or equal to recharging threshold if ((Integer) newValue <= PreferenceManager.getDefaultSharedPreferences(mCtx). getInt(DeviceSettings.KEY_RECHARGING_THRESHOLD, 5)) { Toast.makeText(mCtx, R.string.invalid_value, Toast.LENGTH_SHORT).show(); return true; } Utils.writeValue(FILE_CYCLE_CHARGING, "dischar=" + String.valueOf((Integer) newValue)); } // Recharging Threshold else if (key.equals(DeviceSettings.KEY_RECHARGING_THRESHOLD)) { // Check if recharging threshold value is greater than or equal to discharging threshold if ((Integer) newValue >= PreferenceManager.getDefaultSharedPreferences(mCtx). 
getInt(DeviceSettings.KEY_DISCHARGING_THRESHOLD, 100)) { Toast.makeText(mCtx, R.string.invalid_value, Toast.LENGTH_SHORT).show(); return true; } Utils.writeValue(FILE_CYCLE_CHARGING, "rechar=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - ADDigGain2 Control else if (key.equals(DeviceSettings.KEY_ADDIGGAIN2_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_ADDIGGAIN2, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - Anagain3 Control else if (key.equals(DeviceSettings.KEY_ANAGAIN3_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_ANAGAIN3, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - ClassDHPG Control else if (key.equals(DeviceSettings.KEY_CLASSDHPG_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_CLASSDHPG, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - ClassDWG Control else if (key.equals(DeviceSettings.KEY_CLASSDWG_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_CLASSDWG, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - EarDigGain Control else if (key.equals(DeviceSettings.KEY_EARDIGGAIN_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_EARDIGGAIN, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - HsLDigGain Control else if (key.equals(DeviceSettings.KEY_HSLDIGGAIN_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_HSLDIGGAIN, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - HsRDigGain Control else if (key.equals(DeviceSettings.KEY_HSRDIGGAIN_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_HSRDIGGAIN, "gain=" + String.valueOf((Integer) newValue)); } // ABBamp Audio - LPA Mode Control else if (key.equals(DeviceSettings.KEY_LPA_MODE_CONTROL)) { Utils.writeValue(AudioFragmentActivity.FILE_LPA_MODE, "vape=0x" + Integer.toHexString((Integer) newValue)); } return true; } public static void restore(Context context) { SharedPreferences sharedPrefs = PreferenceManager .getDefaultSharedPreferences(context); // Readahead kB control Utils.writeValue(FILE_READAHEADKB, String.valueOf((Math.round(sharedPrefs. getInt(DeviceSettings.KEY_READAHEADKB, 512) / 128) + 1) * 128)); // ABBamp Audio - ADDigGain2 Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_ADDIGGAIN2, false)) { Utils.writeValue(AudioFragmentActivity.FILE_ADDIGGAIN2, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_ADDIGGAIN2_CONTROL, 25)); } // ABBamp Audio - Anagain3 Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_ANAGAIN3, false)) { Utils.writeValue(AudioFragmentActivity.FILE_ANAGAIN3, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_ANAGAIN3_CONTROL, 0)); } // ABBamp Audio - ClassDHPG Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_CLASSDHPG, false)) { Utils.writeValue(AudioFragmentActivity.FILE_CLASSDHPG, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_CLASSDHPG_CONTROL, 10)); } // ABBamp Audio - ClassDWG Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_CLASSDWG, false)) { Utils.writeValue(AudioFragmentActivity.FILE_CLASSDWG, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_CLASSDWG_CONTROL, 10)); } // ABBamp Audio - EarDigGain Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_EARDIGGAIN, false)) { Utils.writeValue(AudioFragmentActivity.FILE_EARDIGGAIN, "gain=" + sharedPrefs. 
getInt(DeviceSettings.KEY_EARDIGGAIN_CONTROL, 4)); } // ABBamp Audio - HsLDigGain Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_HSLDIGGAIN, false)) { Utils.writeValue(AudioFragmentActivity.FILE_HSLDIGGAIN, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_HSLDIGGAIN_CONTROL, 4)); } // ABBamp Audio - HsRDigGain Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_HSRDIGGAIN, false)) { Utils.writeValue(AudioFragmentActivity.FILE_HSRDIGGAIN, "gain=" + sharedPrefs. getInt(DeviceSettings.KEY_HSRDIGGAIN_CONTROL, 4)); } // ABBamp Audio - LPA Mode Control if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_LPA_MODE, false)) { Utils.writeValue(AudioFragmentActivity.FILE_LPA_MODE, "vape=0x" + sharedPrefs. getInt(DeviceSettings.KEY_LPA_MODE_CONTROL, 16)); } // Cycle Charging - Discharging threshold Utils.writeValue(FILE_CYCLE_CHARGING, "dischar=" + sharedPrefs. getInt(DeviceSettings.KEY_DISCHARGING_THRESHOLD, 100)); // Cycle Charging - Recharging threshold Utils.writeValue(FILE_CYCLE_CHARGING, "rechar=" + sharedPrefs. getInt(DeviceSettings.KEY_RECHARGING_THRESHOLD, 5)); // CPU Voltage int i; double currentCPUVolt = Math.round(sharedPrefs. getInt(DeviceSettings.KEY_CPU_VOLTAGE, 0) / 12.5) * 12.5; for (i = 0; voltSteps[i] != Math.abs(currentCPUVolt); i++) { } if (currentCPUVolt < 0) { i *= -1; } for (int j = 0; j <= defaultCPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_CPU_VOLTAGE + String.valueOf(j), "varm=0x" + Integer.toHexString(defaultCPUVoltValues[j] + i)); } // GPU Voltage double currentGPUVolt = Math.round(sharedPrefs. getInt(DeviceSettings.KEY_GPU_VOLTAGE, 0) / 12.5) * 12.5; for (i = 0; voltSteps[i] != Math.abs(currentGPUVolt); i++) { } for (int j = 0; j <= defaultGPUVoltValues.length - 1; j++) { Utils.writeValue(FILE_GPU_VOLTAGE, j + " vape=0x" + Integer.toHexString(defaultGPUVoltValues[j] - i)); } } }
Audio : Remove restore workaround. We fixed this in the kernel source with this commit: https://github.com/TeamCanjica/Samsung_STE_Kernel/commit/1604c2796087644c2dbaedbe47b9860457155647 (thanks @cocafe).
src/com/teamcanjica/settings/device/MasterSeekBarDialogPreference.java
Audio : Remove restore workaround
<ide><path>rc/com/teamcanjica/settings/device/MasterSeekBarDialogPreference.java <ide> getInt(DeviceSettings.KEY_READAHEADKB, 512) / 128) + 1) * 128)); <ide> <ide> // ABBamp Audio - ADDigGain2 Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_ADDIGGAIN2, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_ADDIGGAIN2, <ide> "gain=" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_ADDIGGAIN2_CONTROL, 25)); <del> } <del> <add> <ide> // ABBamp Audio - Anagain3 Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_ANAGAIN3, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_ANAGAIN3, <ide> "gain=" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_ANAGAIN3_CONTROL, 0)); <del> } <del> <add> <ide> // ABBamp Audio - ClassDHPG Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_CLASSDHPG, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_CLASSDHPG, <ide> "gain=" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_CLASSDHPG_CONTROL, 10)); <del> } <del> <add> <ide> // ABBamp Audio - ClassDWG Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_CLASSDWG, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_CLASSDWG, <ide> "gain=" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_CLASSDWG_CONTROL, 10)); <del> } <del> <add> <ide> // ABBamp Audio - EarDigGain Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_EARDIGGAIN, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_EARDIGGAIN, <ide> "gain=" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_EARDIGGAIN_CONTROL, 4)); <del> } <del> <add> <ide> // ABBamp Audio - HsLDigGain Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_HSLDIGGAIN, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_HSLDIGGAIN, <ide> "gain=" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_HSLDIGGAIN_CONTROL, 4)); <del> } <del> <add> <ide> // ABBamp Audio - HsRDigGain Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_HSRDIGGAIN, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_HSRDIGGAIN, <ide> "gain=" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_HSRDIGGAIN_CONTROL, 4)); <del> } <ide> <ide> // ABBamp Audio - LPA Mode Control <del> if (sharedPrefs.getBoolean(DeviceSettings.KEY_ENABLE_LPA_MODE, false)) { <ide> Utils.writeValue(AudioFragmentActivity.FILE_LPA_MODE, <ide> "vape=0x" + sharedPrefs. <ide> getInt(DeviceSettings.KEY_LPA_MODE_CONTROL, 16)); <del> } <del> <add> <ide> // Cycle Charging - Discharging threshold <ide> Utils.writeValue(FILE_CYCLE_CHARGING, <ide> "dischar=" + sharedPrefs.
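The preference handlers in the record above translate a voltage-offset preference (in millivolts) into an index offset: the chosen offset is rounded to the nearest 12.5 mV step, the matching index is located in voltSteps, and each default register value is then rewritten as a hex string such as varm=0x18. The following is a minimal, stand-alone Java sketch of that index-offset technique; the step table and default values are illustrative placeholders rather than the device's real tables, and the sysfs write is replaced by a println.

// Sketch: apply a user-selected voltage offset (in mV) to a table of default
// register values, mirroring the loop used in onPreferenceChange()/restore().
// Tables are placeholders; assumes the offset maps inside VOLT_STEPS.
public class VoltageOffsetSketch {

    private static final double[] VOLT_STEPS = {0, 12.5, 25, 37.5, 50, 62.5, 75, 87.5, 100};
    private static final int[] DEFAULT_STEPS = {0x18, 0x1a, 0x20, 0x24, 0x2f, 0x32};

    // Translate a millivolt offset into a signed index offset into VOLT_STEPS.
    static int stepOffset(int offsetMv) {
        double rounded = Math.round(offsetMv / 12.5) * 12.5;
        int i = 0;
        while (VOLT_STEPS[i] != Math.abs(rounded)) {
            i++;
        }
        return rounded < 0 ? -i : i;
    }

    public static void main(String[] args) {
        int offset = stepOffset(-25); // two 12.5 mV steps down
        for (int j = 0; j < DEFAULT_STEPS.length; j++) {
            // The real code writes this string to a sysfs node via Utils.writeValue().
            System.out.println("varm=0x" + Integer.toHexString(DEFAULT_STEPS[j] + offset));
        }
    }
}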
JavaScript
mit
c675c3464543b8ea76f40036b20f8d58e25ab3c2
0
Eloqua/sproutcore,Eloqua/sproutcore,Eloqua/sproutcore
// ========================================================================== // Project: SproutCore - JavaScript Application Framework // Copyright: ©2006-2009 Sprout Systems, Inc. and contributors. // Portions ©2008-2009 Apple Inc. All rights reserved. // License: Licened under MIT license (see license.js) // ========================================================================== /** @namespace Implements some enhancements to the built-in Number object that makes it easier to handle rounding and display of numbers. @since SproutCore 1.0 @author Colin Campbell */ SC.Number = /** @lends Number */ { /** Checks to see if the number is near the supplied parameter to a certain lambda. @param {Number} number Number to test for closeness @param {Number} lambda The closeness sufficient for a positive result. Default 0.00001 @returns {Boolean} */ near: function(number, lambda) { if (!lambda) lambda = 0.00001; return Math.abs(this.valueOf() - number) <= lambda; }, /** Rounds the number to a given decimal place. If a negative decimalPlace parameter is provided, the number will be rounded outward (ie. providing -3 will round to the thousands). Function is insufficient for high negative values of decimalPlace parameter. For example, (123456.789).round(-5) should evaluate to 100000 but instead evaluates to 99999.999... @param {Integer} decimalPlace @returns {Number} */ round: function(decimalPlace) { if (!decimalPlace) decimalPlace = 0; var factor = Math.pow(10, decimalPlace); n = this.valueOf(); return Math.round(n * factor) / factor; } }; // Apply SC.Number mixin to built-in Number object SC.supplement(Number.prototype, SC.Number) ;
frameworks/runtime/mixins/number.js
// ========================================================================== // Project: SproutCore - JavaScript Application Framework // Copyright: ©2006-2009 Sprout Systems, Inc. and contributors. // Portions ©2008-2009 Apple Inc. All rights reserved. // License: Licened under MIT license (see license.js) // ========================================================================== /** @namespace Implements some enhancements to the built-in Number object that makes it easier to handle rounding and display of numbers. @since SproutCore 1.0 @author Colin Campbell */ SC.Number = /** @lends Number */ { /** Checks to see if the number is near the supplied parameter to a certain lambda. @param {Number} number Number to test for closeness @param {Number} lambda The closeness sufficient for a positive result. Default 0.00001 @returns {Boolean} */ near: function(number, lambda) { if (!lambda) lambda = 0.00001; return Math.abs(this.valueOf() - number) <= lambda; }, /** Rounds the number to a given decimal place. If a negative decimalPlace parameter is provided, the number will be rounded outward (ie. providing -3 will round to the thousands). Function is insufficient for high negative values of decimalPlace parameter. For example, (123456.789).round(-5) should evaluate to 100000 but instead evaluates to 99999.999... @param {Integer} decimalPlace @returns {Number} */ round: function(decimalPlace) { if (!decimalPlace) decimalPlace = 0; var factor = Math.pow(10, decimalPlace); n = this.valueOf(); SC.Logger.log(n, factor, n * factor, Math.round(n * factor), Math.round(n * factor) / factor); return Math.round(n * factor) / factor; } }; // Apply SC.Number mixin to built-in Number object SC.supplement(Number.prototype, SC.Number) ;
Removed SC.Logger statement from SC.Number.round()
frameworks/runtime/mixins/number.js
Removed SC.Logger statement from SC.Number.round()
<ide><path>rameworks/runtime/mixins/number.js <ide> if (!decimalPlace) decimalPlace = 0; <ide> var factor = Math.pow(10, decimalPlace); <ide> n = this.valueOf(); <del> SC.Logger.log(n, factor, n * factor, Math.round(n * factor), Math.round(n * factor) / factor); <ide> return Math.round(n * factor) / factor; <ide> } <ide>
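SC.Number.round() in the record above rounds to a given decimal place by scaling with a power-of-ten factor, rounding, and dividing back out; a negative decimalPlace rounds outward to tens, hundreds, and so on, subject to the floating-point caveat its doc comment notes for large negative places. A short stand-alone Java sketch of the same technique, for illustration only:

// Sketch of round-to-decimal-place via a power-of-ten factor,
// mirroring Math.round(n * factor) / factor from SC.Number.round().
public class RoundSketch {

    static double round(double n, int decimalPlace) {
        double factor = Math.pow(10, decimalPlace);
        return Math.round(n * factor) / factor;
    }

    public static void main(String[] args) {
        System.out.println(round(123.456789, 2));   // 123.46
        System.out.println(round(123456.789, -3));  // 123000.0 (rounds outward to the thousands)
        // Large negative places hit double-precision limits, as the doc comment warns:
        System.out.println(round(123456.789, -5));  // may print 99999.99999999999 instead of 100000.0
    }
}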
JavaScript
bsd-3-clause
c8c7ed4ac1fce94c302ce8351996ce74b5cef237
0
Machyne/pal,Machyne/pal,Machyne/pal,Machyne/pal
var queryPAL = function(query, usdat, clidat, callback) { $.ajax({ type: 'POST', url: '/api/pal', data: { query: query, client: 'web', 'user-data': usdat, 'client-data': clidat }, success: function (response) { callback(query, response); }, error: function () { console.log('server error'); $('.prompt').removeAttr('disabled'); $('#go-btn').removeAttr('disabled'); } }); }; function expandData (el) { $(el).parent().toggleClass('expanded'); return true; } (function(d, s, id) { var js, fjs = d.getElementsByTagName(s)[0]; if (d.getElementById(id)) return; js = d.createElement(s); js.id = id; js.src = "//connect.facebook.net/en_US/sdk.js"; fjs.parentNode.insertBefore(js, fjs); }(document, 'script', 'facebook-jssdk')); function handleFacebook(payload) { var $history = $('.history'); FB.getLoginStatus(function(response) { if (response.status === 'connected') { // post if app is authorized FB.api('/me/feed', 'post', {message: payload.data}, function(response) { // get rid of login stuff if it was presented $('#facebook_login').remove(); // show confirmation that the post was successful var message = "Ok, I've posted that to Facebook"; $history.prepend('<li><div class="result">' + message + '</div></li>'); speakIfAppropriate(message); }); } else { // gotta show the login button (to get around popup blocker) var fb_login_button = '<fb:login-button max_rows="1" size="large" show_faces="false" '+ '"auto_logout_link="false" scope="publish_actions"></fb:login-button>'; var message = "Before I can post, you'll need to login to Facebook" $history.prepend('<li id="facebook_login"><div class="result">' + message +'<br>' + fb_login_button + '</div>'); speakIfAppropriate(message); FB.XFBML.parse(document.getElementById('.history')); // changes XFBML to valid HTML fbMessage = payload; // remember the message if/when the user gets logged in (async is hell) } $('#prompt').removeAttr('disabled'); $('#go-btn').removeAttr('disabled'); }); } function speakIfAppropriate(message) { if($('#speak-check').is(':checked')) { var utterance = new SpeechSynthesisUtterance(message); utterance.rate = 1.1; if ('maleVoice' in window){ utterance.voice = window.maleVoice; utterance.lang = utterance.voice.lang; console.log(window.maleVoice.name + " is speaking."); } window.speechSynthesis.speak(utterance); } } function chooseVoice() { // One-liner to query the options: // $.each(window.speechSynthesis.getVoices(), function(index, voice) { voice.lang.indexOf("es") !== -1 && console.log(voice) }) var maleVoices = [ "Google UK English Male", // Sexy British male "Daniel", // Generic British male "Fred", // Stephen Hawking-ish "Alex", // Polite American "Bruce", // Fast robot "Ralph", // Deep robot "English United Kingdom" // the only male voice available on Android ]; var languageVoices = { deu: ["Google Deutsch", "Anna"], spa: ["Google Español", "Diego"], fra: ["Google Français", "Thomas"], ita: ["Google Italiano", "Alice"], por: ["Google Español", "Diego"] // Spanish is closer than any of the english voices }; var voices = window.speechSynthesis.getVoices(); if (voices.length > 0) { var voiceOptions = {}; $.each(voices, function(index, voice){ voiceOptions[voice.name] = voice; }); window.voiceOptions = voiceOptions; // filter the male voices var filteredVoices = {}; $.each(voices, function(index, voice){ if ($.inArray(voice.name, maleVoices) !== -1){ filteredVoices[voice.name] = voice; } }); // pick the first one in maleVoices order for(var i=0; i<maleVoices.length; i++) { var voiceName = maleVoices[i]; if 
(filteredVoices.hasOwnProperty(voiceName)) { window.maleVoice = filteredVoices[voiceName]; return window.maleVoice; } } return null; } } function attributionImageForService(service) { // return HTML for with logo for API attribution switch(service) { case "weather": var yahooImage ='./static/yahoo_purple_retina.png' return '<a href="https://www.yahoo.com/?ilc=401" target="_blank"> <img src="' + yahooImage + '" height="25"/></a>'; case "yelp": var yelpImage = './static/yelp_logo_50x25.png'; return '<a href="http://yelp.com/" target="_blank"> <img src="' + yelpImage + '" width="50" height="25"/></a>' case "wa": var waImage = './static/wa-logo.jpg' return '<a href="http://wolframalpha.com/" target="_blank"> <img src="' + waImage + '" height="25"/></a>' // TODO: Movies once it's fixed default: return ""; } } var mapGo; $(document).ready(function () { var $prompt = $('#prompt'); var $goBtn = $('#go-btn'); var $userData = $('#user-data'); var $speakCheck = $('#speak-check'); var $history = $('.history'); var lastQuery = ''; FB.init({ appId : '363891403803678', xfbml : true, version : 'v2.2', cookie : true }); FB.Event.subscribe('auth.authResponseChange', function(fbResponse) { handleFacebook(fbMessage); }); // show speak checkbox only if browser supports it if ('SpeechSynthesisUtterance' in window && !navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false) { $("#speak").show(); $goBtn.on("click", chooseVoice); // load user preference on speech from cookie if (document.cookie) { if (document.cookie.indexOf('speech=true') > -1) { // speech=true is in the cookie $speakCheck.attr("checked", true); } } } // FOR THE LOVE OF GOD PLEASE COMMENT ME WHOEVER WROTE THIS var showResult = function (query, response) { var result = response.result; var service = response.service; // external stuff if (result.status == 4) { if (result.external === 'facebook') { handleFacebook(result.payload); return; } } else if (result.status == 3) { var needs = result.needs_client; var keys = Object.keys(needs); var sendError = function (msg) { $userData.html(''); $history.prepend('<li class="error"><div class="query">' + query + '</div><div class="result">' + msg + '</div></li>'); }; var handleIndex = function (i, data) { if (i >= keys.length) { queryPAL(query, getUserData(), data, showResult); return true; } var need = keys[i]; var type = needs[need].type; var msg = needs[need].msg; switch (type) { case 'loc': if (navigator.geolocation) { navigator.geolocation.getCurrentPosition(function (idx) { return function (pos) { data[need] = pos.coords.latitude + ',' + pos.coords.longitude; return handleIndex(idx + 1, data); }; }(i), function (posError) { return sendError(msg); }); } else { return sendError(msg); } break; default: return sendError(msg); } }; handleIndex(0, {}); } else if (result.status == 2) { $userData.html(''); for (need in result.needs_user) { var type = result.needs_user[need].type; var def = result.needs_user[need].default; switch (type) { case 'str': $userData.append( '<li data-type="' + type + '" data-param="' + need + '">' + need + ': ' + '<input type="text" value="' + def + '"></li>'); break; default: console.log('unknown requested data type') } } } else if (result.status == 1) { $userData.html(''); var data = ''; if (result.hasOwnProperty('data')) { data = '<div class="data"><span class="data-toggler" onclick="expandData(this);">...</span>' + result.data.replace(/\n+/ig, '<br>') + '</div>' } var prependString = '<li><div class="query">' + query + '</div><div class="result">' + 
result.summary.replace(/\n+/ig, '<br>') + data + '<div class="attribution">' + attributionImageForService(service) + '</div></div></li>'; $history.prepend(prependString); } else { $userData.html(''); $history.prepend('<li class="error"><div class="query">' + query + '</div><div class="result">' + result.summary.replace(/\n+/ig, '<br>') + '</div></li>'); } if ($speakCheck.is(':checked') && result.status <= 1) { // to avoid pronouncing 'li' etc. var no_html = result.summary.replace(/(<([^>]+)>)/ig, ''); speakIfAppropriate(no_html); } $prompt.val('') .focus() .removeAttr('disabled'); $goBtn.removeAttr('disabled'); }; var getUserData = function () { var ret = {}; $userData.find('li').each(function () { var li = $(this); var need = li.attr('data-param'); var type = li.attr('data-type'); switch (type) { case 'str': ret[need] = li.find('input').val(); break; } }); return ret; }; var sendQuery = function () { var query = $prompt.val(); ($speakCheck.is(":checked") && !window.maleVoice) && chooseVoice(); if (query.length > 0) { $prompt.attr('disabled', 'disabled'); $goBtn.attr('disabled', 'disabled'); lastQuery = query; queryPAL(query, getUserData(), {}, showResult); } }; $prompt.on('keypress', function (e) { // 'enter' key if (e.which == 13) { sendQuery(); } }); // remember if user has checked the speech checbox $speakCheck.on('click', function(event) { if($speakCheck.is(':checked')) { document.cookie = 'speech=true;'; } else { document.cookie = 'speech=false;'; } }); $goBtn.on('click', sendQuery); mapGo = function (el) { var div = $(el).parent(); var userData = getUserData(); var lat = div.find('.lat').val(); var lng = div.find('.lng').val(); userData['location'] = lat + ',' + lng; queryPAL(div.find('.q').val(), userData, {}, showResult); }; });
static/home.js
var queryPAL = function(query, usdat, clidat, callback) { $.ajax({ type: 'POST', url: '/api/pal', data: { query: query, client: 'web', 'user-data': usdat, 'client-data': clidat }, success: function (response) { callback(query, response); }, error: function () { console.log('server error'); $('.prompt').removeAttr('disabled'); $('#go-btn').removeAttr('disabled'); } }); }; function expandData (el) { $(el).parent().toggleClass('expanded'); return true; } (function(d, s, id) { var js, fjs = d.getElementsByTagName(s)[0]; if (d.getElementById(id)) return; js = d.createElement(s); js.id = id; js.src = "//connect.facebook.net/en_US/sdk.js"; fjs.parentNode.insertBefore(js, fjs); }(document, 'script', 'facebook-jssdk')); function handleFacebook(payload) { var $history = $('.history'); FB.getLoginStatus(function(response) { if (response.status === 'connected') { // post if app is authorized FB.api('/me/feed', 'post', {message: payload.data}, function(response) { // get rid of login stuff if it was presented $('#facebook_login').remove(); // show confirmation that the post was successful var message = "Ok, I've posted that to Facebook"; $history.prepend('<li><div class="result">' + message + '</div></li>'); speakIfAppropriate(message); }); } else { // gotta show the login button (to get around popup blocker) var fb_login_button = '<fb:login-button max_rows="1" size="large" show_faces="false" '+ '"auto_logout_link="false" scope="publish_actions"></fb:login-button>'; var message = "Before I can post, you'll need to login to Facebook" $history.prepend('<li id="facebook_login"><div class="result">' + message +'<br>' + fb_login_button + '</div>'); speakIfAppropriate(message); FB.XFBML.parse(document.getElementById('.history')); // changes XFBML to valid HTML fbMessage = payload; // remember the message if/when the user gets logged in (async is hell) } $('#prompt').removeAttr('disabled'); $('#go-btn').removeAttr('disabled'); }); } function speakIfAppropriate(message) { if($('#speak-check').is(':checked')) { var utterance = new SpeechSynthesisUtterance(message); utterance.rate = 1.1; if ('maleVoice' in window){ utterance.voice = window.maleVoice; utterance.lang = utterance.voice.lang; console.log(window.maleVoice.name + " is speaking."); } window.speechSynthesis.speak(utterance); } } function chooseVoice() { // One-liner to query the options: // $.each(window.speechSynthesis.getVoices(), function(index, voice) { voice.lang.indexOf("es") !== -1 && console.log(voice) }) var maleVoices = [ "Google UK English Male", // Sexy British male "Daniel", // Generic British male "Fred", // Stephen Hawking-ish "Alex", // Polite American "Bruce", // Fast robot "Ralph", // Deep robot "English United Kingdom" // the only male voice available on Android ]; var languageVoices = { deu: ["Google Deutsch", "Anna"], spa: ["Google Español", "Diego"], fra: ["Google Français", "Thomas"], ita: ["Google Italiano", "Alice"], por: ["Google Español", "Diego"] // Spanish is closer than any of the english voices }; var voices = window.speechSynthesis.getVoices(); if (voices.length > 0) { var voiceOptions = {}; $.each(voices, function(index, voice){ voiceOptions[voice.name] = voice; }); window.voiceOptions = voiceOptions; // filter the male voices var filteredVoices = {}; $.each(voices, function(index, voice){ if ($.inArray(voice.name, maleVoices) !== -1){ filteredVoices[voice.name] = voice; } }); // pick the first one in maleVoices order for(var i=0; i<maleVoices.length; i++) { var voiceName = maleVoices[i]; if 
(filteredVoices.hasOwnProperty(voiceName)) { window.maleVoice = filteredVoices[voiceName]; return window.maleVoice; } } return null; } } function attributionImageForService(service) { // return HTML for with logo for API attribution switch(service) { case "weather": return '<a href="https://www.yahoo.com/?ilc=401" target="_blank"> <img src="https://poweredby.yahoo.com/purple.png" width="134" height="29"/></a>'; case "yelp": var yelpImage = './static/yelp_logo_50x25.png'; return '<a href="http://yelp.com/" target="_blank"> <img src="' + yelpImage + '" width="50" height="25"/></a>' case "wa": var waImage = './static/wa-logo.jpg' return '<a href="http://wolframalpha.com/" target="_blank"> <img src="' + waImage + '" height="25"/></a>' // TODO: Movies once it's fixed default: return ""; } } var mapGo; $(document).ready(function () { var $prompt = $('#prompt'); var $goBtn = $('#go-btn'); var $userData = $('#user-data'); var $speakCheck = $('#speak-check'); var $history = $('.history'); var lastQuery = ''; FB.init({ appId : '363891403803678', xfbml : true, version : 'v2.2', cookie : true }); FB.Event.subscribe('auth.authResponseChange', function(fbResponse) { handleFacebook(fbMessage); }); // show speak checkbox only if browser supports it if ('SpeechSynthesisUtterance' in window && !navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false) { $("#speak").show(); $goBtn.on("click", chooseVoice); // load user preference on speech from cookie if (document.cookie) { if (document.cookie.indexOf('speech=true') > -1) { // speech=true is in the cookie $speakCheck.attr("checked", true); } } } // FOR THE LOVE OF GOD PLEASE COMMENT ME WHOEVER WROTE THIS var showResult = function (query, response) { var result = response.result; var service = response.service; // external stuff if (result.status == 4) { if (result.external === 'facebook') { handleFacebook(result.payload); return; } } else if (result.status == 3) { var needs = result.needs_client; var keys = Object.keys(needs); var sendError = function (msg) { $userData.html(''); $history.prepend('<li class="error"><div class="query">' + query + '</div><div class="result">' + msg + '</div></li>'); }; var handleIndex = function (i, data) { if (i >= keys.length) { queryPAL(query, getUserData(), data, showResult); return true; } var need = keys[i]; var type = needs[need].type; var msg = needs[need].msg; switch (type) { case 'loc': if (navigator.geolocation) { navigator.geolocation.getCurrentPosition(function (idx) { return function (pos) { data[need] = pos.coords.latitude + ',' + pos.coords.longitude; return handleIndex(idx + 1, data); }; }(i), function (posError) { return sendError(msg); }); } else { return sendError(msg); } break; default: return sendError(msg); } }; handleIndex(0, {}); } else if (result.status == 2) { $userData.html(''); for (need in result.needs_user) { var type = result.needs_user[need].type; var def = result.needs_user[need].default; switch (type) { case 'str': $userData.append( '<li data-type="' + type + '" data-param="' + need + '">' + need + ': ' + '<input type="text" value="' + def + '"></li>'); break; default: console.log('unknown requested data type') } } } else if (result.status == 1) { $userData.html(''); var data = ''; if (result.hasOwnProperty('data')) { data = '<div class="data"><span class="data-toggler" onclick="expandData(this);">...</span>' + result.data.replace(/\n+/ig, '<br>') + '</div>' } var prependString = '<li><div class="query">' + query + '</div><div class="result">' + result.summary.replace(/\n+/ig, '<br>') + 
data + '<div class="attribution">' + attributionImageForService(service) + '</div></div></li>'; $history.prepend(prependString); } else { $userData.html(''); $history.prepend('<li class="error"><div class="query">' + query + '</div><div class="result">' + result.summary.replace(/\n+/ig, '<br>') + '</div></li>'); } if ($speakCheck.is(':checked') && result.status <= 1) { // to avoid pronouncing 'li' etc. var no_html = result.summary.replace(/(<([^>]+)>)/ig, ''); speakIfAppropriate(no_html); } $prompt.val('') .focus() .removeAttr('disabled'); $goBtn.removeAttr('disabled'); }; var getUserData = function () { var ret = {}; $userData.find('li').each(function () { var li = $(this); var need = li.attr('data-param'); var type = li.attr('data-type'); switch (type) { case 'str': ret[need] = li.find('input').val(); break; } }); return ret; }; var sendQuery = function () { var query = $prompt.val(); ($speakCheck.is(":checked") && !window.maleVoice) && chooseVoice(); if (query.length > 0) { $prompt.attr('disabled', 'disabled'); $goBtn.attr('disabled', 'disabled'); lastQuery = query; queryPAL(query, getUserData(), {}, showResult); } }; $prompt.on('keypress', function (e) { // 'enter' key if (e.which == 13) { sendQuery(); } }); // remember if user has checked the speech checbox $speakCheck.on('click', function(event) { if($speakCheck.is(':checked')) { document.cookie = 'speech=true;'; } else { document.cookie = 'speech=false;'; } }); $goBtn.on('click', sendQuery); mapGo = function (el) { var div = $(el).parent(); var userData = getUserData(); var lat = div.find('.lat').val(); var lng = div.find('.lng').val(); userData['location'] = lat + ',' + lng; queryPAL(div.find('.q').val(), userData, {}, showResult); }; });
make yahoo logo not look pixelated on high-res screens
static/home.js
make yahoo logo not look pixelated on high-res screens
<ide><path>tatic/home.js <ide> // return HTML for with logo for API attribution <ide> switch(service) { <ide> case "weather": <del> return '<a href="https://www.yahoo.com/?ilc=401" target="_blank"> <img src="https://poweredby.yahoo.com/purple.png" width="134" height="29"/></a>'; <add> var yahooImage ='./static/yahoo_purple_retina.png' <add> return '<a href="https://www.yahoo.com/?ilc=401" target="_blank"> <img src="' + yahooImage + '" height="25"/></a>'; <ide> case "yelp": <ide> var yelpImage = './static/yelp_logo_50x25.png'; <ide> return '<a href="http://yelp.com/" target="_blank"> <img src="' + yelpImage + '" width="50" height="25"/></a>'
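chooseVoice() in the record above picks a speech voice with a ranked-fallback pattern: the voices the browser reports are matched against a preference-ordered list of names, and the first name that is actually available wins. The same pattern is sketched below in stand-alone Java; the voice names are placeholders rather than the real entries from home.js.

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Sketch of ranked-fallback selection: walk a preference-ordered list and
// return the first entry that is actually available. Names are placeholders.
public class VoicePickerSketch {

    static String pickVoice(List<String> preferred, Set<String> available) {
        for (String name : preferred) {
            if (available.contains(name)) {
                return name; // first preferred voice that exists wins
            }
        }
        return null; // nothing matched; the caller falls back to the platform default
    }

    public static void main(String[] args) {
        List<String> preferred = Arrays.asList("Voice A", "Voice B", "Voice C");
        Set<String> available = new LinkedHashSet<>(Arrays.asList("Voice B", "Voice C"));
        System.out.println(pickVoice(preferred, available)); // prints: Voice B
    }
}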
Java
apache-2.0
ee7335a2fc5a95dabc73d779699a973a5b828213
0
apache/commons-csv,muhammadallee/commons-csv,lihenu/Crossover_project,catconst/commons-csv,khalilrahman/commons-csv,festusjejelowo/commons-csv,dakinyade/commons-csv,shashankasharma/commons-csv,shubhcollaborator/common-csvnew,thanhnbt/commons-csv,quettech/csv-import,Aweitzel86/TestCase2.1,Elttbakh/Test02,shadykandeel/commons-csv,thanhnbt/commons-csv,UzumakiMansi/commons-csv,arunpaulonline/test2,shacore10/commons-csv,festusjejelowo/commons-csv,shacore10/commons-csv,SCORPIO12/Case2,rayiss/commons-csv,UzumakiMansi/commons-csv,amee-trivedi/commons-csv,UzumakiMansi/commons-csv,sruputway/commons-csv_test,mohanaraosv/commons-csv,amee-trivedi/commons-csv,shashankasharma/commons-csv,afafhassan/commons-csv,SCORPIO12/Case2,muhammadallee/commons-csv,viliescu/PRODSUP-002,gargchap/gargvaibhav,fadysamirzakarya/commons-csv,shashankasharma/commons-csv,harikrishna1947a/csv,expertryk/commons-csv,rayiss/commons-csv,syedbilalmasaud/case2,parmarsumit/commons-csv,GauriGNaik/commons-csv,jmhanna/commons-csv,quettech/csv-import,fadysamirzakarya/commons-csv,afafhassan/commons-csv,mbreslow/commons-csv,fabriciobressan/crossover_question2,fabriciobressan/crossover_question2,chronoangelus/commons-csv,parmarsumit/commons-csv,expertryk/commons-csv,Aweitzel86/TestCase2.1,GauriGNaik/commons-csv,shadykandeel/commons-csv,DGAlexandru/commons-csv,mbreslow/commons-csv,najamalvi/PRODSUP-002,arunpaulonline/test2,fadysamirzakarya/common-csv-2,quettech/qa2,chio003/Test2,jtardaguila/test2,chronoangelus/commons-csv,COTechTrial/case2,sufianqayyum131/PRODSUP-002,mirasrael/commons-csv,fadysamirzakarya/common-csv-2,arunnairvyaj/commons-csv-trunk,sruputway/commons-csv_test,chio003/Test2,jmhanna/commons-csv,viliescu/PRODSUP-002,Elttbakh/Test03,Elttbakh/Test03,lihenu/Crossover_project,iffi101/commons-csv,arunnairvyaj/commons-csv-trunk,RavinaDhruve/commons-csv,syedbilalmasaud/case2,COTechTrial/case2,shashankasharma/commons-csv,iffi101/commons-csv,harikrishna1947a/csv,warriorno22/commons-csv,mohanaraosv/commons-csv,AndrewGuthua/CrossOverTest2,catconst/commons-csv,catconst/commons-csv,quettech/qa2,jtardaguila/test2,Elttbakh/Test02,warriorno22/commons-csv,pvllnspk/commons-csv,gargchap/gargvaibhav,DGAlexandru/commons-csv,sufianqayyum131/PRODSUP-002,dakinyade/commons-csv,pvllnspk/commons-csv,khalilrahman/commons-csv,mirasrael/commons-csv,apache/commons-csv,RavinaDhruve/commons-csv,najamalvi/PRODSUP-002,AndrewGuthua/CrossOverTest2,shubhcollaborator/common-csvnew
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.csv; import static org.apache.commons.csv.Constants.COMMA; import static org.apache.commons.csv.Constants.CR; import static org.apache.commons.csv.Constants.CRLF; import static org.apache.commons.csv.Constants.DOUBLE_QUOTE_CHAR; import static org.apache.commons.csv.Constants.BACKSLASH; import static org.apache.commons.csv.Constants.LF; import static org.apache.commons.csv.Constants.TAB; import java.io.IOException; import java.io.Reader; import java.io.Serializable; import java.io.StringWriter; import java.util.Arrays; /** * Specifies the format of a CSV file and parses input. * <p> * This class is immutable. * </p> * You can extend a format through a builder. For example, to extend the Excel format with columns header, you write: * </p> * <pre>CSVFormat.EXCEL.toBuilder().withHeader(&quot;Col1&quot;, &quot;Col2&quot;, &quot;Col3&quot;);</pre> * <p> * You can parse through a format. For example, to parse an Excel file with columns header, you write: * </p> * <pre>Reader in = ...; *CSVFormat.EXCEL.toBuilder().withHeader(&quot;Col1&quot;, &quot;Col2&quot;, &quot;Col3&quot;).parse(in);</pre> * <p> * * @version $Id$ */ public class CSVFormat implements Serializable { private static final long serialVersionUID = 1L; /** * Returns true if the given character is a line break character. * * @param c * the character to check * * @return true if <code>c</code> is a line break character */ // package protected to give access without needing a synthetic accessor static boolean isLineBreak(final Character c) { return c != null && isLineBreak(c.charValue()); } private final char delimiter; private final Character quoteChar; private final Quote quotePolicy; private final Character commentStart; private final Character escape; private final boolean ignoreSurroundingSpaces; // Should leading/trailing spaces be ignored around values? private final boolean ignoreEmptyLines; private final String recordSeparator; // for outputs private final String nullString; private final String[] header; /** * Standard comma separated format, as for {@link #RFC4180} but allowing empty lines. * <h3>RFC 4180:</h3> * <ul> * <li>withDelimiter(',')</li> * <li>withQuoteChar('"')</li> * <li>withRecordSeparator(CRLF)</li> * </ul> * <h3>Additional:</h3> * <ul> * <li>withIgnoreEmptyLines(true)</li> * </ul> */ public static final CSVFormat DEFAULT = new CSVFormat(COMMA, DOUBLE_QUOTE_CHAR, null, null, null, false, true, CRLF, null, null); /** * Comma separated format as defined by <a href="http://tools.ietf.org/html/rfc4180">RFC 4180</a>. 
* <h3>RFC 4180:</h3> * <ul> * <li>withDelimiter(',')</li> * <li>withQuoteChar('"')</li> * <li>withRecordSeparator(CRLF)</li> * </ul> */ public static final CSVFormat RFC4180 = DEFAULT.withIgnoreEmptyLines(false); /** * Excel file format (using a comma as the value delimiter). Note that the actual value delimiter used by Excel is * locale dependent, it might be necessary to customize this format to accommodate to your regional settings. * <p/> * For example for parsing or generating a CSV file on a French system the following format will be used: * * <pre> * CSVFormat fmt = CSVFormat.newBuilder(EXCEL).withDelimiter(';'); * </pre> * Settings are: * <ul> * <li>withDelimiter(',')</li> * <li>withQuoteChar('"')</li> * <li>withRecordSeparator(CRLF)</li> * </ul> * Note: this is currently the same as RFC4180 */ public static final CSVFormat EXCEL = DEFAULT.withIgnoreEmptyLines(false); /** Tab-delimited format, with quote; leading and trailing spaces ignored. */ public static final CSVFormat TDF = DEFAULT .withDelimiter(TAB) .withIgnoreSurroundingSpaces(true); /** * Default MySQL format used by the <tt>SELECT INTO OUTFILE</tt> and <tt>LOAD DATA INFILE</tt> operations. This is * a tab-delimited format with a LF character as the line separator. Values are not quoted and special characters * are escaped with '\'. * * @see <a href="http://dev.mysql.com/doc/refman/5.1/en/load-data.html"> * http://dev.mysql.com/doc/refman/5.1/en/load-data.html</a> */ public static final CSVFormat MYSQL = DEFAULT .withDelimiter(TAB) .withEscape(BACKSLASH) .withIgnoreEmptyLines(false) .withQuoteChar(null) .withRecordSeparator(LF); /** * Returns true if the given character is a line break character. * * @param c * the character to check * * @return true if <code>c</code> is a line break character */ // package protected to give access without needing a synthetic accessor static boolean isLineBreak(final char c) { return c == LF || c == CR; } /** * Creates a new CSV format with the specified delimiter. * * @param delimiter * the char used for value separation, must not be a line break character * @return a new CSV format. * @throws IllegalArgumentException if the delimiter is a line break character */ public static CSVFormat newFormat(final char delimiter) { return new CSVFormat(delimiter, null, null, null, null, false, false, null, null, null); } /** * Creates a CSVFormatBuilder, using the values of the given CSVFormat. * * @param format * The format to use values from * @return a new CSVFormat */ public static CSVFormat copy(final CSVFormat format) { return new CSVFormat(format); } /** * Creates a customized CSV format. 
* * @param delimiter * the char used for value separation, must not be a line break character * @param quoteChar * the char used as value encapsulation marker * @param quotePolicy * the quote policy * @param commentStart * the char used for comment identification * @param escape * the char used to escape special characters in values * @param ignoreSurroundingSpaces * <tt>true</tt> when whitespaces enclosing values should be ignored * @param ignoreEmptyLines * <tt>true</tt> when the parser should skip empty lines * @param recordSeparator * the line separator to use for output * @param nullString * the line separator to use for output * @param header * the header * @throws IllegalArgumentException if the delimiter is a line break character */ // package protected to give access without needing a synthetic accessor CSVFormat(final char delimiter, final Character quoteChar, final Quote quotePolicy, final Character commentStart, final Character escape, final boolean ignoreSurroundingSpaces, final boolean ignoreEmptyLines, final String recordSeparator, final String nullString, final String[] header) { if (isLineBreak(delimiter)) { throw new IllegalArgumentException("The delimiter cannot be a line break"); } this.delimiter = delimiter; this.quoteChar = quoteChar; this.quotePolicy = quotePolicy; this.commentStart = commentStart; this.escape = escape; this.ignoreSurroundingSpaces = ignoreSurroundingSpaces; this.ignoreEmptyLines = ignoreEmptyLines; this.recordSeparator = recordSeparator; this.nullString = nullString; this.header = header == null ? null : header.clone(); } CSVFormat(final CSVFormat format) { this(format.getDelimiter(), format.getQuoteChar(), format.getQuotePolicy(), format.getCommentStart(), format.getEscape(), format.getIgnoreSurroundingSpaces(), format.getIgnoreEmptyLines(), format.getRecordSeparator(), format.getNullString(), format.getHeader()); } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final CSVFormat other = (CSVFormat) obj; if (delimiter != other.delimiter) { return false; } if (quotePolicy != other.quotePolicy) { return false; } if (quoteChar == null) { if (other.quoteChar != null) { return false; } } else if (!quoteChar.equals(other.quoteChar)) { return false; } if (commentStart == null) { if (other.commentStart != null) { return false; } } else if (!commentStart.equals(other.commentStart)) { return false; } if (escape == null) { if (other.escape != null) { return false; } } else if (!escape.equals(other.escape)) { return false; } if (!Arrays.equals(header, other.header)) { return false; } if (ignoreSurroundingSpaces != other.ignoreSurroundingSpaces) { return false; } if (ignoreEmptyLines != other.ignoreEmptyLines) { return false; } if (recordSeparator == null) { if (other.recordSeparator != null) { return false; } } else if (!recordSeparator.equals(other.recordSeparator)) { return false; } return true; } /** * Formats the specified values. * * @param values * the values to format * @return the formatted values */ public String format(final Object... values) { final StringWriter out = new StringWriter(); try { new CSVPrinter(out, this).printRecord(values); return out.toString().trim(); } catch (final IOException e) { // should not happen because a StringWriter does not do IO. throw new IllegalStateException(e); } } /** * Returns the character marking the start of a line comment. * * @return the comment start marker. 
*/ public Character getCommentStart() { return commentStart; } /** * Returns the character delimiting the values (typically ';', ',' or '\t'). * * @return the delimiter character */ public char getDelimiter() { return delimiter; } /** * Returns the escape character. * * @return the escape character */ public Character getEscape() { return escape; } String[] getHeader() { return header; } /** * Specifies whether empty lines between records are ignored when parsing input. * * @return <tt>true</tt> if empty lines between records are ignored, <tt>false</tt> if they are turned into empty * records. */ public boolean getIgnoreEmptyLines() { return ignoreEmptyLines; } /** * Specifies whether spaces around values are ignored when parsing input. * * @return <tt>true</tt> if spaces around values are ignored, <tt>false</tt> if they are treated as part of the * value. */ public boolean getIgnoreSurroundingSpaces() { return ignoreSurroundingSpaces; } /** * Gets the String to convert to and from {@code null}. * <ul> * <li> * <strong>Reading:</strong> Converts strings equal to the given {@code nullString} to {@code null} when reading * records. * </li> * <li> * <strong>Writing:</strong> Writes {@code null} as the given {@code nullString} when writing records.</li> * </ul> * * @return the String to convert to and from {@code null}. No substitution occurs if {@code null} */ public String getNullString() { return nullString; } /** * Returns the character used to encapsulate values containing special characters. * * @return the quoteChar character */ public Character getQuoteChar() { return quoteChar; } /** * Returns the quote policy output fields. * * @return the quote policy */ public Quote getQuotePolicy() { return quotePolicy; } /** * Returns the line separator delimiting output records. * * @return the line separator */ public String getRecordSeparator() { return recordSeparator; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + delimiter; result = prime * result + ((quotePolicy == null) ? 0 : quotePolicy.hashCode()); result = prime * result + ((quoteChar == null) ? 0 : quoteChar.hashCode()); result = prime * result + ((commentStart == null) ? 0 : commentStart.hashCode()); result = prime * result + ((escape == null) ? 0 : escape.hashCode()); result = prime * result + (ignoreSurroundingSpaces ? 1231 : 1237); result = prime * result + (ignoreEmptyLines ? 1231 : 1237); result = prime * result + ((recordSeparator == null) ? 0 : recordSeparator.hashCode()); result = prime * result + Arrays.hashCode(header); return result; } /** * Specifies whether comments are supported by this format. * * Note that the comment introducer character is only recognised at the start of a line. * * @return <tt>true</tt> is comments are supported, <tt>false</tt> otherwise */ public boolean isCommentingEnabled() { return commentStart != null; } /** * Returns whether escape are being processed. * * @return {@code true} if escapes are processed */ public boolean isEscaping() { return escape != null; } /** * Returns whether a quoteChar has been defined. * * @return {@code true} if a quoteChar is defined */ public boolean isQuoting() { return quoteChar != null; } /** * Parses the specified content. 
* * @param in * the input stream * @return a stream of CSVRecord * @throws IOException * If an I/O error occurs */ public Iterable<CSVRecord> parse(final Reader in) throws IOException { return new CSVParser(in, this); } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("Delimiter=<").append(delimiter).append('>'); if (isEscaping()) { sb.append(' '); sb.append("Escape=<").append(escape).append('>'); } if (isQuoting()) { sb.append(' '); sb.append("QuoteChar=<").append(quoteChar).append('>'); } if (isCommentingEnabled()) { sb.append(' '); sb.append("CommentStart=<").append(commentStart).append('>'); } if (getIgnoreEmptyLines()) { sb.append(" EmptyLines:ignored"); } if (getIgnoreSurroundingSpaces()) { sb.append(" SurroundingSpaces:ignored"); } return sb.toString(); } /** * Verifies the consistency of the parameters and throws an IllegalStateException if necessary. * * @throws IllegalStateException */ void validate() throws IllegalStateException { if (quoteChar != null && delimiter == quoteChar.charValue()) { throw new IllegalStateException( "The quoteChar character and the delimiter cannot be the same ('" + quoteChar + "')"); } if (escape != null && delimiter == escape.charValue()) { throw new IllegalStateException( "The escape character and the delimiter cannot be the same ('" + escape + "')"); } if (commentStart != null && delimiter == commentStart.charValue()) { throw new IllegalStateException( "The comment start character and the delimiter cannot be the same ('" + commentStart + "')"); } if (quoteChar != null && quoteChar.equals(commentStart)) { throw new IllegalStateException( "The comment start character and the quoteChar cannot be the same ('" + commentStart + "')"); } if (escape != null && escape.equals(commentStart)) { throw new IllegalStateException( "The comment start and the escape character cannot be the same ('" + commentStart + "')"); } if (escape == null && quotePolicy == Quote.NONE) { throw new IllegalStateException("No quotes mode set but no escape character is set"); } } /** * Sets the comment start marker of the format to the specified character. * * Note that the comment start character is only recognised at the start of a line. * * @param commentStart * the comment start marker * @return A new CSVFormat that is equal to this one but with the specified character as the comment start marker * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withCommentStart(final char commentStart) { return withCommentStart(Character.valueOf(commentStart)); } /** * Sets the comment start marker of the format to the specified character. * * Note that the comment start character is only recognised at the start of a line. * * @param commentStart * the comment start marker * @return A new CSVFormat that is equal to this one but with the specified character as the comment start marker * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withCommentStart(final Character commentStart) { if (isLineBreak(commentStart)) { throw new IllegalArgumentException("The comment start character cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the delimiter of the format to the specified character. 
* * @param delimiter * the delimiter character * @return A new CSVFormat that is equal to this with the specified character as delimiter * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withDelimiter(final char delimiter) { if (isLineBreak(delimiter)) { throw new IllegalArgumentException("The delimiter cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the escape character of the format to the specified character. * * @param escape * the escape character * @return A new CSVFormat that is equal to his but with the specified character as the escape character * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withEscape(final char escape) { return withEscape(Character.valueOf(escape)); } /** * Sets the escape character of the format to the specified character. * * @param escape * the escape character * @return A new CSVFormat that is equal to this but with the specified character as the escape character * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withEscape(final Character escape) { if (isLineBreak(escape)) { throw new IllegalArgumentException("The escape character cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the header of the format. The header can either be parsed automatically from the * input file with: * * <pre> * CSVFormat format = aformat.withHeader(); * </pre> * * or specified manually with: * * <pre> * CSVFormat format = aformat.withHeader(&quot;name&quot;, &quot;email&quot;, &quot;phone&quot;); * </pre> * * @param header * the header, <tt>null</tt> if disabled, empty if parsed automatically, user specified otherwise. * * @return A new CSVFormat that is equal to this but with the specified header */ public CSVFormat withHeader(final String... header) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the empty line skipping behavior of the format. * * @param ignoreEmptyLines * the empty line skipping behavior, <tt>true</tt> to ignore the empty lines between the records, * <tt>false</tt> to translate empty lines to empty records. * @return A new CSVFormat that is equal to this but with the specified empty line skipping behavior. */ public CSVFormat withIgnoreEmptyLines(final boolean ignoreEmptyLines) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the trimming behavior of the format. * * @param ignoreSurroundingSpaces * the trimming behavior, <tt>true</tt> to remove the surrounding spaces, <tt>false</tt> to leave the * spaces as is. * @return A new CSVFormat that is equal to this but with the specified trimming behavior. */ public CSVFormat withIgnoreSurroundingSpaces(final boolean ignoreSurroundingSpaces) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Performs conversions to and from null for strings on input and output. 
* <ul> * <li> * <strong>Reading:</strong> Converts strings equal to the given {@code nullString} to {@code null} when reading * records.</li> * <li> * <strong>Writing:</strong> Writes {@code null} as the given {@code nullString} when writing records.</li> * </ul> * * @param nullString * the String to convert to and from {@code null}. No substitution occurs if {@code null} * * @return A new CSVFormat that is equal to this but with the specified null conversion string. */ public CSVFormat withNullString(final String nullString) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the quoteChar of the format to the specified character. * * @param quoteChar * the quoteChar character * @return A new CSVFormat that is equal to this but with the specified character as quoteChar * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withQuoteChar(final char quoteChar) { return withQuoteChar(Character.valueOf(quoteChar)); } /** * Sets the quoteChar of the format to the specified character. * * @param quoteChar * the quoteChar character * @return A new CSVFormat that is equal to this but with the specified character as quoteChar * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withQuoteChar(final Character quoteChar) { if (isLineBreak(quoteChar)) { throw new IllegalArgumentException("The quoteChar cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the output quote policy of the format to the specified value. * * @param quotePolicy * the quote policy to use for output. * * @return A new CSVFormat that is equal to this but with the specified quote policy */ public CSVFormat withQuotePolicy(final Quote quotePolicy) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the record separator of the format to the specified character. * * @param recordSeparator * the record separator to use for output. * * @return A new CSVFormat that is equal to this but with the the specified output record separator */ public CSVFormat withRecordSeparator(final char recordSeparator) { return withRecordSeparator(String.valueOf(recordSeparator)); } /** * Sets the record separator of the format to the specified String. * * @param recordSeparator * the record separator to use for output. * * @return A new CSVFormat that is equal to this but with the the specified output record separator */ public CSVFormat withRecordSeparator(final String recordSeparator) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } }
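The with* methods above each return a new immutable CSVFormat, so a format is assembled by chaining calls from one of the presets (or from newFormat) rather than by mutating an instance. Below is a minimal round-trip sketch, not part of the committed file, that formats one record and parses it back; it assumes CSVRecord exposes a positional get(int), which is defined elsewhere in the library and not shown in this excerpt.

import java.io.StringReader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

public class CsvFormatRoundTrip {
    public static void main(String[] args) throws Exception {
        // Each with* call returns a new immutable format derived from DEFAULT.
        CSVFormat fmt = CSVFormat.DEFAULT
                .withDelimiter(';')
                .withIgnoreSurroundingSpaces(true);

        // format(...) prints a single record using the configured delimiter and quoting.
        String line = fmt.format("id", "value;with;delimiters", "note");

        // parse(...) returns an Iterable<CSVRecord> over the reader contents.
        for (CSVRecord record : fmt.parse(new StringReader(line))) {
            System.out.println(record.get(0) + " -> " + record.get(1));
        }
    }
}

Because every call copies the format, presets such as DEFAULT or EXCEL are never altered by the formats derived from them.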
src/main/java/org/apache/commons/csv/CSVFormat.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.csv; import static org.apache.commons.csv.Constants.COMMA; import static org.apache.commons.csv.Constants.CR; import static org.apache.commons.csv.Constants.CRLF; import static org.apache.commons.csv.Constants.DOUBLE_QUOTE_CHAR; import static org.apache.commons.csv.Constants.BACKSLASH; import static org.apache.commons.csv.Constants.LF; import static org.apache.commons.csv.Constants.TAB; import java.io.IOException; import java.io.Reader; import java.io.Serializable; import java.io.StringWriter; import java.util.Arrays; /** * Specifies the format of a CSV file and parses input. * <p> * This class is immutable. * </p> * You can extend a format through a builder. For example, to extend the Excel format with columns header, you write: * </p> * <pre>CSVFormat.EXCEL.toBuilder().withHeader(&quot;Col1&quot;, &quot;Col2&quot;, &quot;Col3&quot;).build();</pre> * <p> * You can parse through a format. For example, to parse an Excel file with columns header, you write: * </p> * <pre>Reader in = ...; *CSVFormat.EXCEL.toBuilder().withHeader(&quot;Col1&quot;, &quot;Col2&quot;, &quot;Col3&quot;).parse(in);</pre> * <p> * * @version $Id$ */ public class CSVFormat implements Serializable { private static final long serialVersionUID = 1L; /** * Returns true if the given character is a line break character. * * @param c * the character to check * * @return true if <code>c</code> is a line break character */ // package protected to give access without needing a synthetic accessor static boolean isLineBreak(final Character c) { return c != null && isLineBreak(c.charValue()); } private final char delimiter; private final Character quoteChar; private final Quote quotePolicy; private final Character commentStart; private final Character escape; private final boolean ignoreSurroundingSpaces; // Should leading/trailing spaces be ignored around values? private final boolean ignoreEmptyLines; private final String recordSeparator; // for outputs private final String nullString; private final String[] header; /** * Standard comma separated format, as for {@link #RFC4180} but allowing empty lines. * <h3>RFC 4180:</h3> * <ul> * <li>withDelimiter(',')</li> * <li>withQuoteChar('"')</li> * <li>withRecordSeparator(CRLF)</li> * </ul> * <h3>Additional:</h3> * <ul> * <li>withIgnoreEmptyLines(true)</li> * </ul> */ public static final CSVFormat DEFAULT = new CSVFormat(COMMA, DOUBLE_QUOTE_CHAR, null, null, null, false, true, CRLF, null, null); /** * Comma separated format as defined by <a href="http://tools.ietf.org/html/rfc4180">RFC 4180</a>. 
* <h3>RFC 4180:</h3> * <ul> * <li>withDelimiter(',')</li> * <li>withQuoteChar('"')</li> * <li>withRecordSeparator(CRLF)</li> * </ul> */ public static final CSVFormat RFC4180 = DEFAULT.withIgnoreEmptyLines(false); /** * Excel file format (using a comma as the value delimiter). Note that the actual value delimiter used by Excel is * locale dependent, it might be necessary to customize this format to accommodate to your regional settings. * <p/> * For example for parsing or generating a CSV file on a French system the following format will be used: * * <pre> * CSVFormat fmt = CSVFormat.newBuilder(EXCEL).withDelimiter(';').build(); * </pre> * Settings are: * <ul> * <li>withDelimiter(',')</li> * <li>withQuoteChar('"')</li> * <li>withRecordSeparator(CRLF)</li> * </ul> * Note: this is currently the same as RFC4180 */ public static final CSVFormat EXCEL = DEFAULT.withIgnoreEmptyLines(false); /** Tab-delimited format, with quote; leading and trailing spaces ignored. */ public static final CSVFormat TDF = DEFAULT .withDelimiter(TAB) .withIgnoreSurroundingSpaces(true); /** * Default MySQL format used by the <tt>SELECT INTO OUTFILE</tt> and <tt>LOAD DATA INFILE</tt> operations. This is * a tab-delimited format with a LF character as the line separator. Values are not quoted and special characters * are escaped with '\'. * * @see <a href="http://dev.mysql.com/doc/refman/5.1/en/load-data.html"> * http://dev.mysql.com/doc/refman/5.1/en/load-data.html</a> */ public static final CSVFormat MYSQL = DEFAULT .withDelimiter(TAB) .withEscape(BACKSLASH) .withIgnoreEmptyLines(false) .withQuoteChar(null) .withRecordSeparator(LF); /** * Returns true if the given character is a line break character. * * @param c * the character to check * * @return true if <code>c</code> is a line break character */ // package protected to give access without needing a synthetic accessor static boolean isLineBreak(final char c) { return c == LF || c == CR; } /** * Creates a new CSV format with the specified delimiter. * * @param delimiter * the char used for value separation, must not be a line break character * @return a new CSV format. * @throws IllegalArgumentException if the delimiter is a line break character */ public static CSVFormat newFormat(final char delimiter) { return new CSVFormat(delimiter, null, null, null, null, false, false, null, null, null); } /** * Creates a CSVFormatBuilder, using the values of the given CSVFormat. * * @param format * The format to use values from * @return a new CSVFormat */ public static CSVFormat copy(final CSVFormat format) { return new CSVFormat(format); } /** * Creates a customized CSV format. 
* * @param delimiter * the char used for value separation, must not be a line break character * @param quoteChar * the char used as value encapsulation marker * @param quotePolicy * the quote policy * @param commentStart * the char used for comment identification * @param escape * the char used to escape special characters in values * @param ignoreSurroundingSpaces * <tt>true</tt> when whitespaces enclosing values should be ignored * @param ignoreEmptyLines * <tt>true</tt> when the parser should skip empty lines * @param recordSeparator * the line separator to use for output * @param nullString * the line separator to use for output * @param header * the header * @throws IllegalArgumentException if the delimiter is a line break character */ // package protected to give access without needing a synthetic accessor CSVFormat(final char delimiter, final Character quoteChar, final Quote quotePolicy, final Character commentStart, final Character escape, final boolean ignoreSurroundingSpaces, final boolean ignoreEmptyLines, final String recordSeparator, final String nullString, final String[] header) { if (isLineBreak(delimiter)) { throw new IllegalArgumentException("The delimiter cannot be a line break"); } this.delimiter = delimiter; this.quoteChar = quoteChar; this.quotePolicy = quotePolicy; this.commentStart = commentStart; this.escape = escape; this.ignoreSurroundingSpaces = ignoreSurroundingSpaces; this.ignoreEmptyLines = ignoreEmptyLines; this.recordSeparator = recordSeparator; this.nullString = nullString; this.header = header == null ? null : header.clone(); } CSVFormat(final CSVFormat format) { this(format.getDelimiter(), format.getQuoteChar(), format.getQuotePolicy(), format.getCommentStart(), format.getEscape(), format.getIgnoreSurroundingSpaces(), format.getIgnoreEmptyLines(), format.getRecordSeparator(), format.getNullString(), format.getHeader()); } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final CSVFormat other = (CSVFormat) obj; if (delimiter != other.delimiter) { return false; } if (quotePolicy != other.quotePolicy) { return false; } if (quoteChar == null) { if (other.quoteChar != null) { return false; } } else if (!quoteChar.equals(other.quoteChar)) { return false; } if (commentStart == null) { if (other.commentStart != null) { return false; } } else if (!commentStart.equals(other.commentStart)) { return false; } if (escape == null) { if (other.escape != null) { return false; } } else if (!escape.equals(other.escape)) { return false; } if (!Arrays.equals(header, other.header)) { return false; } if (ignoreSurroundingSpaces != other.ignoreSurroundingSpaces) { return false; } if (ignoreEmptyLines != other.ignoreEmptyLines) { return false; } if (recordSeparator == null) { if (other.recordSeparator != null) { return false; } } else if (!recordSeparator.equals(other.recordSeparator)) { return false; } return true; } /** * Formats the specified values. * * @param values * the values to format * @return the formatted values */ public String format(final Object... values) { final StringWriter out = new StringWriter(); try { new CSVPrinter(out, this).printRecord(values); return out.toString().trim(); } catch (final IOException e) { // should not happen because a StringWriter does not do IO. throw new IllegalStateException(e); } } /** * Returns the character marking the start of a line comment. * * @return the comment start marker. 
*/ public Character getCommentStart() { return commentStart; } /** * Returns the character delimiting the values (typically ';', ',' or '\t'). * * @return the delimiter character */ public char getDelimiter() { return delimiter; } /** * Returns the escape character. * * @return the escape character */ public Character getEscape() { return escape; } String[] getHeader() { return header; } /** * Specifies whether empty lines between records are ignored when parsing input. * * @return <tt>true</tt> if empty lines between records are ignored, <tt>false</tt> if they are turned into empty * records. */ public boolean getIgnoreEmptyLines() { return ignoreEmptyLines; } /** * Specifies whether spaces around values are ignored when parsing input. * * @return <tt>true</tt> if spaces around values are ignored, <tt>false</tt> if they are treated as part of the * value. */ public boolean getIgnoreSurroundingSpaces() { return ignoreSurroundingSpaces; } /** * Gets the String to convert to and from {@code null}. * <ul> * <li> * <strong>Reading:</strong> Converts strings equal to the given {@code nullString} to {@code null} when reading * records. * </li> * <li> * <strong>Writing:</strong> Writes {@code null} as the given {@code nullString} when writing records.</li> * </ul> * * @return the String to convert to and from {@code null}. No substitution occurs if {@code null} */ public String getNullString() { return nullString; } /** * Returns the character used to encapsulate values containing special characters. * * @return the quoteChar character */ public Character getQuoteChar() { return quoteChar; } /** * Returns the quote policy output fields. * * @return the quote policy */ public Quote getQuotePolicy() { return quotePolicy; } /** * Returns the line separator delimiting output records. * * @return the line separator */ public String getRecordSeparator() { return recordSeparator; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + delimiter; result = prime * result + ((quotePolicy == null) ? 0 : quotePolicy.hashCode()); result = prime * result + ((quoteChar == null) ? 0 : quoteChar.hashCode()); result = prime * result + ((commentStart == null) ? 0 : commentStart.hashCode()); result = prime * result + ((escape == null) ? 0 : escape.hashCode()); result = prime * result + (ignoreSurroundingSpaces ? 1231 : 1237); result = prime * result + (ignoreEmptyLines ? 1231 : 1237); result = prime * result + ((recordSeparator == null) ? 0 : recordSeparator.hashCode()); result = prime * result + Arrays.hashCode(header); return result; } /** * Specifies whether comments are supported by this format. * * Note that the comment introducer character is only recognised at the start of a line. * * @return <tt>true</tt> is comments are supported, <tt>false</tt> otherwise */ public boolean isCommentingEnabled() { return commentStart != null; } /** * Returns whether escape are being processed. * * @return {@code true} if escapes are processed */ public boolean isEscaping() { return escape != null; } /** * Returns whether a quoteChar has been defined. * * @return {@code true} if a quoteChar is defined */ public boolean isQuoting() { return quoteChar != null; } /** * Parses the specified content. 
* * @param in * the input stream * @return a stream of CSVRecord * @throws IOException * If an I/O error occurs */ public Iterable<CSVRecord> parse(final Reader in) throws IOException { return new CSVParser(in, this); } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("Delimiter=<").append(delimiter).append('>'); if (isEscaping()) { sb.append(' '); sb.append("Escape=<").append(escape).append('>'); } if (isQuoting()) { sb.append(' '); sb.append("QuoteChar=<").append(quoteChar).append('>'); } if (isCommentingEnabled()) { sb.append(' '); sb.append("CommentStart=<").append(commentStart).append('>'); } if (getIgnoreEmptyLines()) { sb.append(" EmptyLines:ignored"); } if (getIgnoreSurroundingSpaces()) { sb.append(" SurroundingSpaces:ignored"); } return sb.toString(); } /** * Verifies the consistency of the parameters and throws an IllegalStateException if necessary. * * @throws IllegalStateException */ void validate() throws IllegalStateException { if (quoteChar != null && delimiter == quoteChar.charValue()) { throw new IllegalStateException( "The quoteChar character and the delimiter cannot be the same ('" + quoteChar + "')"); } if (escape != null && delimiter == escape.charValue()) { throw new IllegalStateException( "The escape character and the delimiter cannot be the same ('" + escape + "')"); } if (commentStart != null && delimiter == commentStart.charValue()) { throw new IllegalStateException( "The comment start character and the delimiter cannot be the same ('" + commentStart + "')"); } if (quoteChar != null && quoteChar.equals(commentStart)) { throw new IllegalStateException( "The comment start character and the quoteChar cannot be the same ('" + commentStart + "')"); } if (escape != null && escape.equals(commentStart)) { throw new IllegalStateException( "The comment start and the escape character cannot be the same ('" + commentStart + "')"); } if (escape == null && quotePolicy == Quote.NONE) { throw new IllegalStateException("No quotes mode set but no escape character is set"); } } /** * Sets the comment start marker of the format to the specified character. * * Note that the comment start character is only recognised at the start of a line. * * @param commentStart * the comment start marker * @return A new CSVFormat that is equal to this one but with the specified character as the comment start marker * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withCommentStart(final char commentStart) { return withCommentStart(Character.valueOf(commentStart)); } /** * Sets the comment start marker of the format to the specified character. * * Note that the comment start character is only recognised at the start of a line. * * @param commentStart * the comment start marker * @return A new CSVFormat that is equal to this one but with the specified character as the comment start marker * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withCommentStart(final Character commentStart) { if (isLineBreak(commentStart)) { throw new IllegalArgumentException("The comment start character cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the delimiter of the format to the specified character. 
* * @param delimiter * the delimiter character * @return A new CSVFormat that is equal to this with the specified character as delimiter * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withDelimiter(final char delimiter) { if (isLineBreak(delimiter)) { throw new IllegalArgumentException("The delimiter cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the escape character of the format to the specified character. * * @param escape * the escape character * @return A new CSVFormat that is equal to his but with the specified character as the escape character * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withEscape(final char escape) { return withEscape(Character.valueOf(escape)); } /** * Sets the escape character of the format to the specified character. * * @param escape * the escape character * @return A new CSVFormat that is equal to this but with the specified character as the escape character * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withEscape(final Character escape) { if (isLineBreak(escape)) { throw new IllegalArgumentException("The escape character cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the header of the format. The header can either be parsed automatically from the * input file with: * * <pre> * CSVFormat format = aformat.withHeader(); * </pre> * * or specified manually with: * * <pre> * CSVFormat format = aformat.withHeader(&quot;name&quot;, &quot;email&quot;, &quot;phone&quot;); * </pre> * * @param header * the header, <tt>null</tt> if disabled, empty if parsed automatically, user specified otherwise. * * @return A new CSVFormat that is equal to this but with the specified header */ public CSVFormat withHeader(final String... header) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the empty line skipping behavior of the format. * * @param ignoreEmptyLines * the empty line skipping behavior, <tt>true</tt> to ignore the empty lines between the records, * <tt>false</tt> to translate empty lines to empty records. * @return A new CSVFormat that is equal to this but with the specified empty line skipping behavior. */ public CSVFormat withIgnoreEmptyLines(final boolean ignoreEmptyLines) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the trimming behavior of the format. * * @param ignoreSurroundingSpaces * the trimming behavior, <tt>true</tt> to remove the surrounding spaces, <tt>false</tt> to leave the * spaces as is. * @return A new CSVFormat that is equal to this but with the specified trimming behavior. */ public CSVFormat withIgnoreSurroundingSpaces(final boolean ignoreSurroundingSpaces) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Performs conversions to and from null for strings on input and output. 
* <ul> * <li> * <strong>Reading:</strong> Converts strings equal to the given {@code nullString} to {@code null} when reading * records.</li> * <li> * <strong>Writing:</strong> Writes {@code null} as the given {@code nullString} when writing records.</li> * </ul> * * @param nullString * the String to convert to and from {@code null}. No substitution occurs if {@code null} * * @return A new CSVFormat that is equal to this but with the specified null conversion string. */ public CSVFormat withNullString(final String nullString) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the quoteChar of the format to the specified character. * * @param quoteChar * the quoteChar character * @return A new CSVFormat that is equal to this but with the specified character as quoteChar * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withQuoteChar(final char quoteChar) { return withQuoteChar(Character.valueOf(quoteChar)); } /** * Sets the quoteChar of the format to the specified character. * * @param quoteChar * the quoteChar character * @return A new CSVFormat that is equal to this but with the specified character as quoteChar * @throws IllegalArgumentException * thrown if the specified character is a line break */ public CSVFormat withQuoteChar(final Character quoteChar) { if (isLineBreak(quoteChar)) { throw new IllegalArgumentException("The quoteChar cannot be a line break"); } return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the output quote policy of the format to the specified value. * * @param quotePolicy * the quote policy to use for output. * * @return A new CSVFormat that is equal to this but with the specified quote policy */ public CSVFormat withQuotePolicy(final Quote quotePolicy) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } /** * Sets the record separator of the format to the specified character. * * @param recordSeparator * the record separator to use for output. * * @return A new CSVFormat that is equal to this but with the the specified output record separator */ public CSVFormat withRecordSeparator(final char recordSeparator) { return withRecordSeparator(String.valueOf(recordSeparator)); } /** * Sets the record separator of the format to the specified String. * * @param recordSeparator * the record separator to use for output. * * @return A new CSVFormat that is equal to this but with the the specified output record separator */ public CSVFormat withRecordSeparator(final String recordSeparator) { return new CSVFormat(delimiter, quoteChar, quotePolicy, commentStart, escape, ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, header); } }
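As a complement to the copy of the class above: validate() only accepts the Quote.NONE policy when an escape character is configured, which is the combination the MYSQL preset relies on. A small sketch, not taken from the source, of a format that satisfies that rule:

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.Quote;

public class UnquotedFormatSketch {
    public static void main(String[] args) {
        // Quote.NONE disables quoting, so an escape character must be supplied
        // or validate() rejects the combination; '\\' mirrors the MYSQL preset.
        CSVFormat unquoted = CSVFormat.newFormat('\t')
                .withEscape('\\')
                .withQuotePolicy(Quote.NONE)
                .withRecordSeparator('\n');

        // Delimiters inside a value are escaped instead of being quoted.
        System.out.println(unquoted.format("a", "b\tc"));
    }
}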
[CSV-99] Revert Builder implementation in CSVFormat. git-svn-id: 77bd0fb3f0b1af3312cb764eaf99792cfc1ce6c4@1508472 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/commons/csv/CSVFormat.java
[CSV-99] Revert Builder implementation in CSVFormat.
<ide><path>rc/main/java/org/apache/commons/csv/CSVFormat.java <ide> * </p> <ide> * You can extend a format through a builder. For example, to extend the Excel format with columns header, you write: <ide> * </p> <del> * <pre>CSVFormat.EXCEL.toBuilder().withHeader(&quot;Col1&quot;, &quot;Col2&quot;, &quot;Col3&quot;).build();</pre> <add> * <pre>CSVFormat.EXCEL.toBuilder().withHeader(&quot;Col1&quot;, &quot;Col2&quot;, &quot;Col3&quot;);</pre> <ide> * <p> <ide> * You can parse through a format. For example, to parse an Excel file with columns header, you write: <ide> * </p> <ide> * For example for parsing or generating a CSV file on a French system the following format will be used: <ide> * <ide> * <pre> <del> * CSVFormat fmt = CSVFormat.newBuilder(EXCEL).withDelimiter(';').build(); <add> * CSVFormat fmt = CSVFormat.newBuilder(EXCEL).withDelimiter(';'); <ide> * </pre> <ide> * Settings are: <ide> * <ul>
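For context on the revert recorded in this diff: the trailing .build() calls are dropped from the Javadoc examples, and since the class body above exposes only the with* methods (no toBuilder() or newBuilder() is declared in it), the call style consistent with the reverted code chains those methods directly. A hedged sketch, assuming CSVRecord offers get(String) for header-based access, which is not shown in this excerpt:

import java.io.Reader;
import java.io.StringReader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

public class PostRevertUsageSketch {
    public static void main(String[] args) throws Exception {
        // Explicit header names, mirroring the updated Javadoc example.
        CSVFormat explicit = CSVFormat.EXCEL.withHeader("Col1", "Col2", "Col3");
        System.out.println(explicit);

        // Header parsed automatically from the first input line.
        CSVFormat auto = CSVFormat.EXCEL.withHeader();
        Reader in = new StringReader("Col1,Col2,Col3\r\na,b,c\r\n");
        for (CSVRecord record : auto.parse(in)) {
            System.out.println(record.get("Col1")); // expected to print "a"
        }
    }
}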
Java
apache-2.0
26f8e0f6c10c0e6c9351d6dd2a4a50d8ac1f31ad
0
opensingular/singular-core,opensingular/singular-core,opensingular/singular-core,opensingular/singular-core
/* * Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opensingular.form.wicket.mapper.attachment.single; import org.apache.wicket.ClassAttributeModifier; import org.apache.wicket.Component; import org.apache.wicket.ajax.AbstractDefaultAjaxBehavior; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.json.JSONObject; import org.apache.wicket.ajax.markup.html.form.AjaxButton; import org.apache.wicket.behavior.AttributeAppender; import org.apache.wicket.behavior.Behavior; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.OnDomReadyHeaderItem; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.upload.FileUpload; import org.apache.wicket.markup.html.form.upload.FileUploadField; import org.apache.wicket.markup.html.image.Image; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.IModel; import org.apache.wicket.request.http.flow.AbortWithHttpErrorCodeException; import org.apache.wicket.request.resource.ResourceStreamResource; import org.apache.wicket.util.template.PackageTextTemplate; import org.opensingular.form.SIList; import org.opensingular.form.SInstance; import org.opensingular.form.servlet.MimeTypes; import org.opensingular.form.type.core.attachment.IAttachmentPersistenceHandler; import org.opensingular.form.type.core.attachment.SIAttachment; import org.opensingular.form.view.FileEventListener; import org.opensingular.form.wicket.behavior.DisabledClassBehavior; import org.opensingular.form.wicket.enums.ViewMode; import org.opensingular.form.wicket.mapper.attachment.BaseJQueryFileUploadBehavior; import org.opensingular.form.wicket.mapper.attachment.DownloadLink; import org.opensingular.form.wicket.mapper.attachment.DownloadSupportedBehavior; import org.opensingular.form.wicket.mapper.attachment.image.SIAttachmentIResourceStream; import org.opensingular.form.wicket.mapper.attachment.upload.AttachmentKey; import org.opensingular.form.wicket.mapper.attachment.upload.FileUploadManager; import org.opensingular.form.wicket.mapper.attachment.upload.FileUploadManagerFactory; import org.opensingular.form.wicket.mapper.attachment.upload.UploadResponseWriter; import org.opensingular.form.wicket.mapper.attachment.upload.info.UploadResponseInfo; import org.opensingular.form.wicket.mapper.attachment.upload.servlet.strategy.AttachmentKeyStrategy; import org.opensingular.form.wicket.model.ISInstanceAwareModel; import org.opensingular.lib.commons.base.SingularProperties; import org.opensingular.lib.commons.lambda.IConsumer; import org.opensingular.lib.commons.util.Loggable; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import static 
org.opensingular.form.wicket.mapper.attachment.upload.servlet.strategy.ServletFileUploadStrategy.PARAM_NAME; import static org.opensingular.lib.commons.base.SingularProperties.SINGULAR_FILEUPLOAD_MAXCHUNKSIZE; public class FileUploadPanel extends Panel implements Loggable { public static final String DEFAULT_FILE_UPLOAD_MAX_CHUNK_SIZE = "2000000"; private final FileUploadManagerFactory upManagerFactory = new FileUploadManagerFactory(); private final UploadResponseWriter upResponseWriter = new UploadResponseWriter(); private AddFileBehavior adder; private final ViewMode viewMode; private final FileUploadPanel self = this; private final AjaxButton removeFileButton = new RemoveButton("remove_btn"); private final WebMarkupContainer uploadFileButton = new UploadButton("upload_btn"); private FileUploadField fileField; private WebMarkupContainer filesContainer, progressBar, downloadLinkContainer; private DownloadSupportedBehavior downloader; private DownloadLink downloadLink; private AttachmentKey uploadId; private boolean showPreview = false; private WebMarkupContainer preview; private AbstractDefaultAjaxBehavior previewCallBack; private List<FileEventListener> fileUploadedListeners = new ArrayList<>(); private List<FileEventListener> fileRemovedListeners = new ArrayList<>(); private IConsumer<AjaxRequestTarget> consumerAfterLoadImage; //Behavior that will be executed after load the image. private IConsumer<AjaxRequestTarget> consumerAfterRemoveImage; //Behavior that will be executed after remove the image. public FileUploadPanel(String id, IModel<SIAttachment> model, ViewMode viewMode) { super(id, model); this.viewMode = viewMode; buildFileUploadInput(); } @SuppressWarnings("unchecked") public IModel<SIAttachment> getModel() { return (IModel<SIAttachment>) getDefaultModel(); } public SIAttachment getModelObject() { return (SIAttachment) getDefaultModelObject(); } private ISInstanceAwareModel<List<FileUpload>> dummyModel(final IModel<SIAttachment> model) { return new ISInstanceAwareModel<List<FileUpload>>() { //@formatter:off @Override public List<FileUpload> getObject() { return null; } @Override public void setObject(List<FileUpload> object) { } @Override public void detach() { } @Override public SInstance getSInstance() { return model.getObject(); } //@formatter:on }; } protected void buildFileUploadInput() { adder = new AddFileBehavior(); add(adder); downloader = new DownloadSupportedBehavior(self.getModel()); add(downloader); downloadLinkContainer = new WebMarkupContainer("input-div"); downloadLinkContainer.add(new DisabledClassBehavior("singular-upload-field-disabled")); downloadLink = new DownloadLink("downloadLink", self.getModel(), downloader); filesContainer = new WebMarkupContainer("files"); progressBar = new WebMarkupContainer("progress"); add(downloadLinkContainer); downloadLinkContainer.add(filesContainer); filesContainer.add(downloadLink); downloadLinkContainer.add(progressBar); fileField = new FileUploadField("fileUpload", dummyModel(self.getModel())); fileField.add(new DisabledClassBehavior("singular-upload-disabled")); add(uploadFileButton.add(fileField)); add(removeFileButton.add(new AttributeAppender("title", "Excluir"))); add(new ClassAttributeModifier() { @Override protected Set<String> update(Set<String> oldClasses) { oldClasses.add("fileinput fileinput-new upload-single upload-single-uploaded"); return oldClasses; } }); addPreview(); } private void addPreview() { preview = new WebMarkupContainer("preview"); Image imagePreview = new Image("imagePreview", new 
ResourceStreamResource(new SIAttachmentIResourceStream(self.getModel()))); add(preview.add(imagePreview)); preview.add(new Behavior() { @Override public void onConfigure(Component component) { super.onConfigure(component); component.setVisible(showPreview && !self.getModel().getObject().isEmptyOfData()); } }); previewCallBack = new AbstractDefaultAjaxBehavior() { @Override protected void respond(AjaxRequestTarget target) { target.add(preview); if (consumerAfterLoadImage != null) { consumerAfterLoadImage.accept(target); } } }; this.add(previewCallBack); } @Override protected void onConfigure() { super.onConfigure(); final FileUploadManager fileUploadManager = getFileUploadManager(); if (uploadId == null || !fileUploadManager.findUploadInfoByAttachmentKey(uploadId).isPresent()) { final SIAttachment attachment = getModelObject(); this.uploadId = fileUploadManager.createUpload(attachment.asAtr().getMaxFileSize(), null, attachment.asAtr().getAllowedFileTypes(), this::getTemporaryHandler); } } private IAttachmentPersistenceHandler getTemporaryHandler() { return getModel().getObject().getDocument().getAttachmentPersistenceTemporaryHandler(); } @Override @SuppressWarnings("squid:S2095") public void renderHead(IHeaderResponse response) { super.renderHead(response); PackageTextTemplate fileUploadJSTemplate = new PackageTextTemplate(FileUploadPanel.class, "FileUploadPanel.js"); Map<String, String> params = new HashMap<>(); params.put("maxChunkSize", SingularProperties.get(SINGULAR_FILEUPLOAD_MAXCHUNKSIZE, DEFAULT_FILE_UPLOAD_MAX_CHUNK_SIZE)); response.render(OnDomReadyHeaderItem.forScript(fileUploadJSTemplate.interpolate(params).asString())); response.render(OnDomReadyHeaderItem.forScript(generateInitJS())); } private String generateInitJS() { if (viewMode.isEdition()) { return "" //@formatter:off + "\n $(function () { " + "\n window.FileUploadPanel.setup(" + new JSONObject() .put("param_name", PARAM_NAME) .put("panel_id", self.getMarkupId()) .put("file_field_id", fileField.getMarkupId()) .put("files_id", filesContainer.getMarkupId()) .put("progress_bar_id", progressBar.getMarkupId()) .put("upload_url", getUploadUrl()) .put("download_url", getDownloaderUrl()) .put("add_url", getAdderUrl()) .put("max_file_size", getMaxFileSize()) .put("allowed_file_types", getAllowedFileTypes()) .put("preview_update_callback", previewCallBack.getCallbackUrl()) .put("allowed_file_extensions", getAllowedExtensions()) .toString(2) + "); " + "\n });"; //@formatter:on } else { return ""; } } private String getAdderUrl() { return adder.getUrl(); } private String getDownloaderUrl() { return downloader.getUrl(); } private String getUploadUrl() { return AttachmentKeyStrategy.getUploadUrl(getServletRequest(), uploadId); } private FileUploadManager getFileUploadManager() { return upManagerFactory.getFileUploadManagerFromSessionOrMakeAndAttach(getServletRequest().getSession()); } private HttpServletRequest getServletRequest() { return (HttpServletRequest) getWebRequest().getContainerRequest(); } private long getMaxFileSize() { return getModelObject().asAtr().getMaxFileSize(); } private List<String> getAllowedFileTypes() { return getModelObject().asAtr().getAllowedFileTypes(); } private Set<String> getAllowedExtensions() { return MimeTypes.getExtensionsFormMimeTypes(getAllowedFileTypes(), true); } public FileUploadField getUploadField() { return fileField; } public FileUploadPanel registerFileUploadedListener(FileEventListener fileUploadedListener) { this.fileUploadedListeners.add(fileUploadedListener); return this; } public 
FileUploadPanel registerFileRemovedListener(FileEventListener fileRemovedListener) { this.fileRemovedListeners.add(fileRemovedListener); return this; } private final class UploadButton extends WebMarkupContainer { private UploadButton(String id) { super(id); } @Override protected void onInitialize() { super.onInitialize(); add(new ClassAttributeModifier() { protected Set<String> update(Set<String> oldClasses) { if (self.getModelObject().getFileId() != null) { oldClasses.add("file-trash-button-hidden"); } return oldClasses; } }); add(DisabledClassBehavior.getInstance()); } } private final class RemoveButton extends AjaxButton { private RemoveButton(String id) { super(id); } @Override protected void onInitialize() { super.onInitialize(); add(new ClassAttributeModifier() { protected Set<String> update(Set<String> oldClasses) { if (self.getModelObject().getFileId() == null) { oldClasses.add("file-trash-button-hidden"); } return oldClasses; } }); } @Override protected void onSubmit(AjaxRequestTarget target, Form<?> form) { super.onSubmit(target, form); for (FileEventListener fileRemovedListener : fileRemovedListeners) { fileRemovedListener.accept(self.getModelObject()); } self.getModelObject().clearInstance(); if (self.getModelObject().getParent() instanceof SIList) { final SIList<?> parent = (SIList<?>) self.getModelObject().getParent(); parent.remove(parent.indexOf(self.getModelObject())); target.add(form); } else { target.add(FileUploadPanel.this); } if (consumerAfterRemoveImage != null) { consumerAfterRemoveImage.accept(target); } } } private class AddFileBehavior extends BaseJQueryFileUploadBehavior<SIAttachment> { public AddFileBehavior() { super(FileUploadPanel.this.getModel()); } @Override public void onResourceRequested() { final HttpServletResponse httpResp = (HttpServletResponse) getWebResponse().getContainerResponse(); try { final String pFileId = getParamFileId("fileId").toString(); final String pName = getParamFileId("name").toString(); getLogger().debug("FileUploadPanel.AddFileBehavior(fileId={},name={})", pFileId, pName); Optional<UploadResponseInfo> responseInfo = getFileUploadManager().consumeFile(pFileId, attachment -> { final SIAttachment si = (SIAttachment) FileUploadPanel.this.getDefaultModel().getObject(); si.update(attachment); for (FileEventListener fileUploadedListener : fileUploadedListeners) { fileUploadedListener.accept(si); } return new UploadResponseInfo(si); }); UploadResponseInfo uploadResponseInfo = responseInfo .orElseThrow(() -> new AbortWithHttpErrorCodeException(HttpServletResponse.SC_NOT_FOUND)); upResponseWriter.writeJsonObjectResponseTo(httpResp, uploadResponseInfo); } catch (AbortWithHttpErrorCodeException e) { getLogger().error(e.getMessage(), e); throw e; } catch (Exception e) { getLogger().error(e.getMessage(), e); throw new AbortWithHttpErrorCodeException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); } } } public void setShowPreview(boolean showPreview) { this.showPreview = showPreview; } public IConsumer<AjaxRequestTarget> getConsumerAfterLoadImage() { return consumerAfterLoadImage; } public void setConsumerAfterLoadImage(IConsumer<AjaxRequestTarget> consumerAfterLoadImage) { this.consumerAfterLoadImage = consumerAfterLoadImage; } public IConsumer<AjaxRequestTarget> getConsumerAfterRemoveImage() { return consumerAfterRemoveImage; } public void setConsumerAfterRemoveImage(IConsumer<AjaxRequestTarget> consumerAfterRemoveImage) { this.consumerAfterRemoveImage = consumerAfterRemoveImage; } }
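Callers wire the panel above through its public hooks (registerFileUploadedListener, registerFileRemovedListener, setShowPreview). A minimal sketch of that wiring follows; the wicket:id "attachment", the ViewMode.EDIT constant, and the use of lambdas for FileEventListener are assumptions rather than details taken from this file.

import org.apache.wicket.model.IModel;

import org.opensingular.form.type.core.attachment.SIAttachment;
import org.opensingular.form.wicket.enums.ViewMode;
import org.opensingular.form.wicket.mapper.attachment.single.FileUploadPanel;

public class FileUploadPanelWiring {
    // Hypothetical factory: builds a single-attachment upload panel in edition mode.
    static FileUploadPanel newAttachmentPanel(IModel<SIAttachment> model) {
        FileUploadPanel panel = new FileUploadPanel("attachment", model, ViewMode.EDIT);
        panel.setShowPreview(true); // render the image preview once the attachment has data
        panel.registerFileUploadedListener(att -> System.out.println("file uploaded"));
        panel.registerFileRemovedListener(att -> System.out.println("file removed"));
        return panel;
    }
}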
form/wicket/src/main/java/org/opensingular/form/wicket/mapper/attachment/single/FileUploadPanel.java
/* * Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opensingular.form.wicket.mapper.attachment.single; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.wicket.ClassAttributeModifier; import org.apache.wicket.Component; import org.apache.wicket.ajax.AbstractDefaultAjaxBehavior; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.json.JSONObject; import org.apache.wicket.ajax.markup.html.form.AjaxButton; import org.apache.wicket.behavior.AttributeAppender; import org.apache.wicket.behavior.Behavior; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptReferenceHeaderItem; import org.apache.wicket.markup.head.OnDomReadyHeaderItem; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.upload.FileUpload; import org.apache.wicket.markup.html.form.upload.FileUploadField; import org.apache.wicket.markup.html.image.Image; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.IModel; import org.apache.wicket.request.http.flow.AbortWithHttpErrorCodeException; import org.apache.wicket.request.resource.PackageResourceReference; import org.apache.wicket.request.resource.ResourceStreamResource; import org.apache.wicket.util.template.PackageTextTemplate; import org.opensingular.form.SIList; import org.opensingular.form.SInstance; import org.opensingular.form.servlet.MimeTypes; import org.opensingular.form.type.core.attachment.IAttachmentPersistenceHandler; import org.opensingular.form.type.core.attachment.SIAttachment; import org.opensingular.form.view.FileEventListener; import org.opensingular.form.wicket.behavior.DisabledClassBehavior; import org.opensingular.form.wicket.enums.ViewMode; import org.opensingular.form.wicket.mapper.attachment.BaseJQueryFileUploadBehavior; import org.opensingular.form.wicket.mapper.attachment.DownloadLink; import org.opensingular.form.wicket.mapper.attachment.DownloadSupportedBehavior; import org.opensingular.form.wicket.mapper.attachment.image.SIAttachmentIResourceStream; import org.opensingular.form.wicket.mapper.attachment.upload.AttachmentKey; import org.opensingular.form.wicket.mapper.attachment.upload.FileUploadManager; import org.opensingular.form.wicket.mapper.attachment.upload.FileUploadManagerFactory; import org.opensingular.form.wicket.mapper.attachment.upload.UploadResponseWriter; import org.opensingular.form.wicket.mapper.attachment.upload.info.UploadResponseInfo; import org.opensingular.form.wicket.mapper.attachment.upload.servlet.strategy.AttachmentKeyStrategy; import org.opensingular.form.wicket.model.ISInstanceAwareModel; import org.opensingular.lib.commons.lambda.IConsumer; import org.opensingular.lib.commons.util.Loggable; 
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import static org.opensingular.form.wicket.mapper.attachment.upload.servlet.strategy.ServletFileUploadStrategy.PARAM_NAME; import static org.opensingular.lib.commons.base.SingularProperties.*; public class FileUploadPanel extends Panel implements Loggable { public static final String DEFAULT_FILE_UPLOAD_MAX_CHUNK_SIZE = "2000000"; private final FileUploadManagerFactory upManagerFactory = new FileUploadManagerFactory(); private final UploadResponseWriter upResponseWriter = new UploadResponseWriter(); private AddFileBehavior adder; private final ViewMode viewMode; private final FileUploadPanel self = this; private final AjaxButton removeFileButton = new RemoveButton("remove_btn"); private final WebMarkupContainer uploadFileButton = new UploadButton("upload_btn"); private FileUploadField fileField; private WebMarkupContainer filesContainer, progressBar, downloadLinkContainer; private DownloadSupportedBehavior downloader; private DownloadLink downloadLink; private AttachmentKey uploadId; private boolean showPreview = false; private WebMarkupContainer preview; private AbstractDefaultAjaxBehavior previewCallBack; private List<FileEventListener> fileUploadedListeners = new ArrayList<>(); private List<FileEventListener> fileRemovedListeners = new ArrayList<>(); private IConsumer<AjaxRequestTarget> consumerAfterLoadImage; //Behavior that will be executed after load the image. private IConsumer<AjaxRequestTarget> consumerAfterRemoveImage; //Behavior that will be executed after remove the image. public FileUploadPanel(String id, IModel<SIAttachment> model, ViewMode viewMode) { super(id, model); this.viewMode = viewMode; buildFileUploadInput(); } @SuppressWarnings("unchecked") public IModel<SIAttachment> getModel() { return (IModel<SIAttachment>) getDefaultModel(); } public SIAttachment getModelObject() { return (SIAttachment) getDefaultModelObject(); } private ISInstanceAwareModel<List<FileUpload>> dummyModel(final IModel<SIAttachment> model) { return new ISInstanceAwareModel<List<FileUpload>>() { //@formatter:off @Override public List<FileUpload> getObject() { return null; } @Override public void setObject(List<FileUpload> object) { } @Override public void detach() { } @Override public SInstance getSInstance() { return model.getObject(); } //@formatter:on }; } protected void buildFileUploadInput() { adder = new AddFileBehavior(); add(adder); downloader = new DownloadSupportedBehavior(self.getModel()); add(downloader); downloadLinkContainer = new WebMarkupContainer("input-div"); downloadLinkContainer.add(new DisabledClassBehavior("singular-upload-field-disabled")); downloadLink = new DownloadLink("downloadLink", self.getModel(), downloader); filesContainer = new WebMarkupContainer("files"); progressBar = new WebMarkupContainer("progress"); add(downloadLinkContainer); downloadLinkContainer.add(filesContainer); filesContainer.add(downloadLink); downloadLinkContainer.add(progressBar); fileField = new FileUploadField("fileUpload", dummyModel(self.getModel())); fileField.add(new DisabledClassBehavior("singular-upload-disabled")); add(uploadFileButton.add(fileField)); add(removeFileButton.add(new AttributeAppender("title", "Excluir"))); add(new ClassAttributeModifier() { @Override protected Set<String> update(Set<String> oldClasses) { oldClasses.add("fileinput fileinput-new upload-single upload-single-uploaded"); 
return oldClasses; } }); addPreview(); } private void addPreview() { preview = new WebMarkupContainer("preview"); Image imagePreview = new Image("imagePreview", new ResourceStreamResource(new SIAttachmentIResourceStream(self.getModel()))); add(preview.add(imagePreview)); preview.add(new Behavior() { @Override public void onConfigure(Component component) { super.onConfigure(component); component.setVisible(showPreview && !self.getModel().getObject().isEmptyOfData()); } }); previewCallBack = new AbstractDefaultAjaxBehavior() { @Override protected void respond(AjaxRequestTarget target) { target.add(preview); if (consumerAfterLoadImage != null) { consumerAfterLoadImage.accept(target); } } }; this.add(previewCallBack); } @Override protected void onConfigure() { super.onConfigure(); final FileUploadManager fileUploadManager = getFileUploadManager(); if (uploadId == null || !fileUploadManager.findUploadInfoByAttachmentKey(uploadId).isPresent()) { final SIAttachment attachment = getModelObject(); this.uploadId = fileUploadManager.createUpload(attachment.asAtr().getMaxFileSize(), null, attachment.asAtr().getAllowedFileTypes(), this::getTemporaryHandler); } } private IAttachmentPersistenceHandler getTemporaryHandler() { return getModel().getObject().getDocument().getAttachmentPersistenceTemporaryHandler(); } @Override @SuppressWarnings("squid:S2095") public void renderHead(IHeaderResponse response) { super.renderHead(response); PackageTextTemplate fileUploadJSTemplate = new PackageTextTemplate(FileUploadPanel.class, "FileUploadPanel.js"); Map<String, String> params = new HashMap<>(); params.put("maxChunkSize", SingularProperties.get(SINGULAR_FILEUPLOAD_MAXCHUNKSIZE, DEFAULT_FILE_UPLOAD_MAX_CHUNK_SIZE)); response.render(OnDomReadyHeaderItem.forScript(fileUploadJSTemplate.interpolate(params).asString())); response.render(OnDomReadyHeaderItem.forScript(generateInitJS())); } private String generateInitJS() { if (viewMode.isEdition()) { return "" //@formatter:off + "\n $(function () { " + "\n window.FileUploadPanel.setup(" + new JSONObject() .put("param_name", PARAM_NAME) .put("panel_id", self.getMarkupId()) .put("file_field_id", fileField.getMarkupId()) .put("files_id", filesContainer.getMarkupId()) .put("progress_bar_id", progressBar.getMarkupId()) .put("upload_url", getUploadUrl()) .put("download_url", getDownloaderUrl()) .put("add_url", getAdderUrl()) .put("max_file_size", getMaxFileSize()) .put("allowed_file_types", getAllowedFileTypes()) .put("preview_update_callback", previewCallBack.getCallbackUrl()) .put("allowed_file_extensions", getAllowedExtensions()) .toString(2) + "); " + "\n });"; //@formatter:on } else { return ""; } } private String getAdderUrl() { return adder.getUrl(); } private String getDownloaderUrl() { return downloader.getUrl(); } private String getUploadUrl() { return AttachmentKeyStrategy.getUploadUrl(getServletRequest(), uploadId); } private FileUploadManager getFileUploadManager() { return upManagerFactory.getFileUploadManagerFromSessionOrMakeAndAttach(getServletRequest().getSession()); } private HttpServletRequest getServletRequest() { return (HttpServletRequest) getWebRequest().getContainerRequest(); } private long getMaxFileSize() { return getModelObject().asAtr().getMaxFileSize(); } private List<String> getAllowedFileTypes() { return getModelObject().asAtr().getAllowedFileTypes(); } private Set<String> getAllowedExtensions() { return MimeTypes.getExtensionsFormMimeTypes(getAllowedFileTypes(), true); } public FileUploadField getUploadField() { return fileField; } public 
FileUploadPanel registerFileUploadedListener(FileEventListener fileUploadedListener) { this.fileUploadedListeners.add(fileUploadedListener); return this; } public FileUploadPanel registerFileRemovedListener(FileEventListener fileRemovedListener) { this.fileRemovedListeners.add(fileRemovedListener); return this; } private final class UploadButton extends WebMarkupContainer { private UploadButton(String id) { super(id); } @Override protected void onInitialize() { super.onInitialize(); add(new ClassAttributeModifier() { protected Set<String> update(Set<String> oldClasses) { if (self.getModelObject().getFileId() != null) { oldClasses.add("file-trash-button-hidden"); } return oldClasses; } }); add(DisabledClassBehavior.getInstance()); } } private final class RemoveButton extends AjaxButton { private RemoveButton(String id) { super(id); } @Override protected void onInitialize() { super.onInitialize(); add(new ClassAttributeModifier() { protected Set<String> update(Set<String> oldClasses) { if (self.getModelObject().getFileId() == null) { oldClasses.add("file-trash-button-hidden"); } return oldClasses; } }); } @Override protected void onSubmit(AjaxRequestTarget target, Form<?> form) { super.onSubmit(target, form); for (FileEventListener fileRemovedListener : fileRemovedListeners) { fileRemovedListener.accept(self.getModelObject()); } self.getModelObject().clearInstance(); if (self.getModelObject().getParent() instanceof SIList) { final SIList<?> parent = (SIList<?>) self.getModelObject().getParent(); parent.remove(parent.indexOf(self.getModelObject())); target.add(form); } else { target.add(FileUploadPanel.this); } if (consumerAfterRemoveImage != null) { consumerAfterRemoveImage.accept(target); } } } private class AddFileBehavior extends BaseJQueryFileUploadBehavior<SIAttachment> { public AddFileBehavior() { super(FileUploadPanel.this.getModel()); } @Override public void onResourceRequested() { final HttpServletResponse httpResp = (HttpServletResponse) getWebResponse().getContainerResponse(); try { final String pFileId = getParamFileId("fileId").toString(); final String pName = getParamFileId("name").toString(); getLogger().debug("FileUploadPanel.AddFileBehavior(fileId={},name={})", pFileId, pName); Optional<UploadResponseInfo> responseInfo = getFileUploadManager().consumeFile(pFileId, attachment -> { final SIAttachment si = (SIAttachment) FileUploadPanel.this.getDefaultModel().getObject(); si.update(attachment); for (FileEventListener fileUploadedListener : fileUploadedListeners) { fileUploadedListener.accept(si); } return new UploadResponseInfo(si); }); UploadResponseInfo uploadResponseInfo = responseInfo .orElseThrow(() -> new AbortWithHttpErrorCodeException(HttpServletResponse.SC_NOT_FOUND)); upResponseWriter.writeJsonObjectResponseTo(httpResp, uploadResponseInfo); } catch (AbortWithHttpErrorCodeException e) { getLogger().error(e.getMessage(), e); throw e; } catch (Exception e) { getLogger().error(e.getMessage(), e); throw new AbortWithHttpErrorCodeException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); } } } public void setShowPreview(boolean showPreview) { this.showPreview = showPreview; } public IConsumer<AjaxRequestTarget> getConsumerAfterLoadImage() { return consumerAfterLoadImage; } public void setConsumerAfterLoadImage(IConsumer<AjaxRequestTarget> consumerAfterLoadImage) { this.consumerAfterLoadImage = consumerAfterLoadImage; } public IConsumer<AjaxRequestTarget> getConsumerAfterRemoveImage() { return consumerAfterRemoveImage; } public void 
setConsumerAfterRemoveImage(IConsumer<AjaxRequestTarget> consumerAfterRemoveImage) { this.consumerAfterRemoveImage = consumerAfterRemoveImage; } }
[SGL-645] - Import fixes
form/wicket/src/main/java/org/opensingular/form/wicket/mapper/attachment/single/FileUploadPanel.java
[SGL-645] - Import fixes
<ide><path>orm/wicket/src/main/java/org/opensingular/form/wicket/mapper/attachment/single/FileUploadPanel.java <ide> <ide> package org.opensingular.form.wicket.mapper.attachment.single; <ide> <del>import java.util.ArrayList; <del>import java.util.List; <del>import java.util.Optional; <del>import java.util.Set; <del>import javax.servlet.http.HttpServletRequest; <del>import javax.servlet.http.HttpServletResponse; <del> <ide> import org.apache.wicket.ClassAttributeModifier; <ide> import org.apache.wicket.Component; <ide> import org.apache.wicket.ajax.AbstractDefaultAjaxBehavior; <ide> import org.apache.wicket.behavior.AttributeAppender; <ide> import org.apache.wicket.behavior.Behavior; <ide> import org.apache.wicket.markup.head.IHeaderResponse; <del>import org.apache.wicket.markup.head.JavaScriptReferenceHeaderItem; <ide> import org.apache.wicket.markup.head.OnDomReadyHeaderItem; <ide> import org.apache.wicket.markup.html.WebMarkupContainer; <ide> import org.apache.wicket.markup.html.form.Form; <ide> import org.apache.wicket.markup.html.panel.Panel; <ide> import org.apache.wicket.model.IModel; <ide> import org.apache.wicket.request.http.flow.AbortWithHttpErrorCodeException; <del>import org.apache.wicket.request.resource.PackageResourceReference; <ide> import org.apache.wicket.request.resource.ResourceStreamResource; <ide> import org.apache.wicket.util.template.PackageTextTemplate; <ide> import org.opensingular.form.SIList; <ide> import org.opensingular.form.wicket.mapper.attachment.upload.info.UploadResponseInfo; <ide> import org.opensingular.form.wicket.mapper.attachment.upload.servlet.strategy.AttachmentKeyStrategy; <ide> import org.opensingular.form.wicket.model.ISInstanceAwareModel; <add>import org.opensingular.lib.commons.base.SingularProperties; <ide> import org.opensingular.lib.commons.lambda.IConsumer; <ide> import org.opensingular.lib.commons.util.Loggable; <ide> <ide> import javax.servlet.http.HttpServletRequest; <ide> import javax.servlet.http.HttpServletResponse; <ide> import java.util.ArrayList; <add>import java.util.HashMap; <ide> import java.util.List; <add>import java.util.Map; <ide> import java.util.Optional; <ide> import java.util.Set; <ide> <ide> import static org.opensingular.form.wicket.mapper.attachment.upload.servlet.strategy.ServletFileUploadStrategy.PARAM_NAME; <del>import static org.opensingular.lib.commons.base.SingularProperties.*; <add>import static org.opensingular.lib.commons.base.SingularProperties.SINGULAR_FILEUPLOAD_MAXCHUNKSIZE; <ide> <ide> public class FileUploadPanel extends Panel implements Loggable { <ide>
Java
agpl-3.0
3c8a124b36f8fbcee97314c147a57d1578a2e079
0
ua-eas/kfs,kuali/kfs,quikkian-ua-devops/kfs,UniversityOfHawaii/kfs,smith750/kfs,kkronenb/kfs,ua-eas/kfs,UniversityOfHawaii/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/will-financials,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs,kkronenb/kfs,kuali/kfs,kuali/kfs,bhutchinson/kfs,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,smith750/kfs,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,ua-eas/kfs,bhutchinson/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs-devops-automation-fork,ua-eas/kfs-devops-automation-fork,kuali/kfs,quikkian-ua-devops/will-financials,smith750/kfs,bhutchinson/kfs,kuali/kfs,UniversityOfHawaii/kfs,ua-eas/kfs,kkronenb/kfs,kkronenb/kfs,quikkian-ua-devops/will-financials,smith750/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs,bhutchinson/kfs
/* * Copyright 2006-2007 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.module.purap.bo; import java.util.LinkedHashMap; import org.kuali.core.bo.PersistableBusinessObjectBase; import org.kuali.kfs.context.SpringContext; import org.kuali.module.vendor.bo.VendorDetail; import org.kuali.module.vendor.service.VendorService; /** * Purchase Order Quote List Vendor Business Object. */ public class PurchaseOrderQuoteListVendor extends PersistableBusinessObjectBase { private Integer purchaseOrderQuoteListIdentifier; private Integer vendorHeaderGeneratedIdentifier; private Integer vendorDetailAssignedIdentifier; private PurchaseOrderQuoteList purchaseOrderQuoteList; private VendorDetail vendorDetail; /** * Default constructor. */ public PurchaseOrderQuoteListVendor() { } public Integer getPurchaseOrderQuoteListIdentifier() { return purchaseOrderQuoteListIdentifier; } public void setPurchaseOrderQuoteListIdentifier(Integer purchaseOrderQuoteListIdentifier) { this.purchaseOrderQuoteListIdentifier = purchaseOrderQuoteListIdentifier; } public Integer getVendorHeaderGeneratedIdentifier() { return vendorHeaderGeneratedIdentifier; } public void setVendorHeaderGeneratedIdentifier(Integer vendorHeaderGeneratedIdentifier) { this.vendorHeaderGeneratedIdentifier = vendorHeaderGeneratedIdentifier; } public Integer getVendorDetailAssignedIdentifier() { return vendorDetailAssignedIdentifier; } public void setVendorDetailAssignedIdentifier(Integer vendorDetailAssignedIdentifier) { this.vendorDetailAssignedIdentifier = vendorDetailAssignedIdentifier; } public PurchaseOrderQuoteList getPurchaseOrderQuoteList() { return purchaseOrderQuoteList; } /** * Sets the purchaseOrderQuoteList attribute value. * * @param purchaseOrderQuoteList The purchaseOrderQuoteList to set. 
* @deprecated */ public void setPurchaseOrderQuoteList(PurchaseOrderQuoteList purchaseOrderQuoteList) { this.purchaseOrderQuoteList = purchaseOrderQuoteList; } public VendorDetail getVendorDetail() { if (vendorHeaderGeneratedIdentifier != null && vendorDetailAssignedIdentifier != null && (vendorDetail == null || vendorDetail.getVendorHeaderGeneratedIdentifier() != vendorHeaderGeneratedIdentifier || vendorDetail.getVendorDetailAssignedIdentifier() != vendorDetailAssignedIdentifier)) { vendorDetail = SpringContext.getBean(VendorService.class).getVendorDetail(vendorHeaderGeneratedIdentifier, vendorDetailAssignedIdentifier); } return vendorDetail; } public void setVendorDetail(VendorDetail vendorDetail) { this.vendorDetail = vendorDetail; } /** * @see org.kuali.core.bo.BusinessObjectBase#toStringMapper() */ protected LinkedHashMap toStringMapper() { LinkedHashMap m = new LinkedHashMap(); if (this.purchaseOrderQuoteListIdentifier != null) { m.put("purchaseOrderQuoteListIdentifier", this.purchaseOrderQuoteListIdentifier.toString()); } if (this.vendorHeaderGeneratedIdentifier != null) { m.put("vendorHeaderGeneratedIdentifier", this.vendorHeaderGeneratedIdentifier.toString()); } if (this.vendorDetailAssignedIdentifier != null) { m.put("vendorDetailAssignedIdentifier", this.vendorDetailAssignedIdentifier.toString()); } return m; } }
work/src/org/kuali/kfs/module/purap/businessobject/PurchaseOrderQuoteListVendor.java
/* * Copyright 2006-2007 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.module.purap.bo; import java.util.LinkedHashMap; import org.kuali.core.bo.PersistableBusinessObjectBase; import org.kuali.module.vendor.bo.VendorDetail; /** * Purchase Order Quote List Vendor Business Object. */ public class PurchaseOrderQuoteListVendor extends PersistableBusinessObjectBase { private Integer purchaseOrderQuoteListIdentifier; private Integer vendorHeaderGeneratedIdentifier; private Integer vendorDetailAssignedIdentifier; private PurchaseOrderQuoteList purchaseOrderQuoteList; private VendorDetail vendorDetail; /** * Default constructor. */ public PurchaseOrderQuoteListVendor() { } public Integer getPurchaseOrderQuoteListIdentifier() { return purchaseOrderQuoteListIdentifier; } public void setPurchaseOrderQuoteListIdentifier(Integer purchaseOrderQuoteListIdentifier) { this.purchaseOrderQuoteListIdentifier = purchaseOrderQuoteListIdentifier; } public Integer getVendorHeaderGeneratedIdentifier() { return vendorHeaderGeneratedIdentifier; } public void setVendorHeaderGeneratedIdentifier(Integer vendorHeaderGeneratedIdentifier) { this.vendorHeaderGeneratedIdentifier = vendorHeaderGeneratedIdentifier; } public Integer getVendorDetailAssignedIdentifier() { return vendorDetailAssignedIdentifier; } public void setVendorDetailAssignedIdentifier(Integer vendorDetailAssignedIdentifier) { this.vendorDetailAssignedIdentifier = vendorDetailAssignedIdentifier; } public PurchaseOrderQuoteList getPurchaseOrderQuoteList() { return purchaseOrderQuoteList; } /** * Sets the purchaseOrderQuoteList attribute value. * * @param purchaseOrderQuoteList The purchaseOrderQuoteList to set. * @deprecated */ public void setPurchaseOrderQuoteList(PurchaseOrderQuoteList purchaseOrderQuoteList) { this.purchaseOrderQuoteList = purchaseOrderQuoteList; } public VendorDetail getVendorDetail() { return vendorDetail; } public void setVendorDetail(VendorDetail vendorDetail) { this.vendorDetail = vendorDetail; } /** * @see org.kuali.core.bo.BusinessObjectBase#toStringMapper() */ protected LinkedHashMap toStringMapper() { LinkedHashMap m = new LinkedHashMap(); if (this.purchaseOrderQuoteListIdentifier != null) { m.put("purchaseOrderQuoteListIdentifier", this.purchaseOrderQuoteListIdentifier.toString()); } if (this.vendorHeaderGeneratedIdentifier != null) { m.put("vendorHeaderGeneratedIdentifier", this.vendorHeaderGeneratedIdentifier.toString()); } if (this.vendorDetailAssignedIdentifier != null) { m.put("vendorDetailAssignedIdentifier", this.vendorDetailAssignedIdentifier.toString()); } return m; } }
KULPURAP-766
work/src/org/kuali/kfs/module/purap/businessobject/PurchaseOrderQuoteListVendor.java
KULPURAP-766
<ide><path>ork/src/org/kuali/kfs/module/purap/businessobject/PurchaseOrderQuoteListVendor.java <ide> import java.util.LinkedHashMap; <ide> <ide> import org.kuali.core.bo.PersistableBusinessObjectBase; <add>import org.kuali.kfs.context.SpringContext; <ide> import org.kuali.module.vendor.bo.VendorDetail; <add>import org.kuali.module.vendor.service.VendorService; <ide> <ide> /** <ide> * Purchase Order Quote List Vendor Business Object. <ide> } <ide> <ide> public VendorDetail getVendorDetail() { <add> if (vendorHeaderGeneratedIdentifier != null && vendorDetailAssignedIdentifier != null && (vendorDetail == null || vendorDetail.getVendorHeaderGeneratedIdentifier() != vendorHeaderGeneratedIdentifier || vendorDetail.getVendorDetailAssignedIdentifier() != vendorDetailAssignedIdentifier)) { <add> vendorDetail = SpringContext.getBean(VendorService.class).getVendorDetail(vendorHeaderGeneratedIdentifier, vendorDetailAssignedIdentifier); <add> } <ide> return vendorDetail; <ide> } <ide>
JavaScript
mit
8d63b3debc0482c314c9fdbf47b8bfdef7579a19
0
jywarren/cartagen,goododd/cartagen-monza,jywarren/cartagen,jywarren/cartagen,jywarren/cartagen,goododd/cartagen-monza,goododd/cartagen-monza,jywarren/cartagen,jywarren/cartagen
// cartagen.js // // Copyright (C) 2009 Jeffrey Warren, Design Ecology, MIT Media Lab // // This file is part of the Cartagen mapping framework. Read more at // <http://cartagen.org> // // Cartagen is free software: you can redistribute it and/or modify // it under the terms of the MIT License. You should have received a copy // of the MIT License along with Cartagen. If not, see // <http://www.opensource.org/licenses/mit-license.php>. // // these belong in other objects... move them var lastPos = [0,0] var objects = [] PhoneGap = window.DeviceInfo && DeviceInfo.uuid != undefined // temp object unitl PhoneGap is initialized if (typeof cartagen_base_uri == 'undefined') { cartagen_base_uri = 'cartagen' } // additional dependencies: var scripts = [ cartagen_base_uri + '/canvastext.js', cartagen_base_uri + '/glop.js', cartagen_base_uri + '/events.js', cartagen_base_uri + '/lib/geohash.js', ] // load phonegap js if needed if(window.PhoneGap) { scripts.unshift(cartagen_base_uri + '/lib/phonegap/phonegap.base.js', cartagen_base_uri + '/lib/phonegap/geolocation.js', cartagen_base_uri + '/lib/phonegap/iphone/phonegap.js', cartagen_base_uri + '/lib/phonegap/iphone/geolocation.js') } // loads each script in scripts array, sequentially. // requires a load_next_script() call at the end of each // dependent script to trigger the next one. function load_next_script() { Cartagen.debug("loading: "+scripts[0]) if (scripts.length > 0) { load_script(scripts.splice(0,1)[0]) } } // loads a script into <script> tags, no cross-domain limits: function load_script(script) { $$('head')[0].insert(new Element('script', { 'src': script, 'type': 'text/javascript', 'charset': 'utf-8', evalJSON: 'force' })); } // some browsers don't have a console object, so create a dud one for them: if (typeof console == "undefined") { console = { log: function(param) {}}} // if (Prototype.Browser.MobileSafari) $('brief').hide() var Mouse = { x: 0, y: 0, click_x: 0, click_y: 0 } var Style = { styles: { // this doesn't get used. We don't have a body object to load them into... body: { fillStyle: "#eee", fontColor: "#eee", fontSize: 12 } }, style_body: function() { if (Style.styles) { if (Style.styles.body.fillStyle) fillStyle(Style.styles.body.fillStyle) if (Style.styles.body.strokeStyle) strokeStyle(Style.styles.body.strokeStyle) if (Style.styles.body.lineWidth || Style.styles.body.lineWidth == 0) lineWidth(Style.styles.body.lineWidth) if (Style.styles.body.pattern && Object.isUndefined(Style.styles.body.pattern_img)) { Style.styles.body.pattern_img = new Image() Style.styles.body.pattern_img.src = Style.styles.body.pattern } if (Style.styles.body.pattern_img) { try { fillStyle(canvas.createPattern(Style.styles.body.pattern_img,'repeat')) } catch(e) {} } rect(0,0,width,height) strokeRect(0,0,width,height) } canvas.lineJoin = 'round' canvas.lineCap = 'round' }, parse_styles: function(feature,selector) { try { // check for function or parameter for each style type... // or is it copying the function itself, and doesn't need to know if it's a function or parameter? 
if (selector.opacity) feature.opacity = selector.opacity if (selector.fillStyle) feature.fillStyle = selector.fillStyle if (selector.lineWidth || selector.lineWidth == 0) feature.lineWidth = selector.lineWidth if (selector.strokeStyle && Object.isFunction(selector.strokeStyle)) { // bind the styles object to the context of this Way: feature.strokeStyle = selector.strokeStyle() } else { feature.strokeStyle = selector.strokeStyle } // patterns if (selector.pattern) { feature.pattern_img = new Image() feature.pattern_img.src = selector.pattern } // radius is relevant to nodes, i.e. single points if (selector.radius) feature.radius = selector.radius // check selector for hover: if (selector['hover']) feature.hover = selector['hover'] if (selector['mouseDown']) feature.mouseDown = selector['mouseDown'] // copy styles based on feature name if (Style.styles[feature.name] && Style.styles[feature.name].fillStyle) feature.fillStyle = Style.styles[feature.name].fillStyle if (Style.styles[feature.name] && Style.styles[feature.name].strokeStyle) feature.strokeStyle = Style.styles[feature.name].strokeStyle // font styling: if (selector.fontColor) feature.label.fontColor = selector.fontColor if (selector.fontSize) feature.label.fontSize = selector.fontSize if (selector.fontScale) feature.label.fontScale = selector.fontScale if (selector.fontBackground) feature.label.fontBackground = selector.fontBackground if (selector.text) feature.label.text = selector.text feature.tags.each(function(tag) { //look for a style like this: if (Style.styles[tag.key]) { if (Style.styles[tag.key].opacity) feature.opacity = Style.styles[tag.key].opacity if (Style.styles[tag.key].fillStyle) feature.fillStyle = Style.styles[tag.key].fillStyle if (Style.styles[tag.key].strokeStyle) feature.strokeStyle = Style.styles[tag.key].strokeStyle if (Style.styles[tag.key].lineWidth) feature.lineWidth = Style.styles[tag.key].lineWidth if (Style.styles[tag.key].fontColor) feature.label.fontColor = Style.styles[tag.key].fontColor if (Style.styles[tag.key].fontSize) feature.label.fontSize = Style.styles[tag.key].fontSize if (Style.styles[tag.key].fontScale) feature.label.fontScale = Style.styles[tag.key].fontScale if (Style.styles[tag.key].fontBackground) feature.label.fontBackground = Style.styles[tag.key].fontBackground if (Style.styles[tag.key].text) { if (Object.isFunction(Style.styles[tag.key].text)) feature.label.text = Style.styles[tag.key].text.apply(feature) else feature.label.text = Style.styles[tag.key].text } if (Style.styles[tag.key].pattern) { feature.pattern_img = new Image() feature.pattern_img.src = Style.styles[tag.key].pattern } } if (Style.styles[tag.value]) { if (Style.styles[tag.value].opacity) feature.opacity = Style.styles[tag.value].opacity if (Style.styles[tag.value].fillStyle) feature.fillStyle = Style.styles[tag.value].fillStyle if (Style.styles[tag.value].strokeStyle) feature.strokeStyle = Style.styles[tag.value].strokeStyle if (Style.styles[tag.value].lineWidth) feature.label.lineWidth = Style.styles[tag.value].lineWidth if (Style.styles[tag.value].fontColor) feature.label.fontColor = Style.styles[tag.value].fontColor if (Style.styles[tag.value].fontSize) feature.label.fontSize = Style.styles[tag.value].fontSize if (Style.styles[tag.value].fontScale) feature.label.fontScale = Style.styles[tag.value].fontScale if (Style.styles[tag.value].fontBackground) feature.label.fontBackground = Style.styles[tag.value].fontBackground if (Style.styles[tag.value].text) { if (Object.isFunction(Style.styles[tag.value].text)) 
feature.label.text = Style.styles[tag.value].text.apply(feature) else feature.label.text = Style.styles[tag.value].text } if (Style.styles[tag.value].pattern) { feature.pattern_img = new Image() feature.pattern_img.src = Style.styles[tag.value].pattern } } //check tags for hover: if (Style.styles[tag.key] && Style.styles[tag.key]['hover']) { feature.hover = Style.styles[tag.key]['hover'] } if (Style.styles[tag.value] && Style.styles[tag.value]['hover']) { feature.hover = Style.styles[tag.value]['hover'] } //check tags for mouseDown: if (Style.styles[tag.key] && Style.styles[tag.key]['mouseDown']) { feature.mouseDown = Style.styles[tag.key]['mouseDown'] } if (Style.styles[tag.value] && Style.styles[tag.value]['mouseDown']) { feature.mouseDown = Style.styles[tag.value]['mouseDown'] } // check tags for refresh: if (Style.styles[tag.key] && Style.styles[tag.key]['refresh']) { $H(Style.styles[tag.key]['refresh']).each(function(pair) { Style.create_refresher(feature, pair.key, pair.value) }) } if (Style.styles[tag.value] && Style.styles[tag.value]['refresh']) { if(!feature.style_generators) feature.style_generators = {} $H(Style.styles[tag.value]['refresh']).each(function(pair) { Style.create_refresher(feature, pair.key, pair.value) }) } }) } catch(e) { Cartagen.debug("There was an error in your stylesheet. Please check http://wiki.cartagen.org for the GSS spec. Error: "+trace(e)) } }, create_refresher: function(feature, property, interval) { if (Object.isFunction(feature[property])) { //sanity check if (['fontBackground', 'fontColor', 'fontScale', 'fontSize', 'text'].include(property)) { feature = feature.label } if(!feature.style_generators) feature.style_generators = {} if(!feature.style_generators.executers) feature.style_generators.executers = {} feature.style_generators[property] = feature[property] Style.refresh_style(feature, property) feature.style_generators.executers[property] = new PeriodicalExecuter(function() { Style.refresh_style(feature, property) }, interval) } }, refresh_style: function(feature, property) { feature[property] = feature.style_generators[property]() }, // sets the canvas 'pen' styles using the object.foo style definitions apply_style: function(feature) { canvas.globalOpacity = 1 if (feature.opacity) { canvas.globalOpacity = Object.value(feature.opacity) } if (feature.strokeStyle) { strokeStyle(Object.value(feature.strokeStyle)) } if (feature.fillStyle) { fillStyle(Object.value(feature.fillStyle)) } if (feature.pattern_img) { fillStyle(canvas.createPattern(feature.pattern_img,'repeat')) } if (feature.lineWidth) { lineWidth(Object.value(feature.lineWidth)) } // trigger hover and mouseDown styles: if (feature instanceof Way) { if (feature.hover && feature.closed_poly && is_point_in_poly(feature.nodes,Map.pointer_x(),Map.pointer_y())) { Style.apply_style(feature.hover) if (!Object.isUndefined(feature.hover.action)) feature.hover.action() } if (feature.mouseDown && mouseDown == true && feature.closed_poly && is_point_in_poly(feature.nodes,Map.pointer_x(),Map.pointer_y())) { Style.apply_style(feature.mouseDown) if (!Object.isUndefined(feature.mouseDown.action)) feature.mouseDown.action() } } else if (feature instanceof Node) { if (feature.hover && overlaps(feature.x,feature.y,Map.pointer_x(),Map.pointer_y(),100)) { Style.apply_style(feature.hover) if (feature.hover.action) feature.hover.action() } if (feature.mouseDown && mouseDown == true && overlaps(feature.x,feature.y,Map.pointer_x(),Map.pointer_y(),100)) { Style.apply_style(feature.mouseDown) if 
(feature.mouseDown.action) feature.mouseDown.action() } } }, // same as apply_style but just for fonts. This was necessary because // strokeStyle and such have to be reset *after* drawing actual polygons but // *before* drawing text. apply_font_style: function(feature) { if (feature.fontColor) { if (Object.isFunction(feature.fontColor)) strokeStyle(feature.fontColor()) else strokeStyle(feature.fontColor) } }, // add an individual style to the styles object. May not actually work; old code. // add_style('highway','strokeStyle','red') add_style: function(tag,style,value) { eval("styles."+tag+" = {"+style+": '"+value+"'}") }, // load a remote stylesheet, given a URL load_styles: function(stylesheet_url) { if (stylesheet_url[0,4] == "http") { stylesheet_url = "/map/style?url="+stylesheet_url } new Ajax.Request(stylesheet_url,{ method: 'get', onComplete: function(result) { Cartagen.debug('applying '+stylesheet_url) Style.styles = ("{"+result.responseText+"}").evalJSON() Style.stylesheet_source = "{"+result.responseText+"}" Style.apply_gss(Style.stylesheet_source) // populate the gss field if($('gss_textarea')) { $('gss_textarea').value = Style.stylesheet_source } } }) }, // given a string of gss, applies the string to all Ways and Nodes in the objects array apply_gss: function(gss) { if (Object.isUndefined(arguments[1])) var clear_styles = true else clear_styles = arguments[1] Style.styles = gss.evalJSON() objects.each(function(object) { if (clear_styles) { object.lineWeight = null object.strokeStyle = null object.fillStyle = null object.hover = null object.mouseDown = null } if (object instanceof Node) Style.parse_styles(object,Style.styles.node) if (object instanceof Way) Style.parse_styles(object,Style.styles.way) },this) } } var Viewport = { } var Geohash = { hash: new Hash(), default_length: 4, // default length of geohash // adds a feature to a geohash index put: function(lat,lon,feature,length) { if (!length) length = this.default_length var _short_hash = encodeGeoHash(lat,lon).truncate(length,"") // check to see if the geohash is already populated: var merge_hash = this.hash.get(_short_hash) if (!merge_hash) { merge_hash = [feature] } else { merge_hash.push(feature) } this.hash.set(_short_hash,merge_hash) }, // fetch features in a geohash index get: function(key,length) { if (!length) length = this.default_length key = key.truncate(length,"") // default length of geohash return this.hash.get(key) }, trace: function() { this.hash.keys().each(function(key) { Cartagen.debug(this.hash.get(key).length) },this) } } var Cartagen = { object_count: 0, way_count: 0, node_count: 0, requested_plots: 0, stylesheet: "/style.gss", live: false, powersave: true, zoom_out_limit: 0.02, zoom_in_limit: 0, simplify: 1, live_gss: false, // this is for inline gss editing, generally only on cartagen.org static_map: true, static_map_layers: ["/static/rome/park.js"], dynamic_layers: [], range: 0.001, lat1: 41.9227, // these are the initial bounding boxes for the viewport lat2: 41.861, lng1: 12.4502, lng2: 12.5341, zoom_level: 0.05, plots: new Hash(), nodes: new Hash(), ways: new Hash(), fullscreen: true, bleed_level: 1, initial_bleed_level: 2, // this is how much plots bleed on the initial pageload label_queue: [], // queue of labels to draw debug_mode: typeof console != "undefined", setup: function(configs) { // geolocate with IP if available if (navigator.geolocation) navigator.geolocation.getCurrentPosition(User.set_loc) // wait for window load: Event.observe(window, 'load', this.initialize.bind(this,configs)) 
}, initialize: function(configs) { // queue dependencies: load_next_script() this.browser_check() // draw on window resize: Event.observe(window, 'resize', function() {try{draw()}catch(e){}}); // we can override right-click: // Event.observe(window, 'oncontextmenu', function() { return false }) Object.keys(configs).each(function(key,index) { this[key] = Object.values(configs)[index] // Cartagen.debug('configuring '+key+': '+this[key]) },this) if (this.get_url_param('gss')) this.stylesheet = this.get_url_param('gss') Map.initialize() // Startup: Style.load_styles(this.stylesheet) // stylesheet if (!this.static_map) { this.get_cached_plot(this.lat1,this.lng1,this.lat2,this.lng2,Cartagen.initial_bleed_level) new PeriodicalExecuter(this.get_current_plot,0.33) } else { if (Prototype.Browser.MobileSafari) { this.get_static_plot(this.static_map_layers[0]) this.get_static_plot(this.static_map_layers[1]) } else { this.static_map_layers.each(function(layer_url) { Cartagen.debug('fetching '+layer_url) this.get_static_plot(layer_url) },this) if (this.dynamic_layers.length > 0) { this.dynamic_layers.each(function(layer_url) { Cartagen.debug('fetching '+layer_url) load_script(layer_url) },this) } } } }, // Runs every frame in the draw() method. An attempt to isolate cartagen code from general GLOP code draw: function() { this.object_count = 0 this.way_count = 0 this.node_count = 0 Map.refresh_resolution() if (Prototype.Browser.MobileSafari || window.PhoneGap) Cartagen.simplify = 2 Style.style_body() translate(width/2,height/2) rotate(Map.rotate) scale(Cartagen.zoom_level,Cartagen.zoom_level) translate(width/-2,height/-2) // rotate(-1*Map.rotate) translate((-1*Map.x)+(width/2),(-1*Map.y)+(height/2)) // rotate(Map.rotate) // viewport stuff: strokeStyle('white') lineWidth(10) viewport_width = width*(1/Cartagen.zoom_level)-(100*(1/Cartagen.zoom_level)) viewport_height = height*(1/Cartagen.zoom_level)-(100*(1/Cartagen.zoom_level)) viewport = [Map.y-viewport_height/2,Map.x-viewport_width/2,Map.y+viewport_height/2,Map.x+viewport_width/2] strokeRect(Map.x-viewport_width/2,Map.y-viewport_height/2,viewport_width,viewport_height) }, // runs every frame in the draw() method, after Globjects have been drawn post_draw: function() { this.label_queue.each(function(item) { item[0].draw(item[1], item[2]) }) this.label_queue = [] }, // adds the label to the list of labels to be drawn when queue_label: function(label, x, y) { this.label_queue.push([label, x, y]) }, // show alert if it's IE: browser_check: function() { if ($('browsers')) { $('browsers').absolutize; $('browsers').style.top = "100px"; $('browsers').style.margin = "0 auto"; if (Prototype.Browser.IE) $('browsers').show(); } }, get_url_param: function(name) { name = name.replace(/[\[]/,"\\\[").replace(/[\]]/,"\\\]"); var regexS = "[\\?&]"+name+"=([^&#]*)"; var regex = new RegExp( regexS ); var results = regex.exec( window.location.href ); if( results == null ) return ""; else return results[1]; }, // sort ways by area: sort_by_area: function(a,b) { if (a instanceof Way) { if (b instanceof Way) { if ( a.area < b.area ) return 1; if ( a.area > b.area ) return -1; return 0; // a == b } else { return -1 // a wins no matter what if b is not a Way } } else { return 1 // b wins no matter what if a is not a Way } }, parse_objects: function(data) { data.osm.node.each(function(node){ var n = new Node n.h = 10 n.w = 10 n.color = randomColor() n.timestamp = node.timestamp n.user = node.user n.id = node.id n.lat = node.lat n.lon = node.lon n.x = Projection.lon_to_x(n.lon) 
n.y = Projection.lat_to_y(n.lat) Style.parse_styles(n,Style.styles.node) // can't currently afford to have all nodes in the map as well as all ways. // but we're missing some nodes when we render... semantic ones i think. cross-check. // objects.push(n) Cartagen.nodes.set(n.id,n) }) data.osm.way.each(function(way){ if (Cartagen.live || !Cartagen.ways.get(way.id)) { var data = { id: way.id, user: way.user, timestamp: way.timestamp, nodes: [], tags: new Hash() } if (way.name) data.name = way.name way.nd.each(function(nd, index) { if ((index % Cartagen.simplify) == 0 || index == 0 || index == way.nd.length-1 || way.nd.length <= Cartagen.simplify*2) { node = Cartagen.nodes.get(nd.ref) if (!Object.isUndefined(node)) data.nodes.push(node) } }) if (way.tag instanceof Array) { way.tag.each(function(tag) { data.tags.set(tag.k,tag.v) }) } else { data.tags.set(way.tag.k,way.tag.v) } new Way(data) } }) // data.osm.relation.each(function(way){ // var w = new Way // w.id = way.id // w.user = way.user // w.timestamp = way.timestamp // w.nodes = [] // way.nd.each(function(nd){ // //find the node corresponding to nd.ref // try { // w.nodes.push([nodes.get(nd.ref).x,nodes.get(nd.ref).y]) // } catch(e) { // // alert(nd.ref) // } // // }) // way.tag.each(function(tag) { // w.tags.push([tag.k,tag.v]) // }) // objects.push(w) // }) // sort by polygons' node count: objects.sort(Cartagen.sort_by_area) }, number_precision: function(num,prec) { return (num * (1/prec)).round()/(1/prec) }, // gets the plot under the current center of the viewport get_current_plot: function() { if (Map.x != lastPos[0] && Map.y != lastPos[1]) { var new_lat1,new_lat2,new_lng1,new_lng2 new_lat1 = y_to_lat(Map.y)-range new_lng1 = x_to_lon(Map.x)-range new_lat2 = y_to_lat(Map.y)+range new_lng2 = x_to_lon(Map.x)+range // this will look for cached plots, or get new ones if it fails Cartagen.get_cached_plot(new_lat1,new_lng1,new_lat2,new_lng2,Cartagen.bleed_level) } lastPos[0] = Map.x lastPos[1] = Map.y }, // fetches a JSON plot from a static file, given a full url get_static_plot: function(url) { Cartagen.debug('fetching ' + url) Cartagen.requested_plots++ new Ajax.Request(url,{ method: 'get', onComplete: function(result) { // Cartagen.debug(result.responseText.evalJSON().osm.ways.length+" ways") Cartagen.debug('got ' + url) Cartagen.parse_objects(result.responseText.evalJSON()) Cartagen.debug(objects.length+" objects") Cartagen.requested_plots-- if (Cartagen.requested_plots == 0) last_event = frame Cartagen.debug("Total plots: "+Cartagen.plots.size()+", of which "+Cartagen.requested_plots+" are still loading.") } }) }, // reduces precision of a plot request to quantize plot requests // checks against local storage for browers with HTML 5 // then fetches the plot and parses the data into the objects array get_cached_plot: function(_lat1,_lng1,_lat2,_lng2,_bleed) { plot_precision = 0.001 _lat1 = Cartagen.number_precision(_lat1,plot_precision) _lng1 = Cartagen.number_precision(_lng1,plot_precision) _lat2 = Cartagen.number_precision(_lat2,plot_precision) _lng2 = Cartagen.number_precision(_lng2,plot_precision) var cached = false // Remember that parse_objects() will fill localStorage. // We can't do it here because it's an asychronous AJAX call. 
// if we're not live-loading: if (!Cartagen.live) { // check if we've loaded already this session: if (Cartagen.plots.get(_lat1+","+_lng1+","+_lat2+","+_lng2) && Cartagen.plots.get(_lat1+","+_lng1+","+_lat2+","+_lng2)[0]) { // no live-loading, so: Cartagen.debug("already loaded plot") } else { // if we haven't, check if HTML 5 localStorage exists in this browser: if (typeof localStorage != "undefined") { var ls = localStorage.getItem(_lat1+","+_lng1+","+_lat2+","+_lng2) if (ls) { Cartagen.plots.set(_lat1+","+_lng1+","+_lat2+","+_lng2,[true,_bleed]) Cartagen.debug("localStorage cached plot") Cartagen.parse_objects(ls) } else { // it's not in the localStorage: Cartagen.load_plot(_lat1,_lng1,_lat2,_lng2) } } else { // not loaded this session and no localStorage, so: Cartagen.load_plot(_lat1,_lng1,_lat2,_lng2) Cartagen.plots.set(_lat1+","+_lng1+","+_lat2+","+_lng2,[true,_bleed]) } } // if the bleed level of this plot is > 0 if (_bleed > 0) { Cartagen.debug('bleeding to neighbors with bleed = '+_bleed) // bleed to 8 neighboring plots, decrement bleed: Cartagen.delayed_get_cached_plot(_lat1+plot_precision,_lng1+plot_precision,_lat2+plot_precision,_lng2+plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1-plot_precision,_lng1-plot_precision,_lat2-plot_precision,_lng2-plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1+plot_precision,_lng1,_lat2+plot_precision,_lng2,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1,_lng1+plot_precision,_lat2,_lng2+plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1-plot_precision,_lng1,_lat2-plot_precision,_lng2,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1,_lng1-plot_precision,_lat2,_lng2-plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1-plot_precision,_lng1+plot_precision,_lat2-plot_precision,_lng2+plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1+plot_precision,_lng1-plot_precision,_lat2+plot_precision,_lng2-plot_precision,_bleed-1) } } else { // we're live-loading! Gotta get it no matter what: load_plot(_lat1,_lng1,_lat2,_lng2) } }, // peforms get_cached_plot() with a randomized delay of between 1 and 3 seconds // this prevents a zillion requests to the server at the same time and is useful for live viewing // for viewing page_cached plots, it doesn't matter delayed_get_cached_plot: function(_lat1,_lng1,_lat2,_lng2,_bleed) { bleed_delay = 1000+(2000*Math.random(_lat1+_lng1)) //milliseconds, with a random factor to stagger requests setTimeout("get_cached_plot("+_lat1+","+_lng1+","+_lat2+","+_lng2+","+_bleed+")",bleed_delay) }, // requests a JSON plot for a bbox from the server load_plot: function(_lat1,_lng1,_lat2,_lng2) { Cartagen.requested_plots++ new Ajax.Request('/map/plot.js?lat1='+_lat1+'&lng1='+_lng1+'&lat2='+_lat2+'&lng2='+_lng2+'',{ method: 'get', onComplete: function(result) { Cartagen.parse_objects(result.responseText.evalJSON()) Cartagen.requested_plots-- if (Cartagen.requested_plots == 0) last_event = frame Cartagen.debug("Total plots: "+Cartagen.plots.size()+", of which "+Cartagen.requested_plots+" are still loading.") } }) }, // Searches all objects by tags, and sets highlight=true for all matches. 
highlight: function(query) { objects.each(function(object) { object.highlight = false if (query != "" && object.tags && object instanceof Way) { object.tags.each(function(tag) { if (tag.key.toLowerCase().match(query.toLowerCase()) || tag.value.toLowerCase().match(query.toLowerCase())) { object.highlight = true } }) if (object.user && object.user.toLowerCase().match(query.toLowerCase())) object.highlight = true if (object.description && object.description.toLowerCase().match(query.toLowerCase())) object.highlight = true } }) }, show_gss_editor: function() { $('description').hide() $('brief').style.width = '28%' $('brief_first').style.width = '92%'; $('gss').toggle() Cartagen.live_gss = !Cartagen.live_gss }, // sends user to an image of the current canvas redirect_to_image: function() { document.location = canvas.canvas.toDataURL(); }, debug: function(msg) { console.log(msg) } } var Map = { initialize: function() { this.x = Projection.lon_to_x((Cartagen.lng1+Cartagen.lng2)/2) this.y = Projection.lat_to_y((Cartagen.lat1+Cartagen.lat2)/2) }, pointer_x: function() { return Map.x+(((width/-2)-Mouse.x)/Cartagen.zoom_level) }, pointer_y: function() { return Map.y+(((height/-2)-Mouse.y)/Cartagen.zoom_level) }, x: 0, y: 0, rotate: 0, rotate_old: 0, x_old: 0, y_old: 0, // Res down for zoomed-out... getting a NaN for x % 0. Not that much savings yet. resolution: Math.round(Math.abs(Math.log(Cartagen.zoom_level))), refresh_resolution: function() { this.resolution = Math.round(Math.abs(Math.log(Cartagen.zoom_level))) }, // [lon1, lat2, lon2, lat1] get_bbox: function() { var lon1 = Projection.x_to_lon(Map.x - (width/2)) var lon2 = Projection.x_to_lon(Map.x + (width/2)) var lat1 = Projection.y_to_lat(Map.y - (height/2)) var lat2 = Projection.y_to_lat(Map.y + (height/2)) return [lon1, lat2, lon2, lat1] } } var Node = Class.create({ radius: 6, tags: new Hash(), fillStyle: "#555", fontColor: "#eee", fontSize: 12, draw: function() { Cartagen.object_count++ Cartagen.point_count++ canvas.save() this.shape() canvas.restore() }, shape: function() { canvas.save() Style.apply_style(this) beginPath() translate(this.x,this.y-6) arc(0,this.radius,this.radius,0,Math.PI*2,true) fill() stroke() canvas.restore() } }) var Way = Class.create({ age: 0, highlight: false, nodes: [], label: null, closed_poly: false, tags: new Hash(), fillStyle: "#555", fontColor: "#eee", fontSize: 12, initialize: function(data) { Object.extend(this, data) this.bbox = Geometry.calculate_bounding_box(this.nodes) if (this.nodes[0].x == this.nodes[this.nodes.length-1].x && this.nodes[0].y == this.nodes[this.nodes.length-1].y) this.closed_poly = true if (this.tags.get('natural') == "coastline") this.closed_poly = true if (this.closed_poly) { var centroid = Geometry.poly_centroid(this.nodes) this.x = centroid[0]*2 this.y = centroid[1]*2 } else { // attempt to make letters follow line segments: this.x = (this.middle_segment()[0].x+this.middle_segment()[1].x)/2 this.y = (this.middle_segment()[0].y+this.middle_segment()[1].y)/2 } this.area = poly_area(this.nodes) this.label = new Label(this) Style.parse_styles(this,Style.styles.way) // geohash.set(encodeGeoHash()) objects.push(this) Geohash.put(Projection.y_to_lat(this.y),Projection.x_to_lon(this.x),this,6) Cartagen.ways.set(this.id,this) }, // returns the middle-most line segment as a tuple [node_1,node_2] middle_segment: function() { // Cartagen.debug(this.nodes[Math.floor(this.nodes.length/2)+1]) if (this.nodes.length == 1) { return this.nodes[0] } else if (this.nodes.length == 2) { return 
[this.nodes[0], this.nodes[1]] } else { return [this.nodes[Math.floor(this.nodes.length/2)],this.nodes[Math.floor(this.nodes.length/2)+1]] } }, middle_segment_angle: function() { var segment = this.middle_segment() if (segment[1]) { var _x = segment[0].x-segment[1].x var _y = segment[0].y-segment[1].y return (Math.tan(_y/_x)/1.7) } else return 90 }, draw: function() { Cartagen.object_count++ // only draw if in the viewport: if (intersect(viewport[0],viewport[1],viewport[2],viewport[3],this.bbox[0],this.bbox[1],this.bbox[2],this.bbox[3])) { Cartagen.way_count++ this.shape() this.age += 1; } }, shape: function() { canvas.save() Style.apply_style(this) if (this.highlight) { lineWidth(3/Cartagen.zoom_level) strokeStyle("red") } // fade in after load: if (Object.isUndefined(this.opacity)) this.opacity = 1 if (this.age < 20) { canvas.globalAlpha = this.opacity*(this.age/20) } else { canvas.globalAlpha = this.opacity } beginPath() moveTo(this.nodes[0].x,this.nodes[0].y) if (Map.resolution == 0) Map.resolution = 1 this.nodes.each(function(node,index){ if ((index % Map.resolution == 0) || index == 0 || index == this.nodes.length-1 || this.nodes.length <= 30) { Cartagen.node_count++ lineTo(node.x,node.y) } },this) // fill the polygon if the beginning and end nodes are the same. // we'll have to change this for open polys, like coastlines stroke() if (this.closed_poly) fill() // show bboxes for ways: // lineWidth(1) // strokeStyle('red') // strokeRect(this.bbox[1],this.bbox[0],this.bbox[3]-this.bbox[1],this.bbox[2]-this.bbox[0]) // draw label if we're zoomed in enough' if (Cartagen.zoom_level > 0.3) { Cartagen.queue_label(this.label, this.x, this.y) } canvas.restore() } }) var Label = Class.create({ fontFamily: 'Lucida Grande', fontSize: 11, fontBackground: null, text: null, fontScale: false, padding: 6, fontColor: '#eee', initialize: function(_way) { this.way = _way }, draw: function(_x, _y) { if (this.text) { canvas.save() //this.text = this.way.id Style.apply_font_style(this) // try to rotate the labels on unclosed ways: try { if (!this.way.closed_poly) { translate(_x,_y) rotate(this.way.middle_segment_angle()) translate(-1*_x,-1*_y) } } catch(e) {console.log(e)} if (this.fontScale == "fixed") { var _height = Object.value(this.fontSize) var _padding = Object.value(this.padding) } else { var _height = Object.value(this.fontSize)/Cartagen.zoom_level var _padding = Object.value(this.padding)/Cartagen.zoom_level } if (canvas.fillText) { // || Prototype.Browser.Gecko) { canvas.font = _height+"pt "+this.fontFamily var _width = canvas.measureText(Object.value(this.text)).width if (this.fontBackground) { fillStyle(Object.value(this.fontBackground)) rect(_x-((_width+_padding)/2),_y-((_height/2+(_padding/2))),_width+_padding,_height+_padding) } fillStyle(Object.value(this.fontColor)) canvas.fillText(Object.value(this.text),_x-(_width/2),_y+(_height/2)) } else { var _width = canvas.measureCanvasText(Object.value(this.fontFamily),_height,this.text) if (this.fontBackground) { fillStyle(Object.value(this.fontBackground)) rect(_x-((_width+_padding)/2),_y-((_height/2+(_padding/2))),_width+_padding,_height+_padding) } drawTextCenter(Object.value(this.fontFamily),_height,_x,_y+(_height/2),Object.value(this.text)) } canvas.restore() } } }) var Projection = { current_projection: 'spherical_mercator', scale_factor: 100000, set: function(new_projection) { this.current_projection = new_projection }, lon_to_x: function(lon) { return -1*Projection[Projection.current_projection].lon_to_x(lon) }, x_to_lon: function(x) { 
return -1*Projection[Projection.current_projection].x_to_lon(x) }, lat_to_y: function(lat) { return -1*Projection[Projection.current_projection].lat_to_y(lat) }, y_to_lat: function(y) { return -1*Projection[Projection.current_projection].y_to_lat(y) }, //required by spherical mercator: center_lon: function() { return (Cartagen.lng2+Cartagen.lng1)/2 }, spherical_mercator: { lon_to_x: function(lon) { return (lon - Projection.center_lon()) * -1 * Projection.scale_factor }, x_to_lon: function(x) { return (x/(-1*Projection.scale_factor)) + Projection.center_lon() }, lat_to_y: function(lat) { return ((180/Math.PI * (2 * Math.atan(Math.exp(lat*Math.PI/180)) - Math.PI/2))) * Projection.scale_factor * 1.7 }, y_to_lat: function(y) { return (180/Math.PI * Math.log(Math.tan(Math.PI/4+(y/(Projection.scale_factor*1.7))*(Math.PI/180)/2))) } }, elliptical_mercator: { lon_to_x: function(lon) { var r_major = 6378137.000; return r_major * lon; }, x_to_lon: function(x) { var r_major = 6378137.000; return lon/r_major; }, lat_to_y: function(lat) { if (lat > 89.5) lat = 89.5; if (lat < -89.5) lat = -89.5; var r_major = 6378137.000; var r_minor = 6356752.3142; var temp = r_minor / r_major; var es = 1.0 - (temp * temp); var eccent = Math.sqrt(es); var phi = lat; var sinphi = Math.sin(phi); var con = eccent * sinphi; var com = .5 * eccent; con = Math.pow(((1.0-con)/(1.0+con)), com); var ts = Math.tan(.5 * ((Math.PI*0.5) - phi))/con; var y = 0 - r_major * Math.log(ts); return y; }, y_to_lat: function(y) { // unknown } } } User = { color: randomColor(), name: 'anonymous', // lat & lon are based on geolocation: lat: 0, lon: 0, x: -118.31700000003664, y: -6562600.9880228145, node_submit_uri: '/node/write', node_updates_uri: '/node/read', way_submit_uri: '/way/write', way_update_uri: '/way/read', line_width: 10, node_radius: 50, follow_interval: 60, following: false, following_executer: null, drawing_way: false, set_loc: function(loc) { if (loc.coords) { User.lat = loc.coords.latitude User.lon = loc.coords.longitude } else { User.lat = loc.latitude User.lon = loc.longitude } // User.calculate_coords() Cartagen.debug('detected location: '+this.lat+","+this.lon) }, calculate_coords: function() { // this should be based on lat and lon }, create_node: function(_x, _y, _draw, id) { if (Object.isUndefined(_x)) _x = User.x if (Object.isUndefined(_y)) _y = User.y if (Object.isUndefined(id)) id = 'temp_' + (Math.random() * 999999999).floor() var node = new Node() node.x = _x node.y = _y node.radius = User.node_radius node.id = id node.lon = Projection.x_to_lon(_x) node.lat = Projection.y_to_lat(_y) node.fillStyle = User.color if (_draw) { objects.push(node) draw() } return node }, submit_node: function(_x, _y) { var node = User.create_node(_x, _y, true) var params = { color: User.color, lon: node.lon, lat: node.lat, author: User.name } new Ajax.Request(User.node_submit_uri, { method: 'post', parameters: params, onSuccess: function(transport) { node.id = 'cartagen_' + transport.responseText Cartagen.debug('saved node with id ' + node.id) } }) }, toggle_following: function() { if (User.following) { User.following_executer.stop() User.following = false } else { User.following_executer = new PeriodicalExecuter(User.center_map_on_user, User.follow_interval) User.following = true User.center_map_on_user() } }, center_map_on_user: function() { //navigator.geolocation.getCurrentPosition(User.set_loc_and_center) User.set_loc_and_center() }, set_loc_and_center: function(loc) { //User.set_loc(loc) Map.x = User.x Map.y = User.y draw() }, 
toggle_way_drawing: function(_x, _y) { if (User.drawing_way) { User.add_node(_x, _y) User.submit_way(User.way) } else { User.way = new Way({ id: 'temp_' + (Math.random() * 999999999).floor(), author: User.name, nodes: [User.create_node(_x,_y,true)], tags: new Hash() }) User.way.closed_poly = false User.way.strokeStyle = User.color User.way.lineWidth = User.line_width User.way.age = 40 draw() } User.drawing_way = !User.drawing_way }, submit_way: function(_way) { var params = { color: User.color, author: User.name, bbox: _way.bbox, nodes: _way.nodes.collect(function(node) { return [node.lon, node.lat] }) } Cartagen.debug(_way.nodes) Cartagen.debug(params) new Ajax.Request(User.way_submit_uri, { parameters: {way: Object.toJSON(params)}, onSuccess: function(transport) { _way.id = 'cartagen_' + transport.responseJSON.way_id _way.nodes.each(function(nd) { nd.id = 'cartagen_' + transport.responseJSON.node_ids.shift() }) } }) Cartagen.debug(_way) }, add_node: function(_x, _y) { node = User.create_node(_x, _y, true) User.way.nodes.push(node) User.way.bbox = Geometry.calculate_bounding_box(User.way.nodes) draw() }, update: function() { if (User.last_pos && User.last_pos == lastPos) { var timestamp = User.last_update } User.last_pos = lastPos User.last_update = (new Date()).toUTCString() new Ajax.Request(User.node_update_uri, { }) } } function overlaps(x1,y1,x2,y2,fudge) { if (x2 > x1-fudge && x2 < x1+fudge) { if (y2 > y1-fudge && y2 < y1+fudge) { return true } else { return false } } else { return false } } function intersect(box1top,box1left,box1bottom,box1right,box2top,box2left,box2bottom,box2right) { return !(box2left > box1right || box2right < box1left || box2top > box1bottom || box2bottom < box1top) } //+ Jonas Raoni Soares Silva //@ http://jsfromhell.com/math/is-point-in-poly [rev. 
#0] function is_point_in_poly(poly, _x, _y){ for(var c = false, i = -1, l = poly.length, j = l - 1; ++i < l; j = i) ((poly[i].y <= _y && _y < poly[j].y) || (poly[j].y <= _y && _y < poly[i].y)) && (_x < (poly[j].x - poly[i].x) * (_y - poly[i].y) / (poly[j].y - poly[i].y) + poly[i].x) && (c = !c); return c; } // use poly_area(nodes,true) for signed area function poly_area(nodes) { var area = 0 nodes.each(function(node,index) { if (index < nodes.length-1) next = nodes[index+1] else next = nodes[0] if (index > 0) last = nodes[index-1] else last = nodes[nodes.length-1] area += last.x*node.y-node.x*last.y+node.x*next.y-next.x*node.y }) if (arguments[1] == true) return area/2 else return Math.abs(area/2) } var Geometry = { poly_centroid: function(polygon) { var n = polygon.length var cx = 0, cy = 0 var a = poly_area(polygon,true) var centroid = [] var i,j var factor = 0 for (i=0;i<n;i++) { j = (i + 1) % n factor = (polygon[i].x * polygon[j].y - polygon[j].x * polygon[i].y) cx += (polygon[i].x + polygon[j].x) * factor cy += (polygon[i].y + polygon[j].y) * factor } a *= 6 factor = 1/a cx *= factor cy *= factor centroid[0] = cx centroid[1] = cy return centroid }, calculate_bounding_box: function(points) { var bbox = [0,0,0,0] // top, left, bottom, right points.each(function(node) { if (node.x < bbox[1] || bbox[1] == 0) bbox[1] = node.x if (node.x > bbox[3] || bbox[3] == 0) bbox[3] = node.x if (node.y < bbox[0] || bbox[0] == 0) bbox[0] = node.y if (node.y > bbox[2] || bbox[2] == 0) bbox[2] = node.y }) return bbox } /* PolygonCenterOfMass(Point[] polygon,int N) { float cx=0,cy=0; float A=(float)SignedPolygonArea(polygon,N); Point2Df res=new Point2Df(); int i,j; float factor=0; for (i=0;i<N;i++) { j = (i + 1) % N; factor=(polygon[i].x*polygon[j].y-polygon[j].x*polygon[i].y); cx+=(polygon[i].x+polygon[j].x)*factor; cy+=(polygon[i].y+polygon[j].y)*factor; } A*=6.0f; factor=1/A; cx*=factor; cy*=factor; res.x=cx; res.y=cy; return res; } */ } // add Object.value, which returns the argument, unless the argument is a function, // in which case it calls the function and returns the result Object.value = function(obj) { if(Object.isFunction(obj)) return obj() return obj } // This duplicates a function call in glop.js... load order issues function randomColor() { return "rgb("+Math.round(Math.random()*255)+","+Math.round(Math.random()*255)+","+Math.round(Math.random()*255)+")" } // Rotates view slowly for cool demo purposes. function demo() { try { Map.rotate += 0.005 } catch(e) {}}
cartagen/public/cartagen/cartagen.js
// cartagen.js // // Copyright (C) 2009 Jeffrey Warren, Design Ecology, MIT Media Lab // // This file is part of the Cartagen mapping framework. Read more at // <http://cartagen.org> // // Cartagen is free software: you can redistribute it and/or modify // it under the terms of the MIT License. You should have received a copy // of the MIT License along with Cartagen. If not, see // <http://www.opensource.org/licenses/mit-license.php>. // // these belong in other objects... move them var lastPos = [0,0] var objects = [] PhoneGap = window.DeviceInfo && DeviceInfo.uuid != undefined // temp object unitl PhoneGap is initialized if (typeof cartagen_base_uri == 'undefined') { cartagen_base_uri = 'cartagen' } // additional dependencies: var scripts = [ cartagen_base_uri + '/canvastext.js', cartagen_base_uri + '/glop.js', cartagen_base_uri + '/events.js', cartagen_base_uri + '/lib/geohash.js', ] // load phonegap js if needed if(window.PhoneGap) { scripts.unshift(cartagen_base_uri + '/lib/phonegap/phonegap.base.js', cartagen_base_uri + '/lib/phonegap/geolocation.js', cartagen_base_uri + '/lib/phonegap/iphone/phonegap.js', cartagen_base_uri + '/lib/phonegap/iphone/geolocation.js') } // loads each script in scripts array, sequentially. // requires a load_next_script() call at the end of each // dependent script to trigger the next one. function load_next_script() { Cartagen.debug("loading: "+scripts[0]) if (scripts.length > 0) { load_script(scripts.splice(0,1)[0]) } } // loads a script into <script> tags, no cross-domain limits: function load_script(script) { $$('head')[0].insert(new Element('script', { 'src': script, 'type': 'text/javascript', 'charset': 'utf-8', evalJSON: 'force' })); } // some browsers don't have a console object, so create a dud one for them: if (typeof console == "undefined") { console = { log: function(param) {}}} // if (Prototype.Browser.MobileSafari) $('brief').hide() var Mouse = { x: 0, y: 0, click_x: 0, click_y: 0 } var Style = { styles: { // this doesn't get used. We don't have a body object to load them into... body: { fillStyle: "#eee", fontColor: "#eee", fontSize: 12 } }, style_body: function() { if (Style.styles) { if (Style.styles.body.fillStyle) fillStyle(Style.styles.body.fillStyle) if (Style.styles.body.strokeStyle) strokeStyle(Style.styles.body.strokeStyle) if (Style.styles.body.lineWidth || Style.styles.body.lineWidth == 0) lineWidth(Style.styles.body.lineWidth) if (Style.styles.body.pattern && Object.isUndefined(Style.styles.body.pattern_img)) { Style.styles.body.pattern_img = new Image() Style.styles.body.pattern_img.src = Style.styles.body.pattern } if (Style.styles.body.pattern_img) { try { fillStyle(canvas.createPattern(Style.styles.body.pattern_img,'repeat')) } catch(e) {} } rect(0,0,width,height) strokeRect(0,0,width,height) } canvas.lineJoin = 'round' canvas.lineCap = 'round' }, parse_styles: function(feature,selector) { try { // check for function or parameter for each style type... // or is it copying the function itself, and doesn't need to know if it's a function or parameter? 
if (selector.opacity) feature.opacity = selector.opacity if (selector.fillStyle) feature.fillStyle = selector.fillStyle if (selector.lineWidth || selector.lineWidth == 0) feature.lineWidth = selector.lineWidth if (selector.strokeStyle && Object.isFunction(selector.strokeStyle)) { // bind the styles object to the context of this Way: feature.strokeStyle = selector.strokeStyle() } else { feature.strokeStyle = selector.strokeStyle } // patterns if (selector.pattern) { feature.pattern_img = new Image() feature.pattern_img.src = selector.pattern } // radius is relevant to nodes, i.e. single points if (selector.radius) feature.radius = selector.radius // check selector for hover: if (selector['hover']) feature.hover = selector['hover'] if (selector['mouseDown']) feature.mouseDown = selector['mouseDown'] // copy styles based on feature name if (Style.styles[feature.name] && Style.styles[feature.name].fillStyle) feature.fillStyle = Style.styles[feature.name].fillStyle if (Style.styles[feature.name] && Style.styles[feature.name].strokeStyle) feature.strokeStyle = Style.styles[feature.name].strokeStyle // font styling: if (selector.fontColor) feature.label.fontColor = selector.fontColor if (selector.fontSize) feature.label.fontSize = selector.fontSize if (selector.fontScale) feature.label.fontScale = selector.fontScale if (selector.fontBackground) feature.label.fontBackground = selector.fontBackground if (selector.text) feature.label.text = selector.text feature.tags.each(function(tag) { //look for a style like this: if (Style.styles[tag.key]) { if (Style.styles[tag.key].opacity) feature.opacity = Style.styles[tag.key].opacity if (Style.styles[tag.key].fillStyle) feature.fillStyle = Style.styles[tag.key].fillStyle if (Style.styles[tag.key].strokeStyle) feature.strokeStyle = Style.styles[tag.key].strokeStyle if (Style.styles[tag.key].lineWidth) feature.lineWidth = Style.styles[tag.key].lineWidth if (Style.styles[tag.key].fontColor) feature.label.fontColor = Style.styles[tag.key].fontColor if (Style.styles[tag.key].fontSize) feature.label.fontSize = Style.styles[tag.key].fontSize if (Style.styles[tag.key].fontScale) feature.label.fontScale = Style.styles[tag.key].fontScale if (Style.styles[tag.key].fontBackground) feature.label.fontBackground = Style.styles[tag.key].fontBackground if (Style.styles[tag.key].text) { if (Object.isFunction(Style.styles[tag.key].text)) feature.label.text = Style.styles[tag.key].text.apply(feature) else feature.label.text = Style.styles[tag.key].text } if (Style.styles[tag.key].pattern) { feature.pattern_img = new Image() feature.pattern_img.src = Style.styles[tag.key].pattern } } if (Style.styles[tag.value]) { if (Style.styles[tag.value].opacity) feature.opacity = Style.styles[tag.value].opacity if (Style.styles[tag.value].fillStyle) feature.fillStyle = Style.styles[tag.value].fillStyle if (Style.styles[tag.value].strokeStyle) feature.strokeStyle = Style.styles[tag.value].strokeStyle if (Style.styles[tag.value].lineWidth) feature.label.lineWidth = Style.styles[tag.value].lineWidth if (Style.styles[tag.value].fontColor) feature.label.fontColor = Style.styles[tag.value].fontColor if (Style.styles[tag.value].fontSize) feature.label.fontSize = Style.styles[tag.value].fontSize if (Style.styles[tag.value].fontScale) feature.label.fontScale = Style.styles[tag.value].fontScale if (Style.styles[tag.value].fontBackground) feature.label.fontBackground = Style.styles[tag.value].fontBackground if (Style.styles[tag.value].text) { if (Object.isFunction(Style.styles[tag.value].text)) 
feature.label.text = Style.styles[tag.value].text.apply(feature) else feature.label.text = Style.styles[tag.value].text } if (Style.styles[tag.value].pattern) { feature.pattern_img = new Image() feature.pattern_img.src = Style.styles[tag.value].pattern } } //check tags for hover: if (Style.styles[tag.key] && Style.styles[tag.key]['hover']) { feature.hover = Style.styles[tag.key]['hover'] } if (Style.styles[tag.value] && Style.styles[tag.value]['hover']) { feature.hover = Style.styles[tag.value]['hover'] } //check tags for mouseDown: if (Style.styles[tag.key] && Style.styles[tag.key]['mouseDown']) { feature.mouseDown = Style.styles[tag.key]['mouseDown'] } if (Style.styles[tag.value] && Style.styles[tag.value]['mouseDown']) { feature.mouseDown = Style.styles[tag.value]['mouseDown'] } // check tags for refresh: if (Style.styles[tag.key] && Style.styles[tag.key]['refresh']) { $H(Style.styles[tag.key]['refresh']).each(function(pair) { Style.create_refresher(feature, pair.key, pair.value) }) } if (Style.styles[tag.value] && Style.styles[tag.value]['refresh']) { if(!feature.style_generators) feature.style_generators = {} $H(Style.styles[tag.value]['refresh']).each(function(pair) { Style.create_refresher(feature, pair.key, pair.value) }) } }) } catch(e) { Cartagen.debug("There was an error in your stylesheet. Please check http://wiki.cartagen.org for the GSS spec. Error: "+trace(e)) } }, create_refresher: function(feature, property, interval) { if (Object.isFunction(feature[property])) { //sanity check if (['fontBackground', 'fontColor', 'fontScale', 'fontSize', 'text'].include(property)) { feature = feature.label } if(!feature.style_generators) feature.style_generators = {} if(!feature.style_generators.executers) feature.style_generators.executers = {} feature.style_generators[property] = feature[property] Style.refresh_style(feature, property) feature.style_generators.executers[property] = new PeriodicalExecuter(function() { Style.refresh_style(feature, property) }, interval) } }, refresh_style: function(feature, property) { feature[property] = feature.style_generators[property]() }, // sets the canvas 'pen' styles using the object.foo style definitions apply_style: function(feature) { canvas.globalOpacity = 1 if (feature.opacity) { canvas.globalOpacity = Object.value(feature.opacity) } if (feature.strokeStyle) { strokeStyle(Object.value(feature.strokeStyle)) } if (feature.fillStyle) { fillStyle(Object.value(feature.fillStyle)) } if (feature.pattern_img) { fillStyle(canvas.createPattern(feature.pattern_img,'repeat')) } if (feature.lineWidth) { lineWidth(Object.value(feature.lineWidth)) } // trigger hover and mouseDown styles: if (feature instanceof Way) { if (feature.hover && feature.closed_poly && is_point_in_poly(feature.nodes,Map.pointer_x(),Map.pointer_y())) { Style.apply_style(feature.hover) if (!Object.isUndefined(feature.hover.action)) feature.hover.action() } if (feature.mouseDown && mouseDown == true && feature.closed_poly && is_point_in_poly(feature.nodes,Map.pointer_x(),Map.pointer_y())) { Style.apply_style(feature.mouseDown) if (!Object.isUndefined(feature.mouseDown.action)) feature.mouseDown.action() } } else if (feature instanceof Node) { if (feature.hover && overlaps(feature.x,feature.y,Map.pointer_x(),Map.pointer_y(),100)) { Style.apply_style(feature.hover) if (feature.hover.action) feature.hover.action() } if (feature.mouseDown && mouseDown == true && overlaps(feature.x,feature.y,Map.pointer_x(),Map.pointer_y(),100)) { Style.apply_style(feature.mouseDown) if 
(feature.mouseDown.action) feature.mouseDown.action() } } }, // same as apply_style but just for fonts. This was necessary because // strokeStyle and such have to be reset *after* drawing actual polygons but // *before* drawing text. apply_font_style: function(feature) { if (feature.fontColor) { if (Object.isFunction(feature.fontColor)) strokeStyle(feature.fontColor()) else strokeStyle(feature.fontColor) } }, // add an individual style to the styles object. May not actually work; old code. // add_style('highway','strokeStyle','red') add_style: function(tag,style,value) { eval("styles."+tag+" = {"+style+": '"+value+"'}") }, // load a remote stylesheet, given a URL load_styles: function(stylesheet_url) { if (stylesheet_url[0,4] == "http") { stylesheet_url = "/map/style?url="+stylesheet_url } new Ajax.Request(stylesheet_url,{ method: 'get', onComplete: function(result) { Cartagen.debug('applying '+stylesheet_url) Style.styles = ("{"+result.responseText+"}").evalJSON() Style.stylesheet_source = "{"+result.responseText+"}" Style.apply_gss(Style.stylesheet_source) // populate the gss field if($('gss_textarea')) { $('gss_textarea').value = Style.stylesheet_source } } }) }, // given a string of gss, applies the string to all Ways and Nodes in the objects array apply_gss: function(gss) { if (Object.isUndefined(arguments[1])) var clear_styles = true else clear_styles = arguments[1] Style.styles = gss.evalJSON() objects.each(function(object) { if (clear_styles) { object.lineWeight = null object.strokeStyle = null object.fillStyle = null object.hover = null object.mouseDown = null } if (object instanceof Node) Style.parse_styles(object,Style.styles.node) if (object instanceof Way) Style.parse_styles(object,Style.styles.way) },this) } } var Viewport = { } var Geohash = { hash: new Hash(), default_length: 4, // default length of geohash // adds a feature to a geohash index put: function(lat,lon,feature,length) { if (!length) length = this.default_length var _short_hash = encodeGeoHash(lat,lon).truncate(length,"") // check to see if the geohash is already populated: var merge_hash = this.hash.get(_short_hash) if (!merge_hash) { merge_hash = [feature] } else { merge_hash.push(feature) } this.hash.set(_short_hash,merge_hash) }, // fetch features in a geohash index get: function(key,length) { if (!length) length = this.default_length key = key.truncate(length,"") // default length of geohash return this.hash.get(key) }, trace: function() { this.hash.keys().each(function(key) { Cartagen.debug(this.hash.get(key).length) },this) } } var Cartagen = { object_count: 0, way_count: 0, node_count: 0, requested_plots: 0, stylesheet: "/style.gss", live: false, powersave: true, zoom_out_limit: 0.02, zoom_in_limit: 0, simplify: 1, live_gss: false, // this is for inline gss editing, generally only on cartagen.org static_map: true, static_map_layers: ["/static/rome/park.js"], dynamic_layers: [], range: 0.001, lat1: 41.9227, // these are the initial bounding boxes for the viewport lat2: 41.861, lng1: 12.4502, lng2: 12.5341, zoom_level: 0.05, plots: new Hash(), nodes: new Hash(), ways: new Hash(), fullscreen: true, bleed_level: 1, initial_bleed_level: 2, // this is how much plots bleed on the initial pageload label_queue: [], // queue of labels to draw debug_mode: typeof console != "undefined", setup: function(configs) { // geolocate with IP if available if (navigator.geolocation) navigator.geolocation.getCurrentPosition(User.set_loc) // wait for window load: Event.observe(window, 'load', this.initialize.bind(this,configs)) 
}, initialize: function(configs) { // queue dependencies: load_next_script() this.browser_check() // draw on window resize: Event.observe(window, 'resize', function() {try{draw()}catch(e){}}); // we can override right-click: // Event.observe(window, 'oncontextmenu', function() { return false }) Object.keys(configs).each(function(key,index) { this[key] = Object.values(configs)[index] // Cartagen.debug('configuring '+key+': '+this[key]) },this) if (this.get_url_param('gss')) this.stylesheet = this.get_url_param('gss') Map.initialize() // Startup: Style.load_styles(this.stylesheet) // stylesheet if (!this.static_map) { this.get_cached_plot(this.lat1,this.lng1,this.lat2,this.lng2,Cartagen.initial_bleed_level) new PeriodicalExecuter(this.get_current_plot,0.33) } else { if (Prototype.Browser.MobileSafari) { this.get_static_plot(this.static_map_layers[0]) this.get_static_plot(this.static_map_layers[1]) } else { this.static_map_layers.each(function(layer_url) { Cartagen.debug('fetching '+layer_url) this.get_static_plot(layer_url) },this) if (this.dynamic_layers.length > 0) { this.dynamic_layers.each(function(layer_url) { Cartagen.debug('fetching '+layer_url) load_script(layer_url) },this) } } } }, // Runs every frame in the draw() method. An attempt to isolate cartagen code from general GLOP code draw: function() { this.object_count = 0 this.way_count = 0 this.node_count = 0 Map.refresh_resolution() if (Prototype.Browser.MobileSafari || window.PhoneGap) Cartagen.simplify = 2 Style.style_body() translate(width/2,height/2) rotate(Map.rotate) scale(Cartagen.zoom_level,Cartagen.zoom_level) translate(width/-2,height/-2) // rotate(-1*Map.rotate) translate((-1*Map.x)+(width/2),(-1*Map.y)+(height/2)) // rotate(Map.rotate) // viewport stuff: strokeStyle('white') lineWidth(10) viewport_width = width*(1/Cartagen.zoom_level)-(100*(1/Cartagen.zoom_level)) viewport_height = height*(1/Cartagen.zoom_level)-(100*(1/Cartagen.zoom_level)) viewport = [Map.y-viewport_height/2,Map.x-viewport_width/2,Map.y+viewport_height/2,Map.x+viewport_width/2] strokeRect(Map.x-viewport_width/2,Map.y-viewport_height/2,viewport_width,viewport_height) }, // runs every frame in the draw() method, after Globjects have been drawn post_draw: function() { this.label_queue.each(function(item) { item[0].draw(item[1], item[2]) }) this.label_queue = [] }, // adds the label to the list of labels to be drawn when queue_label: function(label, x, y) { this.label_queue.push([label, x, y]) }, // show alert if it's IE: browser_check: function() { if ($('browsers')) { $('browsers').absolutize; $('browsers').style.top = "100px"; $('browsers').style.margin = "0 auto"; if (Prototype.Browser.IE) $('browsers').show(); } }, get_url_param: function(name) { name = name.replace(/[\[]/,"\\\[").replace(/[\]]/,"\\\]"); var regexS = "[\\?&]"+name+"=([^&#]*)"; var regex = new RegExp( regexS ); var results = regex.exec( window.location.href ); if( results == null ) return ""; else return results[1]; }, // sort ways by area: sort_by_area: function(a,b) { if (a instanceof Way) { if (b instanceof Way) { if ( a.area < b.area ) return 1; if ( a.area > b.area ) return -1; return 0; // a == b } else { return -1 // a wins no matter what if b is not a Way } } else { return 1 // b wins no matter what if a is not a Way } }, parse_objects: function(data) { data.osm.node.each(function(node){ var n = new Node n.h = 10 n.w = 10 n.color = randomColor() n.timestamp = node.timestamp n.user = node.user n.id = node.id n.lat = node.lat n.lon = node.lon n.x = Projection.lon_to_x(n.lon) 
n.y = Projection.lat_to_y(n.lat) Style.parse_styles(n,Style.styles.node) // can't currently afford to have all nodes in the map as well as all ways. // but we're missing some nodes when we render... semantic ones i think. cross-check. // objects.push(n) Cartagen.nodes.set(n.id,n) }) data.osm.way.each(function(way){ if (Cartagen.live || !Cartagen.ways.get(way.id)) { var data = { id: way.id, user: way.user, timestamp: way.timestamp, nodes: [], tags: new Hash() } if (way.name) data.name = way.name way.nd.each(function(nd, index) { if ((index % Cartagen.simplify) == 0 || index == 0 || index == way.nd.length-1 || way.nd.length <= Cartagen.simplify*2) { node = Cartagen.nodes.get(nd.ref) if (!Object.isUndefined(node)) data.nodes.push(node) } }) if (way.tag instanceof Array) { way.tag.each(function(tag) { data.tags.set(tag.k,tag.v) }) } else { data.tags.set(way.tag.k,way.tag.v) } new Way(data) } }) // data.osm.relation.each(function(way){ // var w = new Way // w.id = way.id // w.user = way.user // w.timestamp = way.timestamp // w.nodes = [] // way.nd.each(function(nd){ // //find the node corresponding to nd.ref // try { // w.nodes.push([nodes.get(nd.ref).x,nodes.get(nd.ref).y]) // } catch(e) { // // alert(nd.ref) // } // // }) // way.tag.each(function(tag) { // w.tags.push([tag.k,tag.v]) // }) // objects.push(w) // }) // sort by polygons' node count: objects.sort(Cartagen.sort_by_area) }, number_precision: function(num,prec) { return (num * (1/prec)).round()/(1/prec) }, // gets the plot under the current center of the viewport get_current_plot: function() { if (Map.x != lastPos[0] && Map.y != lastPos[1]) { var new_lat1,new_lat2,new_lng1,new_lng2 new_lat1 = y_to_lat(Map.y)-range new_lng1 = x_to_lon(Map.x)-range new_lat2 = y_to_lat(Map.y)+range new_lng2 = x_to_lon(Map.x)+range // this will look for cached plots, or get new ones if it fails Cartagen.get_cached_plot(new_lat1,new_lng1,new_lat2,new_lng2,Cartagen.bleed_level) } lastPos[0] = Map.x lastPos[1] = Map.y }, // fetches a JSON plot from a static file, given a full url get_static_plot: function(url) { Cartagen.debug('fetching ' + url) Cartagen.requested_plots++ new Ajax.Request(url,{ method: 'get', onComplete: function(result) { // Cartagen.debug(result.responseText.evalJSON().osm.ways.length+" ways") Cartagen.debug('got ' + url) Cartagen.parse_objects(result.responseText.evalJSON()) Cartagen.debug(objects.length+" objects") Cartagen.requested_plots-- if (Cartagen.requested_plots == 0) last_event = frame Cartagen.debug("Total plots: "+Cartagen.plots.size()+", of which "+Cartagen.requested_plots+" are still loading.") } }) }, // reduces precision of a plot request to quantize plot requests // checks against local storage for browers with HTML 5 // then fetches the plot and parses the data into the objects array get_cached_plot: function(_lat1,_lng1,_lat2,_lng2,_bleed) { plot_precision = 0.001 _lat1 = Cartagen.number_precision(_lat1,plot_precision) _lng1 = Cartagen.number_precision(_lng1,plot_precision) _lat2 = Cartagen.number_precision(_lat2,plot_precision) _lng2 = Cartagen.number_precision(_lng2,plot_precision) var cached = false // Remember that parse_objects() will fill localStorage. // We can't do it here because it's an asychronous AJAX call. 
// if we're not live-loading: if (!Cartagen.live) { // check if we've loaded already this session: if (Cartagen.plots.get(_lat1+","+_lng1+","+_lat2+","+_lng2) && Cartagen.plots.get(_lat1+","+_lng1+","+_lat2+","+_lng2)[0]) { // no live-loading, so: Cartagen.debug("already loaded plot") } else { // if we haven't, check if HTML 5 localStorage exists in this browser: if (typeof localStorage != "undefined") { var ls = localStorage.getItem(_lat1+","+_lng1+","+_lat2+","+_lng2) if (ls) { Cartagen.plots.set(_lat1+","+_lng1+","+_lat2+","+_lng2,[true,_bleed]) Cartagen.debug("localStorage cached plot") Cartagen.parse_objects(ls) } else { // it's not in the localStorage: Cartagen.load_plot(_lat1,_lng1,_lat2,_lng2) } } else { // not loaded this session and no localStorage, so: Cartagen.load_plot(_lat1,_lng1,_lat2,_lng2) Cartagen.plots.set(_lat1+","+_lng1+","+_lat2+","+_lng2,[true,_bleed]) } } // if the bleed level of this plot is > 0 if (_bleed > 0) { Cartagen.debug('bleeding to neighbors with bleed = '+_bleed) // bleed to 8 neighboring plots, decrement bleed: Cartagen.delayed_get_cached_plot(_lat1+plot_precision,_lng1+plot_precision,_lat2+plot_precision,_lng2+plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1-plot_precision,_lng1-plot_precision,_lat2-plot_precision,_lng2-plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1+plot_precision,_lng1,_lat2+plot_precision,_lng2,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1,_lng1+plot_precision,_lat2,_lng2+plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1-plot_precision,_lng1,_lat2-plot_precision,_lng2,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1,_lng1-plot_precision,_lat2,_lng2-plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1-plot_precision,_lng1+plot_precision,_lat2-plot_precision,_lng2+plot_precision,_bleed-1) Cartagen.delayed_get_cached_plot(_lat1+plot_precision,_lng1-plot_precision,_lat2+plot_precision,_lng2-plot_precision,_bleed-1) } } else { // we're live-loading! Gotta get it no matter what: load_plot(_lat1,_lng1,_lat2,_lng2) } }, // peforms get_cached_plot() with a randomized delay of between 1 and 3 seconds // this prevents a zillion requests to the server at the same time and is useful for live viewing // for viewing page_cached plots, it doesn't matter delayed_get_cached_plot: function(_lat1,_lng1,_lat2,_lng2,_bleed) { bleed_delay = 1000+(2000*Math.random(_lat1+_lng1)) //milliseconds, with a random factor to stagger requests setTimeout("get_cached_plot("+_lat1+","+_lng1+","+_lat2+","+_lng2+","+_bleed+")",bleed_delay) }, // requests a JSON plot for a bbox from the server load_plot: function(_lat1,_lng1,_lat2,_lng2) { Cartagen.requested_plots++ new Ajax.Request('/map/plot.js?lat1='+_lat1+'&lng1='+_lng1+'&lat2='+_lat2+'&lng2='+_lng2+'',{ method: 'get', onComplete: function(result) { Cartagen.parse_objects(result.responseText.evalJSON()) Cartagen.requested_plots-- if (Cartagen.requested_plots == 0) last_event = frame Cartagen.debug("Total plots: "+Cartagen.plots.size()+", of which "+Cartagen.requested_plots+" are still loading.") } }) }, // Searches all objects by tags, and sets highlight=true for all matches. 
highlight: function(query) { objects.each(function(object) { object.highlight = false if (query != "" && object.tags && object instanceof Way) { object.tags.each(function(tag) { if (tag.key.toLowerCase().match(query.toLowerCase()) || tag.value.toLowerCase().match(query.toLowerCase())) { object.highlight = true } }) if (object.user && object.user.toLowerCase().match(query.toLowerCase())) object.highlight = true if (object.description && object.description.toLowerCase().match(query.toLowerCase())) object.highlight = true } }) }, show_gss_editor: function() { $('description').hide() $('brief').style.width = '28%' $('brief_first').style.width = '92%'; $('gss').toggle() Cartagen.live_gss = !Cartagen.live_gss }, // sends user to an image of the current canvas redirect_to_image: function() { document.location = canvas.canvas.toDataURL(); }, debug: function(msg) { console.log(msg) } } var Map = { initialize: function() { this.x = Projection.lon_to_x((Cartagen.lng1+Cartagen.lng2)/2) this.y = Projection.lat_to_y((Cartagen.lat1+Cartagen.lat2)/2) }, pointer_x: function() { return Map.x+(((width/-2)-Mouse.x)/Cartagen.zoom_level) }, pointer_y: function() { return Map.y+(((height/-2)-Mouse.y)/Cartagen.zoom_level) }, x: 0, y: 0, rotate: 0, rotate_old: 0, x_old: 0, y_old: 0, // Res down for zoomed-out... getting a NaN for x % 0. Not that much savings yet. resolution: Math.round(Math.abs(Math.log(Cartagen.zoom_level))), refresh_resolution: function() { this.resolution = Math.round(Math.abs(Math.log(Cartagen.zoom_level))) } } var Node = Class.create({ radius: 6, tags: new Hash(), fillStyle: "#555", fontColor: "#eee", fontSize: 12, draw: function() { Cartagen.object_count++ Cartagen.point_count++ canvas.save() this.shape() canvas.restore() }, shape: function() { canvas.save() Style.apply_style(this) beginPath() translate(this.x,this.y-6) arc(0,this.radius,this.radius,0,Math.PI*2,true) fill() stroke() canvas.restore() } }) var Way = Class.create({ age: 0, highlight: false, nodes: [], label: null, closed_poly: false, tags: new Hash(), fillStyle: "#555", fontColor: "#eee", fontSize: 12, initialize: function(data) { Object.extend(this, data) this.bbox = Geometry.calculate_bounding_box(this.nodes) if (this.nodes[0].x == this.nodes[this.nodes.length-1].x && this.nodes[0].y == this.nodes[this.nodes.length-1].y) this.closed_poly = true if (this.tags.get('natural') == "coastline") this.closed_poly = true if (this.closed_poly) { var centroid = Geometry.poly_centroid(this.nodes) this.x = centroid[0]*2 this.y = centroid[1]*2 } else { // attempt to make letters follow line segments: this.x = (this.middle_segment()[0].x+this.middle_segment()[1].x)/2 this.y = (this.middle_segment()[0].y+this.middle_segment()[1].y)/2 } this.area = poly_area(this.nodes) this.label = new Label(this) Style.parse_styles(this,Style.styles.way) // geohash.set(encodeGeoHash()) objects.push(this) Geohash.put(Projection.y_to_lat(this.y),Projection.x_to_lon(this.x),this,6) Cartagen.ways.set(this.id,this) }, // returns the middle-most line segment as a tuple [node_1,node_2] middle_segment: function() { // Cartagen.debug(this.nodes[Math.floor(this.nodes.length/2)+1]) if (this.nodes.length == 1) { return this.nodes[0] } else if (this.nodes.length == 2) { return [this.nodes[0], this.nodes[1]] } else { return [this.nodes[Math.floor(this.nodes.length/2)],this.nodes[Math.floor(this.nodes.length/2)+1]] } }, middle_segment_angle: function() { var segment = this.middle_segment() if (segment[1]) { var _x = segment[0].x-segment[1].x var _y = 
segment[0].y-segment[1].y return (Math.tan(_y/_x)/1.7) } else return 90 }, draw: function() { Cartagen.object_count++ // only draw if in the viewport: if (intersect(viewport[0],viewport[1],viewport[2],viewport[3],this.bbox[0],this.bbox[1],this.bbox[2],this.bbox[3])) { Cartagen.way_count++ this.shape() this.age += 1; } }, shape: function() { canvas.save() Style.apply_style(this) if (this.highlight) { lineWidth(3/Cartagen.zoom_level) strokeStyle("red") } // fade in after load: if (Object.isUndefined(this.opacity)) this.opacity = 1 if (this.age < 20) { canvas.globalAlpha = this.opacity*(this.age/20) } else { canvas.globalAlpha = this.opacity } beginPath() moveTo(this.nodes[0].x,this.nodes[0].y) if (Map.resolution == 0) Map.resolution = 1 this.nodes.each(function(node,index){ if ((index % Map.resolution == 0) || index == 0 || index == this.nodes.length-1 || this.nodes.length <= 30) { Cartagen.node_count++ lineTo(node.x,node.y) } },this) // fill the polygon if the beginning and end nodes are the same. // we'll have to change this for open polys, like coastlines stroke() if (this.closed_poly) fill() // show bboxes for ways: // lineWidth(1) // strokeStyle('red') // strokeRect(this.bbox[1],this.bbox[0],this.bbox[3]-this.bbox[1],this.bbox[2]-this.bbox[0]) // draw label if we're zoomed in enough' if (Cartagen.zoom_level > 0.3) { Cartagen.queue_label(this.label, this.x, this.y) } canvas.restore() } }) var Label = Class.create({ fontFamily: 'Lucida Grande', fontSize: 11, fontBackground: null, text: null, fontScale: false, padding: 6, fontColor: '#eee', initialize: function(_way) { this.way = _way }, draw: function(_x, _y) { if (this.text) { canvas.save() //this.text = this.way.id Style.apply_font_style(this) // try to rotate the labels on unclosed ways: try { if (!this.way.closed_poly) { translate(_x,_y) rotate(this.way.middle_segment_angle()) translate(-1*_x,-1*_y) } } catch(e) {console.log(e)} if (this.fontScale == "fixed") { var _height = Object.value(this.fontSize) var _padding = Object.value(this.padding) } else { var _height = Object.value(this.fontSize)/Cartagen.zoom_level var _padding = Object.value(this.padding)/Cartagen.zoom_level } if (canvas.fillText) { // || Prototype.Browser.Gecko) { canvas.font = _height+"pt "+this.fontFamily var _width = canvas.measureText(Object.value(this.text)).width if (this.fontBackground) { fillStyle(Object.value(this.fontBackground)) rect(_x-((_width+_padding)/2),_y-((_height/2+(_padding/2))),_width+_padding,_height+_padding) } fillStyle(Object.value(this.fontColor)) canvas.fillText(Object.value(this.text),_x-(_width/2),_y+(_height/2)) } else { var _width = canvas.measureCanvasText(Object.value(this.fontFamily),_height,this.text) if (this.fontBackground) { fillStyle(Object.value(this.fontBackground)) rect(_x-((_width+_padding)/2),_y-((_height/2+(_padding/2))),_width+_padding,_height+_padding) } drawTextCenter(Object.value(this.fontFamily),_height,_x,_y+(_height/2),Object.value(this.text)) } canvas.restore() } } }) var Projection = { current_projection: 'spherical_mercator', scale_factor: 100000, set: function(new_projection) { this.current_projection = new_projection }, lon_to_x: function(lon) { return -1*Projection[Projection.current_projection].lon_to_x(lon) }, x_to_lon: function(x) { return -1*Projection[Projection.current_projection].x_to_lon(x) }, lat_to_y: function(lat) { return -1*Projection[Projection.current_projection].lat_to_y(lat) }, y_to_lat: function(y) { return -1*Projection[Projection.current_projection].y_to_lat(y) }, //required by spherical 
mercator: center_lon: function() { return (Cartagen.lng2+Cartagen.lng1)/2 }, spherical_mercator: { lon_to_x: function(lon) { return (lon - Projection.center_lon()) * -1 * Projection.scale_factor }, x_to_lon: function(x) { return (x/(-1*Projection.scale_factor)) + Projection.center_lon() }, lat_to_y: function(lat) { return ((180/Math.PI * (2 * Math.atan(Math.exp(lat*Math.PI/180)) - Math.PI/2))) * Projection.scale_factor * 1.7 }, y_to_lat: function(y) { return (180/Math.PI * Math.log(Math.tan(Math.PI/4+(y/(Projection.scale_factor*1.7))*(Math.PI/180)/2))) } }, elliptical_mercator: { lon_to_x: function(lon) { var r_major = 6378137.000; return r_major * lon; }, x_to_lon: function(x) { var r_major = 6378137.000; return lon/r_major; }, lat_to_y: function(lat) { if (lat > 89.5) lat = 89.5; if (lat < -89.5) lat = -89.5; var r_major = 6378137.000; var r_minor = 6356752.3142; var temp = r_minor / r_major; var es = 1.0 - (temp * temp); var eccent = Math.sqrt(es); var phi = lat; var sinphi = Math.sin(phi); var con = eccent * sinphi; var com = .5 * eccent; con = Math.pow(((1.0-con)/(1.0+con)), com); var ts = Math.tan(.5 * ((Math.PI*0.5) - phi))/con; var y = 0 - r_major * Math.log(ts); return y; }, y_to_lat: function(y) { // unknown } } } User = { color: randomColor(), name: 'anonymous', // lat & lon are based on geolocation: lat: 0, lon: 0, x: -118.31700000003664, y: -6562600.9880228145, node_submit_uri: '/node/write', node_updates_uri: '/node/read', way_submit_uri: '/way/write', way_update_uri: '/way/read', line_width: 10, node_radius: 50, follow_interval: 60, following: false, following_executer: null, drawing_way: false, set_loc: function(loc) { if (loc.coords) { User.lat = loc.coords.latitude User.lon = loc.coords.longitude } else { User.lat = loc.latitude User.lon = loc.longitude } // User.calculate_coords() Cartagen.debug('detected location: '+this.lat+","+this.lon) }, calculate_coords: function() { // this should be based on lat and lon }, create_node: function(_x, _y, _draw, id) { if (Object.isUndefined(_x)) _x = User.x if (Object.isUndefined(_y)) _y = User.y if (Object.isUndefined(id)) id = 'temp_' + (Math.random() * 999999999).floor() var node = new Node() node.x = _x node.y = _y node.radius = User.node_radius node.id = id node.lon = Projection.x_to_lon(_x) node.lat = Projection.y_to_lat(_y) node.fillStyle = User.color if (_draw) { objects.push(node) draw() } return node }, submit_node: function(_x, _y) { var node = User.create_node(_x, _y, true) var params = { color: User.color, lon: node.lon, lat: node.lat, author: User.name } new Ajax.Request(User.node_submit_uri, { method: 'post', parameters: params, onSuccess: function(transport) { node.id = 'cartagen_' + transport.responseText Cartagen.debug('saved node with id ' + node.id) } }) }, toggle_following: function() { if (User.following) { User.following_executer.stop() User.following = false } else { User.following_executer = new PeriodicalExecuter(User.center_map_on_user, User.follow_interval) User.following = true User.center_map_on_user() } }, center_map_on_user: function() { //navigator.geolocation.getCurrentPosition(User.set_loc_and_center) User.set_loc_and_center() }, set_loc_and_center: function(loc) { //User.set_loc(loc) Map.x = User.x Map.y = User.y draw() }, toggle_way_drawing: function(_x, _y) { if (User.drawing_way) { User.add_node(_x, _y) User.submit_way(User.way) } else { User.way = new Way({ id: 'temp_' + (Math.random() * 999999999).floor(), author: User.name, nodes: [User.create_node(_x,_y,true)], tags: new Hash() }) 
User.way.closed_poly = false User.way.strokeStyle = User.color User.way.lineWidth = User.line_width User.way.age = 40 draw() } User.drawing_way = !User.drawing_way }, submit_way: function(_way) { var params = { color: User.color, author: User.name, bbox: _way.bbox, nodes: _way.nodes.collect(function(node) { return [node.lon, node.lat] }) } Cartagen.debug(_way.nodes) Cartagen.debug(params) new Ajax.Request(User.way_submit_uri, { parameters: {way: Object.toJSON(params)}, onSuccess: function(transport) { _way.id = 'cartagen_' + transport.responseJSON.way_id _way.nodes.each(function(nd) { nd.id = 'cartagen_' + transport.responseJSON.node_ids.shift() }) } }) Cartagen.debug(_way) }, add_node: function(_x, _y) { node = User.create_node(_x, _y, true) User.way.nodes.push(node) User.way.bbox = Geometry.calculate_bounding_box(User.way.nodes) draw() } } function overlaps(x1,y1,x2,y2,fudge) { if (x2 > x1-fudge && x2 < x1+fudge) { if (y2 > y1-fudge && y2 < y1+fudge) { return true } else { return false } } else { return false } } function intersect(box1top,box1left,box1bottom,box1right,box2top,box2left,box2bottom,box2right) { return !(box2left > box1right || box2right < box1left || box2top > box1bottom || box2bottom < box1top) } //+ Jonas Raoni Soares Silva //@ http://jsfromhell.com/math/is-point-in-poly [rev. #0] function is_point_in_poly(poly, _x, _y){ for(var c = false, i = -1, l = poly.length, j = l - 1; ++i < l; j = i) ((poly[i].y <= _y && _y < poly[j].y) || (poly[j].y <= _y && _y < poly[i].y)) && (_x < (poly[j].x - poly[i].x) * (_y - poly[i].y) / (poly[j].y - poly[i].y) + poly[i].x) && (c = !c); return c; } // use poly_area(nodes,true) for signed area function poly_area(nodes) { var area = 0 nodes.each(function(node,index) { if (index < nodes.length-1) next = nodes[index+1] else next = nodes[0] if (index > 0) last = nodes[index-1] else last = nodes[nodes.length-1] area += last.x*node.y-node.x*last.y+node.x*next.y-next.x*node.y }) if (arguments[1] == true) return area/2 else return Math.abs(area/2) } var Geometry = { poly_centroid: function(polygon) { var n = polygon.length var cx = 0, cy = 0 var a = poly_area(polygon,true) var centroid = [] var i,j var factor = 0 for (i=0;i<n;i++) { j = (i + 1) % n factor = (polygon[i].x * polygon[j].y - polygon[j].x * polygon[i].y) cx += (polygon[i].x + polygon[j].x) * factor cy += (polygon[i].y + polygon[j].y) * factor } a *= 6 factor = 1/a cx *= factor cy *= factor centroid[0] = cx centroid[1] = cy return centroid }, calculate_bounding_box: function(points) { var bbox = [0,0,0,0] // top, left, bottom, right points.each(function(node) { if (node.x < bbox[1] || bbox[1] == 0) bbox[1] = node.x if (node.x > bbox[3] || bbox[3] == 0) bbox[3] = node.x if (node.y < bbox[0] || bbox[0] == 0) bbox[0] = node.y if (node.y > bbox[2] || bbox[2] == 0) bbox[2] = node.y }) return bbox } /* PolygonCenterOfMass(Point[] polygon,int N) { float cx=0,cy=0; float A=(float)SignedPolygonArea(polygon,N); Point2Df res=new Point2Df(); int i,j; float factor=0; for (i=0;i<N;i++) { j = (i + 1) % N; factor=(polygon[i].x*polygon[j].y-polygon[j].x*polygon[i].y); cx+=(polygon[i].x+polygon[j].x)*factor; cy+=(polygon[i].y+polygon[j].y)*factor; } A*=6.0f; factor=1/A; cx*=factor; cy*=factor; res.x=cx; res.y=cy; return res; } */ } // add Object.value, which returns the argument, unless the argument is a function, // in which case it calls the function and returns the result Object.value = function(obj) { if(Object.isFunction(obj)) return obj() return obj } // This duplicates a function call in glop.js... 
load order issues function randomColor() { return "rgb("+Math.round(Math.random()*255)+","+Math.round(Math.random()*255)+","+Math.round(Math.random()*255)+")" } // Rotates view slowly for cool demo purposes. function demo() { try { Map.rotate += 0.005 } catch(e) {}}
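The Geohash index defined above is just a Prototype Hash of arrays keyed by truncated geohash strings, so nearby features land in the same bucket. A small illustrative sketch follows; it assumes encodeGeoHash() from lib/geohash.js and Prototype's String#truncate are available (both are loaded as dependencies above), and the two feature objects are made up for the example.

// illustrative only: index two nearby features, then read the bucket back
var fountain = { name: 'fountain' }
var bench    = { name: 'bench' }

Geohash.put(41.89000, 12.49000, fountain, 6)
Geohash.put(41.89005, 12.49005, bench, 6)

// points a few metres apart normally share the same 6-character prefix
// (unless they happen to straddle a cell boundary), so both end up in one bucket
Geohash.get(encodeGeoHash(41.89000, 12.49000), 6)   // => [fountain, bench]

// Geohash.trace() would then log a bucket size of 2 for that key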
start of way and node reading
<ide><path>cartagen/public/cartagen/cartagen.js
<ide> resolution: Math.round(Math.abs(Math.log(Cartagen.zoom_level))),
<ide> refresh_resolution: function() {
<ide> this.resolution = Math.round(Math.abs(Math.log(Cartagen.zoom_level)))
<add> },
<add> // [lon1, lat2, lon2, lat1]
<add> get_bbox: function() {
<add> var lon1 = Projection.x_to_lon(Map.x - (width/2))
<add> var lon2 = Projection.x_to_lon(Map.x + (width/2))
<add> var lat1 = Projection.y_to_lat(Map.y - (height/2))
<add> var lat2 = Projection.y_to_lat(Map.y + (height/2))
<add> return [lon1, lat2, lon2, lat1]
<ide> }
<ide> }
<ide>
<ide> User.way.nodes.push(node)
<ide> User.way.bbox = Geometry.calculate_bounding_box(User.way.nodes)
<ide> draw()
<del> }
<add> },
<add> update: function() {
<add> if (User.last_pos && User.last_pos == lastPos) {
<add> var timestamp = User.last_update
<add> }
<add> User.last_pos = lastPos
<add> User.last_update = (new Date()).toUTCString()
<ide>
<add>
<add> new Ajax.Request(User.node_update_uri, {
<add> })
<add> }
<ide> }
<ide>
<ide> function overlaps(x1,y1,x2,y2,fudge) {
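The new Map.get_bbox() in this diff converts the current viewport corners back to geographic coordinates and returns them in [lon1, lat2, lon2, lat1] order, as its comment notes. Below is a hedged sketch of how a caller might use it for the way and node reading this commit starts: only way_update_uri ('/way/read') and parse_objects() come from the code above, while the bbox parameter name and the response shape are assumptions.

// illustrative only: request the ways inside the current viewport,
// assuming the server accepts a comma-separated bbox and replies with the
// same { osm: { node: [...], way: [...] } } JSON that parse_objects() consumes
var bbox = Map.get_bbox()    // [lon1, lat2, lon2, lat1]
new Ajax.Request(User.way_update_uri, {
	method: 'get',
	parameters: { bbox: bbox.join(',') },
	onComplete: function(result) {
		Cartagen.parse_objects(result.responseText.evalJSON())
	}
})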
Java
epl-1.0
d6f530eb0bb4ce88c9d9a47fa8fdaf41d661a79d
0
Techjar/ForgeEssentials,liachmodded/ForgeEssentials,CityOfLearning/ForgeEssentials,planetguy32/ForgeEssentials,ForgeEssentials/ForgeEssentialsMain
package com.forgeessentials.scripting; import java.lang.management.ManagementFactory; import java.lang.management.RuntimeMXBean; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.text.DecimalFormat; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import net.minecraft.command.ICommandSender; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.server.MinecraftServer; import net.minecraft.util.EnumChatFormatting; import com.forgeessentials.api.APIRegistry; import com.forgeessentials.api.UserIdent; import com.forgeessentials.chat.ChatConfig; import com.forgeessentials.commons.selections.WorldPoint; import com.forgeessentials.core.FEConfig; import com.forgeessentials.scripting.ScriptParser.MissingPlayerException; import com.forgeessentials.scripting.ScriptParser.ScriptArgument; import com.forgeessentials.scripting.ScriptParser.ScriptException; import com.forgeessentials.scripting.ScriptParser.SyntaxException; import com.forgeessentials.util.PlayerInfo; import com.forgeessentials.util.ServerUtil; import com.forgeessentials.util.output.ChatOutputHandler; import com.google.common.collect.ImmutableMap; public final class ScriptArguments { private static Map<String, ScriptArgument> scriptArguments = new HashMap<>(); public static void add(String name, ScriptArgument argument) { if (scriptArguments.containsKey(name)) throw new RuntimeException(String.format("Script argument name @%s already registered", name)); scriptArguments.put(name, argument); } public static ScriptArgument get(String name) { return scriptArguments.get(name); } public static Map<String, ScriptArgument> getAll() { return ImmutableMap.copyOf(scriptArguments); } public static final Pattern ARGUMENT_PATTERN = Pattern.compile("@\\{?(\\w+)\\}?"); public static String process(String text, ICommandSender sender) throws ScriptException { return process(text, sender, null); } public static String process(String text, ICommandSender sender, List<?> args) throws ScriptException { Matcher m = ARGUMENT_PATTERN.matcher(text); StringBuffer sb = new StringBuffer(); while (m.find()) { String modifier = m.group(1).toLowerCase(); ScriptArgument argument = get(modifier); if (argument != null) m.appendReplacement(sb, argument.process(sender)); else if (args == null) m.appendReplacement(sb, m.group()); else try { int idx = Integer.parseInt(modifier); if (args == null || idx < 1 || idx > args.size()) throw new SyntaxException("Missing argument @%d", idx); m.appendReplacement(sb, args.get(idx - 1).toString()); } catch (NumberFormatException e) { m.appendReplacement(sb, m.group()); } } m.appendTail(sb); return sb.toString(); } public static String processSafe(String text, ICommandSender sender) { return processSafe(text, sender, null); } public static String processSafe(String text, ICommandSender sender, List<?> args) { Matcher m = ARGUMENT_PATTERN.matcher(text); StringBuffer sb = new StringBuffer(); while (m.find()) { String modifier = m.group(1).toLowerCase(); ScriptArgument argument = get(modifier); if (argument != null) { try { m.appendReplacement(sb, argument.process(sender)); } catch (ScriptException e) { m.appendReplacement(sb, m.group()); } } else if (args == null) m.appendReplacement(sb, m.group()); else try { int idx = Integer.parseInt(modifier); if (args == null || idx >= args.size()) throw new SyntaxException("Missing argument @%d", idx); m.appendReplacement(sb, 
args.get(idx).toString()); } catch (NumberFormatException e) { m.appendReplacement(sb, m.group()); } } m.appendTail(sb); return sb.toString(); } private static void registerAll() { try { for (Field field : ScriptArguments.class.getDeclaredFields()) if (ScriptArgument.class.isAssignableFrom(field.getType()) && Modifier.isStatic(field.getModifiers())) add(field.getName().toLowerCase(), (ScriptArgument) field.get(null)); } catch (IllegalArgumentException | IllegalAccessException e) { throw new RuntimeException(e); } } public static ScriptArgument sender = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (sender == null) throw new MissingPlayerException(); return sender.getCommandSenderName(); } @Override public String getHelp() { return "Command sender name"; } }; public static ScriptArgument player = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (sender == null) throw new MissingPlayerException(); return sender.getCommandSenderName(); } @Override public String getHelp() { return "Player name"; } }; public static ScriptArgument uuid = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return ((EntityPlayerMP) sender).getPersistentID().toString(); } @Override public String getHelp() { return "Player UUID"; } }; public static ScriptArgument x = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString((int) ((EntityPlayerMP) sender).posX); } @Override public String getHelp() { return "Player X position (as integer)"; } }; public static ScriptArgument y = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString((int) ((EntityPlayerMP) sender).posY); } @Override public String getHelp() { return "Player Y position (as integer)"; } }; public static ScriptArgument z = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString((int) ((EntityPlayerMP) sender).posZ); } @Override public String getHelp() { return "Player Z position (as integer)"; } }; public static ScriptArgument xd = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Double.toString(((EntityPlayerMP) sender).posX); } @Override public String getHelp() { return "Player X position (as floating point number)"; } }; public static ScriptArgument yd = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Double.toString(((EntityPlayerMP) sender).posY); } @Override public String getHelp() { return "Player Y position (as floating point number)"; } }; public static ScriptArgument zd = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Double.toString(((EntityPlayerMP) sender).posZ); } @Override public String getHelp() { return "Player Z position (as floating point number)"; } }; public static ScriptArgument dim = new ScriptArgument() { @Override public String process(ICommandSender 
sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString(((EntityPlayerMP) sender).dimension); } @Override public String getHelp() { return "Player dimension"; } }; public static ScriptArgument gm = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); if (((EntityPlayerMP) sender).theItemInWorldManager.getGameType().isCreative()) return ChatConfig.gamemodeCreative; if (((EntityPlayerMP) sender).theItemInWorldManager.getGameType().isAdventure()) return ChatConfig.gamemodeAdventure; return ChatConfig.gamemodeSurvival; } @Override public String getHelp() { return "Player gamemode"; } }; public static ScriptArgument health = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Float.toString(((EntityPlayerMP) sender).getHealth()); } @Override public String getHelp() { return "Player health"; } }; public static ScriptArgument healthcolor = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); float health = ((EntityPlayerMP) sender).getHealth(); if (health <= 6) return EnumChatFormatting.RED.toString(); if (health < 16) return EnumChatFormatting.YELLOW.toString(); return EnumChatFormatting.GREEN.toString(); } @Override public String getHelp() { return "Insert color code based on player health"; } }; public static ScriptArgument hunger = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString(((EntityPlayerMP) sender).getFoodStats().getFoodLevel()); } @Override public String getHelp() { return "Player hunger level"; } }; public static ScriptArgument hungercolor = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); float hunger = ((EntityPlayerMP) sender).getFoodStats().getFoodLevel(); if (hunger <= 6) return EnumChatFormatting.RED.toString(); if (hunger < 12) return EnumChatFormatting.YELLOW.toString(); return EnumChatFormatting.GREEN.toString(); } @Override public String getHelp() { return "Insert color code based on player hunger level"; } }; public static ScriptArgument saturation = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Float.toString(((EntityPlayerMP) sender).getFoodStats().getSaturationLevel()); } @Override public String getHelp() { return "Player (food) saturation level"; } }; public static ScriptArgument saturationcolor = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); float hunger = ((EntityPlayerMP) sender).getFoodStats().getSaturationLevel(); if (hunger <= 0) return EnumChatFormatting.RED.toString(); if (hunger <= 1.5) return EnumChatFormatting.YELLOW.toString(); return EnumChatFormatting.GREEN.toString(); } @Override public String getHelp() { return "Insert color code based on player saturation level"; } }; public static ScriptArgument zone = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) 
throw new MissingPlayerException(); return APIRegistry.perms.getServerZone().getZoneAt(new WorldPoint(((EntityPlayerMP) sender))).getName(); } @Override public String getHelp() { return "Get name of the zone the player is in"; } }; public static ScriptArgument zoneId = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString(APIRegistry.perms.getServerZone().getZoneAt(new WorldPoint(((EntityPlayerMP) sender))).getId()); } @Override public String getHelp() { return "Get ID of the zone the player is in"; } }; public static ScriptArgument group = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return APIRegistry.perms.getServerZone().getPlayerGroups(UserIdent.get(_player)).first().getGroup(); } @Override public String getHelp() { return "Get name of the zone the player is in"; } }; public static ScriptArgument timePlayed = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return ChatOutputHandler.formatTimeDurationReadable(PlayerInfo.get(_player).getTimePlayed() / 1000, true); } @Override public String getHelp() { return "Get total time a player played on the server" + ""; } }; public static ScriptArgument lastLogout = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return FEConfig.FORMAT_DATE_TIME.format(PlayerInfo.get(_player).getLastLogout()); } @Override public String getHelp() { return "Get the time a player logged out last time"; } }; public static ScriptArgument lastLogin = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return FEConfig.FORMAT_DATE_TIME.format(PlayerInfo.get(_player).getLastLogin()); } @Override public String getHelp() { return "Get the time a player logged in last time"; } }; public static ScriptArgument sinceLastLogout = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return ChatOutputHandler.formatTimeDurationReadable((new Date().getTime() - PlayerInfo.get(_player).getLastLogout().getTime()) / 1000, true); } @Override public String getHelp() { return "Get the time since a player logged out last time"; } }; public static ScriptArgument sinceLastLogin = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return ChatOutputHandler.formatTimeDurationReadable((new Date().getTime() - PlayerInfo.get(_player).getLastLogin().getTime()) / 1000, true); } @Override public String getHelp() { return "Get the time since a player logged in last time"; } }; public static ScriptArgument tps = new ScriptArgument() { @Override public String process(ICommandSender sender) { return new DecimalFormat("#").format(Math.min(20, 
ServerUtil.getTPS())); } @Override public String getHelp() { return "Ticks per second"; } }; public static ScriptArgument realTime = new ScriptArgument() { @Override public String process(ICommandSender sender) { return FEConfig.FORMAT_TIME.format(new Date()); } @Override public String getHelp() { return "Current real time"; } }; public static ScriptArgument realDate = new ScriptArgument() { @Override public String process(ICommandSender sender) { return FEConfig.FORMAT_DATE.format(new Date()); } @Override public String getHelp() { return "Current real date"; } }; public static ScriptArgument worldTime = new ScriptArgument() { @Override public String process(ICommandSender sender) { return new DecimalFormat("#").format(MinecraftServer.getServer().getEntityWorld().getWorldTime()); } @Override public String getHelp() { return "Current MC world time"; } }; public static ScriptArgument totalWorldTime = new ScriptArgument() { @Override public String process(ICommandSender sender) { return new DecimalFormat("#").format(MinecraftServer.getServer().getEntityWorld().getTotalWorldTime()); } @Override public String getHelp() { return "MC time passed since map creation"; } }; public static ScriptArgument serverUptime = new ScriptArgument() { @Override public String process(ICommandSender sender) { RuntimeMXBean rb = ManagementFactory.getRuntimeMXBean(); return ChatOutputHandler.formatTimeDurationReadable(rb.getUptime() / 1000, true); } @Override public String getHelp() { return "Time since server start"; } }; public static ScriptArgument onlinePlayers = new ScriptArgument() { @Override public String process(ICommandSender sender) { int online = 0; try { online = MinecraftServer.getServer().getCurrentPlayerCount(); } catch (Exception e) { /* do nothing */ } return Integer.toString(online); } @Override public String getHelp() { return "Number of players that are online right now"; } }; public static ScriptArgument uniquePlayers = new ScriptArgument() { @Override public String process(ICommandSender sender) { return Integer.toString(APIRegistry.perms.getServerZone().getKnownPlayers().size()); } @Override public String getHelp() { return "Number of unique players on the server at all time"; } }; static { registerAll(); add("p", player); } }
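ScriptArguments.process() above is driven entirely by ARGUMENT_PATTERN: placeholders such as @player or @{x} are resolved through the registered ScriptArgument map, while purely numeric placeholders are treated as 1-based positional arguments. Here is a minimal JavaScript sketch of that substitution rule, illustrative only; it is simplified in that the Java version also lowercases the placeholder name and throws a SyntaxException when a positional index is out of range, and the expand() helper and its inputs are made up for the example.

// illustrative only: the placeholder rule implemented by process() above
var ARGUMENT_PATTERN = /@\{?(\w+)\}?/g

function expand(text, named, args) {
	return text.replace(ARGUMENT_PATTERN, function(match, name) {
		if (named[name] !== undefined) return named[name]   // registered argument, e.g. @player
		var idx = parseInt(name, 10)
		if (!isNaN(idx) && idx >= 1 && idx <= args.length)
			return args[idx - 1]                             // positional arguments are 1-based
		return match                                         // unknown placeholders are left untouched
	})
}

expand("Giving @1 x@2 to @player", { player: "Steve" }, ["diamond", "5"])
// => "Giving diamond x5 to Steve"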
src/main/java/com/forgeessentials/scripting/ScriptArguments.java
package com.forgeessentials.scripting; import java.lang.management.ManagementFactory; import java.lang.management.RuntimeMXBean; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.text.DecimalFormat; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import net.minecraft.command.ICommandSender; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.server.MinecraftServer; import net.minecraft.util.EnumChatFormatting; import com.forgeessentials.api.APIRegistry; import com.forgeessentials.api.UserIdent; import com.forgeessentials.chat.ChatConfig; import com.forgeessentials.commons.selections.WorldPoint; import com.forgeessentials.core.FEConfig; import com.forgeessentials.scripting.ScriptParser.MissingPlayerException; import com.forgeessentials.scripting.ScriptParser.ScriptArgument; import com.forgeessentials.scripting.ScriptParser.ScriptException; import com.forgeessentials.scripting.ScriptParser.SyntaxException; import com.forgeessentials.util.output.ChatOutputHandler; import com.forgeessentials.util.PlayerInfo; import com.forgeessentials.util.ServerUtil; import com.google.common.collect.ImmutableMap; public final class ScriptArguments { private static Map<String, ScriptArgument> scriptArguments = new HashMap<>(); public static void add(String name, ScriptArgument argument) { if (scriptArguments.containsKey(name)) throw new RuntimeException(String.format("Script argument name @%s already registered", name)); scriptArguments.put(name, argument); } public static ScriptArgument get(String name) { return scriptArguments.get(name); } public static Map<String, ScriptArgument> getAll() { return ImmutableMap.copyOf(scriptArguments); } public static final Pattern ARGUMENT_PATTERN = Pattern.compile("@\\{?(\\w+)\\}?"); public static String process(String text, ICommandSender sender) throws ScriptException { return process(text, sender, null); } public static String process(String text, ICommandSender sender, List<?> args) throws ScriptException { Matcher m = ARGUMENT_PATTERN.matcher(text); StringBuffer sb = new StringBuffer(); while (m.find()) { String modifier = m.group(1).toLowerCase(); ScriptArgument argument = get(modifier); if (argument != null) m.appendReplacement(sb, argument.process(sender)); else if (args == null) m.appendReplacement(sb, m.group()); else try { int idx = Integer.parseInt(modifier); if (args == null || idx >= args.size()) throw new SyntaxException("Missing argument @%d", idx); m.appendReplacement(sb, args.get(idx).toString()); } catch (NumberFormatException e) { m.appendReplacement(sb, m.group()); } } m.appendTail(sb); return sb.toString(); } public static String processSafe(String text, ICommandSender sender) { return processSafe(text, sender, null); } public static String processSafe(String text, ICommandSender sender, List<?> args) { Matcher m = ARGUMENT_PATTERN.matcher(text); StringBuffer sb = new StringBuffer(); while (m.find()) { String modifier = m.group(1).toLowerCase(); ScriptArgument argument = get(modifier); if (argument != null) { try { m.appendReplacement(sb, argument.process(sender)); } catch (ScriptException e) { m.appendReplacement(sb, m.group()); } } else if (args == null) m.appendReplacement(sb, m.group()); else try { int idx = Integer.parseInt(modifier); if (args == null || idx >= args.size()) throw new SyntaxException("Missing argument @%d", idx); m.appendReplacement(sb, args.get(idx).toString()); } catch 
(NumberFormatException e) { m.appendReplacement(sb, m.group()); } } m.appendTail(sb); return sb.toString(); } private static void registerAll() { try { for (Field field : ScriptArguments.class.getDeclaredFields()) if (ScriptArgument.class.isAssignableFrom(field.getType()) && Modifier.isStatic(field.getModifiers())) add(field.getName().toLowerCase(), (ScriptArgument) field.get(null)); } catch (IllegalArgumentException | IllegalAccessException e) { throw new RuntimeException(e); } } public static ScriptArgument sender = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (sender == null) throw new MissingPlayerException(); return sender.getCommandSenderName(); } @Override public String getHelp() { return "Command sender name"; } }; public static ScriptArgument player = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (sender == null) throw new MissingPlayerException(); return sender.getCommandSenderName(); } @Override public String getHelp() { return "Player name"; } }; public static ScriptArgument uuid = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return ((EntityPlayerMP) sender).getPersistentID().toString(); } @Override public String getHelp() { return "Player UUID"; } }; public static ScriptArgument x = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString((int) ((EntityPlayerMP) sender).posX); } @Override public String getHelp() { return "Player X position (as integer)"; } }; public static ScriptArgument y = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString((int) ((EntityPlayerMP) sender).posY); } @Override public String getHelp() { return "Player Y position (as integer)"; } }; public static ScriptArgument z = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString((int) ((EntityPlayerMP) sender).posZ); } @Override public String getHelp() { return "Player Z position (as integer)"; } }; public static ScriptArgument xd = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Double.toString(((EntityPlayerMP) sender).posX); } @Override public String getHelp() { return "Player X position (as floating point number)"; } }; public static ScriptArgument yd = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Double.toString(((EntityPlayerMP) sender).posY); } @Override public String getHelp() { return "Player Y position (as floating point number)"; } }; public static ScriptArgument zd = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Double.toString(((EntityPlayerMP) sender).posZ); } @Override public String getHelp() { return "Player Z position (as floating point number)"; } }; public static ScriptArgument dim = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof 
EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString(((EntityPlayerMP) sender).dimension); } @Override public String getHelp() { return "Player dimension"; } }; public static ScriptArgument gm = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); if (((EntityPlayerMP) sender).theItemInWorldManager.getGameType().isCreative()) return ChatConfig.gamemodeCreative; if (((EntityPlayerMP) sender).theItemInWorldManager.getGameType().isAdventure()) return ChatConfig.gamemodeAdventure; return ChatConfig.gamemodeSurvival; } @Override public String getHelp() { return "Player gamemode"; } }; public static ScriptArgument health = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Float.toString(((EntityPlayerMP) sender).getHealth()); } @Override public String getHelp() { return "Player health"; } }; public static ScriptArgument healthcolor = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); float health = ((EntityPlayerMP) sender).getHealth(); if (health <= 6) return EnumChatFormatting.RED.toString(); if (health < 16) return EnumChatFormatting.YELLOW.toString(); return EnumChatFormatting.GREEN.toString(); } @Override public String getHelp() { return "Insert color code based on player health"; } }; public static ScriptArgument hunger = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString(((EntityPlayerMP) sender).getFoodStats().getFoodLevel()); } @Override public String getHelp() { return "Player hunger level"; } }; public static ScriptArgument hungercolor = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); float hunger = ((EntityPlayerMP) sender).getFoodStats().getFoodLevel(); if (hunger <= 6) return EnumChatFormatting.RED.toString(); if (hunger < 12) return EnumChatFormatting.YELLOW.toString(); return EnumChatFormatting.GREEN.toString(); } @Override public String getHelp() { return "Insert color code based on player hunger level"; } }; public static ScriptArgument saturation = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Float.toString(((EntityPlayerMP) sender).getFoodStats().getSaturationLevel()); } @Override public String getHelp() { return "Player (food) saturation level"; } }; public static ScriptArgument saturationcolor = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); float hunger = ((EntityPlayerMP) sender).getFoodStats().getSaturationLevel(); if (hunger <= 0) return EnumChatFormatting.RED.toString(); if (hunger <= 1.5) return EnumChatFormatting.YELLOW.toString(); return EnumChatFormatting.GREEN.toString(); } @Override public String getHelp() { return "Insert color code based on player saturation level"; } }; public static ScriptArgument zone = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); 
return APIRegistry.perms.getServerZone().getZoneAt(new WorldPoint(((EntityPlayerMP) sender))).getName(); } @Override public String getHelp() { return "Get name of the zone the player is in"; } }; public static ScriptArgument zoneId = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); return Integer.toString(APIRegistry.perms.getServerZone().getZoneAt(new WorldPoint(((EntityPlayerMP) sender))).getId()); } @Override public String getHelp() { return "Get ID of the zone the player is in"; } }; public static ScriptArgument group = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return APIRegistry.perms.getServerZone().getPlayerGroups(UserIdent.get(_player)).first().getGroup(); } @Override public String getHelp() { return "Get name of the zone the player is in"; } }; public static ScriptArgument timePlayed = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return ChatOutputHandler.formatTimeDurationReadable(PlayerInfo.get(_player).getTimePlayed() / 1000, true); } @Override public String getHelp() { return "Get total time a player played on the server" + ""; } }; public static ScriptArgument lastLogout = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return FEConfig.FORMAT_DATE_TIME.format(PlayerInfo.get(_player).getLastLogout()); } @Override public String getHelp() { return "Get the time a player logged out last time"; } }; public static ScriptArgument lastLogin = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return FEConfig.FORMAT_DATE_TIME.format(PlayerInfo.get(_player).getLastLogin()); } @Override public String getHelp() { return "Get the time a player logged in last time"; } }; public static ScriptArgument sinceLastLogout = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return ChatOutputHandler.formatTimeDurationReadable((new Date().getTime() - PlayerInfo.get(_player).getLastLogout().getTime()) / 1000, true); } @Override public String getHelp() { return "Get the time since a player logged out last time"; } }; public static ScriptArgument sinceLastLogin = new ScriptArgument() { @Override public String process(ICommandSender sender) { if (!(sender instanceof EntityPlayerMP)) throw new MissingPlayerException(); EntityPlayerMP _player = ((EntityPlayerMP) sender); return ChatOutputHandler.formatTimeDurationReadable((new Date().getTime() - PlayerInfo.get(_player).getLastLogin().getTime()) / 1000, true); } @Override public String getHelp() { return "Get the time since a player logged in last time"; } }; public static ScriptArgument tps = new ScriptArgument() { @Override public String process(ICommandSender sender) { return new DecimalFormat("#").format(Math.min(20, ServerUtil.getTPS())); } @Override 
public String getHelp() { return "Ticks per second"; } }; public static ScriptArgument realTime = new ScriptArgument() { @Override public String process(ICommandSender sender) { return FEConfig.FORMAT_TIME.format(new Date()); } @Override public String getHelp() { return "Current real time"; } }; public static ScriptArgument realDate = new ScriptArgument() { @Override public String process(ICommandSender sender) { return FEConfig.FORMAT_DATE.format(new Date()); } @Override public String getHelp() { return "Current real date"; } }; public static ScriptArgument worldTime = new ScriptArgument() { @Override public String process(ICommandSender sender) { return new DecimalFormat("#").format(MinecraftServer.getServer().getEntityWorld().getWorldTime()); } @Override public String getHelp() { return "Current MC world time"; } }; public static ScriptArgument totalWorldTime = new ScriptArgument() { @Override public String process(ICommandSender sender) { return new DecimalFormat("#").format(MinecraftServer.getServer().getEntityWorld().getTotalWorldTime()); } @Override public String getHelp() { return "MC time passed since map creation"; } }; public static ScriptArgument serverUptime = new ScriptArgument() { @Override public String process(ICommandSender sender) { RuntimeMXBean rb = ManagementFactory.getRuntimeMXBean(); return ChatOutputHandler.formatTimeDurationReadable(rb.getUptime() / 1000, true); } @Override public String getHelp() { return "Time since server start"; } }; public static ScriptArgument onlinePlayers = new ScriptArgument() { @Override public String process(ICommandSender sender) { int online = 0; try { online = MinecraftServer.getServer().getCurrentPlayerCount(); } catch (Exception e) { } return Integer.toString(online); } @Override public String getHelp() { return "Number of players that are online right now"; } }; public static ScriptArgument uniquePlayers = new ScriptArgument() { @Override public String process(ICommandSender sender) { return Integer.toString(APIRegistry.perms.getServerZone().getKnownPlayers().size()); } @Override public String getHelp() { return "Number of unique players on the server at all time"; } }; static { registerAll(); add("p", player); } }
Fixed script arguments to start numbering from 1 instead of 0
src/main/java/com/forgeessentials/scripting/ScriptArguments.java
Fixed script arguments to start numbering from 1 instead of 0
<ide><path>rc/main/java/com/forgeessentials/scripting/ScriptArguments.java <ide> import com.forgeessentials.scripting.ScriptParser.ScriptArgument; <ide> import com.forgeessentials.scripting.ScriptParser.ScriptException; <ide> import com.forgeessentials.scripting.ScriptParser.SyntaxException; <del>import com.forgeessentials.util.output.ChatOutputHandler; <ide> import com.forgeessentials.util.PlayerInfo; <ide> import com.forgeessentials.util.ServerUtil; <add>import com.forgeessentials.util.output.ChatOutputHandler; <ide> import com.google.common.collect.ImmutableMap; <ide> <ide> public final class ScriptArguments <ide> try <ide> { <ide> int idx = Integer.parseInt(modifier); <del> if (args == null || idx >= args.size()) <add> if (args == null || idx < 1 || idx > args.size()) <ide> throw new SyntaxException("Missing argument @%d", idx); <del> m.appendReplacement(sb, args.get(idx).toString()); <add> m.appendReplacement(sb, args.get(idx - 1).toString()); <ide> } <ide> catch (NumberFormatException e) <ide> { <ide> } <ide> catch (Exception e) <ide> { <add> /* do nothing */ <ide> } <ide> return Integer.toString(online); <ide> }
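The diff above changes ForgeEssentials script-argument references from 0-based to 1-based indexing: indices below 1 or above the argument count are rejected, and `@1` now maps to the first supplied argument. A minimal standalone sketch of that lookup rule follows; the class and method names are invented for illustration, and only the bounds-check-and-offset logic mirrors the diff.

```java
import java.util.List;

public class ArgumentLookupSketch {

    /** Stand-in for the script SyntaxException used in the real code. */
    static class SyntaxException extends RuntimeException {
        SyntaxException(String message) { super(message); }
    }

    /**
     * Resolves a 1-based argument reference such as "@2": index 1 maps to
     * list element 0, and indices outside 1..size() are reported as missing.
     */
    static String resolveArgument(List<String> args, int idx) {
        if (args == null || idx < 1 || idx > args.size())
            throw new SyntaxException("Missing argument @" + idx);
        return args.get(idx - 1);
    }

    public static void main(String[] unused) {
        List<String> args = List.of("alpha", "beta");
        System.out.println(resolveArgument(args, 1)); // alpha
        System.out.println(resolveArgument(args, 2)); // beta
    }
}
```

Under the old 0-based check, `@0` referred to the first argument and `@1` to the second; the 1-based form matches the numbering described in the commit message.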
JavaScript
apache-2.0
3acca94cacc7128db1cbd32ce96714f1cc072543
0
google/tern-closure,angelozerr/tern-closure
'use strict'; var infer = require('tern/lib/infer'); var tern = require('tern/lib/tern'); var comment = require('tern/lib/comment'); var walk = require('acorn/util/walk'); var doctrine = require('doctrine'); var Weight = { TEMP_OBJ: 40, TEMP_CTOR: 50 } tern.registerPlugin('closure', function() { var defs = { '!name': 'closure', goog: { provide: 'fn(name: string) -> !custom:closureProvide', require: 'fn(name: string) -> !custom:closureRequire' }, }; return { passes: { 'postParse': postParse, 'postInfer': postInfer }, defs: defs }; }); infer.registerFunction('closureProvide', function(_self, args, argNodes) { if (!argNodes || !argNodes.length || argNodes[0].type != "Literal" || typeof argNodes[0].value != "string") return infer.ANull; defineQualifiedName(argNodes[0].value); return infer.ANull; }); infer.registerFunction('closureRequire', function(_self, args, argNodes) { if (!argNodes || !argNodes.length || argNodes[0].type != "Literal" || typeof argNodes[0].value != "string") return infer.ANull; defineQualifiedName(argNodes[0].value); return infer.ANull; }); /** * Walks the syntax tree after the Acorn parsing pass, parsing JSDoc comments * and attaching them to their corresponding nodes. * @param {!acorn.Node} ast * @param {string} text The file text. */ function postParse(ast, text) { function attachComments(node) { // TODO: Do our own comment-finding, handling casts. var comments = comment.commentsBefore(text, node.start); if (comments) { node._closureComment = doctrine.parse( '/*' + comments[comments.length - 1] + '*/', {unwrap: true}); } } // TODO: Handle property declarations with no initialization, e.g. // /** @type {BlahType} */ // Class.prototype.blah; walk.simple(ast, { VariableDeclaration: attachComments, FunctionDeclaration: attachComments, AssignmentExpression: function(node) { if (node.operator == '=') { attachComments(node); } }, ObjectExpression: function(node) { for (var i = 0; i < node.properties.length; ++i) { attachComments(node.properties[i].key); } } }); } /** * Applies type information from JSDoc comments to the initialized values after * Tern's type inference pass. * @param {!acorn.Node} ast * @param {!infer.Scope} scope */ function postInfer(ast, scope) { walk.simple(ast, { VariableDeclaration: function(node, scope) { interpretComments(node, node._closureComment, scope.getProp(node.declarations[0].id.name)); }, FunctionDeclaration: function(node, scope) { interpretComments( node, node._closureComment, scope.getProp(node.id.name)); }, AssignmentExpression: function(node, scope) { interpretComments(node, node._closureComment, infer.expressionType({node: node.left, state: scope})); }, ObjectExpression: function(node, scope) { for (var i = 0; i < node.properties.length; ++i) { var prop = node.properties[i], key = prop.key; interpretComments( prop, key._closureComment, node.objType.getProp(key.name)); } } }, infer.searchVisitor, scope); } /** * Interpret the comments before an expression and apply type information from * the comments. * @param {!acorn.Node} node An Acorn AST node. * @param {{description: string, tags: Array}} comment The Doctrine-parsed * comment before the node if present. * @param {!infer.AVal} aval An abtract type value to which type information * should be applied. 
*/ function interpretComments(node, comment, aval) { if (!comment) { return; } var argTypes, returnType, valueType; var argDocs, returnDoc, valueDoc; for (var i = 0; i < comment.tags.length; i++) { var tag = comment.tags[i]; var type; if (tag.type) { type = getExpressionAval(tag.type); } // TODO: Handle many other tags (inherits, implements, typedef, override...) switch (tag.title) { case 'type': case 'private': case 'protected': case 'public': // TODO: Add completion filtering based on access restrictions? valueType = type; valueDoc = tag.description; break; case 'return': case 'returns': returnType = type; returnDoc = tag.description; break; case 'param': case 'arg': case 'argument': (argTypes || (argTypes = Object.create(null)))[tag.name] = type; (argDocs || (argDocs = Object.create(null)))[tag.name] = tag.description; break; } } var fnType = getFnType(node); if (fnType) { // This comment applies to a function, and we have information to apply // to that function type. applyFnTypeInfo(fnType, argTypes, argDocs, returnType, returnDoc); if (comment.description) { fnType.doc = comment.description; } } else if (valueType) { // This comment applies to a variable or property. valueType.propagate(aval); setDoc(aval, comment.description || valueDoc); } } /** * Applies the given argument and return type information to the given function * type. * @param {!infer.Fn} fnType The function type to propagate to. * @param {Object.<infer.AVal>} argTypes A map of argument names to parsed * types. * @param {Object.<string>} argDocs Doc comments for the arguments. * @param {infer.AVal} returnType The parsed return type. * @param {string} returnDoc Doc comments for the return value. */ function applyFnTypeInfo(fnType, argTypes, argDocs, returnType, returnDoc) { if (argTypes) { for (var i = 0; i < fnType.argNames.length; i++) { var name = fnType.argNames[i]; var docArgType = argTypes[name]; // Propagate the documented type info to the inferred argument type. if (docArgType) { docArgType.propagate(fnType.args[i]); setDoc(fnType.args[i], argDocs[name]); } } } // Propagate any return type info. if (returnType) { returnType.propagate(fnType.retval); setDoc(fnType.retval, returnDoc); } } /** * Recursively process a JSDoc type expression to assemble a corresponding AVal. * @param {{type: string}} typeExpr A Doctrine parsed type expression. * @param {infer.AVal=} innerType The inner type, for type applications. * @return {infer.AVal} An abstract value for the type expression. */ function getExpressionAval(typeExpr, innerType) { var t; switch (typeExpr.type) { case doctrine.Syntax.NameExpression: return getQualifiedType(typeExpr.name, innerType); case doctrine.Syntax.NullableType: case doctrine.Syntax.NonNullableType: // TODO: Expose nullability information. return getExpressionAval(typeExpr.expression, innerType); case doctrine.Syntax.OptionalType: // TODO: Expose optional param information (orthogonal to nullability). return getExpressionAval(typeExpr.expression, innerType); case doctrine.Syntax.UnionType: // TODO: Decide if this behaves better with a custom synthetic 'Union' // type. var aval = new infer.AVal(); typeExpr.elements.forEach(function(subExpr) { if (t = getExpressionAval(subExpr, innerType)) { aval.addType(t); } }); return aval; case doctrine.Syntax.RestType: // TODO: Expose varargs. return getExpressionAval(typeExpr.expression, innerType); case doctrine.Syntax.RecordType: case doctrine.Syntax.FieldType: // TODO: Handle records. 
return null; case doctrine.Syntax.FunctionType: // TODO: Handle functions. return null; case doctrine.Syntax.TypeApplication: // TODO: Handle more exotic type applications? // We support type applications for array and object values. In those // cases, only the last applied type (the value type) is meaningful. return getExpressionAval(typeExpr.expression, getExpressionAval( typeExpr.applications[typeExpr.applications.length - 1], innerType)); case doctrine.Syntax.NullLiteral: case doctrine.Syntax.UndefinedLiteral: return infer.ANull; case doctrine.Syntax.NullableLiteral: case doctrine.Syntax.AllLiteral: case doctrine.Syntax.VoidLiteral: // No type to apply. return null; default: console.log('Unknown expression type: ' + typeExpr.type); return null; } } /** * Gets an AVal for a type, given its qualified name. Creates stand-in * AVals for types and namespaces that have not been defined yet. * @param {string} name The type name. * @param {infer.AVal=} innerType In the case of a type application, the * applied type. * @return {!infer.AVal} */ function getQualifiedType(name, innerType) { // Handle primitives. if (/^(number|integer)$/i.test(name)) { return infer.cx().num; } if (/^bool(ean)?$/i.test(name)) { return infer.cx().bool; } if (/^string$/i.test(name)) { return infer.cx().str; } if (/^array$/i.test(name)) { return new infer.Arr(innerType); } if (/^object$/i.test(name)) { var objType = new infer.Obj(true /* null proto */); if (innerType) { innerType.propagate(objType.defProp("<i>")); } return objType; } var ctorType = defineQualifiedName(name); if (!(ctorType.getType() instanceof infer.Fn)) { // Create a fake constructor function to stand in for the real one. var fakeFnType = new infer.Fn(name, infer.ANull /* self */, [] /* args */, [] /* argNames */, infer.ANull /* retVal */); // Propagate it with reduced weight so it will be overriden if the real // constructor function loads. fakeFnType.propagate(ctorType, Weight.TEMP_CTOR); } var type = new infer.AVal(); // Say that the final property type is the constructor of the commented value. ctorType.propagate(new infer.IsCtor(type)); return type; } /** * Defines the given fully-qualified name as a property on the global scope. If * any part of the name is already defined, uses the existing value. * @param {string} name * @return {AVal} The abstract value for the name. */ function defineQualifiedName(name) { // TODO: Deal with goog.scope. // TODO: Put in temp origin nodes? var parts = name.split('.'); var base = infer.cx().topScope; for (var i = 0; i < parts.length; i++) { var prop = base.defProp(parts[i]); if (prop.getType()) { base = prop.getType(); } else { base = new infer.Obj(true, parts.slice(0, i + 1).join('.')); base.propagate(prop, Weight.TEMP_OBJ); } } return prop; } /** * If the given node is associated with a function, gets the type value for the * function. * @param {!acorn.Node} node * @return {infer.Fn} */ function getFnType(node) { if (node.type == "VariableDeclaration") { var decl = node.declarations[0]; if (decl.init && decl.init.type == "FunctionExpression") { return decl.init.body.scope.fnType; } } else if (node.type == "FunctionDeclaration") { return node.body.scope.fnType; } else if (node.type == "AssignmentExpression" && node.right.type == "FunctionExpression") { return node.right.body.scope.fnType; } else if (node.value && node.value.type == "FunctionExpression") { // Object property. 
return node.value.body.scope.fnType; } return null; } /** * Sets the doc property for a type, but only if it is not a type literal (a doc * set on a type literal will be associated with all values of that type). * TODO: Consider indirection of type literals through AVals to store docs. * @param {(infer.AVal|infer.ANull|infer.Type)} type * @param {string} doc */ function setDoc(type, doc) { if (type instanceof infer.AVal) { type.doc = doc; } };
closure.js
'use strict'; var infer = require('tern/lib/infer'); var tern = require('tern/lib/tern'); var comment = require('tern/lib/comment'); var walk = require('acorn/util/walk'); var doctrine = require('doctrine'); var Weight = { TEMP_OBJ: 40, TEMP_CTOR: 50 } tern.registerPlugin('closure', function() { var defs = { '!name': 'closure', goog: { provide: 'fn(name: string) -> !custom:closureProvide', require: 'fn(name: string) -> !custom:closureRequire' }, }; return { passes: { 'postParse': postParse, 'preInfer': preInfer, 'postInfer': postInfer }, defs: defs }; }); infer.registerFunction('closureProvide', function(_self, args, argNodes) { if (!argNodes || !argNodes.length || argNodes[0].type != "Literal" || typeof argNodes[0].value != "string") return infer.ANull; defineQualifiedName(argNodes[0].value); return infer.ANull; }); infer.registerFunction('closureRequire', function(_self, args, argNodes) { if (!argNodes || !argNodes.length || argNodes[0].type != "Literal" || typeof argNodes[0].value != "string") return infer.ANull; defineQualifiedName(argNodes[0].value); return infer.ANull; }); /** * Walks the syntax tree after the Acorn parsing pass, parsing JSDoc comments * and attaching them to their corresponding nodes. * @param {!acorn.Node} ast * @param {string} text The file text. */ function postParse(ast, text) { function attachComments(node) { // TODO: Do our own comment-finding, handling casts. var comments = comment.commentsBefore(text, node.start); if (comments) { node._closureComment = doctrine.parse( '/*' + comments[comments.length - 1] + '*/', {unwrap: true}); } } // TODO: Handle property declarations with no initialization, e.g. // /** @type {BlahType} */ // Class.prototype.blah; walk.simple(ast, { VariableDeclaration: attachComments, FunctionDeclaration: attachComments, AssignmentExpression: function(node) { if (node.operator == '=') { attachComments(node); } }, ObjectExpression: function(node) { for (var i = 0; i < node.properties.length; ++i) { attachComments(node.properties[i].key); } } }); } /** * Identifies and initializes contructors before Tern's type inference pass. We * apply most JSDoc type information after the type inference pass initializes * the values. However, Tern misses pretty badly on constructors, so we identify * them beforehand. * @param {!acorn.Node} ast * @param {!infer.Scope} scope */ function preInfer(ast, scope) { walk.simple(ast, { VariableDeclaration: function(node, scope) { identifyConstructor(node, node._closureComment); }, FunctionDeclaration: function(node, scope) { identifyConstructor(node, node._closureComment); }, AssignmentExpression: function(node, scope) { identifyConstructor(node, node._closureComment); }, ObjectExpression: function(node, scope) { for (var i = 0; i < node.properties.length; ++i) { var prop = node.properties[i], key = prop.key; identifyConstructor(prop, key._closureComment); } } }, infer.searchVisitor, scope); }; /** * Applies type information from JSDoc comments to the initialized values after * Tern's type inference pass. 
* @param {!acorn.Node} ast * @param {!infer.Scope} scope */ function postInfer(ast, scope) { walk.simple(ast, { VariableDeclaration: function(node, scope) { interpretComments(node, node._closureComment, scope.getProp(node.declarations[0].id.name)); }, FunctionDeclaration: function(node, scope) { interpretComments( node, node._closureComment, scope.getProp(node.id.name)); }, AssignmentExpression: function(node, scope) { interpretComments(node, node._closureComment, infer.expressionType({node: node.left, state: scope})); }, ObjectExpression: function(node, scope) { for (var i = 0; i < node.properties.length; ++i) { var prop = node.properties[i], key = prop.key; interpretComments( prop, key._closureComment, node.objType.getProp(key.name)); } } }, infer.searchVisitor, scope); } /** * Looks for a constructor tag in the comments before a function and marks the * function as a constructor. * @param {!acorn.Node} node * @param {{description: string, tags: Array}} comment The Doctrine-parsed * comment before the node if present. */ function identifyConstructor(node, comment) { if (!comment) { return; } var fnType = getFnType(node); if (!fnType) { return; } for (var i = 0; i < comment.tags.length; i++) { if (comment.tags[i].title == 'constructor') { // Mark the function type a constructor by creating an object for the // prototype. // TODO: Handle inheritance. // TODO: Get the name in here, possibly in postInfer. var prototypeAval = fnType.defProp('prototype', node); var proto = new infer.Obj(true); proto.propagate(prototypeAval); } } } /** * Interpret the comments before an expression and apply type information from * the comments. * @param {!acorn.Node} node An Acorn AST node. * @param {{description: string, tags: Array}} comment The Doctrine-parsed * comment before the node if present. * @param {!infer.AVal} aval An abtract type value to which type information * should be applied. */ function interpretComments(node, comment, aval) { if (!comment) { return; } var argTypes, returnType, valueType; var argDocs, returnDoc, valueDoc; for (var i = 0; i < comment.tags.length; i++) { var tag = comment.tags[i]; var type; if (tag.type) { type = getExpressionAval(tag.type); } // TODO: Handle many other tags (inherits, implements, typedef, override...) switch (tag.title) { case 'type': case 'private': case 'protected': case 'public': // TODO: Add completion filtering based on access restrictions? valueType = type; valueDoc = tag.description; break; case 'return': case 'returns': returnType = type; returnDoc = tag.description; break; case 'param': case 'arg': case 'argument': (argTypes || (argTypes = Object.create(null)))[tag.name] = type; (argDocs || (argDocs = Object.create(null)))[tag.name] = tag.description; break; } } var fnType = getFnType(node); if (fnType) { // This comment applies to a function, and we have information to apply // to that function type. applyFnTypeInfo(fnType, argTypes, argDocs, returnType, returnDoc); if (comment.description) { fnType.doc = comment.description; } } else if (valueType) { // This comment applies to a variable or property. valueType.propagate(aval); setDoc(aval, comment.description || valueDoc); } } /** * Applies the given argument and return type information to the given function * type. * @param {!infer.Fn} fnType The function type to propagate to. * @param {Object.<infer.AVal>} argTypes A map of argument names to parsed * types. * @param {Object.<string>} argDocs Doc comments for the arguments. * @param {infer.AVal} returnType The parsed return type. 
* @param {string} returnDoc Doc comments for the return value. */ function applyFnTypeInfo(fnType, argTypes, argDocs, returnType, returnDoc) { if (argTypes) { for (var i = 0; i < fnType.argNames.length; i++) { var name = fnType.argNames[i]; var docArgType = argTypes[name]; // Propagate the documented type info to the inferred argument type. if (docArgType) { docArgType.propagate(fnType.args[i]); setDoc(fnType.args[i], argDocs[name]); } } } // Propagate any return type info. if (returnType) { returnType.propagate(fnType.retval); setDoc(fnType.retval, returnDoc); } } /** * Recursively process a JSDoc type expression to assemble a corresponding AVal. * @param {{type: string}} typeExpr A Doctrine parsed type expression. * @param {infer.AVal=} innerType The inner type, for type applications. * @return {infer.AVal} An abstract value for the type expression. */ function getExpressionAval(typeExpr, innerType) { var t; switch (typeExpr.type) { case doctrine.Syntax.NameExpression: return getQualifiedType(typeExpr.name, innerType); case doctrine.Syntax.NullableType: case doctrine.Syntax.NonNullableType: // TODO: Expose nullability information. return getExpressionAval(typeExpr.expression, innerType); case doctrine.Syntax.OptionalType: // TODO: Expose optional param information (orthogonal to nullability). return getExpressionAval(typeExpr.expression, innerType); case doctrine.Syntax.UnionType: // TODO: Decide if this behaves better with a custom synthetic 'Union' // type. var aval = new infer.AVal(); typeExpr.elements.forEach(function(subExpr) { if (t = getExpressionAval(subExpr, innerType)) { aval.addType(t); } }); return aval; case doctrine.Syntax.RestType: // TODO: Expose varargs. return getExpressionAval(typeExpr.expression, innerType); case doctrine.Syntax.RecordType: case doctrine.Syntax.FieldType: // TODO: Handle records. return null; case doctrine.Syntax.FunctionType: // TODO: Handle functions. return null; case doctrine.Syntax.TypeApplication: // TODO: Handle more exotic type applications? // We support type applications for array and object values. In those // cases, only the last applied type (the value type) is meaningful. return getExpressionAval(typeExpr.expression, getExpressionAval( typeExpr.applications[typeExpr.applications.length - 1], innerType)); case doctrine.Syntax.NullLiteral: case doctrine.Syntax.UndefinedLiteral: return infer.ANull; case doctrine.Syntax.NullableLiteral: case doctrine.Syntax.AllLiteral: case doctrine.Syntax.VoidLiteral: // No type to apply. return null; default: console.log('Unknown expression type: ' + typeExpr.type); return null; } } /** * Gets an AVal for a type, given its qualified name. Creates stand-in * AVals for types and namespaces that have not been defined yet. * @param {string} name The type name. * @param {infer.AVal=} innerType In the case of a type application, the * applied type. * @return {!infer.AVal} */ function getQualifiedType(name, innerType) { // Handle primitives. if (/^(number|integer)$/i.test(name)) { return infer.cx().num; } if (/^bool(ean)?$/i.test(name)) { return infer.cx().bool; } if (/^string$/i.test(name)) { return infer.cx().str; } if (/^array$/i.test(name)) { return new infer.Arr(innerType); } if (/^object$/i.test(name)) { var objType = new infer.Obj(true /* null proto */); if (innerType) { innerType.propagate(objType.defProp("<i>")); } return objType; } var ctorType = defineQualifiedName(name); if (!(ctorType.getType() instanceof infer.Fn)) { // Create a fake constructor function to stand in for the real one. 
var fakeFnType = new infer.Fn(name, infer.ANull /* self */, [] /* args */, [] /* argNames */, infer.ANull /* retVal */); // Propagate it with reduced weight so it will be overriden if the real // constructor function loads. fakeFnType.propagate(ctorType, Weight.TEMP_CTOR); } var type = new infer.AVal(); // Say that the final property type is the constructor of the commented value. ctorType.propagate(new infer.IsCtor(type)); return type; } /** * Defines the given fully-qualified name as a property on the global scope. If * any part of the name is already defined, uses the existing value. * @param {string} name * @return {AVal} The abstract value for the name. */ function defineQualifiedName(name) { // TODO: Deal with goog.scope. // TODO: Put in temp origin nodes? var parts = name.split('.'); var base = infer.cx().topScope; for (var i = 0; i < parts.length; i++) { var prop = base.defProp(parts[i]); if (prop.getType()) { base = prop.getType(); } else { base = new infer.Obj(true, parts.slice(0, i + 1).join('.')); base.propagate(prop, Weight.TEMP_OBJ); } } return prop; } /** * If the given node is associated with a function, gets the type value for the * function. * @param {!acorn.Node} node * @return {infer.Fn} */ function getFnType(node) { if (node.type == "VariableDeclaration") { var decl = node.declarations[0]; if (decl.init && decl.init.type == "FunctionExpression") { return decl.init.body.scope.fnType; } } else if (node.type == "FunctionDeclaration") { return node.body.scope.fnType; } else if (node.type == "AssignmentExpression" && node.right.type == "FunctionExpression") { return node.right.body.scope.fnType; } else if (node.value && node.value.type == "FunctionExpression") { // Object property. return node.value.body.scope.fnType; } return null; } /** * Sets the doc property for a type, but only if it is not a type literal (a doc * set on a type literal will be associated with all values of that type). * TODO: Consider indirection of type literals through AVals to store docs. * @param {(infer.AVal|infer.ANull|infer.Type)} type * @param {string} doc */ function setDoc(type, doc) { if (type instanceof infer.AVal) { type.doc = doc; } };
Remove custom constructor detection. Tern is handling constructors fine without the help.
closure.js
Remove custom constructor detection.
<ide><path>losure.js <ide> return { <ide> passes: { <ide> 'postParse': postParse, <del> 'preInfer': preInfer, <ide> 'postInfer': postInfer <ide> }, <ide> defs: defs <ide> <ide> <ide> /** <del> * Identifies and initializes contructors before Tern's type inference pass. We <del> * apply most JSDoc type information after the type inference pass initializes <del> * the values. However, Tern misses pretty badly on constructors, so we identify <del> * them beforehand. <del> * @param {!acorn.Node} ast <del> * @param {!infer.Scope} scope <del> */ <del>function preInfer(ast, scope) { <del> walk.simple(ast, { <del> VariableDeclaration: function(node, scope) { <del> identifyConstructor(node, node._closureComment); <del> }, <del> FunctionDeclaration: function(node, scope) { <del> identifyConstructor(node, node._closureComment); <del> }, <del> AssignmentExpression: function(node, scope) { <del> identifyConstructor(node, node._closureComment); <del> }, <del> ObjectExpression: function(node, scope) { <del> for (var i = 0; i < node.properties.length; ++i) { <del> var prop = node.properties[i], key = prop.key; <del> identifyConstructor(prop, key._closureComment); <del> } <del> } <del> }, infer.searchVisitor, scope); <del>}; <del> <del> <del>/** <ide> * Applies type information from JSDoc comments to the initialized values after <ide> * Tern's type inference pass. <ide> * @param {!acorn.Node} ast <ide> } <ide> } <ide> }, infer.searchVisitor, scope); <del>} <del> <del> <del>/** <del> * Looks for a constructor tag in the comments before a function and marks the <del> * function as a constructor. <del> * @param {!acorn.Node} node <del> * @param {{description: string, tags: Array}} comment The Doctrine-parsed <del> * comment before the node if present. <del> */ <del>function identifyConstructor(node, comment) { <del> if (!comment) { <del> return; <del> } <del> var fnType = getFnType(node); <del> if (!fnType) { <del> return; <del> } <del> <del> for (var i = 0; i < comment.tags.length; i++) { <del> if (comment.tags[i].title == 'constructor') { <del> // Mark the function type a constructor by creating an object for the <del> // prototype. <del> // TODO: Handle inheritance. <del> // TODO: Get the name in here, possibly in postInfer. <del> var prototypeAval = fnType.defProp('prototype', node); <del> var proto = new infer.Obj(true); <del> proto.propagate(prototypeAval); <del> } <del> } <ide> } <ide> <ide>
Java
agpl-3.0
07cf2754a4cc4dce5e8f1b656944164688260dd3
0
acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling
/* * ################################################################ * * ProActive: The Java(TM) library for Parallel, Distributed, * Concurrent computing with Security and Mobility * * Copyright (C) 1997-2008 INRIA/University of Nice-Sophia Antipolis * Contact: [email protected] * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version * 2 of the License, or any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * Initial developer(s): The ProActive Team * http://proactive.inria.fr/team_members.htm * Contributor(s): * * ################################################################ * $$PROACTIVE_INITIAL_DEV$$ */ package org.ow2.proactive.resourcemanager.utils; import java.io.File; import java.util.Collection; import java.util.LinkedList; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.Parser; import org.apache.log4j.Logger; import org.objectweb.proactive.core.util.log.ProActiveLogger; import org.ow2.proactive.resourcemanager.RMFactory; import org.ow2.proactive.resourcemanager.core.properties.PAResourceManagerProperties; /** * Class with main which instantiates a Resource Manager. * * @author The ProActive Team * @since ProActive Scheduling 0.9 */ public class RMStarter { /** * Log4j logger name. 
*/ private static Logger logger = ProActiveLogger.getLogger(RMLoggers.RMLAUNCHER); private static Options options = new Options(); private static void initOptions() { Option help = new Option("h", "help", false, "to display this help"); help.setArgName("help"); help.setRequired(false); options.addOption(help); Option deploy = new Option("d", "deploy", true, "list of GCM deployment descriptors files"); deploy.setArgName("deploy"); deploy.setRequired(false); deploy.setArgs(Option.UNLIMITED_VALUES); options.addOption(deploy); Option noDeploy = new Option("n", "nodeploy", false, "start Resource Manager without deploying default 4 local nodes"); noDeploy.setArgName("nodeploy"); noDeploy.setRequired(false); options.addOption(noDeploy); } private static void displayHelp() { System.out.println("\nLaunch ProActive Resource Manager."); System.out.println("Without arguments, Resource Manager is launched with 4 " + "computing nodes on local machine.\n"); new HelpFormatter().printHelp("scheduler", options, true); System.exit(2); } /** * main function * @param args * @throws Exception */ public static void main(String[] args) { initOptions(); Parser parser = new GnuParser(); CommandLine cmd; String[] gcmdList = null; try { cmd = parser.parse(options, args); if (cmd.hasOption("h")) { displayHelp(); } else if (cmd.hasOption("d") && cmd.hasOption("n")) { System.out .println("\nError, you cannot specify a deployment (-d|--deploy) and ask to deploy nothing (-n|--nodeply) !"); displayHelp(); } else if (cmd.hasOption("d")) { // checking that all specified files are exist gcmdList = cmd.getOptionValues("d"); for (String gcmdPath : gcmdList) { if (!(new File(gcmdPath)).exists()) { System.out.println("Error, cannot find GCM deployment file " + gcmdPath); System.exit(2); } } } logger.info("STARTING RESOURCE MANAGER: Press 'e' to shutdown."); RMFactory.setOsJavaProperty(); if (cmd.hasOption("n")) { // starting clean resource manager RMFactory.startLocal(); } else { Collection<String> deploymentDescriptors = new LinkedList<String>(); if (cmd.hasOption("d")) { for (String desc : gcmdList) { deploymentDescriptors.add(desc); } } else { String gcmDeployFile = PAResourceManagerProperties.RM_HOME.getValueAsString() + File.separator + "config/deployment/Local4JVMDeployment.xml"; deploymentDescriptors.add(gcmDeployFile); } // starting resource manager and deploy given infrastructure RMFactory.startLocal(deploymentDescriptors); } while (System.in.read() != 'e') { logger.info("Press 'e' to shutdown."); } logger.info("Shuting down the resource manager"); // shutdown hook of rmcore should deal with node sources } catch (ParseException e1) { displayHelp(); } catch (Exception e) { e.printStackTrace(); System.exit(2); } System.exit(0); } }
src/resource-manager/src/org/ow2/proactive/resourcemanager/utils/RMStarter.java
/* * ################################################################ * * ProActive: The Java(TM) library for Parallel, Distributed, * Concurrent computing with Security and Mobility * * Copyright (C) 1997-2008 INRIA/University of Nice-Sophia Antipolis * Contact: [email protected] * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version * 2 of the License, or any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * Initial developer(s): The ProActive Team * http://proactive.inria.fr/team_members.htm * Contributor(s): * * ################################################################ * $$PROACTIVE_INITIAL_DEV$$ */ package org.ow2.proactive.resourcemanager.utils; import java.io.File; import java.util.Collection; import java.util.LinkedList; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.Parser; import org.apache.log4j.Logger; import org.objectweb.proactive.core.util.log.ProActiveLogger; import org.ow2.proactive.resourcemanager.RMFactory; import org.ow2.proactive.resourcemanager.core.properties.PAResourceManagerProperties; /** * Class with main which instantiates a Resource Manager. * * @author The ProActive Team * @since ProActive Scheduling 0.9 */ public class RMStarter { /** * Log4j logger name. 
*/ private static Logger logger = ProActiveLogger.getLogger(RMLoggers.RMLAUNCHER); private static Options options = new Options(); private static void initOptions() { Option help = new Option("h", "help", false, "to display this help"); help.setArgName("help"); help.setRequired(false); options.addOption(help); Option deploy = new Option("d", "deploy", true, "list of GCM deployment descriptors files"); deploy.setArgName("deploy"); deploy.setRequired(false); deploy.setArgs(Option.UNLIMITED_VALUES); options.addOption(deploy); Option noDeploy = new Option("n", "nodeploy", false, "start Resource Manager without deploying default 4 local nodes"); noDeploy.setArgName("nodeploy"); noDeploy.setRequired(false); options.addOption(noDeploy); } private static void displayHelp() { System.out.println("\nLaunch ProActive Resource Manager."); System.out.println("Without arguments, Resource Manager is launched with 4 " + "computing nodes on local machine.\n"); new HelpFormatter().printHelp("scheduler", options, true); System.exit(2); } /** * main function * @param args * @throws Exception */ public static void main(String[] args) { initOptions(); Parser parser = new GnuParser(); CommandLine cmd; String[] gcmdList = null; try { cmd = parser.parse(options, args); if (cmd.hasOption("h")) { displayHelp(); } else if (cmd.hasOption("d") && cmd.hasOption("n")) { System.out .println("\nError, you cannot specify a deployment (-d|--deploy) and ask to deploy nothing (-n|--nodeply) !"); displayHelp(); } else if (cmd.hasOption("d")) { // checking that all specified files are exist gcmdList = cmd.getOptionValues("d"); for (String gcmdPath : gcmdList) { if (!(new File(gcmdPath)).exists()) { System.out.println("Error, cannot find GCM deployment file " + gcmdPath); System.exit(2); } } } logger.info("STARTING RESOURCE MANAGER: Press 'e' to shutdown."); if (cmd.hasOption("n")) { // starting clean resource manager RMFactory.startLocal(); } else { Collection<String> deploymentDescriptors = new LinkedList<String>(); if (cmd.hasOption("d")) { for (String desc : gcmdList) { deploymentDescriptors.add(desc); } } else { RMFactory.setOsJavaProperty(); String gcmDeployFile = PAResourceManagerProperties.RM_HOME.getValueAsString() + File.separator + "config/deployment/Local4JVMDeployment.xml"; deploymentDescriptors.add(gcmDeployFile); } // starting resource manager and deploy given infrastructure RMFactory.startLocal(deploymentDescriptors); } while (System.in.read() != 'e') { logger.info("Press 'e' to shutdown."); } logger.info("Shuting down the resource manager"); // shutdown hook of rmcore should deal with node sources } catch (ParseException e1) { displayHelp(); } catch (Exception e) { e.printStackTrace(); System.exit(2); } System.exit(0); } }
Set javaProperty in RMStarter every time. git-svn-id: 27916816d6cfa57849e9a885196bf7392b80e1ac@11362 28e8926c-6b08-0410-baaa-805c5e19b8d6
src/resource-manager/src/org/ow2/proactive/resourcemanager/utils/RMStarter.java
Set javaProperty in RMStarter every time.
<ide><path>rc/resource-manager/src/org/ow2/proactive/resourcemanager/utils/RMStarter.java <ide> } <ide> <ide> logger.info("STARTING RESOURCE MANAGER: Press 'e' to shutdown."); <add> RMFactory.setOsJavaProperty(); <ide> <ide> if (cmd.hasOption("n")) { <ide> // starting clean resource manager <ide> deploymentDescriptors.add(desc); <ide> } <ide> } else { <del> RMFactory.setOsJavaProperty(); <ide> String gcmDeployFile = PAResourceManagerProperties.RM_HOME.getValueAsString() + <ide> File.separator + "config/deployment/Local4JVMDeployment.xml"; <ide> deploymentDescriptors.add(gcmDeployFile);
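The RMStarter diff above is a hoisting refactor: `RMFactory.setOsJavaProperty()` used to run only on the default local-deployment branch and is moved ahead of the branching so every start-up path executes it. A hedged sketch of the same move, with invented method names standing in for the real start-up paths:

```java
public class HoistInitSketch {

    // Before: the initialization only happened on one branch.
    static void startBefore(boolean customDeployment) {
        if (customDeployment) {
            startCustomDeployment();
        } else {
            setOsJavaProperty();        // skipped whenever customDeployment is true
            startDefaultDeployment();
        }
    }

    // After: the call is hoisted ahead of the branch so both paths run it.
    static void startAfter(boolean customDeployment) {
        setOsJavaProperty();            // now unconditional, as in the diff
        if (customDeployment) {
            startCustomDeployment();
        } else {
            startDefaultDeployment();
        }
    }

    static void setOsJavaProperty()      { System.out.println("property set"); }
    static void startCustomDeployment()  { System.out.println("custom deployment"); }
    static void startDefaultDeployment() { System.out.println("default deployment"); }

    public static void main(String[] args) {
        startAfter(true);   // property set, then custom deployment
        startAfter(false);  // property set, then default deployment
    }
}
```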
Java
bsd-2-clause
545cefed0d7b69f8950930a0c5e2122cd59000bc
0
dragondgold/MultiWork
package com.protocolanalyzer.andres; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import org.achartengine.ChartFactory; import org.achartengine.GraphicalView; import org.achartengine.chart.PointStyle; import org.achartengine.model.XYMultipleSeriesDataset; import org.achartengine.model.XYSeries; import org.achartengine.renderer.XYMultipleSeriesRenderer; import org.achartengine.renderer.XYSeriesRenderer; import org.achartengine.util.MathHelper; import android.annotation.SuppressLint; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.graphics.Color; import android.graphics.Bitmap.CompressFormat; import android.graphics.Paint.Align; import android.graphics.Paint.Style; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.Vibrator; import android.preference.PreferenceManager; import android.text.Editable; import android.util.Log; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.Toast; import com.actionbarsherlock.app.ActionBar; import com.actionbarsherlock.app.SherlockFragment; import com.actionbarsherlock.app.SherlockFragmentActivity; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuInflater; import com.actionbarsherlock.view.MenuItem; import com.actionbarsherlock.view.ActionMode; import com.multiwork.andres.ConfirmDialog; import com.multiwork.andres.R; import com.protocolanalyzer.api.andres.LogicData; @SuppressLint("ValidFragment") public class LogicAnalizerChartFragment extends SherlockFragment implements OnDataDecodedListener{ /** Debugging */ private static final boolean DEBUG = true; /** Valores del eje Y que son tomados como inicio ('0' lógico) para las Series de los canales de entrada */ private static final float yChannel[] = {12f, 8f, 4f, 0}; /** Cuanto se incrementa en el eje Y para hacer un '1' logico */ private static final float bitScale = 1.00f; /** Valor del eje X maximo inicial */ private static final double xMax = 10; /** Colores de linea para cada canal */ private static final int lineColor[] = {Color.RED, Color.GREEN, Color.BLUE, Color.YELLOW}; /** Constantes */ private static final int CONFIRM_DIALOG = 0, RESULT_OK = -1; /** Vibrador del dispositivo */ private static Vibrator mVibrator; /** Directorio para guardar las sesiones */ private static String sesionDirectory; /** Directorio para guardar las imagenes */ private static String imagesDirectory; /** ActionBar */ private static ActionBar mActionBar; /** Handler para la actualizacion del grafico en el UI Thread */ private static Handler mUpdaterHandler = new Handler(); /** Tiempo que va transcurriendo (eje x del grafico) */ private static double time = 0.0d; /** Cuantos segundos representa un cuadrito (una unidad) en el grafico */ private static double timeScale; /** Numero maximo de muestras en las series (osea en el grafico) */ private static int maxSamples = 5; private static int currentSamples = 0; /** Serie que muestra los '1' y '0' de cada canal */ private static XYSeries[] mSerie = new XYSeries[LogicAnalizerActivity.channelsNumber]; /** Renderer para cada Serie, indica color, tamaño, etc */ 
private static XYSeriesRenderer[] mRenderer = new XYSeriesRenderer[LogicAnalizerActivity.channelsNumber]; /** Dataset para agrupar las Series */ private static XYMultipleSeriesDataset mSerieDataset = new XYMultipleSeriesDataset(); /** Dataser para agrupar los Renderer */ private static XYMultipleSeriesRenderer mRenderDataset = new XYMultipleSeriesRenderer(); private static GraphicalView mChartView; private static SherlockFragmentActivity mActivity; private static OnActionBarClickListener mActionBarListener; /** Coordenadas de inicio cuando se toco por primera vez el touchscreen */ private static float x = 0, y = 0; /** Indica si se esta deslizando el dedo en vez de mantenerlo apretado */ private static boolean isMoving = false; /** Indica si se esta sosteniendo el dedo sobre la pantalla (long-press) */ private static boolean fingerStillDown = false; /** Dato decodificado desde LogicHelper para ser mostrado en el grafico, contiene las posiciones para mostar * el tipo de protocolo, etc * @see LogicData.java */ private static LogicData[] mData = new LogicData[LogicAnalizerActivity.channelsNumber]; private static boolean firstTime = true; private static int samplesNumber = 0; // Constructor public LogicAnalizerChartFragment(LogicData[] data) { mData = data; } @Override public double onDataDecodedListener(LogicData[] mLogicData, int samplesCount, boolean isConfig) { if(DEBUG) Log.i("mFragmentChart","onDataDecoded() - isConfig: " + isConfig); if(DEBUG) Log.i("mFragmentChart","Data: " + mLogicData.toString()); // Si se cambiaron las configuraciones las actualizo if(isConfig) setChartPreferences(); else{ mData = mLogicData; samplesNumber = samplesCount; mUpdaterHandler.post(mUpdaterTask); // Configuro las variables en base a las preferencias la primera vez unicamente if(firstTime){ setChartPreferences(); firstTime = false; } } return 0; } // Creacion del Fragment @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Log.i("mFragmentChart", "onCreate()"); mActivity = getSherlockActivity(); // Obtengo la Activity que contiene el Fragment mActionBar = mActivity.getSupportActionBar(); // Obtengo el ActionBar mActionBar.setDisplayHomeAsUpEnabled(true); // El icono de la aplicacion funciona como boton HOME mActionBar.setTitle(getString(R.string.AnalyzerName)) ; // Nombre this.setHasOptionsMenu(true); for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n) { // Crea las Serie que es una linea en el grafico (cada una de las entradas) mSerie[n] = new XYSeries(getString(R.string.AnalyzerName) + n); mRenderer[n] = new XYSeriesRenderer(); // Creo el renderer de la Serie mRenderDataset.addSeriesRenderer(mRenderer[n]); // Agrego el renderer al Dataset mSerieDataset.addSeries(mSerie[n]); // Agrego la seria al Dataset mRenderer[n].setColor(lineColor[n]); // Color de la Serie mRenderer[n].setFillPoints(true); mRenderer[n].setPointStyle(PointStyle.CIRCLE); mRenderer[n].setLineWidth(2f); mRenderer[n].getTextPaint().setTextSize(30); // Tamaño del texto mRenderer[n].getTextPaint().setColor(Color.WHITE); mRenderer[n].getTextPaint().setTypeface(android.graphics.Typeface.DEFAULT_BOLD); mRenderer[n].getTextPaint().setTextAlign(Align.CENTER); mRenderer[n].getRectPaint().setColor(Color.WHITE); mRenderer[n].getRectPaint().setStrokeWidth(2f); mRenderer[n].getRectPaint().setStyle(Style.STROKE); } // Configuraciones generales mRenderDataset.setYTitle(getString(R.string.AnalyzerYTitle)); mRenderDataset.setAntialiasing(true); mRenderDataset.setYAxisMax(yChannel[0]+4); 
mRenderDataset.setXAxisMin(0); mRenderDataset.setXAxisMax(xMax); mRenderDataset.setPanEnabled(true); mRenderDataset.setShowGrid(true); mRenderDataset.setPointSize(4f); mRenderDataset.setExternalZoomEnabled(true); mRenderDataset.setPanEnabled(true, false); mRenderDataset.setZoomEnabled(true, false); mRenderDataset.setPanLimits(new double[] {0d , Double.MAX_VALUE, -1d, yChannel[0]+4}); mChartView = ChartFactory.getLineChartView(mActivity, mSerieDataset, mRenderDataset); setChartPreferences(); // Obtengo el OnActionBarClickListener de la Activity que creo este Fragment try { mActionBarListener = (OnActionBarClickListener) mActivity; } catch (ClassCastException e) { throw new ClassCastException(mActivity.toString() + " must implement OnActionBarClickListener"); } // Vibrador mVibrator = (Vibrator) mActivity.getSystemService(Context.VIBRATOR_SERVICE); final Runnable longClickRun = new Runnable() { @Override public void run() { if(fingerStillDown && !isMoving) { if(DEBUG) Log.i("Runnable longClickRun()", "LONG CLICK"); mVibrator.vibrate(80); // Vibro e inicio el ActionMode mActivity.startActionMode(new ActionModeEnable()); } } }; mChartView.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { // Si me movi al menos 20 unidades en cualquier direccion ya se toma como scroll NO long-press if(Math.abs(event.getX() - x) > 20 || Math.abs(event.getY() - y) > 20) { isMoving = true; } // Obtengo las coordenadas iniciales para tomar el movimiento if(event.getAction() == MotionEvent.ACTION_DOWN) { x = event.getX(); y = event.getY(); fingerStillDown = true; isMoving = false; mChartView.postDelayed(longClickRun, 1000); // En 1000mS se iniciara el Long-Press } // Si levanto el dedo ya no cuenta para el long-press else if(event.getAction() == MotionEvent.ACTION_UP){ mChartView.removeCallbacks(longClickRun); // Elimino el postDelayed() fingerStillDown = false; isMoving = false; x = y = 0; } // Sleep por 50mS para que no este continuamente testeando y ahorre recursos (no hace falta gran velocidad) try { Thread.sleep(50); } catch (InterruptedException e) { e.printStackTrace(); } // return false; da lugar a que se analizen otros eventos de touch (como cuando deslizamos el grafico). Si fuera // true el grafico no se desplazaría porque este se activa primero y evita al otro return false; } }); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { menu.clear(); inflater.inflate(R.menu.actionbar_logicchart, menu); super.onCreateOptionsMenu(menu, inflater); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { Log.i("mFragmentChart", "onCreateView()"); // Renderizo el layout return inflater.inflate(R.layout.logicanalizer, container, false); } @Override public void onResume() { super.onResume(); if(DEBUG) Log.i("mFragmentChart","onResume()"); // Elimino primero el View porque si ya esta agregado genera una excepcion ((FrameLayout) mActivity.findViewById(R.id.mChart)).removeViewInLayout(mChartView); // Agrego un View al layout que se renderizo en onCreateView. No puedo hacerlo antes porque dentro de // onCreateView() el layout no se renderizo y por lo tanto es null. 
((FrameLayout) mActivity.findViewById(R.id.mChart)).addView(mChartView); } // Activa el ActionMode del ActionBar private final class ActionModeEnable implements ActionMode.Callback { @Override public boolean onCreateActionMode(ActionMode mode, Menu menu) { if(DEBUG) Log.i("ActionMode", "Create"); MenuInflater inflater = mActivity.getSupportMenuInflater(); inflater.inflate(R.menu.actionmodelogic, menu); return true; } @Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) { if(DEBUG) Log.i("ActionMode", "Prepare"); return false; } // Al presionar iconos en el ActionMode @Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) { if(DEBUG) Log.i("ActionMode", "Item clicked: " + item.getItemId() + " - " + item.getTitle()); switch(item.getItemId()) { case R.id.restartLogic: mActionBarListener.onActionBarClickListener(R.id.restartLogic); restart(); break; case R.id.saveLogic: createDialog(); break; } mode.finish(); return true; } @Override public void onDestroyActionMode(ActionMode mode) { if(DEBUG) Log.i("ActionMode", "Destroy"); } } // Listener de los items en el ActionBar @Override public boolean onOptionsItemSelected(MenuItem item) { if(DEBUG) Log.i("mFragmentChart", "ActionBar -> " + item.getTitle()); switch(item.getItemId()){ case R.id.zoomInLogic: mChartView.zoomIn(); break; case R.id.zoomOutLogic: mChartView.zoomOut(); break; } return true; } /** * Viene aqui cuando se vuelve de la Activity de las preferences al ser llamada con startActivityForResult() de este * modo actualizo las preferencias */ @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if(DEBUG) Log.i("mFragmentChart", "Activity Result"); if(DEBUG) Log.i("mFragmentChart", "resultCode: " + resultCode); if(DEBUG) Log.i("mFragmentChart", "requestCode: " + requestCode); // Dialogo de confirmación para guardar imagen else if(requestCode == CONFIRM_DIALOG){ if(resultCode == RESULT_OK){ if(DEBUG) Log.i("ActivityResult", "Confirm Dialog OK"); Bitmap bitmap = mChartView.toBitmap(); // Creo un nuevo BitMap try { FileOutputStream output = new FileOutputStream(new File(Environment.getExternalStorageDirectory().getPath() + imagesDirectory + data.getExtras().getString("text") + ".jpeg")); // Guardo la imagen con el nombre bitmap.compress(CompressFormat.JPEG, 95, output); // Formato JPEG } catch (FileNotFoundException e) { e.printStackTrace(); } mActivity.runOnUiThread(new Runnable() { @Override // Toast en el UI Thread public void run() { Toast.makeText(mActivity, getString(R.string.AnalyzerDialogFileSaved), Toast.LENGTH_SHORT).show(); } }); } } } // Reinicia el gŕafico y las variables involucradas private void restart() { for(int n = 0; n < LogicAnalizerActivity.channelsNumber; ++n) { mSerie[n].clear(); } mRenderDataset.setXAxisMax(xMax); mRenderDataset.setXAxisMin(0); time = 0; mChartView.repaint(); Toast.makeText(mActivity, getString(R.string.FrecReinicio), Toast.LENGTH_SHORT).show(); } /** * Crea una ventana preguntando al usuario si desea guardar la sesion o una imagen del grafico * @author Andres Torti * @see http://developer.android.com/guide/topics/ui/menus.html */ private void createDialog() { final CharSequence[] items = {getString(R.string.AnalyzerImagen), getString(R.string.AnalyzerSesion)}; AlertDialog.Builder alert = new AlertDialog.Builder(mActivity); alert.setTitle(getString(R.string.AnalyzerDialogSaveTitle)); alert.setItems(items, new DialogInterface.OnClickListener() { public void 
onClick(DialogInterface dialog, int item) { if(item == 0) { saveImageDialog(); } else { saveSesionDialog(); } } }); alert.show(); } /** * Guarda un screenshot del grafico actual en la tarjeta de memoria * @author Andres Torti */ private void saveImageDialog() { // Creo el dialogo AlertDialog.Builder alert = new AlertDialog.Builder(mActivity); alert.setTitle(getString(R.string.AnalyzerDialogSaveTitle)); alert.setMessage(getString(R.string.AnalyzerDialogFileName)); // Creamos un EditView para que el usuario escriba final EditText input = new EditText(mActivity); alert.setView(input); // Creamos el boton OK y su onClickListener alert.setPositiveButton(getString(R.string.Ok), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { Editable text = input.getText(); // Obtengo el texto que escribio el usuario // Creo un nuevo archivo con el nombre del usuario y extension .jpeg // Verifico que pueda escribir en la SD if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) { // Creo el directorio si ya existe no hace nada new File(Environment.getExternalStorageDirectory().getPath() + imagesDirectory).mkdirs(); // Creo el archivo final File imageFile = new File(Environment.getExternalStorageDirectory().getPath() + imagesDirectory + text.toString() + ".jpeg"); // Si el archivo ya existe pregunto por confirmacion if(imageFile.exists()){ if(DEBUG) Log.i("Dialog", "File exists"); // Creo un diálogo preguntando por confirmación de sobreescribir el archivo y paso el nombre del archivo startActivityForResult(new Intent(mActivity, ConfirmDialog.class).putExtra("text", text.toString()), CONFIRM_DIALOG); } // Si no existe el archivo directamente lo guardo else { if(DEBUG) Log.i("Dialog", "File doesn't exists)"); Bitmap bitmap = mChartView.toBitmap(); // Creo un nuevo BitMap try { // Formato JPEG, 95% de calidad guardado en imageFile bitmap.compress(CompressFormat.JPEG, 95, new FileOutputStream(imageFile)); } catch (FileNotFoundException e) { e.printStackTrace(); } Toast.makeText(mActivity, getString(R.string.AnalyzerDialogFileSaved), Toast.LENGTH_SHORT).show(); } } // Si no se puede escribir en la tarjeta SD muestro un Toast con error else { Toast.makeText(mActivity, getString(R.string.AnalyzerDialogFileNotSaved), Toast.LENGTH_LONG).show(); } dialog.dismiss(); } }); // Boton cancelar alert.setNegativeButton(getString(R.string.Cancel), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { dialog.dismiss(); } }); alert.show(); } // Guarda la sesion actual, osea los datos contenidos en las series y el grafico //TODO: hay que hacer que se pueda abrir la sesion con un explorador de archivos //TODO: guardar las anotaciones del grafico tambien private void saveSesionDialog() { AlertDialog.Builder dialog = new AlertDialog.Builder(mActivity); dialog.setTitle(getString(R.string.AnalyzerDialogSaveTitle)); dialog.setMessage(getString(R.string.AnalyzerDialogFileName)); // Creamos un EditView para que el usuario escriba final EditText textInput = new EditText(mActivity); dialog.setView(textInput); dialog.setPositiveButton("Ok", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface arg0, int arg1) { new Thread(new Runnable() { // Creo un nuevo Thread para evitar bloquear el UI Thread @Override // (Guardar el archivos lleva algunos segundos a veces) public void run() { try { // Creo el directorio por si no existe new File(Environment.getExternalStorageDirectory().getPath() + 
sesionDirectory).mkdirs(); // Creo el archivo final File path = new File(Environment.getExternalStorageDirectory().getPath() + sesionDirectory + textInput.getText().toString() + ".ms"); final FileOutputStream fos = new FileOutputStream(path); // Guardo las Series ObjectOutputStream os = new ObjectOutputStream(fos); os.writeInt(LogicAnalizerActivity.channelsNumber); // Numero de canales que voy a guardar for(int n = 0; n < LogicAnalizerActivity.channelsNumber; ++n) { os.writeObject(mSerie[n]); } os.close(); } catch (IOException e) { e.printStackTrace(); } mActivity.runOnUiThread(new Runnable() { // El Toast debe mostrarse en el Thread de la UI @Override public void run() { Toast.makeText(mActivity, getString(R.string.AnalyzerDialogSesionSaved), Toast.LENGTH_SHORT).show(); } }); } }).start(); } }); dialog.setNegativeButton("Cancel", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); dialog.show(); } /** * Los Handlers ejecutan sus operaciones en el Thread de la UI haciendo posible la modificacion de la misma desde Threads no UI. * @author Andres Torti * @see http://developer.android.com/guide/topics/fundamentals/processes-and-threads.html * @see http://developer.android.com/reference/android/os/Handler.html * @see http://developer.android.com/resources/articles/timed-ui-updates.html * @see http://stackoverflow.com/questions/10405773/how-to-use-preferencefragment/10405850#comment13428324_10405850 */ final private Runnable mUpdaterTask = new Runnable() { @Override public void run() { if(DEBUG) { for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n) { for(int i = 0; i < mData[n].getStringCount(); ++i) { Log.i("Data", "Data[" + n + "]: " + mData[n].getString(i)); } } } // Si los bit son 1 le sumo 1 a los valores tomados como 0 logicos for(int n=0; n <samplesNumber; ++n){ for(int channel=0; channel < LogicAnalizerActivity.channelsNumber; ++channel){ if(mData[channel].getBits().get(n)){ // Si es 1 // Nivel tomado como 0 + un alto de bit mSerie[channel].add(toCoordinate(time, timeScale), yChannel[channel]+bitScale); } else{ // Si es 0 mSerie[channel].add(toCoordinate(time, timeScale), yChannel[channel]); } } //Si llego al maximo del cuadro (borde derecho) aumento el maximo y el minimo para dibujar un tiempo mas //(desplazamiento del cuadro) de esta manera si deslizamos el cuadro horizontalmente tendremos los datos if(toCoordinate(time, timeScale) >= xMax){ //if(DEBUG) Log.i("Move", "Chart moved"); mRenderDataset.setXAxisMax(mRenderDataset.getXAxisMax()+1d); mRenderDataset.setXAxisMin(mRenderDataset.getXAxisMin()+1d); } time += 1.0d/LogicData.getSampleRate(); // Incremento el tiempo } // Agrego un espacio para indicar que el buffer de muestreo llego hasta aqui time += (10*timeScale); for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n){ mSerie[n].add(mSerie[n].getX(mSerie[n].getItemCount()-1)+0.0000001d, MathHelper.NULL_VALUE); } for(int n = 0; n < LogicAnalizerActivity.channelsNumber; ++n){ for(int i = 0; i < mData[n].getStringCount(); ++i){ // Agrego el texto en el centro del area de tiempo que contiene el string mSerie[n].addAnnotation(mData[n].getString(i), toCoordinate(mData[n].getPositionAt(i)[0]+((mData[n].getPositionAt(i)[1]-mData[n].getPositionAt(i)[0])/2.0d), timeScale), yChannel[n]+2f); // Agrego el recuadro mSerie[n].addRectangle(toCoordinate(mData[n].getPositionAt(i)[0], timeScale)+0.0000001, yChannel[n]+3.5f, toCoordinate(mData[n].getPositionAt(i)[1], timeScale), yChannel[n]+bitScale+0.5f); } } // Encuadro el 
área y redibujo mRenderDataset.setXAxisMax(mRenderDataset.getXAxisMax()+1); mChartView.repaint(); // Redibujo el grafico ++currentSamples; // Si me paso de las muestras borro los canales if(currentSamples > maxSamples){ for(int n = 0; n < LogicAnalizerActivity.channelsNumber; n++) mSerie[n].clear(); currentSamples = 0; } // Cada vez que recibo un buffer del analizador logico, lo muestro todo y pauso mActionBarListener.onActionBarClickListener(R.id.PlayPauseLogic); } }; /** * Convierte el tiempo en segundo a la escala del grafico segunda la escala de tiempos * @param time tiempo en segundos * @param timeScale cuantos segundos equivalen a una unidad en el grafico * @return coordenada equivalente */ private static double toCoordinate (double time, double timeScale){ return (time/timeScale); } // Define los parametros de acuerdo a las preferencias private void setChartPreferences() { SharedPreferences getPrefs = PreferenceManager.getDefaultSharedPreferences(mActivity); for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n){ // Seteo el protocolo para cada canal switch(Byte.decode(getPrefs.getString("protocol" + (n+1), "0"))){ case 0: // I2C mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + " [I2C]"); break; case 1: // UART mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + " [UART]"); break; case 2: // CLOCK mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + "[CLK]"); break; case 3: // NONE mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + "[---]"); break; } } // Directorios para guardar las imagenes y sesiones imagesDirectory = getPrefs.getString("logicImageSave","Multi/Work/Images/"); sesionDirectory = getPrefs.getString("logicSesionSave","Multi/Work/Sesion/"); // Máxima cantidad de muestras para almacenar maxSamples = Integer.decode(getPrefs.getString("maxSamples","5")); // Escala del eje X de acuerdo al sample rate if(LogicData.getSampleRate() == 40000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x25nS"); timeScale = 0.000000025d; // 25nS }else if(LogicData.getSampleRate() == 20000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x50 nS"); timeScale = 0.000000050d; // 50nS }else if(LogicData.getSampleRate() == 10000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x100 nS"); timeScale = 0.000000100d; // 100nS }else if(LogicData.getSampleRate() == 4000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x250 nS"); timeScale = 0.000000250d; // 250nS }else if(LogicData.getSampleRate() == 400000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x2.5 uS"); timeScale = 0.0000025d; // 2.5uS }else if(LogicData.getSampleRate() == 2000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x500 uS"); timeScale = 0.000500d; // 500uS }else if(LogicData.getSampleRate() == 10) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x100 mS"); timeScale = 0.1d; // 100mS } // Actualizo los datos del grafico mChartView.repaint(); } }
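The chart fragment above maps sampled time onto X-axis units with toCoordinate(time, timeScale) = time / timeScale, where timeScale is how many seconds one chart unit represents for the selected sample rate (25 ns at 40 MHz, 50 ns at 20 MHz, and so on), and mUpdaterTask advances time by 1/sampleRate per sample. A minimal standalone sketch of that mapping (CoordinateScalingDemo is a hypothetical demo class, not part of the commit):

// Hypothetical demo of the time-to-coordinate mapping used by the fragment:
// one chart unit represents timeScale seconds, so at the matching sample rate
// each new sample advances the X axis by exactly one unit.
public class CoordinateScalingDemo {

    // Same formula as LogicAnalizerChartFragment.toCoordinate()
    static double toCoordinate(double timeSeconds, double timeScale) {
        return timeSeconds / timeScale;
    }

    public static void main(String[] args) {
        final double sampleRate = 40000000d;    // 40 MHz -> one sample every 25 ns
        final double timeScale = 0.000000025d;  // 25 ns per chart unit for that rate
        double time = 0;
        for (int sample = 0; sample < 5; sample++) {
            System.out.printf("sample %d -> x = %.1f%n", sample, toCoordinate(time, timeScale));
            time += 1.0d / sampleRate;           // same increment as mUpdaterTask
        }
        // Prints x = 0.0, 1.0, 2.0, 3.0, 4.0: one chart unit per sample.
    }
}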
src/com/protocolanalyzer/andres/LogicAnalizerChartFragment.java
package com.protocolanalyzer.andres; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import org.achartengine.ChartFactory; import org.achartengine.GraphicalView; import org.achartengine.chart.PointStyle; import org.achartengine.model.XYMultipleSeriesDataset; import org.achartengine.model.XYSeries; import org.achartengine.renderer.XYMultipleSeriesRenderer; import org.achartengine.renderer.XYSeriesRenderer; import org.achartengine.util.MathHelper; import android.annotation.SuppressLint; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.graphics.Color; import android.graphics.Bitmap.CompressFormat; import android.graphics.Paint.Align; import android.graphics.Paint.Style; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.Vibrator; import android.preference.PreferenceManager; import android.text.Editable; import android.util.Log; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.Toast; import com.actionbarsherlock.app.ActionBar; import com.actionbarsherlock.app.SherlockFragment; import com.actionbarsherlock.app.SherlockFragmentActivity; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuInflater; import com.actionbarsherlock.view.MenuItem; import com.actionbarsherlock.view.ActionMode; import com.multiwork.andres.ConfirmDialog; import com.multiwork.andres.R; import com.protocolanalyzer.api.andres.LogicData; @SuppressLint("ValidFragment") public class LogicAnalizerChartFragment extends SherlockFragment implements OnDataDecodedListener{ /** Debugging */ private static final boolean DEBUG = true; /** Valores del eje Y que son tomados como inicio ('0' lógico) para las Series de los canales de entrada */ private static final float yChannel[] = {12f, 8f, 4f, 0}; /** Cuanto se incrementa en el eje Y para hacer un '1' logico */ private static final float bitScale = 1.00f; /** Valor del eje X maximo inicial */ private static final double xMax = 10; /** Colores de linea para cada canal */ private static final int lineColor[] = {Color.RED, Color.GREEN, Color.BLUE, Color.YELLOW}; /** Constantes */ private static final int CONFIRM_DIALOG = 0, RESULT_OK = -1; /** Vibrador del dispositivo */ private static Vibrator mVibrator; /** Directorio para guardar las sesiones */ private static String sesionDirectory; /** Directorio para guardar las imagenes */ private static String imagesDirectory; /** ActionBar */ private static ActionBar mActionBar; /** Handler para la actualizacion del grafico en el UI Thread */ private static Handler mUpdaterHandler = new Handler(); /** Tiempo que va transcurriendo (eje x del grafico) */ private static double time = 0.0d; /** Cuantos segundos representa un cuadrito (una unidad) en el grafico */ private static double timeScale; /** Numero maximo de muestras en las series (osea en el grafico) */ private static int maxSamples = 5; private static int currentSamples = 0; /** Serie que muestra los '1' y '0' de cada canal */ private static XYSeries[] mSerie = new XYSeries[LogicAnalizerActivity.channelsNumber]; /** Renderer para cada Serie, indica color, tamaño, etc */ 
private static XYSeriesRenderer[] mRenderer = new XYSeriesRenderer[LogicAnalizerActivity.channelsNumber]; /** Dataset para agrupar las Series */ private static XYMultipleSeriesDataset mSerieDataset = new XYMultipleSeriesDataset(); /** Dataser para agrupar los Renderer */ private static XYMultipleSeriesRenderer mRenderDataset = new XYMultipleSeriesRenderer(); private static GraphicalView mChartView; private static SherlockFragmentActivity mActivity; private static OnActionBarClickListener mActionBarListener; /** Coordenadas de inicio cuando se toco por primera vez el touchscreen */ private static float x = 0, y = 0; /** Indica si se esta deslizando el dedo en vez de mantenerlo apretado */ private static boolean isMoving = false; /** Indica si se esta sosteniendo el dedo sobre la pantalla (long-press) */ private static boolean fingerStillDown = false; /** Dato decodificado desde LogicHelper para ser mostrado en el grafico, contiene las posiciones para mostar * el tipo de protocolo, etc * @see LogicData.java */ private static LogicData[] mData = new LogicData[LogicAnalizerActivity.channelsNumber]; private static boolean firstTime = true; private static int samplesNumber = 0; // Constructor public LogicAnalizerChartFragment(LogicData[] data) { mData = data; } @Override public double onDataDecodedListener(LogicData[] mLogicData, int samplesCount, boolean isConfig) { // Si se cambiaron las configuraciones las actualizo if(isConfig) setChartPreferences(); else{ if(DEBUG) Log.i("mFragmentChart","onDataDecoded()"); if(DEBUG) Log.i("mFragmentChart","Data: " + mLogicData.toString()); mData = mLogicData; samplesNumber = samplesCount; mUpdaterHandler.post(mUpdaterTask); // Configuro las variables en base a las preferencias la primera vez unicamente if(firstTime){ setChartPreferences(); firstTime = false; } } return 0; } // Creacion del Fragment @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Log.i("onCreate()", "onCreate LogicAnalizerView"); // Obtengo la Activity que contiene el Fragment mActivity = getSherlockActivity(); mActionBar = mActivity.getSupportActionBar(); // Obtengo el ActionBar mActionBar.setDisplayHomeAsUpEnabled(true); // El icono de la aplicacion funciona como boton HOME mActionBar.setTitle(getString(R.string.AnalyzerName)) ; // Nombre this.setHasOptionsMenu(true); for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n) { // Crea las Serie que es una linea en el grafico (cada una de las entradas) mSerie[n] = new XYSeries(getString(R.string.AnalyzerName) + n); mRenderer[n] = new XYSeriesRenderer(); // Creo el renderer de la Serie mRenderDataset.addSeriesRenderer(mRenderer[n]); // Agrego el renderer al Dataset mSerieDataset.addSeries(mSerie[n]); // Agrego la seria al Dataset mRenderer[n].setColor(lineColor[n]); // Color de la Serie mRenderer[n].setFillPoints(true); mRenderer[n].setPointStyle(PointStyle.CIRCLE); mRenderer[n].setLineWidth(2f); mRenderer[n].getTextPaint().setTextSize(30); // Tamaño del texto mRenderer[n].getTextPaint().setColor(Color.WHITE); mRenderer[n].getTextPaint().setTypeface(android.graphics.Typeface.DEFAULT_BOLD); mRenderer[n].getTextPaint().setTextAlign(Align.CENTER); mRenderer[n].getRectPaint().setColor(Color.WHITE); mRenderer[n].getRectPaint().setStrokeWidth(2f); mRenderer[n].getRectPaint().setStyle(Style.STROKE); } // Configuraciones generales mRenderDataset.setYTitle(getString(R.string.AnalyzerYTitle)); mRenderDataset.setAntialiasing(true); mRenderDataset.setYAxisMax(yChannel[0]+4); 
mRenderDataset.setXAxisMin(0); mRenderDataset.setXAxisMax(xMax); mRenderDataset.setPanEnabled(true); mRenderDataset.setShowGrid(true); mRenderDataset.setPointSize(4f); mRenderDataset.setExternalZoomEnabled(true); mRenderDataset.setPanEnabled(true, false); mRenderDataset.setZoomEnabled(true, false); mRenderDataset.setPanLimits(new double[] {0d , Double.MAX_VALUE, -1d, yChannel[0]+4}); mChartView = ChartFactory.getLineChartView(mActivity, mSerieDataset, mRenderDataset); setChartPreferences(); // Obtengo el OnActionBarClickListener de la Activity que creo este Fragment try { mActionBarListener = (OnActionBarClickListener) mActivity; } catch (ClassCastException e) { throw new ClassCastException(mActivity.toString() + " must implement OnActionBarClickListener"); } // Vibrador mVibrator = (Vibrator) mActivity.getSystemService(Context.VIBRATOR_SERVICE); final Runnable longClickRun = new Runnable() { @Override public void run() { if(fingerStillDown && !isMoving) { if(DEBUG) Log.i("Runnable longClickRun()", "LONG CLICK"); mVibrator.vibrate(80); // Vibro e inicio el ActionMode mActivity.startActionMode(new ActionModeEnable()); } } }; mChartView.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { // Si me movi al menos 20 unidades en cualquier direccion ya se toma como scroll NO long-press if(Math.abs(event.getX() - x) > 20 || Math.abs(event.getY() - y) > 20) { isMoving = true; } // Obtengo las coordenadas iniciales para tomar el movimiento if(event.getAction() == MotionEvent.ACTION_DOWN) { x = event.getX(); y = event.getY(); fingerStillDown = true; isMoving = false; mChartView.postDelayed(longClickRun, 1000); // En 1000mS se iniciara el Long-Press } // Si levanto el dedo ya no cuenta para el long-press else if(event.getAction() == MotionEvent.ACTION_UP){ mChartView.removeCallbacks(longClickRun); // Elimino el postDelayed() fingerStillDown = false; isMoving = false; x = y = 0; } // Sleep por 50mS para que no este continuamente testeando y ahorre recursos (no hace falta gran velocidad) try { Thread.sleep(50); } catch (InterruptedException e) { e.printStackTrace(); } // return false; da lugar a que se analizen otros eventos de touch (como cuando deslizamos el grafico). Si fuera // true el grafico no se desplazaría porque este se activa primero y evita al otro return false; } }); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { menu.clear(); inflater.inflate(R.menu.actionbar_logicchart, menu); super.onCreateOptionsMenu(menu, inflater); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Renderizo el layout return inflater.inflate(R.layout.logicanalizer, container, false); } @Override public void onResume() { super.onResume(); if(DEBUG) Log.i("onResume()","Resume LogicAnalizerView"); // Elimino primero el View porque si ya esta agregado genera una excepcion ((FrameLayout) mActivity.findViewById(R.id.mChart)).removeViewInLayout(mChartView); // Agrego un View al layout que se renderizo en onCreateView. No puedo hacerlo antes porque dentro de // onCreateView() el layout no se renderizo y por lo tanto es null. 
((FrameLayout) mActivity.findViewById(R.id.mChart)).addView(mChartView); } // Activa el ActionMode del ActionBar private final class ActionModeEnable implements ActionMode.Callback { @Override public boolean onCreateActionMode(ActionMode mode, Menu menu) { if(DEBUG) Log.i("ActionMode", "Create"); MenuInflater inflater = mActivity.getSupportMenuInflater(); inflater.inflate(R.menu.actionmodelogic, menu); return true; } @Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) { if(DEBUG) Log.i("ActionMode", "Prepare"); return false; } // Al presionar iconos en el ActionMode @Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) { if(DEBUG) Log.i("ActionMode", "Item clicked: " + item.getItemId() + " - " + item.getTitle()); switch(item.getItemId()) { case R.id.restartLogic: mActionBarListener.onActionBarClickListener(R.id.restartLogic); restart(); break; case R.id.saveLogic: createDialog(); break; } mode.finish(); return true; } @Override public void onDestroyActionMode(ActionMode mode) { if(DEBUG) Log.i("ActionMode", "Destroy"); } } // Listener de los items en el ActionBar @Override public boolean onOptionsItemSelected(MenuItem item) { if(DEBUG) Log.i("mFragmentChart", "ActionBar -> " + item.getTitle()); switch(item.getItemId()){ case R.id.zoomInLogic: mChartView.zoomIn(); break; case R.id.zoomOutLogic: mChartView.zoomOut(); break; } return true; } /** * Viene aqui cuando se vuelve de la Activity de las preferences al ser llamada con startActivityForResult() de este * modo actualizo las preferencias */ @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if(DEBUG) Log.i("mFragmentChart", "Activity Result"); if(DEBUG) Log.i("mFragmentChart", "resultCode: " + resultCode); if(DEBUG) Log.i("mFragmentChart", "requestCode: " + requestCode); // Dialogo de confirmación para guardar imagen else if(requestCode == CONFIRM_DIALOG){ if(resultCode == RESULT_OK){ if(DEBUG) Log.i("ActivityResult", "Confirm Dialog OK"); Bitmap bitmap = mChartView.toBitmap(); // Creo un nuevo BitMap try { FileOutputStream output = new FileOutputStream(new File(Environment.getExternalStorageDirectory().getPath() + imagesDirectory + data.getExtras().getString("text") + ".jpeg")); // Guardo la imagen con el nombre bitmap.compress(CompressFormat.JPEG, 95, output); // Formato JPEG } catch (FileNotFoundException e) { e.printStackTrace(); } mActivity.runOnUiThread(new Runnable() { @Override // Toast en el UI Thread public void run() { Toast.makeText(mActivity, getString(R.string.AnalyzerDialogFileSaved), Toast.LENGTH_SHORT).show(); } }); } } } // Reinicia el gŕafico y las variables involucradas private void restart() { for(int n = 0; n < LogicAnalizerActivity.channelsNumber; ++n) { mSerie[n].clear(); } mRenderDataset.setXAxisMax(xMax); mRenderDataset.setXAxisMin(0); time = 0; mChartView.repaint(); Toast.makeText(mActivity, getString(R.string.FrecReinicio), Toast.LENGTH_SHORT).show(); } /** * Crea una ventana preguntando al usuario si desea guardar la sesion o una imagen del grafico * @author Andres Torti * @see http://developer.android.com/guide/topics/ui/menus.html */ private void createDialog() { final CharSequence[] items = {getString(R.string.AnalyzerImagen), getString(R.string.AnalyzerSesion)}; AlertDialog.Builder alert = new AlertDialog.Builder(mActivity); alert.setTitle(getString(R.string.AnalyzerDialogSaveTitle)); alert.setItems(items, new DialogInterface.OnClickListener() { public void 
onClick(DialogInterface dialog, int item) { if(item == 0) { saveImageDialog(); } else { saveSesionDialog(); } } }); alert.show(); } /** * Guarda un screenshot del grafico actual en la tarjeta de memoria * @author Andres Torti */ private void saveImageDialog() { // Creo el dialogo AlertDialog.Builder alert = new AlertDialog.Builder(mActivity); alert.setTitle(getString(R.string.AnalyzerDialogSaveTitle)); alert.setMessage(getString(R.string.AnalyzerDialogFileName)); // Creamos un EditView para que el usuario escriba final EditText input = new EditText(mActivity); alert.setView(input); // Creamos el boton OK y su onClickListener alert.setPositiveButton(getString(R.string.Ok), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { Editable text = input.getText(); // Obtengo el texto que escribio el usuario // Creo un nuevo archivo con el nombre del usuario y extension .jpeg // Verifico que pueda escribir en la SD if (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())) { // Creo el directorio si ya existe no hace nada new File(Environment.getExternalStorageDirectory().getPath() + imagesDirectory).mkdirs(); // Creo el archivo final File imageFile = new File(Environment.getExternalStorageDirectory().getPath() + imagesDirectory + text.toString() + ".jpeg"); // Si el archivo ya existe pregunto por confirmacion if(imageFile.exists()){ if(DEBUG) Log.i("Dialog", "File exists"); // Creo un diálogo preguntando por confirmación de sobreescribir el archivo y paso el nombre del archivo startActivityForResult(new Intent(mActivity, ConfirmDialog.class).putExtra("text", text.toString()), CONFIRM_DIALOG); } // Si no existe el archivo directamente lo guardo else { if(DEBUG) Log.i("Dialog", "File doesn't exists)"); Bitmap bitmap = mChartView.toBitmap(); // Creo un nuevo BitMap try { // Formato JPEG, 95% de calidad guardado en imageFile bitmap.compress(CompressFormat.JPEG, 95, new FileOutputStream(imageFile)); } catch (FileNotFoundException e) { e.printStackTrace(); } Toast.makeText(mActivity, getString(R.string.AnalyzerDialogFileSaved), Toast.LENGTH_SHORT).show(); } } // Si no se puede escribir en la tarjeta SD muestro un Toast con error else { Toast.makeText(mActivity, getString(R.string.AnalyzerDialogFileNotSaved), Toast.LENGTH_LONG).show(); } dialog.dismiss(); } }); // Boton cancelar alert.setNegativeButton(getString(R.string.Cancel), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { dialog.dismiss(); } }); alert.show(); } // Guarda la sesion actual, osea los datos contenidos en las series y el grafico //TODO: hay que hacer que se pueda abrir la sesion con un explorador de archivos //TODO: guardar las anotaciones del grafico tambien private void saveSesionDialog() { AlertDialog.Builder dialog = new AlertDialog.Builder(mActivity); dialog.setTitle(getString(R.string.AnalyzerDialogSaveTitle)); dialog.setMessage(getString(R.string.AnalyzerDialogFileName)); // Creamos un EditView para que el usuario escriba final EditText textInput = new EditText(mActivity); dialog.setView(textInput); dialog.setPositiveButton("Ok", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface arg0, int arg1) { new Thread(new Runnable() { // Creo un nuevo Thread para evitar bloquear el UI Thread @Override // (Guardar el archivos lleva algunos segundos a veces) public void run() { try { // Creo el directorio por si no existe new File(Environment.getExternalStorageDirectory().getPath() + 
sesionDirectory).mkdirs(); // Creo el archivo final File path = new File(Environment.getExternalStorageDirectory().getPath() + sesionDirectory + textInput.getText().toString() + ".ms"); final FileOutputStream fos = new FileOutputStream(path); // Guardo las Series ObjectOutputStream os = new ObjectOutputStream(fos); os.writeInt(LogicAnalizerActivity.channelsNumber); // Numero de canales que voy a guardar for(int n = 0; n < LogicAnalizerActivity.channelsNumber; ++n) { os.writeObject(mSerie[n]); } os.close(); } catch (IOException e) { e.printStackTrace(); } mActivity.runOnUiThread(new Runnable() { // El Toast debe mostrarse en el Thread de la UI @Override public void run() { Toast.makeText(mActivity, getString(R.string.AnalyzerDialogSesionSaved), Toast.LENGTH_SHORT).show(); } }); } }).start(); } }); dialog.setNegativeButton("Cancel", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); dialog.show(); } /** * Los Handlers ejecutan sus operaciones en el Thread de la UI haciendo posible la modificacion de la misma desde Threads no UI. * @author Andres Torti * @see http://developer.android.com/guide/topics/fundamentals/processes-and-threads.html * @see http://developer.android.com/reference/android/os/Handler.html * @see http://developer.android.com/resources/articles/timed-ui-updates.html * @see http://stackoverflow.com/questions/10405773/how-to-use-preferencefragment/10405850#comment13428324_10405850 */ final private Runnable mUpdaterTask = new Runnable() { @Override public void run() { if(DEBUG) { for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n) { for(int i = 0; i < mData[n].getStringCount(); ++i) { Log.i("Data", "Data[" + n + "]: " + mData[n].getString(i)); } } } // Si los bit son 1 le sumo 1 a los valores tomados como 0 logicos for(int n=0; n <samplesNumber; ++n){ for(int channel=0; channel < LogicAnalizerActivity.channelsNumber; ++channel){ if(mData[channel].getBits().get(n)){ // Si es 1 // Nivel tomado como 0 + un alto de bit mSerie[channel].add(toCoordinate(time, timeScale), yChannel[channel]+bitScale); } else{ // Si es 0 mSerie[channel].add(toCoordinate(time, timeScale), yChannel[channel]); } } //Si llego al maximo del cuadro (borde derecho) aumento el maximo y el minimo para dibujar un tiempo mas //(desplazamiento del cuadro) de esta manera si deslizamos el cuadro horizontalmente tendremos los datos if(toCoordinate(time, timeScale) >= xMax){ //if(DEBUG) Log.i("Move", "Chart moved"); mRenderDataset.setXAxisMax(mRenderDataset.getXAxisMax()+1d); mRenderDataset.setXAxisMin(mRenderDataset.getXAxisMin()+1d); } time += 1.0d/LogicData.getSampleRate(); // Incremento el tiempo } // Agrego un espacio para indicar que el buffer de muestreo llego hasta aqui time += (10*timeScale); for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n){ mSerie[n].add(mSerie[n].getX(mSerie[n].getItemCount()-1)+0.0000001d, MathHelper.NULL_VALUE); } for(int n = 0; n < LogicAnalizerActivity.channelsNumber; ++n){ for(int i = 0; i < mData[n].getStringCount(); ++i){ // Agrego el texto en el centro del area de tiempo que contiene el string mSerie[n].addAnnotation(mData[n].getString(i), toCoordinate(mData[n].getPositionAt(i)[0]+((mData[n].getPositionAt(i)[1]-mData[n].getPositionAt(i)[0])/2.0d), timeScale), yChannel[n]+2f); // Agrego el recuadro mSerie[n].addRectangle(toCoordinate(mData[n].getPositionAt(i)[0], timeScale)+0.0000001, yChannel[n]+3.5f, toCoordinate(mData[n].getPositionAt(i)[1], timeScale), yChannel[n]+bitScale+0.5f); } } // Encuadro el 
área y redibujo mRenderDataset.setXAxisMax(mRenderDataset.getXAxisMax()+1); mChartView.repaint(); // Redibujo el grafico ++currentSamples; // Si me paso de las muestras borro los canales if(currentSamples > maxSamples){ for(int n = 0; n < LogicAnalizerActivity.channelsNumber; n++) mSerie[n].clear(); currentSamples = 0; } // Cada vez que recibo un buffer del analizador logico, lo muestro todo y pauso mActionBarListener.onActionBarClickListener(R.id.PlayPauseLogic); } }; /** * Convierte el tiempo en segundo a la escala del grafico segunda la escala de tiempos * @param time tiempo en segundos * @param timeScale cuantos segundos equivalen a una unidad en el grafico * @return coordenada equivalente */ private static double toCoordinate (double time, double timeScale){ return (time/timeScale); } // Define los parametros de acuerdo a las preferencias private void setChartPreferences() { SharedPreferences getPrefs = PreferenceManager.getDefaultSharedPreferences(mActivity); for(int n=0; n < LogicAnalizerActivity.channelsNumber; ++n){ // Seteo el protocolo para cada canal switch(Byte.decode(getPrefs.getString("protocol" + (n+1), "0"))){ case 0: // I2C mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + " [I2C]"); break; case 1: // UART mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + " [UART]"); break; case 2: // CLOCK mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + "[CLK]"); break; case 3: // NONE mSerie[n].setTitle(getString(R.string.AnalyzerChannel) + " " + (n+1) + "[---]"); break; } } // Directorios para guardar las imagenes y sesiones imagesDirectory = getPrefs.getString("logicImageSave","Multi/Work/Images/"); sesionDirectory = getPrefs.getString("logicSesionSave","Multi/Work/Sesion/"); // Máxima cantidad de muestras para almacenar maxSamples = Integer.decode(getPrefs.getString("maxSamples","5")); // Escala del eje X de acuerdo al sample rate if(LogicData.getSampleRate() == 40000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x25nS"); timeScale = 0.000000025d; // 25nS }else if(LogicData.getSampleRate() == 20000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x50 nS"); timeScale = 0.000000050d; // 50nS }else if(LogicData.getSampleRate() == 10000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x100 nS"); timeScale = 0.000000100d; // 100nS }else if(LogicData.getSampleRate() == 4000000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x250 nS"); timeScale = 0.000000250d; // 250nS }else if(LogicData.getSampleRate() == 400000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x2.5 uS"); timeScale = 0.0000025d; // 2.5uS }else if(LogicData.getSampleRate() == 2000) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x500 uS"); timeScale = 0.000500d; // 500uS }else if(LogicData.getSampleRate() == 10) { mRenderDataset.setXTitle(getString(R.string.AnalyzerXTitle) + " x100 mS"); timeScale = 0.1d; // 100mS } // Actualizo los datos del grafico mChartView.repaint(); } }
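Both versions of the fragment detect a long press by hand: the touch listener posts a Runnable with a 1000 ms delay and only treats the gesture as a long press if the finger is still down and has moved less than 20 px in the meantime, while returning false so pan/zoom handling keeps working. A stripped-down sketch of that pattern (LongPressTouchListener is a hypothetical helper class, Android only; names are illustrative):

import android.view.MotionEvent;
import android.view.View;

// Hypothetical reusable version of the long-press detection in the touch listener above.
public class LongPressTouchListener implements View.OnTouchListener {
    private float downX, downY;
    private boolean moving, fingerDown;
    private final Runnable action;   // caller-supplied long-press action

    private final Runnable check = new Runnable() {
        @Override public void run() {
            if (fingerDown && !moving) action.run();   // long press confirmed
        }
    };

    public LongPressTouchListener(Runnable action) { this.action = action; }

    @Override public boolean onTouch(View v, MotionEvent event) {
        // More than 20 px of movement in any direction counts as a scroll, not a long press.
        if (Math.abs(event.getX() - downX) > 20 || Math.abs(event.getY() - downY) > 20) {
            moving = true;
        }
        if (event.getAction() == MotionEvent.ACTION_DOWN) {
            downX = event.getX();
            downY = event.getY();
            fingerDown = true;
            moving = false;
            v.postDelayed(check, 1000);    // re-check in 1000 ms, like the fragment
        } else if (event.getAction() == MotionEvent.ACTION_UP) {
            v.removeCallbacks(check);      // finger lifted: cancel the pending check
            fingerDown = false;
            moving = false;
        }
        return false;                      // false so the chart's own pan/zoom handling still runs
    }
}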
Tidying and renames
src/com/protocolanalyzer/andres/LogicAnalizerChartFragment.java
Tidying and renames
<ide><path>rc/com/protocolanalyzer/andres/LogicAnalizerChartFragment.java <ide> <ide> @Override <ide> public double onDataDecodedListener(LogicData[] mLogicData, int samplesCount, boolean isConfig) { <add> if(DEBUG) Log.i("mFragmentChart","onDataDecoded() - isConfig: " + isConfig); <add> if(DEBUG) Log.i("mFragmentChart","Data: " + mLogicData.toString()); <ide> // Si se cambiaron las configuraciones las actualizo <ide> if(isConfig) setChartPreferences(); <ide> else{ <del> if(DEBUG) Log.i("mFragmentChart","onDataDecoded()"); <del> if(DEBUG) Log.i("mFragmentChart","Data: " + mLogicData.toString()); <del> <ide> mData = mLogicData; <ide> samplesNumber = samplesCount; <ide> mUpdaterHandler.post(mUpdaterTask); <ide> firstTime = false; <ide> } <ide> } <del> <ide> return 0; <ide> } <ide> <ide> public void onCreate(Bundle savedInstanceState) { <ide> super.onCreate(savedInstanceState); <ide> <del> Log.i("onCreate()", "onCreate LogicAnalizerView"); <del> <del> // Obtengo la Activity que contiene el Fragment <del> mActivity = getSherlockActivity(); <del> <add> Log.i("mFragmentChart", "onCreate()"); <add> <add> mActivity = getSherlockActivity(); // Obtengo la Activity que contiene el Fragment <ide> mActionBar = mActivity.getSupportActionBar(); // Obtengo el ActionBar <ide> mActionBar.setDisplayHomeAsUpEnabled(true); // El icono de la aplicacion funciona como boton HOME <ide> mActionBar.setTitle(getString(R.string.AnalyzerName)) ; // Nombre <ide> @Override <ide> public View onCreateView(LayoutInflater inflater, ViewGroup container, <ide> Bundle savedInstanceState) { <add> Log.i("mFragmentChart", "onCreateView()"); <ide> // Renderizo el layout <ide> return inflater.inflate(R.layout.logicanalizer, container, false); <ide> } <ide> @Override <ide> public void onResume() { <ide> super.onResume(); <del> if(DEBUG) Log.i("onResume()","Resume LogicAnalizerView"); <add> if(DEBUG) Log.i("mFragmentChart","onResume()"); <ide> <ide> // Elimino primero el View porque si ya esta agregado genera una excepcion <ide> ((FrameLayout) mActivity.findViewById(R.id.mChart)).removeViewInLayout(mChartView);
Java
apache-2.0
384dcd78533ff3b57bcb8cb0c69babc73453f82e
0
marklogic/data-hub-in-a-box,marklogic/data-hub-in-a-box,marklogic/data-hub-in-a-box,marklogic/data-hub-in-a-box
package com.marklogic.hub; import com.marklogic.hub.collector.DiskQueueTest; import com.marklogic.hub.collector.EmptyCollectorTest; import com.marklogic.hub.collector.StreamCollectorTest; import com.marklogic.hub.core.*; import com.marklogic.hub.deploy.commands.GenerateHubTDETemplateCommandTest; import com.marklogic.hub.deploy.commands.GeneratePiiCommandTest; import com.marklogic.hub.deploy.commands.LoadUserModulesCommandTest; import com.marklogic.hub.entity.EntityManagerTest; import com.marklogic.hub.flow.FlowManagerTest; import com.marklogic.hub.flow.FlowRunnerTest; import com.marklogic.hub.job.JobManagerTest; import com.marklogic.hub.job.TracingTest; import com.marklogic.hub.scaffolding.ScaffoldingTest; import com.marklogic.hub.scaffolding.ScaffoldingValidatorTest; import com.marklogic.hub.util.Installer; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; @RunWith(Suite.class) @SuiteClasses( { DiskQueueTest.class, EmptyCollectorTest.class, StreamCollectorTest.class, DataHubInstallTest.class, DataHubTest.class, DebugLibTest.class, HubConfigTest.class, HubProjectTest.class, GenerateHubTDETemplateCommandTest.class, GeneratePiiCommandTest.class, LoadUserModulesCommandTest.class, EntityManagerTest.class, FlowManagerTest.class, FlowRunnerTest.class, JobManagerTest.class, TracingTest.class, // these two must be run separately! //EndToEndFlowTests.class, //ScaffoldingE2E.class, ScaffoldingTest.class, ScaffoldingValidatorTest.class }) public class CoreTestSuite { @BeforeClass public static void setUp() { new Installer().installHubOnce(); } @AfterClass public static void tearDown() { new Installer().uninstallHub(); } }
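CoreTestSuite relies on the JUnit 4 suite runner: @RunWith(Suite.class) executes every class listed in @SuiteClasses, and the suite-level @BeforeClass/@AfterClass run once around all of them, which is why installing and uninstalling the hub happens only once per run. A self-contained sketch of the same pattern (ExampleSuite, FirstTest and SecondTest are hypothetical, not part of the data hub code base):

import static org.junit.Assert.assertEquals;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;

// Minimal JUnit 4 suite: the runner executes every listed class in order.
@RunWith(Suite.class)
@SuiteClasses({ ExampleSuite.FirstTest.class, ExampleSuite.SecondTest.class })
public class ExampleSuite {

    @BeforeClass
    public static void setUp() {
        // runs once before any listed class (the hub suite installs the hub here)
    }

    @AfterClass
    public static void tearDown() {
        // runs once after all listed classes have finished
    }

    public static class FirstTest {
        @Test public void addition() { assertEquals(4, 2 + 2); }
    }

    public static class SecondTest {
        @Test public void concat() { assertEquals("ab", "a" + "b"); }
    }
}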
marklogic-data-hub/src/test/java/com/marklogic/hub/CoreTestSuite.java
package com.marklogic.hub; import com.marklogic.hub.collector.DiskQueueTest; import com.marklogic.hub.collector.EmptyCollectorTest; import com.marklogic.hub.collector.StreamCollectorTest; import com.marklogic.hub.core.*; import com.marklogic.hub.deploy.commands.GenerateHubTDETemplateCommandTest; import com.marklogic.hub.deploy.commands.GeneratePiiCommandTest; import com.marklogic.hub.deploy.commands.LoadUserModulesCommandTest; import com.marklogic.hub.entity.EntityManagerTest; import com.marklogic.hub.flow.FlowManagerTest; import com.marklogic.hub.flow.FlowRunnerTest; import com.marklogic.hub.job.JobManagerTest; import com.marklogic.hub.job.TracingTest; import com.marklogic.hub.scaffolding.ScaffoldingTest; import com.marklogic.hub.scaffolding.ScaffoldingValidatorTest; import com.marklogic.hub.util.Installer; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; @RunWith(Suite.class) @SuiteClasses( { // DiskQueueTest.class, // EmptyCollectorTest.class, // StreamCollectorTest.class, // DataHubInstallTest.class, // DataHubTest.class, // DebugLibTest.class, // HubConfigTest.class, // HubProjectTest.class, // GenerateHubTDETemplateCommandTest.class, // GeneratePiiCommandTest.class, // LoadUserModulesCommandTest.class, // EntityManagerTest.class, FlowManagerTest.class, // FlowRunnerTest.class, // JobManagerTest.class, // TracingTest.class, // // these two must be run separately! // //EndToEndFlowTests.class, // //ScaffoldingE2E.class, // ScaffoldingTest.class, // ScaffoldingValidatorTest.class }) public class CoreTestSuite { @BeforeClass public static void setUp() { new Installer().installHubOnce(); } @AfterClass public static void tearDown() { new Installer().uninstallHub(); } }
Uncomment tests
marklogic-data-hub/src/test/java/com/marklogic/hub/CoreTestSuite.java
Uncomment tests
<ide><path>arklogic-data-hub/src/test/java/com/marklogic/hub/CoreTestSuite.java <ide> <ide> @RunWith(Suite.class) <ide> @SuiteClasses( { <del>// DiskQueueTest.class, <del>// EmptyCollectorTest.class, <del>// StreamCollectorTest.class, <del>// DataHubInstallTest.class, <del>// DataHubTest.class, <del>// DebugLibTest.class, <del>// HubConfigTest.class, <del>// HubProjectTest.class, <del>// GenerateHubTDETemplateCommandTest.class, <del>// GeneratePiiCommandTest.class, <del>// LoadUserModulesCommandTest.class, <del>// EntityManagerTest.class, <add> DiskQueueTest.class, <add> EmptyCollectorTest.class, <add> StreamCollectorTest.class, <add> DataHubInstallTest.class, <add> DataHubTest.class, <add> DebugLibTest.class, <add> HubConfigTest.class, <add> HubProjectTest.class, <add> GenerateHubTDETemplateCommandTest.class, <add> GeneratePiiCommandTest.class, <add> LoadUserModulesCommandTest.class, <add> EntityManagerTest.class, <ide> FlowManagerTest.class, <del>// FlowRunnerTest.class, <del>// JobManagerTest.class, <del>// TracingTest.class, <del>// // these two must be run separately! <del>// //EndToEndFlowTests.class, <del>// //ScaffoldingE2E.class, <del>// ScaffoldingTest.class, <del>// ScaffoldingValidatorTest.class <add> FlowRunnerTest.class, <add> JobManagerTest.class, <add> TracingTest.class, <add> // these two must be run separately! <add> //EndToEndFlowTests.class, <add> //ScaffoldingE2E.class, <add> ScaffoldingTest.class, <add> ScaffoldingValidatorTest.class <ide> }) <ide> <ide> public class CoreTestSuite {
Java
apache-2.0
f1f8fdaee19ecf28a4389703b66da1135e1ab4a4
0
saki4510t/libcommon,saki4510t/libcommon
package com.serenegiant.utils; /* * libcommon * utility/helper classes for myself * * Copyright (c) 2014-2022 saki [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import android.text.TextUtils; import android.util.Log; /** * Buffer/配列用のヘルパークラス */ public class BufferHelper { private BufferHelper() { // インスタンス化をエラーにするためにデフォルトコンストラクタをprivateに } private static final char[] HEX = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}; private static final int BUF_LEN = 256; /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param buffer * @param offset * @param size */ public static final void dump(@NonNull final String tag, final ByteBuffer buffer, final int offset, final int size) { dump(tag, null, buffer, offset, size, false); } /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param prefix * @param buffer * @param offset * @param size */ public static final void dump( @NonNull final String tag, final String prefix, final ByteBuffer buffer, final int offset, final int size) { dump(tag, prefix, buffer, offset, size, false); } /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param buffer * @param offset * @param size * @param findAnnexB */ public static final void dump(@NonNull final String tag, final ByteBuffer buffer, final int offset, final int size, final boolean findAnnexB) { dump(tag, null, buffer, offset, size, findAnnexB); } /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param _prefix * @param _buffer * @param offset * @param _size * @param findAnnexB */ public static final void dump( @NonNull final String tag, @Nullable final String _prefix, final ByteBuffer _buffer, final int offset, final int _size, final boolean findAnnexB) { @NonNull final String prefix = _prefix != null ? _prefix : "dump:"; final byte[] dump = new byte[BUF_LEN]; // if (DEBUG) Log.i(TAG, "dump:" + buffer); if (_buffer == null) return; final ByteBuffer buffer = _buffer.asReadOnlyBuffer(); final int n = buffer.limit(); final int pos = buffer.position(); // final int cap = buffer.capacity(); // if (DEBUG) Log.i(TAG, "dump:limit=" + n + ",capacity=" + cap + ",position=" + buffer.position()); int size = _size; if (size > n) size = n; buffer.position(offset); final StringBuilder sb = new StringBuilder(); int sz; for (int i = offset; i < size; i += BUF_LEN) { sz = i + BUF_LEN < size ? 
BUF_LEN : size - i; buffer.get(dump, 0, sz); for (int j = 0; j < sz; j++) { sb.append(String.format("%02x", dump[j])); } if (findAnnexB) { int index = -1; do { index = byteComp(dump, index+1, ANNEXB_START_MARK, ANNEXB_START_MARK.length); if (index >= 0) { Log.i(tag, prefix + " found ANNEXB: start index=" + index); } } while (index >= 0); } } Log.i(tag, prefix + sb); } /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param buffer * @param offset * @param size * @param findAnnexB */ public static final void dump(final String tag, final byte[] buffer, final int offset, final int size, final boolean findAnnexB) { dump(tag, null, buffer, offset, size, findAnnexB); } /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param _prefix * @param buffer * @param offset * @param _size * @param findAnnexB */ public static final void dump( @NonNull final String tag, @Nullable final String _prefix, final byte[] buffer, final int offset, final int _size, final boolean findAnnexB) { @NonNull final String prefix = _prefix != null ? _prefix : "dump:"; final int n = buffer != null ? buffer.length : 0; if (n == 0) return; int size = _size; if (size > n) size = n; final StringBuilder sb = new StringBuilder(); int sz; for (int i = offset; i < size; i ++) { sb.append(String.format("%02x", buffer[i])); } if (findAnnexB) { int index = -1; do { index = byteComp(buffer, index+1, ANNEXB_START_MARK, ANNEXB_START_MARK.length); if (index >= 0) { Log.i(tag, prefix + " found ANNEXB: start index=" + index); } } while (index >= 0); } Log.i(tag, prefix + sb); } /** * codec specific dataのスタートマーカー * AnnexBのスタートマーカーと同じ * N[00] 00 00 01 (N ≧ 0) */ public static final byte[] ANNEXB_START_MARK = { 0, 0, 0, 1, }; /** * byte[]を検索して一致する先頭インデックスを返す * @param array 検索されるbyte[] * @param search 検索するbyte[] * @param len 検索するバイト数 * @return 一致した先頭位置、一致しなければ-1 */ public static final int byteComp(@NonNull final byte[] array, final int offset, @NonNull final byte[] search, final int len) { int index = -1; final int n0 = array.length; final int ns = search.length; if ((n0 >= offset + len) && (ns >= len)) { for (int i = offset; i < n0 - len; i++) { int j = len - 1; while (j >= 0) { if (array[i + j] != search[j]) break; j--; } if (j < 0) { index = i; break; } } } return index; } /** * AnnexBのスタートマーカー(N[00] 00 00 01 (N ≧ 0))を探して先頭インデックスを返す * 返り値が0以上の場合は、返り値+3がpayloadの先頭位置(nalu headerのはず) * @param data * @param offset * @return 見つからなければ負 */ public static final int findAnnexB(final byte[] data, final int offset) { if (data != null) { final int len5 = data.length - 5; // 本当はlength-3までだけどpayloadが無いのは無効とみなしてlength-4までとする for (int i = offset; i < len5; i++) { // 最低3つは連続して0x00 if ((data[i] != 0x00) || (data[i+1] != 0x00) || (data[i+2] != 0x00)) { continue; } // 4つ目が0x01ならOK if (data[i+3] == 0x01) { return i; } } final int len4 = data.length - 4; // 本当はlength-3までだけどpayloadが無いのは無効とみなしてlength-4までとする for (int i = offset; i < len4; i++) { // 最低2つは連続して0x00でないとだめ if ((data[i] != 0x00) || (data[i+1] != 0x00)) { continue; } // 3つ目が0x01ならOK if (data[i+2] == 0x01) { return i; } } } return -1; } /** * float1つのサイズ[バイト] */ public static final int SIZEOF_FLOAT_BYTES = Float.SIZE / 8; /** * 引数のfloat配列と同じ長さのFloatBufferを生成して引数の値をセットする * @param coords * @return */ public static FloatBuffer createFloatBuffer(@NonNull final float[] coords) { // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. 
final FloatBuffer result = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT_BYTES) .order(ByteOrder.nativeOrder()).asFloatBuffer(); result.put(coords).flip(); return result; } /** * 16進文字列をパースしてByteBufferとして返す * @param hexString * @return * @throws NumberFormatException */ public static ByteBuffer from(final String hexString) throws NumberFormatException { final ByteArrayOutputStream out = new ByteArrayOutputStream(); final int n = !TextUtils.isEmpty(hexString) ? hexString.length() : 0; for (int i = 0; i < n; i += 2) { final int b = Integer.parseInt(hexString.substring(i, i + 2), 16); out.write(b); } return ByteBuffer.wrap(out.toByteArray()); } /** * byte配列を16進文字列に変換する * @param bytes * @return */ public static String toHexString(@NonNull final byte[] bytes) { return toHexString(bytes, 0, bytes.length); } /** * byte配列を16進文字列に変換する * @param bytes * @param offset * @param len 出力する最大バイト数 * @return */ public static String toHexString(final byte[] bytes, final int offset, final int len) { final int n = (bytes != null) ? bytes.length : 0; final int m = Math.min(n, offset + len); final StringBuilder sb = new StringBuilder(n * 2 + 2); for (int i = offset; i < m; i++) { final byte b = bytes[i]; sb.append(HEX[(0xf0 & b) >>> 4]); sb.append(HEX[0x0f & b]); } return sb.toString(); } /** * ByteBufferを16進文字列に変換する * @param buffer * @return */ public static String toHexString(final ByteBuffer buffer) { if (buffer == null) return null; final ByteBuffer _buffer = buffer.duplicate(); final int n = _buffer.remaining(); final StringBuilder sb = new StringBuilder(n * 2 + 2); for (int i = 0; i < n; i++) { final byte b = _buffer.get(); sb.append(HEX[(0xf0 & b) >>> 4]); sb.append(HEX[0x0f & b]); } return sb.toString(); } /** * 指定したbyte配列が指定したサイズ以上になるようにする * 引数のbyte配列がnullまたは指定したサイズよりも小さい場合には新しいbyte配列を生成して返す * @param buffer * @param newSize * @return */ @NonNull public static byte[] resize(@Nullable final byte[] buffer, final int newSize) { byte[] result = buffer; if ((result == null) || (result.length < newSize)) { result = new byte[newSize]; } return result; } /** * 指定したByteBufferが指定したサイズ以上になるようにする * 引数のByteBufferがnullまたは指定したサイズよりも小さい場合には新しいByteBufferを生成して返す * @param buffer * @param newSize * @return */ @NonNull public static ByteBuffer resize(@Nullable final ByteBuffer buffer, final int newSize) { ByteBuffer result = buffer; if ((result == null) || (result.capacity() < newSize)) { result = ByteBuffer.allocate(newSize); } result.clear(); return result; } /** * 指定したByteBufferが指定したサイズ以上になるようにする * 引数のByteBufferがnullまたは指定したサイズよりも小さい場合には新しいダイレクトByteBufferを生成して返す * @param buffer * @param newSize * @return */ @NonNull public static ByteBuffer resizeDirect(@Nullable final ByteBuffer buffer, final int newSize) { ByteBuffer result = buffer; if ((result == null) || (result.capacity() < newSize)) { result = ByteBuffer.allocateDirect(newSize).order(ByteOrder.nativeOrder()); } result.clear(); return result; } }
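findAnnexB() above scans for the AnnexB start code (runs of 0x00 followed by 0x01) and returns the index where the marker begins, or a negative value if none is found. A usage sketch, assuming BufferHelper is on the classpath; AnnexBDemo and the byte values are made up for illustration:

import com.serenegiant.utils.BufferHelper;

public class AnnexBDemo {
    public static void main(String[] args) {
        // Two payloads, each prefixed with the 00 00 00 01 start code.
        byte[] stream = {
            0x00, 0x00, 0x00, 0x01, 0x67, 0x42, 0x00, 0x1f,
            0x00, 0x00, 0x00, 0x01, 0x68, (byte) 0xce, 0x06, (byte) 0xe2
        };

        // Dump the whole buffer as lower-case hex.
        System.out.println("hex: " + BufferHelper.toHexString(stream));

        int first = BufferHelper.findAnnexB(stream, 0);           // -> 0
        int second = BufferHelper.findAnnexB(stream, first + 4);  // search past the first marker -> 8
        System.out.println("start codes at " + first + " and " + second);
    }
}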
common/src/main/java/com/serenegiant/utils/BufferHelper.java
package com.serenegiant.utils; /* * libcommon * utility/helper classes for myself * * Copyright (c) 2014-2022 saki [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import android.text.TextUtils; import android.util.Log; /** * Buffer/配列用のヘルパークラス */ public class BufferHelper { private BufferHelper() { // インスタンス化をエラーにするためにデフォルトコンストラクタをprivateに } private static final char[] HEX = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}; private static final int BUF_LEN = 256; /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param buffer * @param offset * @param size */ public static final void dump(final String tag, final ByteBuffer buffer, final int offset, final int size) { dump(tag, buffer, offset, size, false); } /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param _buffer * @param offset * @param _size * @param findAnnexB */ public static final void dump(final String tag, final ByteBuffer _buffer, final int offset, final int _size, final boolean findAnnexB) { final byte[] dump = new byte[BUF_LEN]; // if (DEBUG) Log.i(TAG, "dump:" + buffer); if (_buffer == null) return; final ByteBuffer buffer = _buffer.duplicate(); final int n = buffer.limit(); final int pos = buffer.position(); // final int cap = buffer.capacity(); // if (DEBUG) Log.i(TAG, "dump:limit=" + n + ",capacity=" + cap + ",position=" + buffer.position()); int size = _size; if (size > n) size = n; buffer.position(offset); final StringBuilder sb = new StringBuilder(); int sz; for (int i = offset; i < size; i += BUF_LEN) { sz = i + BUF_LEN < size ? BUF_LEN : size - i; buffer.get(dump, 0, sz); for (int j = 0; j < sz; j++) { sb.append(String.format("%02x", dump[j])); } if (findAnnexB) { int index = -1; do { index = byteComp(dump, index+1, ANNEXB_START_MARK, ANNEXB_START_MARK.length); if (index >= 0) { Log.i(tag, "found ANNEXB: start index=" + index); } } while (index >= 0); } } Log.i(tag, "dump:" + sb.toString()); } /** * ByteBufferの中身をlogCatへ出力する * @param tag * @param buffer * @param offset * @param _size * @param findAnnexB */ public static final void dump(final String tag, final byte[] buffer, final int offset, final int _size, final boolean findAnnexB) { final int n = buffer != null ? 
buffer.length : 0; if (n == 0) return; int size = _size; if (size > n) size = n; final StringBuilder sb = new StringBuilder(); int sz; for (int i = offset; i < size; i ++) { sb.append(String.format("%02x", buffer[i])); } if (findAnnexB) { int index = -1; do { index = byteComp(buffer, index+1, ANNEXB_START_MARK, ANNEXB_START_MARK.length); if (index >= 0) { Log.i(tag, "found ANNEXB: start index=" + index); } } while (index >= 0); } Log.i(tag, "dump:" + sb.toString()); } /** * codec specific dataのスタートマーカー * AnnexBのスタートマーカーと同じ * N[00] 00 00 01 (N ≧ 0) */ public static final byte[] ANNEXB_START_MARK = { 0, 0, 0, 1, }; /** * byte[]を検索して一致する先頭インデックスを返す * @param array 検索されるbyte[] * @param search 検索するbyte[] * @param len 検索するバイト数 * @return 一致した先頭位置、一致しなければ-1 */ public static final int byteComp(@NonNull final byte[] array, final int offset, @NonNull final byte[] search, final int len) { int index = -1; final int n0 = array.length; final int ns = search.length; if ((n0 >= offset + len) && (ns >= len)) { for (int i = offset; i < n0 - len; i++) { int j = len - 1; while (j >= 0) { if (array[i + j] != search[j]) break; j--; } if (j < 0) { index = i; break; } } } return index; } /** * AnnexBのスタートマーカー(N[00] 00 00 01 (N ≧ 0))を探して先頭インデックスを返す * 返り値が0以上の場合は、返り値+3がpayloadの先頭位置(nalu headerのはず) * @param data * @param offset * @return 見つからなければ負 */ public static final int findAnnexB(final byte[] data, final int offset) { if (data != null) { final int len5 = data.length - 5; // 本当はlength-3までだけどpayloadが無いのは無効とみなしてlength-4までとする for (int i = offset; i < len5; i++) { // 最低3つは連続して0x00 if ((data[i] != 0x00) || (data[i+1] != 0x00) || (data[i+2] != 0x00)) { continue; } // 4つ目が0x01ならOK if (data[i+3] == 0x01) { return i; } } final int len4 = data.length - 4; // 本当はlength-3までだけどpayloadが無いのは無効とみなしてlength-4までとする for (int i = offset; i < len4; i++) { // 最低2つは連続して0x00でないとだめ if ((data[i] != 0x00) || (data[i+1] != 0x00)) { continue; } // 3つ目が0x01ならOK if (data[i+2] == 0x01) { return i; } } } return -1; } /** * float1つのサイズ[バイト] */ public static final int SIZEOF_FLOAT_BYTES = Float.SIZE / 8; /** * 引数のfloat配列と同じ長さのFloatBufferを生成して引数の値をセットする * @param coords * @return */ public static FloatBuffer createFloatBuffer(@NonNull final float[] coords) { // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it. final FloatBuffer result = ByteBuffer.allocateDirect(coords.length * SIZEOF_FLOAT_BYTES) .order(ByteOrder.nativeOrder()).asFloatBuffer(); result.put(coords).flip(); return result; } /** * 16進文字列をパースしてByteBufferとして返す * @param hexString * @return * @throws NumberFormatException */ public static ByteBuffer from(final String hexString) throws NumberFormatException { final ByteArrayOutputStream out = new ByteArrayOutputStream(); final int n = !TextUtils.isEmpty(hexString) ? hexString.length() : 0; for (int i = 0; i < n; i += 2) { final int b = Integer.parseInt(hexString.substring(i, i + 2), 16); out.write(b); } return ByteBuffer.wrap(out.toByteArray()); } /** * byte配列を16進文字列に変換する * @param bytes * @return */ public static String toHexString(@NonNull final byte[] bytes) { return toHexString(bytes, 0, bytes.length); } /** * byte配列を16進文字列に変換する * @param bytes * @param offset * @param len 出力する最大バイト数 * @return */ public static String toHexString(final byte[] bytes, final int offset, final int len) { final int n = (bytes != null) ? 
bytes.length : 0; final int m = Math.min(n, offset + len); final StringBuilder sb = new StringBuilder(n * 2 + 2); for (int i = offset; i < m; i++) { final byte b = bytes[i]; sb.append(HEX[(0xf0 & b) >>> 4]); sb.append(HEX[0x0f & b]); } return sb.toString(); } /** * ByteBufferを16進文字列に変換する * @param buffer * @return */ public static String toHexString(final ByteBuffer buffer) { if (buffer == null) return null; final ByteBuffer _buffer = buffer.duplicate(); final int n = _buffer.remaining(); final StringBuilder sb = new StringBuilder(n * 2 + 2); for (int i = 0; i < n; i++) { final byte b = _buffer.get(); sb.append(HEX[(0xf0 & b) >>> 4]); sb.append(HEX[0x0f & b]); } return sb.toString(); } /** * 指定したbyte配列が指定したサイズ以上になるようにする * 引数のbyte配列がnullまたは指定したサイズよりも小さい場合には新しいbyte配列を生成して返す * @param buffer * @param newSize * @return */ @NonNull public static byte[] resize(@Nullable final byte[] buffer, final int newSize) { byte[] result = buffer; if ((result == null) || (result.length < newSize)) { result = new byte[newSize]; } return result; } /** * 指定したByteBufferが指定したサイズ以上になるようにする * 引数のByteBufferがnullまたは指定したサイズよりも小さい場合には新しいByteBufferを生成して返す * @param buffer * @param newSize * @return */ @NonNull public static ByteBuffer resize(@Nullable final ByteBuffer buffer, final int newSize) { ByteBuffer result = buffer; if ((result == null) || (result.capacity() < newSize)) { result = ByteBuffer.allocate(newSize); } result.clear(); return result; } /** * 指定したByteBufferが指定したサイズ以上になるようにする * 引数のByteBufferがnullまたは指定したサイズよりも小さい場合には新しいダイレクトByteBufferを生成して返す * @param buffer * @param newSize * @return */ @NonNull public static ByteBuffer resizeDirect(@Nullable final ByteBuffer buffer, final int newSize) { ByteBuffer result = buffer; if ((result == null) || (result.capacity() < newSize)) { result = ByteBuffer.allocateDirect(newSize).order(ByteOrder.nativeOrder()); } result.clear(); return result; } }
Added the ability to specify a prefix string when dumping a buffer
common/src/main/java/com/serenegiant/utils/BufferHelper.java
Added the ability to specify a prefix string when dumping a buffer
<ide><path>ommon/src/main/java/com/serenegiant/utils/BufferHelper.java <ide> * @param offset <ide> * @param size <ide> */ <del> public static final void dump(final String tag, <add> public static final void dump(@NonNull final String tag, <ide> final ByteBuffer buffer, final int offset, final int size) { <ide> <del> dump(tag, buffer, offset, size, false); <del> } <del> <del> /** <del> * ByteBufferの中身をlogCatへ出力する <del> * @param tag <add> dump(tag, null, buffer, offset, size, false); <add> } <add> <add> /** <add> * ByteBufferの中身をlogCatへ出力する <add> * @param tag <add> * @param prefix <add> * @param buffer <add> * @param offset <add> * @param size <add> */ <add> public static final void dump( <add> @NonNull final String tag, final String prefix, <add> final ByteBuffer buffer, final int offset, final int size) { <add> <add> dump(tag, prefix, buffer, offset, size, false); <add> } <add> <add> /** <add> * ByteBufferの中身をlogCatへ出力する <add> * @param tag <add> * @param buffer <add> * @param offset <add> * @param size <add> * @param findAnnexB <add> */ <add> public static final void dump(@NonNull final String tag, <add> final ByteBuffer buffer, final int offset, final int size, final boolean findAnnexB) { <add> <add> dump(tag, null, buffer, offset, size, findAnnexB); <add> } <add> <add> /** <add> * ByteBufferの中身をlogCatへ出力する <add> * @param tag <add> * @param _prefix <ide> * @param _buffer <ide> * @param offset <ide> * @param _size <ide> * @param findAnnexB <ide> */ <del> public static final void dump(final String tag, <add> public static final void dump( <add> @NonNull final String tag, @Nullable final String _prefix, <ide> final ByteBuffer _buffer, final int offset, final int _size, final boolean findAnnexB) { <ide> <add> @NonNull <add> final String prefix = _prefix != null ? _prefix : "dump:"; <ide> final byte[] dump = new byte[BUF_LEN]; <ide> // if (DEBUG) Log.i(TAG, "dump:" + buffer); <ide> if (_buffer == null) return; <del> final ByteBuffer buffer = _buffer.duplicate(); <add> final ByteBuffer buffer = _buffer.asReadOnlyBuffer(); <ide> final int n = buffer.limit(); <ide> final int pos = buffer.position(); <ide> // final int cap = buffer.capacity(); <ide> do { <ide> index = byteComp(dump, index+1, ANNEXB_START_MARK, ANNEXB_START_MARK.length); <ide> if (index >= 0) { <del> Log.i(tag, "found ANNEXB: start index=" + index); <add> Log.i(tag, prefix + " found ANNEXB: start index=" + index); <ide> } <ide> } while (index >= 0); <ide> } <ide> } <del> Log.i(tag, "dump:" + sb.toString()); <del> } <del> <del> /** <del> * ByteBufferの中身をlogCatへ出力する <del> * @param tag <add> Log.i(tag, prefix + sb); <add> } <add> <add> /** <add> * ByteBufferの中身をlogCatへ出力する <add> * @param tag <add> * @param buffer <add> * @param offset <add> * @param size <add> * @param findAnnexB <add> */ <add> public static final void dump(final String tag, <add> final byte[] buffer, final int offset, final int size, final boolean findAnnexB) { <add> <add> dump(tag, null, buffer, offset, size, findAnnexB); <add> } <add> <add> /** <add> * ByteBufferの中身をlogCatへ出力する <add> * @param tag <add> * @param _prefix <ide> * @param buffer <ide> * @param offset <ide> * @param _size <ide> * @param findAnnexB <ide> */ <del> public static final void dump(final String tag, <add> public static final void dump( <add> @NonNull final String tag, @Nullable final String _prefix, <ide> final byte[] buffer, final int offset, final int _size, final boolean findAnnexB) { <ide> <add> @NonNull <add> final String prefix = _prefix != null ? 
_prefix : "dump:"; <ide> final int n = buffer != null ? buffer.length : 0; <ide> if (n == 0) return; <ide> int size = _size; <ide> do { <ide> index = byteComp(buffer, index+1, ANNEXB_START_MARK, ANNEXB_START_MARK.length); <ide> if (index >= 0) { <del> Log.i(tag, "found ANNEXB: start index=" + index); <add> Log.i(tag, prefix + " found ANNEXB: start index=" + index); <ide> } <ide> } while (index >= 0); <ide> } <del> Log.i(tag, "dump:" + sb.toString()); <add> Log.i(tag, prefix + sb); <ide> } <ide> <ide> /**
Java
apache-2.0
error: pathspec 'dependencymanager/test/src/test/java/org/apache/felix/dm/test/ServiceTrackerTest.java' did not match any file(s) known to git
69c2e9477b87b0d62838a21a86d916ebbd93761c
1
apache/felix-dev,apache/felix-dev,apache/felix-dev,apache/felix-dev
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.dm.test; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.CoreOptions.options; import static org.ops4j.pax.exam.CoreOptions.provision; import java.util.Properties; import junit.framework.Assert; import org.apache.felix.dm.DependencyManager; import org.apache.felix.dm.ServiceUtil; import org.apache.felix.dm.tracker.ServiceTracker; import org.junit.Test; import org.junit.runner.RunWith; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.junit.Configuration; import org.ops4j.pax.exam.junit.JUnit4TestRunner; import org.osgi.framework.BundleContext; import org.osgi.framework.Constants; import org.osgi.framework.ServiceRegistration; @RunWith(JUnit4TestRunner.class) public class ServiceTrackerTest extends Base { @Configuration public static Option[] configuration() { return options( provision( mavenBundle().groupId("org.osgi").artifactId("org.osgi.compendium").version("4.1.0"), mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.dependencymanager").versionAsInProject() ) ); } @Test public void testPlainServiceTracker(BundleContext context) { ServiceTracker st = new ServiceTracker(context, ServiceInterface.class.getName(), null); st.open(); ServiceRegistration sr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), null); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); sr.unregister(); Assert.assertNull("There should be no service that matches the tracker", st.getServices()); st.close(); } @Test public void testAspectServiceTracker(BundleContext context) { ServiceTracker st = new ServiceTracker(context, ServiceInterface.class.getName(), null); st.open(); ServiceRegistration sr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), null); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); final long sid = ServiceUtil.getServiceId(sr.getReference()); ServiceRegistration asr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 10); }}); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); Assert.assertEquals("Service ranking should be 10", Integer.valueOf(10), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); ServiceRegistration asr2 = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 20); }}); Assert.assertEquals("There should be one service that matches the 
tracker", 1, st.getServices().length); Assert.assertEquals("Service ranking should be 20", Integer.valueOf(20), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); asr.unregister(); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); Assert.assertEquals("Service ranking should be 20", Integer.valueOf(20), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); asr2.unregister(); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); Assert.assertNull("Service should not have a ranking", st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); sr.unregister(); Assert.assertNull("There should be no service that matches the tracker", st.getServices()); st.close(); } @Test public void testExistingAspectServiceTracker(BundleContext context) { ServiceTracker st = new ServiceTracker(context, ServiceInterface.class.getName(), null); ServiceRegistration sr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), null); final long sid = ServiceUtil.getServiceId(sr.getReference()); ServiceRegistration asr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 10); }}); ServiceRegistration asr2 = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 20); }}); st.open(); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); Assert.assertEquals("Service ranking should be 20", Integer.valueOf(20), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); asr2.unregister(); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); Assert.assertEquals("Service ranking should be 10", Integer.valueOf(10), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); asr.unregister(); Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); Assert.assertNull("Service should not have a ranking", st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); sr.unregister(); Assert.assertNull("There should be no service that matches the tracker", st.getServices()); st.close(); } static interface ServiceInterface { public void invoke(); } static class ServiceProvider implements ServiceInterface { public void invoke() { } } }
dependencymanager/test/src/test/java/org/apache/felix/dm/test/ServiceTrackerTest.java
Tests the extensions that have been made to the standard ServiceTracker (mostly related to dealing with aspects). git-svn-id: e057f57e93a604d3b43d277ae69bde5ebf332112@1022569 13f79535-47bb-0310-9956-ffa450edef68
dependencymanager/test/src/test/java/org/apache/felix/dm/test/ServiceTrackerTest.java
Tests the extensions that have been made to the standard ServiceTracker (mostly related to dealing with aspects).
<ide><path>ependencymanager/test/src/test/java/org/apache/felix/dm/test/ServiceTrackerTest.java <add>/* <add> * Licensed to the Apache Software Foundation (ASF) under one <add> * or more contributor license agreements. See the NOTICE file <add> * distributed with this work for additional information <add> * regarding copyright ownership. The ASF licenses this file <add> * to you under the Apache License, Version 2.0 (the <add> * "License"); you may not use this file except in compliance <add> * with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, <add> * software distributed under the License is distributed on an <add> * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY <add> * KIND, either express or implied. See the License for the <add> * specific language governing permissions and limitations <add> * under the License. <add> */ <add>package org.apache.felix.dm.test; <add> <add>import static org.ops4j.pax.exam.CoreOptions.mavenBundle; <add>import static org.ops4j.pax.exam.CoreOptions.options; <add>import static org.ops4j.pax.exam.CoreOptions.provision; <add> <add>import java.util.Properties; <add> <add>import junit.framework.Assert; <add> <add>import org.apache.felix.dm.DependencyManager; <add>import org.apache.felix.dm.ServiceUtil; <add>import org.apache.felix.dm.tracker.ServiceTracker; <add>import org.junit.Test; <add>import org.junit.runner.RunWith; <add>import org.ops4j.pax.exam.Option; <add>import org.ops4j.pax.exam.junit.Configuration; <add>import org.ops4j.pax.exam.junit.JUnit4TestRunner; <add>import org.osgi.framework.BundleContext; <add>import org.osgi.framework.Constants; <add>import org.osgi.framework.ServiceRegistration; <add> <add>@RunWith(JUnit4TestRunner.class) <add>public class ServiceTrackerTest extends Base { <add> @Configuration <add> public static Option[] configuration() { <add> return options( <add> provision( <add> mavenBundle().groupId("org.osgi").artifactId("org.osgi.compendium").version("4.1.0"), <add> mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.dependencymanager").versionAsInProject() <add> ) <add> ); <add> } <add> <add> @Test <add> public void testPlainServiceTracker(BundleContext context) { <add> ServiceTracker st = new ServiceTracker(context, ServiceInterface.class.getName(), null); <add> st.open(); <add> ServiceRegistration sr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), null); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> sr.unregister(); <add> Assert.assertNull("There should be no service that matches the tracker", st.getServices()); <add> st.close(); <add> } <add> <add> @Test <add> public void testAspectServiceTracker(BundleContext context) { <add> ServiceTracker st = new ServiceTracker(context, ServiceInterface.class.getName(), null); <add> st.open(); <add> <add> ServiceRegistration sr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), null); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> <add> final long sid = ServiceUtil.getServiceId(sr.getReference()); <add> ServiceRegistration asr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), <add> new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 10); }}); <add> 
Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> Assert.assertEquals("Service ranking should be 10", Integer.valueOf(10), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); <add> <add> ServiceRegistration asr2 = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), <add> new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 20); }}); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> Assert.assertEquals("Service ranking should be 20", Integer.valueOf(20), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); <add> <add> asr.unregister(); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> Assert.assertEquals("Service ranking should be 20", Integer.valueOf(20), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); <add> <add> asr2.unregister(); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> Assert.assertNull("Service should not have a ranking", st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); <add> <add> sr.unregister(); <add> Assert.assertNull("There should be no service that matches the tracker", st.getServices()); <add> <add> st.close(); <add> } <add> <add> @Test <add> public void testExistingAspectServiceTracker(BundleContext context) { <add> ServiceTracker st = new ServiceTracker(context, ServiceInterface.class.getName(), null); <add> ServiceRegistration sr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), null); <add> final long sid = ServiceUtil.getServiceId(sr.getReference()); <add> ServiceRegistration asr = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), <add> new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 10); }}); <add> ServiceRegistration asr2 = context.registerService(ServiceInterface.class.getName(), new ServiceProvider(), <add> new Properties() {{ put(DependencyManager.ASPECT, sid); put(Constants.SERVICE_RANKING, 20); }}); <add> <add> st.open(); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> Assert.assertEquals("Service ranking should be 20", Integer.valueOf(20), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); <add> <add> asr2.unregister(); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> Assert.assertEquals("Service ranking should be 10", Integer.valueOf(10), (Integer) st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); <add> <add> asr.unregister(); <add> Assert.assertEquals("There should be one service that matches the tracker", 1, st.getServices().length); <add> Assert.assertNull("Service should not have a ranking", st.getServiceReference().getProperty(Constants.SERVICE_RANKING)); <add> <add> sr.unregister(); <add> Assert.assertNull("There should be no service that matches the tracker", st.getServices()); <add> <add> st.close(); <add> } <add> <add> static interface ServiceInterface { <add> public void invoke(); <add> } <add> <add> static class ServiceProvider implements ServiceInterface { <add> public void invoke() { <add> } <add> } <add>}
Java
mit
0ce46d57f93bb95dde8ae4913bfbe0188e3287fb
0
SimpleServer/SimpleServer,iBotPeaches/SimpleServer,iBotPeaches/SimpleServer,SimpleServer/SimpleServer
/* * Copyright (c) 2010 SimpleServer authors (see CONTRIBUTORS) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package simpleserver.config; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import simpleserver.Player; public class ChestList extends AsciiConfig { private final ConcurrentMap<Coordinate, Chest> locations; public ChestList() { super("chest-list.txt"); locations = new ConcurrentHashMap<Coordinate, Chest>(); } private boolean giveLock(String player, int x, byte y, int z, boolean isGroupLock) { Coordinate coordinate = new Coordinate(x, y, z); if (locations.containsKey(coordinate)) { return false; } Chest chest = new Chest(player, coordinate, isGroupLock); locations.put(coordinate, chest); save(); return true; } public synchronized boolean giveLock(Player player, int x, byte y, int z, boolean isGroupLock) { return giveLock(player.getName(), x, y, z, isGroupLock); } public void addOpenChest(int x, byte y, int z) { giveLock("-", x, y, z, false); } public boolean hasLock(String name) { return false; } public boolean hasLock(int x, byte y, int z) { if(locations.containsKey(new Coordinate(x, y, z))) { return !locations.get(new Coordinate(x, y, z)).isOpen(); } return false; } public Chest adjacentChest(int x, byte y, int z) { Chest chest = chestAt(new Coordinate(x + 1, y, z)); if(chest == null) chest = chestAt(new Coordinate(x - 1, y, z)); if(chest == null) chest = chestAt(new Coordinate(x, y, z + 1)); if(chest == null) chest = chestAt(new Coordinate(x, y, z - 1)); return chest; } private Chest chestAt(Coordinate coord) { if(locations.containsKey(coord)) { return locations.get(coord); } return null; } public boolean ownsLock(Player player, int x, byte y, int z) { Coordinate coordinate = new Coordinate(x, y, z); Chest chest = locations.get(coordinate); return (chest != null) && (chest.owner.toLowerCase().equals(player.getName().toLowerCase()) || chest.isOpen()); } public synchronized void releaseLock(Player player) { for(Chest chest : locations.values()) { if(chest.owner.equals(player.getName())) chest.unlock(); } save(); } public synchronized void releaseLock(int x, byte y, int z) { locations.remove(new Coordinate(x, y, z)); save(); } @Override public void load() { locations.clear(); super.load(); } @Override protected void loadLine(String line) { line = line.trim(); if (line.length() == 0) { return; } String[] tokens = line.split(","); if (tokens.length > 4) { int x; byte y; int z; try { x = Integer.parseInt(tokens[2]); y = Byte.parseByte(tokens[3]); z = 
Integer.parseInt(tokens[4]); } catch (NumberFormatException e) { System.out.println("Skipping malformed chest metadata: " + line); return; } giveLock(tokens[0], x, y, z, Boolean.parseBoolean(tokens[1])); } } @Override protected String saveString() { StringBuilder output = new StringBuilder(); for (Chest chest : locations.values().toArray(new Chest[0])) { output.append(chest.owner); output.append(","); output.append(chest.isGroup); output.append(","); output.append(chest.coordinate.x); output.append(","); output.append(chest.coordinate.y); output.append(","); output.append(chest.coordinate.z); output.append("\n"); } return output.toString(); } public static final class Coordinate { private final int x; private final byte y; private final int z; private final int hashCode; private Coordinate(int x, byte y, int z) { this.x = x; this.y = y; this.z = z; int code = 17; code = 37 * code + x; code = 37 * code + y; code = 37 * code + z; hashCode = code; } public boolean equals(Coordinate coordinate) { return (coordinate.x == x) && (coordinate.y == y) && (coordinate.z == z); } @Override public boolean equals(Object object) { return (object instanceof Coordinate) && equals((Coordinate) object); } @Override public int hashCode() { return hashCode; } } public static final class Chest { private String owner; private final Coordinate coordinate; private final boolean isGroup; private Chest(String player, Coordinate coordinate, boolean isGroup) { this.owner = player; this.coordinate = coordinate; this.isGroup = isGroup; } public void unlock() { owner = "-"; } public boolean isOpen() { return owner.equals("-"); } public String owner() { return owner; } public void lock(Player player) { this.owner = player.getName(); } } }
src/simpleserver/config/ChestList.java
/* * Copyright (c) 2010 SimpleServer authors (see CONTRIBUTORS) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package simpleserver.config; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import simpleserver.Player; public class ChestList extends AsciiConfig { private final ConcurrentMap<Coordinate, Chest> locations; public ChestList() { super("chest-list.txt"); locations = new ConcurrentHashMap<Coordinate, Chest>(); } private boolean giveLock(String player, int x, byte y, int z, boolean isGroupLock) { Coordinate coordinate = new Coordinate(x, y, z); if (locations.containsKey(coordinate)) { return false; } Chest chest = new Chest(player, coordinate, isGroupLock); locations.put(coordinate, chest); save(); return true; } public synchronized boolean giveLock(Player player, int x, byte y, int z, boolean isGroupLock) { return giveLock(player.getName(), x, y, z, isGroupLock); } public void addOpenChest(int x, byte y, int z) { giveLock("-", x, y, z, false); } public boolean hasLock(String name) { return false; } public boolean hasLock(int x, byte y, int z) { if(locations.containsKey(new Coordinate(x, y, z))) { return !locations.get(new Coordinate(x, y, z)).isOpen(); } return false; } public Chest adjacentChest(int x, byte y, int z) { Chest chest = chestAt(new Coordinate(x + 1, y, z)); if(chest == null) chest = chestAt(new Coordinate(x - 1, y, z)); if(chest == null) chest = chestAt(new Coordinate(x, y, z + 1)); if(chest == null) chest = chestAt(new Coordinate(x, y, z - 1)); return chest; } private Chest chestAt(Coordinate coord) { if(locations.containsKey(coord)) { return locations.get(coord); } return null; } public boolean ownsLock(Player player, int x, byte y, int z) { Coordinate coordinate = new Coordinate(x, y, z); Chest chest = locations.get(coordinate); return (chest != null) && (chest.owner == player.getName() || chest.isOpen()); } public synchronized void releaseLock(Player player) { for(Chest chest : locations.values()) { if(chest.owner.equals(player.getName())) chest.unlock(); } save(); } public synchronized void releaseLock(int x, byte y, int z) { locations.remove(new Coordinate(x, y, z)); save(); } @Override public void load() { locations.clear(); super.load(); } @Override protected void loadLine(String line) { line = line.trim(); if (line.length() == 0) { return; } String[] tokens = line.split(","); if (tokens.length > 4) { int x; byte y; int z; try { x = Integer.parseInt(tokens[2]); y = Byte.parseByte(tokens[3]); z = Integer.parseInt(tokens[4]); } 
catch (NumberFormatException e) { System.out.println("Skipping malformed chest metadata: " + line); return; } giveLock(tokens[0], x, y, z, Boolean.parseBoolean(tokens[1])); } } @Override protected String saveString() { StringBuilder output = new StringBuilder(); for (Chest chest : locations.values().toArray(new Chest[0])) { output.append(chest.owner); output.append(","); output.append(chest.isGroup); output.append(","); output.append(chest.coordinate.x); output.append(","); output.append(chest.coordinate.y); output.append(","); output.append(chest.coordinate.z); output.append("\n"); } return output.toString(); } public static final class Coordinate { private final int x; private final byte y; private final int z; private final int hashCode; private Coordinate(int x, byte y, int z) { this.x = x; this.y = y; this.z = z; int code = 17; code = 37 * code + x; code = 37 * code + y; code = 37 * code + z; hashCode = code; } public boolean equals(Coordinate coordinate) { return (coordinate.x == x) && (coordinate.y == y) && (coordinate.z == z); } @Override public boolean equals(Object object) { return (object instanceof Coordinate) && equals((Coordinate) object); } @Override public int hashCode() { return hashCode; } } public static final class Chest { private String owner; private final Coordinate coordinate; private final boolean isGroup; private Chest(String player, Coordinate coordinate, boolean isGroup) { this.owner = player; this.coordinate = coordinate; this.isGroup = isGroup; } public void unlock() { owner = "-"; } public boolean isOpen() { return owner.equals("-"); } public String owner() { return owner; } public void lock(Player player) { this.owner = player.getName(); } } }
Fixed a bug preventing users from opening their chests
src/simpleserver/config/ChestList.java
Fixed a bug preventing users from opening their chests
<ide><path>rc/simpleserver/config/ChestList.java <ide> public boolean ownsLock(Player player, int x, byte y, int z) { <ide> Coordinate coordinate = new Coordinate(x, y, z); <ide> Chest chest = locations.get(coordinate); <del> return (chest != null) && (chest.owner == player.getName() || chest.isOpen()); <add> return (chest != null) && (chest.owner.toLowerCase().equals(player.getName().toLowerCase()) || chest.isOpen()); <ide> } <ide> <ide> public synchronized void releaseLock(Player player) {
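The one-line change in this diff replaces a reference comparison of the owner name (==) with a case-insensitive equals() check, which is what fixes the "cannot open own chest" bug. A minimal standalone sketch of the underlying Java pitfall; the names are illustrative and not taken from the project:

public class StringCompareExample {
    public static void main(String[] args) {
        String owner = "Steve";
        // Names read back from storage or the network are distinct String instances,
        // so == compares object identity and fails even for identical text.
        String playerName = new String("steve");

        System.out.println(owner == playerName);        // false: different objects
        System.out.println(owner.equals(playerName));   // false: case differs
        // The fixed check in ChestList.ownsLock() lower-cases both sides first.
        System.out.println(owner.toLowerCase().equals(playerName.toLowerCase())); // true
    }
}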
Java
mit
3100997b354403d5d7236ed79c9beb590a3f0533
0
firmsoil/expense-tracker,vamsivadrevu/expense-tracker,vinsol/expense-tracker,anshuPurohit/expense-tracker
package com.vinsol.expensetracker; import java.io.File; import java.util.ArrayList; import java.util.Calendar; import java.util.HashMap; import java.util.List; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.Toast; public class ExpenseSubListing extends Activity implements OnItemClickListener{ private ListView mListView; private ConvertCursorToListString mConvertCursorToListString; private List<HashMap<String, String>> mDataDateList; private SeparatedListAdapter mSeparatedListAdapter; private List<HashMap<String, String>> mSubList; private Long highlightID = null; private String idList; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.expense_listing); idList = getIntent().getStringExtra("idList"); mConvertCursorToListString = new ConvertCursorToListString(this); } @SuppressWarnings("unchecked") @Override protected void onResume() { mSeparatedListAdapter = new SeparatedListAdapter(this); mDataDateList = mConvertCursorToListString.getDateListString(idList); mSubList = mConvertCursorToListString.getListStringParticularDate(idList); Bundle intentExtras = getIntent().getExtras(); if(intentExtras != null){ if(intentExtras.containsKey("toHighLight")){ highlightID = intentExtras.getLong("toHighLight"); } } int j = 0; @SuppressWarnings("rawtypes") List listString = new ArrayList<List<List<String>>>(); for (int i = 0; i < mDataDateList.size(); i++) { List<List<String>> mList = new ArrayList<List<String>>(); String date = mDataDateList.get(i).get(DatabaseAdapter.KEY_DATE_TIME); Log.v("asd", mDataDateList.get(i).get(DatabaseAdapter.KEY_DATE_TIME) +" "+mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME)); while (j < mSubList.size()&& date.equals(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME))) { List<String> _templist = new ArrayList<String>(); Calendar mCalendar = Calendar.getInstance(); mCalendar.setFirstDayOfWeek(Calendar.MONDAY); mCalendar.setTimeInMillis(Long.parseLong(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis"))); // DisplayDate mDisplayDate = new DisplayDate(mCalendar); _templist = getList(j); Log.v("_templist", _templist.toString()); mList.add(_templist); j++; if (j < mSubList.size()) { } else { break; } } listString.add(mList); Log.v("listString", listString.toString()); @SuppressWarnings("rawtypes") List tt = (List) listString.get(i); mSeparatedListAdapter.addSection(i + "", new ArrayAdapter<String>( this, R.layout.expense_listing, tt), mDataDateList); } mListView = (ListView) findViewById(R.id.expense_listing_listview); mListView.setOnItemClickListener(this); mListView.setAdapter(mSeparatedListAdapter); if (mDataDateList.size() < 1) { mListView.setVisibility(View.GONE); RelativeLayout mRelativeLayout = (RelativeLayout) findViewById(R.id.expense_listing_listview_no_item); mRelativeLayout.setVisibility(View.VISIBLE); Button expense_listing_listview_no_item_button = (Button) findViewById(R.id.expense_listing_listview_no_item_button); expense_listing_listview_no_item_button.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { finish(); } }); } mSeparatedListAdapter.notifyDataSetChanged(); // TODO 
Auto-generated method stub super.onResume(); } @Override public void onItemClick(AdapterView<?> adapter, View v, int position, long arg3) { // TODO Auto-generated method stub @SuppressWarnings("unchecked") ArrayList<String> mTempClickedList = (ArrayList<String>) adapter.getItemAtPosition(position); String _id = mTempClickedList.get(0); if (!_id.contains(",")) { Bundle bundle = new Bundle(); bundle.putStringArrayList("mDisplayList", mTempClickedList); if (mTempClickedList.get(5).equals(getString(R.string.camera))) { if (android.os.Environment.getExternalStorageState().equals(android.os.Environment.MEDIA_MOUNTED)) { if (!isEntryComplete(mTempClickedList)) { Intent intentCamera = new Intent(this,CameraActivity.class); intentCamera.putExtra("cameraBundle", bundle); startActivity(intentCamera); } else { Intent intentCamera = new Intent(this,ShowCameraActivity.class); intentCamera.putExtra("cameraShowBundle", bundle); startActivity(intentCamera); // TODO } } else { Toast.makeText(this, "sdcard not available",Toast.LENGTH_SHORT).show(); } } else if (mTempClickedList.get(5).equals(getString(R.string.text))) { if (!isEntryComplete(mTempClickedList)) { Intent intentTextEntry = new Intent(this, TextEntry.class); intentTextEntry.putExtra("textEntryBundle", bundle); startActivity(intentTextEntry); } else { Intent intentTextShow = new Intent(this,ShowTextActivity.class); intentTextShow.putExtra("textShowBundle", bundle); startActivity(intentTextShow); // TODO } } else if (mTempClickedList.get(5).equals(getString(R.string.voice))) { if (android.os.Environment.getExternalStorageState().equals(android.os.Environment.MEDIA_MOUNTED)) { if (!isEntryComplete(mTempClickedList)) { Intent intentVoice = new Intent(this, Voice.class); intentVoice.putExtra("voiceBundle", bundle); startActivity(intentVoice); } else { Intent intentVoiceShow = new Intent(this,ShowVoiceActivity.class); intentVoiceShow.putExtra("voiceShowBundle", bundle); startActivity(intentVoiceShow); // TODO } } else { Toast.makeText(this, "sdcard not available", Toast.LENGTH_SHORT).show(); } } else if (mTempClickedList.get(5).equals(getString(R.string.unknown))) { Intent intentMain = new Intent(this, MainActivity.class); intentMain.putExtra("mainBundle", bundle); intentMain.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); startActivity(intentMain); //TODO if unknown entry } } else { } } private List<String> getList(int j) { List<String> _templist = new ArrayList<String>(); _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_ID)); if (mSubList.get(j).get(DatabaseAdapter.KEY_TAG) != null&& !mSubList.get(j).get(DatabaseAdapter.KEY_TAG).equals("")) { _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_TAG)); } else { if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.camera))) { if(isEntryComplete(mSubList.get(j))){ _templist.add(getString(R.string.finished_cameraentry)); } else { _templist.add(getString(R.string.unfinished_cameraentry)); } } else if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.voice))) { if(isEntryComplete(mSubList.get(j))){ _templist.add(getString(R.string.finished_voiceentry)); } else { _templist.add(getString(R.string.unfinished_voiceentry)); } } else if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.text))) { if(isEntryComplete(mSubList.get(j))){ _templist.add(getString(R.string.finished_textentry)); } else { _templist.add(getString(R.string.unfinished_textentry)); } } else if 
(mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.favorite_entry))) { _templist.add("Unfinished Favorite Entry"); } else if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.unknown))) { _templist.add(getString(R.string.unknown_entry)); } } if (mSubList.get(j).get(DatabaseAdapter.KEY_AMOUNT) != null&& !mSubList.get(j).get(DatabaseAdapter.KEY_AMOUNT).equals("")) { String totalAmountString = mSubList.get(j).get(DatabaseAdapter.KEY_AMOUNT); if (totalAmountString.contains("?")&& totalAmountString.length() > 1) { String temp = totalAmountString.substring(0,totalAmountString.length() - 2); Double mAmount = Double.parseDouble(temp); mAmount = (double) ((int) ((mAmount + 0.005) * 100.0) / 100.0); if (mAmount.toString().contains(".")) { if (mAmount.toString().charAt( mAmount.toString().length() - 3) == '.') { totalAmountString = mAmount.toString() + " ?"; } else if (mAmount.toString().charAt( mAmount.toString().length() - 2) == '.') { totalAmountString = mAmount.toString() + "0 ?"; } } else { totalAmountString = mAmount.toString() + ".00 ?"; } } else if (!totalAmountString.contains("?")) { String temp = totalAmountString.substring(0, totalAmountString.length()); Double mAmount = Double.parseDouble(temp); mAmount = (double) ((int) ((mAmount + 0.005) * 100.0) / 100.0); if (mAmount.toString().contains(".")) { if (mAmount.toString().charAt( mAmount.toString().length() - 3) == '.') { totalAmountString = mAmount.toString() + ""; } else if (mAmount.toString().charAt( mAmount.toString().length() - 2) == '.') { totalAmountString = mAmount.toString() + "0"; } } else { totalAmountString = mAmount.toString() + ".00"; } } _templist.add(totalAmountString); } else { _templist.add("?"); } // ///// ******* Adding location date data to list ******* ////////// if (mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis") != null && !mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis").equals("") && mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION) != null && !mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION).equals("")) { _templist.add(getLocationDate(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis"), mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION))); } else if (mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis") != null && !mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis").equals("") && (mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION) == null || mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION).equals(""))) { _templist.add(getLocationDateDate(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis"))); } else if ((mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis") == null || mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis").equals(""))&& mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION) != null&& !mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION).equals("")) { _templist.add("Unknown time at "+ mSubList.get(j).get(mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION))); } else { _templist.add("Unknown Location and Date"); } if (mSubList.get(j).get(DatabaseAdapter.KEY_FAVORITE) != null && !mSubList.get(j).get(DatabaseAdapter.KEY_FAVORITE).equals("")) { _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_FAVORITE)); } else { _templist.add(""); } if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE) != null && !mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals("")) { _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_TYPE)); } else { _templist.add(""); } 
_templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis")); _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION)); _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION)); if(highlightID != null){ if(mSubList.get(j).get(DatabaseAdapter.KEY_ID).equals(Long.toString(highlightID))){ _templist.add(Long.toString(highlightID)); } } return _templist; } private boolean isEntryComplete(HashMap<String, String> hashMap) { if (hashMap.get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.camera))) { if(hashMap.get(DatabaseAdapter.KEY_AMOUNT) != null){ if (hashMap.get(DatabaseAdapter.KEY_AMOUNT).contains("?")) { return false; } } File mFileSmall = new File("/sdcard/ExpenseTracker/" + hashMap.get(DatabaseAdapter.KEY_ID) + "_small.jpg"); File mFile = new File("/sdcard/ExpenseTracker/" + hashMap.get(DatabaseAdapter.KEY_ID) + ".jpg"); File mFileThumbnail = new File("/sdcard/ExpenseTracker/" + hashMap.get(DatabaseAdapter.KEY_ID) + "_thumbnail.jpg"); if (mFile.canRead() && mFileSmall.canRead() && mFileThumbnail.canRead()) { return true; } else { return false; } } else if (hashMap.get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.voice))) { if(hashMap.get(DatabaseAdapter.KEY_AMOUNT) != null){ if (hashMap.get(DatabaseAdapter.KEY_AMOUNT).contains("?")) { return false; } } File mFile = new File("/sdcard/ExpenseTracker/Audio/" + hashMap.get(DatabaseAdapter.KEY_ID) + ".amr"); if (mFile.canRead()) { return true; } else { return false; } } else if (hashMap.get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.text))) { if(hashMap.get(DatabaseAdapter.KEY_AMOUNT) != null){ if (hashMap.get(DatabaseAdapter.KEY_AMOUNT).contains("?")) { return false; } } if(hashMap.get(DatabaseAdapter.KEY_TAG) != null){ if (hashMap.get(DatabaseAdapter.KEY_TAG).equals("")) { return false; } else { return true; } } } return false; } private String getLocationDateDate(String dateInMillis) { Calendar tempCalendar = Calendar.getInstance(); tempCalendar.setFirstDayOfWeek(Calendar.MONDAY); tempCalendar.setTimeInMillis(Long.parseLong(dateInMillis)); int hour = tempCalendar.get(Calendar.HOUR); String minute = Integer.toString(tempCalendar.get(Calendar.MINUTE)); if (minute.length() == 1) { minute = "0" + minute; } if (hour == 0) { hour = 12; } if (tempCalendar.get(Calendar.MINUTE) != 0){ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + ":" + minute + " " + "PM"+ " at Unknown location"; } if (tempCalendar.get(Calendar.AM_PM) == 0){ return hour + ":" + minute + " " + "AM" + " at Unknown location"; } } else{ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + "" + " " + "PM" + " at Unknown location"; } if (tempCalendar.get(Calendar.AM_PM) == 0){ return hour + "" + " " + "AM" + " at Unknown location"; } } return null; } private String getLocationDate(String dateInMillis, String locationData) { Calendar tempCalendar = Calendar.getInstance(); tempCalendar.setFirstDayOfWeek(Calendar.MONDAY); tempCalendar.setTimeInMillis(Long.parseLong(dateInMillis)); int hour = tempCalendar.get(Calendar.HOUR); String minute = Integer.toString(tempCalendar.get(Calendar.MINUTE)); if (minute.length() == 1) { minute = "0" + minute; } if (hour == 0) { hour = 12; } if (tempCalendar.get(Calendar.MINUTE) != 0){ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + ":" + minute + " " + "PM" + " at " + locationData; } if (tempCalendar.get(Calendar.AM_PM) == 0){ return hour + ":" + minute + " " + "AM" + " at " + locationData; } } else{ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + "" + " " + "PM" + " at " 
+ locationData; } if (tempCalendar.get(Calendar.AM_PM) == 0){ return hour + ":" + " " + "AM" + " at " + locationData; } } return null; } private boolean isEntryComplete(ArrayList<String> toCheckList) { if (toCheckList.get(5).equals(getString(R.string.camera))) { if(toCheckList.get(2) != null){ if (toCheckList.get(2).contains("?")) { return false; } } File mFileSmall = new File("/sdcard/ExpenseTracker/" + toCheckList.get(0) + "_small.jpg"); File mFile = new File("/sdcard/ExpenseTracker/" + toCheckList.get(0) + ".jpg"); File mFileThumbnail = new File("/sdcard/ExpenseTracker/" + toCheckList.get(0) + "_thumbnail.jpg"); if (mFile.canRead() && mFileSmall.canRead() && mFileThumbnail.canRead()) { return true; } else { return false; } } else if (toCheckList.get(5).equals(getString(R.string.voice))) { if(toCheckList.get(2) != null){ if (toCheckList.get(2).contains("?")) { return false; } } File mFile = new File("/sdcard/ExpenseTracker/Audio/" + toCheckList.get(0) + ".amr"); if (mFile.canRead()) { return true; } else { return false; } } else if (toCheckList.get(5).equals(getString(R.string.text))) { if(toCheckList.get(2) != null){ if (toCheckList.get(2).contains("?")) { return false; } } if(toCheckList.get(1) != null){ if (toCheckList.get(1).equals(getString(R.string.unfinished_textentry)) || toCheckList.get(1).equals(getString(R.string.finished_textentry))) { return false; } else { return true; } } } return false; } }
src/com/vinsol/expensetracker/ExpenseSubListing.java
package com.vinsol.expensetracker; import java.io.File; import java.util.ArrayList; import java.util.Calendar; import java.util.HashMap; import java.util.List; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.ImageButton; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.Toast; public class ExpenseSubListing extends Activity implements OnItemClickListener{ private ListView mListView; private ConvertCursorToListString mConvertCursorToListString; private List<HashMap<String, String>> mDataDateList; private SeparatedListAdapter mSeparatedListAdapter; private List<HashMap<String, String>> mSubList; private Long highlightID = null; private String idList; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.expense_listing); idList = getIntent().getStringExtra("idList"); mConvertCursorToListString = new ConvertCursorToListString(this); } @SuppressWarnings("unchecked") @Override protected void onResume() { mSeparatedListAdapter = new SeparatedListAdapter(this); mDataDateList = mConvertCursorToListString.getDateListString(idList); mSubList = mConvertCursorToListString.getListStringParticularDate(idList); Bundle intentExtras = getIntent().getExtras(); if(intentExtras != null){ if(intentExtras.containsKey("toHighLight")){ highlightID = intentExtras.getLong("toHighLight"); } } int j = 0; @SuppressWarnings("rawtypes") List listString = new ArrayList<List<List<String>>>(); for (int i = 0; i < mDataDateList.size(); i++) { List<List<String>> mList = new ArrayList<List<String>>(); String date = mDataDateList.get(i).get(DatabaseAdapter.KEY_DATE_TIME); Log.v("asd", mDataDateList.get(i).get(DatabaseAdapter.KEY_DATE_TIME) +" "+mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME)); while (j < mSubList.size()&& date.equals(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME))) { List<String> _templist = new ArrayList<String>(); Calendar mCalendar = Calendar.getInstance(); mCalendar.setFirstDayOfWeek(Calendar.MONDAY); mCalendar.setTimeInMillis(Long.parseLong(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis"))); // DisplayDate mDisplayDate = new DisplayDate(mCalendar); _templist = getList(j); Log.v("_templist", _templist.toString()); mList.add(_templist); j++; if (j < mSubList.size()) { } else { break; } } listString.add(mList); Log.v("listString", listString.toString()); @SuppressWarnings("rawtypes") List tt = (List) listString.get(i); mSeparatedListAdapter.addSection(i + "", new ArrayAdapter<String>( this, R.layout.expense_listing, tt), mDataDateList); } mListView = (ListView) findViewById(R.id.expense_listing_listview); mListView.setOnItemClickListener(this); mListView.setAdapter(mSeparatedListAdapter); if (mDataDateList.size() < 1) { mListView.setVisibility(View.GONE); RelativeLayout mRelativeLayout = (RelativeLayout) findViewById(R.id.expense_listing_listview_no_item); mRelativeLayout.setVisibility(View.VISIBLE); ImageButton expense_listing_listview_no_item_button = (ImageButton) findViewById(R.id.expense_listing_listview_no_item_button); expense_listing_listview_no_item_button.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { finish(); } }); } 
mSeparatedListAdapter.notifyDataSetChanged(); // TODO Auto-generated method stub super.onResume(); } @Override public void onItemClick(AdapterView<?> adapter, View v, int position, long arg3) { // TODO Auto-generated method stub @SuppressWarnings("unchecked") ArrayList<String> mTempClickedList = (ArrayList<String>) adapter.getItemAtPosition(position); String _id = mTempClickedList.get(0); if (!_id.contains(",")) { Bundle bundle = new Bundle(); bundle.putStringArrayList("mDisplayList", mTempClickedList); if (mTempClickedList.get(5).equals(getString(R.string.camera))) { if (android.os.Environment.getExternalStorageState().equals(android.os.Environment.MEDIA_MOUNTED)) { if (!isEntryComplete(mTempClickedList)) { Intent intentCamera = new Intent(this,CameraActivity.class); intentCamera.putExtra("cameraBundle", bundle); startActivity(intentCamera); } else { Intent intentCamera = new Intent(this,ShowCameraActivity.class); intentCamera.putExtra("cameraShowBundle", bundle); startActivity(intentCamera); // TODO } } else { Toast.makeText(this, "sdcard not available",Toast.LENGTH_SHORT).show(); } } else if (mTempClickedList.get(5).equals(getString(R.string.text))) { if (!isEntryComplete(mTempClickedList)) { Intent intentTextEntry = new Intent(this, TextEntry.class); intentTextEntry.putExtra("textEntryBundle", bundle); startActivity(intentTextEntry); } else { Intent intentTextShow = new Intent(this,ShowTextActivity.class); intentTextShow.putExtra("textShowBundle", bundle); startActivity(intentTextShow); // TODO } } else if (mTempClickedList.get(5).equals(getString(R.string.voice))) { if (android.os.Environment.getExternalStorageState().equals(android.os.Environment.MEDIA_MOUNTED)) { if (!isEntryComplete(mTempClickedList)) { Intent intentVoice = new Intent(this, Voice.class); intentVoice.putExtra("voiceBundle", bundle); startActivity(intentVoice); } else { Intent intentVoiceShow = new Intent(this,ShowVoiceActivity.class); intentVoiceShow.putExtra("voiceShowBundle", bundle); startActivity(intentVoiceShow); // TODO } } else { Toast.makeText(this, "sdcard not available", Toast.LENGTH_SHORT).show(); } } else if (mTempClickedList.get(5).equals(getString(R.string.unknown))) { Intent intentMain = new Intent(this, MainActivity.class); intentMain.putExtra("mainBundle", bundle); intentMain.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); startActivity(intentMain); //TODO if unknown entry } } else { } } private List<String> getList(int j) { List<String> _templist = new ArrayList<String>(); _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_ID)); if (mSubList.get(j).get(DatabaseAdapter.KEY_TAG) != null&& !mSubList.get(j).get(DatabaseAdapter.KEY_TAG).equals("")) { _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_TAG)); } else { if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.camera))) { if(isEntryComplete(mSubList.get(j))){ _templist.add(getString(R.string.finished_cameraentry)); } else { _templist.add(getString(R.string.unfinished_cameraentry)); } } else if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.voice))) { if(isEntryComplete(mSubList.get(j))){ _templist.add(getString(R.string.finished_voiceentry)); } else { _templist.add(getString(R.string.unfinished_voiceentry)); } } else if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.text))) { if(isEntryComplete(mSubList.get(j))){ _templist.add(getString(R.string.finished_textentry)); } else { _templist.add(getString(R.string.unfinished_textentry)); } } else if 
(mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.favorite_entry))) { _templist.add("Unfinished Favorite Entry"); } else if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.unknown))) { _templist.add(getString(R.string.unknown_entry)); } } if (mSubList.get(j).get(DatabaseAdapter.KEY_AMOUNT) != null&& !mSubList.get(j).get(DatabaseAdapter.KEY_AMOUNT).equals("")) { String totalAmountString = mSubList.get(j).get(DatabaseAdapter.KEY_AMOUNT); if (totalAmountString.contains("?")&& totalAmountString.length() > 1) { String temp = totalAmountString.substring(0,totalAmountString.length() - 2); Double mAmount = Double.parseDouble(temp); mAmount = (double) ((int) ((mAmount + 0.005) * 100.0) / 100.0); if (mAmount.toString().contains(".")) { if (mAmount.toString().charAt( mAmount.toString().length() - 3) == '.') { totalAmountString = mAmount.toString() + " ?"; } else if (mAmount.toString().charAt( mAmount.toString().length() - 2) == '.') { totalAmountString = mAmount.toString() + "0 ?"; } } else { totalAmountString = mAmount.toString() + ".00 ?"; } } else if (!totalAmountString.contains("?")) { String temp = totalAmountString.substring(0, totalAmountString.length()); Double mAmount = Double.parseDouble(temp); mAmount = (double) ((int) ((mAmount + 0.005) * 100.0) / 100.0); if (mAmount.toString().contains(".")) { if (mAmount.toString().charAt( mAmount.toString().length() - 3) == '.') { totalAmountString = mAmount.toString() + ""; } else if (mAmount.toString().charAt( mAmount.toString().length() - 2) == '.') { totalAmountString = mAmount.toString() + "0"; } } else { totalAmountString = mAmount.toString() + ".00"; } } _templist.add(totalAmountString); } else { _templist.add("?"); } // ///// ******* Adding location date data to list ******* ////////// if (mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis") != null && !mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis").equals("") && mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION) != null && !mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION).equals("")) { _templist.add(getLocationDate(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis"), mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION))); } else if (mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis") != null && !mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis").equals("") && (mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION) == null || mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION).equals(""))) { _templist.add(getLocationDateDate(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis"))); } else if ((mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis") == null || mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis").equals(""))&& mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION) != null&& !mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION).equals("")) { _templist.add("Unknown time at "+ mSubList.get(j).get(mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION))); } else { _templist.add("Unknown Location and Date"); } if (mSubList.get(j).get(DatabaseAdapter.KEY_FAVORITE) != null && !mSubList.get(j).get(DatabaseAdapter.KEY_FAVORITE).equals("")) { _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_FAVORITE)); } else { _templist.add(""); } if (mSubList.get(j).get(DatabaseAdapter.KEY_TYPE) != null && !mSubList.get(j).get(DatabaseAdapter.KEY_TYPE).equals("")) { _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_TYPE)); } else { _templist.add(""); } 
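		// Row layout built so far: [0]=id, [1]=tag/label, [2]=amount, [3]=time/location text,
		// [4]=favorite flag, [5]=entry type. onItemClick() and isEntryComplete() read these
		// positions; the date-in-millis, location fields and optional highlight id are appended below.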
_templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_DATE_TIME + "Millis")); _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION)); _templist.add(mSubList.get(j).get(DatabaseAdapter.KEY_LOCATION)); if(highlightID != null){ if(mSubList.get(j).get(DatabaseAdapter.KEY_ID).equals(Long.toString(highlightID))){ _templist.add(Long.toString(highlightID)); } } return _templist; } private boolean isEntryComplete(HashMap<String, String> hashMap) { if (hashMap.get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.camera))) { if(hashMap.get(DatabaseAdapter.KEY_AMOUNT) != null){ if (hashMap.get(DatabaseAdapter.KEY_AMOUNT).contains("?")) { return false; } } File mFileSmall = new File("/sdcard/ExpenseTracker/" + hashMap.get(DatabaseAdapter.KEY_ID) + "_small.jpg"); File mFile = new File("/sdcard/ExpenseTracker/" + hashMap.get(DatabaseAdapter.KEY_ID) + ".jpg"); File mFileThumbnail = new File("/sdcard/ExpenseTracker/" + hashMap.get(DatabaseAdapter.KEY_ID) + "_thumbnail.jpg"); if (mFile.canRead() && mFileSmall.canRead() && mFileThumbnail.canRead()) { return true; } else { return false; } } else if (hashMap.get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.voice))) { if(hashMap.get(DatabaseAdapter.KEY_AMOUNT) != null){ if (hashMap.get(DatabaseAdapter.KEY_AMOUNT).contains("?")) { return false; } } File mFile = new File("/sdcard/ExpenseTracker/Audio/" + hashMap.get(DatabaseAdapter.KEY_ID) + ".amr"); if (mFile.canRead()) { return true; } else { return false; } } else if (hashMap.get(DatabaseAdapter.KEY_TYPE).equals(getString(R.string.text))) { if(hashMap.get(DatabaseAdapter.KEY_AMOUNT) != null){ if (hashMap.get(DatabaseAdapter.KEY_AMOUNT).contains("?")) { return false; } } if(hashMap.get(DatabaseAdapter.KEY_TAG) != null){ if (hashMap.get(DatabaseAdapter.KEY_TAG).equals("")) { return false; } else { return true; } } } return false; } private String getLocationDateDate(String dateInMillis) { Calendar tempCalendar = Calendar.getInstance(); tempCalendar.setFirstDayOfWeek(Calendar.MONDAY); tempCalendar.setTimeInMillis(Long.parseLong(dateInMillis)); int hour = tempCalendar.get(Calendar.HOUR); String minute = Integer.toString(tempCalendar.get(Calendar.MINUTE)); if (minute.length() == 1) { minute = "0" + minute; } if (hour == 0) { hour = 12; } if (tempCalendar.get(Calendar.MINUTE) != 0){ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + ":" + minute + " " + "PM"+ " at Unknown location"; } if (tempCalendar.get(Calendar.AM_PM) == 0){ return hour + ":" + minute + " " + "AM" + " at Unknown location"; } } else{ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + "" + " " + "PM" + " at Unknown location"; } if (tempCalendar.get(Calendar.AM_PM) == 0){ return hour + "" + " " + "AM" + " at Unknown location"; } } return null; } private String getLocationDate(String dateInMillis, String locationData) { Calendar tempCalendar = Calendar.getInstance(); tempCalendar.setFirstDayOfWeek(Calendar.MONDAY); tempCalendar.setTimeInMillis(Long.parseLong(dateInMillis)); int hour = tempCalendar.get(Calendar.HOUR); String minute = Integer.toString(tempCalendar.get(Calendar.MINUTE)); if (minute.length() == 1) { minute = "0" + minute; } if (hour == 0) { hour = 12; } if (tempCalendar.get(Calendar.MINUTE) != 0){ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + ":" + minute + " " + "PM" + " at " + locationData; } if (tempCalendar.get(Calendar.AM_PM) == 0){ return hour + ":" + minute + " " + "AM" + " at " + locationData; } } else{ if (tempCalendar.get(Calendar.AM_PM) == 1){ return hour + "" + " " + "PM" + " at " 
				+ locationData;
			}
			if (tempCalendar.get(Calendar.AM_PM) == 0){
				// Top of the hour: omit the minutes, matching the PM branch above.
				return hour + "" + " " + "AM" + " at " + locationData;
			}
		}
		return null;
	}

	// Variant of isEntryComplete that works on a row list built by getList()
	// (index 0 = id, 1 = label, 2 = amount, 5 = entry type).
	private boolean isEntryComplete(ArrayList<String> toCheckList) {
		if (toCheckList.get(5).equals(getString(R.string.camera))) {
			if(toCheckList.get(2) != null){
				if (toCheckList.get(2).contains("?")) {
					return false;
				}
			}
			File mFileSmall = new File("/sdcard/ExpenseTracker/" + toCheckList.get(0) + "_small.jpg");
			File mFile = new File("/sdcard/ExpenseTracker/" + toCheckList.get(0) + ".jpg");
			File mFileThumbnail = new File("/sdcard/ExpenseTracker/" + toCheckList.get(0) + "_thumbnail.jpg");
			if (mFile.canRead() && mFileSmall.canRead() && mFileThumbnail.canRead()) {
				return true;
			} else {
				return false;
			}
		} else if (toCheckList.get(5).equals(getString(R.string.voice))) {
			if(toCheckList.get(2) != null){
				if (toCheckList.get(2).contains("?")) {
					return false;
				}
			}
			File mFile = new File("/sdcard/ExpenseTracker/Audio/" + toCheckList.get(0) + ".amr");
			if (mFile.canRead()) {
				return true;
			} else {
				return false;
			}
		} else if (toCheckList.get(5).equals(getString(R.string.text))) {
			if(toCheckList.get(2) != null){
				if (toCheckList.get(2).contains("?")) {
					return false;
				}
			}
			if(toCheckList.get(1) != null){
				if (toCheckList.get(1).equals(getString(R.string.unfinished_textentry))
						|| toCheckList.get(1).equals(getString(R.string.finished_textentry))) {
					return false;
				} else {
					return true;
				}
			}
		}
		return false;
	}
}
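The time strings in getLocationDate and getLocationDateDate are assembled by hand from Calendar fields. As a minimal sketch only, not taken from the original file and with a hypothetical helper name, the same "3:05 PM" / "3 PM" formatting could be produced in one place:

	// Hypothetical helper, equivalent to the hand-rolled hour/minute/AM-PM logic above.
	private static String formatTime(long millis) {
		Calendar c = Calendar.getInstance();
		c.setFirstDayOfWeek(Calendar.MONDAY);
		c.setTimeInMillis(millis);
		int hour = c.get(Calendar.HOUR);
		if (hour == 0) {
			hour = 12;
		}
		String amPm = (c.get(Calendar.AM_PM) == Calendar.AM) ? "AM" : "PM";
		int minute = c.get(Calendar.MINUTE);
		return (minute == 0)
				? hour + " " + amPm
				: String.format(java.util.Locale.US, "%d:%02d %s", hour, minute, amPm);
	}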
Bug fix: use Button instead of ImageButton for the empty-list button in ExpenseSubListing
src/com/vinsol/expensetracker/ExpenseSubListing.java
Bug fix: use Button instead of ImageButton for the empty-list button in ExpenseSubListing
<ide><path>rc/com/vinsol/expensetracker/ExpenseSubListing.java
<ide> import android.widget.AdapterView;
<ide> import android.widget.AdapterView.OnItemClickListener;
<ide> import android.widget.ArrayAdapter;
<del>import android.widget.ImageButton;
<add>import android.widget.Button;
<ide> import android.widget.ListView;
<ide> import android.widget.RelativeLayout;
<ide> import android.widget.Toast;
<ide> 		mListView.setVisibility(View.GONE);
<ide> 		RelativeLayout mRelativeLayout = (RelativeLayout) findViewById(R.id.expense_listing_listview_no_item);
<ide> 		mRelativeLayout.setVisibility(View.VISIBLE);
<del> 		ImageButton expense_listing_listview_no_item_button = (ImageButton) findViewById(R.id.expense_listing_listview_no_item_button);
<add> 		Button expense_listing_listview_no_item_button = (Button) findViewById(R.id.expense_listing_listview_no_item_button);
<ide> 		expense_listing_listview_no_item_button.setOnClickListener(new OnClickListener() {
<ide> 			@Override
<ide> 			public void onClick(View v) {
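The diff suggests the layout declares this view as a Button, so the old ImageButton cast would fail at runtime. As a hedged alternative sketch, not what the commit actually does, the listener could be bound through the View base class so no downcast is needed (assumes it runs inside the Activity, with the same imports as the file above):

	// Sketch only: setOnClickListener is defined on View, so the concrete widget subclass does not matter.
	View emptyButton = findViewById(R.id.expense_listing_listview_no_item_button);
	emptyButton.setOnClickListener(new OnClickListener() {
		@Override
		public void onClick(View v) {
			finish();
		}
	});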
JavaScript
bsd-3-clause
2b20d1ee90529444d266398505bb50aafd189e0b
0
dondi/GRNsight,dondi/GRNsight
/* require("browser-env")();
var chai = require("chai");
var expect = chai.expect;
var sinon = require("sinon");

var gene = "ACE2";
global.window = window;
global.$ = require("jquery");
var geneFunctions = require(__dirname + "/../web-client/public/gene/api");
console.log(typeof XMLSerializer);

describe("Uniprot API", function () {
    var request;

    beforeEach(function () {
        window.$ = $;
        request = sinon.useFakeXMLHttpRequest();
    });

    afterEach(function () {
        request.restore();
    });

    it("should load", function () {
        geneFunctions.getUniProtInfo(gene).done(function (result) {
            expect(result).to.not.be.undefined;
        });
    });
});

describe("Ensembl API", function () {
    it("should load", function () {
    });
});

describe("JASPAR API", function () {
    it("should load", function () {
    });
});

describe("SGD API", function () {
    it("should load", function () {
    });
});

describe("NCBI Gene API", function () {
    it("should load", function () {
    });
});
*/
test/api-tests.js
require("browser-env")(); var chai = require("chai"); var expect = chai.expect; var sinon = require("sinon"); var gene = "ACE2"; global.window = window; global.$ = require("jquery"); var geneFunctions = require(__dirname + "/../web-client/public/gene/api"); console.log(typeof XMLSerializer); describe("Uniprot API", function () { var request; beforeEach(function () { window.$ = $; request = sinon.useFakeXMLHttpRequest(); }); afterEach(function () { request.restore(); }); it("should load", function () { geneFunctions.getUniProtInfo(gene).done(function (result) { expect(result).to.not.be.undefined; }); }); }); /* describe("Ensembl API", function () { it("should load", function () { }); }); describe("JASPAR API", function () { it("should load", function () { }); }); describe("SGD API", function () { it("should load", function () { }); }); describe("NCBI Gene API", function () { it("should load", function () { }); }); */
Commented out API tests, work will commence on another branch
test/api-tests.js
Commented out API tests, work will commence on another branch
<ide><path>est/api-tests.js
<del>require("browser-env")();
<add>/* require("browser-env")();
<ide> var chai = require("chai");
<ide> var expect = chai.expect;
<ide> var sinon = require("sinon");
<ide> });
<ide> });
<ide>
<del>/*
<add>
<ide> describe("Ensembl API", function () {
<ide> it("should load", function () {
<ide> });
Java
apache-2.0
8db428d36d5f86a591889bb823c0008dc2903bba
0
facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho.processor; import javax.annotation.processing.ProcessingEnvironment; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; import javax.lang.model.element.TypeParameterElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Types; import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.facebook.common.internal.ImmutableList; import com.facebook.litho.annotations.Event; import com.facebook.litho.annotations.FromEvent; import com.facebook.litho.annotations.OnCreateInitialState; import com.facebook.litho.annotations.OnCreateTreeProp; import com.facebook.litho.annotations.OnEvent; import com.facebook.litho.annotations.OnLoadStyle; import com.facebook.litho.annotations.OnUpdateState; import com.facebook.litho.annotations.Param; import com.facebook.litho.annotations.Prop; import com.facebook.litho.annotations.PropDefault; import com.facebook.litho.annotations.ResType; import com.facebook.litho.annotations.State; import com.facebook.litho.annotations.TreeProp; import com.facebook.litho.javapoet.JPUtil; import com.facebook.litho.processor.GetTreePropsForChildrenMethodBuilder.CreateTreePropMethodData; import com.facebook.litho.specmodels.model.ClassNames; import com.facebook.litho.specmodels.model.PropDefaultModel; import com.facebook.litho.specmodels.processor.PropDefaultsExtractor; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import com.squareup.javapoet.TypeVariableName; import com.squareup.javapoet.WildcardTypeName; import static com.facebook.litho.processor.Utils.capitalize; import static com.facebook.litho.processor.Visibility.PRIVATE; import static com.facebook.litho.specmodels.generator.GeneratorConstants.DELEGATE_FIELD_NAME; import static com.facebook.litho.specmodels.generator.GeneratorConstants.SPEC_INSTANCE_NAME; import static java.util.Arrays.asList; import static javax.lang.model.type.TypeKind.ARRAY; import static javax.lang.model.type.TypeKind.DECLARED; import static javax.lang.model.type.TypeKind.DOUBLE; import static javax.lang.model.type.TypeKind.FLOAT; import static javax.lang.model.type.TypeKind.TYPEVAR; import static javax.lang.model.type.TypeKind.VOID; public class Stages { public static final String IMPL_CLASS_NAME_SUFFIX = "Impl"; 
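  // Illustrative only (hypothetical spec, not part of this file): Stages consumes component
  // spec classes whose annotated static methods it turns into generated lifecycle code, e.g.
  //
  //   @LayoutSpec
  //   class MyComponentSpec {
  //     @OnCreateLayout
  //     static ComponentLayout onCreateLayout(ComponentContext c, @Prop String title) { ... }
  //
  //     @OnEvent(ClickEvent.class)
  //     static void onRowClick(ComponentContext c, @Param int position) { ... }
  //
  //     @OnUpdateState
  //     static void incrementCounter(StateValue<Integer> counter, @Param int delta) { ... }
  //   }
  //
  // @Prop and @State parameters become members of the generated Impl class, @OnEvent methods are
  // routed through dispatchOnEvent(), and @OnUpdateState methods become StateUpdate classes.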
private static final String INNER_IMPL_BUILDER_CLASS_NAME = "Builder"; private static final String STATE_UPDATE_IMPL_NAME_SUFFIX = "StateUpdate"; public static final String STATE_CONTAINER_IMPL_NAME_SUFFIX = "StateContainerImpl"; public static final String STATE_CONTAINER_IMPL_MEMBER = "mStateContainerImpl"; private static final String REQUIRED_PROPS_NAMES = "REQUIRED_PROPS_NAMES"; private static final String REQUIRED_PROPS_COUNT = "REQUIRED_PROPS_COUNT"; private static final int ON_STYLE_PROPS = 1; private static final int ON_CREATE_INITIAL_STATE = 1; private final boolean mSupportState; public enum StaticFlag { STATIC, NOT_STATIC } public enum StyleableFlag { STYLEABLE, NOT_STYLEABLE } // Using these names in props might cause conflicts with the method names in the // component's generated layout builder class so we trigger a more user-friendly // error if the component tries to use them. This list should be kept in sync // with BaseLayoutBuilder. private static final String[] RESERVED_PROP_NAMES = new String[] { "withLayout", "key", "loadingEventHandler", }; private static final Class<Annotation>[] TREE_PROP_ANNOTATIONS = new Class[] { TreeProp.class, }; private static final Class<Annotation>[] PROP_ANNOTATIONS = new Class[] { Prop.class, }; private static final Class<Annotation>[] STATE_ANNOTATIONS = new Class[] { State.class, }; private final ProcessingEnvironment mProcessingEnv; private final TypeElement mSourceElement; private final String mQualifiedClassName; private final Class<Annotation>[] mStageAnnotations; private final Class<Annotation>[] mInterStagePropAnnotations; private final Class<Annotation>[] mParameterAnnotations; private final TypeSpec.Builder mClassTypeSpec; private final List<TypeVariableName> mTypeVariables; private final List<TypeElement> mEventDeclarations; private final Map<String, String> mPropJavadocs; private final String mSimpleClassName; private String mSourceDelegateAccessorName = DELEGATE_FIELD_NAME; private List<VariableElement> mProps; private List<VariableElement> mOnCreateInitialStateDefinedProps; private ImmutableList<PropDefaultModel> mPropDefaults; private List<VariableElement> mTreeProps; private final Map<String, VariableElement> mStateMap = new LinkedHashMap<>(); // Map of name to VariableElement, for members of the inner implementation class, in order private LinkedHashMap<String, VariableElement> mImplMembers; private List<Parameter> mImplParameters; private final Map<String, TypeMirror> mExtraStateMembers; // List of methods that have @OnEvent on it. private final List<ExecutableElement> mOnEventMethods; // List of methods annotated with @OnUpdateState. private final List<ExecutableElement> mOnUpdateStateMethods; private final List<ExecutableElement> mOnCreateTreePropsMethods; // List of methods that define stages (e.g. 
OnCreateLayout) private List<ExecutableElement> mStages; public TypeElement getSourceElement() { return mSourceElement; } public Stages( ProcessingEnvironment processingEnv, TypeElement sourceElement, String qualifiedClassName, Class<Annotation>[] stageAnnotations, Class<Annotation>[] interStagePropAnnotations, TypeSpec.Builder typeSpec, List<TypeVariableName> typeVariables, boolean supportState, Map<String, TypeMirror> extraStateMembers, List<TypeElement> eventDeclarations, Map<String, String> propJavadocs) { mProcessingEnv = processingEnv; mSourceElement = sourceElement; mQualifiedClassName = qualifiedClassName; mStageAnnotations = stageAnnotations; mInterStagePropAnnotations = interStagePropAnnotations; mClassTypeSpec = typeSpec; mTypeVariables = typeVariables; mEventDeclarations = eventDeclarations; mPropJavadocs = propJavadocs; final List<Class<Annotation>> parameterAnnotations = new ArrayList<>(); parameterAnnotations.addAll(asList(PROP_ANNOTATIONS)); parameterAnnotations.addAll(asList(STATE_ANNOTATIONS)); parameterAnnotations.addAll(asList(mInterStagePropAnnotations)); parameterAnnotations.addAll(asList(TREE_PROP_ANNOTATIONS)); mParameterAnnotations = parameterAnnotations.toArray( new Class[parameterAnnotations.size()]); mSupportState = supportState; mSimpleClassName = Utils.getSimpleClassName(mQualifiedClassName); mOnEventMethods = Utils.getAnnotatedMethods(mSourceElement, OnEvent.class); mOnUpdateStateMethods = Utils.getAnnotatedMethods(mSourceElement, OnUpdateState.class); mOnCreateTreePropsMethods = Utils.getAnnotatedMethods(mSourceElement, OnCreateTreeProp.class); mExtraStateMembers = extraStateMembers; validateOnEventMethods(); populatePropDefaults(); populateStages(); validateAnnotatedParameters(); populateOnCreateInitialStateDefinedProps(); populateProps(); populateTreeProps(); if (mSupportState) { populateStateMap(); } validatePropDefaults(); populateImplMembers(); populateImplParameters(); validateStyleOutputs(); } private boolean isInterStagePropAnnotationValidInStage( Class<? extends Annotation> interStageProp, Class<? extends Annotation> stage) { final int interStagePropIndex = asList(mInterStagePropAnnotations).indexOf(interStageProp); final int stageIndex = asList(mStageAnnotations).indexOf(stage); if (interStagePropIndex < 0 || stageIndex < 0) { throw new IllegalArgumentException(); // indicates bug in the annotation processor } // This logic relies on the fact that there are prop annotations for each stage (except for // some number at the end) return interStagePropIndex < stageIndex; } private boolean doesInterStagePropAnnotationMatchStage( Class<? extends Annotation> interStageProp, Class<? 
extends Annotation> stage) { final int interStagePropIndex = asList(mInterStagePropAnnotations).indexOf(interStageProp); // Null stage is allowed and indicates prop int stageIndex = -1; if (stage != null) { stageIndex = asList(mStageAnnotations).indexOf(stage); if (interStagePropIndex < 0 || stageIndex < 0) { throw new IllegalArgumentException(); // indicates bug in the annotation processor } } return interStagePropIndex == stageIndex; } private void validateOnEventMethods() { final Map<String, Boolean> existsMap = new HashMap<>(); for (ExecutableElement element : mOnEventMethods) { if (existsMap.containsKey(element.getSimpleName().toString())) { throw new ComponentsProcessingException( element, "@OnEvent declared methods must have unique names"); } final DeclaredType eventClass = Utils.getAnnotationParameter( mProcessingEnv, element, OnEvent.class, "value"); final TypeMirror returnType = Utils.getAnnotationParameter( mProcessingEnv, eventClass.asElement(), Event.class, "returnType"); if (!mProcessingEnv.getTypeUtils().isSameType(element.getReturnType(), returnType)) { throw new ComponentsProcessingException( element, "Method " + element.getSimpleName() + " must return " + returnType + ", since that is what " + eventClass + " expects."); } final List<? extends VariableElement> parameters = Utils.getEnclosedFields((TypeElement) eventClass.asElement()); for (VariableElement v : Utils.getParametersWithAnnotation(element, FromEvent.class)) { boolean hasMatchingParameter = false; for (VariableElement parameter : parameters) { if (parameter.getSimpleName().equals(v.getSimpleName()) && parameter.asType().toString().equals(v.asType().toString())) { hasMatchingParameter = true; break; } } if (!hasMatchingParameter) { throw new ComponentsProcessingException( v, v.getSimpleName() + " of this type is not a member of " + eventClass); } return; } existsMap.put(element.getSimpleName().toString(), true); } } /** * Ensures that the declared events don't clash with the predefined ones. */ private void validateEventDeclarations() { for (TypeElement eventDeclaration : mEventDeclarations) { final Event eventAnnotation = eventDeclaration.getAnnotation(Event.class); if (eventAnnotation == null) { throw new ComponentsProcessingException( eventDeclaration, "Events must be declared with the @Event annotation, event is: " + eventDeclaration); } final List<? extends VariableElement> fields = Utils.getEnclosedFields(eventDeclaration); for (VariableElement field : fields) { if (!field.getModifiers().contains(Modifier.PUBLIC) || field.getModifiers().contains(Modifier.FINAL)) { throw new ComponentsProcessingException( field, "Event fields must be declared as public non-final"); } } } } private void validateStyleOutputs() { final ExecutableElement delegateMethod = Utils.getAnnotatedMethod( mSourceElement, OnLoadStyle.class); if (delegateMethod == null) { return; } final List<? 
extends VariableElement> parameters = delegateMethod.getParameters(); if (parameters.size() < ON_STYLE_PROPS) { throw new ComponentsProcessingException( delegateMethod, "The @OnLoadStyle method should have an ComponentContext" + "followed by Output parameters matching component create."); } final TypeName firstParamType = ClassName.get(parameters.get(0).asType()); if (!firstParamType.equals(ClassNames.COMPONENT_CONTEXT)) { throw new ComponentsProcessingException( parameters.get(0), "The first argument of the @OnLoadStyle method should be an ComponentContext."); } for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { final VariableElement v = parameters.get(i); final TypeMirror outputType = Utils.getGenericTypeArgument(v.asType(), ClassNames.OUTPUT); if (outputType == null) { throw new ComponentsProcessingException( parameters.get(i), "The @OnLoadStyle method should have only have Output arguments matching " + "component create."); } final Types typeUtils = mProcessingEnv.getTypeUtils(); final String name = v.getSimpleName().toString(); boolean matchesProp = false; for (Element prop : mProps) { if (!prop.getSimpleName().toString().equals(name)) { continue; } matchesProp = true; if (!typeUtils.isAssignable(prop.asType(), outputType)) { throw new ComponentsProcessingException( v, "Searching for prop \"" + name + "\" of type " + ClassName.get(outputType) + " but found prop with the same name of type " + ClassName.get(prop.asType())); } } if (!matchesProp) { throw new ComponentsProcessingException( v, "Output named '" + v.getSimpleName() + "' does not match any prop " + "in the component."); } } } /** * Validate that: * <ul> * <li>1. Parameters are consistently typed across stages.</li> * <li>2. Outputs for the same parameter name aren't duplicated.</li> * <li>3. Declared inter-stage prop parameters from previous stages (i.e. not * {@link Prop}) correspond to outputs from that stage</li> * <li>4. Inter-stage prop parameters come from previous stages. i.e. It is illegal to declare * a @FromMeasure parameter in @OnInflate</li> * <li>5. Inter-stage parameters don't have duplicate annotations (and that outputs aren't * annotated as inter-stage props)</li> * <li>6. Ensure props don't use reserved words as names.</li> * <li>7. Ensure required props don't have default values.</li> * <li>8. Ensure same props are annotated identically</li> * <li>9. Ensure props are of legal types</li> * </ul> */ private void validateAnnotatedParameters() { final List<PrintableException> exceptions = new ArrayList<>(); final Map<String, VariableElement> variableNameToElementMap = new HashMap<>(); final Map<String, Class<? extends Annotation>> outputVariableToStage = new HashMap<>(); for (Class<? extends Annotation> stageAnnotation : mStageAnnotations) { final ExecutableElement stage = Utils.getAnnotatedMethod( mSourceElement, stageAnnotation); if (stage == null) { continue; } // Enforce #5: getSpecDefinedParameters will verify that parameters don't have duplicate // annotations for (VariableElement v : getSpecDefinedParameters(stage)) { try { final String variableName = v.getSimpleName().toString(); final Annotation interStagePropAnnotation = getInterStagePropAnnotation(v); final boolean isOutput = Utils.getGenericTypeArgument(v.asType(), ClassNames.OUTPUT) != null; if (isOutput) { outputVariableToStage.put(variableName, stageAnnotation); } // Enforce #3 if (interStagePropAnnotation != null) { final Class<? 
extends Annotation> outputStage = outputVariableToStage.get(variableName); if (!doesInterStagePropAnnotationMatchStage( interStagePropAnnotation.annotationType(), outputStage)) { throw new ComponentsProcessingException( v, "Inter-stage prop declaration is incorrect, the same name and type must be " + "used in every method where the inter-stage prop is declared."); } } // Enforce #4 if (interStagePropAnnotation != null && !isInterStagePropAnnotationValidInStage( interStagePropAnnotation.annotationType(), stageAnnotation)) { throw new ComponentsProcessingException( v, "Inter-stage create must refer to previous stages."); } final VariableElement existingType = variableNameToElementMap.get(variableName); if (existingType != null && !isSameType(existingType.asType(), v.asType())) { // We have a type mis-match. This is allowed, provided that the previous type is an // outputand the new type is an prop, and the type argument of the output matches the // prop. In the future, we may want to allow stages to modify outputs from previous // stages, but for now we disallow it. // Enforce #1 and #2 if ((getInterStagePropAnnotation(v) == null || Utils.getGenericTypeArgument(existingType.asType(), ClassNames.OUTPUT) == null) && Utils.getGenericTypeArgument(existingType.asType(), ClassNames.DIFF) == null) { throw new ComponentsProcessingException( v, "Inconsistent type for '" + variableName + "': '" + existingType.asType() + "' and '" + v.asType() + "'"); } } else if (existingType == null) { // We haven't see a parameter with this name yet. Therefore it must be either @Prop, // @State or an output. final boolean isFromProp = getParameterAnnotation(v, PROP_ANNOTATIONS) != null; final boolean isFromState = getParameterAnnotation(v, STATE_ANNOTATIONS) != null; final boolean isFromTreeProp = getParameterAnnotation(v, TREE_PROP_ANNOTATIONS) != null; if (isFromState && !mSupportState) { throw new ComponentsProcessingException( v, "State is not supported in this kind of Spec."); } if (!isFromProp && !isFromState && !isOutput && !isFromTreeProp) { throw new ComponentsProcessingException( v, "Inter-stage prop declared without source."); } } // Enforce #6 final Prop propAnnotation = v.getAnnotation(Prop.class); if (propAnnotation != null) { for (String reservedPropName : RESERVED_PROP_NAMES) { if (reservedPropName.equals(variableName)) { throw new ComponentsProcessingException( v, "'" + reservedPropName + "' is a reserved prop name used by " + "the component's layout builder. Please use another name."); } } // Enforce #7 final boolean hasDefaultValue = hasDefaultValue(v); if (hasDefaultValue && !propAnnotation.optional()) { throw new ComponentsProcessingException( v, "Prop is not optional but has a declared default value."); } // Enforce #8 if (existingType != null) { final Prop existingPropAnnotation = existingType.getAnnotation(Prop.class); if (existingPropAnnotation != null) { if (!hasSameAnnotations(v, existingType)) { throw new ComponentsProcessingException( v, "The prop '" + variableName + "' is configured differently for different " + "methods. 
Ensure each instance of this prop is declared identically."); } } } // Enforce #9 TypeName typeName; try { typeName = ClassName.get(v.asType()); } catch (IllegalArgumentException e) { throw new ComponentsProcessingException( v, "Prop type does not exist"); } // Enforce #10 final List<ClassName> illegalPropTypes = Arrays.asList( ClassNames.COMPONENT_LAYOUT, ClassNames.COMPONENT_LAYOUT_BUILDER, ClassNames.COMPONENT_LAYOUT_CONTAINER_BUILDER, ClassNames.COMPONENT_BUILDER, ClassNames.COMPONENT_BUILDER_WITH_LAYOUT, ClassNames.REFERENCE_BUILDER); if (illegalPropTypes.contains(typeName)) { throw new ComponentsProcessingException( v, "Props may not be declared with the following types:" + illegalPropTypes); } } variableNameToElementMap.put(variableName, v); } catch (PrintableException e) { exceptions.add(e); } } } if (!exceptions.isEmpty()) { throw new MultiPrintableException(exceptions); } } private boolean hasSameAnnotations(VariableElement v1, VariableElement v2) { final List<? extends AnnotationMirror> v1Annotations = v1.getAnnotationMirrors(); final List<? extends AnnotationMirror> v2Annotations = v2.getAnnotationMirrors(); if (v1Annotations.size() != v2Annotations.size()) { return false; } final int count = v1Annotations.size(); for (int i = 0; i < count; i++) { final AnnotationMirror a1 = v1Annotations.get(i); final AnnotationMirror a2 = v2Annotations.get(i); // Some object in this hierarchy don't implement equals correctly. // They do however produce very nice strings representations which we can compare instead. if (!a1.toString().equals(a2.toString())) { return false; } } return true; } public void validateStatic() { validateStaticFields(); validateStaticMethods(); } private void validateStaticFields() { for (Element element : mSourceElement.getEnclosedElements()) { if (element.getKind() == ElementKind.FIELD && !element.getModifiers().contains(Modifier.STATIC)) { throw new ComponentsProcessingException( element, "Field " + element.getSimpleName() + " in " + mSourceElement.getQualifiedName() + " must be static"); } } } private void validateStaticMethods() { for (Class<? 
extends Annotation> stageAnnotation : mStageAnnotations) { final ExecutableElement stage = Utils.getAnnotatedMethod( mSourceElement, stageAnnotation); if (stage != null && !stage.getModifiers().contains(Modifier.STATIC)) { throw new ComponentsProcessingException( stage, "Method " + stage.getSimpleName() + " in " + mSourceElement.getQualifiedName() + " must be static"); } } } /** * Gather a list of VariableElement that are the props to this component */ private void populateProps() { // We use a linked hash map to guarantee iteration order final LinkedHashMap<String, VariableElement> variableNameToElementMap = new LinkedHashMap<>(); for (ExecutableElement stage : mStages) { for (VariableElement v : getProps(stage)) { // Validation unnecessary - already handled by validateAnnotatedParameters final String variableName = v.getSimpleName().toString(); variableNameToElementMap.put(variableName, v); } } mProps = new ArrayList<>(variableNameToElementMap.values()); addCreateInitialStateDefinedProps(mProps); } /** * Gather a list of VariableElement that are the state to this component */ private void populateStateMap() { // We use a linked hash map to guarantee iteration order final LinkedHashMap<String, VariableElement> variableNameToElementMap = new LinkedHashMap<>(); for (ExecutableElement stage : mStages) { for (VariableElement v : getState(stage)) { final String variableName = v.getSimpleName().toString(); if (mStateMap.containsKey(variableName)) { VariableElement existingType = mStateMap.get(variableName); final State existingPropAnnotation = existingType.getAnnotation(State.class); if (existingPropAnnotation != null) { if (!hasSameAnnotations(v, existingType)) { throw new ComponentsProcessingException( v, "The state '" + variableName + "' is configured differently for different " + "methods. Ensure each instance of this state is declared identically."); } } } mStateMap.put( variableName, v); } } } private void populateTreeProps() { final LinkedHashMap<String, VariableElement> variableNameToElementMap = new LinkedHashMap<>(); for (ExecutableElement stage : mStages) { for (VariableElement v : Utils.getParametersWithAnnotation(stage, TreeProp.class)) { final String variableName = v.getSimpleName().toString(); variableNameToElementMap.put(variableName, v); } } mTreeProps = new ArrayList<>(variableNameToElementMap.values()); } /** * Get the list of stages (OnInflate, OnMeasure, OnMount) that are defined for this component. */ private void populateStages() { mStages = new ArrayList<>(); for (Class<Annotation> stageAnnotation : mStageAnnotations) { final ExecutableElement stage = Utils.getAnnotatedMethod( mSourceElement, stageAnnotation); if (stage != null) { mStages.add(stage); } } if (mOnEventMethods != null) { mStages.addAll(mOnEventMethods); } mStages.addAll(mOnCreateTreePropsMethods); } /** * @param prop The prop to determine if it has a default or not. * @return Returns true if the prop has a default, false otherwise. */ private boolean hasDefaultValue(VariableElement prop) { final String name = prop.getSimpleName().toString(); final TypeName type = TypeName.get(prop.asType()); for (PropDefaultModel propDefault : mPropDefaults) { if (propDefault.mName.equals(name) && propDefault.mType.equals(type)) { return true; } } return false; } /** * Fail if any elements that exist in mPropDefaults do not exist in mProps. 
*/ private void validatePropDefaults() { for (PropDefaultModel propDefault : mPropDefaults) { final ImmutableList<Modifier> modifiers = propDefault.mModifiers; if (!modifiers.contains(Modifier.STATIC) || !modifiers.contains(Modifier.FINAL) || modifiers.contains(Modifier.PRIVATE)) { throw new RuntimeException( "Defaults for props (fields annotated with " + PropDefault.class + ") must be " + "non-private, static, and final. This is not the case for " + propDefault.mName); } if (!hasValidNameAndType(propDefault)) { throw new RuntimeException( "Prop defaults (fields annotated with " + PropDefault.class + ") should have the " + "same name and type as the prop that they set the default for. This is not the " + "case for " + propDefault.mName); } } } /** * @return true if the given prop default matches the name and type of a prop, false otherwise. */ private boolean hasValidNameAndType(PropDefaultModel propDefault) { for (VariableElement prop : mProps) { if (prop.getSimpleName().toString().equals(propDefault.mName) && TypeName.get(prop.asType()).equals(propDefault.mType)) { return true; } } return false; } /** * Gather a list of parameters from the given element that are props to this component. */ private static List<VariableElement> getProps(ExecutableElement element) { return Utils.getParametersWithAnnotation(element, Prop.class); } /** * Gather a list of parameters from the given element that are state to this component. */ private static List<VariableElement> getState(ExecutableElement element) { return Utils.getParametersWithAnnotation(element, State.class); } /** * Gather a list of parameters from the given element that are defined by the spec. That is, they * aren't one of the parameters predefined for a given method. For example, OnCreateLayout has a * predefined parameter of type LayoutContext. Spec-defined parameters are annotated with one of * our prop annotations or are of type {@link com.facebook.litho.Output}. */ private List<VariableElement> getSpecDefinedParameters(ExecutableElement element) { return getSpecDefinedParameters(element, true); } private List<VariableElement> getSpecDefinedParameters( ExecutableElement element, boolean shouldIncludeOutputs) { final ArrayList<VariableElement> specDefinedParameters = new ArrayList<>(); for (VariableElement v : element.getParameters()) { final boolean isAnnotatedParameter = getParameterAnnotation(v) != null; final boolean isInterStageOutput = Utils.getGenericTypeArgument( v.asType(), ClassNames.OUTPUT) != null; if (isAnnotatedParameter && isInterStageOutput) { throw new ComponentsProcessingException( v, "Variables that are both prop and output are forbidden."); } else if (isAnnotatedParameter || (shouldIncludeOutputs && isInterStageOutput)) { specDefinedParameters.add(v); } } return specDefinedParameters; } private void populateOnCreateInitialStateDefinedProps() { final ExecutableElement onCreateInitialState = Utils.getAnnotatedMethod( getSourceElement(), OnCreateInitialState.class); if (onCreateInitialState == null) { mOnCreateInitialStateDefinedProps = new ArrayList<>(); } else { mOnCreateInitialStateDefinedProps = getSpecDefinedParameters(onCreateInitialState, false); } } /** * Get the @FromLayout, @FromMeasure, etc annotation on this element (@Prop isn't * considered - use getParameterAnnotation if you want to consider them) */ private Annotation getInterStagePropAnnotation(VariableElement element) { return getParameterAnnotation(element, mInterStagePropAnnotations); } /** * Get the annotation, if any, present on a parameter. 
Annotations are restricted to our whitelist * of parameter annotations: e.g. {@link Prop}, {@link State} etc) */ private Annotation getParameterAnnotation(VariableElement element) { return getParameterAnnotation(element, mParameterAnnotations); } /** * Get the annotation, if any, present on a parameter. Annotations are restricted to the specified * whitelist. If there is a duplicate we will issue an error. */ private Annotation getParameterAnnotation( VariableElement element, Class<Annotation>[] possibleAnnotations) { final ArrayList<Annotation> annotations = new ArrayList<>(); for (Class<Annotation> annotationClass : possibleAnnotations) { final Annotation annotation = element.getAnnotation(annotationClass); if (annotation != null) { annotations.add(annotation); } } if (annotations.isEmpty()) { return null; } else if (annotations.size() == 1) { return annotations.get(0); } else { throw new ComponentsProcessingException( element, "Duplicate parameter annotation: '" + annotations.get(0) + "' and '" + annotations.get(1) + "'"); } } /** * Generate javadoc block describing component props. */ public void generateJavadoc() { for (VariableElement v : mProps) { final Prop propAnnotation = v.getAnnotation(Prop.class); final String propTag = propAnnotation.optional() ? "@prop-optional" : "@prop-required"; final String javadoc = mPropJavadocs != null ? mPropJavadocs.get(v.getSimpleName().toString()) : ""; final String sanitizedJavadoc = javadoc != null ? javadoc.replace('\n', ' ') : null; // Adds javadoc with following format: // @prop-required name type javadoc. // This can be changed later to use clear demarcation for fields. // This is a block tag and cannot support inline tags like "{@link something}". mClassTypeSpec.addJavadoc( "$L $L $L $L\n", propTag, v.getSimpleName().toString(), Utils.getTypeName(v.asType()), sanitizedJavadoc); } } /** * Generate a method for this component which either lazily instantiates a singleton reference or * return this depending on whether this lifecycle is static or not. 
*/ public void generateGetter(boolean isStatic) { final ClassName className = ClassName.bestGuess(mQualifiedClassName); if (isStatic) { mClassTypeSpec.addField( FieldSpec .builder(className, SPEC_INSTANCE_NAME, Modifier.PRIVATE, Modifier.STATIC) .initializer("null") .build()); mClassTypeSpec.addMethod( MethodSpec.methodBuilder("get") .addModifiers(Modifier.PUBLIC) .addModifiers(Modifier.STATIC) .addModifiers(Modifier.SYNCHRONIZED) .returns(className) .beginControlFlow("if ($L == null)", SPEC_INSTANCE_NAME) .addStatement("$L = new $T()", SPEC_INSTANCE_NAME, className) .endControlFlow() .addStatement("return $L", SPEC_INSTANCE_NAME) .build()); } else { mClassTypeSpec.addMethod( MethodSpec.methodBuilder("get") .addModifiers(Modifier.PUBLIC) .returns(className) .addStatement("return this") .build()); } } public void generateSourceDelegate(boolean initialized) { final ClassName specClassName = ClassName.get(mSourceElement); generateSourceDelegate(initialized, specClassName); } public void generateSourceDelegate(boolean initialized, TypeName specTypeName) { final FieldSpec.Builder builder = FieldSpec .builder(specTypeName, DELEGATE_FIELD_NAME) .addModifiers(Modifier.PRIVATE); if (initialized) { builder.initializer("new $T()", specTypeName); } mClassTypeSpec.addField(builder.build()); } private MethodSpec generateMakeShallowCopy(ClassName componentClassName, boolean hasDeepCopy) { final List<String> componentsInImpl = findComponentsInImpl(componentClassName); final List<String> interStageComponentVariables = getInterStageVariableNames(); if (componentsInImpl.isEmpty() && interStageComponentVariables.isEmpty() && mOnUpdateStateMethods.isEmpty()) { return null; } final String implClassName = getImplClassName(); return new ShallowCopyMethodSpecBuilder() .componentsInImpl(componentsInImpl) .interStageVariables(interStageComponentVariables) .implClassName(implClassName) .hasDeepCopy(hasDeepCopy) .stateContainerImplClassName(getStateContainerImplClassName()) .build(); } private List<String> findComponentsInImpl(ClassName listComponent) { final List<String> componentsInImpl = new ArrayList<>(); for (String key : mImplMembers.keySet()) { final VariableElement element = mImplMembers.get(key); final Name declaredClassName = Utils.getDeclaredClassNameWithoutGenerics(element); if (declaredClassName != null && ClassName.bestGuess(declaredClassName.toString()).equals(listComponent)) { componentsInImpl.add(element.getSimpleName().toString()); } } return componentsInImpl; } /** * Generate a private constructor to enforce singleton-ity. */ public void generateConstructor() { mClassTypeSpec.addMethod( MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .build()); } /** * Generates a method to create the initial values for parameters annotated with {@link State}. * This method also validates that the delegate method only tries to assign an initial value to * State annotated parameters. 
*/ public void generateCreateInitialState( ExecutableElement from, ClassName contextClass, ClassName componentClass) { verifyParametersForCreateInitialState(contextClass, from); final MethodDescription methodDescription = new MethodDescription(); methodDescription.annotations = new Class[] { Override.class }; methodDescription.accessType = Modifier.PROTECTED; methodDescription.returnType = null; methodDescription.name = "createInitialState"; methodDescription.parameterTypes = new TypeName[] {contextClass}; generateDelegate(methodDescription, from, componentClass); } private void verifyParametersForCreateInitialState( ClassName contextClass, ExecutableElement executableElement) { final List<VariableElement> parameters = (List<VariableElement>) executableElement.getParameters(); if (parameters.size() < ON_CREATE_INITIAL_STATE + 1) { throw new ComponentsProcessingException( executableElement, "The @OnCreateInitialState method should have an " + contextClass + "followed by Output parameters matching state parameters."); } final TypeName firstParamType = ClassName.get(parameters.get(0).asType()); if (!firstParamType.equals(contextClass)) { throw new ComponentsProcessingException( parameters.get(0), "The first argument of the @OnCreateInitialState method should be an " + contextClass + "."); } for (int i = ON_CREATE_INITIAL_STATE, size = parameters.size(); i < size; i++) { final VariableElement element = parameters.get(i); final TypeMirror elementInnerClassType = Utils.getGenericTypeArgument(element.asType(), ClassNames.OUTPUT); if (elementInnerClassType != null) { final String paramName = element.getSimpleName().toString(); VariableElement implParameter = mStateMap.get(paramName); if (implParameter == null || implParameter.getAnnotation(State.class) == null) { throw new ComponentsProcessingException( executableElement, "Only parameters annotated with @State can be initialized in @OnCreateInitialState," + " parameter without annotation is: " + paramName); } } } } /** * Generate a method implementation that delegates to another method that takes annotated props. * * @param from description of method signature to be generated * @param to method to which to delegate * @param propsClass Component / Delegate. The base class of the inner implementation object * @throws java.io.IOException If one of the writer methods throw */ public void generateDelegate( MethodDescription from, ExecutableElement to, ClassName propsClass) { generateDelegate( from, to, Collections.<TypeName>emptyList(), Collections.<String, String>emptyMap(), propsClass); } public void generateDelegate( MethodDescription from, ExecutableElement to, List<TypeName> expectedTypes, ClassName propsClass) { generateDelegate( from, to, expectedTypes, Collections.<String, String>emptyMap(), propsClass); } /** * Generate a method implementation that delegates to another method that takes annotated props. * * @param from description of method signature to be generated * @param to method to which to delegate * @param propsClass Component / Delegate. 
The base class of the inner implementation object * @throws java.io.IOException If one of the writer methods throw */ public void generateDelegate( MethodDescription from, ExecutableElement to, List<TypeName> expectedTypes, Map<String, String> parameterTranslation, ClassName propsClass) { final Visibility visibility; if (Arrays.asList(from.accessType).contains(Modifier.PRIVATE)) { visibility = Visibility.PRIVATE; } else if (Arrays.asList(from.accessType).contains(Modifier.PROTECTED)) { visibility = Visibility.PROTECTED; } else if (Arrays.asList(from.accessType).contains(Modifier.PUBLIC)) { visibility = Visibility.PUBLIC; } else { visibility = Visibility.PACKAGE; } final List<Parameter> toParams = getParams(to); final List<Parameter> fromParams = new ArrayList<>(); for (int i = 0; i < from.parameterTypes.length; i++) { fromParams.add(new Parameter(from.parameterTypes[i], toParams.get(i).name)); } final List<PrintableException> errors = new ArrayList<>(); for (int i = 0; i < expectedTypes.size(); i++) { if (!toParams.get(i).type.equals(expectedTypes.get(i))) { errors.add(new ComponentsProcessingException( to.getParameters().get(i), "Expected " + expectedTypes.get(i))); } } if (!errors.isEmpty()) { throw new MultiPrintableException(errors); } writeMethodSpec(new DelegateMethodSpecBuilder() .implClassName(getImplClassName()) .abstractImplType(propsClass) .implParameters(mImplParameters) .checkedExceptions( from.exceptions == null ? new ArrayList<TypeName>() : Arrays.asList(from.exceptions)) .overridesSuper( from.annotations != null && Arrays.asList(from.annotations).contains(Override.class)) .parameterTranslation(parameterTranslation) .visibility(visibility) .fromName(from.name) .fromReturnType(from.returnType == null ? TypeName.VOID : from.returnType) .fromParams(fromParams) .target(mSourceDelegateAccessorName) .toName(to.getSimpleName().toString()) .stateParams(mStateMap.keySet()) .toReturnType(ClassName.get(to.getReturnType())) .toParams(toParams) .build()); } /** * Returns {@code true} if the given types match. */ public boolean isSameType(TypeMirror a, TypeMirror b) { return mProcessingEnv.getTypeUtils().isSameType(a, b); } /** * Generate an onEvent implementation that delegates to the @OnEvent-annotated method. */ public void generateOnEventHandlers(ClassName componentClassName, ClassName contextClassName) { for (ExecutableElement element : mOnEventMethods) { generateOnEventHandler(element, contextClassName); } } /** * Generate the static methods of the Component that can be called to update its state. */ public void generateOnStateUpdateMethods( ClassName contextClass, ClassName componentClassName, ClassName stateContainerClassName, ClassName stateUpdateInterface, Stages.StaticFlag staticFlag) { for (ExecutableElement element : mOnUpdateStateMethods) { validateOnStateUpdateMethodDeclaration(element); generateStateUpdateClass( element, componentClassName, stateContainerClassName, stateUpdateInterface, staticFlag); generateOnStateUpdateMethods(element, contextClass, componentClassName); } } /** * Validate that the declaration of a method annotated with {@link OnUpdateState} is correct: * <ul> * <li>1. Method parameters annotated with {@link Param} don't have the same name as parameters * annotated with {@link State} or {@link Prop}.</li> * <li>2. Method parameters not annotated with {@link Param} must be of type * com.facebook.litho.StateValue.</li> * <li>3. 
Names of method parameters not annotated with {@link Param} must match the name of * a parameter annotated with {@link State}.</li> * <li>4. Type of method parameters not annotated with {@link Param} must match the type of * a parameter with the same name annotated with {@link State}.</li> * </ul> */ private void validateOnStateUpdateMethodDeclaration(ExecutableElement element) { final List<VariableElement> annotatedParams = Utils.getParametersWithAnnotation(element, Param.class); // Check #1 for (VariableElement annotatedParam : annotatedParams) { if (mStateMap.get(annotatedParam.getSimpleName().toString()) != null) { throw new ComponentsProcessingException( annotatedParam, "Parameters annotated with @Param should not have the same name as a parameter " + "annotated with @State or @Prop"); } } final List<VariableElement> params = (List<VariableElement>) element.getParameters(); for (VariableElement param : params) { if (annotatedParams.contains(param)) { continue; } final TypeMirror paramType = param.asType(); // Check #2 if (paramType.getKind() != DECLARED) { throw new ComponentsProcessingException( param, "Parameters not annotated with @Param must be of type " + "com.facebook.litho.StateValue"); } final DeclaredType paramDeclaredType = (DeclaredType) param.asType(); final String paramDeclaredTypeName = paramDeclaredType .asElement() .getSimpleName() .toString(); if (!paramDeclaredTypeName.equals(ClassNames.STATE_VALUE.simpleName())) { throw new ComponentsProcessingException( "All state parameters must be of type com.facebook.litho.StateValue, " + param.getSimpleName() + " is of type " + param.asType()); } VariableElement stateMatchingParam = mStateMap.get(param.getSimpleName().toString()); // Check #3 if (stateMatchingParam == null || stateMatchingParam.getAnnotation(State.class) == null) { throw new ComponentsProcessingException( param, "Names of parameters of type StateValue must match the name of a parameter annotated " + "with @State"); } // Check #4 final List<TypeMirror> typeArguments = (List<TypeMirror>) paramDeclaredType.getTypeArguments(); if (typeArguments.isEmpty()) { throw new ComponentsProcessingException( param, "Type parameter for a parameter of type StateValue should match the type of " + "a parameter with the same name annotated with @State"); } final TypeMirror typeArgument = typeArguments.get(0); final TypeName stateMatchingParamTypeName = ClassName.get(stateMatchingParam.asType()); if (stateMatchingParamTypeName.isPrimitive()) { TypeName stateMatchingParamBoxedType = stateMatchingParamTypeName.box(); if (!stateMatchingParamBoxedType.equals(TypeName.get(typeArgument))) { throw new ComponentsProcessingException( param, "Type parameter for a parameter of type StateValue should match the type of " + "a parameter with the same name annotated with @State"); } } } } /** * Generate an EventHandler factory methods */ public void generateEventHandlerFactories( ClassName contextClassName, ClassName componentClassName) { for (ExecutableElement element : mOnEventMethods) { generateEventHandlerFactory( element, contextClassName, componentClassName); } } // ExecutableElement.hashCode may be different in different runs of the // processor. getElementId() is deterministic and ensures that the output is // the same across multiple runs. private int getElementId(ExecutableElement el) { return (mQualifiedClassName.hashCode() * 31 + el.getSimpleName().hashCode()) * 31 + el.asType().toString().hashCode(); } /** * Generate a dispatchOnEvent() implementation for the component. 
*/ public void generateDispatchOnEvent( ClassName contextClassName) { final MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder("dispatchOnEvent") .addModifiers(Modifier.PUBLIC) .addAnnotation(Override.class) .returns(TypeName.OBJECT) .addParameter( ParameterSpec.builder(ClassNames.EVENT_HANDLER, "eventHandler", Modifier.FINAL).build()) .addParameter( ParameterSpec.builder(ClassNames.OBJECT, "eventState", Modifier.FINAL).build()); methodBuilder.addStatement("int id = eventHandler.id"); methodBuilder.beginControlFlow("switch($L)", "id"); final String implInstanceName = "_" + getImplInstanceName(); for (ExecutableElement element : mOnEventMethods) { methodBuilder.beginControlFlow("case $L:", getElementId(element)); final DeclaredType eventClass = Utils.getAnnotationParameter( mProcessingEnv, element, OnEvent.class, "value"); final String eventName = eventClass.toString(); methodBuilder.addStatement( "$L $L = ($L) $L", eventName, implInstanceName, eventName, "eventState"); final CodeBlock.Builder eventHandlerParams = CodeBlock.builder(); eventHandlerParams.indent(); int i = 0; eventHandlerParams.add("\n($T) eventHandler.params[$L],", contextClassName, i++); for (VariableElement v : Utils.getParametersWithAnnotation(element, FromEvent.class)) { eventHandlerParams.add( "\n" + implInstanceName + ".$L,", v.getSimpleName().toString()); } for (VariableElement v : Utils.getParametersWithAnnotation(element, Param.class)) { eventHandlerParams.add("\n($T) eventHandler.params[$L],", ClassName.get(v.asType()), i); i++; } eventHandlerParams.add("\n$L", "eventHandler.mHasEventDispatcher"); eventHandlerParams.unindent(); if (element.getReturnType().getKind() != VOID) { methodBuilder.addStatement( "return do$L($L)", capitalize(element.getSimpleName().toString()), eventHandlerParams.build()); } else { methodBuilder.addStatement( "do$L($L)", capitalize(element.getSimpleName().toString()), eventHandlerParams.build()); methodBuilder.addStatement("return null"); } methodBuilder.endControlFlow(); } methodBuilder.addStatement("default: \nreturn null"); methodBuilder.endControlFlow(); writeMethodSpec(methodBuilder.build()); } private void generateEventHandlerFactory( ExecutableElement element, ClassName contextClassName, ClassName componentClassName) { final List<VariableElement> eventParamElements = Utils.getParametersWithAnnotation(element, Param.class); final List<Parameter> eventParams = new ArrayList<>(); final List<String> typeParameters = new ArrayList<>(); for (VariableElement e : eventParamElements) { eventParams.add(new Parameter(ClassName.get(e.asType()), e.getSimpleName().toString())); for (TypeMirror typeParam : getTypeVarArguments(e.asType())) { typeParameters.add(typeParam.toString()); } } final DeclaredType eventClass = Utils.getAnnotationParameter( mProcessingEnv, element, OnEvent.class, "value"); final TypeName eventClassName = ClassName.bestGuess(((TypeElement) eventClass.asElement()).getQualifiedName().toString()); writeMethodSpec(new EventHandlerFactoryMethodSpecBuilder() .eventId(getElementId(element)) .eventName(element.getSimpleName().toString()) .contextClass(contextClassName) .eventHandlerClassName( ParameterizedTypeName.get(ClassNames.EVENT_HANDLER, eventClassName)) .eventParams(eventParams) .typeParameters(typeParameters) .build()); writeMethodSpec(new EventHandlerFactoryMethodSpecBuilder() .eventId(getElementId(element)) .eventName(element.getSimpleName().toString()) .contextClass(componentClassName) .eventHandlerClassName( ParameterizedTypeName.get(ClassNames.EVENT_HANDLER, 
eventClassName)) .eventParams(eventParams) .typeParameters(typeParameters) .build()); } private void generateOnEventHandler( ExecutableElement element, ClassName contextClassName) { if (element.getParameters().size() == 0 || !ClassName.get(element.getParameters().get(0).asType()).equals(contextClassName)) { throw new ComponentsProcessingException( element, "The first parameter for an onEvent method should be of type " +contextClassName.toString()); } final String evenHandlerName = element.getSimpleName().toString(); final List<Parameter> fromParams = new ArrayList<>(); fromParams.add(new Parameter( contextClassName, element.getParameters().get(0).getSimpleName().toString())); final List<VariableElement> fromParamElements = Utils.getParametersWithAnnotation(element, FromEvent.class); fromParamElements.addAll(Utils.getParametersWithAnnotation(element, Param.class)); for (VariableElement v : fromParamElements) { fromParams.add(new Parameter(ClassName.get(v.asType()), v.getSimpleName().toString())); } writeMethodSpec(new DelegateMethodSpecBuilder() .implClassName(getImplClassName()) .abstractImplType(ClassNames.HAS_EVENT_DISPATCHER_CLASSNAME) .implParameters(mImplParameters) .visibility(PRIVATE) .fromName("do" + capitalize(evenHandlerName)) .fromParams(fromParams) .target(mSourceDelegateAccessorName) .toName(evenHandlerName) .toParams(getParams(element)) .fromReturnType(ClassName.get(element.getReturnType())) .toReturnType(ClassName.get(element.getReturnType())) .stateParams(mStateMap.keySet()) .build()); } private void generateOnStateUpdateMethods( ExecutableElement element, ClassName contextClass, ClassName componentClass) { final String methodName = element.getSimpleName().toString(); final List<VariableElement> updateMethodParamElements = Utils.getParametersWithAnnotation(element, Param.class); final OnStateUpdateMethodSpecBuilder builder = new OnStateUpdateMethodSpecBuilder() .componentClass(componentClass) .lifecycleImplClass(mSimpleClassName) .stateUpdateClassName(getStateUpdateClassName(element)); for (VariableElement e : updateMethodParamElements) { builder.updateMethodParam( new Parameter(ClassName.get(e.asType()), e.getSimpleName().toString())); List<TypeMirror> genericArgs = getTypeVarArguments(e.asType()); if (genericArgs != null) { for (TypeMirror genericArg : genericArgs) { builder.typeParameter(genericArg.toString()); } } } writeMethodSpec(builder .updateMethodName(methodName) .async(false) .contextClass(contextClass) .build()); writeMethodSpec(builder .updateMethodName(methodName + "Async") .async(true) .contextClass(contextClass) .build()); } static List<TypeMirror> getTypeVarArguments(TypeMirror diffType) { List<TypeMirror> typeVarArguments = new ArrayList<>(); if (diffType.getKind() == DECLARED) { final DeclaredType parameterDeclaredType = (DeclaredType) diffType; final List<? extends TypeMirror> typeArguments = parameterDeclaredType.getTypeArguments(); for (TypeMirror typeArgument : typeArguments) { if (typeArgument.getKind() == TYPEVAR) { typeVarArguments.add(typeArgument); } } } return typeVarArguments; } public static List<TypeMirror> getGenericTypeArguments(TypeMirror diffType) { if (diffType.getKind() == DECLARED) { final DeclaredType parameterDeclaredType = (DeclaredType) diffType; final List<? 
extends TypeMirror> typeArguments = parameterDeclaredType.getTypeArguments(); return (List<TypeMirror>) typeArguments; } return null; } public static List<Parameter> getParams(ExecutableElement e) { final List<Parameter> params = new ArrayList<>(); for (VariableElement v : e.getParameters()) { params.add(new Parameter(ClassName.get(v.asType()), v.getSimpleName().toString())); } return params; } /** * Generates a class that implements {@link com.facebook.litho.ComponentLifecycle} given * a method annotated with {@link OnUpdateState}. The class constructor takes as params all the * params annotated with {@link Param} on the method and keeps them in class members. * @param element The method annotated with {@link OnUpdateState} */ private void generateStateUpdateClass( ExecutableElement element, ClassName componentClassName, ClassName stateContainerClassName, ClassName updateStateInterface, StaticFlag staticFlag) { final String stateUpdateClassName = getStateUpdateClassName(element); final TypeName implClassName = ClassName.bestGuess(getImplClassName()); final StateUpdateImplClassBuilder stateUpdateImplClassBuilder = new StateUpdateImplClassBuilder() .withTarget(mSourceDelegateAccessorName) .withSpecOnUpdateStateMethodName(element.getSimpleName().toString()) .withComponentImplClassName(implClassName) .withComponentClassName(componentClassName) .withComponentStateUpdateInterface(updateStateInterface) .withStateContainerClassName(stateContainerClassName) .withStateContainerImplClassName(ClassName.bestGuess(getStateContainerImplClassName())) .withStateUpdateImplClassName(stateUpdateClassName) .withSpecOnUpdateStateMethodParams(getParams(element)) .withStateValueParams(getStateValueParams(element)) .withStaticFlag(staticFlag); final List<VariableElement> parametersVarElements = Utils.getParametersWithAnnotation(element, Param.class); final List<Parameter> parameters = new ArrayList<>(); for (VariableElement v : parametersVarElements) { parameters.add(new Parameter(ClassName.get(v.asType()), v.getSimpleName().toString())); for (TypeMirror typeVar : getTypeVarArguments(v.asType())) { stateUpdateImplClassBuilder.typeParameter(typeVar.toString()); } } stateUpdateImplClassBuilder.withParamsForStateUpdate(parameters); writeInnerTypeSpec(stateUpdateImplClassBuilder.build()); } /** * Generate an onLoadStyle implementation. */ public void generateOnLoadStyle() { final ExecutableElement delegateMethod = Utils.getAnnotatedMethod( mSourceElement, OnLoadStyle.class); if (delegateMethod == null) { return; } final MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder("onLoadStyle") .addAnnotation( AnnotationSpec .builder(SuppressWarnings.class) .addMember("value", "$S", "unchecked").build()) .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .addParameter(ClassNames.COMPONENT_CONTEXT, "_context") .addParameter( ParameterSpec.builder( ParameterizedTypeName.get( ClassNames.COMPONENT, WildcardTypeName.subtypeOf(Object.class)), "_component") .build()); final List<? 
extends VariableElement> parameters = delegateMethod.getParameters(); for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { final VariableElement v = parameters.get(i); final TypeName typeName = ClassName.get(v.asType()); methodBuilder.addStatement( "$L $L = ($L) $L", typeName, v.getSimpleName(), typeName, "acquireOutput()"); } final CodeBlock.Builder delegateParameters = CodeBlock.builder().indent(); delegateParameters.add("\n_context"); for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { delegateParameters.add(",\n$L", parameters.get(i).getSimpleName()); } delegateParameters.unindent(); methodBuilder.addStatement( "this.$L.$L($L)", mSourceDelegateAccessorName, delegateMethod.getSimpleName(), delegateParameters.build()); final String implClassName = getImplClassName(); final String implInstanceName = "_" + getImplInstanceName(); methodBuilder.addStatement( "$L " + implInstanceName + "= ($L) _component", implClassName, implClassName); for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { final VariableElement v = parameters.get(i); final String name = v.getSimpleName().toString(); methodBuilder.beginControlFlow("if ($L.get() != null)", name); methodBuilder.addStatement( "$L.$L = $L.get()", implInstanceName, name, name); methodBuilder.endControlFlow(); methodBuilder.addStatement("releaseOutput($L)", name); } writeMethodSpec(methodBuilder.build()); } /** * Find variables annotated with {@link PropDefault} */ private void populatePropDefaults() { mPropDefaults = PropDefaultsExtractor.getPropDefaults(mSourceElement); } public void generateComponentImplClass(Stages.StaticFlag isStatic) { generateStateContainerImplClass(isStatic, ClassNames.STATE_CONTAINER_COMPONENT); final String implClassName = getImplClassName(); final ClassName stateContainerImplClass = ClassName.bestGuess(getSimpleClassName() + STATE_CONTAINER_IMPL_NAME_SUFFIX); final TypeSpec.Builder implClassBuilder = TypeSpec.classBuilder(implClassName) .addModifiers(Modifier.PRIVATE) .superclass( ParameterizedTypeName.get( ClassNames.COMPONENT, ClassName.bestGuess(getSimpleClassName()))) .addSuperinterface(Cloneable.class); if (isStatic.equals(Stages.StaticFlag.STATIC)) { implClassBuilder.addModifiers(Modifier.STATIC); implClassBuilder.addTypeVariables(mTypeVariables); } implClassBuilder.addField(stateContainerImplClass, STATE_CONTAINER_IMPL_MEMBER); implClassBuilder.addMethod(generateStateContainerGetter(ClassNames.STATE_CONTAINER_COMPONENT)); generateComponentClassProps(implClassBuilder, ClassNames.EVENT_HANDLER); MethodSpec.Builder constructorBuilder = MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .addStatement("super(get())") .addStatement(STATE_CONTAINER_IMPL_MEMBER + " = new $T()", stateContainerImplClass); implClassBuilder.addMethod(constructorBuilder.build()); implClassBuilder.addMethod( MethodSpec.methodBuilder("getSimpleName") .addModifiers(Modifier.PUBLIC) .addAnnotation(Override.class) .returns(ClassNames.STRING) .addStatement("return \"" + getSimpleClassName() + "\"") .build()); final MethodSpec equalsBuilder = generateEqualsMethodDefinition(true); implClassBuilder.addMethod(equalsBuilder); final MethodSpec copyInterStage = generateCopyInterStageImpl(implClassName); if (copyInterStage != null) { implClassBuilder.addMethod(copyInterStage); } for (ExecutableElement element : mOnUpdateStateMethods) { final String stateUpdateClassName = getStateUpdateClassName(element); final List<Parameter> parameters = getParamsWithAnnotation(element, Param.class); 
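      // Illustrative spec-side input for this loop (hypothetical names; this is what the spec
      // author writes, not what is generated here). An OnUpdateState-annotated method takes
      // StateValue<T> holders whose names and types match @State params declared elsewhere in
      // the spec, plus any extra values annotated with @Param:
      //
      //   @OnUpdateState
      //   static void incrementCounter(StateValue<Integer> counter, @Param int delta) {
      //     counter.set(counter.get() + delta);
      //   }
      //
      // validateOnStateUpdateMethodDeclaration() enforces exactly this shape.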
implClassBuilder.addMethod( new CreateStateUpdateInstanceMethodSpecBuilder() .parameters(parameters) .stateUpdateClass(stateUpdateClassName) .build()); } final MethodSpec makeShallowCopy = generateMakeShallowCopy(ClassNames.COMPONENT, /* hasDeepCopy */ false); if (makeShallowCopy != null) { implClassBuilder.addMethod(makeShallowCopy); } writeInnerTypeSpec(implClassBuilder.build()); } public void generateLazyStateUpdateMethods( ClassName context, ClassName componentClass, TypeName stateUpdateType, TypeName stateContainerComponent) { for (VariableElement state : mStateMap.values()) { if (state.getAnnotation(State.class).canUpdateLazily()) { writeMethodSpec(new OnLazyStateUpdateMethodSpecBuilder() .contextClass(context) .componentClass(componentClass) .stateUpdateType(stateUpdateType) .stateName(state.getSimpleName().toString()) .stateType(ClassName.get(state.asType())) .withStateContainerClassName(stateContainerComponent) .implClass(getImplClassName()) .lifecycleImplClass(mSimpleClassName) .build()); } } } private void generateStateContainerImplClass( Stages.StaticFlag isStatic, ClassName stateContainerClassName) { final TypeSpec.Builder stateContainerImplClassBuilder = TypeSpec .classBuilder(getStateContainerImplClassName()) .addSuperinterface(stateContainerClassName); if (isStatic.equals(Stages.StaticFlag.STATIC)) { stateContainerImplClassBuilder.addModifiers(Modifier.STATIC, Modifier.PRIVATE); stateContainerImplClassBuilder.addTypeVariables(mTypeVariables); } for (String stateName : mStateMap.keySet()) { VariableElement v = mStateMap.get(stateName); stateContainerImplClassBuilder.addField(getPropFieldSpec(v, true)); } writeInnerTypeSpec(stateContainerImplClassBuilder.build()); } private static MethodSpec generateStateContainerGetter(ClassName stateContainerClassName) { return MethodSpec.methodBuilder("getStateContainer") .addModifiers(Modifier.PROTECTED) .addAnnotation(Override.class) .returns(stateContainerClassName) .addStatement("return " + STATE_CONTAINER_IMPL_MEMBER) .build(); } public void generateReferenceImplClass( Stages.StaticFlag isStatic, TypeMirror referenceType) { final TypeSpec.Builder implClassBuilder = TypeSpec.classBuilder(getImplClassName()) .addModifiers(Modifier.PRIVATE) .superclass( ParameterizedTypeName.get( ClassNames.REFERENCE, ClassName.get(referenceType))); if (isStatic.equals(Stages.StaticFlag.STATIC)) { implClassBuilder.addModifiers(Modifier.STATIC); } generateComponentClassProps(implClassBuilder, null); implClassBuilder.addMethod( MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .addStatement("super(get())") .build()); implClassBuilder.addMethod( MethodSpec.methodBuilder("getSimpleName") .addModifiers(Modifier.PUBLIC) .addAnnotation(Override.class) .returns(ClassNames.STRING) .addStatement("return \"" + getSimpleClassName() + "\"") .build()); final MethodSpec equalsBuilder = generateEqualsMethodDefinition(false); implClassBuilder.addMethod(equalsBuilder); writeInnerTypeSpec(implClassBuilder.build()); } public void generateTransferState( ClassName contextClassName, ClassName componentClassName, ClassName stateContainerClassName) { if (!mStateMap.isEmpty()) { MethodSpec methodSpec = new TransferStateSpecBuilder() .contextClassName(contextClassName) .componentClassName(componentClassName) .componentImplClassName(getImplClassName()) .stateContainerClassName(stateContainerClassName) .stateContainerImplClassName(getStateContainerImplClassName()) .stateParameters(mStateMap.keySet()) .build(); mClassTypeSpec.addMethod(methodSpec); } } public void 
generateHasState() { if (mStateMap.isEmpty()) { return; } MethodSpec hasStateMethod = MethodSpec.methodBuilder("hasState") .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .returns(TypeName.BOOLEAN) .addStatement("return true") .build(); mClassTypeSpec.addMethod(hasStateMethod); } public void generateListComponentImplClass(Stages.StaticFlag isStatic) { generateStateContainerImplClass(isStatic, SectionClassNames.STATE_CONTAINER_SECTION); final ClassName stateContainerImplClass = ClassName.bestGuess(getSimpleClassName() + STATE_CONTAINER_IMPL_NAME_SUFFIX); final TypeSpec.Builder stateClassBuilder = TypeSpec.classBuilder(getImplClassName()) .addModifiers(Modifier.PRIVATE) .superclass( ParameterizedTypeName.get( SectionClassNames.SECTION, ClassName.bestGuess(getSimpleClassName()))) .addSuperinterface(Cloneable.class); if (isStatic.equals(Stages.StaticFlag.STATIC)) { stateClassBuilder.addModifiers(Modifier.STATIC); } stateClassBuilder.addField(stateContainerImplClass, STATE_CONTAINER_IMPL_MEMBER); stateClassBuilder.addMethod(generateStateContainerGetter(SectionClassNames.STATE_CONTAINER_SECTION)); generateComponentClassProps(stateClassBuilder, ClassNames.EVENT_HANDLER); stateClassBuilder.addMethod( MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .addStatement("super(get())") .addStatement(STATE_CONTAINER_IMPL_MEMBER + " = new $T()", stateContainerImplClass) .build()); final MethodSpec equalsBuilder = generateEqualsMethodDefinition(false); stateClassBuilder.addMethod(equalsBuilder); for (ExecutableElement element : mOnUpdateStateMethods) { final String stateUpdateClassName = getStateUpdateClassName(element); final List<Parameter> parameters = getParamsWithAnnotation(element, Param.class); stateClassBuilder.addMethod( new CreateStateUpdateInstanceMethodSpecBuilder() .parameters(parameters) .stateUpdateClass(stateUpdateClassName) .build()); } final MethodSpec makeShallowCopy = generateMakeShallowCopy(SectionClassNames.SECTION, /* hasDeepCopy */ true); if (makeShallowCopy != null) { stateClassBuilder.addMethod(makeShallowCopy); } writeInnerTypeSpec(stateClassBuilder.build()); } private MethodSpec generateEqualsMethodDefinition(boolean shouldCheckId) { final String implClassName = getImplClassName(); final String implInstanceName = getImplInstanceName(); MethodSpec.Builder equalsBuilder = MethodSpec.methodBuilder("equals") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addParameter(TypeName.OBJECT, "other") .beginControlFlow("if (this == other)") .addStatement("return true") .endControlFlow() .beginControlFlow("if (other == null || getClass() != other.getClass())") .addStatement("return false") .endControlFlow() .addStatement(implClassName + " " + implInstanceName + " = (" + implClassName + ") other"); if (shouldCheckId) { equalsBuilder .beginControlFlow( "if (this.getId() == " + implInstanceName + ".getId())") .addStatement("return true") .endControlFlow(); } for (VariableElement v : mImplMembers.values()) { if (!isState(v)) { addCompareStatement(implInstanceName, v, equalsBuilder, false); } } for (VariableElement v : mStateMap.values()) { addCompareStatement(implInstanceName, v, equalsBuilder, true); } equalsBuilder.addStatement("return true"); return equalsBuilder.build(); } private static void addCompareStatement( String implInstanceName, VariableElement v, MethodSpec.Builder equalsBuilder, boolean isState) { final TypeMirror variableType = v.asType(); final TypeMirror outputTypeMirror = Utils.getGenericTypeArgument( 
variableType, ClassNames.OUTPUT); final TypeMirror diffTypeMirror = Utils.getGenericTypeArgument( variableType, ClassNames.DIFF); final TypeKind variableKind = diffTypeMirror != null ? diffTypeMirror.getKind() : variableType.getKind(); String qualifiedName = ""; if (variableType instanceof DeclaredType) { final DeclaredType declaredType = (DeclaredType) variableType; qualifiedName = ((TypeElement) declaredType.asElement()).getQualifiedName().toString(); } final String stateContainerMember = isState ? "." + STATE_CONTAINER_IMPL_MEMBER : ""; final CharSequence thisVarName = isState ? STATE_CONTAINER_IMPL_MEMBER + "." + v.getSimpleName() : v.getSimpleName(); if (outputTypeMirror == null) { if (variableKind == FLOAT) { equalsBuilder .beginControlFlow( "if (Float.compare($L, " + implInstanceName + stateContainerMember + ".$L) != 0)", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (variableKind == DOUBLE) { equalsBuilder .beginControlFlow( "if (Double.compare($L, " + implInstanceName + stateContainerMember + ".$L) != 0)", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (variableType.getKind() == ARRAY) { equalsBuilder .beginControlFlow( "if (!Arrays.equals($L, " + implInstanceName + stateContainerMember + ".$L))", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (variableType.getKind().isPrimitive()) { equalsBuilder .beginControlFlow( "if ($L != " + implInstanceName + stateContainerMember + ".$L)", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (qualifiedName.equals(ClassNames.REFERENCE)) { equalsBuilder .beginControlFlow( "if (Reference.shouldUpdate($L, " + implInstanceName + stateContainerMember + ".$L))", thisVarName, v.getSimpleName(), v.getSimpleName(), v.getSimpleName(), v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else { equalsBuilder .beginControlFlow( "if ($L != null ? !$L.equals(" + implInstanceName + stateContainerMember + ".$L) : " + implInstanceName + stateContainerMember + ".$L != null)", thisVarName, thisVarName, v.getSimpleName(), v.getSimpleName()) .addStatement("return false") .endControlFlow(); } } } private boolean isState(VariableElement v) { for (VariableElement find : mStateMap.values()) { if (find.getSimpleName().equals(v.getSimpleName())) { return true; } } return false; } private void generateComponentClassProps( TypeSpec.Builder implClassBuilder, ClassName eventHandlerClassName) { for (VariableElement v : mImplMembers.values()) { implClassBuilder.addField(getPropFieldSpec(v, false)); } if (mExtraStateMembers != null) { for (String key : mExtraStateMembers.keySet()) { final TypeMirror variableType = mExtraStateMembers.get(key); final FieldSpec.Builder fieldBuilder = FieldSpec.builder(TypeName.get(variableType), key); implClassBuilder.addField(fieldBuilder.build()); } } for (TypeElement event : mEventDeclarations) { implClassBuilder.addField(FieldSpec.builder( eventHandlerClassName, getEventHandlerInstanceName(event.getSimpleName().toString())) .build()); } } private FieldSpec getPropFieldSpec(VariableElement v, boolean isStateProp) { final TypeMirror variableType = v.asType(); TypeMirror wrappingTypeMirror = Utils.getGenericTypeArgument( variableType, ClassNames.OUTPUT); if (wrappingTypeMirror == null) { wrappingTypeMirror = Utils.getGenericTypeArgument(variableType, ClassNames.DIFF); } final TypeName variableClassName = JPUtil.getTypeFromMirror( wrappingTypeMirror != null ? 
wrappingTypeMirror : variableType); final FieldSpec.Builder fieldBuilder = FieldSpec.builder( variableClassName, v.getSimpleName().toString()); if (!isInterStageComponentVariable(v)) { if (isStateProp) { fieldBuilder.addAnnotation(State.class); } else { fieldBuilder.addAnnotation(Prop.class); } } final boolean hasDefaultValue = hasDefaultValue(v); if (hasDefaultValue) { fieldBuilder.initializer( "$L.$L", mSourceElement.getSimpleName().toString(), v.getSimpleName().toString()); } return fieldBuilder.build(); } public void generateIsPureRender() { final MethodSpec.Builder shouldUpdateComponent = MethodSpec.methodBuilder("isPureRender") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addStatement("return true"); mClassTypeSpec.addMethod(shouldUpdateComponent.build()); } public void generateCallsShouldUpdateOnMount() { final MethodSpec.Builder isFast = MethodSpec.methodBuilder("callsShouldUpdateOnMount") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addStatement("return true"); mClassTypeSpec.addMethod(isFast.build()); } public void generateShouldUpdateMethod( ExecutableElement shouldUpdateElement, ClassName comparedInstancesClassName) { final ClassName implClass = ClassName.bestGuess(getImplClassName()); final MethodSpec.Builder shouldUpdateComponent = MethodSpec.methodBuilder("shouldUpdate") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addParameter(comparedInstancesClassName, "previous") .addParameter(comparedInstancesClassName, "next"); final List<? extends VariableElement> shouldUpdateParams = shouldUpdateElement.getParameters(); final int shouldUpdateParamSize = shouldUpdateParams.size(); if (shouldUpdateParamSize > 0) { shouldUpdateComponent .addStatement( "$L previousImpl = ($L) previous", implClass, implClass) .addStatement( "$L nextImpl = ($L) next", implClass, implClass); } final CodeBlock.Builder delegateParameters = CodeBlock.builder(); delegateParameters.indent(); int i = 0; final CodeBlock.Builder releaseDiffs = CodeBlock.builder(); for (VariableElement variableElement : shouldUpdateParams) { final Name variableElementName = variableElement.getSimpleName(); final TypeMirror variableElementType = variableElement.asType(); final VariableElement componentMember = findPropVariableForName(variableElementName); if (componentMember == null) { throw new ComponentsProcessingException( variableElement, "Arguments for ShouldUpdate should match declared Props"); } final TypeMirror innerType = Utils.getGenericTypeArgument( variableElementType, ClassNames.DIFF); if (innerType == null) { throw new ComponentsProcessingException( variableElement, "Arguments for ShouldUpdate should be of type Diff " + componentMember.asType()); } final TypeName typeName; final TypeName innerTypeName = JPUtil.getTypeFromMirror(innerType); if (componentMember.asType().getKind().isPrimitive()) { typeName = JPUtil.getTypeFromMirror(componentMember.asType()).box(); } else { typeName = JPUtil.getTypeFromMirror(componentMember.asType()); } if (!typeName.equals(innerTypeName)) { throw new ComponentsProcessingException( variableElement, "Diff Type parameter does not match Prop " + componentMember); } shouldUpdateComponent .addStatement( "$L $L = acquireDiff(previousImpl.$L, nextImpl.$L)", variableElementType, variableElementName, variableElementName, variableElementName); if (i != 0) { delegateParameters.add(",\n"); } delegateParameters.add(variableElementName.toString()); i++; 
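      // The generated shouldUpdate() acquired one pooled Diff per parameter above via
      // acquireDiff(previousImpl.x, nextImpl.x); the statements collected here release each
      // Diff again after the spec's shouldUpdate delegate has run.
      //
      // Illustrative spec-side counterpart (hypothetical prop name; each parameter must be a
      // Diff<T> whose type argument matches a declared @Prop):
      //
      //   static boolean shouldUpdate(Diff<CharSequence> text) {
      //     return text.getPrevious() == null || !text.getPrevious().equals(text.getNext());
      //   }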
      releaseDiffs.addStatement(
          "releaseDiff($L)",
          variableElementName);
    }
    delegateParameters.unindent();

    shouldUpdateComponent.addStatement(
        "boolean shouldUpdate = $L.$L(\n$L)",
        mSourceDelegateAccessorName,
        shouldUpdateElement.getSimpleName(),
        delegateParameters.build());

    shouldUpdateComponent.addCode(releaseDiffs.build());

    shouldUpdateComponent.addStatement(
        "return shouldUpdate");

    mClassTypeSpec.addMethod(shouldUpdateComponent.build());
  }

  public void generateTreePropsMethods(ClassName contextClassName, ClassName componentClassName) {
    verifyOnCreateTreePropsForChildren(contextClassName);

    if (!mTreeProps.isEmpty()) {
      final PopulateTreePropsMethodBuilder builder = new PopulateTreePropsMethodBuilder();
      builder.componentClassName = componentClassName;
      builder.lifecycleImplClass = getImplClassName();
      for (VariableElement treeProp : mTreeProps) {
        builder.treeProps.add(
            new Parameter(ClassName.get(treeProp.asType()), treeProp.getSimpleName().toString()));
      }
      mClassTypeSpec.addMethod(builder.build());
    }

    if (mOnCreateTreePropsMethods.isEmpty()) {
      return;
    }

    final GetTreePropsForChildrenMethodBuilder builder = new GetTreePropsForChildrenMethodBuilder();
    builder.lifecycleImplClass = getImplClassName();
    builder.delegateName = getSourceDelegateAccessorName();
    builder.contextClassName = contextClassName;
    builder.componentClassName = componentClassName;

    for (ExecutableElement executable : mOnCreateTreePropsMethods) {
      final CreateTreePropMethodData method = new CreateTreePropMethodData();
      method.parameters = getParams(executable);
      method.returnType = ClassName.get(executable.getReturnType());
      method.name = executable.getSimpleName().toString();
      builder.createTreePropMethods.add(method);
    }
    mClassTypeSpec.addMethod(builder.build());
  }

  private void verifyOnCreateTreePropsForChildren(ClassName contextClassName) {
    for (ExecutableElement method : mOnCreateTreePropsMethods) {
      if (method.getReturnType().getKind().equals(TypeKind.VOID)) {
        throw new ComponentsProcessingException(
            method,
            "@OnCreateTreeProp annotated method " +
                method.getSimpleName() +
                " cannot have a void return type");
      }

      final List<?
extends VariableElement> params = method.getParameters(); if (params.isEmpty() || !ClassName.get(params.get(0).asType()).equals(contextClassName)) { throw new ComponentsProcessingException( method, "The first argument of an @OnCreateTreeProp method should be the " + contextClassName.simpleName()); } } } private VariableElement findPropVariableForName(Name variableElementName) { for (VariableElement prop : mProps) { if (prop.getSimpleName().equals(variableElementName)) { return prop; } } return null; } private MethodSpec generateCopyInterStageImpl(String implClassName) { final List<String> elementList = getInterStageVariableNames(); if (elementList.isEmpty()) { return null; } final String implInstanceName = getImplInstanceName(); final MethodSpec.Builder copyInterStageComponentBuilder = MethodSpec .methodBuilder("copyInterStageImpl") .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .returns(TypeName.VOID) .addParameter( ParameterizedTypeName.get( ClassNames.COMPONENT, ClassName.bestGuess(getSimpleClassName())), "impl") .addStatement( "$L " + implInstanceName + " = ($L) impl", implClassName, implClassName); for (String s : elementList) { copyInterStageComponentBuilder .addStatement( "$L = " + implInstanceName + ".$L", s, s); } return copyInterStageComponentBuilder.build(); } private List<String> getInterStageVariableNames() { final List<String> elementList = new ArrayList<>(); for (VariableElement v : mImplMembers.values()) { if (isInterStageComponentVariable(v)) { elementList.add(v.getSimpleName().toString()); } } return elementList; } private static boolean isInterStageComponentVariable(VariableElement variableElement) { final TypeMirror variableType = variableElement.asType(); final TypeMirror outputTypeMirror = Utils.getGenericTypeArgument( variableType, ClassNames.OUTPUT); return outputTypeMirror != null; } private static boolean isStateProp(VariableElement variableElement) { return variableElement.getAnnotation(State.class) != null; } public void generateListEvents() { for (TypeElement event : mEventDeclarations) { generateEvent( event, ClassNames.EVENT_HANDLER, SectionClassNames.SECTION_LIFECYCLE, SectionClassNames.SECTION_CONTEXT, "getSectionScope"); } } private static String getEventHandlerInstanceName(String eventHandlerClassName) { return Character.toLowerCase(eventHandlerClassName.charAt(0)) + eventHandlerClassName.substring(1) + "Handler"; } private void generateEvent( TypeElement eventDeclaration, ClassName eventHandlerClassName, ClassName lifecycleClassName, ClassName contextClassName, String scopeMethodName) { final String eventName = eventDeclaration.getSimpleName().toString(); writeMethodSpec(MethodSpec.methodBuilder("get" + eventName + "Handler") .addModifiers(Modifier.PUBLIC, Modifier.STATIC) .returns(eventHandlerClassName) .addParameter(contextClassName, "context") .addCode( CodeBlock.builder() .beginControlFlow("if (context.$L() == null)", scopeMethodName) .addStatement("return null") .endControlFlow() .build()) .addStatement( "return (($L.$T) context.$L()).$L", getSimpleClassName(), ClassName.bestGuess(getImplClassName()), scopeMethodName, getEventHandlerInstanceName(eventName)) .build()); // Override the method that the component will call to fire the event. 
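    // Illustrative shape of the emitted dispatcher, assuming a hypothetical ClickEvent with a
    // single public "view" field and a void returnType (the real parameter list and return
    // type are derived from the event's fields and its Event annotation):
    //
    //   static void dispatchClickEvent(EventHandler _eventHandler, View view) {
    //     ClickEvent _eventState = new ClickEvent();
    //     _eventState.view = view;
    //     EventDispatcher _lifecycle = _eventHandler.mHasEventDispatcher.getEventDispatcher();
    //     _lifecycle.dispatchOnEvent(_eventHandler, _eventState);
    //   }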
final MethodDescription methodDescription = new MethodDescription(); methodDescription.annotations = new Class[] {}; methodDescription.accessType = Modifier.STATIC; methodDescription.name = "dispatch" + eventName; methodDescription.parameterTypes = new TypeName[] { ClassName.bestGuess(mQualifiedClassName) }; final TypeMirror returnType = Utils.getAnnotationParameter(mProcessingEnv, eventDeclaration, Event.class, "returnType"); if (returnType != null) { methodDescription.returnType = TypeName.get(returnType); } generateEventDispatcher( methodDescription, eventDeclaration.getTypeParameters(), eventDeclaration, eventHandlerClassName, lifecycleClassName); } /** * Generate an event dispatcher method for the given event. * * @param fixedMethod description of method signature to be generated * @param typeParameters * @param element method the event will call to dispatch * @param eventHandlerClassName @throws IOException If one of the writer methods throw */ private void generateEventDispatcher( MethodDescription fixedMethod, List<? extends TypeParameterElement> typeParameters, TypeElement element, ClassName eventHandlerClassName, ClassName lifecycleClassName) { final List<? extends VariableElement> parameters = Utils.getEnclosedFields(element); final MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder(fixedMethod.name); if (fixedMethod.annotations != null) { for (Class annotation : fixedMethod.annotations) { methodBuilder.addAnnotation(annotation); } } for (TypeParameterElement typeParameterElement : typeParameters) { methodBuilder.addTypeVariable( TypeVariableName.get(typeParameterElement.getSimpleName().toString())); } if (fixedMethod.accessType != null) { methodBuilder.addModifiers(fixedMethod.accessType); } methodBuilder.addParameter(eventHandlerClassName, "_eventHandler"); for (VariableElement v : parameters) { methodBuilder.addParameter(ClassName.get(v.asType()), v.getSimpleName().toString()); } // Add the event parameters to a implParameters. // This should come from a pool. final ClassName className = ClassName.get(element); methodBuilder.addStatement( "$T _eventState = new $T()", className, className); for (VariableElement v : parameters) { final String variableName = v.getSimpleName().toString(); methodBuilder.addStatement("_eventState.$L = $L", variableName, variableName); } methodBuilder.addStatement( "$T _lifecycle = _eventHandler.mHasEventDispatcher.getEventDispatcher()", ClassNames.EVENT_DISPATCHER); final TypeName returnType = fixedMethod.returnType; if (returnType != null && !returnType.equals(ClassName.VOID)) { methodBuilder.addStatement( "return ($L) _lifecycle.dispatchOnEvent(_eventHandler, _eventState)", returnType); methodBuilder.returns(returnType); } else { methodBuilder.addStatement("_lifecycle.dispatchOnEvent(_eventHandler, _eventState)"); } writeMethodSpec(methodBuilder.build()); } /** * Generate a builder method for a given declared parameters. 
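 * <p>The prop's declared Java type is also validated against its {@code resType} before any
 * setter is emitted. For example (illustrative, hypothetical prop name), a spec parameter
 * declared as {@code @Prop(resType = ResType.STRING) CharSequence title} passes the STRING
 * case below, while the same prop declared as an {@code int} would be rejected by
 * {@code assertOfType}.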
*/ private Collection<MethodSpec> generatePropsBuilderMethods( VariableElement element, TypeName propsBuilderClassName, int requiredIndex, ClassName componentClassName) { final Prop propAnnotation = element.getAnnotation(Prop.class); final ResType resType = propAnnotation.resType(); switch (resType) { case STRING: assertOfType(element, TypeName.get(String.class), TypeName.get(CharSequence.class)); break; case STRING_ARRAY: assertOfType(element, TypeName.get(String[].class)); break; case INT: assertOfType(element, TypeName.get(int.class), TypeName.get(Integer.class)); break; case INT_ARRAY: assertOfType(element, TypeName.get(int[].class)); break; case BOOL: assertOfType(element, TypeName.get(boolean.class), TypeName.get(Boolean.class)); break; case COLOR: assertOfType(element, TypeName.get(int.class), TypeName.get(Integer.class)); break; case DIMEN_SIZE: assertOfType( element, TypeName.get(int.class), TypeName.get(Integer.class), TypeName.get(float.class), TypeName.get(Float.class)); break; case DIMEN_TEXT: assertOfType( element, TypeName.get(int.class), TypeName.get(Integer.class), TypeName.get(float.class), TypeName.get(Float.class)); break; case DIMEN_OFFSET: assertOfType( element, TypeName.get(int.class), TypeName.get(Integer.class), TypeName.get(float.class), TypeName.get(Float.class)); break; case FLOAT: assertOfType(element, TypeName.get(float.class), TypeName.get(Float.class)); break; case DRAWABLE: assertOfType(element, ParameterizedTypeName.get(ClassNames.REFERENCE, ClassNames.DRAWABLE)); break; } TypeMirror typeMirror = element.asType(); final TypeMirror diffTypeMirror = Utils.getGenericTypeArgument(typeMirror, ClassNames.DIFF); if (diffTypeMirror != null) { typeMirror = diffTypeMirror; } final TypeName type = JPUtil.getTypeFromMirror(typeMirror); final String name = element.getSimpleName().toString(); final PropParameter propParameter = new PropParameter( new Parameter(type, name), propAnnotation.optional(), resType, getNonComponentAnnotations(element)); return new PropsBuilderMethodsSpecBuilder() .index(requiredIndex) .propParameter(propParameter) .implName(getImplMemberInstanceName()) .requiredSetName("mRequired") .builderClass(propsBuilderClassName) .componentClassName(componentClassName) .build(); } private void assertOfType(VariableElement element, TypeName... types) { final TypeName elementType = JPUtil.getTypeFromMirror(element.asType()); for (TypeName type : types) { if (type.toString().equals(elementType.toString())) { return; } } throw new ComponentsProcessingException( element, "Expected parameter of one of types" + Arrays.toString(types) + ". Found " + elementType); } private List<ClassName> getNonComponentAnnotations(VariableElement element) { final List<? 
extends AnnotationMirror> annotationMirrors = element.getAnnotationMirrors(); final List<ClassName> annotations = new ArrayList<>(); for (AnnotationMirror annotationMirror : annotationMirrors) { if (annotationMirror.getAnnotationType().toString().startsWith("com.facebook.litho")) { continue; } if (annotationMirror.getElementValues().size() > 0) { throw new ComponentsProcessingException( element, "Currently only non-component annotations without parameters are supported"); } annotations.add(ClassName.bestGuess(annotationMirror.getAnnotationType().toString())); } return annotations; } public void generateReferenceBuilder(StaticFlag isStatic, TypeName genericType) { generateBuilder( isStatic, StyleableFlag.NOT_STYLEABLE, ClassNames.REFERENCE, genericType, INNER_IMPL_BUILDER_CLASS_NAME, new TypeName[]{genericType}, ClassNames.COMPONENT_CONTEXT, null, null, false, false); generateBuilderPool( ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME), "m" + INNER_IMPL_BUILDER_CLASS_NAME + "Pool", mTypeVariables.isEmpty() || isStatic == StaticFlag.STATIC ? StaticFlag.STATIC : StaticFlag.NOT_STATIC, StyleableFlag.NOT_STYLEABLE, ClassNames.COMPONENT_CONTEXT); writeMethodSpec(MethodSpec.methodBuilder("create") .addModifiers(Modifier.PUBLIC) .returns(ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME)) .addParameter(ClassNames.COMPONENT_CONTEXT, "context") .addStatement( "return new$L(context, new $T())", INNER_IMPL_BUILDER_CLASS_NAME, ClassName.bestGuess(getImplClassName())) .addModifiers(isStatic == StaticFlag.STATIC ? Modifier.STATIC : Modifier.FINAL) .build()); } public void generateListBuilder(StaticFlag isStatic, TypeName genericType) { generateBuilder( isStatic, StyleableFlag.NOT_STYLEABLE, SectionClassNames.SECTION, genericType, INNER_IMPL_BUILDER_CLASS_NAME, new TypeName[]{genericType}, SectionClassNames.SECTION_CONTEXT, ClassNames.EVENT_HANDLER, SectionClassNames.SECTION, true, true); generateBuilderPool( ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME), "m" + INNER_IMPL_BUILDER_CLASS_NAME + "Pool", mTypeVariables.isEmpty() || isStatic == StaticFlag.STATIC ? StaticFlag.STATIC : StaticFlag.NOT_STATIC, StyleableFlag.NOT_STYLEABLE, SectionClassNames.SECTION_CONTEXT); writeMethodSpec(MethodSpec.methodBuilder("create") .addModifiers(Modifier.PUBLIC) .returns(ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME)) .addParameter(SectionClassNames.SECTION_CONTEXT, "context") .addStatement( "return new$L(context, new $T())", INNER_IMPL_BUILDER_CLASS_NAME, ClassName.bestGuess(getImplClassName())) .addModifiers(isStatic == StaticFlag.STATIC ? 
Modifier.STATIC : Modifier.FINAL) .build()); } private void generateBuilder( StaticFlag isStatic, StyleableFlag isStylable, ClassName propsClass, TypeName genericType, String builderClassName, TypeName[] builderGenericTypes, ClassName contextClass, ClassName eventHandlerClass, ClassName componentClassName, boolean hasKeySetter, boolean hasLoadingEventHandlerSetter) { final String implClassName = getImplClassName(); final String implInstanceName = getImplInstanceName(); final String implMemberInstanceName = getImplMemberInstanceName(); final String contextMemberInstanceName = "mContext"; final ClassName implClass = ClassName.bestGuess(implClassName); final MethodSpec.Builder initMethodSpec = MethodSpec.methodBuilder("init") .addModifiers(Modifier.PRIVATE) .addParameter(contextClass, "context"); if (isStylable == StyleableFlag.STYLEABLE) { initMethodSpec .addParameter(int.class, "defStyleAttr") .addParameter(int.class, "defStyleRes") .addParameter(implClass, implInstanceName) .addStatement("super.init(context, defStyleAttr, defStyleRes, " + implInstanceName + ")"); } else { initMethodSpec .addParameter(implClass, implInstanceName) .addStatement("super.init(context, " + implInstanceName + ")"); } initMethodSpec .addStatement(implMemberInstanceName + " = " + implInstanceName) .addStatement(contextMemberInstanceName + " = context"); final TypeSpec.Builder propsBuilderClassBuilder = TypeSpec .classBuilder(builderClassName) .addModifiers(Modifier.PUBLIC) .superclass( ParameterizedTypeName.get( ClassName.get(propsClass.packageName(), propsClass.simpleName(), builderClassName), builderGenericTypes)) .addField(implClass, implMemberInstanceName) .addField(contextClass, "mContext"); final List<String> requiredPropNames = new ArrayList<>(); int numRequiredProps = 0; for (VariableElement v : mProps) { if (!v.getAnnotation(Prop.class).optional()) { numRequiredProps++; requiredPropNames.add(v.getSimpleName().toString()); } } if (numRequiredProps > 0) { final FieldSpec.Builder requiredPropsNamesBuilder = FieldSpec.builder( String[].class, REQUIRED_PROPS_NAMES, Modifier.PRIVATE) .initializer("new String[] {$L}", commaSeparateAndQuoteStrings(requiredPropNames)) .addModifiers(Modifier.FINAL); if (isStatic.equals(StaticFlag.STATIC)) { requiredPropsNamesBuilder.addModifiers(Modifier.STATIC); } propsBuilderClassBuilder .addField(requiredPropsNamesBuilder.build()) .addField( FieldSpec.builder( int.class, REQUIRED_PROPS_COUNT, Modifier.PRIVATE) .initializer("$L", numRequiredProps) .addModifiers(Modifier.STATIC, Modifier.FINAL) .build()) .addField( FieldSpec.builder( BitSet.class, "mRequired", Modifier.PRIVATE) .initializer("new $T($L)", BitSet.class, REQUIRED_PROPS_COUNT) .build()); initMethodSpec.addStatement("mRequired.clear()"); } propsBuilderClassBuilder.addMethod(initMethodSpec.build()); // If there are no type variables, then this class can always be static. // If the component implementation class is static, and there are type variables, then this // class can be static but must shadow the type variables from the class. // If the component implementation class is not static, and there are type variables, then this // class is not static and we get the type variables from the class. 
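    // For illustration only, client-side usage of the Builder emitted here (hypothetical
    // generated component "MyComponent" with a single required prop "title"):
    //
    //   MyComponent.create(c)
    //       .title("hello")
    //       .build();
    //
    // Each required-prop setter is expected to set its bit in mRequired (the requiredSetName
    // handed to PropsBuilderMethodsSpecBuilder below), and the generated build() throws an
    // IllegalStateException naming any required props that were never supplied.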
final boolean isBuilderStatic = mTypeVariables.isEmpty() || isStatic.equals(StaticFlag.STATIC); if (isBuilderStatic) { propsBuilderClassBuilder.addModifiers(Modifier.STATIC); if (!mTypeVariables.isEmpty()) { propsBuilderClassBuilder.addTypeVariables(mTypeVariables); } } final ClassName propsBuilderClassName = ClassName.bestGuess(builderClassName); int requiredPropIndex = 0; for (VariableElement v : mProps) { propsBuilderClassBuilder.addMethods( generatePropsBuilderMethods( v, propsBuilderClassName, requiredPropIndex, componentClassName)); if (!v.getAnnotation(Prop.class).optional()) { requiredPropIndex++; } } for (TypeElement event : mEventDeclarations) { propsBuilderClassBuilder.addMethods( new PropsBuilderMethodsSpecBuilder() .propParameter( new PropParameter( new Parameter( eventHandlerClass, getEventHandlerInstanceName(event.getSimpleName().toString())), true, ResType.NONE, Collections.<ClassName>emptyList())) .implName(getImplMemberInstanceName()) .builderClass(propsBuilderClassName) .build()); } if (hasKeySetter) { propsBuilderClassBuilder.addMethod( new PropsBuilderMethodsSpecBuilder() .builderClass(propsBuilderClassName) .buildKeySetter()); } if (hasLoadingEventHandlerSetter) { propsBuilderClassBuilder.addMethod( new PropsBuilderMethodsSpecBuilder() .builderClass(propsBuilderClassName) .buildLoadingEventHandlerSetter()); } final MethodSpec.Builder buildMethodBuilder = MethodSpec.methodBuilder("build") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(ParameterizedTypeName.get(propsClass, genericType)); if (numRequiredProps > 0) { buildMethodBuilder .beginControlFlow( "if (mRequired != null && mRequired.nextClearBit(0) < $L)", REQUIRED_PROPS_COUNT) .addStatement( "$T missingProps = new $T()", ParameterizedTypeName.get(List.class, String.class), ParameterizedTypeName.get(ArrayList.class, String.class)) .beginControlFlow("for (int i = 0; i < $L; i++)", REQUIRED_PROPS_COUNT) .beginControlFlow("if (!mRequired.get(i))") .addStatement("missingProps.add($L[i])", REQUIRED_PROPS_NAMES) .endControlFlow() .endControlFlow() .addStatement( "throw new $T($S + $T.toString(missingProps.toArray()))", IllegalStateException.class, "The following props are not marked as optional and were not supplied: ", Arrays.class) .endControlFlow(); } buildMethodBuilder .addStatement("$L " + implInstanceName + " = " + implMemberInstanceName, implClassName) .addStatement("release()") .addStatement("return " + implInstanceName); propsBuilderClassBuilder.addMethod(buildMethodBuilder.build()); final String poolName = "m" + builderClassName + "Pool"; propsBuilderClassBuilder.addMethod(MethodSpec.methodBuilder("release") .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .addStatement("super.release()") .addStatement(implMemberInstanceName + " = null") .addStatement(contextMemberInstanceName + " = null")
litho-processor/src/main/java/com/facebook/litho/processor/Stages.java
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho.processor; import javax.annotation.processing.ProcessingEnvironment; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; import javax.lang.model.element.TypeParameterElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Types; import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.facebook.common.internal.ImmutableList; import com.facebook.litho.annotations.Event; import com.facebook.litho.annotations.FromEvent; import com.facebook.litho.annotations.OnCreateInitialState; import com.facebook.litho.annotations.OnCreateTreeProp; import com.facebook.litho.annotations.OnEvent; import com.facebook.litho.annotations.OnLoadStyle; import com.facebook.litho.annotations.OnUpdateState; import com.facebook.litho.annotations.Param; import com.facebook.litho.annotations.Prop; import com.facebook.litho.annotations.PropDefault; import com.facebook.litho.annotations.ResType; import com.facebook.litho.annotations.State; import com.facebook.litho.annotations.TreeProp; import com.facebook.litho.javapoet.JPUtil; import com.facebook.litho.processor.GetTreePropsForChildrenMethodBuilder.CreateTreePropMethodData; import com.facebook.litho.specmodels.model.ClassNames; import com.facebook.litho.specmodels.model.PropDefaultModel; import com.facebook.litho.specmodels.processor.PropDefaultsExtractor; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import com.squareup.javapoet.TypeVariableName; import com.squareup.javapoet.WildcardTypeName; import static com.facebook.litho.processor.Utils.capitalize; import static com.facebook.litho.processor.Visibility.PRIVATE; import static com.facebook.litho.specmodels.generator.GeneratorConstants.DELEGATE_FIELD_NAME; import static com.facebook.litho.specmodels.generator.GeneratorConstants.SPEC_INSTANCE_NAME; import static java.util.Arrays.asList; import static javax.lang.model.type.TypeKind.ARRAY; import static javax.lang.model.type.TypeKind.DECLARED; import static javax.lang.model.type.TypeKind.DOUBLE; import static javax.lang.model.type.TypeKind.FLOAT; import static javax.lang.model.type.TypeKind.TYPEVAR; import static javax.lang.model.type.TypeKind.VOID; public class Stages { public static final String IMPL_CLASS_NAME_SUFFIX = "Impl"; 
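  // Naming scheme used below, for orientation (illustrative: "Foo" stands for the generated
  // class produced from a hypothetical FooSpec): the generated "Foo" holds an inner "FooImpl"
  // (IMPL_CLASS_NAME_SUFFIX), an inner "Builder" (INNER_IMPL_BUILDER_CLASS_NAME), a
  // "FooStateContainerImpl" carrying the @State fields, and one generated state-update class
  // per @OnUpdateState method (STATE_UPDATE_IMPL_NAME_SUFFIX).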
private static final String INNER_IMPL_BUILDER_CLASS_NAME = "Builder"; private static final String STATE_UPDATE_IMPL_NAME_SUFFIX = "StateUpdate"; public static final String STATE_CONTAINER_IMPL_NAME_SUFFIX = "StateContainerImpl"; public static final String STATE_CONTAINER_IMPL_MEMBER = "mStateContainerImpl"; private static final String REQUIRED_PROPS_NAMES = "REQUIRED_PROPS_NAMES"; private static final String REQUIRED_PROPS_COUNT = "REQUIRED_PROPS_COUNT"; private static final int ON_STYLE_PROPS = 1; private static final int ON_CREATE_INITIAL_STATE = 1; private final boolean mSupportState; public enum StaticFlag { STATIC, NOT_STATIC } public enum StyleableFlag { STYLEABLE, NOT_STYLEABLE } // Using these names in props might cause conflicts with the method names in the // component's generated layout builder class so we trigger a more user-friendly // error if the component tries to use them. This list should be kept in sync // with BaseLayoutBuilder. private static final String[] RESERVED_PROP_NAMES = new String[] { "withLayout", "key", "loadingEventHandler", }; private static final Class<Annotation>[] TREE_PROP_ANNOTATIONS = new Class[] { TreeProp.class, }; private static final Class<Annotation>[] PROP_ANNOTATIONS = new Class[] { Prop.class, }; private static final Class<Annotation>[] STATE_ANNOTATIONS = new Class[] { State.class, }; private final ProcessingEnvironment mProcessingEnv; private final TypeElement mSourceElement; private final String mQualifiedClassName; private final Class<Annotation>[] mStageAnnotations; private final Class<Annotation>[] mInterStagePropAnnotations; private final Class<Annotation>[] mParameterAnnotations; private final TypeSpec.Builder mClassTypeSpec; private final List<TypeVariableName> mTypeVariables; private final List<TypeElement> mEventDeclarations; private final Map<String, String> mPropJavadocs; private final String mSimpleClassName; private String mSourceDelegateAccessorName = DELEGATE_FIELD_NAME; private List<VariableElement> mProps; private List<VariableElement> mOnCreateInitialStateDefinedProps; private ImmutableList<PropDefaultModel> mPropDefaults; private List<VariableElement> mTreeProps; private final Map<String, VariableElement> mStateMap = new LinkedHashMap<>(); // Map of name to VariableElement, for members of the inner implementation class, in order private LinkedHashMap<String, VariableElement> mImplMembers; private List<Parameter> mImplParameters; private final Map<String, TypeMirror> mExtraStateMembers; // List of methods that have @OnEvent on it. private final List<ExecutableElement> mOnEventMethods; // List of methods annotated with @OnUpdateState. private final List<ExecutableElement> mOnUpdateStateMethods; private final List<ExecutableElement> mOnCreateTreePropsMethods; // List of methods that define stages (e.g. 
OnCreateLayout) private List<ExecutableElement> mStages; public TypeElement getSourceElement() { return mSourceElement; } public Stages( ProcessingEnvironment processingEnv, TypeElement sourceElement, String qualifiedClassName, Class<Annotation>[] stageAnnotations, Class<Annotation>[] interStagePropAnnotations, TypeSpec.Builder typeSpec, List<TypeVariableName> typeVariables, boolean supportState, Map<String, TypeMirror> extraStateMembers, List<TypeElement> eventDeclarations, Map<String, String> propJavadocs) { mProcessingEnv = processingEnv; mSourceElement = sourceElement; mQualifiedClassName = qualifiedClassName; mStageAnnotations = stageAnnotations; mInterStagePropAnnotations = interStagePropAnnotations; mClassTypeSpec = typeSpec; mTypeVariables = typeVariables; mEventDeclarations = eventDeclarations; mPropJavadocs = propJavadocs; final List<Class<Annotation>> parameterAnnotations = new ArrayList<>(); parameterAnnotations.addAll(asList(PROP_ANNOTATIONS)); parameterAnnotations.addAll(asList(STATE_ANNOTATIONS)); parameterAnnotations.addAll(asList(mInterStagePropAnnotations)); parameterAnnotations.addAll(asList(TREE_PROP_ANNOTATIONS)); mParameterAnnotations = parameterAnnotations.toArray( new Class[parameterAnnotations.size()]); mSupportState = supportState; mSimpleClassName = Utils.getSimpleClassName(mQualifiedClassName); mOnEventMethods = Utils.getAnnotatedMethods(mSourceElement, OnEvent.class); mOnUpdateStateMethods = Utils.getAnnotatedMethods(mSourceElement, OnUpdateState.class); mOnCreateTreePropsMethods = Utils.getAnnotatedMethods(mSourceElement, OnCreateTreeProp.class); mExtraStateMembers = extraStateMembers; validateOnEventMethods(); populatePropDefaults(); populateStages(); validateAnnotatedParameters(); populateOnCreateInitialStateDefinedProps(); populateProps(); populateTreeProps(); if (mSupportState) { populateStateMap(); } validatePropDefaults(); populateImplMembers(); populateImplParameters(); validateStyleOutputs(); } private boolean isInterStagePropAnnotationValidInStage( Class<? extends Annotation> interStageProp, Class<? extends Annotation> stage) { final int interStagePropIndex = asList(mInterStagePropAnnotations).indexOf(interStageProp); final int stageIndex = asList(mStageAnnotations).indexOf(stage); if (interStagePropIndex < 0 || stageIndex < 0) { throw new IllegalArgumentException(); // indicates bug in the annotation processor } // This logic relies on the fact that there are prop annotations for each stage (except for // some number at the end) return interStagePropIndex < stageIndex; } private boolean doesInterStagePropAnnotationMatchStage( Class<? extends Annotation> interStageProp, Class<? 
extends Annotation> stage) { final int interStagePropIndex = asList(mInterStagePropAnnotations).indexOf(interStageProp); // Null stage is allowed and indicates prop int stageIndex = -1; if (stage != null) { stageIndex = asList(mStageAnnotations).indexOf(stage); if (interStagePropIndex < 0 || stageIndex < 0) { throw new IllegalArgumentException(); // indicates bug in the annotation processor } } return interStagePropIndex == stageIndex; } private void validateOnEventMethods() { final Map<String, Boolean> existsMap = new HashMap<>(); for (ExecutableElement element : mOnEventMethods) { if (existsMap.containsKey(element.getSimpleName().toString())) { throw new ComponentsProcessingException( element, "@OnEvent declared methods must have unique names"); } final DeclaredType eventClass = Utils.getAnnotationParameter( mProcessingEnv, element, OnEvent.class, "value"); final TypeMirror returnType = Utils.getAnnotationParameter( mProcessingEnv, eventClass.asElement(), Event.class, "returnType"); if (!mProcessingEnv.getTypeUtils().isSameType(element.getReturnType(), returnType)) { throw new ComponentsProcessingException( element, "Method " + element.getSimpleName() + " must return " + returnType + ", since that is what " + eventClass + " expects."); } final List<? extends VariableElement> parameters = Utils.getEnclosedFields((TypeElement) eventClass.asElement()); for (VariableElement v : Utils.getParametersWithAnnotation(element, FromEvent.class)) { boolean hasMatchingParameter = false; for (VariableElement parameter : parameters) { if (parameter.getSimpleName().equals(v.getSimpleName()) && parameter.asType().toString().equals(v.asType().toString())) { hasMatchingParameter = true; break; } } if (!hasMatchingParameter) { throw new ComponentsProcessingException( v, v.getSimpleName() + " of this type is not a member of " + eventClass); } return; } existsMap.put(element.getSimpleName().toString(), true); } } /** * Ensures that the declared events don't clash with the predefined ones. */ private void validateEventDeclarations() { for (TypeElement eventDeclaration : mEventDeclarations) { final Event eventAnnotation = eventDeclaration.getAnnotation(Event.class); if (eventAnnotation == null) { throw new ComponentsProcessingException( eventDeclaration, "Events must be declared with the @Event annotation, event is: " + eventDeclaration); } final List<? extends VariableElement> fields = Utils.getEnclosedFields(eventDeclaration); for (VariableElement field : fields) { if (!field.getModifiers().contains(Modifier.PUBLIC) || field.getModifiers().contains(Modifier.FINAL)) { throw new ComponentsProcessingException( field, "Event fields must be declared as public non-final"); } } } } private void validateStyleOutputs() { final ExecutableElement delegateMethod = Utils.getAnnotatedMethod( mSourceElement, OnLoadStyle.class); if (delegateMethod == null) { return; } final List<? 
extends VariableElement> parameters = delegateMethod.getParameters(); if (parameters.size() < ON_STYLE_PROPS) { throw new ComponentsProcessingException( delegateMethod, "The @OnLoadStyle method should have an ComponentContext" + "followed by Output parameters matching component create."); } final TypeName firstParamType = ClassName.get(parameters.get(0).asType()); if (!firstParamType.equals(ClassNames.COMPONENT_CONTEXT)) { throw new ComponentsProcessingException( parameters.get(0), "The first argument of the @OnLoadStyle method should be an ComponentContext."); } for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { final VariableElement v = parameters.get(i); final TypeMirror outputType = Utils.getGenericTypeArgument(v.asType(), ClassNames.OUTPUT); if (outputType == null) { throw new ComponentsProcessingException( parameters.get(i), "The @OnLoadStyle method should have only have Output arguments matching " + "component create."); } final Types typeUtils = mProcessingEnv.getTypeUtils(); final String name = v.getSimpleName().toString(); boolean matchesProp = false; for (Element prop : mProps) { if (!prop.getSimpleName().toString().equals(name)) { continue; } matchesProp = true; if (!typeUtils.isAssignable(prop.asType(), outputType)) { throw new ComponentsProcessingException( v, "Searching for prop \"" + name + "\" of type " + ClassName.get(outputType) + " but found prop with the same name of type " + ClassName.get(prop.asType())); } } if (!matchesProp) { throw new ComponentsProcessingException( v, "Output named '" + v.getSimpleName() + "' does not match any prop " + "in the component."); } } } /** * Validate that: * <ul> * <li>1. Parameters are consistently typed across stages.</li> * <li>2. Outputs for the same parameter name aren't duplicated.</li> * <li>3. Declared inter-stage prop parameters from previous stages (i.e. not * {@link Prop}) correspond to outputs from that stage</li> * <li>4. Inter-stage prop parameters come from previous stages. i.e. It is illegal to declare * a @FromMeasure parameter in @OnInflate</li> * <li>5. Inter-stage parameters don't have duplicate annotations (and that outputs aren't * annotated as inter-stage props)</li> * <li>6. Ensure props don't use reserved words as names.</li> * <li>7. Ensure required props don't have default values.</li> * <li>8. Ensure same props are annotated identically</li> * <li>9. Ensure props are of legal types</li> * </ul> */ private void validateAnnotatedParameters() { final List<PrintableException> exceptions = new ArrayList<>(); final Map<String, VariableElement> variableNameToElementMap = new HashMap<>(); final Map<String, Class<? extends Annotation>> outputVariableToStage = new HashMap<>(); for (Class<? extends Annotation> stageAnnotation : mStageAnnotations) { final ExecutableElement stage = Utils.getAnnotatedMethod( mSourceElement, stageAnnotation); if (stage == null) { continue; } // Enforce #5: getSpecDefinedParameters will verify that parameters don't have duplicate // annotations for (VariableElement v : getSpecDefinedParameters(stage)) { try { final String variableName = v.getSimpleName().toString(); final Annotation interStagePropAnnotation = getInterStagePropAnnotation(v); final boolean isOutput = Utils.getGenericTypeArgument(v.asType(), ClassNames.OUTPUT) != null; if (isOutput) { outputVariableToStage.put(variableName, stageAnnotation); } // Enforce #3 if (interStagePropAnnotation != null) { final Class<? 
extends Annotation> outputStage = outputVariableToStage.get(variableName); if (!doesInterStagePropAnnotationMatchStage( interStagePropAnnotation.annotationType(), outputStage)) { throw new ComponentsProcessingException( v, "Inter-stage prop declaration is incorrect, the same name and type must be " + "used in every method where the inter-stage prop is declared."); } } // Enforce #4 if (interStagePropAnnotation != null && !isInterStagePropAnnotationValidInStage( interStagePropAnnotation.annotationType(), stageAnnotation)) { throw new ComponentsProcessingException( v, "Inter-stage props must refer to previous stages."); } final VariableElement existingType = variableNameToElementMap.get(variableName); if (existingType != null && !isSameType(existingType.asType(), v.asType())) { // We have a type mismatch. This is allowed, provided that the previous type is an // output and the new type is a prop, and the type argument of the output matches the // prop. In the future, we may want to allow stages to modify outputs from previous // stages, but for now we disallow it. // Enforce #1 and #2 if ((getInterStagePropAnnotation(v) == null || Utils.getGenericTypeArgument(existingType.asType(), ClassNames.OUTPUT) == null) && Utils.getGenericTypeArgument(existingType.asType(), ClassNames.DIFF) == null) { throw new ComponentsProcessingException( v, "Inconsistent type for '" + variableName + "': '" + existingType.asType() + "' and '" + v.asType() + "'"); } } else if (existingType == null) { // We haven't seen a parameter with this name yet. Therefore it must be either @Prop, // @State or an output. final boolean isFromProp = getParameterAnnotation(v, PROP_ANNOTATIONS) != null; final boolean isFromState = getParameterAnnotation(v, STATE_ANNOTATIONS) != null; final boolean isFromTreeProp = getParameterAnnotation(v, TREE_PROP_ANNOTATIONS) != null; if (isFromState && !mSupportState) { throw new ComponentsProcessingException( v, "State is not supported in this kind of Spec."); } if (!isFromProp && !isFromState && !isOutput && !isFromTreeProp) { throw new ComponentsProcessingException( v, "Inter-stage prop declared without source."); } } // Enforce #6 final Prop propAnnotation = v.getAnnotation(Prop.class); if (propAnnotation != null) { for (String reservedPropName : RESERVED_PROP_NAMES) { if (reservedPropName.equals(variableName)) { throw new ComponentsProcessingException( v, "'" + reservedPropName + "' is a reserved prop name used by " + "the component's layout builder. Please use another name."); } } // Enforce #7 final boolean hasDefaultValue = hasDefaultValue(v); if (hasDefaultValue && !propAnnotation.optional()) { throw new ComponentsProcessingException( v, "Prop is not optional but has a declared default value."); } // Enforce #8 if (existingType != null) { final Prop existingPropAnnotation = existingType.getAnnotation(Prop.class); if (existingPropAnnotation != null) { if (!hasSameAnnotations(v, existingType)) { throw new ComponentsProcessingException( v, "The prop '" + variableName + "' is configured differently for different " + "methods. 
Ensure each instance of this prop is declared identically."); } } } // Enforce #9 TypeName typeName; try { typeName = ClassName.get(v.asType()); } catch (IllegalArgumentException e) { throw new ComponentsProcessingException( v, "Prop type does not exist"); } // Enforce #10 final List<ClassName> illegalPropTypes = Arrays.asList( ClassNames.COMPONENT_LAYOUT, ClassNames.COMPONENT_LAYOUT_BUILDER, ClassNames.COMPONENT_LAYOUT_CONTAINER_BUILDER, ClassNames.COMPONENT_BUILDER, ClassNames.COMPONENT_BUILDER_WITH_LAYOUT, ClassNames.REFERENCE_BUILDER); if (illegalPropTypes.contains(typeName)) { throw new ComponentsProcessingException( v, "Props may not be declared with the following types:" + illegalPropTypes); } } variableNameToElementMap.put(variableName, v); } catch (PrintableException e) { exceptions.add(e); } } } if (!exceptions.isEmpty()) { throw new MultiPrintableException(exceptions); } } private boolean hasSameAnnotations(VariableElement v1, VariableElement v2) { final List<? extends AnnotationMirror> v1Annotations = v1.getAnnotationMirrors(); final List<? extends AnnotationMirror> v2Annotations = v2.getAnnotationMirrors(); if (v1Annotations.size() != v2Annotations.size()) { return false; } final int count = v1Annotations.size(); for (int i = 0; i < count; i++) { final AnnotationMirror a1 = v1Annotations.get(i); final AnnotationMirror a2 = v2Annotations.get(i); // Some object in this hierarchy don't implement equals correctly. // They do however produce very nice strings representations which we can compare instead. if (!a1.toString().equals(a2.toString())) { return false; } } return true; } public void validateStatic() { validateStaticFields(); validateStaticMethods(); } private void validateStaticFields() { for (Element element : mSourceElement.getEnclosedElements()) { if (element.getKind() == ElementKind.FIELD && !element.getModifiers().contains(Modifier.STATIC)) { throw new ComponentsProcessingException( element, "Field " + element.getSimpleName() + " in " + mSourceElement.getQualifiedName() + " must be static"); } } } private void validateStaticMethods() { for (Class<? 
extends Annotation> stageAnnotation : mStageAnnotations) { final ExecutableElement stage = Utils.getAnnotatedMethod( mSourceElement, stageAnnotation); if (stage != null && !stage.getModifiers().contains(Modifier.STATIC)) { throw new ComponentsProcessingException( stage, "Method " + stage.getSimpleName() + " in " + mSourceElement.getQualifiedName() + " must be static"); } } } /** * Gather a list of VariableElement that are the props to this component */ private void populateProps() { // We use a linked hash map to guarantee iteration order final LinkedHashMap<String, VariableElement> variableNameToElementMap = new LinkedHashMap<>(); for (ExecutableElement stage : mStages) { for (VariableElement v : getProps(stage)) { // Validation unnecessary - already handled by validateAnnotatedParameters final String variableName = v.getSimpleName().toString(); variableNameToElementMap.put(variableName, v); } } mProps = new ArrayList<>(variableNameToElementMap.values()); addCreateInitialStateDefinedProps(mProps); } /** * Gather a list of VariableElement that are the state to this component */ private void populateStateMap() { // We use a linked hash map to guarantee iteration order final LinkedHashMap<String, VariableElement> variableNameToElementMap = new LinkedHashMap<>(); for (ExecutableElement stage : mStages) { for (VariableElement v : getState(stage)) { final String variableName = v.getSimpleName().toString(); if (mStateMap.containsKey(variableName)) { VariableElement existingType = mStateMap.get(variableName); final State existingPropAnnotation = existingType.getAnnotation(State.class); if (existingPropAnnotation != null) { if (!hasSameAnnotations(v, existingType)) { throw new ComponentsProcessingException( v, "The state '" + variableName + "' is configured differently for different " + "methods. Ensure each instance of this state is declared identically."); } } } mStateMap.put( variableName, v); } } } private void populateTreeProps() { final LinkedHashMap<String, VariableElement> variableNameToElementMap = new LinkedHashMap<>(); for (ExecutableElement stage : mStages) { for (VariableElement v : Utils.getParametersWithAnnotation(stage, TreeProp.class)) { final String variableName = v.getSimpleName().toString(); variableNameToElementMap.put(variableName, v); } } mTreeProps = new ArrayList<>(variableNameToElementMap.values()); } /** * Get the list of stages (OnInflate, OnMeasure, OnMount) that are defined for this component. */ private void populateStages() { mStages = new ArrayList<>(); for (Class<Annotation> stageAnnotation : mStageAnnotations) { final ExecutableElement stage = Utils.getAnnotatedMethod( mSourceElement, stageAnnotation); if (stage != null) { mStages.add(stage); } } if (mOnEventMethods != null) { mStages.addAll(mOnEventMethods); } mStages.addAll(mOnCreateTreePropsMethods); } /** * @param prop The prop to determine if it has a default or not. * @return Returns true if the prop has a default, false otherwise. */ private boolean hasDefaultValue(VariableElement prop) { final String name = prop.getSimpleName().toString(); final TypeName type = TypeName.get(prop.asType()); for (PropDefaultModel propDefault : mPropDefaults) { if (propDefault.mName.equals(name) && propDefault.mType.equals(type)) { return true; } } return false; } /** * Fail if any elements that exist in mPropDefaults do not exist in mProps. 
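* <p>As a rough illustration, a prop default that passes this validation is expected to look like
* the sketch below (the name and value are placeholders, not taken from a real spec):
* <pre>{@code
* @PropDefault static final int titleTextSize = 14; // must match the name and type of a @Prop
* }</pre>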
*/ private void validatePropDefaults() { for (PropDefaultModel propDefault : mPropDefaults) { final ImmutableList<Modifier> modifiers = propDefault.mModifiers; if (!modifiers.contains(Modifier.STATIC) || !modifiers.contains(Modifier.FINAL) || modifiers.contains(Modifier.PRIVATE)) { throw new RuntimeException( "Defaults for props (fields annotated with " + PropDefault.class + ") must be " + "non-private, static, and final. This is not the case for " + propDefault.mName); } if (!hasValidNameAndType(propDefault)) { throw new RuntimeException( "Prop defaults (fields annotated with " + PropDefault.class + ") should have the " + "same name and type as the prop that they set the default for. This is not the " + "case for " + propDefault.mName); } } } /** * @return true if the given prop default matches the name and type of a prop, false otherwise. */ private boolean hasValidNameAndType(PropDefaultModel propDefault) { for (VariableElement prop : mProps) { if (prop.getSimpleName().toString().equals(propDefault.mName) && TypeName.get(prop.asType()).equals(propDefault.mType)) { return true; } } return false; } /** * Gather a list of parameters from the given element that are props to this component. */ private static List<VariableElement> getProps(ExecutableElement element) { return Utils.getParametersWithAnnotation(element, Prop.class); } /** * Gather a list of parameters from the given element that are state to this component. */ private static List<VariableElement> getState(ExecutableElement element) { return Utils.getParametersWithAnnotation(element, State.class); } /** * Gather a list of parameters from the given element that are defined by the spec. That is, they * aren't one of the parameters predefined for a given method. For example, OnCreateLayout has a * predefined parameter of type LayoutContext. Spec-defined parameters are annotated with one of * our prop annotations or are of type {@link com.facebook.litho.Output}. */ private List<VariableElement> getSpecDefinedParameters(ExecutableElement element) { return getSpecDefinedParameters(element, true); } private List<VariableElement> getSpecDefinedParameters( ExecutableElement element, boolean shouldIncludeOutputs) { final ArrayList<VariableElement> specDefinedParameters = new ArrayList<>(); for (VariableElement v : element.getParameters()) { final boolean isAnnotatedParameter = getParameterAnnotation(v) != null; final boolean isInterStageOutput = Utils.getGenericTypeArgument( v.asType(), ClassNames.OUTPUT) != null; if (isAnnotatedParameter && isInterStageOutput) { throw new ComponentsProcessingException( v, "Variables that are both prop and output are forbidden."); } else if (isAnnotatedParameter || (shouldIncludeOutputs && isInterStageOutput)) { specDefinedParameters.add(v); } } return specDefinedParameters; } private void populateOnCreateInitialStateDefinedProps() { final ExecutableElement onCreateInitialState = Utils.getAnnotatedMethod( getSourceElement(), OnCreateInitialState.class); if (onCreateInitialState == null) { mOnCreateInitialStateDefinedProps = new ArrayList<>(); } else { mOnCreateInitialStateDefinedProps = getSpecDefinedParameters(onCreateInitialState, false); } } /** * Get the @FromLayout, @FromMeasure, etc annotation on this element (@Prop isn't * considered - use getParameterAnnotation if you want to consider them) */ private Annotation getInterStagePropAnnotation(VariableElement element) { return getParameterAnnotation(element, mInterStagePropAnnotations); } /** * Get the annotation, if any, present on a parameter. 
Annotations are restricted to our whitelist * of parameter annotations (e.g. {@link Prop}, {@link State}, etc.) */ private Annotation getParameterAnnotation(VariableElement element) { return getParameterAnnotation(element, mParameterAnnotations); } /** * Get the annotation, if any, present on a parameter. Annotations are restricted to the specified * whitelist. If there is a duplicate we will issue an error. */ private Annotation getParameterAnnotation( VariableElement element, Class<Annotation>[] possibleAnnotations) { final ArrayList<Annotation> annotations = new ArrayList<>(); for (Class<Annotation> annotationClass : possibleAnnotations) { final Annotation annotation = element.getAnnotation(annotationClass); if (annotation != null) { annotations.add(annotation); } } if (annotations.isEmpty()) { return null; } else if (annotations.size() == 1) { return annotations.get(0); } else { throw new ComponentsProcessingException( element, "Duplicate parameter annotation: '" + annotations.get(0) + "' and '" + annotations.get(1) + "'"); } } /** * Generate javadoc block describing component props. */ public void generateJavadoc() { for (VariableElement v : mProps) { final Prop propAnnotation = v.getAnnotation(Prop.class); final String propTag = propAnnotation.optional() ? "@prop-optional" : "@prop-required"; final String javadoc = mPropJavadocs != null ? mPropJavadocs.get(v.getSimpleName().toString()) : ""; final String sanitizedJavadoc = javadoc != null ? javadoc.replace('\n', ' ') : null; // Adds javadoc with the following format: // @prop-required name type javadoc. // This can be changed later to use clear demarcation for fields. // This is a block tag and cannot support inline tags like "{@link something}". mClassTypeSpec.addJavadoc( "$L $L $L $L\n", propTag, v.getSimpleName().toString(), Utils.getTypeName(v.asType()), sanitizedJavadoc); } } /** * Generate a method for this component which either lazily instantiates a singleton reference or * returns this, depending on whether this lifecycle is static or not. 
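* <p>As a rough sketch (class and field names below are placeholders rather than the exact
* generated identifiers), the static variant is expected to emit a lazily initialized singleton
* accessor:
* <pre>{@code
* private static MyLifecycle sInstance = null;
*
* public static synchronized MyLifecycle get() {
*   if (sInstance == null) {
*     sInstance = new MyLifecycle();
*   }
*   return sInstance;
* }
* }</pre>
* The non-static variant simply returns {@code this}.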
*/ public void generateGetter(boolean isStatic) { final ClassName className = ClassName.bestGuess(mQualifiedClassName); if (isStatic) { mClassTypeSpec.addField( FieldSpec .builder(className, SPEC_INSTANCE_NAME, Modifier.PRIVATE, Modifier.STATIC) .initializer("null") .build()); mClassTypeSpec.addMethod( MethodSpec.methodBuilder("get") .addModifiers(Modifier.PUBLIC) .addModifiers(Modifier.STATIC) .addModifiers(Modifier.SYNCHRONIZED) .returns(className) .beginControlFlow("if ($L == null)", SPEC_INSTANCE_NAME) .addStatement("$L = new $T()", SPEC_INSTANCE_NAME, className) .endControlFlow() .addStatement("return $L", SPEC_INSTANCE_NAME) .build()); } else { mClassTypeSpec.addMethod( MethodSpec.methodBuilder("get") .addModifiers(Modifier.PUBLIC) .returns(className) .addStatement("return this") .build()); } } public void generateSourceDelegate(boolean initialized) { final ClassName specClassName = ClassName.get(mSourceElement); generateSourceDelegate(initialized, specClassName); } public void generateSourceDelegate(boolean initialized, TypeName specTypeName) { final FieldSpec.Builder builder = FieldSpec .builder(specTypeName, DELEGATE_FIELD_NAME) .addModifiers(Modifier.PRIVATE); if (initialized) { builder.initializer("new $T()", specTypeName); } mClassTypeSpec.addField(builder.build()); } private MethodSpec generateMakeShallowCopy(ClassName componentClassName, boolean hasDeepCopy) { final List<String> componentsInImpl = findComponentsInImpl(componentClassName); final List<String> interStageComponentVariables = getInterStageVariableNames(); if (componentsInImpl.isEmpty() && interStageComponentVariables.isEmpty() && mOnUpdateStateMethods.isEmpty()) { return null; } final String implClassName = getImplClassName(); return new ShallowCopyMethodSpecBuilder() .componentsInImpl(componentsInImpl) .interStageVariables(interStageComponentVariables) .implClassName(implClassName) .hasDeepCopy(hasDeepCopy) .stateContainerImplClassName(getStateContainerImplClassName()) .build(); } private List<String> findComponentsInImpl(ClassName listComponent) { final List<String> componentsInImpl = new ArrayList<>(); for (String key : mImplMembers.keySet()) { final VariableElement element = mImplMembers.get(key); final Name declaredClassName = Utils.getDeclaredClassNameWithoutGenerics(element); if (declaredClassName != null && ClassName.bestGuess(declaredClassName.toString()).equals(listComponent)) { componentsInImpl.add(element.getSimpleName().toString()); } } return componentsInImpl; } /** * Generate a private constructor to enforce singleton-ity. */ public void generateConstructor() { mClassTypeSpec.addMethod( MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .build()); } /** * Generates a method to create the initial values for parameters annotated with {@link State}. * This method also validates that the delegate method only tries to assign an initial value to * State annotated parameters. 
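* <p>A spec method accepted by this generator would look roughly like the sketch below (names are
* illustrative; {@code count} is assumed to be declared elsewhere as an {@code @State} parameter):
* <pre>{@code
* @OnCreateInitialState
* static void onCreateInitialState(ComponentContext c, Output<Integer> count) {
*   count.set(0);
* }
* }</pre>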
*/ public void generateCreateInitialState( ExecutableElement from, ClassName contextClass, ClassName componentClass) { verifyParametersForCreateInitialState(contextClass, from); final MethodDescription methodDescription = new MethodDescription(); methodDescription.annotations = new Class[] { Override.class }; methodDescription.accessType = Modifier.PROTECTED; methodDescription.returnType = null; methodDescription.name = "createInitialState"; methodDescription.parameterTypes = new TypeName[] {contextClass}; generateDelegate(methodDescription, from, componentClass); } private void verifyParametersForCreateInitialState( ClassName contextClass, ExecutableElement executableElement) { final List<VariableElement> parameters = (List<VariableElement>) executableElement.getParameters(); if (parameters.size() < ON_CREATE_INITIAL_STATE + 1) { throw new ComponentsProcessingException( executableElement, "The @OnCreateInitialState method should have a " + contextClass + " followed by Output parameters matching state parameters."); } final TypeName firstParamType = ClassName.get(parameters.get(0).asType()); if (!firstParamType.equals(contextClass)) { throw new ComponentsProcessingException( parameters.get(0), "The first argument of the @OnCreateInitialState method should be a " + contextClass + "."); } for (int i = ON_CREATE_INITIAL_STATE, size = parameters.size(); i < size; i++) { final VariableElement element = parameters.get(i); final TypeMirror elementInnerClassType = Utils.getGenericTypeArgument(element.asType(), ClassNames.OUTPUT); if (elementInnerClassType != null) { final String paramName = element.getSimpleName().toString(); VariableElement implParameter = mStateMap.get(paramName); if (implParameter == null || implParameter.getAnnotation(State.class) == null) { throw new ComponentsProcessingException( executableElement, "Only parameters annotated with @State can be initialized in @OnCreateInitialState," + " parameter without annotation is: " + paramName); } } } } /** * Generate a method implementation that delegates to another method that takes annotated props. * * @param from description of method signature to be generated * @param to method to which to delegate * @param propsClass Component / Delegate. The base class of the inner implementation object * @throws java.io.IOException If one of the writer methods throws */ public void generateDelegate( MethodDescription from, ExecutableElement to, ClassName propsClass) { generateDelegate( from, to, Collections.<TypeName>emptyList(), Collections.<String, String>emptyMap(), propsClass); } public void generateDelegate( MethodDescription from, ExecutableElement to, List<TypeName> expectedTypes, ClassName propsClass) { generateDelegate( from, to, expectedTypes, Collections.<String, String>emptyMap(), propsClass); } /** * Generate a method implementation that delegates to another method that takes annotated props. * * @param from description of method signature to be generated * @param to method to which to delegate * @param propsClass Component / Delegate. 
The base class of the inner implementation object * @throws java.io.IOException If one of the writer methods throw */ public void generateDelegate( MethodDescription from, ExecutableElement to, List<TypeName> expectedTypes, Map<String, String> parameterTranslation, ClassName propsClass) { final Visibility visibility; if (Arrays.asList(from.accessType).contains(Modifier.PRIVATE)) { visibility = Visibility.PRIVATE; } else if (Arrays.asList(from.accessType).contains(Modifier.PROTECTED)) { visibility = Visibility.PROTECTED; } else if (Arrays.asList(from.accessType).contains(Modifier.PUBLIC)) { visibility = Visibility.PUBLIC; } else { visibility = Visibility.PACKAGE; } final List<Parameter> toParams = getParams(to); final List<Parameter> fromParams = new ArrayList<>(); for (int i = 0; i < from.parameterTypes.length; i++) { fromParams.add(new Parameter(from.parameterTypes[i], toParams.get(i).name)); } final List<PrintableException> errors = new ArrayList<>(); for (int i = 0; i < expectedTypes.size(); i++) { if (!toParams.get(i).type.equals(expectedTypes.get(i))) { errors.add(new ComponentsProcessingException( to.getParameters().get(i), "Expected " + expectedTypes.get(i))); } } if (!errors.isEmpty()) { throw new MultiPrintableException(errors); } writeMethodSpec(new DelegateMethodSpecBuilder() .implClassName(getImplClassName()) .abstractImplType(propsClass) .implParameters(mImplParameters) .checkedExceptions( from.exceptions == null ? new ArrayList<TypeName>() : Arrays.asList(from.exceptions)) .overridesSuper( from.annotations != null && Arrays.asList(from.annotations).contains(Override.class)) .parameterTranslation(parameterTranslation) .visibility(visibility) .fromName(from.name) .fromReturnType(from.returnType == null ? TypeName.VOID : from.returnType) .fromParams(fromParams) .target(mSourceDelegateAccessorName) .toName(to.getSimpleName().toString()) .stateParams(mStateMap.keySet()) .toReturnType(ClassName.get(to.getReturnType())) .toParams(toParams) .build()); } /** * Returns {@code true} if the given types match. */ public boolean isSameType(TypeMirror a, TypeMirror b) { return mProcessingEnv.getTypeUtils().isSameType(a, b); } /** * Generate an onEvent implementation that delegates to the @OnEvent-annotated method. */ public void generateOnEventHandlers(ClassName componentClassName, ClassName contextClassName) { for (ExecutableElement element : mOnEventMethods) { generateOnEventHandler(element, contextClassName); } } /** * Generate the static methods of the Component that can be called to update its state. */ public void generateOnStateUpdateMethods( ClassName contextClass, ClassName componentClassName, ClassName stateContainerClassName, ClassName stateUpdateInterface, Stages.StaticFlag staticFlag) { for (ExecutableElement element : mOnUpdateStateMethods) { validateOnStateUpdateMethodDeclaration(element); generateStateUpdateClass( element, componentClassName, stateContainerClassName, stateUpdateInterface, staticFlag); generateOnStateUpdateMethods(element, contextClass, componentClassName); } } /** * Validate that the declaration of a method annotated with {@link OnUpdateState} is correct: * <ul> * <li>1. Method parameters annotated with {@link Param} don't have the same name as parameters * annotated with {@link State} or {@link Prop}.</li> * <li>2. Method parameters not annotated with {@link Param} must be of type * com.facebook.litho.StateValue.</li> * <li>3. 
Names of method parameters not annotated with {@link Param} must match the name of * a parameter annotated with {@link State}.</li> * <li>4. Type of method parameters not annotated with {@link Param} must match the type of * a parameter with the same name annotated with {@link State}.</li> * </ul> */ private void validateOnStateUpdateMethodDeclaration(ExecutableElement element) { final List<VariableElement> annotatedParams = Utils.getParametersWithAnnotation(element, Param.class); // Check #1 for (VariableElement annotatedParam : annotatedParams) { if (mStateMap.get(annotatedParam.getSimpleName().toString()) != null) { throw new ComponentsProcessingException( annotatedParam, "Parameters annotated with @Param should not have the same name as a parameter " + "annotated with @State or @Prop"); } } final List<VariableElement> params = (List<VariableElement>) element.getParameters(); for (VariableElement param : params) { if (annotatedParams.contains(param)) { continue; } final TypeMirror paramType = param.asType(); // Check #2 if (paramType.getKind() != DECLARED) { throw new ComponentsProcessingException( param, "Parameters not annotated with @Param must be of type " + "com.facebook.litho.StateValue"); } final DeclaredType paramDeclaredType = (DeclaredType) param.asType(); final String paramDeclaredTypeName = paramDeclaredType .asElement() .getSimpleName() .toString(); if (!paramDeclaredTypeName.equals(ClassNames.STATE_VALUE.simpleName())) { throw new ComponentsProcessingException( "All state parameters must be of type com.facebook.litho.StateValue, " + param.getSimpleName() + " is of type " + param.asType()); } VariableElement stateMatchingParam = mStateMap.get(param.getSimpleName().toString()); // Check #3 if (stateMatchingParam == null || stateMatchingParam.getAnnotation(State.class) == null) { throw new ComponentsProcessingException( param, "Names of parameters of type StateValue must match the name of a parameter annotated " + "with @State"); } // Check #4 final List<TypeMirror> typeArguments = (List<TypeMirror>) paramDeclaredType.getTypeArguments(); if (typeArguments.isEmpty()) { throw new ComponentsProcessingException( param, "Type parameter for a parameter of type StateValue should match the type of " + "a parameter with the same name annotated with @State"); } final TypeMirror typeArgument = typeArguments.get(0); final TypeName stateMatchingParamTypeName = ClassName.get(stateMatchingParam.asType()); if (stateMatchingParamTypeName.isPrimitive()) { TypeName stateMatchingParamBoxedType = stateMatchingParamTypeName.box(); if (!stateMatchingParamBoxedType.equals(TypeName.get(typeArgument))) { throw new ComponentsProcessingException( param, "Type parameter for a parameter of type StateValue should match the type of " + "a parameter with the same name annotated with @State"); } } } } /** * Generate an EventHandler factory methods */ public void generateEventHandlerFactories( ClassName contextClassName, ClassName componentClassName) { for (ExecutableElement element : mOnEventMethods) { generateEventHandlerFactory( element, contextClassName, componentClassName); } } // ExecutableElement.hashCode may be different in different runs of the // processor. getElementId() is deterministic and ensures that the output is // the same across multiple runs. private int getElementId(ExecutableElement el) { return (mQualifiedClassName.hashCode() * 31 + el.getSimpleName().hashCode()) * 31 + el.asType().toString().hashCode(); } /** * Generate a dispatchOnEvent() implementation for the component. 
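* <p>The generated method switches on {@code eventHandler.id} and forwards to the matching
* {@code do*} delegate produced by generateOnEventHandler. A rough sketch of the output, with
* illustrative identifiers:
* <pre>{@code
* @Override
* public Object dispatchOnEvent(final EventHandler eventHandler, final Object eventState) {
*   int id = eventHandler.id;
*   switch (id) {
*     case 1234567: {
*       ClickEvent _myComponentImpl = (ClickEvent) eventState;
*       doOnClick(
*           (ComponentContext) eventHandler.params[0],
*           _myComponentImpl.view,
*           eventHandler.mHasEventDispatcher);
*       return null;
*     }
*     default:
*       return null;
*   }
* }
* }</pre>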
*/ public void generateDispatchOnEvent( ClassName contextClassName) { final MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder("dispatchOnEvent") .addModifiers(Modifier.PUBLIC) .addAnnotation(Override.class) .returns(TypeName.OBJECT) .addParameter( ParameterSpec.builder(ClassNames.EVENT_HANDLER, "eventHandler", Modifier.FINAL).build()) .addParameter( ParameterSpec.builder(ClassNames.OBJECT, "eventState", Modifier.FINAL).build()); methodBuilder.addStatement("int id = eventHandler.id"); methodBuilder.beginControlFlow("switch($L)", "id"); final String implInstanceName = "_" + getImplInstanceName(); for (ExecutableElement element : mOnEventMethods) { methodBuilder.beginControlFlow("case $L:", getElementId(element)); final DeclaredType eventClass = Utils.getAnnotationParameter( mProcessingEnv, element, OnEvent.class, "value"); final String eventName = eventClass.toString(); methodBuilder.addStatement( "$L $L = ($L) $L", eventName, implInstanceName, eventName, "eventState"); final CodeBlock.Builder eventHandlerParams = CodeBlock.builder(); eventHandlerParams.indent(); int i = 0; eventHandlerParams.add("\n($T) eventHandler.params[$L],", contextClassName, i++); for (VariableElement v : Utils.getParametersWithAnnotation(element, FromEvent.class)) { eventHandlerParams.add( "\n" + implInstanceName + ".$L,", v.getSimpleName().toString()); } for (VariableElement v : Utils.getParametersWithAnnotation(element, Param.class)) { eventHandlerParams.add("\n($T) eventHandler.params[$L],", ClassName.get(v.asType()), i); i++; } eventHandlerParams.add("\n$L", "eventHandler.mHasEventDispatcher"); eventHandlerParams.unindent(); if (element.getReturnType().getKind() != VOID) { methodBuilder.addStatement( "return do$L($L)", capitalize(element.getSimpleName().toString()), eventHandlerParams.build()); } else { methodBuilder.addStatement( "do$L($L)", capitalize(element.getSimpleName().toString()), eventHandlerParams.build()); methodBuilder.addStatement("return null"); } methodBuilder.endControlFlow(); } methodBuilder.addStatement("default: \nreturn null"); methodBuilder.endControlFlow(); writeMethodSpec(methodBuilder.build()); } private void generateEventHandlerFactory( ExecutableElement element, ClassName contextClassName, ClassName componentClassName) { final List<VariableElement> eventParamElements = Utils.getParametersWithAnnotation(element, Param.class); final List<Parameter> eventParams = new ArrayList<>(); final List<String> typeParameters = new ArrayList<>(); for (VariableElement e : eventParamElements) { eventParams.add(new Parameter(ClassName.get(e.asType()), e.getSimpleName().toString())); for (TypeMirror typeParam : getTypeVarArguments(e.asType())) { typeParameters.add(typeParam.toString()); } } final DeclaredType eventClass = Utils.getAnnotationParameter( mProcessingEnv, element, OnEvent.class, "value"); final TypeName eventClassName = ClassName.bestGuess(((TypeElement) eventClass.asElement()).getQualifiedName().toString()); writeMethodSpec(new EventHandlerFactoryMethodSpecBuilder() .eventId(getElementId(element)) .eventName(element.getSimpleName().toString()) .contextClass(contextClassName) .eventHandlerClassName( ParameterizedTypeName.get(ClassNames.EVENT_HANDLER, eventClassName)) .eventParams(eventParams) .typeParameters(typeParameters) .build()); writeMethodSpec(new EventHandlerFactoryMethodSpecBuilder() .eventId(getElementId(element)) .eventName(element.getSimpleName().toString()) .contextClass(componentClassName) .eventHandlerClassName( ParameterizedTypeName.get(ClassNames.EVENT_HANDLER, 
eventClassName)) .eventParams(eventParams) .typeParameters(typeParameters) .build()); } private void generateOnEventHandler( ExecutableElement element, ClassName contextClassName) { if (element.getParameters().size() == 0 || !ClassName.get(element.getParameters().get(0).asType()).equals(contextClassName)) { throw new ComponentsProcessingException( element, "The first parameter for an onEvent method should be of type " + contextClassName.toString()); } final String eventHandlerName = element.getSimpleName().toString(); final List<Parameter> fromParams = new ArrayList<>(); fromParams.add(new Parameter( contextClassName, element.getParameters().get(0).getSimpleName().toString())); final List<VariableElement> fromParamElements = Utils.getParametersWithAnnotation(element, FromEvent.class); fromParamElements.addAll(Utils.getParametersWithAnnotation(element, Param.class)); for (VariableElement v : fromParamElements) { fromParams.add(new Parameter(ClassName.get(v.asType()), v.getSimpleName().toString())); } writeMethodSpec(new DelegateMethodSpecBuilder() .implClassName(getImplClassName()) .abstractImplType(ClassNames.HAS_EVENT_DISPATCHER_CLASSNAME) .implParameters(mImplParameters) .visibility(PRIVATE) .fromName("do" + capitalize(eventHandlerName)) .fromParams(fromParams) .target(mSourceDelegateAccessorName) .toName(eventHandlerName) .toParams(getParams(element)) .fromReturnType(ClassName.get(element.getReturnType())) .toReturnType(ClassName.get(element.getReturnType())) .stateParams(mStateMap.keySet()) .build()); } private void generateOnStateUpdateMethods( ExecutableElement element, ClassName contextClass, ClassName componentClass) { final String methodName = element.getSimpleName().toString(); final List<VariableElement> updateMethodParamElements = Utils.getParametersWithAnnotation(element, Param.class); final OnStateUpdateMethodSpecBuilder builder = new OnStateUpdateMethodSpecBuilder() .componentClass(componentClass) .lifecycleImplClass(mSimpleClassName) .stateUpdateClassName(getStateUpdateClassName(element)); for (VariableElement e : updateMethodParamElements) { builder.updateMethodParam( new Parameter(ClassName.get(e.asType()), e.getSimpleName().toString())); List<TypeMirror> genericArgs = getTypeVarArguments(e.asType()); if (genericArgs != null) { for (TypeMirror genericArg : genericArgs) { builder.typeParameter(genericArg.toString()); } } } writeMethodSpec(builder .updateMethodName(methodName) .async(false) .contextClass(contextClass) .build()); writeMethodSpec(builder .updateMethodName(methodName + "Async") .async(true) .contextClass(contextClass) .build()); } static List<TypeMirror> getTypeVarArguments(TypeMirror diffType) { List<TypeMirror> typeVarArguments = new ArrayList<>(); if (diffType.getKind() == DECLARED) { final DeclaredType parameterDeclaredType = (DeclaredType) diffType; final List<? extends TypeMirror> typeArguments = parameterDeclaredType.getTypeArguments(); for (TypeMirror typeArgument : typeArguments) { if (typeArgument.getKind() == TYPEVAR) { typeVarArguments.add(typeArgument); } } } return typeVarArguments; } public static List<TypeMirror> getGenericTypeArguments(TypeMirror diffType) { if (diffType.getKind() == DECLARED) { final DeclaredType parameterDeclaredType = (DeclaredType) diffType; final List<?
extends TypeMirror> typeArguments = parameterDeclaredType.getTypeArguments(); return (List<TypeMirror>) typeArguments; } return null; } public static List<Parameter> getParams(ExecutableElement e) { final List<Parameter> params = new ArrayList<>(); for (VariableElement v : e.getParameters()) { params.add(new Parameter(ClassName.get(v.asType()), v.getSimpleName().toString())); } return params; } /** * Generates a class that implements {@link com.facebook.litho.ComponentLifecycle} given * a method annotated with {@link OnUpdateState}. The class constructor takes as params all the * params annotated with {@link Param} on the method and keeps them in class members. * @param element The method annotated with {@link OnUpdateState} */ private void generateStateUpdateClass( ExecutableElement element, ClassName componentClassName, ClassName stateContainerClassName, ClassName updateStateInterface, StaticFlag staticFlag) { final String stateUpdateClassName = getStateUpdateClassName(element); final TypeName implClassName = ClassName.bestGuess(getImplClassName()); final StateUpdateImplClassBuilder stateUpdateImplClassBuilder = new StateUpdateImplClassBuilder() .withTarget(mSourceDelegateAccessorName) .withSpecOnUpdateStateMethodName(element.getSimpleName().toString()) .withComponentImplClassName(implClassName) .withComponentClassName(componentClassName) .withComponentStateUpdateInterface(updateStateInterface) .withStateContainerClassName(stateContainerClassName) .withStateContainerImplClassName(ClassName.bestGuess(getStateContainerImplClassName())) .withStateUpdateImplClassName(stateUpdateClassName) .withSpecOnUpdateStateMethodParams(getParams(element)) .withStateValueParams(getStateValueParams(element)) .withStaticFlag(staticFlag); final List<VariableElement> parametersVarElements = Utils.getParametersWithAnnotation(element, Param.class); final List<Parameter> parameters = new ArrayList<>(); for (VariableElement v : parametersVarElements) { parameters.add(new Parameter(ClassName.get(v.asType()), v.getSimpleName().toString())); for (TypeMirror typeVar : getTypeVarArguments(v.asType())) { stateUpdateImplClassBuilder.typeParameter(typeVar.toString()); } } stateUpdateImplClassBuilder.withParamsForStateUpdate(parameters); writeInnerTypeSpec(stateUpdateImplClassBuilder.build()); } /** * Generate an onLoadStyle implementation. */ public void generateOnLoadStyle() { final ExecutableElement delegateMethod = Utils.getAnnotatedMethod( mSourceElement, OnLoadStyle.class); if (delegateMethod == null) { return; } final MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder("onLoadStyle") .addAnnotation( AnnotationSpec .builder(SuppressWarnings.class) .addMember("value", "$S", "unchecked").build()) .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .addParameter(ClassNames.COMPONENT_CONTEXT, "_context") .addParameter( ParameterSpec.builder( ParameterizedTypeName.get( ClassNames.COMPONENT, WildcardTypeName.subtypeOf(Object.class)), "_component") .build()); final List<? 
extends VariableElement> parameters = delegateMethod.getParameters(); for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { final VariableElement v = parameters.get(i); final TypeName typeName = ClassName.get(v.asType()); methodBuilder.addStatement( "$L $L = ($L) $L", typeName, v.getSimpleName(), typeName, "acquireOutput()"); } final CodeBlock.Builder delegateParameters = CodeBlock.builder().indent(); delegateParameters.add("\n_context"); for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { delegateParameters.add(",\n$L", parameters.get(i).getSimpleName()); } delegateParameters.unindent(); methodBuilder.addStatement( "this.$L.$L($L)", mSourceDelegateAccessorName, delegateMethod.getSimpleName(), delegateParameters.build()); final String implClassName = getImplClassName(); final String implInstanceName = "_" + getImplInstanceName(); methodBuilder.addStatement( "$L " + implInstanceName + "= ($L) _component", implClassName, implClassName); for (int i = ON_STYLE_PROPS, size = parameters.size(); i < size; i++) { final VariableElement v = parameters.get(i); final String name = v.getSimpleName().toString(); methodBuilder.beginControlFlow("if ($L.get() != null)", name); methodBuilder.addStatement( "$L.$L = $L.get()", implInstanceName, name, name); methodBuilder.endControlFlow(); methodBuilder.addStatement("releaseOutput($L)", name); } writeMethodSpec(methodBuilder.build()); } /** * Find variables annotated with {@link PropDefault} */ private void populatePropDefaults() { mPropDefaults = PropDefaultsExtractor.getPropDefaults(mSourceElement); } public void generateComponentImplClass(Stages.StaticFlag isStatic) { generateStateContainerImplClass(isStatic, ClassNames.STATE_CONTAINER_COMPONENT); final String implClassName = getImplClassName(); final ClassName stateContainerImplClass = ClassName.bestGuess(getSimpleClassName() + STATE_CONTAINER_IMPL_NAME_SUFFIX); final TypeSpec.Builder implClassBuilder = TypeSpec.classBuilder(implClassName) .addModifiers(Modifier.PRIVATE) .superclass( ParameterizedTypeName.get( ClassNames.COMPONENT, ClassName.bestGuess(getSimpleClassName()))) .addSuperinterface(Cloneable.class); if (isStatic.equals(Stages.StaticFlag.STATIC)) { implClassBuilder.addModifiers(Modifier.STATIC); implClassBuilder.addTypeVariables(mTypeVariables); } implClassBuilder.addField(stateContainerImplClass, STATE_CONTAINER_IMPL_MEMBER); implClassBuilder.addMethod(generateStateContainerGetter(ClassNames.STATE_CONTAINER_COMPONENT)); generateComponentClassProps(implClassBuilder, ClassNames.EVENT_HANDLER); MethodSpec.Builder constructorBuilder = MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .addStatement("super(get())") .addStatement(STATE_CONTAINER_IMPL_MEMBER + " = new $T()", stateContainerImplClass); implClassBuilder.addMethod(constructorBuilder.build()); implClassBuilder.addMethod( MethodSpec.methodBuilder("getSimpleName") .addModifiers(Modifier.PUBLIC) .addAnnotation(Override.class) .returns(ClassNames.STRING) .addStatement("return \"" + getSimpleClassName() + "\"") .build()); final MethodSpec equalsBuilder = generateEqualsMethodDefinition(true); implClassBuilder.addMethod(equalsBuilder); final MethodSpec copyInterStage = generateCopyInterStageImpl(implClassName); if (copyInterStage != null) { implClassBuilder.addMethod(copyInterStage); } for (ExecutableElement element : mOnUpdateStateMethods) { final String stateUpdateClassName = getStateUpdateClassName(element); final List<Parameter> parameters = getParamsWithAnnotation(element, Param.class); 
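// For each @OnUpdateState method, add a factory to the Impl that packages the method's @Param
// arguments into an instance of the corresponding generated state-update class.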
implClassBuilder.addMethod( new CreateStateUpdateInstanceMethodSpecBuilder() .parameters(parameters) .stateUpdateClass(stateUpdateClassName) .build()); } final MethodSpec makeShallowCopy = generateMakeShallowCopy(ClassNames.COMPONENT, /* hasDeepCopy */ false); if (makeShallowCopy != null) { implClassBuilder.addMethod(makeShallowCopy); } writeInnerTypeSpec(implClassBuilder.build()); } public void generateLazyStateUpdateMethods( ClassName context, ClassName componentClass, TypeName stateUpdateType, TypeName stateContainerComponent) { for (VariableElement state : mStateMap.values()) { if (state.getAnnotation(State.class).canUpdateLazily()) { writeMethodSpec(new OnLazyStateUpdateMethodSpecBuilder() .contextClass(context) .componentClass(componentClass) .stateUpdateType(stateUpdateType) .stateName(state.getSimpleName().toString()) .stateType(ClassName.get(state.asType())) .withStateContainerClassName(stateContainerComponent) .implClass(getImplClassName()) .lifecycleImplClass(mSimpleClassName) .build()); } } } private void generateStateContainerImplClass( Stages.StaticFlag isStatic, ClassName stateContainerClassName) { final TypeSpec.Builder stateContainerImplClassBuilder = TypeSpec .classBuilder(getStateContainerImplClassName()) .addSuperinterface(stateContainerClassName); if (isStatic.equals(Stages.StaticFlag.STATIC)) { stateContainerImplClassBuilder.addModifiers(Modifier.STATIC, Modifier.PRIVATE); stateContainerImplClassBuilder.addTypeVariables(mTypeVariables); } for (String stateName : mStateMap.keySet()) { VariableElement v = mStateMap.get(stateName); stateContainerImplClassBuilder.addField(getPropFieldSpec(v, true)); } writeInnerTypeSpec(stateContainerImplClassBuilder.build()); } private static MethodSpec generateStateContainerGetter(ClassName stateContainerClassName) { return MethodSpec.methodBuilder("getStateContainer") .addModifiers(Modifier.PROTECTED) .addAnnotation(Override.class) .returns(stateContainerClassName) .addStatement("return " + STATE_CONTAINER_IMPL_MEMBER) .build(); } public void generateReferenceImplClass( Stages.StaticFlag isStatic, TypeMirror referenceType) { final TypeSpec.Builder implClassBuilder = TypeSpec.classBuilder(getImplClassName()) .addModifiers(Modifier.PRIVATE) .superclass( ParameterizedTypeName.get( ClassNames.REFERENCE, ClassName.get(referenceType))); if (isStatic.equals(Stages.StaticFlag.STATIC)) { implClassBuilder.addModifiers(Modifier.STATIC); } generateComponentClassProps(implClassBuilder, null); implClassBuilder.addMethod( MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .addStatement("super(get())") .build()); implClassBuilder.addMethod( MethodSpec.methodBuilder("getSimpleName") .addModifiers(Modifier.PUBLIC) .addAnnotation(Override.class) .returns(ClassNames.STRING) .addStatement("return \"" + getSimpleClassName() + "\"") .build()); final MethodSpec equalsBuilder = generateEqualsMethodDefinition(false); implClassBuilder.addMethod(equalsBuilder); writeInnerTypeSpec(implClassBuilder.build()); } public void generateTransferState( ClassName contextClassName, ClassName componentClassName, ClassName stateContainerClassName) { if (!mStateMap.isEmpty()) { MethodSpec methodSpec = new TransferStateSpecBuilder() .contextClassName(contextClassName) .componentClassName(componentClassName) .componentImplClassName(getImplClassName()) .stateContainerClassName(stateContainerClassName) .stateContainerImplClassName(getStateContainerImplClassName()) .stateParameters(mStateMap.keySet()) .build(); mClassTypeSpec.addMethod(methodSpec); } } public void 
generateHasState() { if (mStateMap.isEmpty()) { return; } MethodSpec hasStateMethod = MethodSpec.methodBuilder("hasState") .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .returns(TypeName.BOOLEAN) .addStatement("return true") .build(); mClassTypeSpec.addMethod(hasStateMethod); } public void generateListComponentImplClass(Stages.StaticFlag isStatic) { generateStateContainerImplClass(isStatic, SectionClassNames.STATE_CONTAINER_SECTION); final ClassName stateContainerImplClass = ClassName.bestGuess(getSimpleClassName() + STATE_CONTAINER_IMPL_NAME_SUFFIX); final TypeSpec.Builder stateClassBuilder = TypeSpec.classBuilder(getImplClassName()) .addModifiers(Modifier.PRIVATE) .superclass( ParameterizedTypeName.get( SectionClassNames.SECTION, ClassName.bestGuess(getSimpleClassName()))) .addSuperinterface(Cloneable.class); if (isStatic.equals(Stages.StaticFlag.STATIC)) { stateClassBuilder.addModifiers(Modifier.STATIC); } stateClassBuilder.addField(stateContainerImplClass, STATE_CONTAINER_IMPL_MEMBER); stateClassBuilder.addMethod(generateStateContainerGetter(SectionClassNames.STATE_CONTAINER_SECTION)); generateComponentClassProps(stateClassBuilder, ClassNames.EVENT_HANDLER); stateClassBuilder.addMethod( MethodSpec.constructorBuilder() .addModifiers(Modifier.PRIVATE) .addStatement("super(get())") .addStatement(STATE_CONTAINER_IMPL_MEMBER + " = new $T()", stateContainerImplClass) .build()); final MethodSpec equalsBuilder = generateEqualsMethodDefinition(false); stateClassBuilder.addMethod(equalsBuilder); for (ExecutableElement element : mOnUpdateStateMethods) { final String stateUpdateClassName = getStateUpdateClassName(element); final List<Parameter> parameters = getParamsWithAnnotation(element, Param.class); stateClassBuilder.addMethod( new CreateStateUpdateInstanceMethodSpecBuilder() .parameters(parameters) .stateUpdateClass(stateUpdateClassName) .build()); } final MethodSpec makeShallowCopy = generateMakeShallowCopy(SectionClassNames.SECTION, /* hasDeepCopy */ true); if (makeShallowCopy != null) { stateClassBuilder.addMethod(makeShallowCopy); } writeInnerTypeSpec(stateClassBuilder.build()); } private MethodSpec generateEqualsMethodDefinition(boolean shouldCheckId) { final String implClassName = getImplClassName(); final String implInstanceName = getImplInstanceName(); MethodSpec.Builder equalsBuilder = MethodSpec.methodBuilder("equals") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addParameter(TypeName.OBJECT, "other") .beginControlFlow("if (this == other)") .addStatement("return true") .endControlFlow() .beginControlFlow("if (other == null || getClass() != other.getClass())") .addStatement("return false") .endControlFlow() .addStatement(implClassName + " " + implInstanceName + " = (" + implClassName + ") other"); if (shouldCheckId) { equalsBuilder .beginControlFlow( "if (this.getId() == " + implInstanceName + ".getId())") .addStatement("return true") .endControlFlow(); } for (VariableElement v : mImplMembers.values()) { if (!isState(v)) { addCompareStatement(implInstanceName, v, equalsBuilder, false); } } for (VariableElement v : mStateMap.values()) { addCompareStatement(implInstanceName, v, equalsBuilder, true); } equalsBuilder.addStatement("return true"); return equalsBuilder.build(); } private static void addCompareStatement( String implInstanceName, VariableElement v, MethodSpec.Builder equalsBuilder, boolean isState) { final TypeMirror variableType = v.asType(); final TypeMirror outputTypeMirror = Utils.getGenericTypeArgument( 
variableType, ClassNames.OUTPUT); final TypeMirror diffTypeMirror = Utils.getGenericTypeArgument( variableType, ClassNames.DIFF); final TypeKind variableKind = diffTypeMirror != null ? diffTypeMirror.getKind() : variableType.getKind(); String qualifiedName = ""; if (variableType instanceof DeclaredType) { final DeclaredType declaredType = (DeclaredType) variableType; qualifiedName = ((TypeElement) declaredType.asElement()).getQualifiedName().toString(); } final String stateContainerMember = isState ? "." + STATE_CONTAINER_IMPL_MEMBER : ""; final CharSequence thisVarName = isState ? STATE_CONTAINER_IMPL_MEMBER + "." + v.getSimpleName() : v.getSimpleName(); if (outputTypeMirror == null) { if (variableKind == FLOAT) { equalsBuilder .beginControlFlow( "if (Float.compare($L, " + implInstanceName + stateContainerMember + ".$L) != 0)", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (variableKind == DOUBLE) { equalsBuilder .beginControlFlow( "if (Double.compare($L, " + implInstanceName + stateContainerMember + ".$L) != 0)", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (variableType.getKind() == ARRAY) { equalsBuilder .beginControlFlow( "if (!Arrays.equals($L, " + implInstanceName + stateContainerMember + ".$L))", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (variableType.getKind().isPrimitive()) { equalsBuilder .beginControlFlow( "if ($L != " + implInstanceName + stateContainerMember + ".$L)", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else if (qualifiedName.equals(ClassNames.REFERENCE.toString())) { equalsBuilder .beginControlFlow( "if (Reference.shouldUpdate($L, " + implInstanceName + stateContainerMember + ".$L))", thisVarName, v.getSimpleName()) .addStatement("return false") .endControlFlow(); } else { equalsBuilder .beginControlFlow( "if ($L != null ? !$L.equals(" + implInstanceName + stateContainerMember + ".$L) : " + implInstanceName + stateContainerMember + ".$L != null)", thisVarName, thisVarName, v.getSimpleName(), v.getSimpleName()) .addStatement("return false") .endControlFlow(); } } } private boolean isState(VariableElement v) { for (VariableElement find : mStateMap.values()) { if (find.getSimpleName().equals(v.getSimpleName())) { return true; } } return false; } private void generateComponentClassProps( TypeSpec.Builder implClassBuilder, ClassName eventHandlerClassName) { for (VariableElement v : mImplMembers.values()) { implClassBuilder.addField(getPropFieldSpec(v, false)); } if (mExtraStateMembers != null) { for (String key : mExtraStateMembers.keySet()) { final TypeMirror variableType = mExtraStateMembers.get(key); final FieldSpec.Builder fieldBuilder = FieldSpec.builder(TypeName.get(variableType), key); implClassBuilder.addField(fieldBuilder.build()); } } for (TypeElement event : mEventDeclarations) { implClassBuilder.addField(FieldSpec.builder( eventHandlerClassName, getEventHandlerInstanceName(event.getSimpleName().toString())) .build()); } } private FieldSpec getPropFieldSpec(VariableElement v, boolean isStateProp) { final TypeMirror variableType = v.asType(); TypeMirror wrappingTypeMirror = Utils.getGenericTypeArgument( variableType, ClassNames.OUTPUT); if (wrappingTypeMirror == null) { wrappingTypeMirror = Utils.getGenericTypeArgument(variableType, ClassNames.DIFF); } final TypeName variableClassName = JPUtil.getTypeFromMirror( wrappingTypeMirror != null ?
wrappingTypeMirror : variableType); final FieldSpec.Builder fieldBuilder = FieldSpec.builder( variableClassName, v.getSimpleName().toString()); if (!isInterStageComponentVariable(v)) { if (isStateProp) { fieldBuilder.addAnnotation(State.class); } else { fieldBuilder.addAnnotation(Prop.class); } } final boolean hasDefaultValue = hasDefaultValue(v); if (hasDefaultValue) { fieldBuilder.initializer( "$L.$L", mSourceElement.getSimpleName().toString(), v.getSimpleName().toString()); } return fieldBuilder.build(); } public void generateIsPureRender() { final MethodSpec.Builder shouldUpdateComponent = MethodSpec.methodBuilder("isPureRender") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addStatement("return true"); mClassTypeSpec.addMethod(shouldUpdateComponent.build()); } public void generateCallsShouldUpdateOnMount() { final MethodSpec.Builder isFast = MethodSpec.methodBuilder("callsShouldUpdateOnMount") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addStatement("return true"); mClassTypeSpec.addMethod(isFast.build()); } public void generateShouldUpdateMethod( ExecutableElement shouldUpdateElement, ClassName comparedInstancesClassName) { final ClassName implClass = ClassName.bestGuess(getImplClassName()); final MethodSpec.Builder shouldUpdateComponent = MethodSpec.methodBuilder("shouldUpdate") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(TypeName.BOOLEAN) .addParameter(comparedInstancesClassName, "previous") .addParameter(comparedInstancesClassName, "next"); final List<? extends VariableElement> shouldUpdateParams = shouldUpdateElement.getParameters(); final int shouldUpdateParamSize = shouldUpdateParams.size(); if (shouldUpdateParamSize > 0) { shouldUpdateComponent .addStatement( "$L previousImpl = ($L) previous", implClass, implClass) .addStatement( "$L nextImpl = ($L) next", implClass, implClass); } final CodeBlock.Builder delegateParameters = CodeBlock.builder(); delegateParameters.indent(); int i = 0; final CodeBlock.Builder releaseDiffs = CodeBlock.builder(); for (VariableElement variableElement : shouldUpdateParams) { final Name variableElementName = variableElement.getSimpleName(); final TypeMirror variableElementType = variableElement.asType(); final VariableElement componentMember = findPropVariableForName(variableElementName); if (componentMember == null) { throw new ComponentsProcessingException( variableElement, "Arguments for ShouldUpdate should match declared Props"); } final TypeMirror innerType = Utils.getGenericTypeArgument( variableElementType, ClassNames.DIFF); if (innerType == null) { throw new ComponentsProcessingException( variableElement, "Arguments for ShouldUpdate should be of type Diff " + componentMember.asType()); } final TypeName typeName; final TypeName innerTypeName = JPUtil.getTypeFromMirror(innerType); if (componentMember.asType().getKind().isPrimitive()) { typeName = JPUtil.getTypeFromMirror(componentMember.asType()).box(); } else { typeName = JPUtil.getTypeFromMirror(componentMember.asType()); } if (!typeName.equals(innerTypeName)) { throw new ComponentsProcessingException( variableElement, "Diff Type parameter does not match Prop " + componentMember); } shouldUpdateComponent .addStatement( "$L $L = acquireDiff(previousImpl.$L, nextImpl.$L)", variableElementType, variableElementName, variableElementName, variableElementName); if (i != 0) { delegateParameters.add(",\n"); } delegateParameters.add(variableElementName.toString()); i++; 
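// Queue a matching releaseDiff(...) call for every acquired Diff; these statements are emitted
// after the shouldUpdate delegate invocation below.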
releaseDiffs.addStatement( "releaseDiff($L)", variableElementName); } delegateParameters.unindent(); shouldUpdateComponent.addStatement( "boolean shouldUpdate = $L.$L(\n$L)", mSourceDelegateAccessorName, shouldUpdateElement.getSimpleName(), delegateParameters.build()); shouldUpdateComponent.addCode(releaseDiffs.build()); shouldUpdateComponent.addStatement( "return shouldUpdate"); mClassTypeSpec.addMethod(shouldUpdateComponent.build()); } public void generateTreePropsMethods(ClassName contextClassName, ClassName componentClassName) { verifyOnCreateTreePropsForChildren(contextClassName); if (!mTreeProps.isEmpty()) { final PopulateTreePropsMethodBuilder builder = new PopulateTreePropsMethodBuilder(); builder.componentClassName = componentClassName; builder.lifecycleImplClass = getImplClassName(); for (VariableElement treeProp : mTreeProps) { builder.treeProps.add( new Parameter(ClassName.get(treeProp.asType()), treeProp.getSimpleName().toString())); } mClassTypeSpec.addMethod(builder.build()); } if (mOnCreateTreePropsMethods.isEmpty()) { return; } final GetTreePropsForChildrenMethodBuilder builder = new GetTreePropsForChildrenMethodBuilder(); builder.lifecycleImplClass = getImplClassName(); builder.delegateName = getSourceDelegateAccessorName(); builder.contextClassName = contextClassName; builder.componentClassName = componentClassName; for (ExecutableElement executable : mOnCreateTreePropsMethods) { final CreateTreePropMethodData method = new CreateTreePropMethodData(); method.parameters = getParams(executable); method.returnType = ClassName.get(executable.getReturnType()); method.name = executable.getSimpleName().toString(); builder.createTreePropMethods.add(method); } mClassTypeSpec.addMethod(builder.build()); } private void verifyOnCreateTreePropsForChildren(ClassName contextClassName) { for (ExecutableElement method : mOnCreateTreePropsMethods) { if (method.getReturnType().getKind().equals(TypeKind.VOID)) { throw new ComponentsProcessingException( method, "@OnCreateTreeProp annotated method " + method.getSimpleName() + " cannot have a void return type"); } final List<?
extends VariableElement> params = method.getParameters(); if (params.isEmpty() || !ClassName.get(params.get(0).asType()).equals(contextClassName)) { throw new ComponentsProcessingException( method, "The first argument of an @OnCreateTreeProp method should be the " + contextClassName.simpleName()); } } } private VariableElement findPropVariableForName(Name variableElementName) { for (VariableElement prop : mProps) { if (prop.getSimpleName().equals(variableElementName)) { return prop; } } return null; } private MethodSpec generateCopyInterStageImpl(String implClassName) { final List<String> elementList = getInterStageVariableNames(); if (elementList.isEmpty()) { return null; } final String implInstanceName = getImplInstanceName(); final MethodSpec.Builder copyInterStageComponentBuilder = MethodSpec .methodBuilder("copyInterStageImpl") .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .returns(TypeName.VOID) .addParameter( ParameterizedTypeName.get( ClassNames.COMPONENT, ClassName.bestGuess(getSimpleClassName())), "impl") .addStatement( "$L " + implInstanceName + " = ($L) impl", implClassName, implClassName); for (String s : elementList) { copyInterStageComponentBuilder .addStatement( "$L = " + implInstanceName + ".$L", s, s); } return copyInterStageComponentBuilder.build(); } private List<String> getInterStageVariableNames() { final List<String> elementList = new ArrayList<>(); for (VariableElement v : mImplMembers.values()) { if (isInterStageComponentVariable(v)) { elementList.add(v.getSimpleName().toString()); } } return elementList; } private static boolean isInterStageComponentVariable(VariableElement variableElement) { final TypeMirror variableType = variableElement.asType(); final TypeMirror outputTypeMirror = Utils.getGenericTypeArgument( variableType, ClassNames.OUTPUT); return outputTypeMirror != null; } private static boolean isStateProp(VariableElement variableElement) { return variableElement.getAnnotation(State.class) != null; } public void generateListEvents() { for (TypeElement event : mEventDeclarations) { generateEvent( event, ClassNames.EVENT_HANDLER, SectionClassNames.SECTION_LIFECYCLE, SectionClassNames.SECTION_CONTEXT, "getSectionScope"); } } private static String getEventHandlerInstanceName(String eventHandlerClassName) { return Character.toLowerCase(eventHandlerClassName.charAt(0)) + eventHandlerClassName.substring(1) + "Handler"; } private void generateEvent( TypeElement eventDeclaration, ClassName eventHandlerClassName, ClassName lifecycleClassName, ClassName contextClassName, String scopeMethodName) { final String eventName = eventDeclaration.getSimpleName().toString(); writeMethodSpec(MethodSpec.methodBuilder("get" + eventName + "Handler") .addModifiers(Modifier.PUBLIC, Modifier.STATIC) .returns(eventHandlerClassName) .addParameter(contextClassName, "context") .addCode( CodeBlock.builder() .beginControlFlow("if (context.$L() == null)", scopeMethodName) .addStatement("return null") .endControlFlow() .build()) .addStatement( "return (($L.$T) context.$L()).$L", getSimpleClassName(), ClassName.bestGuess(getImplClassName()), scopeMethodName, getEventHandlerInstanceName(eventName)) .build()); // Override the method that the component will call to fire the event. 
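// A rough sketch of the dispatcher emitted by generateEventDispatcher() below, with illustrative
// event and field names (the real names come from the event declaration):
//
//   static void dispatchClickEvent(EventHandler _eventHandler, View view) {
//     ClickEvent _eventState = new ClickEvent();
//     _eventState.view = view;
//     EventDispatcher _lifecycle = _eventHandler.mHasEventDispatcher.getEventDispatcher();
//     _lifecycle.dispatchOnEvent(_eventHandler, _eventState);
//   }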
final MethodDescription methodDescription = new MethodDescription(); methodDescription.annotations = new Class[] {}; methodDescription.accessType = Modifier.STATIC; methodDescription.name = "dispatch" + eventName; methodDescription.parameterTypes = new TypeName[] { ClassName.bestGuess(mQualifiedClassName) }; final TypeMirror returnType = Utils.getAnnotationParameter(mProcessingEnv, eventDeclaration, Event.class, "returnType"); if (returnType != null) { methodDescription.returnType = TypeName.get(returnType); } generateEventDispatcher( methodDescription, eventDeclaration.getTypeParameters(), eventDeclaration, eventHandlerClassName, lifecycleClassName); } /** * Generate an event dispatcher method for the given event. * * @param fixedMethod description of method signature to be generated * @param typeParameters * @param element method the event will call to dispatch * @param eventHandlerClassName @throws IOException If one of the writer methods throw */ private void generateEventDispatcher( MethodDescription fixedMethod, List<? extends TypeParameterElement> typeParameters, TypeElement element, ClassName eventHandlerClassName, ClassName lifecycleClassName) { final List<? extends VariableElement> parameters = Utils.getEnclosedFields(element); final MethodSpec.Builder methodBuilder = MethodSpec.methodBuilder(fixedMethod.name); if (fixedMethod.annotations != null) { for (Class annotation : fixedMethod.annotations) { methodBuilder.addAnnotation(annotation); } } for (TypeParameterElement typeParameterElement : typeParameters) { methodBuilder.addTypeVariable( TypeVariableName.get(typeParameterElement.getSimpleName().toString())); } if (fixedMethod.accessType != null) { methodBuilder.addModifiers(fixedMethod.accessType); } methodBuilder.addParameter(eventHandlerClassName, "_eventHandler"); for (VariableElement v : parameters) { methodBuilder.addParameter(ClassName.get(v.asType()), v.getSimpleName().toString()); } // Add the event parameters to a implParameters. // This should come from a pool. final ClassName className = ClassName.get(element); methodBuilder.addStatement( "$T _eventState = new $T()", className, className); for (VariableElement v : parameters) { final String variableName = v.getSimpleName().toString(); methodBuilder.addStatement("_eventState.$L = $L", variableName, variableName); } methodBuilder.addStatement( "$T _lifecycle = _eventHandler.mHasEventDispatcher.getEventDispatcher()", ClassNames.EVENT_DISPATCHER); final TypeName returnType = fixedMethod.returnType; if (returnType != null && !returnType.equals(ClassName.VOID)) { methodBuilder.addStatement( "return ($L) _lifecycle.dispatchOnEvent(_eventHandler, _eventState)", returnType); methodBuilder.returns(returnType); } else { methodBuilder.addStatement("_lifecycle.dispatchOnEvent(_eventHandler, _eventState)"); } writeMethodSpec(methodBuilder.build()); } /** * Generate a builder method for a given declared parameters. 
*/ private Collection<MethodSpec> generatePropsBuilderMethods( VariableElement element, TypeName propsBuilderClassName, int requiredIndex, ClassName componentClassName) { final Prop propAnnotation = element.getAnnotation(Prop.class); final ResType resType = propAnnotation.resType(); switch (resType) { case STRING: assertOfType(element, TypeName.get(String.class), TypeName.get(CharSequence.class)); break; case STRING_ARRAY: assertOfType(element, TypeName.get(String[].class)); break; case INT: assertOfType(element, TypeName.get(int.class), TypeName.get(Integer.class)); break; case INT_ARRAY: assertOfType(element, TypeName.get(int[].class)); break; case BOOL: assertOfType(element, TypeName.get(boolean.class), TypeName.get(Boolean.class)); break; case COLOR: assertOfType(element, TypeName.get(int.class), TypeName.get(Integer.class)); break; case DIMEN_SIZE: assertOfType( element, TypeName.get(int.class), TypeName.get(Integer.class), TypeName.get(float.class), TypeName.get(Float.class)); break; case DIMEN_TEXT: assertOfType( element, TypeName.get(int.class), TypeName.get(Integer.class), TypeName.get(float.class), TypeName.get(Float.class)); break; case DIMEN_OFFSET: assertOfType( element, TypeName.get(int.class), TypeName.get(Integer.class), TypeName.get(float.class), TypeName.get(Float.class)); break; case FLOAT: assertOfType(element, TypeName.get(float.class), TypeName.get(Float.class)); break; case DRAWABLE: assertOfType(element, ParameterizedTypeName.get(ClassNames.REFERENCE, ClassNames.DRAWABLE)); break; } TypeMirror typeMirror = element.asType(); final TypeMirror diffTypeMirror = Utils.getGenericTypeArgument(typeMirror, ClassNames.DIFF); if (diffTypeMirror != null) { typeMirror = diffTypeMirror; } final TypeName type = JPUtil.getTypeFromMirror(typeMirror); final String name = element.getSimpleName().toString(); final PropParameter propParameter = new PropParameter( new Parameter(type, name), propAnnotation.optional(), resType, getNonComponentAnnotations(element)); return new PropsBuilderMethodsSpecBuilder() .index(requiredIndex) .propParameter(propParameter) .implName(getImplMemberInstanceName()) .requiredSetName("mRequired") .builderClass(propsBuilderClassName) .componentClassName(componentClassName) .build(); } private void assertOfType(VariableElement element, TypeName... types) { final TypeName elementType = JPUtil.getTypeFromMirror(element.asType()); for (TypeName type : types) { if (type.toString().equals(elementType.toString())) { return; } } throw new ComponentsProcessingException( element, "Expected parameter of one of types" + Arrays.toString(types) + ". Found " + elementType); } private List<ClassName> getNonComponentAnnotations(VariableElement element) { final List<? 
extends AnnotationMirror> annotationMirrors = element.getAnnotationMirrors(); final List<ClassName> annotations = new ArrayList<>(); for (AnnotationMirror annotationMirror : annotationMirrors) { if (annotationMirror.getAnnotationType().toString().startsWith("com.facebook.litho")) { continue; } if (annotationMirror.getElementValues().size() > 0) { throw new ComponentsProcessingException( element, "Currently only non-component annotations without parameters are supported"); } annotations.add(ClassName.bestGuess(annotationMirror.getAnnotationType().toString())); } return annotations; } public void generateReferenceBuilder(StaticFlag isStatic, TypeName genericType) { generateBuilder( isStatic, StyleableFlag.NOT_STYLEABLE, ClassNames.REFERENCE, genericType, INNER_IMPL_BUILDER_CLASS_NAME, new TypeName[]{genericType}, ClassNames.COMPONENT_CONTEXT, null, null, false, false); generateBuilderPool( ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME), "m" + INNER_IMPL_BUILDER_CLASS_NAME + "Pool", mTypeVariables.isEmpty() || isStatic == StaticFlag.STATIC ? StaticFlag.STATIC : StaticFlag.NOT_STATIC, StyleableFlag.NOT_STYLEABLE, ClassNames.COMPONENT_CONTEXT); writeMethodSpec(MethodSpec.methodBuilder("create") .addModifiers(Modifier.PUBLIC) .returns(ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME)) .addParameter(ClassNames.COMPONENT_CONTEXT, "context") .addStatement( "return new$L(context, new $T())", INNER_IMPL_BUILDER_CLASS_NAME, ClassName.bestGuess(getImplClassName())) .addModifiers(isStatic == StaticFlag.STATIC ? Modifier.STATIC : Modifier.FINAL) .build()); } public void generateListBuilder(StaticFlag isStatic, TypeName genericType) { generateBuilder( isStatic, StyleableFlag.NOT_STYLEABLE, SectionClassNames.SECTION, genericType, INNER_IMPL_BUILDER_CLASS_NAME, new TypeName[]{genericType}, SectionClassNames.SECTION_CONTEXT, ClassNames.EVENT_HANDLER, SectionClassNames.SECTION, true, true); generateBuilderPool( ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME), "m" + INNER_IMPL_BUILDER_CLASS_NAME + "Pool", mTypeVariables.isEmpty() || isStatic == StaticFlag.STATIC ? StaticFlag.STATIC : StaticFlag.NOT_STATIC, StyleableFlag.NOT_STYLEABLE, SectionClassNames.SECTION_CONTEXT); writeMethodSpec(MethodSpec.methodBuilder("create") .addModifiers(Modifier.PUBLIC) .returns(ClassName.bestGuess(INNER_IMPL_BUILDER_CLASS_NAME)) .addParameter(SectionClassNames.SECTION_CONTEXT, "context") .addStatement( "return new$L(context, new $T())", INNER_IMPL_BUILDER_CLASS_NAME, ClassName.bestGuess(getImplClassName())) .addModifiers(isStatic == StaticFlag.STATIC ? 
Modifier.STATIC : Modifier.FINAL) .build()); } private void generateBuilder( StaticFlag isStatic, StyleableFlag isStylable, ClassName propsClass, TypeName genericType, String builderClassName, TypeName[] builderGenericTypes, ClassName contextClass, ClassName eventHandlerClass, ClassName componentClassName, boolean hasKeySetter, boolean hasLoadingEventHandlerSetter) { final String implClassName = getImplClassName(); final String implInstanceName = getImplInstanceName(); final String implMemberInstanceName = getImplMemberInstanceName(); final String contextMemberInstanceName = "mContext"; final ClassName implClass = ClassName.bestGuess(implClassName); final MethodSpec.Builder initMethodSpec = MethodSpec.methodBuilder("init") .addModifiers(Modifier.PRIVATE) .addParameter(contextClass, "context"); if (isStylable == StyleableFlag.STYLEABLE) { initMethodSpec .addParameter(int.class, "defStyleAttr") .addParameter(int.class, "defStyleRes") .addParameter(implClass, implInstanceName) .addStatement("super.init(context, defStyleAttr, defStyleRes, " + implInstanceName + ")"); } else { initMethodSpec .addParameter(implClass, implInstanceName) .addStatement("super.init(context, " + implInstanceName + ")"); } initMethodSpec .addStatement(implMemberInstanceName + " = " + implInstanceName) .addStatement(contextMemberInstanceName + " = context"); final TypeSpec.Builder propsBuilderClassBuilder = TypeSpec .classBuilder(builderClassName) .addModifiers(Modifier.PUBLIC) .superclass( ParameterizedTypeName.get( ClassName.get(propsClass.packageName(), propsClass.simpleName(), builderClassName), builderGenericTypes)) .addField(implClass, implMemberInstanceName) .addField(contextClass, "mContext"); final List<String> requiredPropNames = new ArrayList<>(); int numRequiredProps = 0; for (VariableElement v : mProps) { if (!v.getAnnotation(Prop.class).optional()) { numRequiredProps++; requiredPropNames.add(v.getSimpleName().toString()); } } if (numRequiredProps > 0) { final FieldSpec.Builder requiredPropsNamesBuilder = FieldSpec.builder( String[].class, REQUIRED_PROPS_NAMES, Modifier.PRIVATE) .initializer("new String[] {$L}", commaSeparateAndQuoteStrings(requiredPropNames)) .addModifiers(Modifier.FINAL); if (isStatic.equals(StaticFlag.STATIC)) { requiredPropsNamesBuilder.addModifiers(Modifier.STATIC); } propsBuilderClassBuilder .addField(requiredPropsNamesBuilder.build()) .addField( FieldSpec.builder( int.class, REQUIRED_PROPS_COUNT, Modifier.PRIVATE) .initializer("$L", numRequiredProps) .addModifiers(Modifier.STATIC, Modifier.FINAL) .build()) .addField( FieldSpec.builder( BitSet.class, "mRequired", Modifier.PRIVATE) .initializer("new $T($L)", BitSet.class, REQUIRED_PROPS_COUNT) .build()); initMethodSpec.addStatement("mRequired.clear()"); } propsBuilderClassBuilder.addMethod(initMethodSpec.build()); // If there are no type variables, then this class can always be static. // If the component implementation class is static, and there are type variables, then this // class can be static but must shadow the type variables from the class. // If the component implementation class is not static, and there are type variables, then this // class is not static and we get the type variables from the class. 
final boolean isBuilderStatic = mTypeVariables.isEmpty() || isStatic.equals(StaticFlag.STATIC); if (isBuilderStatic) { propsBuilderClassBuilder.addModifiers(Modifier.STATIC); if (!mTypeVariables.isEmpty()) { propsBuilderClassBuilder.addTypeVariables(mTypeVariables); } } final ClassName propsBuilderClassName = ClassName.bestGuess(builderClassName); int requiredPropIndex = 0; for (VariableElement v : mProps) { propsBuilderClassBuilder.addMethods( generatePropsBuilderMethods( v, propsBuilderClassName, requiredPropIndex, componentClassName)); if (!v.getAnnotation(Prop.class).optional()) { requiredPropIndex++; } } for (TypeElement event : mEventDeclarations) { propsBuilderClassBuilder.addMethods( new PropsBuilderMethodsSpecBuilder() .propParameter( new PropParameter( new Parameter( eventHandlerClass, getEventHandlerInstanceName(event.getSimpleName().toString())), true, ResType.NONE, Collections.<ClassName>emptyList())) .implName(getImplMemberInstanceName()) .builderClass(propsBuilderClassName) .build()); } if (hasKeySetter) { propsBuilderClassBuilder.addMethod( new PropsBuilderMethodsSpecBuilder() .builderClass(propsBuilderClassName) .buildKeySetter()); } if (hasLoadingEventHandlerSetter) { propsBuilderClassBuilder.addMethod( new PropsBuilderMethodsSpecBuilder() .builderClass(propsBuilderClassName) .buildLoadingEventHandlerSetter()); } final MethodSpec.Builder buildMethodBuilder = MethodSpec.methodBuilder("build") .addAnnotation(Override.class) .addModifiers(Modifier.PUBLIC) .returns(ParameterizedTypeName.get(propsClass, genericType)); if (numRequiredProps > 0) { buildMethodBuilder .beginControlFlow( "if (mRequired != null && mRequired.nextClearBit(0) < $L)", REQUIRED_PROPS_COUNT) .addStatement( "$T missingProps = new $T()", ParameterizedTypeName.get(List.class, String.class), ParameterizedTypeName.get(ArrayList.class, String.class)) .beginControlFlow("for (int i = 0; i < $L; i++)", REQUIRED_PROPS_COUNT) .beginControlFlow("if (!mRequired.get(i))") .addStatement("missingProps.add($L[i])", REQUIRED_PROPS_NAMES) .endControlFlow() .endControlFlow() .addStatement( "throw new $T($S + $T.toString(missingProps.toArray()))", IllegalStateException.class, "The following props are not marked as optional and were not supplied: ", Arrays.class) .endControlFlow(); } buildMethodBuilder .addStatement("$L " + implInstanceName + " = " + implMemberInstanceName, implClassName) .addStatement("release()") .addStatement("return " + implInstanceName); propsBuilderClassBuilder.addMethod(buildMethodBuilder.build()); final String poolName = "m" + builderClassName + "Pool"; propsBuilderClassBuilder.addMethod(MethodSpec.methodBuilder("release") .addAnnotation(Override.class) .addModifiers(Modifier.PROTECTED) .addStatement("super.release()") .addStatement(implMemberInstanceName + " = null")
Lines authored by ianc. This commit forms part of the blame-preserving initial commit suite.
litho-processor/src/main/java/com/facebook/litho/processor/Stages.java
Lines authored by ianc
<ide><path>itho-processor/src/main/java/com/facebook/litho/processor/Stages.java <ide> .addModifiers(Modifier.PROTECTED) <ide> .addStatement("super.release()") <ide> .addStatement(implMemberInstanceName + " = null") <add> .addStatement(contextMemberInstanceName + " = null")
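Note: the Stages generator in the record above builds all of its output through JavaPoet's MethodSpec/TypeSpec builders (see generateEventDispatcher and the $T/$L placeholders in its addStatement calls). As a point of reference only, here is a minimal, self-contained sketch of that same pattern: it emits a static dispatch method that copies a parameter into an event-state object and forwards it to a handler. The names DispatcherSketch, ClickEvent, and EventHandler are hypothetical placeholders and are not taken from the litho sources above.

import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.TypeName;
import javax.lang.model.element.Modifier;

public final class DispatcherSketch {
    public static void main(String[] args) {
        // Hypothetical event-state and handler types; Stages derives these from the
        // @Event-annotated TypeElement and the framework's EventHandler class.
        ClassName eventState = ClassName.get("com.example", "ClickEvent");
        ClassName handler = ClassName.get("com.example", "EventHandler");

        MethodSpec dispatch = MethodSpec.methodBuilder("dispatchClickEvent")
                .addModifiers(Modifier.PUBLIC, Modifier.STATIC)
                .addParameter(handler, "_eventHandler")
                .addParameter(TypeName.OBJECT, "payload")
                // $T inserts an importable type and $L a literal, mirroring the
                // addStatement calls in generateEventDispatcher above.
                .addStatement("$T _eventState = new $T()", eventState, eventState)
                .addStatement("_eventState.payload = payload")
                .addStatement("_eventHandler.dispatchEvent(_eventState)")
                .build();

        // Printing a MethodSpec yields the generated Java source text.
        System.out.println(dispatch);
    }
}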
Java
mit
b57c4832b61cbb832107a62a8628ee00a1cc0880
0
gregwym/joos-compiler-java,gregwym/joos-compiler-java
/** * */ package ca.uwaterloo.joos.ast.decl; import ca.uwaterloo.joos.ast.ASTNode; import ca.uwaterloo.joos.ast.expr.ForInit; import ca.uwaterloo.joos.parser.ParseTree.Node; /** * @author Greg Wang * */ public class LocalVariableDeclaration extends VariableDeclaration implements ForInit{ private int index; /** * @param node * @param parent * @throws Exception */ public LocalVariableDeclaration(Node node, ASTNode parent) throws Exception { super(node, parent); } public void setIndex(int idx){ index = idx; } public int getIndex(){ return index; } }
src/ca/uwaterloo/joos/ast/decl/LocalVariableDeclaration.java
/** * */ package ca.uwaterloo.joos.ast.decl; import ca.uwaterloo.joos.ast.ASTNode; import ca.uwaterloo.joos.ast.expr.ForInit; import ca.uwaterloo.joos.parser.ParseTree.Node; /** * @author Greg Wang * */ public class LocalVariableDeclaration extends VariableDeclaration implements ForInit{ /** * @param node * @param parent * @throws Exception */ public LocalVariableDeclaration(Node node, ASTNode parent) throws Exception { super(node, parent); } }
Added an int holding a Local Variable's index
src/ca/uwaterloo/joos/ast/decl/LocalVariableDeclaration.java
Added an int holding a Local Variable's index
<ide><path>rc/ca/uwaterloo/joos/ast/decl/LocalVariableDeclaration.java <ide> * <ide> */ <ide> public class LocalVariableDeclaration extends VariableDeclaration implements ForInit{ <del> <add> private int index; <ide> /** <ide> * @param node <ide> * @param parent <ide> public LocalVariableDeclaration(Node node, ASTNode parent) throws Exception { <ide> super(node, parent); <ide> } <add> <add> public void setIndex(int idx){ <add> index = idx; <add> } <add> <add> public int getIndex(){ <add> return index; <add> } <ide> <ide> }
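Note: the commit above only adds storage for an index on each LocalVariableDeclaration (setIndex/getIndex). A plausible consumer, shown here purely as an illustrative assumption and not part of the joos-compiler excerpt, is a pass that numbers a method's locals in order of appearance so a later code-generation stage can map each declaration to a slot:

import java.util.Arrays;
import java.util.List;

public final class LocalIndexAssigner {
    // Minimal stand-in for the AST node above, reduced to the index API this commit adds.
    static final class Local {
        private final String name;
        private int index;
        Local(String name) { this.name = name; }
        void setIndex(int idx) { index = idx; }
        int getIndex() { return index; }
        String getName() { return name; }
    }

    // Number each declared local in order of appearance (an assumed use case).
    static void assignIndices(List<Local> locals) {
        int next = 0;
        for (Local local : locals) {
            local.setIndex(next++);
        }
    }

    public static void main(String[] args) {
        List<Local> locals = Arrays.asList(new Local("i"), new Local("sum"));
        assignIndices(locals);
        for (Local local : locals) {
            System.out.println(local.getName() + " -> slot " + local.getIndex());
        }
    }
}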
Java
bsd-3-clause
7b1ec32ef5725e5f1a2bd49ee35aa08b5a764790
0
phlip9/BIDMat,codeaudit/BIDMat,phlip9/BIDMat,codeaudit/BIDMat,phlip9/BIDMat,phlip9/BIDMat,phlip9/BIDMat,codeaudit/BIDMat,codeaudit/BIDMat,codeaudit/BIDMat
package edu.berkeley.bid.comm; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.Arrays; import java.io.*; import java.nio.*; import java.net.*; //import mpi.*; public class AllReduceX { public class Machine { /* Machine Configuration Variables */ int N; // Number of features int D; // Depth of the network int M; // Number of Machines int imachine; // My identity int [] allks; // k values int replicate = 1; // replication factor int sockBase = 50000; // Socket base address int sendTimeout = 1000; // in msec String [] machineIP; // String IP names Layer [] layers; // All the layers ByteBuffer [] sendbuf; // buffers, one for each destination in a group ByteBuffer [] recbuf; IVec finalMap; // Map to down from down --> up at layer D-1 Msg [][] messages; // Message queue for the simulation boolean [][] msgrecvd; boolean [][] amsending; boolean doSim = true; ExecutorService executor; ExecutorService sockExecutor; Listener listener; int trace = 0; // 0: no trace, 1: high-level, 2: everything public Machine(int N0, int [] allks0, int imachine0, int M0, int bufsize, boolean doSim0, int trace0, int replicate0, String [] machineIP0) { N = N0; M = M0; doSim = doSim0; replicate = replicate0; machineIP = machineIP0; if (machineIP == null) { machineIP = new String[M*replicate]; for (int i = 0; i < M*replicate; i++) machineIP[i] = "localhost"; } imachine = imachine0; allks = allks0; D = allks.length; trace = trace0; layers = new Layer[D]; int left = 0; int right = N; int cumk = 1; int maxk = 1; for (int i = 0; i < D; i++) { int k = allks[i]; layers[i] = new Layer(k, cumk, left, right, imachine, i); int pimg = layers[i].posInMyGroup; left = layers[i].left; if (pimg > 0) left = layers[i].partBoundaries.data[pimg-1]; right = layers[i].partBoundaries.data[pimg]; cumk *= k; maxk = Math.max(maxk, k); } executor = Executors.newFixedThreadPool(maxk); // set to 1 for sequential messaging. 
sendbuf = new ByteBuffer[maxk]; recbuf = new ByteBuffer[maxk]; for (int i = 0; i < maxk; i++) { sendbuf[i] = ByteBuffer.wrap(new byte[4*bufsize]); recbuf[i] = ByteBuffer.wrap(new byte[4*bufsize]); } messages = new Msg[M*replicate][]; msgrecvd = new boolean[M*replicate][]; amsending = new boolean[M*replicate][]; for (int i = 0; i < M*replicate; i++) { messages[i] = new Msg[3*D]; msgrecvd[i] = new boolean[3*D]; amsending[i] = new boolean[3*D]; } if (!doSim) { sockExecutor = Executors.newFixedThreadPool(1+4*maxk); listener = new Listener(); sockExecutor.execute(listener); } } public void stop() { if (listener != null) { listener.stop(); } } public void config(IVec downi, IVec upi) { IVec [] outputs = new IVec[2]; for (int i = 0; i < D; i++) { layers[i].config(downi, upi, outputs); downi = outputs[0]; upi = outputs[1]; } finalMap = IVec.mapInds(upi, downi); } public Vec reduce(Vec downv) { for (int d = 0; d < D; d++) { downv = layers[d].reduceDown(downv); } Vec upv = downv.mapFrom(finalMap); for (int d = D-1; d >= 0; d--) { upv = layers[d].reduceUp(upv); } if (trace > 0) { synchronized (AllReduceX.this) { System.out.format("machine %d reduce result nnz %d out of %d\n", imachine, upv.nnz(), upv.size()); } } return upv; } class Layer { /* Layer Configuration Variables */ int k; // Size of this group int left; // Left boundary of its indices int right; // Right boundary of its indices int depth; int posInMyGroup; // Position in this machines group int [] outNbr; // Machines we talk to int [] inNbr; // Machines we listen to IVec partBoundaries; // Partition boundaries IVec [] downMaps; // Maps to indices below for down indices IVec [] upMaps; // Maps to indices below for up indices int downn; // Size of the down master list int upn; // Size of the up vector int [] dPartInds; int [] uPartInds; public Layer(int k0, int cumk, int left0, int right0, int imachine, int depth0) { k = k0; int i; left = left0; right = right0; depth = depth0; partBoundaries = new IVec(k); inNbr = new int [k]; outNbr = new int [k]; dPartInds = new int[k+1]; uPartInds = new int[k+1]; int ckk = cumk * k; posInMyGroup = (imachine % ckk) / cumk; int ibase = (imachine % M) - posInMyGroup * cumk; for (i = 0; i < k; i++) { partBoundaries.data[i] = left + (int)(((long)(right - left)) * (i+1) / k); outNbr[i] = ibase + i * cumk; int toMe = (k + 2*posInMyGroup - i) % k; inNbr[i] = ibase + toMe * cumk; } downMaps = new IVec[k]; upMaps = new IVec[k]; } class ConfigThread implements Runnable { IVec [] downp; IVec [] upp; IVec [] dtree; IVec [] utree; int i; int repno; CountDownLatch latch; public ConfigThread(IVec [] downp0, IVec [] upp0, IVec [] dtree0, IVec [] utree0, int i0, CountDownLatch latch0) { downp = downp0; upp = upp0; dtree = dtree0; utree = utree0; i = i0; latch = latch0; } public void run() { sendbuf[i].clear(); recbuf[i].clear(); IntBuffer sbuf = sendbuf[i].asIntBuffer(); IntBuffer rbuf = recbuf[i].asIntBuffer(); int seg1 = downp[i].size(); int seg2 = seg1 + upp[i].size(); sbuf.put(seg1); sbuf.put(seg2); sbuf.put(downp[i].data, 0, seg1); sbuf.put(upp[i].data, 0, seg2-seg1); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("config layer %d machine %d sent msg to %d, from %d, sizes %d %d\n", depth, imachine, outNbr[i], inNbr[i], sbuf.get(0), sbuf.get(1)); } } sendrecv(i, sendbuf, seg2+2, outNbr[i], recbuf, rbuf.capacity(), inNbr[i], depth*3); seg1 = rbuf.get(); seg2 = rbuf.get(); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("config layer %d machine %d got msg from %d, sizes %d %d\n", 
depth, imachine, inNbr[i], seg1, seg2); } } IVec downout = new IVec(seg1); IVec upout = new IVec(seg2-seg1); rbuf.get(downout.data, 0, seg1); rbuf.get(upout.data, 0, seg2-seg1); IVec.checkTree(dtree, downout, i, k); IVec.checkTree(utree, upout, i, k); downp[i] = downout; upp[i] = upout; latch.countDown(); } } public void config(IVec downi, IVec upi, IVec [] outputs) { IVec [] downp = IVec.partition(downi, partBoundaries); IVec [] upp = IVec.partition(upi, partBoundaries); IVec [] dtree = new IVec[2*k-1]; IVec [] utree = new IVec[2*k-1]; if (trace > 0) { synchronized (AllReduceX.this) { System.out.format("machine %d layer %d, dparts (%d", imachine, depth, downp[0].size()); for (int i = 1; i < downp.length; i++) System.out.format(", %d", downp[i].size()); System.out.format(") from %d, bounds %d %d\n", downi.size(), partBoundaries.data[0], partBoundaries.data[partBoundaries.size()-1]); System.out.format("machine %d layer %d, uparts (%d", imachine, depth, upp[0].size()); for (int i = 1; i < upp.length; i++) System.out.format(", %d", upp[i].size()); System.out.format(") from %d, bounds %d %d\n", upi.size(), partBoundaries.data[0], partBoundaries.data[partBoundaries.size()-1]); } } dPartInds[0] = 0; uPartInds[0] = 0; for (int i = 0; i < k; i++) { dPartInds[i+1] = dPartInds[i] + downp[i].size(); uPartInds[i+1] = uPartInds[i] + upp[i].size(); } CountDownLatch latch = new CountDownLatch(k); for (int i = 0; i < k; i++) { int ix = (i + posInMyGroup) % k; // Try to stagger the traffic executor.execute(new ConfigThread(downp, upp, dtree, utree, ix, latch)); } try { latch.await(); } catch (InterruptedException e) {} IVec dmaster = dtree[0]; Arrays.fill(dtree, null); downn = dmaster.size(); IVec umaster = utree[0]; Arrays.fill(utree, null); upn = upi.size(); for (int i = 0; i < k; i++) { downMaps[i] = IVec.mapInds(downp[i], dmaster); upMaps[i] = IVec.mapInds(upp[i], umaster); if (trace > 0) { synchronized (AllReduceX.this) { System.out.format("machine %d dmap(%d) size %d\n", imachine, i, downMaps[i].size()); System.out.format("machine %d umap(%d) size %d\n", imachine, i, upMaps[i].size()); } } } outputs[0] = dmaster; outputs[1] = umaster; } public class ReduceDownThread implements Runnable { Vec newv; Vec downv; int i; CountDownLatch latch; public ReduceDownThread(Vec newv0, Vec downv0, int i0, CountDownLatch latch0) { newv = newv0; downv = downv0; i = i0; latch = latch0; } public void run() { sendbuf[i].clear(); recbuf[i].clear(); IntBuffer isbuf = sendbuf[i].asIntBuffer(); IntBuffer irbuf = recbuf[i].asIntBuffer(); FloatBuffer sbuf = sendbuf[i].asFloatBuffer(); FloatBuffer rbuf = recbuf[i].asFloatBuffer(); int msize = dPartInds[i+1] - dPartInds[i]; isbuf.put(msize); sbuf.position(1); sbuf.put(downv.data, dPartInds[i], msize); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce layer %d machine %d sent msg to %d, from %d, size %d\n", depth, imachine, outNbr[i], inNbr[i], msize); } } sendrecv(i, sendbuf, msize+1, outNbr[i], recbuf, rbuf.capacity(), inNbr[i], depth*3+1); msize = irbuf.get(); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce layer %d machine %d got msg from %d, size %d\n", depth, imachine, inNbr[i], msize); } } Vec res = new Vec(msize); rbuf.position(1); rbuf.get(res.data, 0, msize); synchronized (newv) { res.addTo(newv, downMaps[i]); } latch.countDown(); } } public Vec reduceDown(Vec downv) { Vec newv = new Vec(downn); CountDownLatch latch = new CountDownLatch(k); for (int i = 0; i < k; i++) { int ix = (i + posInMyGroup) % k; // Try to 
stagger the traffic executor.execute(new ReduceDownThread(newv, downv, ix, latch)); } try { latch.await(); } catch (InterruptedException e) {} return newv; } public class ReduceUpThread implements Runnable { Vec newv; Vec upv; int i; CountDownLatch latch; public ReduceUpThread(Vec newv0, Vec upv0, int i0, CountDownLatch latch0) { newv = newv0; upv = upv0; i = i0; latch = latch0; } public void run () { sendbuf[i].clear(); recbuf[i].clear(); IntBuffer isbuf = sendbuf[i].asIntBuffer(); IntBuffer irbuf = recbuf[i].asIntBuffer(); FloatBuffer sbuf = sendbuf[i].asFloatBuffer(); FloatBuffer rbuf = recbuf[i].asFloatBuffer(); Vec up = upv.mapFrom(upMaps[i]); int msize = up.size(); isbuf.put(msize); sbuf.position(1); sbuf.put(up.data, 0, msize); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce up layer %d machine %d sent msg to %d, from %d, size %d\n", depth, imachine, outNbr[i], inNbr[i], msize); } } sendrecv(i, sendbuf, msize+1, inNbr[i], recbuf, irbuf.capacity(), outNbr[i], depth*3+2); msize = irbuf.get(); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce up layer %d machine %d got msg from %d, size %d\n", depth, imachine, inNbr[i], msize); } } int psize = uPartInds[i+1] - uPartInds[i]; if (uPartInds[i+1] > newv.size()) throw new RuntimeException("ReduceUp index out of range "+uPartInds[i+1]+" "+newv.size()); if (msize != psize) throw new RuntimeException("ReduceUp size mismatch "+msize+" "+psize); rbuf.position(1); rbuf.get(newv.data, uPartInds[i], msize); latch.countDown(); } } public Vec reduceUp(Vec upv) { Vec newv = new Vec(upn); CountDownLatch latch = new CountDownLatch(k); for (int i = 0; i < k; i++) { int ix = (i + posInMyGroup) % k; // Try to stagger the traffic executor.execute(new ReduceUpThread(newv, upv, ix, latch)); } try { latch.await(); } catch (InterruptedException e) {} return newv; } } public boolean sendrecv(int igroup, ByteBuffer [] sbuf, int sendn, int outi, ByteBuffer [] rbuf, int recn, int ini, int tag) { sbuf[igroup].rewind(); Msg msg = new Msg(sbuf[igroup].array(), sendn, imachine, outi, tag); if (imachine == outi) { rbuf[igroup].clear(); rbuf[igroup].put(msg.buf, 0, 4*sendn); rbuf[igroup].rewind(); return true; } else { if (doSim) { for (int i = 0; i < replicate; i++) { simNetwork[outi + i*M].messages[imachine][tag] = msg; } } else { for (int i = 0; i < replicate; i++) { sockExecutor.execute(new SockWriter(outi + i*M, msg)); } } boolean gotit = false; while (!gotit) { for (int i = 0; i < replicate; i++) { if (messages[ini + i*M][tag] != null) { Msg rmsg = messages[ini + i*M][tag]; rbuf[igroup].clear(); rbuf[igroup].put(rmsg.buf, 0, 4*rmsg.size); rbuf[igroup].rewind(); gotit = true; break; } try { Thread.sleep(1); } catch (InterruptedException e) {} } } for (int i = 0; i < replicate; i++) { messages[ini + i*M][tag] = null; msgrecvd[ini + i*M][tag] = true; } return true; } } public class SockWriter implements Runnable { int dest; Msg msg; public SockWriter(int dest0, Msg msg0) { msg = msg0; dest = dest0; } public void run() { Socket socket = null; try { socket = new Socket(); socket.connect(new InetSocketAddress(machineIP[dest], sockBase + dest), sendTimeout); if (socket.isConnected()) { amsending[dest][msg.tag] = true; DataOutputStream ostr = new DataOutputStream(socket.getOutputStream()); ostr.writeInt(msg.size); ostr.writeInt(msg.sender); ostr.writeInt(msg.tag); ostr.write(msg.buf, 0, msg.size*4); } } catch (SocketTimeoutException e) { // No need to do anything } catch (ConnectException e) { // Server may have been 
killed - OK } catch (Exception e) { throw new RuntimeException("Problem writing socket "+e); } finally { try { if (socket != null) socket.close(); } catch (Exception e) {} amsending[dest][msg.tag] = false; } } } public class SockReader implements Runnable { Socket socket = null; public SockReader(Socket sock) { socket = sock; } public void run() { try { DataInputStream istr = new DataInputStream(socket.getInputStream()); int len = istr.readInt(); int src = istr.readInt(); int tag = istr.readInt(); if (!msgrecvd[src][tag]) { Msg msg = new Msg(len, src, imachine, tag); istr.readFully(msg.buf, 0, len*4); if (!msgrecvd[src][tag]) { messages[src][tag] = msg; } } } catch (Exception e) { throw new RuntimeException("Problem reading socket "+e); } finally { try {socket.close();} catch (IOException e) {} } } } public class Listener implements Runnable { boolean stop = false; ServerSocket ss = null; public Listener() { try { ss = new ServerSocket(sockBase + imachine); } catch (Exception e) { throw new RuntimeException("Couldnt start socket listener "+e); } } public void run() { while (!stop) { try { Socket cs = ss.accept(); sockExecutor.execute(new SockReader(cs)); } catch (SocketException e) { // This is probably due to the server shutting down. Don't do anything. } catch (Exception e) { throw new RuntimeException("Socket listener had a problem "+e); } } } public boolean stillSending() { boolean sending = false; for (int i = 0; i < amsending.length; i++) { boolean [] sendrow = amsending[i]; for (int j = 0; j < sendrow.length; j++) { if (amsending[i][j]) sending = true; } } return sending; } public void stop() { while (stillSending()) { try { Thread.sleep(1); } catch (InterruptedException e) {} } try { stop = true; ss.close(); } catch (Exception e) { throw new RuntimeException("Trouble closing listener"); } } } } public class Msg { byte [] buf; int size; int sender; int receiver; int tag; public Msg(int size0, int sender0, int receiver0, int tag0) { buf = new byte[4*size0]; size = size0; sender = sender0; receiver = receiver0; tag = tag0; } public Msg(byte [] inbuf, int size0, int sender0, int receiver0, int tag0) { buf = new byte[4*size0]; System.arraycopy(inbuf, 0, buf, 0, 4*size0); size = size0; sender = sender0; receiver = receiver0; tag = tag0; } } public Machine [] simNetwork = null; public AllReduceX(int M) { simNetwork = new Machine[M]; } public void stop() { if (simNetwork != null) { for (int i = 0; i < simNetwork.length; i++) simNetwork[i].stop(); } } }
src/main/java/edu/berkeley/bid/comm/AllReduceX.java
package edu.berkeley.bid.comm; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.Arrays; import java.io.*; import java.nio.*; import java.net.*; //import mpi.*; public class AllReduceX { public class Machine { /* Machine Configuration Variables */ int N; // Number of features int D; // Depth of the network int M; // Number of Machines int imachine; // My identity int [] allks; // k values int replicate = 1; // replication factor int sockBase = 50000; // Socket base address int sendTimeout = 1000; // in msec String [] machineIP; // String IP names Layer [] layers; // All the layers ByteBuffer [] sendbuf; // buffers, one for each destination in a group ByteBuffer [] recbuf; IVec finalMap; // Map to down from down --> up at layer D-1 Msg [][] messages; // Message queue for the simulation boolean [][] msgrecvd; boolean doSim = true; ExecutorService executor; ExecutorService sockExecutor; Listener listener; int trace = 0; // 0: no trace, 1: high-level, 2: everything public Machine(int N0, int [] allks0, int imachine0, int M0, int bufsize, boolean doSim0, int trace0, int replicate0, String [] machineIP0) { N = N0; M = M0; doSim = doSim0; replicate = replicate0; machineIP = machineIP0; if (machineIP == null) { machineIP = new String[M*replicate]; for (int i = 0; i < M*replicate; i++) machineIP[i] = "localhost"; } imachine = imachine0; allks = allks0; D = allks.length; trace = trace0; layers = new Layer[D]; int left = 0; int right = N; int cumk = 1; int maxk = 1; for (int i = 0; i < D; i++) { int k = allks[i]; layers[i] = new Layer(k, cumk, left, right, imachine, i); int pimg = layers[i].posInMyGroup; left = layers[i].left; if (pimg > 0) left = layers[i].partBoundaries.data[pimg-1]; right = layers[i].partBoundaries.data[pimg]; cumk *= k; maxk = Math.max(maxk, k); } executor = Executors.newFixedThreadPool(maxk); // set to 1 for sequential messaging. 
sendbuf = new ByteBuffer[maxk]; recbuf = new ByteBuffer[maxk]; for (int i = 0; i < maxk; i++) { sendbuf[i] = ByteBuffer.wrap(new byte[4*bufsize]); recbuf[i] = ByteBuffer.wrap(new byte[4*bufsize]); } messages = new Msg[M*replicate][]; msgrecvd = new boolean[M*replicate][]; for (int i = 0; i < M*replicate; i++) { messages[i] = new Msg[3*D]; msgrecvd[i] = new boolean[3*D]; } if (!doSim) { sockExecutor = Executors.newFixedThreadPool(1+4*maxk); listener = new Listener(); sockExecutor.execute(listener); } } public void stop() { if (listener != null) { listener.stop(); } } public void config(IVec downi, IVec upi) { IVec [] outputs = new IVec[2]; for (int i = 0; i < D; i++) { layers[i].config(downi, upi, outputs); downi = outputs[0]; upi = outputs[1]; } finalMap = IVec.mapInds(upi, downi); } public Vec reduce(Vec downv) { for (int d = 0; d < D; d++) { downv = layers[d].reduceDown(downv); } Vec upv = downv.mapFrom(finalMap); for (int d = D-1; d >= 0; d--) { upv = layers[d].reduceUp(upv); } if (trace > 0) { synchronized (AllReduceX.this) { System.out.format("machine %d reduce result nnz %d out of %d\n", imachine, upv.nnz(), upv.size()); } } return upv; } class Layer { /* Layer Configuration Variables */ int k; // Size of this group int left; // Left boundary of its indices int right; // Right boundary of its indices int depth; int posInMyGroup; // Position in this machines group int [] outNbr; // Machines we talk to int [] inNbr; // Machines we listen to IVec partBoundaries; // Partition boundaries IVec [] downMaps; // Maps to indices below for down indices IVec [] upMaps; // Maps to indices below for up indices int downn; // Size of the down master list int upn; // Size of the up vector int [] dPartInds; int [] uPartInds; public Layer(int k0, int cumk, int left0, int right0, int imachine, int depth0) { k = k0; int i; left = left0; right = right0; depth = depth0; partBoundaries = new IVec(k); inNbr = new int [k]; outNbr = new int [k]; dPartInds = new int[k+1]; uPartInds = new int[k+1]; int ckk = cumk * k; posInMyGroup = (imachine % ckk) / cumk; int ibase = (imachine % M) - posInMyGroup * cumk; for (i = 0; i < k; i++) { partBoundaries.data[i] = left + (int)(((long)(right - left)) * (i+1) / k); outNbr[i] = ibase + i * cumk; int toMe = (k + 2*posInMyGroup - i) % k; inNbr[i] = ibase + toMe * cumk; } downMaps = new IVec[k]; upMaps = new IVec[k]; } class ConfigThread implements Runnable { IVec [] downp; IVec [] upp; IVec [] dtree; IVec [] utree; int i; int repno; CountDownLatch latch; public ConfigThread(IVec [] downp0, IVec [] upp0, IVec [] dtree0, IVec [] utree0, int i0, CountDownLatch latch0) { downp = downp0; upp = upp0; dtree = dtree0; utree = utree0; i = i0; latch = latch0; } public void run() { sendbuf[i].clear(); recbuf[i].clear(); IntBuffer sbuf = sendbuf[i].asIntBuffer(); IntBuffer rbuf = recbuf[i].asIntBuffer(); int seg1 = downp[i].size(); int seg2 = seg1 + upp[i].size(); sbuf.put(seg1); sbuf.put(seg2); sbuf.put(downp[i].data, 0, seg1); sbuf.put(upp[i].data, 0, seg2-seg1); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("config layer %d machine %d sent msg to %d, from %d, sizes %d %d\n", depth, imachine, outNbr[i], inNbr[i], sbuf.get(0), sbuf.get(1)); } } sendrecv(i, sendbuf, seg2+2, outNbr[i], recbuf, rbuf.capacity(), inNbr[i], depth*3); seg1 = rbuf.get(); seg2 = rbuf.get(); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("config layer %d machine %d got msg from %d, sizes %d %d\n", depth, imachine, inNbr[i], seg1, seg2); } } IVec downout = new 
IVec(seg1); IVec upout = new IVec(seg2-seg1); rbuf.get(downout.data, 0, seg1); rbuf.get(upout.data, 0, seg2-seg1); IVec.checkTree(dtree, downout, i, k); IVec.checkTree(utree, upout, i, k); downp[i] = downout; upp[i] = upout; latch.countDown(); } } public void config(IVec downi, IVec upi, IVec [] outputs) { IVec [] downp = IVec.partition(downi, partBoundaries); IVec [] upp = IVec.partition(upi, partBoundaries); IVec [] dtree = new IVec[2*k-1]; IVec [] utree = new IVec[2*k-1]; if (trace > 0) { synchronized (AllReduceX.this) { System.out.format("machine %d layer %d, dparts (%d", imachine, depth, downp[0].size()); for (int i = 1; i < downp.length; i++) System.out.format(", %d", downp[i].size()); System.out.format(") from %d, bounds %d %d\n", downi.size(), partBoundaries.data[0], partBoundaries.data[partBoundaries.size()-1]); System.out.format("machine %d layer %d, uparts (%d", imachine, depth, upp[0].size()); for (int i = 1; i < upp.length; i++) System.out.format(", %d", upp[i].size()); System.out.format(") from %d, bounds %d %d\n", upi.size(), partBoundaries.data[0], partBoundaries.data[partBoundaries.size()-1]); } } dPartInds[0] = 0; uPartInds[0] = 0; for (int i = 0; i < k; i++) { dPartInds[i+1] = dPartInds[i] + downp[i].size(); uPartInds[i+1] = uPartInds[i] + upp[i].size(); } CountDownLatch latch = new CountDownLatch(k); for (int i = 0; i < k; i++) { int ix = (i + posInMyGroup) % k; // Try to stagger the traffic executor.execute(new ConfigThread(downp, upp, dtree, utree, ix, latch)); } try { latch.await(); } catch (InterruptedException e) {} IVec dmaster = dtree[0]; Arrays.fill(dtree, null); downn = dmaster.size(); IVec umaster = utree[0]; Arrays.fill(utree, null); upn = upi.size(); for (int i = 0; i < k; i++) { downMaps[i] = IVec.mapInds(downp[i], dmaster); upMaps[i] = IVec.mapInds(upp[i], umaster); if (trace > 0) { synchronized (AllReduceX.this) { System.out.format("machine %d dmap(%d) size %d\n", imachine, i, downMaps[i].size()); System.out.format("machine %d umap(%d) size %d\n", imachine, i, upMaps[i].size()); } } } outputs[0] = dmaster; outputs[1] = umaster; } public class ReduceDownThread implements Runnable { Vec newv; Vec downv; int i; CountDownLatch latch; public ReduceDownThread(Vec newv0, Vec downv0, int i0, CountDownLatch latch0) { newv = newv0; downv = downv0; i = i0; latch = latch0; } public void run() { sendbuf[i].clear(); recbuf[i].clear(); IntBuffer isbuf = sendbuf[i].asIntBuffer(); IntBuffer irbuf = recbuf[i].asIntBuffer(); FloatBuffer sbuf = sendbuf[i].asFloatBuffer(); FloatBuffer rbuf = recbuf[i].asFloatBuffer(); int msize = dPartInds[i+1] - dPartInds[i]; isbuf.put(msize); sbuf.position(1); sbuf.put(downv.data, dPartInds[i], msize); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce layer %d machine %d sent msg to %d, from %d, size %d\n", depth, imachine, outNbr[i], inNbr[i], msize); } } sendrecv(i, sendbuf, msize+1, outNbr[i], recbuf, rbuf.capacity(), inNbr[i], depth*3+1); msize = irbuf.get(); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce layer %d machine %d got msg from %d, size %d\n", depth, imachine, inNbr[i], msize); } } Vec res = new Vec(msize); rbuf.position(1); rbuf.get(res.data, 0, msize); synchronized (newv) { res.addTo(newv, downMaps[i]); } latch.countDown(); } } public Vec reduceDown(Vec downv) { Vec newv = new Vec(downn); CountDownLatch latch = new CountDownLatch(k); for (int i = 0; i < k; i++) { int ix = (i + posInMyGroup) % k; // Try to stagger the traffic executor.execute(new ReduceDownThread(newv, 
downv, ix, latch)); } try { latch.await(); } catch (InterruptedException e) {} return newv; } public class ReduceUpThread implements Runnable { Vec newv; Vec upv; int i; CountDownLatch latch; public ReduceUpThread(Vec newv0, Vec upv0, int i0, CountDownLatch latch0) { newv = newv0; upv = upv0; i = i0; latch = latch0; } public void run () { sendbuf[i].clear(); recbuf[i].clear(); IntBuffer isbuf = sendbuf[i].asIntBuffer(); IntBuffer irbuf = recbuf[i].asIntBuffer(); FloatBuffer sbuf = sendbuf[i].asFloatBuffer(); FloatBuffer rbuf = recbuf[i].asFloatBuffer(); Vec up = upv.mapFrom(upMaps[i]); int msize = up.size(); isbuf.put(msize); sbuf.position(1); sbuf.put(up.data, 0, msize); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce up layer %d machine %d sent msg to %d, from %d, size %d\n", depth, imachine, outNbr[i], inNbr[i], msize); } } sendrecv(i, sendbuf, msize+1, inNbr[i], recbuf, irbuf.capacity(), outNbr[i], depth*3+2); msize = irbuf.get(); if (trace > 1) { synchronized (AllReduceX.this) { System.out.format("reduce up layer %d machine %d got msg from %d, size %d\n", depth, imachine, inNbr[i], msize); } } int psize = uPartInds[i+1] - uPartInds[i]; if (uPartInds[i+1] > newv.size()) throw new RuntimeException("ReduceUp index out of range "+uPartInds[i+1]+" "+newv.size()); if (msize != psize) throw new RuntimeException("ReduceUp size mismatch "+msize+" "+psize); rbuf.position(1); rbuf.get(newv.data, uPartInds[i], msize); latch.countDown(); } } public Vec reduceUp(Vec upv) { Vec newv = new Vec(upn); CountDownLatch latch = new CountDownLatch(k); for (int i = 0; i < k; i++) { int ix = (i + posInMyGroup) % k; // Try to stagger the traffic executor.execute(new ReduceUpThread(newv, upv, ix, latch)); } try { latch.await(); } catch (InterruptedException e) {} return newv; } } public boolean sendrecv(int igroup, ByteBuffer [] sbuf, int sendn, int outi, ByteBuffer [] rbuf, int recn, int ini, int tag) { sbuf[igroup].rewind(); Msg msg = new Msg(sbuf[igroup].array(), sendn, imachine, outi, tag); if (imachine == outi) { rbuf[igroup].clear(); rbuf[igroup].put(msg.buf, 0, 4*sendn); return true; } else { if (doSim) { for (int i = 0; i < replicate; i++) { simNetwork[outi + i*M].messages[imachine][tag] = msg; } } else { for (int i = 0; i < replicate; i++) { sockExecutor.execute(new SockWriter(outi + i*M, msg)); } } boolean gotit = false; while (!gotit) { for (int i = 0; i < replicate; i++) { if (messages[ini + i*M][tag] != null) { Msg rmsg = messages[ini + i*M][tag]; rbuf[igroup].clear(); rbuf[igroup].put(rmsg.buf, 0, 4*rmsg.size); gotit = true; break; } try { Thread.sleep(1); } catch (InterruptedException e) {} } } for (int i = 0; i < replicate; i++) { messages[ini + i*M][tag] = null; msgrecvd[ini + i*M][tag] = true; } return true; } /* try { sbuf.rewind(); rbuf.clear(); MPI.COMM_WORLD.sendRecv(sbuf, 4*sendn, MPI.BYTE, outi, tag, rbuf, 4*recn, MPI.BYTE, ini, tag); sbuf.rewind(); rbuf.rewind(); } catch (MPIException e) { throw new RuntimeException("Exception in sendrecv "+e); } */ // JFC: Use this code /* try { Request [] sreq = new Request[replicate]; Request [] rreq = new Request[replicate]; boolean sdone = false; boolean rdone = false; for (int i = 0; i < replicate; i++) { sbuf[igroup + i*k].rewind(); rbuf[igroup + i*k].clear(); if (i > 0) { sbuf[igroup + i*k].put(sbuf[igroup].array(), 0, sendn); } sreq[i] = MPI.COMM_WORLD.iSend(sbuf[igroup + i*k].array(), 4*sendn, MPI.BYTE, outi + i*M, tag); rreq[i] = MPI.COMM_WORLD.iRecv(rbuf[igroup + i*k].array(), 4*recn, MPI.BYTE, ini + i*M, tag); } 
// Wait until timeout or when one send and one receive are done, then cancel others long timeout = 2000; // Wait this many msecs long then = System.currentTimeMillis(); while ((!sdone || !rdone) && System.currentTimeMillis() - then < timeout) { if (!rdone) { for (int i = 0; i < replicate; i++) { if (rreq[i].testStatus() != null) { if (i > 0) { int msize = rbuf[igroup + i*k].asIntBuffer().get(0); rbuf[igroup].put(rbuf[igroup + i*k].array(), 0, msize); } rreq[i] = null; rdone = true; break; } } } if (!sdone) { for (int i = 0; i < replicate; i++) { if (sreq[i].testStatus() != null) { sreq[i] = null; sdone = true; break; } } } Thread.sleep(1); } for (int i = 0; i < replicate; i++) { if (sreq[i] != null && sreq[i].testStatus() == null) sreq[i].cancel(); if (rreq[i] != null && rreq[i].testStatus() == null) rreq[i].cancel(); } if (!rdone || !sdone) { return false; } sbuf[igroup].rewind(); rbuf[igroup].rewind(); } catch (Exception e) { throw new RuntimeException("Exception in sendrecv "+e); } */ } public class SockWriter implements Runnable { int dest; Msg msg; public SockWriter(int dest0, Msg msg0) { msg = msg0; dest = dest0; } public void run() { try { Socket socket = new Socket(); socket.connect(new InetSocketAddress(machineIP[dest], sockBase + dest), sendTimeout); if (socket.isConnected()) { DataOutputStream ostr = new DataOutputStream(socket.getOutputStream()); ostr.writeInt(msg.size); ostr.writeInt(msg.sender); ostr.writeInt(msg.tag); ostr.write(msg.buf, 0, msg.size*4); socket.close(); } } catch (SocketTimeoutException e) { // No need to do anything } catch (ConnectException e) { // Server may have been killed - OK } catch (Exception e) { throw new RuntimeException("Problem writing socket "+e); } } } public class SockReader implements Runnable { Socket socket = null; public SockReader(Socket sock) { socket = sock; } public void run() { try { DataInputStream istr = new DataInputStream(socket.getInputStream()); int len = istr.readInt(); int src = istr.readInt(); int tag = istr.readInt(); if (!msgrecvd[src][tag]) { Msg msg = new Msg(len, src, imachine, tag); istr.readFully(msg.buf, 0, len*4); if (!msgrecvd[src][tag]) { messages[src][tag] = msg; } } } catch (Exception e) { throw new RuntimeException("Problem reading socket "+e); } finally { try {socket.close();} catch (IOException e) {} } } } public class Listener implements Runnable { boolean stop = false; ServerSocket ss = null; public Listener() { try { ss = new ServerSocket(sockBase + imachine); } catch (Exception e) { throw new RuntimeException("Couldnt start socket listener "+e); } } public void run() { while (!stop) { try { Socket cs = ss.accept(); sockExecutor.execute(new SockReader(cs)); } catch (SocketException e) { // This is probably due to the server shutting down. Don't do anything. 
} catch (Exception e) { throw new RuntimeException("Socket listener had a problem "+e); } } } public void stop() { try { stop = true; ss.close(); } catch (Exception e) {} } } } public class Msg { byte [] buf; int size; int sender; int receiver; int tag; public Msg(int size0, int sender0, int receiver0, int tag0) { buf = new byte[4*size0]; size = size0; sender = sender0; receiver = receiver0; tag = tag0; } public Msg(byte [] inbuf, int size0, int sender0, int receiver0, int tag0) { buf = new byte[4*size0]; System.arraycopy(inbuf, 0, buf, 0, 4*size0); size = size0; sender = sender0; receiver = receiver0; tag = tag0; } } public Machine [] simNetwork = null; public AllReduceX(int M) { simNetwork = new Machine[M]; } public void stop() { if (simNetwork != null) { for (int i = 0; i < simNetwork.length; i++) simNetwork[i].stop(); } } }
AllReduceX rock solid
src/main/java/edu/berkeley/bid/comm/AllReduceX.java
AllReduceX rock solid
<ide><path>rc/main/java/edu/berkeley/bid/comm/AllReduceX.java <ide> IVec finalMap; // Map to down from down --> up at layer D-1 <ide> Msg [][] messages; // Message queue for the simulation <ide> boolean [][] msgrecvd; <add> boolean [][] amsending; <ide> boolean doSim = true; <ide> ExecutorService executor; <ide> ExecutorService sockExecutor; <ide> } <ide> messages = new Msg[M*replicate][]; <ide> msgrecvd = new boolean[M*replicate][]; <add> amsending = new boolean[M*replicate][]; <ide> for (int i = 0; i < M*replicate; i++) { <ide> messages[i] = new Msg[3*D]; <ide> msgrecvd[i] = new boolean[3*D]; <add> amsending[i] = new boolean[3*D]; <ide> } <ide> if (!doSim) { <ide> sockExecutor = Executors.newFixedThreadPool(1+4*maxk); <ide> if (imachine == outi) { <ide> rbuf[igroup].clear(); <ide> rbuf[igroup].put(msg.buf, 0, 4*sendn); <add> rbuf[igroup].rewind(); <ide> return true; <ide> } else { <ide> if (doSim) { <ide> Msg rmsg = messages[ini + i*M][tag]; <ide> rbuf[igroup].clear(); <ide> rbuf[igroup].put(rmsg.buf, 0, 4*rmsg.size); <add> rbuf[igroup].rewind(); <ide> gotit = true; <ide> break; <ide> } <ide> } <ide> return true; <ide> } <del> <del>/* try { <del> sbuf.rewind(); <del> rbuf.clear(); <del> MPI.COMM_WORLD.sendRecv(sbuf, 4*sendn, MPI.BYTE, outi, tag, rbuf, 4*recn, MPI.BYTE, ini, tag); <del> sbuf.rewind(); <del> rbuf.rewind(); <del> } catch (MPIException e) { <del> throw new RuntimeException("Exception in sendrecv "+e); <del> } */ <del> <del> // JFC: Use this code <del> /* try { <del> Request [] sreq = new Request[replicate]; <del> Request [] rreq = new Request[replicate]; <del> boolean sdone = false; <del> boolean rdone = false; <del> for (int i = 0; i < replicate; i++) { <del> sbuf[igroup + i*k].rewind(); <del> rbuf[igroup + i*k].clear(); <del> if (i > 0) { <del> sbuf[igroup + i*k].put(sbuf[igroup].array(), 0, sendn); <del> } <del> sreq[i] = MPI.COMM_WORLD.iSend(sbuf[igroup + i*k].array(), 4*sendn, MPI.BYTE, outi + i*M, tag); <del> rreq[i] = MPI.COMM_WORLD.iRecv(rbuf[igroup + i*k].array(), 4*recn, MPI.BYTE, ini + i*M, tag); <del> } <del> // Wait until timeout or when one send and one receive are done, then cancel others <del> long timeout = 2000; // Wait this many msecs <del> long then = System.currentTimeMillis(); <del> while ((!sdone || !rdone) && System.currentTimeMillis() - then < timeout) { <del> if (!rdone) { <del> for (int i = 0; i < replicate; i++) { <del> if (rreq[i].testStatus() != null) { <del> if (i > 0) { <del> int msize = rbuf[igroup + i*k].asIntBuffer().get(0); <del> rbuf[igroup].put(rbuf[igroup + i*k].array(), 0, msize); <del> } <del> rreq[i] = null; <del> rdone = true; <del> break; <del> } <del> } <del> } <del> if (!sdone) { <del> for (int i = 0; i < replicate; i++) { <del> if (sreq[i].testStatus() != null) { <del> sreq[i] = null; <del> sdone = true; <del> break; <del> } <del> } <del> } <del> Thread.sleep(1); <del> } <del> for (int i = 0; i < replicate; i++) { <del> if (sreq[i] != null && sreq[i].testStatus() == null) sreq[i].cancel(); <del> if (rreq[i] != null && rreq[i].testStatus() == null) rreq[i].cancel(); <del> } <del> <del> if (!rdone || !sdone) { <del> return false; <del> } <del> sbuf[igroup].rewind(); <del> rbuf[igroup].rewind(); <del> } catch (Exception e) { <del> throw new RuntimeException("Exception in sendrecv "+e); <del> } */ <ide> } <ide> <ide> public class SockWriter implements Runnable { <ide> } <ide> <ide> public void run() { <add> Socket socket = null; <ide> try { <del> Socket socket = new Socket(); <add> socket = new Socket(); <ide> socket.connect(new 
InetSocketAddress(machineIP[dest], sockBase + dest), sendTimeout); <ide> if (socket.isConnected()) { <add> amsending[dest][msg.tag] = true; <ide> DataOutputStream ostr = new DataOutputStream(socket.getOutputStream()); <ide> ostr.writeInt(msg.size); <ide> ostr.writeInt(msg.sender); <ide> ostr.writeInt(msg.tag); <ide> ostr.write(msg.buf, 0, msg.size*4); <del> socket.close(); <ide> } <ide> } catch (SocketTimeoutException e) { <ide> // No need to do anything <ide> // Server may have been killed - OK <ide> } catch (Exception e) { <ide> throw new RuntimeException("Problem writing socket "+e); <add> } finally { <add> try { if (socket != null) socket.close(); } catch (Exception e) {} <add> amsending[dest][msg.tag] = false; <ide> } <ide> } <ide> } <ide> } <ide> } <ide> <add> public boolean stillSending() { <add> boolean sending = false; <add> for (int i = 0; i < amsending.length; i++) { <add> boolean [] sendrow = amsending[i]; <add> for (int j = 0; j < sendrow.length; j++) { <add> if (amsending[i][j]) sending = true; <add> } <add> } <add> return sending; <add> } <add> <ide> public void stop() { <add> while (stillSending()) { <add> try { Thread.sleep(1); } catch (InterruptedException e) {} <add> } <ide> try { <ide> stop = true; <ide> ss.close(); <del> } catch (Exception e) {} <add> } catch (Exception e) { <add> throw new RuntimeException("Trouble closing listener"); <add> } <ide> } <ide> } <ide> }
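Note: the diff above makes stop() wait until every in-flight socket write has drained (tracked per destination and tag in amsending, polled via stillSending()) before closing the listener socket. A stripped-down sketch of that shutdown pattern follows; the names GracefulSender, send, and stop are illustrative assumptions, none of the AllReduce machinery is reproduced, and a single atomic counter stands in for the boolean amsending matrix.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

public final class GracefulSender {
    private final ExecutorService pool = Executors.newFixedThreadPool(4);
    private final AtomicInteger inFlight = new AtomicInteger();

    void send(Runnable write) {
        inFlight.incrementAndGet();          // mirrors amsending[dest][tag] = true
        pool.execute(() -> {
            try {
                write.run();                 // the actual socket write would go here
            } finally {
                inFlight.decrementAndGet();  // mirrors amsending[dest][tag] = false
            }
        });
    }

    void stop() throws InterruptedException {
        while (inFlight.get() > 0) {         // mirrors stillSending()
            Thread.sleep(1);
        }
        pool.shutdown();                     // only release resources once nothing is sending
    }

    public static void main(String[] args) throws InterruptedException {
        GracefulSender sender = new GracefulSender();
        sender.send(() -> { try { Thread.sleep(50); } catch (InterruptedException ignored) {} });
        sender.stop();
        System.out.println("all writes drained");
    }
}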
Java
mit
678b008c7dccfbc879ebe65c2c6c9ba231132bd6
0
xemime-lang/xemime,xemime-lang/xemime
package net.zero918nobita.Xemime.parser; import net.zero918nobita.Xemime.ast.*; import net.zero918nobita.Xemime.entity.Handler; import net.zero918nobita.Xemime.interpreter.Main; import net.zero918nobita.Xemime.lexer.Lexer; import net.zero918nobita.Xemime.lexer.TokenType; import net.zero918nobita.Xemime.resolver.Resolver; import java.util.ArrayList; import java.util.HashMap; /** * 一次子の構文解析を行います。 * @author Kodai Matsumoto */ class First extends ParseUnit { First(Lexer lexer, Resolver resolver) { super(lexer, resolver); } @Override Node parse() throws Exception { Node node = null; switch (lexer.tokenType()) { case EOS: break; case INT: node = lexer.value(); getToken(); // skip int literal break; case DOUBLE: node = lexer.value(); getToken(); // skip double literal break; case SUB: getToken(); // skip "-" node = new MinusNode(lexer.getLocation(), new First(lexer, resolver).parse()); break; case LP: getToken(); // skip "(" node = new Expr(lexer, resolver).parse(); // Syntax Error - 対応する括弧がありません。 if (lexer.tokenType() != TokenType.RP) throw new SyntaxError(lexer.getLocation(), 8, "対応する括弧がありません。"); getToken(); // skip ")" break; case LB: node = new Block(lexer, resolver).parse(); break; case DECLARE: getToken(); // skip "let" if (lexer.tokenType() == TokenType.SYMBOL) { Symbol sym = (Symbol) lexer.value(); getToken(); // skip symbol if (lexer.tokenType() == TokenType.ASSIGN) { getToken(); // skip "=" // 現在のスコープに変数を登録する resolver.declareVar(sym); node = new DeclarationNode(lexer.getLocation(), sym, new Expr(lexer, resolver).parse()); } else { throw new Exception(lexer.getLocation() + ": 変数宣言式が不正です"); } } else { throw new Exception(lexer.getLocation() + ": 変数宣言式が不正です"); } break; case ATTR: getToken(); // skip "attr" if (lexer.tokenType() != TokenType.SYMBOL) throw new SyntaxError(lexer.getLocation(), 16, "属性定義式では attr の後ろに属性名の記述が必要です。"); Symbol attr = (Symbol) lexer.value(); getToken(); // skip symbol if (lexer.tokenType() != TokenType.LB) throw new SyntaxError(lexer.getLocation(), 17, "属性定義式ではシンボルの後ろに波括弧が必要です。"); HashMap<Symbol, Node> member = new HashMap<>(); getToken(); // skip "{" Symbol name = (Symbol) lexer.value(); getToken(); // skip name if (lexer.tokenType() != TokenType.COLON) throw new SyntaxError(lexer.getLocation(), 18, "属性定義式ではメンバ名と値の区切りとなるコロンが必要です。"); getToken(); // skip colon Node value = new Expr(lexer, resolver).parse(); member.put(name, value); while (lexer.tokenType() != TokenType.RB) { if (lexer.tokenType() != TokenType.COMMA) throw new SyntaxError(lexer.getLocation(), 19, ""); getToken(); // skip comma name = (Symbol) lexer.value(); getToken(); // skip name if (lexer.tokenType() != TokenType.COLON) throw new SyntaxError(lexer.getLocation(), 20, "属性定義式ではメンバ名と値の区切りとなるコロンが必要です。"); getToken(); // skip colon value = new Expr(lexer, resolver).parse(); member.put(name, value); } getToken(); // skip "}" resolver.declareVar(attr); node = new AttrDeclarationNode(lexer.getLocation(), attr, member); break; case SUBST: getToken(); // skip "subst" if (lexer.tokenType() == TokenType.SYMBOL) { Symbol sym = (Symbol) lexer.value(); getToken(); // skip symbol if (lexer.tokenType() == TokenType.ATTACH) { getToken(); // skip "<-" node = new SubstanceDeclarationNode(lexer.getLocation(), sym, new Expr(lexer, resolver).parse()); } else { throw new Exception(lexer.getLocation() + ": 実体宣言式が不正です。"); } } else { throw new Exception(lexer.getLocation() + ": 実体宣言式が不正です。"); } break; case SYMBOL: Symbol sym = (Symbol) lexer.value(); // 変数の参照を解決する resolver.referVar(lexer.getLocation(), sym); getToken(); // 
skip symbol if (lexer.tokenType() == TokenType.ASSIGN) { // 宣言済みの変数への代入 getToken(); node = new AssignNode(lexer.getLocation(), sym, new Expr(lexer, resolver).parse()); } else if (lexer.tokenType() == TokenType.LP) { // 関数呼び出し node = new MethodCall(lexer, resolver).methodCall(sym); } else if (lexer.tokenType() == TokenType.DOLLAR) { // 括弧を省略した関数呼び出し getToken(); // skip "$" ArrayList<Node> list = new ArrayList<>(); if (lexer.tokenType() == TokenType.SEMICOLON) { node = new FuncallNode(lexer.getLocation(), method(sym), list); break; } Node expr = new Expr(lexer, resolver).parse(); if (expr == null) throw new Exception(lexer.getLocation() + ": 文法エラーです"); list.add(expr); while (lexer.tokenType() != TokenType.SEMICOLON) { if (lexer.tokenType() != TokenType.COMMA) throw new Exception(lexer.getLocation() + ": 文法エラーです"); getToken(); list.add(new Expr(lexer, resolver).parse()); } node = new FuncallNode(lexer.getLocation(), method(sym), list); } else { node = method(sym); } break; case SEMICOLON: node = null; break; default: throw new Exception(lexer.getLocation() + ": 文法エラーです"); } while (lexer.tokenType() == TokenType.LP) { ArrayList<Node> list = new ArrayList<>(); getToken(); if (lexer.tokenType() != TokenType.RP) list = new Args(lexer, resolver).arguments(); if (lexer.tokenType() != TokenType.RP) throw new Exception(lexer.getLocation() + ": 文法エラー"); getToken(); node = new FuncallNode(lexer.getLocation(), node, list); } return node; } private Node method(Symbol sym) throws Exception { Node node; Handler core; switch (sym.getName()) { case "if": core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "if")); break; case "print": core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "print")); break; case "println": core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "println")); break; case "exit": if (!Main.allowExitMethod()) throw new Exception(lexer.getLocation() + ": この実行環境で実行することはできません"); core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "exit")); break; default: node = sym; } return node; } }
src/main/java/net/zero918nobita/Xemime/parser/First.java
package net.zero918nobita.Xemime.parser; import net.zero918nobita.Xemime.ast.*; import net.zero918nobita.Xemime.entity.Handler; import net.zero918nobita.Xemime.interpreter.Main; import net.zero918nobita.Xemime.lexer.Lexer; import net.zero918nobita.Xemime.lexer.TokenType; import net.zero918nobita.Xemime.resolver.Resolver; import java.util.ArrayList; /** * 一次子の構文解析を行います。 * @author Kodai Matsumoto */ class First extends ParseUnit { First(Lexer lexer, Resolver resolver) { super(lexer, resolver); } @Override Node parse() throws Exception { Node node = null; switch (lexer.tokenType()) { case EOS: break; case INT: node = lexer.value(); getToken(); // skip int literal break; case DOUBLE: node = lexer.value(); getToken(); // skip double literal break; case SUB: getToken(); // skip "-" node = new MinusNode(lexer.getLocation(), new First(lexer, resolver).parse()); break; case LP: getToken(); // skip "(" node = new Expr(lexer, resolver).parse(); // Syntax Error - 対応する括弧がありません。 if (lexer.tokenType() != TokenType.RP) throw new SyntaxError(lexer.getLocation(), 8, "対応する括弧がありません。"); getToken(); // skip ")" break; case LB: node = new Block(lexer, resolver).parse(); break; case DECLARE: getToken(); // skip "let" if (lexer.tokenType() == TokenType.SYMBOL) { Symbol sym = (Symbol) lexer.value(); getToken(); // skip symbol if (lexer.tokenType() == TokenType.ASSIGN) { getToken(); // skip "=" // 現在のスコープに変数を登録する resolver.declareVar(sym); node = new DeclarationNode(lexer.getLocation(), sym, new Expr(lexer, resolver).parse()); } else { throw new Exception(lexer.getLocation() + ": 変数宣言式が不正です"); } } else { throw new Exception(lexer.getLocation() + ": 変数宣言式が不正です"); } break; case ATTR: getToken(); // skip "attr" if (lexer.tokenType() != TokenType.SYMBOL) throw new SyntaxError(lexer.getLocation(), 16, "属性定義式では attr の後ろに属性名の記述が必要です。"); Symbol attr = (Symbol) lexer.value(); getToken(); // skip symbol if (lexer.tokenType() != TokenType.LB) throw new SyntaxError(lexer.getLocation(), 17, "属性定義式ではシンボルの後ろに波括弧が必要です。"); HashMap<Symbol, Node> member = new HashMap<>(); getToken(); // skip "{" Symbol name = (Symbol) lexer.value(); getToken(); // skip name if (lexer.tokenType() != TokenType.COLON) throw new SyntaxError(lexer.getLocation(), 18, "属性定義式ではメンバ名と値の区切りとなるコロンが必要です。"); getToken(); // skip colon Node value = new Expr(lexer, resolver).parse(); member.put(name, value); while (lexer.tokenType() != TokenType.RB) { if (lexer.tokenType() != TokenType.COMMA) throw new SyntaxError(lexer.getLocation(), 19, ""); getToken(); // skip comma name = (Symbol) lexer.value(); getToken(); // skip name if (lexer.tokenType() != TokenType.COLON) throw new SyntaxError(lexer.getLocation(), 20, "属性定義式ではメンバ名と値の区切りとなるコロンが必要です。"); getToken(); // skip colon value = new Expr(lexer, resolver).parse(); member.put(name, value); } getToken(); // skip "}" resolver.declareVar(attr); node = new AttrDeclarationNode(lexer.getLocation(), attr, member); break; case SUBST: getToken(); // skip "subst" if (lexer.tokenType() == TokenType.SYMBOL) { Symbol sym = (Symbol) lexer.value(); getToken(); // skip symbol if (lexer.tokenType() == TokenType.ATTACH) { getToken(); // skip "<-" node = new SubstanceDeclarationNode(lexer.getLocation(), sym, new Expr(lexer, resolver).parse()); } else { throw new Exception(lexer.getLocation() + ": 実体宣言式が不正です。"); } } else { throw new Exception(lexer.getLocation() + ": 実体宣言式が不正です。"); } break; case SYMBOL: Symbol sym = (Symbol) lexer.value(); // 変数の参照を解決する resolver.referVar(lexer.getLocation(), sym); getToken(); // skip symbol if 
(lexer.tokenType() == TokenType.ASSIGN) { // 宣言済みの変数への代入 getToken(); node = new AssignNode(lexer.getLocation(), sym, new Expr(lexer, resolver).parse()); } else if (lexer.tokenType() == TokenType.LP) { // 関数呼び出し node = new MethodCall(lexer, resolver).methodCall(sym); } else if (lexer.tokenType() == TokenType.DOLLAR) { // 括弧を省略した関数呼び出し getToken(); // skip "$" ArrayList<Node> list = new ArrayList<>(); if (lexer.tokenType() == TokenType.SEMICOLON) { node = new FuncallNode(lexer.getLocation(), method(sym), list); break; } Node expr = new Expr(lexer, resolver).parse(); if (expr == null) throw new Exception(lexer.getLocation() + ": 文法エラーです"); list.add(expr); while (lexer.tokenType() != TokenType.SEMICOLON) { if (lexer.tokenType() != TokenType.COMMA) throw new Exception(lexer.getLocation() + ": 文法エラーです"); getToken(); list.add(new Expr(lexer, resolver).parse()); } node = new FuncallNode(lexer.getLocation(), method(sym), list); } else { node = method(sym); } break; case SEMICOLON: node = null; break; default: throw new Exception(lexer.getLocation() + ": 文法エラーです"); } while (lexer.tokenType() == TokenType.LP) { ArrayList<Node> list = new ArrayList<>(); getToken(); if (lexer.tokenType() != TokenType.RP) list = new Args(lexer, resolver).arguments(); if (lexer.tokenType() != TokenType.RP) throw new Exception(lexer.getLocation() + ": 文法エラー"); getToken(); node = new FuncallNode(lexer.getLocation(), node, list); } return node; } private Node method(Symbol sym) throws Exception { Node node; Handler core; switch (sym.getName()) { case "if": core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "if")); break; case "print": core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "print")); break; case "println": core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "println")); break; case "exit": if (!Main.allowExitMethod()) throw new Exception(lexer.getLocation() + ": この実行環境で実行することはできません"); core = (Handler) Main.getValueOfSymbol(Symbol.intern(0, "Core")); if (core == null) throw new Exception("深刻なエラー: Core オブジェクトがありません"); node = core.message(lexer.getLocation(), Symbol.intern(0, "exit")); break; default: node = sym; } return node; } }
Fix typo
src/main/java/net/zero918nobita/Xemime/parser/First.java
Fix typo
<ide><path>src/main/java/net/zero918nobita/Xemime/parser/First.java <ide> import net.zero918nobita.Xemime.resolver.Resolver; <ide> <ide> import java.util.ArrayList; <add>import java.util.HashMap; <ide> <ide> /** <ide> * 一次子の構文解析を行います。
JavaScript
mit
f3cabb46b15aaaba8be656f45c71fe3fe1eb952d
0
RichoM/MiniMorphicJS,RichoM/MiniMorphicJS
var Form = (function () { /* * Code taken from: http://www.playmycode.com/blog/2011/06/realtime-image-tinting-on-html5-canvas/ */ function generateImageForChannel(img, w, h, pixels, rgbI) { return new Promise((resolve, reject) => { var canvas = document.createElement("canvas"); canvas.width = w; canvas.height = h; var ctx = canvas.getContext('2d'); ctx.drawImage(img, 0, 0); var to = ctx.getImageData(0, 0, w, h); var toData = to.data; for ( var i = 0, len = pixels.length; i < len; i += 4) { toData[i] = (rgbI === 0) ? pixels[i] : 0; toData[i + 1] = (rgbI === 1) ? pixels[i + 1] : 0; toData[i + 2] = (rgbI === 2) ? pixels[i + 2] : 0; toData[i + 3] = pixels[i + 3]; } ctx.putImageData(to, 0, 0); // image is _slightly_ faster then canvas for this, so convert var imgComp = new Image(); imgComp.onload = function () { resolve(imgComp); }; imgComp.src = canvas.toDataURL(); }); } function generateRGBKs(img) { var w = img.width; var h = img.height; var rgbks = []; var canvas = document.createElement("canvas"); canvas.width = w; canvas.height = h; var ctx = canvas.getContext("2d"); ctx.drawImage(img, 0, 0); var pixels = ctx.getImageData(0, 0, w, h).data; // 4 is used to ask for 3 images: red, green, blue and // black in that order. for (var rgbI = 0; rgbI < 4; rgbI++) { rgbks.push(generateImageForChannel(img, w, h, pixels, rgbI)); } return Promise.all(rgbks); } /* * Code taken from: http://www.playmycode.com/blog/2011/06/realtime-image-tinting-on-html5-canvas/ */ function generateTintImage(img, rgbks, red, green, blue) { var buff = document.createElement("canvas"); buff.width = img.width; buff.height = img.height; var ctx = buff.getContext("2d"); ctx.globalAlpha = 1; ctx.globalCompositeOperation = 'copy'; ctx.drawImage(rgbks[3], 0, 0); ctx.globalCompositeOperation = 'lighter'; if (red > 0) { ctx.globalAlpha = red / 255.0; ctx.drawImage(rgbks[0], 0, 0); } if (green > 0) { ctx.globalAlpha = green / 255.0; ctx.drawImage(rgbks[1], 0, 0); } if (blue > 0) { ctx.globalAlpha = blue / 255.0; ctx.drawImage(rgbks[2], 0, 0); } return buff; } return class Form { constructor(img) { //TODO: use private fields when it is on the ECMA standard this._img = img; this._extent = { w : img.width, h : img.height }; /* * Draw the image on an internal canvas in order to be able to use * getImageData() to ask for the colors at a specific pixel. 
*/ let canvas = document.createElement("canvas"); canvas.width = img.width; canvas.height = img.height; let ctx = canvas.getContext("2d"); ctx.drawImage(img, 0, 0); this._canvas = canvas; this._ctx = ctx; } get extent() { return this._extent; } get img() { return this._img; } colorAt(point) { var data = this._ctx.getImageData(point.x, point.y, 1, 1).data; return { r : data[0], g : data[1], b : data[2], a : data[3] }; } alphaAt(point) { var data = this._ctx.getImageData(point.x, point.y, 1, 1).data; return data[3]; } tint(r, g, b) { return new Promise((resolve, reject) => { let img = this.img; generateRGBKs(img).then(rgbks => { let tintImg = generateTintImage(img, rgbks, r, g, b); let canvas = document.createElement("canvas"); canvas.width = img.width; canvas.height = img.height; let ctx = canvas.getContext("2d"); ctx.fillStyle = "black"; //ctx.fillRect(0, 0, canvas.width, canvas.height); ctx.drawImage(tintImg, 0, 0); let result = new Image(); result.onload = function () { resolve(new Form(result)); } result.src = canvas.toDataURL(); }); }); } static loadImage(src) { return new Promise((resolve, reject) => { let img = new Image(); img.onload = function () { resolve(new Form(img)); }; img.src = src; }); } static load(sources) { return Promise.all(sources.map(Form.loadImage)); } static loadSpritesheet(src, w, h) { return new Promise((resolve, reject) => { let img = new Image(); img.onload = function () { let rows = img.width / w; let cols = img.height / h; let pieces = []; for (let j = 0; j < cols; j++) { for (let i = 0; i < rows; i++) { let canvas = document.createElement("canvas"); canvas.width = w; canvas.height = h; let ctx = canvas.getContext("2d"); ctx.drawImage(img, i*w, j*h, w, h, 0, 0, w, h); let temp = new Image(); pieces.push(new Promise((resolve, reject) => { temp.onload = function () { resolve(new Form(temp)); } temp.src = canvas.toDataURL(); })); } } Promise.all(pieces).then(resolve); }; img.src = src; }); } }; })();
js/core/Form.js
var Form = (function () { /* * Code taken from: http://www.playmycode.com/blog/2011/06/realtime-image-tinting-on-html5-canvas/ */ function generateImageForChannel(img, w, h, pixels, rgbI) { return new Promise((resolve, reject) => { var canvas = document.createElement("canvas"); canvas.width = w; canvas.height = h; var ctx = canvas.getContext('2d'); ctx.drawImage(img, 0, 0); var to = ctx.getImageData(0, 0, w, h); var toData = to.data; for ( var i = 0, len = pixels.length; i < len; i += 4) { toData[i] = (rgbI === 0) ? pixels[i] : 0; toData[i + 1] = (rgbI === 1) ? pixels[i + 1] : 0; toData[i + 2] = (rgbI === 2) ? pixels[i + 2] : 0; toData[i + 3] = pixels[i + 3]; } ctx.putImageData(to, 0, 0); // image is _slightly_ faster then canvas for this, so convert var imgComp = new Image(); imgComp.onload = function () { resolve(imgComp); }; imgComp.src = canvas.toDataURL(); }); } function generateRGBKs(img) { var w = img.width; var h = img.height; var rgbks = []; var canvas = document.createElement("canvas"); canvas.width = w; canvas.height = h; var ctx = canvas.getContext("2d"); ctx.drawImage(img, 0, 0); var pixels = ctx.getImageData(0, 0, w, h).data; // 4 is used to ask for 3 images: red, green, blue and // black in that order. for (var rgbI = 0; rgbI < 4; rgbI++) { rgbks.push(generateImageForChannel(img, w, h, pixels, rgbI)); } return Promise.all(rgbks); } /* * Code taken from: http://www.playmycode.com/blog/2011/06/realtime-image-tinting-on-html5-canvas/ */ function generateTintImage(img, rgbks, red, green, blue) { var buff = document.createElement("canvas"); buff.width = img.width; buff.height = img.height; var ctx = buff.getContext("2d"); ctx.globalAlpha = 1; ctx.globalCompositeOperation = 'copy'; ctx.drawImage(rgbks[3], 0, 0); ctx.globalCompositeOperation = 'lighter'; if (red > 0) { ctx.globalAlpha = red / 255.0; ctx.drawImage(rgbks[0], 0, 0); } if (green > 0) { ctx.globalAlpha = green / 255.0; ctx.drawImage(rgbks[1], 0, 0); } if (blue > 0) { ctx.globalAlpha = blue / 255.0; ctx.drawImage(rgbks[2], 0, 0); } return buff; } return class Form { constructor(img) { //TODO: use private fields when it is on the ECMA standard this._img = img; this._extent = { w : img.width, h : img.height }; /* * Draw the image on an internal canvas in order to be able to use * getImageData() to ask for the colors at a specific pixel. 
*/ let canvas = document.createElement("canvas"); canvas.width = img.width; canvas.height = img.height; let ctx = canvas.getContext("2d"); ctx.drawImage(img, 0, 0); this._canvas = canvas; this._ctx = ctx; } get extent() { return this._extent; } get img() { return this._img; } colorAt(point) { var data = this._ctx.getImageData(point.x, point.y, 1, 1).data; return { r : data[0], g : data[1], b : data[2], a : data[3] }; } alphaAt(point) { var data = this._ctx.getImageData(point.x, point.y, 1, 1).data; return data[3]; } tint(r, g, b) { return new Promise((resolve, reject) => { let img = this.img; generateRGBKs(img).then(rgbks => { let tintImg = generateTintImage(img, rgbks, r, g, b); let canvas = document.createElement("canvas"); canvas.width = img.width; canvas.height = img.height; let ctx = canvas.getContext("2d"); ctx.fillStyle = "black"; //ctx.fillRect(0, 0, canvas.width, canvas.height); ctx.drawImage(tintImg, 0, 0); let result = new Image(); result.onload = function () { resolve(new Form(result)); } result.src = canvas.toDataURL(); }); }); } static loadImage(src) { return new Promise((resolve, reject) => { let img = new Image(); img.onload = function () { resolve(new Form(img)); }; img.src = src; }); } static load(sources) { return Promise.all(sources.map(Form.loadImage)); } }; })();
Added Form.loadSpritesheet(..)
js/core/Form.js
Added Form.loadSpritesheet(..)
<ide><path>s/core/Form.js <ide> static load(sources) { <ide> return Promise.all(sources.map(Form.loadImage)); <ide> } <add> static loadSpritesheet(src, w, h) { <add> return new Promise((resolve, reject) => { <add> let img = new Image(); <add> img.onload = function () { <add> let rows = img.width / w; <add> let cols = img.height / h; <add> let pieces = []; <add> for (let j = 0; j < cols; j++) { <add> for (let i = 0; i < rows; i++) { <add> let canvas = document.createElement("canvas"); <add> canvas.width = w; <add> canvas.height = h; <add> let ctx = canvas.getContext("2d"); <add> ctx.drawImage(img, i*w, j*h, w, h, 0, 0, w, h); <add> let temp = new Image(); <add> pieces.push(new Promise((resolve, reject) => { <add> temp.onload = function () { <add> resolve(new Form(temp)); <add> } <add> temp.src = canvas.toDataURL(); <add> })); <add> } <add> } <add> Promise.all(pieces).then(resolve); <add> }; <add> img.src = src; <add> }); <add> } <ide> }; <ide> })();
Java
apache-2.0
c1a91aa9def0f4ee817ec2f5d8383b4698fd8f61
0
DavideD/hibernate-validator,mxrenkin/hibernate-validator,marko-bekhta/hibernate-validator,mxrenkin/hibernate-validator,flibbertigibbet/hibernate-validator-android,gastaldi/hibernate-validator,emmanuelbernard/hibernate-validator,DavideD/hibernate-validator,shahramgdz/hibernate-validator,hibernate/hibernate-validator,shahramgdz/hibernate-validator,mohanaraosv/hibernate-validator,shahramgdz/hibernate-validator,mxrenkin/hibernate-validator,DavideD/hibernate-validator,flibbertigibbet/hibernate-validator-android,hibernate/hibernate-validator,mohanaraosv/hibernate-validator,flibbertigibbet/hibernate-validator-android,fazerish/hibernate-validator,DavideD/hibernate-validator,flibbertigibbet/hibernate-validator-android,hferentschik/hibernate-validator,mohanaraosv/hibernate-validator,fazerish/hibernate-validator,hibernate/hibernate-validator,hferentschik/hibernate-validator,shahramgdz/hibernate-validator,marko-bekhta/hibernate-validator,marko-bekhta/hibernate-validator,fazerish/hibernate-validator,mxrenkin/hibernate-validator,fazerish/hibernate-validator,mohanaraosv/hibernate-validator,hferentschik/hibernate-validator,gastaldi/hibernate-validator
/* * JBoss, Home of Professional Open Source * Copyright 2009, Red Hat, Inc. and/or its affiliates, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hibernate.validator.engine; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.validation.Path; /** * @author Hardy Ferentschik */ public final class PathImpl implements Path, Serializable { private static final long serialVersionUID = 7564511574909882392L; public static final String PROPERTY_PATH_SEPARATOR = "."; /** * Regular expression used to split a string path into its elements. * * @see <a href="http://www.regexplanet.com/simple/index.jsp">Regular expression tester</a> */ private static final Pattern PATH_PATTERN = Pattern.compile( "(\\w+)(\\[(\\w*)\\])?(\\.(.*))*" ); private static final int PROPERTY_NAME_GROUP = 1; private static final int INDEXED_GROUP = 2; private static final int INDEX_GROUP = 3; private static final int REMAINING_STRING_GROUP = 5; private final List<Node> nodeList; private NodeImpl currentLeafNode; private int hashCode; /** * Returns a {@code Path} instance representing the path described by the given string. To create a root node the empty string should be passed. * * @param propertyPath the path as string representation. * * @return a {@code Path} instance representing the path described by the given string. * * @throws IllegalArgumentException in case {@code property == null} or {@code property} cannot be parsed. */ public static PathImpl createPathFromString(String propertyPath) { if ( propertyPath == null ) { throw new IllegalArgumentException( "null is not allowed as property path." ); } if ( propertyPath.length() == 0 ) { return createNewPath( null ); } return parseProperty( propertyPath ); } public static PathImpl createNewPath(String name) { PathImpl path = new PathImpl(); path.addNode( name ); return path; } public static PathImpl createRootPath() { return createNewPath( null ); } public static PathImpl createCopy(PathImpl path) { return new PathImpl( path ); } public final boolean isRootPath() { return nodeList.size() == 1 && nodeList.get( 0 ).getName() == null; } public final PathImpl getPathWithoutLeafNode() { return new PathImpl( nodeList.subList( 0, nodeList.size() - 1 ) ); } public final NodeImpl addNode(String nodeName) { NodeImpl parent = nodeList.size() == 0 ? 
null : (NodeImpl) nodeList.get( nodeList.size() - 1 ); currentLeafNode = new NodeImpl( nodeName, parent, false, null, null ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl makeLeafNodeIterable() { NodeImpl leafNode = getLeafNode(); currentLeafNode = new NodeImpl( leafNode.getName(), leafNode.getParent(), true, null, null ); nodeList.remove( leafNode ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl setLeafNodeIndex(Integer index) { NodeImpl leafNode = getLeafNode(); currentLeafNode = new NodeImpl( leafNode.getName(), leafNode.getParent(), true, index, null ); nodeList.remove( leafNode ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl setLeafNodeMapKey(Object key) { NodeImpl leafNode = getLeafNode(); currentLeafNode = new NodeImpl( leafNode.getName(), leafNode.getParent(), true, null, key ); nodeList.remove( leafNode ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl getLeafNode() { return currentLeafNode; } public final Iterator<Path.Node> iterator() { if ( nodeList.size() == 0 ) { return Collections.<Path.Node>emptyList().iterator(); } if ( nodeList.size() == 1 ) { return nodeList.iterator(); } return nodeList.subList( 1, nodeList.size() ).iterator(); } public final String asString() { StringBuilder builder = new StringBuilder(); boolean first = true; for ( int i = 1; i < nodeList.size(); i++ ) { NodeImpl nodeImpl = (NodeImpl) nodeList.get( i ); if ( nodeImpl.getName() != null ) { if ( !first ) { builder.append( PROPERTY_PATH_SEPARATOR ); } builder.append( nodeImpl.asString() ); } first = false; } return builder.toString(); } @Override public String toString() { return asString(); } @Override public boolean equals(Object o) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } PathImpl path = (PathImpl) o; if ( nodeList != null && !nodeList.equals( path.nodeList ) ) { return false; } if ( nodeList == null && path.nodeList != null ) { return false; } return true; } @Override public int hashCode() { if(hashCode == -1) { buildHashCode(); } return hashCode; } public void buildHashCode() { hashCode = nodeList != null ? nodeList.hashCode() : 0; } /** * Copy constructor. * * @param path the path to make a copy of. 
*/ private PathImpl(PathImpl path) { this.nodeList = new ArrayList<Node>(); NodeImpl parent = null; NodeImpl node = null; for ( int i = 0; i < path.nodeList.size(); i++ ) { node = (NodeImpl) path.nodeList.get( i ); NodeImpl newNode = new NodeImpl( node, parent ); this.nodeList.add( newNode ); parent = newNode; } currentLeafNode = node; } private PathImpl() { nodeList = new ArrayList<Node>(); } private PathImpl(List<Node> nodeList) { this.nodeList = new ArrayList<Node>(); for ( Node node : nodeList ) { this.nodeList.add( node ); } } private static PathImpl parseProperty(String property) { PathImpl path = createNewPath( null ); String tmp = property; do { Matcher matcher = PATH_PATTERN.matcher( tmp ); if ( matcher.matches() ) { // create the node String value = matcher.group( PROPERTY_NAME_GROUP ); path.addNode( value ); // is the node indexable if ( matcher.group( INDEXED_GROUP ) != null ) { path.makeLeafNodeIterable(); } // take care of the index/key if one exists String indexOrKey = matcher.group( INDEX_GROUP ); if ( indexOrKey != null && indexOrKey.length() > 0 ) { try { Integer i = Integer.parseInt( indexOrKey ); path.setLeafNodeIndex( i ); } catch ( NumberFormatException e ) { path.setLeafNodeMapKey( indexOrKey ); } } // match the remaining string tmp = matcher.group( REMAINING_STRING_GROUP ); } else { throw new IllegalArgumentException( "Unable to parse property path " + property ); } } while ( tmp != null ); if ( path.getLeafNode().isIterable() ) { path.addNode( null ); } return path; } }
hibernate-validator/src/main/java/org/hibernate/validator/engine/PathImpl.java
/* * JBoss, Home of Professional Open Source * Copyright 2009, Red Hat, Inc. and/or its affiliates, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hibernate.validator.engine; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.validation.Path; /** * @author Hardy Ferentschik */ public final class PathImpl implements Path, Serializable { private static final long serialVersionUID = 7564511574909882392L; public static final String PROPERTY_PATH_SEPARATOR = "."; /** * Regular expression used to split a string path into its elements. * * @see <a href="http://www.regexplanet.com/simple/index.jsp">Regular expression tester</a> */ private static final Pattern PATH_PATTERN = Pattern.compile( "(\\w+)(\\[(\\w*)\\])?(\\.(.*))*" ); private static final int PROPERTY_NAME_GROUP = 1; private static final int INDEXED_GROUP = 2; private static final int INDEX_GROUP = 3; private static final int REMAINING_STRING_GROUP = 5; private final List<Node> nodeList; private NodeImpl currentLeafNode; private int hashCode; /** * Returns a {@code Path} instance representing the path described by the given string. To create a root node the empty string should be passed. * * @param propertyPath the path as string representation. * * @return a {@code Path} instance representing the path described by the given string. * * @throws IllegalArgumentException in case {@code property == null} or {@code property} cannot be parsed. */ public static PathImpl createPathFromString(String propertyPath) { if ( propertyPath == null ) { throw new IllegalArgumentException( "null is not allowed as property path." ); } if ( propertyPath.length() == 0 ) { return createNewPath( null ); } return parseProperty( propertyPath ); } public static PathImpl createNewPath(String name) { PathImpl path = new PathImpl(); path.addNode( name ); return path; } public static PathImpl createRootPath() { return createNewPath( null ); } public static PathImpl createCopy(PathImpl path) { return new PathImpl( path ); } public final boolean isRootPath() { return nodeList.size() == 1 && nodeList.get( 0 ).getName() == null; } public final PathImpl getPathWithoutLeafNode() { return new PathImpl( nodeList.subList( 0, nodeList.size() - 1 ) ); } public final NodeImpl addNode(String nodeName) { NodeImpl parent = nodeList.size() == 0 ? 
null : (NodeImpl) nodeList.get( nodeList.size() - 1 ); currentLeafNode = new NodeImpl( nodeName, parent, false, null, null ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl makeLeafNodeIterable() { NodeImpl leafNode = getLeafNode(); currentLeafNode = new NodeImpl( leafNode.getName(), leafNode.getParent(), true, null, null ); nodeList.remove( leafNode ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl setLeafNodeIndex(Integer index) { NodeImpl leafNode = getLeafNode(); currentLeafNode = new NodeImpl( leafNode.getName(), leafNode.getParent(), true, index, null ); nodeList.remove( leafNode ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl setLeafNodeMapKey(Object key) { NodeImpl leafNode = getLeafNode(); currentLeafNode = new NodeImpl( leafNode.getName(), leafNode.getParent(), true, null, key ); nodeList.remove( leafNode ); nodeList.add( currentLeafNode ); hashCode = -1; return currentLeafNode; } public final NodeImpl getLeafNode() { return currentLeafNode; } public final Iterator<Path.Node> iterator() { if ( nodeList.size() == 0 ) { return Collections.<Path.Node>emptyList().iterator(); } if ( nodeList.size() == 1 ) { return nodeList.iterator(); } return nodeList.subList( 1, nodeList.size() ).iterator(); } public final String asString() { StringBuilder builder = new StringBuilder(); boolean first = true; for ( int i = 1; i < nodeList.size(); i++ ) { NodeImpl nodeImpl = (NodeImpl) nodeList.get( i ); if ( nodeImpl.getName() != null ) { if ( !first ) { builder.append( PROPERTY_PATH_SEPARATOR ); } builder.append( nodeImpl.asString() ); } first = false; } return builder.toString(); } @Override public String toString() { return asString(); } @Override public boolean equals(Object o) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } PathImpl path = (PathImpl) o; if ( nodeList != null && !nodeList.equals( path.nodeList ) ) { return false; } if ( nodeList == null && path.nodeList != null ) { return false; } return true; } @Override public int hashCode() { if(hashCode == -1) { buildHashCode(); } return hashCode; } public void buildHashCode() { hashCode = nodeList != null ? nodeList.hashCode() : 0; } /** * Copy constructor. * * @param path the path to make a copy of. 
*/ private PathImpl(PathImpl path) { this.nodeList = new ArrayList<Node>(); NodeImpl parent = null; NodeImpl node = null; for ( int i = 0; i < path.nodeList.size(); i++ ) { node = (NodeImpl) path.nodeList.get( i ); NodeImpl newNode = new NodeImpl( node, parent ); this.nodeList.add( newNode ); parent = newNode; } currentLeafNode = node; } private PathImpl() { nodeList = new ArrayList<Node>(); } private PathImpl(List<Node> nodeList) { this.nodeList = new ArrayList<Node>(); for ( Node node : nodeList ) { this.nodeList.add( node ); } } private static PathImpl parseProperty(String property) { PathImpl path = createNewPath( null ); String tmp = property; do { Matcher matcher = PATH_PATTERN.matcher( tmp ); if ( matcher.matches() ) { // create the node String value = matcher.group( PROPERTY_NAME_GROUP ); Node node = path.addNode( value ); // is the node indexable if ( matcher.group( INDEXED_GROUP ) != null ) { path.makeLeafNodeIterable(); } // take care of the index/key if one exists String indexOrKey = matcher.group( INDEX_GROUP ); if ( indexOrKey != null && indexOrKey.length() > 0 ) { try { Integer i = Integer.parseInt( indexOrKey ); path.setLeafNodeIndex( i ); } catch ( NumberFormatException e ) { path.setLeafNodeMapKey( indexOrKey ); } } // match the remaining string tmp = matcher.group( REMAINING_STRING_GROUP ); } else { throw new IllegalArgumentException( "Unable to parse property path " + property ); } } while ( tmp != null ); if ( path.getLeafNode().isIterable() ) { path.addNode( null ); } return path; } }
HV-395 Removing unused Node instance in parseProperty
hibernate-validator/src/main/java/org/hibernate/validator/engine/PathImpl.java
HV-395 Removing unused Node instance in parseProperty
<ide><path>hibernate-validator/src/main/java/org/hibernate/validator/engine/PathImpl.java <ide> <ide> // create the node <ide> String value = matcher.group( PROPERTY_NAME_GROUP ); <del> Node node = path.addNode( value ); <add> path.addNode( value ); <ide> <ide> // is the node indexable <ide> if ( matcher.group( INDEXED_GROUP ) != null ) {
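Illustrative aside, not part of the commit record above: a minimal sketch of how the PathImpl API shown in this record could be exercised. The class and method names are taken from the file contents above; the behavior noted in the comments is an assumption based on reading that code, not verified against a running build.

import org.hibernate.validator.engine.PathImpl;

public class PathImplSketch {
    public static void main(String[] args) {
        // Parse a property path with an indexed (iterable) element.
        PathImpl path = PathImpl.createPathFromString("addresses[0].street");

        // asString() rebuilds the textual form from the parsed nodes;
        // it should round-trip to something like "addresses[0].street".
        System.out.println(path.asString());

        // The empty string is documented above to produce the root path.
        System.out.println(PathImpl.createPathFromString("").isRootPath()); // expected: true
    }
}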
Java
apache-2.0
cc68e242112a94fecac5ba172197f1d3cfd413b5
0
asedunov/intellij-community,asedunov/intellij-community,da1z/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,semonte/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,ibinti/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,signed/intellij-community,allotria/intellij-community,da1z/intellij-community,da1z/intellij-community,apixandru/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,suncycheng/intellij-community,signed/intellij-community,semonte/intellij-community,vvv1559/intellij-community,semonte/intellij-community,allotria/intellij-community,semonte/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,signed/intellij-community,apixandru/intellij-community,xfournet/intellij-community,da1z/intellij-community,xfournet/intellij-community,FHannes/intellij-community,FHannes/intellij-community,da1z/intellij-community,asedunov/intellij-community,allotria/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ibinti/intellij-community,signed/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,ibinti/intellij-community,signed/intellij-community,da1z/intellij-community,ibinti/intellij-community,xfournet/intellij-community,asedunov/intellij-community,asedunov/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,signed/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,ibinti/intellij-community,semonte/intellij-community,signed/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,da1z/intellij-community,vvv1559/intellij-community,da1z/intellij-community,asedunov/intellij-community,ibinti/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,FHannes/intellij-community,semonte/intellij-community,FHannes/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ibinti/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,xfournet/intellij-community,apixandru/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,allotria/intellij-community,FHannes/intellij-community,asedunov/intellij-community,apixandru/intellij-community,apixandru/intellij-community,xfournet/intellij-community,ibinti/intellij-community,signed/intellij-community,semonte/intellij-community,asedunov/intellij-community,asedunov/intellij-
community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,signed/intellij-community,signed/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,FHannes/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,allotria/intellij-community,semonte/intellij-community,apixandru/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,da1z/intellij-community,suncycheng/intellij-community,da1z/intellij-community,apixandru/intellij-community,vvv1559/intellij-community
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.packageDependencies.ui; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleType; import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService; import com.intellij.openapi.util.Comparing; import com.intellij.pom.NavigatableWithText; import com.intellij.psi.PsiFile; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.util.Set; public class ModuleNode extends PackageDependenciesNode implements NavigatableWithText { private final @NotNull Module myModule; public ModuleNode(@NotNull Module module) { super(module.getProject()); myModule = module; } @Override public void fillFiles(Set<PsiFile> set, boolean recursively) { super.fillFiles(set, recursively); int count = getChildCount(); for (int i = 0; i < count; i++) { PackageDependenciesNode child = (PackageDependenciesNode)getChildAt(i); child.fillFiles(set, true); } } @Override public boolean canNavigate() { return !myModule.isDisposed(); } @Override public boolean canNavigateToSource() { return false; } @Override public void navigate(boolean focus) { ProjectSettingsService.getInstance(myModule.getProject()).openModuleSettings(myModule); } @Override public Icon getIcon() { return myModule.isDisposed() ? super.getIcon() : ModuleType.get(myModule).getIcon(); } @Override public String toString() { return myModule.getName(); } public String getModuleName() { return myModule.getName(); } @NotNull public Module getModule() { return myModule; } @Override public int getWeight() { return 1; } public boolean equals(Object o) { if (isEquals()){ return super.equals(o); } if (this == o) return true; if (!(o instanceof ModuleNode)) return false; final ModuleNode moduleNode = (ModuleNode)o; return Comparing.equal(myModule, moduleNode.myModule); } @Override public int hashCode() { return myModule.hashCode(); } @Override public boolean isValid() { return !myModule.isDisposed(); } @Override public String getNavigateActionText(boolean focusEditor) { return ActionsBundle.message("action.ModuleSettings.navigate"); } }
platform/lang-impl/src/com/intellij/packageDependencies/ui/ModuleNode.java
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.packageDependencies.ui; import com.intellij.analysis.AnalysisScopeBundle; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleType; import com.intellij.openapi.roots.ui.configuration.ProjectSettingsService; import com.intellij.openapi.util.Comparing; import com.intellij.pom.NavigatableWithText; import com.intellij.psi.PsiFile; import javax.swing.*; import java.util.Set; public class ModuleNode extends PackageDependenciesNode implements NavigatableWithText { private final Module myModule; public ModuleNode(Module module) { super(module.getProject()); myModule = module; } @Override public void fillFiles(Set<PsiFile> set, boolean recursively) { super.fillFiles(set, recursively); int count = getChildCount(); for (int i = 0; i < count; i++) { PackageDependenciesNode child = (PackageDependenciesNode)getChildAt(i); child.fillFiles(set, true); } } @Override public boolean canNavigate() { return myModule != null && !myModule.isDisposed(); } @Override public boolean canNavigateToSource() { return false; } @Override public void navigate(boolean focus) { ProjectSettingsService.getInstance(myModule.getProject()).openModuleSettings(myModule); } @Override public Icon getIcon() { return myModule == null || myModule.isDisposed() ? super.getIcon() : ModuleType.get(myModule).getIcon(); } @Override public String toString() { return myModule == null ? AnalysisScopeBundle.message("unknown.node.text") : myModule.getName(); } public String getModuleName() { return myModule.getName(); } public Module getModule() { return myModule; } @Override public int getWeight() { return 1; } public boolean equals(Object o) { if (isEquals()){ return super.equals(o); } if (this == o) return true; if (!(o instanceof ModuleNode)) return false; final ModuleNode moduleNode = (ModuleNode)o; return Comparing.equal(myModule, moduleNode.myModule); } @Override public int hashCode() { return myModule == null ? 0 : myModule.hashCode(); } @Override public boolean isValid() { return myModule != null && !myModule.isDisposed(); } @Override public String getNavigateActionText(boolean focusEditor) { return ActionsBundle.message("action.ModuleSettings.navigate"); } }
ModuleNode: notnullification and simplification
platform/lang-impl/src/com/intellij/packageDependencies/ui/ModuleNode.java
ModuleNode: notnullification and simplification
<ide><path>latform/lang-impl/src/com/intellij/packageDependencies/ui/ModuleNode.java <ide> */ <ide> package com.intellij.packageDependencies.ui; <ide> <del>import com.intellij.analysis.AnalysisScopeBundle; <ide> import com.intellij.idea.ActionsBundle; <ide> import com.intellij.openapi.module.Module; <ide> import com.intellij.openapi.module.ModuleType; <ide> import com.intellij.openapi.util.Comparing; <ide> import com.intellij.pom.NavigatableWithText; <ide> import com.intellij.psi.PsiFile; <add>import org.jetbrains.annotations.NotNull; <ide> <ide> import javax.swing.*; <ide> import java.util.Set; <ide> <ide> public class ModuleNode extends PackageDependenciesNode implements NavigatableWithText { <del> private final Module myModule; <add> private final @NotNull Module myModule; <ide> <del> public ModuleNode(Module module) { <add> public ModuleNode(@NotNull Module module) { <ide> super(module.getProject()); <ide> myModule = module; <ide> } <ide> <ide> @Override <ide> public boolean canNavigate() { <del> return myModule != null && !myModule.isDisposed(); <add> return !myModule.isDisposed(); <ide> } <ide> <ide> @Override <ide> <ide> @Override <ide> public Icon getIcon() { <del> return myModule == null || myModule.isDisposed() ? super.getIcon() : ModuleType.get(myModule).getIcon(); <add> return myModule.isDisposed() ? super.getIcon() : ModuleType.get(myModule).getIcon(); <ide> } <ide> <ide> @Override <ide> public String toString() { <del> return myModule == null ? AnalysisScopeBundle.message("unknown.node.text") : myModule.getName(); <add> return myModule.getName(); <ide> } <ide> <ide> public String getModuleName() { <ide> return myModule.getName(); <ide> } <ide> <add> @NotNull <ide> public Module getModule() { <ide> return myModule; <ide> } <ide> <ide> @Override <ide> public int hashCode() { <del> return myModule == null ? 0 : myModule.hashCode(); <add> return myModule.hashCode(); <ide> } <ide> <ide> @Override <ide> public boolean isValid() { <del> return myModule != null && !myModule.isDisposed(); <add> return !myModule.isDisposed(); <ide> } <ide> <ide> @Override
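Illustrative aside, not part of the commit record above: the pattern applied in this ModuleNode change is that once a constructor parameter carries @NotNull, the null guards on the stored field become dead code and can be removed. A hypothetical minimal example of the same before/after shape (the Holder class is invented for illustration and is not taken from the IntelliJ sources):

import org.jetbrains.annotations.NotNull;

class Holder {
    // Annotated as never-null, mirroring the myModule field in the diff above.
    private final @NotNull String name;

    Holder(@NotNull String name) {
        this.name = name; // callers are contractually required to pass a non-null value
    }

    int nameHash() {
        // No "name == null ? 0 : name.hashCode()" guard is needed once the contract is @NotNull.
        return name.hashCode();
    }
}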
Java
apache-2.0
b1eaf7f5bbc3d6d72b86c3a71ec2240f2e47b622
0
dimagi/commcare-core,dimagi/commcare,dimagi/commcare-core,dimagi/commcare,dimagi/commcare-core,dimagi/commcare
package org.commcare.suite.model; import org.javarosa.core.model.condition.EvaluationContext; import org.javarosa.core.util.externalizable.DeserializationException; import org.javarosa.core.util.externalizable.ExtUtil; import org.javarosa.core.util.externalizable.ExtWrapList; import org.javarosa.core.util.externalizable.Externalizable; import org.javarosa.core.util.externalizable.PrototypeFactory; import org.javarosa.xpath.XPathParseTool; import org.javarosa.xpath.expr.XPathExpression; import org.javarosa.xpath.parser.XPathSyntaxException; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.Vector; import java.util.concurrent.Callable; import io.reactivex.Single; /** * A Menu definition describes the structure of how * actions should be provided to the user in a CommCare * application. * * @author ctsims */ public class Menu implements Externalizable, MenuDisplayable { public static final String ROOT_MENU_ID = "root"; private DisplayUnit display; private Vector<String> commandIds; private String[] commandExprs; private String id; private String root; private String rawRelevance; private String style; private XPathExpression relevance; /** * Serialization only!!! */ public Menu() { } public Menu(String id, String root, String rawRelevance, XPathExpression relevance, DisplayUnit display, Vector<String> commandIds, String[] commandExprs, String style) { this.id = id; this.root = root; this.rawRelevance = rawRelevance; this.relevance = relevance; this.display = display; this.commandIds = commandIds; this.commandExprs = commandExprs; this.style = style; } /** * @return The ID of what menu an option to navigate to * this menu should be displayed in. */ public String getRoot() { return root; } /** * @return A Text which should be displayed to the user as * the action which will display this menu. */ public Text getName() { return display.getText(); } /** * @return The ID of this menu. <p>If this value is "root" * many CommCare applications will support displaying this * menu's options at the app home screen</p> */ public String getId() { return id; } /** * @return A parsed XPath expression that determines * whether or not to display this menu. */ public XPathExpression getMenuRelevance() throws XPathSyntaxException { if (relevance == null && rawRelevance != null) { relevance = XPathParseTool.parseXPath(rawRelevance); } return relevance; } /** * @return A string representing an XPath expression to determine * whether or not to display this menu. */ public String getMenuRelevanceRaw() { return rawRelevance; } /** * @return The ID of what command actions should be available * when viewing this menu. */ public Vector<String> getCommandIds() { //UNSAFE! UNSAFE! return commandIds; } public XPathExpression getCommandRelevance(int index) throws XPathSyntaxException { //Don't cache this for now at all return commandExprs[index] == null ? null : XPathParseTool.parseXPath(commandExprs[index]); } /** * @return an optional string indicating how this menu wants to display its items */ public String getStyle() { return style; } /** * @param index the * @return the raw xpath string for a relevant condition (if available). 
Largely for * displaying to the user in the event of a failure */ public String getCommandRelevanceRaw(int index) { return commandExprs[index]; } @Override public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException { id = ExtUtil.nullIfEmpty(ExtUtil.readString(in)); root = ExtUtil.readString(in); rawRelevance = ExtUtil.nullIfEmpty(ExtUtil.readString(in)); display = (DisplayUnit)ExtUtil.read(in, DisplayUnit.class, pf); commandIds = (Vector<String>)ExtUtil.read(in, new ExtWrapList(String.class), pf); commandExprs = new String[ExtUtil.readInt(in)]; for (int i = 0; i < commandExprs.length; ++i) { if (ExtUtil.readBool(in)) { commandExprs[i] = ExtUtil.readString(in); } } style = ExtUtil.nullIfEmpty(ExtUtil.readString(in)); } @Override public void writeExternal(DataOutputStream out) throws IOException { ExtUtil.writeString(out, ExtUtil.emptyIfNull(id)); ExtUtil.writeString(out, root); ExtUtil.writeString(out, ExtUtil.emptyIfNull(rawRelevance)); ExtUtil.write(out, display); ExtUtil.write(out, new ExtWrapList(commandIds)); ExtUtil.writeNumeric(out, commandExprs.length); for (String commandExpr : commandExprs) { if (commandExpr == null) { ExtUtil.writeBool(out, false); } else { ExtUtil.writeBool(out, true); ExtUtil.writeString(out, commandExpr); } } ExtUtil.writeString(out, ExtUtil.emptyIfNull(style)); } @Override public String getImageURI() { if (display.getImageURI() == null) { return null; } return display.getImageURI().evaluate(); } @Override public String getAudioURI() { if (display.getAudioURI() == null) { return null; } return display.getAudioURI().evaluate(); } @Override public String getDisplayText() { if (display.getText() == null) { return null; } return display.getText().evaluate(); } @Override public Single<String> getTextForBadge(final EvaluationContext ec) { if (display.getBadgeText() == null) { return Single.just(""); } return display.getBadgeText().getDisposableSingleForEvaluation(ec); } @Override public Text getRawBadgeTextObject() { return display.getBadgeText(); } @Override public String getCommandID() { return id; } // unsafe! assumes that xpath expressions evaluate properly... public int indexOfCommand(String cmd) { return commandIds.indexOf(cmd); } @Override public String toString() { return "Menu with id " + this.getId() + " display text " + this.getDisplayText(); } }
src/main/java/org/commcare/suite/model/Menu.java
package org.commcare.suite.model; import org.javarosa.core.model.condition.EvaluationContext; import org.javarosa.core.util.externalizable.DeserializationException; import org.javarosa.core.util.externalizable.ExtUtil; import org.javarosa.core.util.externalizable.ExtWrapList; import org.javarosa.core.util.externalizable.Externalizable; import org.javarosa.core.util.externalizable.PrototypeFactory; import org.javarosa.xpath.XPathParseTool; import org.javarosa.xpath.expr.XPathExpression; import org.javarosa.xpath.parser.XPathSyntaxException; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.Vector; import java.util.concurrent.Callable; import io.reactivex.Single; /** * A Menu definition describes the structure of how * actions should be provided to the user in a CommCare * application. * * @author ctsims */ public class Menu implements Externalizable, MenuDisplayable { public static final String ROOT_MENU_ID = "root"; private DisplayUnit display; private Vector<String> commandIds; private String[] commandExprs; private String id; private String root; private String rawRelevance; private String style; private XPathExpression relevance; /** * Serialization only!!! */ public Menu() { } public Menu(String id, String root, String rawRelevance, XPathExpression relevance, DisplayUnit display, Vector<String> commandIds, String[] commandExprs, String style) { this.id = id; this.root = root; this.rawRelevance = rawRelevance; this.relevance = relevance; this.display = display; this.commandIds = commandIds; this.commandExprs = commandExprs; this.style = style; } /** * @return The ID of what menu an option to navigate to * this menu should be displayed in. */ public String getRoot() { return root; } /** * @return A Text which should be displayed to the user as * the action which will display this menu. */ public Text getName() { return display.getText(); } /** * @return The ID of this menu. <p>If this value is "root" * many CommCare applications will support displaying this * menu's options at the app home screen</p> */ public String getId() { return id; } /** * @return A parsed XPath expression that determines * whether or not to display this menu. */ public XPathExpression getMenuRelevance() throws XPathSyntaxException { if (relevance == null && rawRelevance != null) { relevance = XPathParseTool.parseXPath(rawRelevance); } return relevance; } /** * @return A string representing an XPath expression to determine * whether or not to display this menu. */ public String getMenuRelevanceRaw() { return rawRelevance; } /** * @return The ID of what command actions should be available * when viewing this menu. */ public Vector<String> getCommandIds() { //UNSAFE! UNSAFE! return commandIds; } public XPathExpression getCommandRelevance(int index) throws XPathSyntaxException { //Don't cache this for now at all return commandExprs[index] == null ? null : XPathParseTool.parseXPath(commandExprs[index]); } /** * @return an optional string indicating how this menu wants to display its items */ public String getStyle() { return style; } /** * @param index the * @return the raw xpath string for a relevant condition (if available). 
Largely for * displaying to the user in the event of a failure */ public String getCommandRelevanceRaw(int index) { return commandExprs[index]; } @Override public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException { id = ExtUtil.nullIfEmpty(ExtUtil.readString(in)); root = ExtUtil.readString(in); rawRelevance = ExtUtil.nullIfEmpty(ExtUtil.readString(in)); display = (DisplayUnit)ExtUtil.read(in, DisplayUnit.class, pf); commandIds = (Vector<String>)ExtUtil.read(in, new ExtWrapList(String.class), pf); commandExprs = new String[ExtUtil.readInt(in)]; for (int i = 0; i < commandExprs.length; ++i) { if (ExtUtil.readBool(in)) { commandExprs[i] = ExtUtil.readString(in); } } style = ExtUtil.nullIfEmpty(ExtUtil.readString(in)); } @Override public void writeExternal(DataOutputStream out) throws IOException { ExtUtil.writeString(out, ExtUtil.emptyIfNull(id)); ExtUtil.writeString(out, root); ExtUtil.writeString(out, ExtUtil.emptyIfNull(rawRelevance)); ExtUtil.write(out, display); ExtUtil.write(out, new ExtWrapList(commandIds)); ExtUtil.writeNumeric(out, commandExprs.length); for (String commandExpr : commandExprs) { if (commandExpr == null) { ExtUtil.writeBool(out, false); } else { ExtUtil.writeBool(out, true); ExtUtil.writeString(out, commandExpr); } } ExtUtil.writeString(out, ExtUtil.emptyIfNull(style)); } @Override public String getImageURI() { if (display.getImageURI() == null) { return null; } return display.getImageURI().evaluate(); } @Override public String getAudioURI() { if (display.getAudioURI() == null) { return null; } return display.getAudioURI().evaluate(); } @Override public String getDisplayText() { if (display.getText() == null) { return null; } return display.getText().evaluate(); } @Override public Single<String> getTextForBadge(EvaluationContext ec) { if (display.getBadgeText() == null) { return Single.just(""); } return display.getBadgeText().getDisposableSingleForEvaluation(ec); } @Override public Text getRawBadgeTextObject() { return display.getBadgeText(); } @Override public String getCommandID() { return id; } // unsafe! assumes that xpath expressions evaluate properly... public int indexOfCommand(String cmd) { return commandIds.indexOf(cmd); } @Override public String toString() { return "Menu with id " + this.getId() + " display text " + this.getDisplayText(); } }
Fix Menu whitespace, final
src/main/java/org/commcare/suite/model/Menu.java
Fix Menu whitespace, final
<ide><path>src/main/java/org/commcare/suite/model/Menu.java <ide> } <ide> <ide> @Override <del> public Single<String> getTextForBadge(EvaluationContext ec) { <add> public Single<String> getTextForBadge(final EvaluationContext ec) { <ide> if (display.getBadgeText() == null) { <ide> return Single.just(""); <ide> } <add> <ide> return display.getBadgeText().getDisposableSingleForEvaluation(ec); <ide> } <del> <del> <add> <ide> @Override <ide> public Text getRawBadgeTextObject() { <ide> return display.getBadgeText();
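The Menu record above exposes lazily parsed XPath relevance conditions (getMenuRelevance, getCommandRelevance) alongside their raw strings for error reporting. As a rough illustration of that API, here is a hedged sketch that builds a Menu directly and reads the conditions back; the ids (m0, m0-f0, m0-f1), the XPath strings, and the null DisplayUnit are made-up placeholders, and it assumes the commcare-core and javarosa classes from the record are on the classpath:

import java.util.Vector;

import org.commcare.suite.model.Menu;
import org.javarosa.xpath.parser.XPathSyntaxException;

public class MenuRelevanceSketch {
    public static void main(String[] args) {
        Vector<String> commandIds = new Vector<>();
        commandIds.add("m0-f0");
        commandIds.add("m0-f1");

        // One command gated by an XPath condition, one always shown (null expression) -- placeholder values.
        String[] commandExprs = { "true() = true()", null };

        // display is left null because only the relevance accessors are exercised here.
        Menu menu = new Menu("m0", Menu.ROOT_MENU_ID, "true()", null, null, commandIds, commandExprs, null);

        try {
            // Raw strings are parsed lazily; a malformed expression surfaces as XPathSyntaxException.
            System.out.println("menu relevance: " + menu.getMenuRelevance());
            for (int i = 0; i < commandIds.size(); i++) {
                System.out.println(commandIds.get(i) + " -> " + menu.getCommandRelevance(i));
            }
        } catch (XPathSyntaxException e) {
            // The raw strings stay available for reporting parse failures to the user.
            System.err.println("bad expression: " + menu.getMenuRelevanceRaw());
        }
    }
}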
Java
apache-2.0
0e4875dd1c8c1cc8af33b34f67c8ba022a1c28be
0
hbs/warp10-platform,hbs/warp10-platform,hbs/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,StevenLeRoux/warp10-platform,hbs/warp10-platform,cityzendata/warp10-platform,cityzendata/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,StevenLeRoux/warp10-platform
// // Copyright 2018 Cityzen Data // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package io.warp10.hadoop; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.sort.SortConfig; import com.fasterxml.sort.std.RawTextLineWriter; import com.fasterxml.sort.std.TextFileSorter; import io.warp10.WarpURLEncoder; import io.warp10.continuum.TextFileShuffler; import io.warp10.continuum.store.Constants; public class Warp10InputFormat extends InputFormat<Text, BytesWritable> { private static final Logger LOG = LoggerFactory.getLogger(Warp10InputFormat.class); /** * URL of split endpoint */ public static final String PROPERTY_WARP10_SPLITS_ENDPOINT = "warp10.splits.endpoint"; /** * List of fallback fetchers */ public static final String PROPERTY_WARP10_FETCHER_FALLBACKS = "warp10.fetcher.fallbacks"; /** * Boolean indicating whether to use the fetchers or only the fallbacks */ public static final String PROPERTY_WARP10_FETCHER_FALLBACKSONLY = "warp10.fetcher.fallbacksonly"; /** * Protocol to use when contacting the fetcher (http or https), defaults to http */ public static final String PROPERTY_WARP10_FETCHER_PROTOCOL = "warp10.fetcher.protocol"; public static final String DEFAULT_WARP10_FETCHER_PROTOCOL = "http"; /** * Port to use when contacting the fetcher, defaults to 8881 */ public static final String PROPERTY_WARP10_FETCHER_PORT = "warp10.fetcher.port"; public static final String DEFAULT_WARP10_FETCHER_PORT = "8881"; /** * URL Path of the fetcher, defaults to "/api/v0/sfetch" */ public static final String PROPERTY_WARP10_FETCHER_PATH = "warp10.fetcher.path"; public static final String DEFAULT_WARP10_FETCHER_PATH = Constants.API_ENDPOINT_SFETCH; /** * GTS Selector */ public static final String PROPERTY_WARP10_SPLITS_SELECTOR = "warp10.splits.selector"; /** * Token to use for selecting GTS */ public static final String PROPERTY_WARP10_SPLITS_TOKEN = "warp10.splits.token"; /** * Connection timeout to the splits and sfetch endpoints, defaults to 10000 ms */ public static final String PROPERTY_WARP10_HTTP_CONNECT_TIMEOUT = "warp10.http.connect.timeout"; public static final String 
DEFAULT_WARP10_HTTP_CONNECT_TIMEOUT = "10000"; /** * Read timeout to the splits and sfetch endpoints, defaults to 10000 ms */ public static final String PROPERTY_WARP10_HTTP_READ_TIMEOUT = "warp10.http.read.timeout"; public static final String DEFAULT_WARP10_HTTP_READ_TIMEOUT = "10000"; /** * Now parameter */ public static final String PROPERTY_WARP10_FETCH_NOW = "warp10.fetch.now"; /** * Timespan parameter */ public static final String PROPERTY_WARP10_FETCH_TIMESPAN = "warp10.fetch.timespan"; /** * Maximum number of splits to combined into a single split */ public static final String PROPERTY_WARP10_MAX_COMBINED_SPLITS = "warp10.max.combined.splits"; /** * Maximum number of splits we wish to produce */ public static final String PROPERTY_WARP10_MAX_SPLITS = "warp10.max.splits"; /** * Default Now HTTP Header */ public static final String HTTP_HEADER_NOW_HEADER_DEFAULT = "X-Warp10-Now"; /** * Default Timespan HTTP Header */ public static final String HTTP_HEADER_TIMESPAN_HEADER_DEFAULT = "X-Warp10-Timespan"; /** * Suffix for the properties */ private final String suffix; public Warp10InputFormat(String suffix) { if (null != suffix) { this.suffix = "." + suffix; } else { this.suffix = ""; } } public Warp10InputFormat() { this.suffix = ""; } @Override public List<InputSplit> getSplits(JobContext context) throws IOException { List<String> fallbacks = new ArrayList<>(); boolean fallbacksonly = "true".equals(getProperty(context, PROPERTY_WARP10_FETCHER_FALLBACKSONLY)); if (null != getProperty(context, PROPERTY_WARP10_FETCHER_FALLBACKS)) { String[] servers = getProperty(context, PROPERTY_WARP10_FETCHER_FALLBACKS).split(","); for (String server: servers) { fallbacks.add(server); } } int connectTimeout = Integer.valueOf(getProperty(context, Warp10InputFormat.PROPERTY_WARP10_HTTP_CONNECT_TIMEOUT, Warp10InputFormat.DEFAULT_WARP10_HTTP_CONNECT_TIMEOUT)); int readTimeout = Integer.valueOf(getProperty(context, Warp10InputFormat.PROPERTY_WARP10_HTTP_READ_TIMEOUT, Warp10InputFormat.DEFAULT_WARP10_HTTP_READ_TIMEOUT)); // // Issue a call to the /splits endpoint to retrieve the individual splits // String splitEndpoint = getProperty(context, PROPERTY_WARP10_SPLITS_ENDPOINT); StringBuilder sb = new StringBuilder(); sb.append(splitEndpoint); sb.append("?"); sb.append(Constants.HTTP_PARAM_SELECTOR); sb.append("="); sb.append(WarpURLEncoder.encode(getProperty(context, PROPERTY_WARP10_SPLITS_SELECTOR), "UTF-8")); sb.append("&"); sb.append(Constants.HTTP_PARAM_TOKEN); sb.append("="); sb.append(getProperty(context, PROPERTY_WARP10_SPLITS_TOKEN)); URL url = new URL(sb.toString()); LOG.info("Get splits from: " + splitEndpoint); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setConnectTimeout(connectTimeout); conn.setReadTimeout(readTimeout); conn.setDoInput(true); InputStream in = conn.getInputStream(); File infile = File.createTempFile("Warp10InputFormat-", "-in"); infile.deleteOnExit(); OutputStream out = new FileOutputStream(infile); BufferedReader br = new BufferedReader(new InputStreamReader(in)); PrintWriter pw = new PrintWriter(out); int count = 0; Map<String,AtomicInteger> perServer = new HashMap<String,AtomicInteger>(); while(true) { String line = br.readLine(); if (null == line) { break; } // Count the total number of splits count++; // Count the number of splits per RS String server = line.substring(0, line.indexOf(' ')); AtomicInteger scount = perServer.get(server); if (null == scount) { scount = new AtomicInteger(0); perServer.put(server, scount); } scount.addAndGet(1); 
pw.println(line); } pw.flush(); out.close(); br.close(); in.close(); conn.disconnect(); TextFileSorter sorter = new TextFileSorter(new SortConfig().withMaxMemoryUsage(64000000L)); File outfile = File.createTempFile("Warp10InputFormat-", "-out"); outfile.deleteOnExit(); in = new FileInputStream(infile); out = new FileOutputStream(outfile); try { sorter.sort(new TextFileShuffler.CustomReader<byte[]>(in), new RawTextLineWriter(out)); } finally { out.close(); in.close(); sorter.close(); infile.delete(); } // // Do a naive split generation, using the RegionServer as the ideal fetcher. We will need // to adapt this later so we ventilate the splits on all fetchers if we notice that a single // fetcher gets pounded too much // // Compute the maximum number of splits which can be combined given the number of servers (RS) int avgsplitcount = (int) Math.ceil((double) count / perServer.size()); if (null != getProperty(context, PROPERTY_WARP10_MAX_SPLITS)) { int maxsplitavg = (int) Math.ceil((double) count / Integer.parseInt(getProperty(context, PROPERTY_WARP10_MAX_SPLITS))); avgsplitcount = maxsplitavg; } if (null != getProperty(context, PROPERTY_WARP10_MAX_COMBINED_SPLITS)) { int maxcombined = Integer.parseInt(getProperty(context, PROPERTY_WARP10_MAX_COMBINED_SPLITS)); if (maxcombined < avgsplitcount) { avgsplitcount = maxcombined; } } List<InputSplit> splits = new ArrayList<>(); br = new BufferedReader(new FileReader(outfile)); Warp10InputSplit split = new Warp10InputSplit(); String lastserver = null; int subsplits = 0; while(true) { String line = br.readLine(); if (null == line) { break; } String[] tokens = line.split("\\s+"); // If the server changed or we've reached the maximum split size, flush the current split. if (null != lastserver && !lastserver.equals(tokens[0]) || avgsplitcount == subsplits) { // Add fallback fetchers, shuffle them first Collections.shuffle(fallbacks); for (String fallback: fallbacks) { split.addFetcher(fallback); } splits.add(split.build()); split = new Warp10InputSplit(); subsplits = 0; } subsplits++; split.addEntry(fallbacksonly ? null : tokens[0], tokens[2]); } br.close(); outfile.delete(); if (subsplits > 0) { // Add fallback fetchers, shuffle them first Collections.shuffle(fallbacks); for (String fallback: fallbacks) { split.addFetcher(fallback); } splits.add(split.build()); } LOG.info("Number of splits: " + splits.size()); return splits; // // // // We know we have 'count' splits to combine and we know how many splits are hosted on each // // server // // // // // Compute the average number of splits per combined split // int avgsplitcount = (int) Math.ceil((double) count / numSplits); // // // Compute the average number of splits per server // int avgsplitpersrv = (int) Math.ceil((double) count / perServer.size()); // // // // // Determine the number of ideal (i.e. 
associated with the right server) combined splits // // per server // // // // Map<String,AtomicInteger> idealcount = new HashMap<String,AtomicInteger>(); // // for (Entry<String,AtomicInteger> entry: perServer.entrySet()) { // idealcount.put(entry.getKey(), new AtomicInteger(Math.min((int) Math.ceil(entry.getValue().doubleValue() / avgsplitcount), avgsplitpersrv))); // } // // // // // Compute the number of available slots per server after the maximum ideal combined splits // // have been allocated // // // // Map<String,AtomicInteger> freeslots = new HashMap<String,AtomicInteger>(); // // for (Entry<String,AtomicInteger> entry: perServer.entrySet()) { // if (entry.getValue().get() < avgsplitpersrv) { // freeslots.put(entry.getKey(), new AtomicInteger(avgsplitpersrv - entry.getValue().get())); // } // } // // // // // Generate splits // // We know the input file is sorted by server then region // // // // br = new BufferedReader(new FileReader(outfile)); // // Warp10InputSplit split = null; // String lastsrv = null; // int subsplits = 0; // // List<Warp10InputSplit> splits = new ArrayList<Warp10InputSplit>(); // // while(true) { // String line = br.readLine(); // // if (null == line) { // break; // } // // // Split line into tokens // String[] tokens = line.split("\\s+"); // // // If the srv changed, flush the split // if (null != lastsrv && lastsrv != tokens[0]) { // splits.add(split); // split = null; // } // // // if (null == splitsrv) { // splitsrv = tokens[0]; // // Check if 'splitsrv' can host more splits // if (idealcount.get(splitsrv)) // } // // Emit current split if it is full // // if (avgsplitcount == subsplits) { // // } // } // // System.out.println("NSPLITS=" + count); // // System.out.println("AVG=" + avgsplit); // System.out.println(perServer); // return null; } @Override public RecordReader<Text, BytesWritable> createRecordReader(InputSplit split, TaskAttemptContext context) throws IOException { if (!(split instanceof Warp10InputSplit)) { throw new IOException("Invalid split type."); } return new Warp10RecordReader(this.suffix); } private String getProperty(JobContext context, String property) { return getProperty(context, property, null); } private String getProperty(JobContext context, String property, String defaultValue) { return getProperty(context.getConfiguration(), this.suffix, property, defaultValue); } public static String getProperty(Configuration conf, String suffix, String property, String defaultValue) { if (null != conf.get(property + suffix)) { return conf.get(property + suffix); } else if (null != conf.get(property)) { return conf.get(property); } else if (null != defaultValue) { return defaultValue; } else { return null; } } }
warp10/src/main/java/io/warp10/hadoop/Warp10InputFormat.java
package io.warp10.hadoop; import io.warp10.WarpURLEncoder; import io.warp10.continuum.TextFileShuffler; import io.warp10.continuum.store.Constants; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.Collections; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Text; import com.fasterxml.sort.SortConfig; import com.fasterxml.sort.std.RawTextLineWriter; import com.fasterxml.sort.std.TextFileSorter; import org.apache.hadoop.mapreduce.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Warp10InputFormat extends InputFormat<Text, BytesWritable> { private static final Logger LOG = LoggerFactory.getLogger(Warp10InputFormat.class); /** * URL of split endpoint */ public static final String PROPERTY_WARP10_SPLITS_ENDPOINT = "warp10.splits.endpoint"; /** * List of fallback fetchers */ public static final String PROPERTY_WARP10_FETCHER_FALLBACKS = "warp10.fetcher.fallbacks"; /** * Boolean indicating whether to use the fetchers or only the fallbacks */ public static final String PROPERTY_WARP10_FETCHER_FALLBACKSONLY = "warp10.fetcher.fallbacksonly"; /** * Protocol to use when contacting the fetcher (http or https), defaults to http */ public static final String PROPERTY_WARP10_FETCHER_PROTOCOL = "warp10.fetcher.protocol"; public static final String DEFAULT_WARP10_FETCHER_PROTOCOL = "http"; /** * Port to use when contacting the fetcher, defaults to 8881 */ public static final String PROPERTY_WARP10_FETCHER_PORT = "warp10.fetcher.port"; public static final String DEFAULT_WARP10_FETCHER_PORT = "8881"; /** * URL Path of the fetcher, defaults to "/api/v0/sfetch" */ public static final String PROPERTY_WARP10_FETCHER_PATH = "warp10.fetcher.path"; public static final String DEFAULT_WARP10_FETCHER_PATH = Constants.API_ENDPOINT_SFETCH; /** * GTS Selector */ public static final String PROPERTY_WARP10_SPLITS_SELECTOR = "warp10.splits.selector"; /** * Token to use for selecting GTS */ public static final String PROPERTY_WARP10_SPLITS_TOKEN = "warp10.splits.token"; /** * Connection timeout to the splits and sfetch endpoints, defaults to 10000 ms */ public static final String PROPERTY_WARP10_HTTP_CONNECT_TIMEOUT = "warp10.http.connect.timeout"; public static final String DEFAULT_WARP10_HTTP_CONNECT_TIMEOUT = "10000"; /** * Read timeout to the splits and sfetch endpoints, defaults to 10000 ms */ public static final String PROPERTY_WARP10_HTTP_READ_TIMEOUT = "warp10.http.read.timeout"; public static final String DEFAULT_WARP10_HTTP_READ_TIMEOUT = "10000"; /** * Now parameter */ public static final String PROPERTY_WARP10_FETCH_NOW = "warp10.fetch.now"; /** * Timespan parameter */ public static final String PROPERTY_WARP10_FETCH_TIMESPAN = "warp10.fetch.timespan"; /** * Maximum number of splits to combined into a single split */ public static final String PROPERTY_WARP10_MAX_COMBINED_SPLITS = "warp10.max.combined.splits"; /** * Maximum number of splits we wish to produce */ public static final String PROPERTY_WARP10_MAX_SPLITS = "warp10.max.splits"; /** * Default Now HTTP Header */ public static final String 
HTTP_HEADER_NOW_HEADER_DEFAULT = "X-Warp10-Now"; /** * Default Timespan HTTP Header */ public static final String HTTP_HEADER_TIMESPAN_HEADER_DEFAULT = "X-Warp10-Timespan"; /** * Suffix for the properties */ private final String suffix; public Warp10InputFormat(String suffix) { if (null != suffix) { this.suffix = "." + suffix; } else { this.suffix = ""; } } public Warp10InputFormat() { this.suffix = ""; } @Override public List<InputSplit> getSplits(JobContext context) throws IOException { List<String> fallbacks = new ArrayList<>(); boolean fallbacksonly = "true".equals(getProperty(context, PROPERTY_WARP10_FETCHER_FALLBACKSONLY)); if (null != getProperty(context, PROPERTY_WARP10_FETCHER_FALLBACKS)) { String[] servers = getProperty(context, PROPERTY_WARP10_FETCHER_FALLBACKS).split(","); for (String server: servers) { fallbacks.add(server); } } int connectTimeout = Integer.valueOf(getProperty(context, Warp10InputFormat.PROPERTY_WARP10_HTTP_CONNECT_TIMEOUT, Warp10InputFormat.DEFAULT_WARP10_HTTP_CONNECT_TIMEOUT)); int readTimeout = Integer.valueOf(getProperty(context, Warp10InputFormat.PROPERTY_WARP10_HTTP_READ_TIMEOUT, Warp10InputFormat.DEFAULT_WARP10_HTTP_READ_TIMEOUT)); // // Issue a call to the /splits endpoint to retrieve the individual splits // String splitEndpoint = getProperty(context, PROPERTY_WARP10_SPLITS_ENDPOINT); StringBuilder sb = new StringBuilder(); sb.append(splitEndpoint); sb.append("?"); sb.append(Constants.HTTP_PARAM_SELECTOR); sb.append("="); sb.append(WarpURLEncoder.encode(getProperty(context, PROPERTY_WARP10_SPLITS_SELECTOR), "UTF-8")); sb.append("&"); sb.append(Constants.HTTP_PARAM_TOKEN); sb.append("="); sb.append(getProperty(context, PROPERTY_WARP10_SPLITS_TOKEN)); URL url = new URL(sb.toString()); LOG.info("Get splits from: " + splitEndpoint); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setConnectTimeout(connectTimeout); conn.setReadTimeout(readTimeout); conn.setDoInput(true); InputStream in = conn.getInputStream(); File infile = File.createTempFile("Warp10InputFormat-", "-in"); infile.deleteOnExit(); OutputStream out = new FileOutputStream(infile); BufferedReader br = new BufferedReader(new InputStreamReader(in)); PrintWriter pw = new PrintWriter(out); int count = 0; Map<String,AtomicInteger> perServer = new HashMap<String,AtomicInteger>(); while(true) { String line = br.readLine(); if (null == line) { break; } // Count the total number of splits count++; // Count the number of splits per RS String server = line.substring(0, line.indexOf(' ')); AtomicInteger scount = perServer.get(server); if (null == scount) { scount = new AtomicInteger(0); perServer.put(server, scount); } scount.addAndGet(1); pw.println(line); } pw.flush(); out.close(); br.close(); in.close(); conn.disconnect(); TextFileSorter sorter = new TextFileSorter(new SortConfig().withMaxMemoryUsage(64000000L)); File outfile = File.createTempFile("Warp10InputFormat-", "-out"); outfile.deleteOnExit(); in = new FileInputStream(infile); out = new FileOutputStream(outfile); try { sorter.sort(new TextFileShuffler.CustomReader<byte[]>(in), new RawTextLineWriter(out)); } finally { out.close(); in.close(); sorter.close(); infile.delete(); } // // Do a naive split generation, using the RegionServer as the ideal fetcher. 
We will need // to adapt this later so we ventilate the splits on all fetchers if we notice that a single // fetcher gets pounded too much // // Compute the maximum number of splits which can be combined given the number of servers (RS) int avgsplitcount = (int) Math.ceil((double) count / perServer.size()); if (null != getProperty(context, PROPERTY_WARP10_MAX_SPLITS)) { int maxsplitavg = (int) Math.ceil((double) count / Integer.parseInt(getProperty(context, PROPERTY_WARP10_MAX_SPLITS))); avgsplitcount = maxsplitavg; } if (null != getProperty(context, PROPERTY_WARP10_MAX_COMBINED_SPLITS)) { int maxcombined = Integer.parseInt(getProperty(context, PROPERTY_WARP10_MAX_COMBINED_SPLITS)); if (maxcombined < avgsplitcount) { avgsplitcount = maxcombined; } } List<InputSplit> splits = new ArrayList<>(); br = new BufferedReader(new FileReader(outfile)); Warp10InputSplit split = new Warp10InputSplit(); String lastserver = null; int subsplits = 0; while(true) { String line = br.readLine(); if (null == line) { break; } String[] tokens = line.split("\\s+"); // If the server changed or we've reached the maximum split size, flush the current split. if (null != lastserver && !lastserver.equals(tokens[0]) || avgsplitcount == subsplits) { // Add fallback fetchers, shuffle them first Collections.shuffle(fallbacks); for (String fallback: fallbacks) { split.addFetcher(fallback); } splits.add(split.build()); split = new Warp10InputSplit(); subsplits = 0; } subsplits++; split.addEntry(fallbacksonly ? null : tokens[0], tokens[2]); } br.close(); outfile.delete(); if (subsplits > 0) { // Add fallback fetchers, shuffle them first Collections.shuffle(fallbacks); for (String fallback: fallbacks) { split.addFetcher(fallback); } splits.add(split.build()); } LOG.info("Number of splits: " + splits.size()); return splits; // // // // We know we have 'count' splits to combine and we know how many splits are hosted on each // // server // // // // // Compute the average number of splits per combined split // int avgsplitcount = (int) Math.ceil((double) count / numSplits); // // // Compute the average number of splits per server // int avgsplitpersrv = (int) Math.ceil((double) count / perServer.size()); // // // // // Determine the number of ideal (i.e. 
associated with the right server) combined splits // // per server // // // // Map<String,AtomicInteger> idealcount = new HashMap<String,AtomicInteger>(); // // for (Entry<String,AtomicInteger> entry: perServer.entrySet()) { // idealcount.put(entry.getKey(), new AtomicInteger(Math.min((int) Math.ceil(entry.getValue().doubleValue() / avgsplitcount), avgsplitpersrv))); // } // // // // // Compute the number of available slots per server after the maximum ideal combined splits // // have been allocated // // // // Map<String,AtomicInteger> freeslots = new HashMap<String,AtomicInteger>(); // // for (Entry<String,AtomicInteger> entry: perServer.entrySet()) { // if (entry.getValue().get() < avgsplitpersrv) { // freeslots.put(entry.getKey(), new AtomicInteger(avgsplitpersrv - entry.getValue().get())); // } // } // // // // // Generate splits // // We know the input file is sorted by server then region // // // // br = new BufferedReader(new FileReader(outfile)); // // Warp10InputSplit split = null; // String lastsrv = null; // int subsplits = 0; // // List<Warp10InputSplit> splits = new ArrayList<Warp10InputSplit>(); // // while(true) { // String line = br.readLine(); // // if (null == line) { // break; // } // // // Split line into tokens // String[] tokens = line.split("\\s+"); // // // If the srv changed, flush the split // if (null != lastsrv && lastsrv != tokens[0]) { // splits.add(split); // split = null; // } // // // if (null == splitsrv) { // splitsrv = tokens[0]; // // Check if 'splitsrv' can host more splits // if (idealcount.get(splitsrv)) // } // // Emit current split if it is full // // if (avgsplitcount == subsplits) { // // } // } // // System.out.println("NSPLITS=" + count); // // System.out.println("AVG=" + avgsplit); // System.out.println(perServer); // return null; } @Override public RecordReader<Text, BytesWritable> createRecordReader(InputSplit split, TaskAttemptContext context) throws IOException { if (!(split instanceof Warp10InputSplit)) { throw new IOException("Invalid split type."); } return new Warp10RecordReader(this.suffix); } private String getProperty(JobContext context, String property) { return getProperty(context, property, null); } private String getProperty(JobContext context, String property, String defaultValue) { if (null != context.getConfiguration().get(property + suffix)) { return context.getConfiguration().get(property + suffix); } else if (null != context.getConfiguration().get(property)) { return context.getConfiguration().get(property); } else if (null != defaultValue) { return defaultValue; } else { return null; } } }
Modified getProperty to use a Configuration
warp10/src/main/java/io/warp10/hadoop/Warp10InputFormat.java
Modified getProperty to use a Configuration
<ide><path>arp10/src/main/java/io/warp10/hadoop/Warp10InputFormat.java <add>// <add>// Copyright 2018 Cityzen Data <add>// <add>// Licensed under the Apache License, Version 2.0 (the "License"); <add>// you may not use this file except in compliance with the License. <add>// You may obtain a copy of the License at <add>// <add>// http://www.apache.org/licenses/LICENSE-2.0 <add>// <add>// Unless required by applicable law or agreed to in writing, software <add>// distributed under the License is distributed on an "AS IS" BASIS, <add>// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add>// See the License for the specific language governing permissions and <add>// limitations under the License. <add>// <add> <ide> package io.warp10.hadoop; <del> <del>import io.warp10.WarpURLEncoder; <del>import io.warp10.continuum.TextFileShuffler; <del>import io.warp10.continuum.store.Constants; <ide> <ide> import java.io.BufferedReader; <ide> import java.io.File; <ide> import java.net.HttpURLConnection; <ide> import java.net.URL; <ide> import java.util.ArrayList; <add>import java.util.Collections; <add>import java.util.HashMap; <ide> import java.util.List; <ide> import java.util.Map; <del>import java.util.HashMap; <del>import java.util.Collections; <ide> import java.util.concurrent.atomic.AtomicInteger; <ide> <add>import org.apache.hadoop.conf.Configuration; <ide> import org.apache.hadoop.io.BytesWritable; <ide> import org.apache.hadoop.io.Text; <add>import org.apache.hadoop.mapreduce.InputFormat; <add>import org.apache.hadoop.mapreduce.InputSplit; <add>import org.apache.hadoop.mapreduce.JobContext; <add>import org.apache.hadoop.mapreduce.RecordReader; <add>import org.apache.hadoop.mapreduce.TaskAttemptContext; <add>import org.slf4j.Logger; <add>import org.slf4j.LoggerFactory; <ide> <ide> import com.fasterxml.sort.SortConfig; <ide> import com.fasterxml.sort.std.RawTextLineWriter; <ide> import com.fasterxml.sort.std.TextFileSorter; <ide> <del>import org.apache.hadoop.mapreduce.*; <del>import org.slf4j.Logger; <del>import org.slf4j.LoggerFactory; <add>import io.warp10.WarpURLEncoder; <add>import io.warp10.continuum.TextFileShuffler; <add>import io.warp10.continuum.store.Constants; <ide> <ide> public class Warp10InputFormat extends InputFormat<Text, BytesWritable> { <ide> <ide> private String getProperty(JobContext context, String property) { <ide> return getProperty(context, property, null); <ide> } <del> <add> <ide> private String getProperty(JobContext context, String property, String defaultValue) { <del> if (null != context.getConfiguration().get(property + suffix)) { <del> return context.getConfiguration().get(property + suffix); <del> } else if (null != context.getConfiguration().get(property)) { <del> return context.getConfiguration().get(property); <add> return getProperty(context.getConfiguration(), this.suffix, property, defaultValue); <add> } <add> <add> public static String getProperty(Configuration conf, String suffix, String property, String defaultValue) { <add> if (null != conf.get(property + suffix)) { <add> return conf.get(property + suffix); <add> } else if (null != conf.get(property)) { <add> return conf.get(property); <ide> } else if (null != defaultValue) { <ide> return defaultValue; <ide> } else {
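The commit above ("Modified getProperty to use a Configuration") turns the per-job property lookup into a static helper, Warp10InputFormat.getProperty(Configuration, suffix, property, defaultValue), which prefers a suffixed key, then the bare key, then a caller-supplied default. Below is a hedged sketch of that resolution order; the ".left"/".right" suffixes and port values are invented for illustration, and it assumes hadoop-common plus the class from this record are on the classpath:

import org.apache.hadoop.conf.Configuration;

import io.warp10.hadoop.Warp10InputFormat;

public class SuffixedPropertySketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();

        // A job-wide value plus an override scoped to one input via a suffix (values are placeholders).
        conf.set("warp10.fetcher.port", "8881");
        conf.set("warp10.fetcher.port.left", "9991");

        // Suffixed key wins when present...
        System.out.println(Warp10InputFormat.getProperty(conf, ".left", "warp10.fetcher.port", "8881"));   // 9991
        // ...otherwise the bare key is used...
        System.out.println(Warp10InputFormat.getProperty(conf, ".right", "warp10.fetcher.port", "8881"));  // 8881
        // ...and the supplied default is the last resort.
        System.out.println(Warp10InputFormat.getProperty(conf, ".left", "warp10.http.read.timeout", "10000")); // 10000
    }
}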
Java
apache-2.0
dd434c440a28276ff40ec5b51f6443bd169fd362
0
bergerch/library,bft-smart/library,bergerch/library,bft-smart/library
/** Copyright (c) 2007-2013 Alysson Bessani, Eduardo Alchieri, Paulo Sousa, and the authors indicated in the @author tags Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package bftsmart.tom.util; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.PublicKey; import java.security.Signature; import java.security.SignatureException; import java.util.Arrays; import bftsmart.reconfiguration.ViewController; public class TOMUtil { //private static final int BENCHMARK_PERIOD = 10000; //some message types public static final int RR_REQUEST = 0; public static final int RR_REPLY = 1; public static final int RR_DELIVERED = 2; public static final int STOP = 3; public static final int STOPDATA = 4; public static final int SYNC = 5; public static final int SM_REQUEST = 6; public static final int SM_REPLY = 7; public static final int SM_ASK_INITIAL = 11; public static final int SM_REPLY_INITIAL = 12; public static final int TRIGGER_LC_LOCALLY = 8; public static final int TRIGGER_SM_LOCALLY = 9; private static int signatureSize = -1; public static int getSignatureSize(ViewController controller) { if (signatureSize > 0) { return signatureSize; } byte[] signature = signMessage(controller.getStaticConf().getRSAPrivateKey(), "a".getBytes()); if (signature != null) { signatureSize = signature.length; } return signatureSize; } //******* EDUARDO BEGIN **************// public static byte[] getBytes(Object o) { ByteArrayOutputStream bOut = new ByteArrayOutputStream(); ObjectOutputStream obOut = null; try { obOut = new ObjectOutputStream(bOut); obOut.writeObject(o); obOut.flush(); bOut.flush(); obOut.close(); bOut.close(); } catch (IOException ex) { ex.printStackTrace(); return null; } return bOut.toByteArray(); } public static Object getObject(byte[] b) { if (b == null) return null; ByteArrayInputStream bInp = new ByteArrayInputStream(b); try { ObjectInputStream obInp = new ObjectInputStream(bInp); Object ret = obInp.readObject(); obInp.close(); bInp.close(); return ret; } catch (Exception ex) { return null; } } //******* EDUARDO END **************// /** * Sign a message. * * @param key the private key to be used to generate the signature * @param message the message to be signed * @return the signature */ public static byte[] signMessage(PrivateKey key, byte[] message) { byte[] result = null; try { Signature signatureEngine = Signature.getInstance("SHA1withRSA"); signatureEngine.initSign(key); signatureEngine.update(message); result = signatureEngine.sign(); } catch (Exception e) { e.printStackTrace(); } return result; } /** * Verify the signature of a message. 
* * @param key the public key to be used to verify the signature * @param message the signed message * @param signature the signature to be verified * @return true if the signature is valid, false otherwise */ public static boolean verifySignature(PublicKey key, byte[] message, byte[] signature) { boolean result = false; try { Signature signatureEngine = Signature.getInstance("SHA1withRSA"); signatureEngine.initVerify(key); result = verifySignature(signatureEngine, message, signature); } catch (Exception e) { e.printStackTrace(); } return result; } /** * Verify the signature of a message. * * @param initializedSignatureEngine a signature engine already initialized * for verification * @param message the signed message * @param signature the signature to be verified * @return true if the signature is valid, false otherwise */ public static boolean verifySignature(Signature initializedSignatureEngine, byte[] message, byte[] signature) throws SignatureException { initializedSignatureEngine.update(message); return initializedSignatureEngine.verify(signature); } public static String byteArrayToString(byte[] b) { String s = ""; for (int i = 0; i < b.length; i++) { s = s + b[i]; } return s; } public static boolean equalsHash(byte[] h1, byte[] h2) { return Arrays.equals(h2, h2); } public static final byte[] computeHash(byte[] data) { byte[] result = null; try { MessageDigest md = MessageDigest.getInstance("MD5"); result = md.digest(data); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } // TODO: shouldn't it be SHA? return result; } }
src/bftsmart/tom/util/TOMUtil.java
/** Copyright (c) 2007-2013 Alysson Bessani, Eduardo Alchieri, Paulo Sousa, and the authors indicated in the @author tags Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package bftsmart.tom.util; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.PublicKey; import java.security.Signature; import java.security.SignatureException; import java.util.Arrays; import bftsmart.reconfiguration.ViewController; public class TOMUtil { //private static final int BENCHMARK_PERIOD = 10000; //some message types public static final int RR_REQUEST = 0; public static final int RR_REPLY = 1; public static final int RR_DELIVERED = 2; public static final int STOP = 3; public static final int STOPDATA = 4; public static final int SYNC = 5; public static final int SM_REQUEST = 6; public static final int SM_REPLY = 7; public static final int SM_ASK_INITIAL = 11; public static final int SM_REPLY_INITIAL = 12; public static final int TRIGGER_LC_LOCALLY = 8; public static final int TRIGGER_SM_LOCALLY = 9; private static int signatureSize = -1; public static int getSignatureSize(ViewController controller) { if (signatureSize > 0) { return signatureSize; } byte[] signature = signMessage(controller.getStaticConf().getRSAPrivateKey(), "a".getBytes()); if (signature != null) { signatureSize = signature.length; } return signatureSize; } //******* EDUARDO BEGIN **************// public static byte[] getBytes(Object o) { ByteArrayOutputStream bOut = new ByteArrayOutputStream(); ObjectOutputStream obOut = null; try { obOut = new ObjectOutputStream(bOut); obOut.writeObject(o); obOut.flush(); bOut.flush(); obOut.close(); bOut.close(); } catch (IOException ex) { ex.printStackTrace(); return null; } return bOut.toByteArray(); } public static Object getObject(byte[] b) { if (b == null) return null; ByteArrayInputStream bInp = new ByteArrayInputStream(b); try { ObjectInputStream obInp = new ObjectInputStream(bInp); Object ret = obInp.readObject(); obInp.close(); bInp.close(); return ret; } catch (Exception ex) { ex.printStackTrace(); return null; } } //******* EDUARDO END **************// /** * Sign a message. * * @param key the private key to be used to generate the signature * @param message the message to be signed * @return the signature */ public static byte[] signMessage(PrivateKey key, byte[] message) { byte[] result = null; try { Signature signatureEngine = Signature.getInstance("SHA1withRSA"); signatureEngine.initSign(key); signatureEngine.update(message); result = signatureEngine.sign(); } catch (Exception e) { e.printStackTrace(); } return result; } /** * Verify the signature of a message. 
* * @param key the public key to be used to verify the signature * @param message the signed message * @param signature the signature to be verified * @return true if the signature is valid, false otherwise */ public static boolean verifySignature(PublicKey key, byte[] message, byte[] signature) { boolean result = false; try { Signature signatureEngine = Signature.getInstance("SHA1withRSA"); signatureEngine.initVerify(key); result = verifySignature(signatureEngine, message, signature); } catch (Exception e) { e.printStackTrace(); } return result; } /** * Verify the signature of a message. * * @param initializedSignatureEngine a signature engine already initialized * for verification * @param message the signed message * @param signature the signature to be verified * @return true if the signature is valid, false otherwise */ public static boolean verifySignature(Signature initializedSignatureEngine, byte[] message, byte[] signature) throws SignatureException { initializedSignatureEngine.update(message); return initializedSignatureEngine.verify(signature); } public static String byteArrayToString(byte[] b) { String s = ""; for (int i = 0; i < b.length; i++) { s = s + b[i]; } return s; } public static boolean equalsHash(byte[] h1, byte[] h2) { return Arrays.equals(h2, h2); } public static final byte[] computeHash(byte[] data) { byte[] result = null; try { MessageDigest md = MessageDigest.getInstance("MD5"); result = md.digest(data); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } // TODO: shouldn't it be SHA? return result; } }
Method getObject from TOMUtil does not print out any exception anymore. If no object can be deserialized, the method simply returns null, which ought to be enough.
src/bftsmart/tom/util/TOMUtil.java
Method getObject from TOMUtil does not print out any exception anymore. If no object can be deserialized, the method simply returns null, which ought to be enough.
<ide><path>src/bftsmart/tom/util/TOMUtil.java <ide> bInp.close(); <ide> return ret; <ide> } catch (Exception ex) { <del> ex.printStackTrace(); <ide> return null; <ide> } <ide> }
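TOMUtil in the record above wraps SHA1withRSA signing and verification (signMessage/verifySignature). The hedged sketch below reproduces that round trip using only JDK classes so it runs standalone; the throwaway in-memory key pair stands in for the RSA keys that BFT-SMaRt actually loads through its ViewController configuration:

import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.Signature;

public class SignatureRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // Throwaway RSA key pair; the library itself reads configured keys instead.
        KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
        kpg.initialize(2048);
        KeyPair kp = kpg.generateKeyPair();

        byte[] message = "a".getBytes();

        // Sign, mirroring TOMUtil.signMessage: SHA1withRSA over the raw message bytes.
        Signature signer = Signature.getInstance("SHA1withRSA");
        signer.initSign(kp.getPrivate());
        signer.update(message);
        byte[] signature = signer.sign();

        // Verify, mirroring TOMUtil.verifySignature.
        Signature verifier = Signature.getInstance("SHA1withRSA");
        verifier.initVerify(kp.getPublic());
        verifier.update(message);
        System.out.println("signature valid: " + verifier.verify(signature));
    }
}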
Java
apache-2.0
error: pathspec 'addressbook-web-tests/src/test/java/ru/stqa/javaCursBarancev/addressbook/tests/KontactModificationTests.java' did not match any file(s) known to git
b034406ff9e97e3b5684297934e31328b811c091
1
yafarkina/java_curs_Barancev
package ru.stqa.javaCursBarancev.addressbook.tests; import org.testng.annotations.Test; /** * Created by yafar_000 on 15.12.2016. */ public class KontactModificationTests extends TestBase{ @Test public void testKontactModification() { } }
addressbook-web-tests/src/test/java/ru/stqa/javaCursBarancev/addressbook/tests/KontactModificationTests.java
Create the KontactModificationTests class
addressbook-web-tests/src/test/java/ru/stqa/javaCursBarancev/addressbook/tests/KontactModificationTests.java
Create the KontactModificationTests class
<ide><path>addressbook-web-tests/src/test/java/ru/stqa/javaCursBarancev/addressbook/tests/KontactModificationTests.java <add>package ru.stqa.javaCursBarancev.addressbook.tests; <add> <add>import org.testng.annotations.Test; <add> <add>/** <add> * Created by yafar_000 on 15.12.2016. <add> */ <add>public class KontactModificationTests extends TestBase{ <add> <add> @Test <add> <add> public void testKontactModification() { <add> <add> } <add>}
Java
mit
error: pathspec 'proj0/tests/SimpleTests.java' did not match any file(s) known to git
b550ecc22fa92064590975292071cd1279c07492
1
hardfist/CS61b,hardfist/skeleton,hardfist/CS61b,hardfist/skeleton,hardfist/CS61b,hardfist/skeleton,hardfist/CS61b,hardfist/skeleton,hardfist/CS61b,hardfist/skeleton
import org.junit.Test; import static org.junit.Assert.*; /** * @author Josh Hug */ public class SimpleTests { @Test public void testCoreFunctionality() { System.out.println("Test 1: See comments in SimpleTests.java for description."); Board b = new Board(true); // Place a shield at position 0, 0. Piece shield = new Piece(true, b, 0, 0, "shield"); b.place(shield, 0, 0); // Verify that it can be selected. assertTrue(b.canSelect(0, 0)); b.select(0, 0); // Verify that the blank square to the top right of it can be selected. assertTrue(b.canSelect(1, 1)); b.select(1, 1); // Ensure that we can end turn after movement. assertTrue(b.canEndTurn()); } @Test public void testThatSelectAndCanSelectDontCallEachOther() { System.out.println("Test 2: See comments in SimpleTests.java for description."); SpyBoard b = new SpyBoard(true); // Place a shield at position 0, 0. Piece shield = new SpyPiece(true, b, 0, 0, "shield"); b.place(shield, 0, 0); assertTrue(b.canSelect(0, 0)); // Assert that canSelect has been called once, but // select has not been called. assertEquals(0, b.selectCount); assertEquals(1, b.canSelectCount); b.select(0, 0); // Assert that select and canSelect have been // called exactly once. assertEquals(1, b.selectCount); assertEquals(1, b.canSelectCount); assertTrue(b.canSelect(1, 1)); assertEquals(1, b.selectCount); assertEquals(2, b.canSelectCount); b.select(1, 1); assertEquals(2, b.selectCount); assertEquals(2, b.canSelectCount); } @Test public void testThatSelectCallsMove() { System.out.println("Test 3: See comments in SimpleTests.java for description."); Board b = new Board(true); // Place a shield at position 0, 0. SpyPiece shield = new SpyPiece(true, b, 0, 0, "shield"); b.place(shield, 0, 0); b.select(0, 0); assertEquals(0, shield.moveCount); b.select(1, 1); assertEquals(1, shield.moveCount); } public static void main(String[] args) { System.out.println("This file tests common misconceptions" + " as observed by Josh (and any TAs who edit this file)."); System.out.println("If you fail any tests, " + " start by fixing test 1, then 2, and so on."); System.out.println("Due to JUnit limitations, they may run out of order."); jh61b.junit.textui.runClasses(SimpleTests.class); } /* Special class that spies on your game. */ public static class SpyBoard extends Board { public static int selectCount = 0; public static int canSelectCount = 0; public SpyBoard(boolean blank) { super(blank); } @Override public void select(int x, int y) { selectCount += 1; super.select(x, y); } @Override public boolean canSelect(int x, int y) { canSelectCount += 1; return super.canSelect(x, y); } } /* Special class that spies on your game. */ public static class SpyPiece extends Piece { public static int moveCount = 0; public SpyPiece(boolean isFire, Board b, int x, int y, String type) { super(isFire, b, x, y, type); } @Override public void move(int x, int y) { moveCount += 1; super.move(x, y); } } }
proj0/tests/SimpleTests.java
Added tests for common misconceptions
proj0/tests/SimpleTests.java
Added tests for common misconceptions
<ide><path>roj0/tests/SimpleTests.java <add>import org.junit.Test; <add>import static org.junit.Assert.*; <add>/** <add> * @author Josh Hug <add> */ <add> <add>public class SimpleTests { <add> <add> @Test <add> public void testCoreFunctionality() { <add> System.out.println("Test 1: See comments in SimpleTests.java for description."); <add> Board b = new Board(true); <add> <add> // Place a shield at position 0, 0. <add> Piece shield = new Piece(true, b, 0, 0, "shield"); <add> b.place(shield, 0, 0); <add> <add> // Verify that it can be selected. <add> assertTrue(b.canSelect(0, 0)); <add> b.select(0, 0); <add> <add> // Verify that the blank square to the top right of it can be selected. <add> assertTrue(b.canSelect(1, 1)); <add> b.select(1, 1); <add> <add> // Ensure that we can end turn after movement. <add> assertTrue(b.canEndTurn()); <add> } <add> <add> <add> @Test <add> public void testThatSelectAndCanSelectDontCallEachOther() { <add> System.out.println("Test 2: See comments in SimpleTests.java for description."); <add> <add> SpyBoard b = new SpyBoard(true); <add> <add> // Place a shield at position 0, 0. <add> Piece shield = new SpyPiece(true, b, 0, 0, "shield"); <add> b.place(shield, 0, 0); <add> <add> assertTrue(b.canSelect(0, 0)); <add> <add> // Assert that canSelect has been called once, but <add> // select has not been called. <add> assertEquals(0, b.selectCount); <add> assertEquals(1, b.canSelectCount); <add> <add> b.select(0, 0); <add> <add> // Assert that select and canSelect have been <add> // called exactly once. <add> assertEquals(1, b.selectCount); <add> assertEquals(1, b.canSelectCount); <add> <add> assertTrue(b.canSelect(1, 1)); <add> <add> assertEquals(1, b.selectCount); <add> assertEquals(2, b.canSelectCount); <add> <add> b.select(1, 1); <add> <add> assertEquals(2, b.selectCount); <add> assertEquals(2, b.canSelectCount); <add> } <add> <add> <add> @Test <add> public void testThatSelectCallsMove() { <add> System.out.println("Test 3: See comments in SimpleTests.java for description."); <add> <add> Board b = new Board(true); <add> <add> // Place a shield at position 0, 0. <add> SpyPiece shield = new SpyPiece(true, b, 0, 0, "shield"); <add> b.place(shield, 0, 0); <add> <add> <add> b.select(0, 0); <add> assertEquals(0, shield.moveCount); <add> b.select(1, 1); <add> assertEquals(1, shield.moveCount); <add> } <add> <add> <add> public static void main(String[] args) { <add> System.out.println("This file tests common misconceptions" + <add> " as observed by Josh (and any TAs who edit this file)."); <add> System.out.println("If you fail any tests, " + <add> " start by fixing test 1, then 2, and so on."); <add> System.out.println("Due to JUnit limitations, they may run out of order."); <add> <add> jh61b.junit.textui.runClasses(SimpleTests.class); <add> } <add> <add> /* Special class that spies on your game. */ <add> public static class SpyBoard extends Board { <add> public static int selectCount = 0; <add> public static int canSelectCount = 0; <add> <add> public SpyBoard(boolean blank) { <add> super(blank); <add> } <add> <add> @Override <add> public void select(int x, int y) { <add> selectCount += 1; <add> super.select(x, y); <add> } <add> <add> @Override <add> public boolean canSelect(int x, int y) { <add> canSelectCount += 1; <add> return super.canSelect(x, y); <add> } <add> } <add> <add> /* Special class that spies on your game. 
*/ <add> public static class SpyPiece extends Piece { <add> public static int moveCount = 0; <add> <add> public SpyPiece(boolean isFire, Board b, int x, int y, String type) { <add> super(isFire, b, x, y, type); <add> } <add> <add> @Override <add> public void move(int x, int y) { <add> moveCount += 1; <add> super.move(x, y); <add> } <add> } <add> <add> <add>}
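The SimpleTests record above relies on "spy" subclasses (SpyBoard, SpyPiece) that count how often select/canSelect/move are invoked before delegating to the real implementation. A minimal, self-contained sketch of that counting-spy pattern follows; the Counter class is a made-up stand-in rather than part of the course skeleton, and JUnit 4 is assumed:

import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class SpyPatternSketch {
    /** Made-up class standing in for the object under observation. */
    static class Counter {
        int value = 0;
        void increment() { value += 1; }
    }

    /** Spy subclass: records each call, then delegates to the real behavior. */
    static class SpyCounter extends Counter {
        int incrementCalls = 0;
        @Override
        void increment() {
            incrementCalls += 1;
            super.increment();
        }
    }

    @Test
    public void spyCountsDelegatedCalls() {
        SpyCounter c = new SpyCounter();
        c.increment();
        c.increment();
        assertEquals(2, c.incrementCalls); // the override saw both calls
        assertEquals(2, c.value);          // and the real behavior still ran
    }
}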
Java
agpl-3.0
01870f5259bf338593ebd670e805f1d0d1b14f61
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
37ddf67e-2e60-11e5-9284-b827eb9e62be
hello.java
37d89378-2e60-11e5-9284-b827eb9e62be
37ddf67e-2e60-11e5-9284-b827eb9e62be
hello.java
37ddf67e-2e60-11e5-9284-b827eb9e62be
<ide><path>hello.java <del>37d89378-2e60-11e5-9284-b827eb9e62be <add>37ddf67e-2e60-11e5-9284-b827eb9e62be
JavaScript
bsd-3-clause
a2fcdcc6e49450f10e01a496843ac297637bfb4d
0
DouglasHuston/rcl,DouglasHuston/rcl,DouglasHuston/rcl,DouglasHuston/rcl
define([ 'config', 'model', 'mustache', 'text!views/ImportDirectory/URL.html', './DirTypeView', 'typeahead' ], function(config, model, Mustache, template, DirTypeView){ return Backbone.View.extend({ initialize:function(){ // Make it easy to reference this object in event handlers _.bindAll(this, 'changes_listeners', 'handle_404', 'got_url_html', 'got_batchgeo_map_html', 'got_json', 'get_church_dir_from_url', 'get_cgroup', 'save_cgroup_and_dir', 'save_dir', 'parse_json', 'process_batch_geo', 'get_batchgeo_json', 'batchgeo_parse_json') if (typeof window.app.geocoder == 'undefined'){ window.app.geocoder = new google.maps.Geocoder(); } }, render: function(){ $('#steps').html(Mustache.render(template)) this.delegateEvents() // Render typeahead for URL textbox // Render typeahead // TODO: Consider filtering and sorting by levenshtein distance var substringMatcher = function(strs) { return function findMatches(q, cb) { var matches, substringRegex; // an array that will be populated with substring matches matches = []; // regex used to determine if a string contains the substring `q` substrRegex = new RegExp(q, 'i'); // iterate through the pool of strings and for any string that // contains the substring `q`, add it to the `matches` array $.each(strs, function(i, str) { if (substrRegex.test(str)) { // the typeahead jQuery plugin expects suggestions to a // JavaScript object, refer to typeahead docs for more info matches.push({ value: str }); } }); cb(matches); }; }; // Get array of directories from model this.directories = new model.Directories() this.directories.fetch() this.$('#url').typeahead({ hint: true, highlight: true, minLength: 1 },{ name: 'directories', displayKey: 'value', source: substringMatcher(this.directories.each(function(mod){return mod.get('url')})) }) }, events: { 'keyup #url':'get_church_dir_from_url' }, changes_listeners:function(){ // These are the main cases - different types of changes that // need to be handled this.listenTo(this.model,{ 'change':this.handle_404, 'change:get_url_html':this.got_url_html, 'change:get_batchgeo_map_html':this.got_batchgeo_map_html, 'change:get_json':this.got_json }) }, handle_404:function(model, value, options){ // TODO: Don't load the new view yet if the status code returned from the URL is a 404; // Instead after the delay, notify the user with // "Is that URL correct? It returns a '404 page not found' error." }, got_url_html:function(model, value, options){ // Handle directory's first page of content if (this.model.get('url_html') && this.model.get('get_url_html') == 'gotten'){ var html = this.model.get('url_html') // Determine whether this URL's data is HTML, RSS, KML, or JSON, or a 404 page // TODO: Don't load the new view yet if the status code returned from the URL is a 404; // Instead after the delay, notify the user with // "Is that URL correct? It returns a '404 page not found' error." 
if (this.model.get('error_code')){ var msg = 'We got error code ' + this.model.get('error_code') + ' from this URL: ' + this.model.get('url') console.log(msg) // TODO: Report this to the user, including what error code we got } if (html.indexOf("</html>") > -1){ console.log('We got HTML') this.model.set('pagetype', 'html') // Determine what type of directory this is // batchgeo if (this.uses_batch_geo(html) === true && typeof this.model.get('get_batchgeo_map_html') == 'undefined' && typeof this.model.get('get_json') == 'undefined'){ this.process_batch_geo(html) }else{ // TODO: If the other form fields are empty, // auto-populate them with info from this // directory's cgroup to help the user // TODO: Maybe only display those fields after // the URL is filled in // https://blueprints.launchpad.net/reformedchurcheslocator/+spec/display-cgroup-name-and-abbr-fields } } else if (html.indexOf("</rss>") > -1){ console.log('We got RSS') // TODO: Display the right form controls for an RSS feed this.model.set('pagetype', 'rss') } else if (html.indexOf("<kml") > -1){ console.log('We got KML') // TODO: Display the right form controls for a KML feed this.model.set('pagetype', 'kml') } else if (html.indexOf("per = {") === 0){ // batchgeo format console.log('We got batchgeo JSON') this.model.set('pagetype', 'batchgeo_json') } else if (html.indexOf("{") === 0){ console.log('We got JSON') this.model.set('pagetype', 'json') // TODO: The RPCNA's data is in a JSON file in RCL format already at http://reformedpresbyterian.org/congregations/json this.parse_json() } else { // We got an error code console.log('We got an error code from this URL:' + this.model.get('url')) // TODO: Report this to the user, including what error code we got } this.model.set('get_url_html', '') // TODO: Is this the right place to save the dir? // https://blueprints.launchpad.net/reformedchurcheslocator/+spec/decide-whether-to-save-dir //this.model.save({_id:this.model.get('_id')}) } }, got_batchgeo_map_html:function(model, value, options){ // Handle batchgeo map page if (typeof this.model.get('batchgeo_map_html') !== 'undefined' && this.model.get('get_batchgeo_map_html') == 'gotten'){ this.get_batchgeo_json() } }, got_json:function(model, value, options){ // Handle JSON if (typeof this.model.get('json') !== 'undefined' && this.model.get('get_json') == 'gotten'){ var json = this.model.get('json') // Batchgeo JSON if (json.indexOf('per = {') === 0){ this.batchgeo_parse_json() } } }, delay:(function(){ var timer = 0; return function(callback, ms){ clearTimeout (timer); timer = setTimeout(callback, ms); }; })(), get_church_dir_from_url:function(event){ var thiz = this // Delay this to run after typing has stopped for 3 seconds, so we don't // send too many requests this.delay(function(){ // --------- Main code section begins here ---------- // TODO: Start here. Decide what sub-views to create out of this view, and under what conditions // to display them. /* Steps: Event handler: Wait for $('#url') to change Enter URL. 
Event handler: Test URL to see if it is one of the following: Regular HTML page > display directory name, abbreviation inputs Batchgeo URL > import batchgeo JSON > report RPCNA JSON feed > import feed > report Arbitrary RSS or JSON > display field matching interface */ // If we have not already created a directory on this page, create it; else get the existing directory // If the cgroup's associated directory exists in the db, get it var page_url = thiz.$('#url').val() thiz.model = thiz.directories.findWhere({url:page_url}) if (typeof(thiz.model) === 'undefined'){ // The dir hasn't been created yet, so create it thiz.model = new model.Directory({url:page_url}) } // Create changes listeners on this.model thiz.changes_listeners() // thiz.get_cgroup() // TODO: Start here. I can't figure out how to determine that the task has completed. console.log('Start here.') function handle_storage(e) { if (!e) { e = window.event; } console.log(e) } if (window.addEventListener) { window.addEventListener("storage", handle_storage, false); } else { window.attachEvent("onstorage", handle_storage); }; $(window).bind('storage', function (e) { alert('storage changed'); }); hoodie.task('geturlhtml').on('start', function(db, doc){ console.log(doc, 'start'); }) hoodie.task('geturlhtml').on('abort', function(db, doc){ console.log(doc, 'abort'); }) hoodie.task('geturlhtml').on('error', function(db, doc){ console.log(doc, 'error'); }) hoodie.task('geturlhtml').on('success', function(db, doc){ console.log(doc, 'success'); }) hoodie.task('geturlhtml').on('change', function(db, doc){ console.log(doc, 'change'); }) hoodie.store.on('change', function(ev, doc){ console.log(ev, doc)}) hoodie.task('geturlhtml').on('geturlhtml:success', function(task, options){ console.log(task, options) console.log('Task completed!') }) var task = hoodie.task.start('geturlhtml', { url: page_url }) task.done(function(task){ // Add url_html to thiz.model, and save thiz.model thiz.model.set('url_html', task.html) thiz.model.save() console.log(task.html, task.status_code) console.log('Logged task to console.') }).fail(function(error){ console.log("Couldn't get the url_html from this URL: ", error) }) // hoodie.get_url_html(page_url).then(function(task){ // // TODO: Add url_html to thiz.model, and save thiz.model // thiz.model.set('url_html', task.html) // thiz.model.save() // console.log(task.html, task.status_code) // console.log('Logged task to console.') // },function(error){ // console.log("Couldn't get the url_html from this URL: ", error) // }) // thiz.model.set('get_url_html', 'requested') // thiz.model.save() // TODO: Don't create the dir if the URL is not valid. // Maybe mark the dir's URL as invalid in the node.js script (by // checking for a 404 response), and/or // just delete the dir from node.js in an asynchronous cleanup task. 
// We wait until later to set get_url_html = 'requested', so as not // to fire that request event twice // TODO: If the other form fields are empty, // auto-populate them with info from this // directory's cgroup to help the user // TODO: Maybe only display those fields after // the URL is filled in // https://blueprints.launchpad.net/reformedchurcheslocator/+spec/display-cgroup-name-and-abbr-fields }, 3000) }, get_cgroup:function(){ console.log('Start here for hoodie integration') var thiz = this // Reset status flag so the status messages will display this.model.set('get_state_url_html', '') var cgroup_name = $('#cgroup_name').val() var abbr = $('#abbreviation').val() // Don't do anything if the CGroup info isn't entered yet if (cgroup_name !== '' && abbr !== ''){ // Check if cgroup already exists in db var search_keys = [cgroup_name, abbr] var attrs = this.model.attributes model.CGroupsByAbbrOrName.get_or_create_one(search_keys, attrs, {success:function(cgroup){ thiz.cgroup = cgroup thiz.save_cgroup_and_dir() }}) } }, save_cgroup_and_dir:function(){ // Save the dir so if the URL has changed in the browser, it gets // updated in the db too this.iterations = 0 // Note this is a recursive function! this.save_dir() }, save_dir:function(){ console.log('Start here') // TODO: Start here. Set up changes listener on this.model to handle responses from node_changes_listener.js // Maybe put the changes listener up in this.get_cgroup() after refactoring this.save_dir() so it uses // that changes listener and so this.iterations isn't needed anymore. // Event: This function gets called when: // Stack (last at top): // save_dir // save_cgroup_and_dir // get_cgroup // get_church_dir_from_url // [URL entered] var thiz = this this.iterations++; // Make this function wait until this rev is not being saved anymore under any other event if (typeof window.app.import_directory_view.rev_currently_being_saved !== 'undefined' && window.app.import_directory_view.rev_currently_being_saved === thiz.model.get('_rev')){ setTimeout(function(){ thiz.save_dir() }, 1000) return; } this.model.fetch({success:function(model, response, options){ var get_url_html = thiz.model.get('get_url_html') // Prevent import from running multiple times simultaneously if (get_url_html != 'getting'){ get_url_html = 'requested' } // Only save this revision if it's not currently being saved already if (typeof window.app.import_directory_view.rev_currently_being_saved === 'undefined' || window.app.import_directory_view.rev_currently_being_saved !== thiz.model.get('_rev')){ // console.log(iterations + ' 196 saving', dir.get('_rev')) // Prevent saving the same revision twice simultaneously if (typeof window.app.import_directory_view.rev_currently_being_saved === 'undefined'){ window.app.import_directory_view.rev_currently_being_saved = thiz.model.get('_rev') } thiz.model.save({ _id:thiz.model.get('_id'), _rev:thiz.model.get('_rev'), url:$('#url').val(), get_url_html:get_url_html }, { success:function(){ // Report that it's OK for other calls to save_dir to run delete window.app.import_directory_view.rev_currently_being_saved // Append dir to CGroup thiz.cgroup.get('directories').add([{_id:thiz.model.get('_id')}]) // Save cgroup to db // TODO: Does the relation appear on the dir in the db also? 
// This will trigger the Node changes listener's response thiz.cgroup.fetch({success:function(){ thiz.cgroup.save({_id:thiz.cgroup.get('_id'),_rev:thiz.cgroup.get('_rev')},{success:function(){ // TODO: This isn't necessary on dirtypes other than HTML // Render DirTypeView $('#steps').hide() thiz.dir_type_view = new DirTypeView({el: '#steps', model: thiz.model}) thiz.dir_type_view.render() $('#steps').fadeIn(2000) }}) }}) }, error:function(model, xhr, options){ console.error('We got the 196 error '+ thiz.iterations) thiz.save_dir() } } ) } }}) }, parse_json:function(){ var thiz = this var json = this.model.get('url_html') // console.log(json) // TODO: This handles the RPCNA data's current format, which does not yet // perfectly match the RCL format. So put this in a conditional if(){} block // to test if this is a JSON feed that has this format: {[]} (no "docs") window.app.json = json // TODO: Remove this when RPCNA's newly-corrected format comes out // TODO: There are 3 Beaver Falls congs in the JSON, but only 1 shows up on the map var new_json = json // Add initial object property .replace(/^{/gm, '{"docs":') // Double-quote values .replace(/:\s*?'(.*?)'\s*?,/gm, function(match, p1){ var output = '' if (p1.indexOf('"') !== 0){ output = p1.replace(/"/g, '\\"') }else{ output = p1 } return ': "' + output + '",' }) // Double-quote property names .replace(/\s*?(\w+?)\s*?:\s*?["']/gm, ' "$1": "') // Convert any remaining single quotes preceding a comma to double quotes .replace(/',/gm, '",') // Add newline after final object in list, and convert final single quote to double .replace(/(.*?)('\s*?})/gm,'$1"\n}') // Escape newlines in values // .replace(/(".*?)\n(?=.*?")/g, '$1\\\\n') // Since the above doesn't seem to work, try a different regex to do the same .replace(/"([\s\S]*?)"/g, function(match, p1, offset, string){ var output = '' if (p1.indexOf("\n") !== -1 || p1.indexOf("\r") !== -1){ if (p1.indexOf("\n") !== -1){ output = p1.replace(/\n/g, '\\\\n') } if (p1.indexOf("\r") !== -1){ output = p1.replace(/\r/g, '\\\\r') } }else{ output = p1 } return '"' + output + '"' }) // Remove comma after last item in list .replace(/}(,)(?![\s\S]*?{)/g, function(match, p1){ return '}' }) // TODO: Put this into the correct template, once we get the state page selector template to stop displaying // for this view $('#steps').append('<div class="status"></div>') function bulksave(congs){ // See if all geocoding requests have finished var geocoding = _.countBy(congs, function(cong){ if (cong.geocoding == 'started'){ return 'count' } }) // Report to the user how many congs are left $('.status').html(geocoding.count + ' congregations left to geocode!') window.app.congs = congs if (geocoding.count === 0 || geocoding.count == thiz.errors){ // Write the JSON to the database // It is easiest to bulk-save using jquery.couch.js rather than Backbone config.db.bulkSave({"docs":congs},{success:function(){ console.log(new Date().getTime() + '\tb: All congs are saved!') }}) } } function geocode(address, congs, index){ var now = new Date().getTime() if (typeof thiz.usecs == 'undefined'){ thiz.usecs = 100 } if (typeof thiz.geocode_end_time !== 'undefined' && (now - thiz.geocode_end_time) > thiz.usecs){ // This line should prevent two delayed geocode requests from running simultaneously thiz.geocode_end_time = now window.app.geocoder.geocode( { 'address': address }, function(results, status) { // console.log(results, status) // TODO: Handle when Google returns multiple possible address matches (results.length > 1, // or 
status == 'ZERO_RESULTS' if (status == google.maps.GeocoderStatus.OK) { var loc = results[0].geometry.location congs[index].loc = [loc.lat(), loc.lng()] // Delay bulkSave until after asynchronous geocoding is done for all congs congs[index].geocoding = 'done' thiz.geocode_end_time = new Date().getTime() bulksave(congs) }else{ // === if we were sending the requests to fast, try this one again and increase the delay if (status == google.maps.GeocoderStatus.OVER_QUERY_LIMIT){ thiz.usecs += 100; setTimeout(function(){ geocode(address, congs, index) },thiz.usecs) }else{ var reason = "Code "+status; var msg = 'address="' + address + '" error=' +reason+ '(usecs='+thiz.usecs+'ms)'; if (typeof thiz.errors == 'undefined'){ thiz.errors = 1 }else{ thiz.errors++ } console.error('Errors: ' + thiz.errors, msg) } } }) }else{ // Wait to avoid Google throttling the geocode requests (for there // being too many per second, as indicated by the 'OVER_QUERY_LIMIT' error code) if (typeof thiz.geocode_end_time == 'undefined'){ // Set the first geocode_end_time thiz.geocode_end_time = now } setTimeout(function(){ geocode(address, congs, index) },thiz.usecs) } } var congs = JSON.parse(new_json).docs _.each(congs, function(cong, index, list){ // TODO: Record cgroup id for this directory, by appending it to the list congs[index].cgroups = [thiz.model.get('cgroup')] // TODO: Record the denomination abbreviation for other denominations here too // Get it from the cgroup.abbr congs[index].denomination_abbr = 'RPCNA' congs[index].collection = 'cong' if (cong.lat === '' || cong.lng === ''){ // Geocode the cong and put geocode in object for geocouch // Note this is limited to 2500 requests per day congs[index].geocoding = 'started' // TODO: Refactor this into a general function // Pick the meeting_address[1|2] which contains a number, else just use meeting_address1 var address_line = '' if (cong.meeting_address1.search(/\d/) !== -1){ address_line = cong.meeting_address1 } else if (cong.meeting_address2.search(/\d/) !== -1){ address_line = cong.meeting_address2 }else{ address_line = cong.meeting_address1 } var address = address_line + ', ' + (cong.meeting_city?cong.meeting_city: (cong.mailing_city?cong.mailing_city:'')) + ', ' + (cong.meeting_state?cong.meeting_state: (cong.mailing_state?cong.mailing_state:'')) + ' ' + (cong.meeting_zip?cong.meeting_zip: (cong.mailing_zip?cong.mailing_zip:'')) // TODO: Consider how to refactor this to geocode only one cong at a time. // Currently the code tries all at once, then when it realizes it's getting errors, // it throttles back, but the effect of that throttling is probably to throttle back too // much, so the whole geocoding batch runs much slower than it has to. So the way to refactor // this is to keep track of which cong is being handled, then only once one cong is geocoded, // move on to the next cong. 
geocode(address, congs, index) } else { // Use existing geocode data congs[index].loc = [cong.lat, cong.lng] } }) }, // TODO: Consider moving these into a library uses_batch_geo:function(html){ return ( html.indexOf('https://batchgeo.com/map/') !== -1 ) }, process_batch_geo:function(html){ // Get the batchgeo map URL out of the HTML var map_url = html.match(/(https:\/\/batchgeo.com\/map\/.+?)['"]{1}/i)[1] // Get the batchgeo JSON URL out of the map's HTML console.log(new Date().getTime() + '\tb: ' + map_url) this.model.set('pagetype', 'batchgeo') this.model.set('batchgeo_map_url', map_url) this.model.set('get_batchgeo_map_html', 'requested') this.model.save() }, get_batchgeo_json:function(){ var thiz = this this.model.fetch({success:function(){ thiz.model.unset('get_batchgeo_map_html') var html = thiz.model.get('batchgeo_map_html') // console.log(html) var json_url = html.match(/(https:\/\/.+?.cloudfront.net\/map\/json\/.+?)['"]{1}/i)[1] console.log(new Date().getTime() + '\tb: get_json for ' + json_url) // TODO: Request that the node script get this URL's contents thiz.model.set('json_url', json_url) thiz.model.set('get_json', 'requested') thiz.model.save() }}) }, batchgeo_parse_json:function(){ this.model.unset('get_json') // The PCA has a KML file at http://batchgeo.com/map/kml/c78fa06a3fbdf2642daae48ca62bbb82 // Some (all?) data is also in JSON at http://static.batchgeo.com/map/json/c78fa06a3fbdf2642daae48ca62bbb82/1357687276 // The PCA directory's main HTML URL is http://www.pcaac.org/church-search/ // After trimming off the non-JSON, the cong details are in the obj.mapRS array // You can pretty-print it at http://www.cerny-online.com/cerny.js/demos/json-pretty-printing // Its format is as follows: // per = {mapRS:[{ // "accuracy":"ROOFTOP", // "postal":"30097", // mailing_zip? // "a":"9500 Medlock Bridge Road", // address // "c":"Johns Creek", // city // "s":"GA", // state // "z":"30097", // meeting_zip? // "t":"Perimeter Church", // name // "u":"www.Perimeter.org", // url // "i":"", // ? // "g":" ", // ? // "e":"[email protected]", // email // "lt":34.013179067701, // lat // "ln":-84.191637606647, // lng // "d":"<div><span class=\"l\">Church Phone:<\/span>&nbsp;678-405-2000<\/div><div><span class=\"l\">Pastor:<\/span>&nbsp;Rev. Randy Pope<\/div><div><span class=\"l\">Presbytery:<\/span>&nbsp;Metro Atlanta<\/div>", // phone, pastor_name, presbytery_name // "addr":"9500 Medlock Bridge Road Johns Creek GA 30097", // mailing_address (full, needs to be parsed) // "l":"9500 Medlock Bridge Road<br \/>Johns Creek, GA 30097", // mailing_address_formatted, easier to parse // "clr":"red" // }]} // Get the relevant JSON in a variable // This regex took forever // var json = this.model.get('json').replace(/.*?"mapRS":/, '{"congs":').replace(/,"dataRS":.*/, '}') // So although this could be unsafe, it is expedient! 
eval(this.model.get('json')) var congs = per.mapRS // Convert the JSON's fieldnames to RCL fieldnames var replacements = [ { old:'postal', new:'mailing_zip' }, { old:'a', new:'meeting_address1' }, { old:'c', new:'meeting_city' }, { old:'s', new:'meeting_state' }, { old:'z', new:'meeting_zip' }, { old:'t', new:'name' }, { old:'u', new:'website' }, { old:'e', new:'email' }, { old:'lt', new:'lat' }, { old:'ln', new:'lng' } ] // For each cong $.each(congs, function(index, cong){ // For each key name $.each(replacements,function(index, repl){ // Replace each key name cong[repl.new] = cong[repl.old]; delete cong[repl.old]; }) // Parse 'd' field into: // phone, pastor_name, presbytery_name [, others?] // cong.d = <div><span class="l">Church Phone:</span>&nbsp;334-294-1226</div><div><span class="l">Pastor:</span>&nbsp;Rev. Brian DeWitt MacDonald</div><div><span class="l">Presbytery:</span>&nbsp;Southeast Alabama</div> // Ignore errors if the match fails try { cong.phone = cong.d.match(/Church Phone:.*?&nbsp;(.*?)</)[1]} catch(e){} try { cong.pastor_name = cong.d.match(/Pastor:.*?&nbsp;Rev. (.*?)</)[1] } catch(e){} try { cong.presbytery_name = cong.d.match(/Presbytery:.*?&nbsp;(.*?)</)[1] } catch(e){} // Parse 'l' field into: // mailing_address1, mailing_city, mailing_state, mailing_zip // cong.l = 6600 Terry Road<br />Terry, MS 39170 // But note there are many other formats, particularly outside the US // TODO: compact this into a recursive function that iterates through a list of regexes to try for // each field try{ cong.mailing_address1 = cong.l.match(/^(.*?)<br/)[1] try{ cong.mailing_city = cong.l.match(/<br \/>(.*?),/)[1] }catch(e){ try{ cong.mailing_city = cong.l.match(/<br \/>(.*?) [0-9]+/)[1] }catch(e){ try{ cong.mailing_city = cong.l.match(/<br \/>[0-9]+ (.*?)/)[1] }catch(e){ try{ cong.mailing_city = cong.l.match(/<br \/>(.*?)/)[1] }catch(e){ console.log(cong.l) } } } } try{ cong.mailing_state = cong.l.match(/<br \/>.*?, (.*?) /)[1] }catch(e){ // The only ones missed here are not states, but cities, so this is commented out // console.log(cong.l) } try{ cong.mailing_zip = cong.l.match(/<br \/>.*?, .*? (.*)$/)[1] }catch(e){ try{ cong.mailing_zip = cong.l.match(/<br \/>.*? ([0-9- ]+)$/)[1] }catch(e){ try{ cong.mailing_zip = cong.l.match(/<br \/>([0-9- ]+) .*/)[1] }catch(e){ try{ cong.mailing_zip = cong.l.match(/.*?[0-9]+?<br/)[1] }catch(e){ // The rest just don't have a zip, so this is commented out // console.log(cong.l) } } } } }catch(e){ // If this outputs data, create a new regex to fix the errors console.log(cong.l) } // Convert geocode to geocouch format cong.loc = [cong.lat, cong.lng] // TODO: Set each model's cgroup_id. What key name does backbone-relational use for the cgroup_id? // TODO: Record other denominations' abbreviations here too cong.denomination_abbr = 'PCA' cong.cgroups = [] cong.collection = 'cong' congs[index] = cong }) // Write the JSON to the database // It is easiest to bulk-save using jquery.couch.js rather than Backbone config.db.bulkSave({"docs":congs},{success:function(){ // TODO: Notify the user console.log(new Date().getTime() + '\tb: All congs are saved!') }}) } }); });
www/assets/js/views/ImportDirectory/URLView.js
define([ 'config', 'model', 'mustache', 'text!views/ImportDirectory/URL.html', './DirTypeView', 'typeahead' ], function(config, model, Mustache, template, DirTypeView){ return Backbone.View.extend({ initialize:function(){ // Make it easy to reference this object in event handlers _.bindAll(this, 'changes_listeners', 'handle_404', 'got_url_html', 'got_batchgeo_map_html', 'got_json', 'get_church_dir_from_url', 'get_cgroup', 'save_cgroup_and_dir', 'save_dir', 'parse_json', 'process_batch_geo', 'get_batchgeo_json', 'batchgeo_parse_json') if (typeof window.app.geocoder == 'undefined'){ window.app.geocoder = new google.maps.Geocoder(); } }, render: function(){ $('#steps').html(Mustache.render(template)) this.delegateEvents() // Render typeahead for URL textbox // Render typeahead // TODO: Consider filtering and sorting by levenshtein distance var substringMatcher = function(strs) { return function findMatches(q, cb) { var matches, substringRegex; // an array that will be populated with substring matches matches = []; // regex used to determine if a string contains the substring `q` substrRegex = new RegExp(q, 'i'); // iterate through the pool of strings and for any string that // contains the substring `q`, add it to the `matches` array $.each(strs, function(i, str) { if (substrRegex.test(str)) { // the typeahead jQuery plugin expects suggestions to a // JavaScript object, refer to typeahead docs for more info matches.push({ value: str }); } }); cb(matches); }; }; // Get array of directories from model this.directories = new model.Directories() this.directories.fetch() this.$('#url').typeahead({ hint: true, highlight: true, minLength: 1 },{ name: 'directories', displayKey: 'value', source: substringMatcher(this.directories.each(function(mod){return mod.get('url')})) }) }, events: { 'keyup #url':'get_church_dir_from_url' }, changes_listeners:function(){ // These are the main cases - different types of changes that // need to be handled this.listenTo(this.model,{ 'change':this.handle_404, 'change:get_url_html':this.got_url_html, 'change:get_batchgeo_map_html':this.got_batchgeo_map_html, 'change:get_json':this.got_json }) }, handle_404:function(model, value, options){ // TODO: Don't load the new view yet if the status code returned from the URL is a 404; // Instead after the delay, notify the user with // "Is that URL correct? It returns a '404 page not found' error." }, got_url_html:function(model, value, options){ // Handle directory's first page of content if (this.model.get('url_html') && this.model.get('get_url_html') == 'gotten'){ var html = this.model.get('url_html') // Determine whether this URL's data is HTML, RSS, KML, or JSON, or a 404 page // TODO: Don't load the new view yet if the status code returned from the URL is a 404; // Instead after the delay, notify the user with // "Is that URL correct? It returns a '404 page not found' error." 
if (this.model.get('error_code')){ var msg = 'We got error code ' + this.model.get('error_code') + ' from this URL: ' + this.model.get('url') console.log(msg) // TODO: Report this to the user, including what error code we got } if (html.indexOf("</html>") > -1){ console.log('We got HTML') this.model.set('pagetype', 'html') // Determine what type of directory this is // batchgeo if (this.uses_batch_geo(html) === true && typeof this.model.get('get_batchgeo_map_html') == 'undefined' && typeof this.model.get('get_json') == 'undefined'){ this.process_batch_geo(html) }else{ // TODO: If the other form fields are empty, // auto-populate them with info from this // directory's cgroup to help the user // TODO: Maybe only display those fields after // the URL is filled in // https://blueprints.launchpad.net/reformedchurcheslocator/+spec/display-cgroup-name-and-abbr-fields } } else if (html.indexOf("</rss>") > -1){ console.log('We got RSS') // TODO: Display the right form controls for an RSS feed this.model.set('pagetype', 'rss') } else if (html.indexOf("<kml") > -1){ console.log('We got KML') // TODO: Display the right form controls for a KML feed this.model.set('pagetype', 'kml') } else if (html.indexOf("per = {") === 0){ // batchgeo format console.log('We got batchgeo JSON') this.model.set('pagetype', 'batchgeo_json') } else if (html.indexOf("{") === 0){ console.log('We got JSON') this.model.set('pagetype', 'json') // TODO: The RPCNA's data is in a JSON file in RCL format already at http://reformedpresbyterian.org/congregations/json this.parse_json() } else { // We got an error code console.log('We got an error code from this URL:' + this.model.get('url')) // TODO: Report this to the user, including what error code we got } this.model.set('get_url_html', '') // TODO: Is this the right place to save the dir? // https://blueprints.launchpad.net/reformedchurcheslocator/+spec/decide-whether-to-save-dir //this.model.save({_id:this.model.get('_id')}) } }, got_batchgeo_map_html:function(model, value, options){ // Handle batchgeo map page if (typeof this.model.get('batchgeo_map_html') !== 'undefined' && this.model.get('get_batchgeo_map_html') == 'gotten'){ this.get_batchgeo_json() } }, got_json:function(model, value, options){ // Handle JSON if (typeof this.model.get('json') !== 'undefined' && this.model.get('get_json') == 'gotten'){ var json = this.model.get('json') // Batchgeo JSON if (json.indexOf('per = {') === 0){ this.batchgeo_parse_json() } } }, delay:(function(){ var timer = 0; return function(callback, ms){ clearTimeout (timer); timer = setTimeout(callback, ms); }; })(), get_church_dir_from_url:function(event){ var thiz = this // Delay this to run after typing has stopped for 3 seconds, so we don't // send too many requests this.delay(function(){ // --------- Main code section begins here ---------- // TODO: Start here. Decide what sub-views to create out of this view, and under what conditions // to display them. /* Steps: Event handler: Wait for $('#url') to change Enter URL. 
Event handler: Test URL to see if it is one of the following: Regular HTML page > display directory name, abbreviation inputs Batchgeo URL > import batchgeo JSON > report RPCNA JSON feed > import feed > report Arbitrary RSS or JSON > display field matching interface */ // If we have not already created a directory on this page, create it; else get the existing directory // If the cgroup's associated directory exists in the db, get it var page_url = thiz.$('#url').val() thiz.model = thiz.directories.findWhere({url:page_url}) if (typeof(thiz.model) === 'undefined'){ // The dir hasn't been created yet, so create it thiz.model = new model.Directory({url:page_url}) } // Create changes listeners on this.model thiz.changes_listeners() // thiz.get_cgroup() // TODO: Start here. I can't figure out how to determine that the task has completed. console.log('Start here.') var task = hoodie.task.start('geturlhtml', { url: page_url }) hoodie.task.on('geturlhtml:' + task.id + ':success', function(task, options){ console.log('Task completed!', options) }) task.done(function(task){ // Add url_html to thiz.model, and save thiz.model thiz.model.set('url_html', task.html) thiz.model.save() console.log(task.html, task.status_code) console.log('Logged task to console.') }).fail(function(error){ console.log("Couldn't get the url_html from this URL: ", error) }) // hoodie.get_url_html(page_url).then(function(task){ // // TODO: Add url_html to thiz.model, and save thiz.model // thiz.model.set('url_html', task.html) // thiz.model.save() // console.log(task.html, task.status_code) // console.log('Logged task to console.') // },function(error){ // console.log("Couldn't get the url_html from this URL: ", error) // }) // thiz.model.set('get_url_html', 'requested') // thiz.model.save() // TODO: Don't create the dir if the URL is not valid. // Maybe mark the dir's URL as invalid in the node.js script (by // checking for a 404 response), and/or // just delete the dir from node.js in an asynchronous cleanup task. // We wait until later to set get_url_html = 'requested', so as not // to fire that request event twice // TODO: If the other form fields are empty, // auto-populate them with info from this // directory's cgroup to help the user // TODO: Maybe only display those fields after // the URL is filled in // https://blueprints.launchpad.net/reformedchurcheslocator/+spec/display-cgroup-name-and-abbr-fields }, 3000) }, get_cgroup:function(){ console.log('Start here for hoodie integration') var thiz = this // Reset status flag so the status messages will display this.model.set('get_state_url_html', '') var cgroup_name = $('#cgroup_name').val() var abbr = $('#abbreviation').val() // Don't do anything if the CGroup info isn't entered yet if (cgroup_name !== '' && abbr !== ''){ // Check if cgroup already exists in db var search_keys = [cgroup_name, abbr] var attrs = this.model.attributes model.CGroupsByAbbrOrName.get_or_create_one(search_keys, attrs, {success:function(cgroup){ thiz.cgroup = cgroup thiz.save_cgroup_and_dir() }}) } }, save_cgroup_and_dir:function(){ // Save the dir so if the URL has changed in the browser, it gets // updated in the db too this.iterations = 0 // Note this is a recursive function! this.save_dir() }, save_dir:function(){ console.log('Start here') // TODO: Start here. 
Set up changes listener on this.model to handle responses from node_changes_listener.js // Maybe put the changes listener up in this.get_cgroup() after refactoring this.save_dir() so it uses // that changes listener and so this.iterations isn't needed anymore. // Event: This function gets called when: // Stack (last at top): // save_dir // save_cgroup_and_dir // get_cgroup // get_church_dir_from_url // [URL entered] var thiz = this this.iterations++; // Make this function wait until this rev is not being saved anymore under any other event if (typeof window.app.import_directory_view.rev_currently_being_saved !== 'undefined' && window.app.import_directory_view.rev_currently_being_saved === thiz.model.get('_rev')){ setTimeout(function(){ thiz.save_dir() }, 1000) return; } this.model.fetch({success:function(model, response, options){ var get_url_html = thiz.model.get('get_url_html') // Prevent import from running multiple times simultaneously if (get_url_html != 'getting'){ get_url_html = 'requested' } // Only save this revision if it's not currently being saved already if (typeof window.app.import_directory_view.rev_currently_being_saved === 'undefined' || window.app.import_directory_view.rev_currently_being_saved !== thiz.model.get('_rev')){ // console.log(iterations + ' 196 saving', dir.get('_rev')) // Prevent saving the same revision twice simultaneously if (typeof window.app.import_directory_view.rev_currently_being_saved === 'undefined'){ window.app.import_directory_view.rev_currently_being_saved = thiz.model.get('_rev') } thiz.model.save({ _id:thiz.model.get('_id'), _rev:thiz.model.get('_rev'), url:$('#url').val(), get_url_html:get_url_html }, { success:function(){ // Report that it's OK for other calls to save_dir to run delete window.app.import_directory_view.rev_currently_being_saved // Append dir to CGroup thiz.cgroup.get('directories').add([{_id:thiz.model.get('_id')}]) // Save cgroup to db // TODO: Does the relation appear on the dir in the db also? // This will trigger the Node changes listener's response thiz.cgroup.fetch({success:function(){ thiz.cgroup.save({_id:thiz.cgroup.get('_id'),_rev:thiz.cgroup.get('_rev')},{success:function(){ // TODO: This isn't necessary on dirtypes other than HTML // Render DirTypeView $('#steps').hide() thiz.dir_type_view = new DirTypeView({el: '#steps', model: thiz.model}) thiz.dir_type_view.render() $('#steps').fadeIn(2000) }}) }}) }, error:function(model, xhr, options){ console.error('We got the 196 error '+ thiz.iterations) thiz.save_dir() } } ) } }}) }, parse_json:function(){ var thiz = this var json = this.model.get('url_html') // console.log(json) // TODO: This handles the RPCNA data's current format, which does not yet // perfectly match the RCL format. 
So put this in a conditional if(){} block // to test if this is a JSON feed that has this format: {[]} (no "docs") window.app.json = json // TODO: Remove this when RPCNA's newly-corrected format comes out // TODO: There are 3 Beaver Falls congs in the JSON, but only 1 shows up on the map var new_json = json // Add initial object property .replace(/^{/gm, '{"docs":') // Double-quote values .replace(/:\s*?'(.*?)'\s*?,/gm, function(match, p1){ var output = '' if (p1.indexOf('"') !== 0){ output = p1.replace(/"/g, '\\"') }else{ output = p1 } return ': "' + output + '",' }) // Double-quote property names .replace(/\s*?(\w+?)\s*?:\s*?["']/gm, ' "$1": "') // Convert any remaining single quotes preceding a comma to double quotes .replace(/',/gm, '",') // Add newline after final object in list, and convert final single quote to double .replace(/(.*?)('\s*?})/gm,'$1"\n}') // Escape newlines in values // .replace(/(".*?)\n(?=.*?")/g, '$1\\\\n') // Since the above doesn't seem to work, try a different regex to do the same .replace(/"([\s\S]*?)"/g, function(match, p1, offset, string){ var output = '' if (p1.indexOf("\n") !== -1 || p1.indexOf("\r") !== -1){ if (p1.indexOf("\n") !== -1){ output = p1.replace(/\n/g, '\\\\n') } if (p1.indexOf("\r") !== -1){ output = p1.replace(/\r/g, '\\\\r') } }else{ output = p1 } return '"' + output + '"' }) // Remove comma after last item in list .replace(/}(,)(?![\s\S]*?{)/g, function(match, p1){ return '}' }) // TODO: Put this into the correct template, once we get the state page selector template to stop displaying // for this view $('#steps').append('<div class="status"></div>') function bulksave(congs){ // See if all geocoding requests have finished var geocoding = _.countBy(congs, function(cong){ if (cong.geocoding == 'started'){ return 'count' } }) // Report to the user how many congs are left $('.status').html(geocoding.count + ' congregations left to geocode!') window.app.congs = congs if (geocoding.count === 0 || geocoding.count == thiz.errors){ // Write the JSON to the database // It is easiest to bulk-save using jquery.couch.js rather than Backbone config.db.bulkSave({"docs":congs},{success:function(){ console.log(new Date().getTime() + '\tb: All congs are saved!') }}) } } function geocode(address, congs, index){ var now = new Date().getTime() if (typeof thiz.usecs == 'undefined'){ thiz.usecs = 100 } if (typeof thiz.geocode_end_time !== 'undefined' && (now - thiz.geocode_end_time) > thiz.usecs){ // This line should prevent two delayed geocode requests from running simultaneously thiz.geocode_end_time = now window.app.geocoder.geocode( { 'address': address }, function(results, status) { // console.log(results, status) // TODO: Handle when Google returns multiple possible address matches (results.length > 1, // or status == 'ZERO_RESULTS' if (status == google.maps.GeocoderStatus.OK) { var loc = results[0].geometry.location congs[index].loc = [loc.lat(), loc.lng()] // Delay bulkSave until after asynchronous geocoding is done for all congs congs[index].geocoding = 'done' thiz.geocode_end_time = new Date().getTime() bulksave(congs) }else{ // === if we were sending the requests to fast, try this one again and increase the delay if (status == google.maps.GeocoderStatus.OVER_QUERY_LIMIT){ thiz.usecs += 100; setTimeout(function(){ geocode(address, congs, index) },thiz.usecs) }else{ var reason = "Code "+status; var msg = 'address="' + address + '" error=' +reason+ '(usecs='+thiz.usecs+'ms)'; if (typeof thiz.errors == 'undefined'){ thiz.errors = 1 }else{ thiz.errors++ } 
console.error('Errors: ' + thiz.errors, msg) } } }) }else{ // Wait to avoid Google throttling the geocode requests (for there // being too many per second, as indicated by the 'OVER_QUERY_LIMIT' error code) if (typeof thiz.geocode_end_time == 'undefined'){ // Set the first geocode_end_time thiz.geocode_end_time = now } setTimeout(function(){ geocode(address, congs, index) },thiz.usecs) } } var congs = JSON.parse(new_json).docs _.each(congs, function(cong, index, list){ // TODO: Record cgroup id for this directory, by appending it to the list congs[index].cgroups = [thiz.model.get('cgroup')] // TODO: Record the denomination abbreviation for other denominations here too // Get it from the cgroup.abbr congs[index].denomination_abbr = 'RPCNA' congs[index].collection = 'cong' if (cong.lat === '' || cong.lng === ''){ // Geocode the cong and put geocode in object for geocouch // Note this is limited to 2500 requests per day congs[index].geocoding = 'started' // TODO: Refactor this into a general function // Pick the meeting_address[1|2] which contains a number, else just use meeting_address1 var address_line = '' if (cong.meeting_address1.search(/\d/) !== -1){ address_line = cong.meeting_address1 } else if (cong.meeting_address2.search(/\d/) !== -1){ address_line = cong.meeting_address2 }else{ address_line = cong.meeting_address1 } var address = address_line + ', ' + (cong.meeting_city?cong.meeting_city: (cong.mailing_city?cong.mailing_city:'')) + ', ' + (cong.meeting_state?cong.meeting_state: (cong.mailing_state?cong.mailing_state:'')) + ' ' + (cong.meeting_zip?cong.meeting_zip: (cong.mailing_zip?cong.mailing_zip:'')) // TODO: Consider how to refactor this to geocode only one cong at a time. // Currently the code tries all at once, then when it realizes it's getting errors, // it throttles back, but the effect of that throttling is probably to throttle back too // much, so the whole geocoding batch runs much slower than it has to. So the way to refactor // this is to keep track of which cong is being handled, then only once one cong is geocoded, // move on to the next cong. geocode(address, congs, index) } else { // Use existing geocode data congs[index].loc = [cong.lat, cong.lng] } }) }, // TODO: Consider moving these into a library uses_batch_geo:function(html){ return ( html.indexOf('https://batchgeo.com/map/') !== -1 ) }, process_batch_geo:function(html){ // Get the batchgeo map URL out of the HTML var map_url = html.match(/(https:\/\/batchgeo.com\/map\/.+?)['"]{1}/i)[1] // Get the batchgeo JSON URL out of the map's HTML console.log(new Date().getTime() + '\tb: ' + map_url) this.model.set('pagetype', 'batchgeo') this.model.set('batchgeo_map_url', map_url) this.model.set('get_batchgeo_map_html', 'requested') this.model.save() }, get_batchgeo_json:function(){ var thiz = this this.model.fetch({success:function(){ thiz.model.unset('get_batchgeo_map_html') var html = thiz.model.get('batchgeo_map_html') // console.log(html) var json_url = html.match(/(https:\/\/.+?.cloudfront.net\/map\/json\/.+?)['"]{1}/i)[1] console.log(new Date().getTime() + '\tb: get_json for ' + json_url) // TODO: Request that the node script get this URL's contents thiz.model.set('json_url', json_url) thiz.model.set('get_json', 'requested') thiz.model.save() }}) }, batchgeo_parse_json:function(){ this.model.unset('get_json') // The PCA has a KML file at http://batchgeo.com/map/kml/c78fa06a3fbdf2642daae48ca62bbb82 // Some (all?) 
data is also in JSON at http://static.batchgeo.com/map/json/c78fa06a3fbdf2642daae48ca62bbb82/1357687276 // The PCA directory's main HTML URL is http://www.pcaac.org/church-search/ // After trimming off the non-JSON, the cong details are in the obj.mapRS array // You can pretty-print it at http://www.cerny-online.com/cerny.js/demos/json-pretty-printing // Its format is as follows: // per = {mapRS:[{ // "accuracy":"ROOFTOP", // "postal":"30097", // mailing_zip? // "a":"9500 Medlock Bridge Road", // address // "c":"Johns Creek", // city // "s":"GA", // state // "z":"30097", // meeting_zip? // "t":"Perimeter Church", // name // "u":"www.Perimeter.org", // url // "i":"", // ? // "g":" ", // ? // "e":"[email protected]", // email // "lt":34.013179067701, // lat // "ln":-84.191637606647, // lng // "d":"<div><span class=\"l\">Church Phone:<\/span>&nbsp;678-405-2000<\/div><div><span class=\"l\">Pastor:<\/span>&nbsp;Rev. Randy Pope<\/div><div><span class=\"l\">Presbytery:<\/span>&nbsp;Metro Atlanta<\/div>", // phone, pastor_name, presbytery_name // "addr":"9500 Medlock Bridge Road Johns Creek GA 30097", // mailing_address (full, needs to be parsed) // "l":"9500 Medlock Bridge Road<br \/>Johns Creek, GA 30097", // mailing_address_formatted, easier to parse // "clr":"red" // }]} // Get the relevant JSON in a variable // This regex took forever // var json = this.model.get('json').replace(/.*?"mapRS":/, '{"congs":').replace(/,"dataRS":.*/, '}') // So although this could be unsafe, it is expedient! eval(this.model.get('json')) var congs = per.mapRS // Convert the JSON's fieldnames to RCL fieldnames var replacements = [ { old:'postal', new:'mailing_zip' }, { old:'a', new:'meeting_address1' }, { old:'c', new:'meeting_city' }, { old:'s', new:'meeting_state' }, { old:'z', new:'meeting_zip' }, { old:'t', new:'name' }, { old:'u', new:'website' }, { old:'e', new:'email' }, { old:'lt', new:'lat' }, { old:'ln', new:'lng' } ] // For each cong $.each(congs, function(index, cong){ // For each key name $.each(replacements,function(index, repl){ // Replace each key name cong[repl.new] = cong[repl.old]; delete cong[repl.old]; }) // Parse 'd' field into: // phone, pastor_name, presbytery_name [, others?] // cong.d = <div><span class="l">Church Phone:</span>&nbsp;334-294-1226</div><div><span class="l">Pastor:</span>&nbsp;Rev. Brian DeWitt MacDonald</div><div><span class="l">Presbytery:</span>&nbsp;Southeast Alabama</div> // Ignore errors if the match fails try { cong.phone = cong.d.match(/Church Phone:.*?&nbsp;(.*?)</)[1]} catch(e){} try { cong.pastor_name = cong.d.match(/Pastor:.*?&nbsp;Rev. (.*?)</)[1] } catch(e){} try { cong.presbytery_name = cong.d.match(/Presbytery:.*?&nbsp;(.*?)</)[1] } catch(e){} // Parse 'l' field into: // mailing_address1, mailing_city, mailing_state, mailing_zip // cong.l = 6600 Terry Road<br />Terry, MS 39170 // But note there are many other formats, particularly outside the US // TODO: compact this into a recursive function that iterates through a list of regexes to try for // each field try{ cong.mailing_address1 = cong.l.match(/^(.*?)<br/)[1] try{ cong.mailing_city = cong.l.match(/<br \/>(.*?),/)[1] }catch(e){ try{ cong.mailing_city = cong.l.match(/<br \/>(.*?) [0-9]+/)[1] }catch(e){ try{ cong.mailing_city = cong.l.match(/<br \/>[0-9]+ (.*?)/)[1] }catch(e){ try{ cong.mailing_city = cong.l.match(/<br \/>(.*?)/)[1] }catch(e){ console.log(cong.l) } } } } try{ cong.mailing_state = cong.l.match(/<br \/>.*?, (.*?) 
/)[1] }catch(e){ // The only ones missed here are not states, but cities, so this is commented out // console.log(cong.l) } try{ cong.mailing_zip = cong.l.match(/<br \/>.*?, .*? (.*)$/)[1] }catch(e){ try{ cong.mailing_zip = cong.l.match(/<br \/>.*? ([0-9- ]+)$/)[1] }catch(e){ try{ cong.mailing_zip = cong.l.match(/<br \/>([0-9- ]+) .*/)[1] }catch(e){ try{ cong.mailing_zip = cong.l.match(/.*?[0-9]+?<br/)[1] }catch(e){ // The rest just don't have a zip, so this is commented out // console.log(cong.l) } } } } }catch(e){ // If this outputs data, create a new regex to fix the errors console.log(cong.l) } // Convert geocode to geocouch format cong.loc = [cong.lat, cong.lng] // TODO: Set each model's cgroup_id. What key name does backbone-relational use for the cgroup_id? // TODO: Record other denominations' abbreviations here too cong.denomination_abbr = 'PCA' cong.cgroups = [] cong.collection = 'cong' congs[index] = cong }) // Write the JSON to the database // It is easiest to bulk-save using jquery.couch.js rather than Backbone config.db.bulkSave({"docs":congs},{success:function(){ // TODO: Notify the user console.log(new Date().getTime() + '\tb: All congs are saved!') }}) } }); });
Trying to get a response from my hoodie plugin
www/assets/js/views/ImportDirectory/URLView.js
Trying to get a response from my hoodie plugin
<ide><path>ww/assets/js/views/ImportDirectory/URLView.js
<ide> // thiz.get_cgroup()
<ide> // TODO: Start here. I can't figure out how to determine that the task has completed.
<ide> console.log('Start here.')
<add> function handle_storage(e) {
<add> if (!e) { e = window.event; }
<add> console.log(e)
<add> }
<add> if (window.addEventListener) {
<add> window.addEventListener("storage", handle_storage, false);
<add> } else {
<add> window.attachEvent("onstorage", handle_storage);
<add> };
<add> $(window).bind('storage', function (e) {
<add> alert('storage changed');
<add> });
<add> hoodie.task('geturlhtml').on('start', function(db, doc){ console.log(doc, 'start'); })
<add> hoodie.task('geturlhtml').on('abort', function(db, doc){ console.log(doc, 'abort'); })
<add> hoodie.task('geturlhtml').on('error', function(db, doc){ console.log(doc, 'error'); })
<add> hoodie.task('geturlhtml').on('success', function(db, doc){ console.log(doc, 'success'); })
<add> hoodie.task('geturlhtml').on('change', function(db, doc){ console.log(doc, 'change'); })
<add> hoodie.store.on('change', function(ev, doc){ console.log(ev, doc)})
<add> hoodie.task('geturlhtml').on('geturlhtml:success', function(task, options){
<add> console.log(task, options)
<add> console.log('Task completed!')
<add> })
<ide> var task = hoodie.task.start('geturlhtml', {
<ide> url: page_url
<del> })
<del> hoodie.task.on('geturlhtml:' + task.id + ':success', function(task, options){
<del> console.log('Task completed!', options)
<ide> })
<ide> task.done(function(task){
<ide> // Add url_html to thiz.model, and save thiz.model
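The change above replaces a single namespaced completion listener with a battery of hoodie task and store hooks while the author probes for a reliable "task finished" signal. For context, the surrounding view only reaches this code through its delay() helper, a shared-timer debounce that holds off the URL lookup until typing has paused for 3 seconds. A minimal standalone sketch of that debounce pattern follows; it assumes jQuery and a #url input as in the view, and the lookup body is purely illustrative:

// Shared-timer debounce, the same pattern as the view's delay() helper:
// each keystroke cancels the pending timeout, so the callback runs only
// once typing has paused for the given interval.
var delay = (function () {
    var timer = 0;
    return function (callback, ms) {
        clearTimeout(timer);
        timer = setTimeout(callback, ms);
    };
})();

// Hypothetical usage mirroring get_church_dir_from_url's 3-second pause
$('#url').on('keyup', function () {
    delay(function () {
        console.log('Look up the directory for', $('#url').val());
    }, 3000);
});

Because the timer is shared across calls, a burst of keyups collapses into one lookup, which is why the view can safely create or fetch the Directory model and start the geturlhtml task inside the delayed callback.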
JavaScript
apache-2.0
378869c752df3a32f02b4ceccc16773c476e84fc
0
xonixx/cml-site-jekyll,xonixx/cml-site-jekyll,xonixx/cml-site-jekyll,xonixx/cml-site-jekyll,xonixx/cml-site-jekyll
$((function () {
    $('#customers-main').slick({
        infinite: false,
        dots: true,
    });
}));
_includes/js/about.js
$((function () {
    $('#customers-main').slick({});
}));
CML-118: slick params added
_includes/js/about.js
CML-118: slick params added
<ide><path>includes/js/about.js
<ide> $((function () {
<del> $('#customers-main').slick({});
<add> $('#customers-main').slick({
<add> infinite: false,
<add> dots: true,
<add> });
<ide> }));
<ide>
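A small aside on the file above: the $(( function () { ... } )) wrapper is just jQuery's DOM-ready shorthand with an extra pair of grouping parentheses, so the slick call runs once the page has loaded. The commit itself only changes the options object: infinite: false stops the customers carousel from wrapping past the last slide, and dots: true renders clickable dot navigation. A minimal sketch of the resulting call, assuming jQuery and the slick plugin are already loaded; the selector and both option values come from the commit, and the comments are editorial:

$(function () {
    $('#customers-main').slick({
        infinite: false,  // stop at the last slide instead of looping back to the first
        dots: true        // show dot navigation under the carousel
    });
});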
Java
apache-2.0
84cb1b1d3b502a255bc3d325225785977a65e9a8
0
PDXFinder/pdxfinder,PDXFinder/pdxfinder,PDXFinder/pdxfinder,PDXFinder/pdxfinder
package org.pdxfinder.dataloaders.updog; import org.pdxfinder.graph.dao.*; import org.pdxfinder.services.DataImportService; import org.pdxfinder.services.UtilityService; import tech.tablesaw.api.Row; import tech.tablesaw.api.Table; import java.util.*; public class DomainObjectCreator { private Map<String, Table> pdxDataTables; //nodeType=>ID=>NodeObject private Map<String, Map<String, Object>> domainObjects; private UtilityService utilityService; private DataImportService dataImportService; public DomainObjectCreator(DataImportService dataImportService, UtilityService utilityService, Map<String, Table> pdxDataTables) { this.dataImportService = dataImportService; this.utilityService = utilityService; this.pdxDataTables = pdxDataTables; domainObjects = new HashMap<>(); } public void loadDomainObjects(){ //: Do not change the order of these unless you want to risk 1. the universe to collapse OR 2. missing nodes in the db loadProvider(); loadPatientData(); loadModelData(); loadSampleData(); } private void addToDomainObjects(String key1, String key2, Object object){ if(domainObjects.containsKey(key1)){ domainObjects.get(key1).put(key2, object); } else{ Map map = new HashMap(); map.put(key2,object); domainObjects.put(key1, map); } } private void loadProvider(){ Table finderRelatedTable = pdxDataTables.get("metadata-loader.tsv"); Row row = finderRelatedTable.row(5); Group providerGroup = dataImportService.getProviderGroup(row.getString("name"), row.getString("abbreviation"), "", "", "", row.getString("internal_url")); addToDomainObjects("provider_group", null, providerGroup); } private void loadPatientData() { Table patientTable = pdxDataTables.get("metadata-patient.tsv"); int rowCount = patientTable.rowCount(); //start this from 1, row 0 is the header for (int i = 1; i < rowCount; i++) { if (i < 4) continue; Row row = patientTable.row(i); Patient patient = dataImportService.createPatient(row.getText("patient_id"), (Group) getExistingDomainObject("provider_group", null), row.getText("sex"), "", row.getText("ethnicity")); patient.setCancerRelevantHistory(row.getText("history")); patient.setFirstDiagnosis(row.getText("initial_diagnosis")); patient.setAgeAtFirstDiagnosis(row.getText("age_at_initial_diagnosis")); addToDomainObjects("patient", row.getText("patient_id"), dataImportService.savePatient(patient)); } } private void loadSampleData(){ Table sampleTable = pdxDataTables.get("metadata-sample.tsv"); int rowCount = sampleTable.rowCount(); //start this from 1, row 0 is the header for(int i = 1; i < rowCount; i++){ if(i < 4) continue; Row row = sampleTable.row(i); String patientId = row.getString("patient_id"); String sampleId = row.getString("sample_id"); String modelId = row.getString("model_id"); String dateOfCollection = row.getString("collection_date"); String ageAtCollection = row.getString("age_in_years_at_collection"); String collectionEvent = row.getString("collection_event"); String elapsedTime = row.getString("months_since_collection_1"); String diagnosis = row.getString("diagnosis"); String tumorTypeName = row.getString("tumour_type"); String primarySiteName = row.getString("primary_site"); String collectionSiteName = row.getString("collection_site"); String stage = row.getString("stage"); String stagingSystem = row.getString("staging_system"); String grade = row.getString("grade"); String gradingSystem = row.getString("grading_system"); String virologyStatus = row.getString("virology_status"); String sharable = row.getString("sharable"); String treatmentNaive = 
row.getString("treatment_naive_at_collection"); String treated = row.getString("treated"); String priorTreatment = row.getString("prior_treatment"); Patient patient = (Patient) getExistingDomainObject("patient", patientId); PatientSnapshot patientSnapshot = patient.getSnapShotByCollection(ageAtCollection, dateOfCollection, collectionEvent, elapsedTime); if(patientSnapshot == null){ patientSnapshot = new PatientSnapshot(patient, ageAtCollection, dateOfCollection, collectionEvent, elapsedTime); patientSnapshot.setVirologyStatus(virologyStatus); patientSnapshot.setTreatmentNaive(treatmentNaive); patient.addSnapshot(patientSnapshot); } Tissue primarySite = (Tissue) getExistingDomainObject("tissue", primarySiteName); if(primarySite == null){ primarySite = dataImportService.getTissue(primarySiteName); addToDomainObjects("tissue", primarySiteName, primarySite); } Tissue collectionSite = (Tissue) getExistingDomainObject("tissue", collectionSiteName); if(collectionSite == null){ collectionSite = dataImportService.getTissue(collectionSiteName); addToDomainObjects("tissue", collectionSiteName, collectionSite); } TumorType tumorType = (TumorType) getExistingDomainObject("tumor_type", tumorTypeName); if(tumorType == null){ tumorType = dataImportService.getTumorType(tumorTypeName); addToDomainObjects("tumor_type", tumorTypeName, tumorType); } Sample sample = new Sample(); sample.setSourceSampleId(sampleId); sample.setDiagnosis(diagnosis); sample.setStage(stage); sample.setStageClassification(stagingSystem); sample.setGrade(grade); sample.setGradeClassification(gradingSystem); patientSnapshot.addSample(sample); ModelCreation modelCreation = (ModelCreation) getExistingDomainObject("model", modelId); modelCreation.setSample(sample); modelCreation.addRelatedSample(sample); } } private void loadModelData(){ } private Object getExistingDomainObject(String key1, String key2){ return domainObjects.get(key1).get(key2); } }
indexer/src/main/java/org/pdxfinder/dataloaders/updog/DomainObjectCreator.java
package org.pdxfinder.dataloaders.updog; import org.pdxfinder.graph.dao.*; import org.pdxfinder.services.DataImportService; import org.pdxfinder.services.UtilityService; import tech.tablesaw.api.Row; import tech.tablesaw.api.Table; import java.util.*; public class DomainObjectCreator { private Map<String, Table> pdxDataTables; //nodeType=>ID=>NodeObject private Map<String, Map<String, Object>> domainObjects; private UtilityService utilityService; private DataImportService dataImportService; public DomainObjectCreator(DataImportService dataImportService, UtilityService utilityService, Map<String, Table> pdxDataTables) { this.dataImportService = dataImportService; this.utilityService = utilityService; this.pdxDataTables = pdxDataTables; domainObjects = new HashMap<>(); } public void loadDomainObjects(){ //: Do not change the order of these unless you want to risk 1. the universe to collapse OR 2. missing nodes in the db loadProvider(); loadPatientData(); loadModelData(); loadSampleData(); } private void addToDomainObjects(String key1, String key2, Object object){ if(domainObjects.containsKey(key1)){ domainObjects.get(key1).put(key2, object); } else{ Map map = new HashMap(); map.put(key2,object); domainObjects.put(key1, map); } } private void loadProvider(){ Table finderRelatedTable = pdxDataTables.get("metadata-loader.tsv"); Row row = finderRelatedTable.row(5); Group providerGroup = dataImportService.getProviderGroup(row.getString("name"), row.getString("abbreviation"), "", "", "", row.getString("internal_url")); addToDomainObjects("provider_group", null, providerGroup); } private void loadPatientData() { Table patientTable = pdxDataTables.get("metadata-patient.tsv"); int rowCount = patientTable.rowCount(); //start this from 1, row 0 is the header for (int i = 1; i < rowCount; i++) { if (i < 4) continue; Row row = patientTable.row(i); Patient patient = dataImportService.createPatient(row.getText("patient_id"), (Group) getExistingDomainObject("provider_group", null), row.getText("sex"), "", row.getText("ethnicity")); patient.setCancerRelevantHistory(row.getText("history")); patient.setFirstDiagnosis(row.getText("initial_diagnosis")); patient.setAgeAtFirstDiagnosis(row.getText("age_at_initial_diagnosis")); addToDomainObjects("patient", row.getText("patient_id"), dataImportService.savePatient(patient)); } } private void loadSampleData(){ Table sampleTable = pdxDataTables.get("metadata-sample.tsv"); int rowCount = sampleTable.rowCount(); //start this from 1, row 0 is the header for(int i = 1; i < rowCount; i++){ if(i < 4) continue; Row row = sampleTable.row(i); String patientId = row.getString("patient_id"); String sampleId = row.getString("sample_id"); String modelId = row.getString("model_id"); String dateOfCollection = row.getString("collection_date"); String ageAtCollection = row.getString("age_in_years_at_collection"); String collectionEvent = row.getString("collection_event"); String elapsedTime = row.getString("months_since_collection_1"); String diagnosis = row.getString("diagnosis"); String tumorTypeName = row.getString("tumour_type"); String primarySiteName = row.getString("primary_site"); String collectionSiteName = row.getString("collection_site"); String stage = row.getString("stage"); String stagingSystem = row.getString("staging_system"); String grade = row.getString("grade"); String gradingSystem = row.getString("grading_system"); String virologyStatus = row.getString("virology_status"); String sharable = row.getString("sharable"); String treatmentNaive = 
row.getString("treatment_naive_at_collection"); String treated = row.getString("treated"); String priorTreatment = row.getString("prior_treatment"); Patient patient = (Patient) getExistingDomainObject("patient", patientId); PatientSnapshot patientSnapshot = patient.getSnapShotByCollection(ageAtCollection, dateOfCollection, collectionEvent, elapsedTime); if(patientSnapshot == null){ patientSnapshot = new PatientSnapshot(patient, ageAtCollection, dateOfCollection, collectionEvent, elapsedTime); patientSnapshot.setVirologyStatus(virologyStatus); patientSnapshot.setTreatmentNaive(treatmentNaive); patient.addSnapshot(patientSnapshot); } Tissue primarySite = (Tissue) getExistingDomainObject("tissue", primarySiteName); if(primarySite == null){ primarySite = dataImportService.getTissue(primarySiteName); addToDomainObjects("tissue", primarySiteName, primarySite); } Tissue collectionSite = (Tissue) getExistingDomainObject("tissue", collectionSiteName); if(collectionSite == null){ collectionSite = dataImportService.getTissue(collectionSiteName); addToDomainObjects("tissue", collectionSiteName, collectionSite); } TumorType tumorType = (TumorType) getExistingDomainObject("tumor_type", tumorTypeName); if(tumorType == null){ tumorType = dataImportService.getTumorType(tumorTypeName); addToDomainObjects("tumor_type", tumorTypeName, tumorType); } Sample sample = new Sample(); sample.setSourceSampleId(sampleId); sample.setDiagnosis(diagnosis); sample.setStage(stage); sample.setStageClassification(stagingSystem); sample.setGrade(grade); sample.setGradeClassification(gradingSystem); ModelCreation modelCreation = (ModelCreation) getExistingDomainObject("model", modelId); modelCreation.setSample(sample); modelCreation.addRelatedSample(sample); } } private void loadModelData(){ } private Object getExistingDomainObject(String key1, String key2){ return domainObjects.get(key1).get(key2); } }
add sample to snapshot
indexer/src/main/java/org/pdxfinder/dataloaders/updog/DomainObjectCreator.java
add sample to snapshot
<ide><path>ndexer/src/main/java/org/pdxfinder/dataloaders/updog/DomainObjectCreator.java
<ide> sample.setGrade(grade);
<ide> sample.setGradeClassification(gradingSystem);
<ide>
<add> patientSnapshot.addSample(sample);
<add>
<ide> ModelCreation modelCreation = (ModelCreation) getExistingDomainObject("model", modelId);
<ide>
<ide> modelCreation.setSample(sample);
Java
mit
c94619941b0fcf11d29651b477357af17b673d91
0
conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5
package org.opentripplanner.profile; import com.google.common.collect.Lists; import gnu.trove.iterator.TObjectIntIterator; import gnu.trove.map.TObjectIntMap; import gnu.trove.map.TObjectLongMap; import gnu.trove.map.hash.TObjectIntHashMap; import gnu.trove.map.hash.TObjectLongHashMap; import org.joda.time.DateTimeZone; import org.opentripplanner.analyst.TimeSurface; import org.opentripplanner.api.parameter.QualifiedModeSet; import org.opentripplanner.common.model.GenericLocation; import org.opentripplanner.routing.algorithm.AStar; import org.opentripplanner.routing.algorithm.PathDiscardingRaptorStateStore; import org.opentripplanner.routing.algorithm.Raptor; import org.opentripplanner.routing.core.RoutingRequest; import org.opentripplanner.routing.core.State; import org.opentripplanner.routing.core.TraverseMode; import org.opentripplanner.routing.core.TraverseModeSet; import org.opentripplanner.routing.edgetype.TripPattern; import org.opentripplanner.routing.graph.Graph; import org.opentripplanner.routing.graph.Vertex; import org.opentripplanner.routing.pathparser.BasicPathParser; import org.opentripplanner.routing.pathparser.InitialStopSearchPathParser; import org.opentripplanner.routing.pathparser.PathParser; import org.opentripplanner.routing.spt.DominanceFunction; import org.opentripplanner.routing.spt.ShortestPathTree; import org.opentripplanner.routing.trippattern.TripTimeSubset; import org.opentripplanner.routing.vertextype.TransitStop; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.List; import java.util.Map; /** * Perform profile routing using repeated RAPTOR searches. * * This is conceptually very similar to the work of the Minnesota Accessibility Observatory * (http://www.its.umn.edu/Publications/ResearchReports/pdfdownloadl.pl?id=2504) * They run repeated searches. We take advantage of the fact that the street network is static * (for the most part, assuming time-dependent turn restrictions and traffic are consistent across the time window) * and only run a fast transit search for each minute in the window. 
*/ public class RepeatedRaptorProfileRouter { private Logger LOG = LoggerFactory.getLogger(RepeatedRaptorProfileRouter.class); public static final int MAX_DURATION = 60 * 60 * 2; // seconds public ProfileRequest request; public Graph graph; public TimeSurface.RangeSet timeSurfaceRangeSet; public TObjectIntMap<TransitStop> mins = new TObjectIntHashMap<TransitStop>(3000, 0.75f, Integer.MAX_VALUE); public TObjectIntMap<TransitStop> maxs = new TObjectIntHashMap<TransitStop>(3000, 0.75f, Integer.MIN_VALUE); // accumulate the travel times to create an average TObjectLongMap<TransitStop> accumulator = new TObjectLongHashMap<TransitStop>(); TObjectIntMap<TransitStop> counts = new TObjectIntHashMap<TransitStop>(); public RepeatedRaptorProfileRouter (Graph graph, ProfileRequest request) { this.request = request; this.graph = graph; } public void route () { long computationStartTime = System.currentTimeMillis(); LOG.info("Begin profile request"); LOG.info("Finding initial stops"); TObjectIntMap<TransitStop> accessTimes = findInitialStops(false); LOG.info("Found {} initial stops", accessTimes.size()); Map<TripPattern, TripTimeSubset> timetables = TripTimeSubset.indexGraph(graph, request.date, request.fromTime, request.toTime + MAX_DURATION); PathDiscardingRaptorStateStore rss = new PathDiscardingRaptorStateStore(9); int i = 1; // We assume the times are aligned to minutes, and we don't do a depart-after search starting // at the end of the window. for (int startTime = request.toTime - 60; startTime >= request.fromTime; startTime -= 60) { if (++i % 30 == 0) LOG.info("Completed {} RAPTOR searches", i); // adjust the max time rss.maxTime = startTime + 120 * 60; // reset the counter rss.restart(); // relax the times at the start stops for (TObjectIntIterator<TransitStop> it = accessTimes.iterator(); it.hasNext();) { it.advance(); // this is "transfer" from the origin rss.put(it.key(), startTime + it.value(), true); } //LOG.info("Filtering RAPTOR states"); Raptor raptor = new Raptor(graph, 3, request.walkSpeed, rss, startTime, request.date, timetables); //LOG.info("Performing RAPTOR search for minute {}", i++); raptor.run(); //LOG.info("Finished RAPTOR search in {} milliseconds", System.currentTimeMillis() - roundStartTime); // loop over all states, accumulating mins, maxes, etc. for (TObjectIntIterator<TransitStop> it = raptor.iterator(); it.hasNext();) { it.advance(); int et = it.value() - startTime; // this can happen if the time is left from a previous search at a later start time /*if (et > 120 * 60) continue;*/ TransitStop v = it.key(); if (et < mins.get(v)) mins.put(v, et); if (et > maxs.get(v)) maxs.put(v, et); accumulator.putIfAbsent(v, 0); counts.putIfAbsent(v, 0); accumulator.adjustValue(v, et); counts.increment(v); } } LOG.info("Profile request complete, propagating to the street network"); makeSurfaces(); LOG.info("Profile request finished in {} seconds", (System.currentTimeMillis() - computationStartTime) / 1000.0); } /** find the boarding stops */ private TObjectIntMap<TransitStop> findInitialStops(boolean dest) { double lat = dest ? request.toLat : request.fromLat; double lon = dest ? request.toLon : request.fromLon; QualifiedModeSet modes = dest ? 
request.accessModes : request.egressModes; RoutingRequest rr = new RoutingRequest(TraverseMode.WALK); rr.dominanceFunction = new DominanceFunction.EarliestArrival(); rr.batch = true; rr.from = new GenericLocation(lat, lon); rr.walkSpeed = request.walkSpeed; rr.to = rr.from; rr.setRoutingContext(graph); rr.rctx.pathParsers = new PathParser[] { new InitialStopSearchPathParser() }; rr.dateTime = request.date.toDateMidnight(DateTimeZone.forTimeZone(graph.getTimeZone())).getMillis() / 1000 + request.fromTime; // RoutingRequest dateTime defaults to currentTime. // If elapsed time is not capped, searches are very slow. rr.worstTime = (rr.dateTime + request.maxWalkTime * 60); AStar astar = new AStar(); rr.longDistance = true; rr.setNumItineraries(1); ShortestPathTree spt = astar.getShortestPathTree(rr, 5); // timeout in seconds TObjectIntMap<TransitStop> accessTimes = new TObjectIntHashMap<TransitStop>(); for (TransitStop tstop : graph.index.stopVertexForStop.values()) { State s = spt.getState(tstop); if (s != null) { accessTimes.put(tstop, (int) s.getElapsedTimeSeconds()); } } // initialize propagation with direct modes timeSurfaceRangeSet = new TimeSurface.RangeSet(); timeSurfaceRangeSet.min = new TimeSurface(spt, false); timeSurfaceRangeSet.max = new TimeSurface(spt, false); timeSurfaceRangeSet.avg = new TimeSurface(spt, false); rr.cleanup(); return accessTimes; } private void makeSurfaces () { LOG.info("Propagating from transit stops to the street network..."); // Grab a cached map of distances to street intersections from each transit stop StopTreeCache stopTreeCache = graph.index.getStopTreeCache(); // Iterate over all nondominated rides at all clusters for (TransitStop tstop : mins.keySet()) { int lb0 = mins.get(tstop); int ub0 = maxs.get(tstop); int avg0 = (int) (accumulator.get(tstop) / counts.get(tstop)); // Iterate over street intersections in the vicinity of this particular transit stop. // Shift the time range at this transit stop, merging it into that for all reachable street intersections. TObjectIntMap<Vertex> distanceToVertex = stopTreeCache.getDistancesForStop(tstop); for (TObjectIntIterator<Vertex> iter = distanceToVertex.iterator(); iter.hasNext(); ) { iter.advance(); Vertex vertex = iter.key(); // distance in meters over walkspeed in meters per second --> seconds int egressWalkTimeSeconds = (int) (iter.value() / request.walkSpeed); if (egressWalkTimeSeconds > request.maxWalkTime * 60) { continue; } int propagated_min = lb0 + egressWalkTimeSeconds; int propagated_max = ub0 + egressWalkTimeSeconds; // TODO: we can't take the min propagated average and call it an average int propagated_avg = avg0 + egressWalkTimeSeconds; int existing_min = timeSurfaceRangeSet.min.times.get(vertex); int existing_max = timeSurfaceRangeSet.max.times.get(vertex); int existing_avg = timeSurfaceRangeSet.avg.times.get(vertex); // FIXME this is taking the least lower bound and the least upper bound // which is not necessarily wrong but it's a crude way to perform the combination if (existing_min == TimeSurface.UNREACHABLE || existing_min > propagated_min) { timeSurfaceRangeSet.min.times.put(vertex, propagated_min); } if (existing_max == TimeSurface.UNREACHABLE || existing_max > propagated_max) { timeSurfaceRangeSet.max.times.put(vertex, propagated_max); } if (existing_avg == TimeSurface.UNREACHABLE || existing_avg > propagated_avg) { timeSurfaceRangeSet.avg.times.put(vertex, propagated_avg); } } } LOG.info("Done with propagation."); /* Store the results in a field in the router object. */ } }
src/main/java/org/opentripplanner/profile/RepeatedRaptorProfileRouter.java
package org.opentripplanner.profile; import com.google.common.collect.Lists; import gnu.trove.iterator.TObjectIntIterator; import gnu.trove.map.TObjectIntMap; import gnu.trove.map.TObjectLongMap; import gnu.trove.map.hash.TObjectIntHashMap; import gnu.trove.map.hash.TObjectLongHashMap; import org.joda.time.DateTimeZone; import org.opentripplanner.analyst.TimeSurface; import org.opentripplanner.api.parameter.QualifiedModeSet; import org.opentripplanner.common.model.GenericLocation; import org.opentripplanner.routing.algorithm.AStar; import org.opentripplanner.routing.algorithm.PathDiscardingRaptorStateStore; import org.opentripplanner.routing.algorithm.Raptor; import org.opentripplanner.routing.core.RoutingRequest; import org.opentripplanner.routing.core.State; import org.opentripplanner.routing.core.TraverseMode; import org.opentripplanner.routing.core.TraverseModeSet; import org.opentripplanner.routing.edgetype.TripPattern; import org.opentripplanner.routing.graph.Graph; import org.opentripplanner.routing.graph.Vertex; import org.opentripplanner.routing.spt.DominanceFunction; import org.opentripplanner.routing.spt.ShortestPathTree; import org.opentripplanner.routing.trippattern.TripTimeSubset; import org.opentripplanner.routing.vertextype.TransitStop; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.List; import java.util.Map; /** * Perform profile routing using repeated RAPTOR searches. * * This is conceptually very similar to the work of the Minnesota Accessibility Observatory * (http://www.its.umn.edu/Publications/ResearchReports/pdfdownloadl.pl?id=2504) * They run repeated searches. We take advantage of the fact that the street network is static * (for the most part, assuming time-dependent turn restrictions and traffic are consistent across the time window) * and only run a fast transit search for each minute in the window. */ public class RepeatedRaptorProfileRouter { private Logger LOG = LoggerFactory.getLogger(RepeatedRaptorProfileRouter.class); public static final int MAX_DURATION = 60 * 60 * 2; // seconds public ProfileRequest request; public Graph graph; public TimeSurface.RangeSet timeSurfaceRangeSet; public TObjectIntMap<TransitStop> mins = new TObjectIntHashMap<TransitStop>(3000, 0.75f, Integer.MAX_VALUE); public TObjectIntMap<TransitStop> maxs = new TObjectIntHashMap<TransitStop>(3000, 0.75f, Integer.MIN_VALUE); // accumulate the travel times to create an average TObjectLongMap<TransitStop> accumulator = new TObjectLongHashMap<TransitStop>(); TObjectIntMap<TransitStop> counts = new TObjectIntHashMap<TransitStop>(); public RepeatedRaptorProfileRouter (Graph graph, ProfileRequest request) { this.request = request; this.graph = graph; } public void route () { long computationStartTime = System.currentTimeMillis(); LOG.info("Begin profile request"); LOG.info("Finding initial stops"); TObjectIntMap<TransitStop> accessTimes = findInitialStops(false); LOG.info("Found {} initial stops", accessTimes.size()); Map<TripPattern, TripTimeSubset> timetables = TripTimeSubset.indexGraph(graph, request.date, request.fromTime, request.toTime + MAX_DURATION); PathDiscardingRaptorStateStore rss = new PathDiscardingRaptorStateStore(9); int i = 1; // We assume the times are aligned to minutes, and we don't do a depart-after search starting // at the end of the window. 
for (int startTime = request.toTime - 60; startTime >= request.fromTime; startTime -= 60) { if (++i % 30 == 0) LOG.info("Completed {} RAPTOR searches", i); // adjust the max time rss.maxTime = startTime + 120 * 60; // reset the counter rss.restart(); // relax the times at the start stops for (TObjectIntIterator<TransitStop> it = accessTimes.iterator(); it.hasNext();) { it.advance(); // this is "transfer" from the origin rss.put(it.key(), startTime + it.value(), true); } //LOG.info("Filtering RAPTOR states"); Raptor raptor = new Raptor(graph, 3, request.walkSpeed, rss, startTime, request.date, timetables); //LOG.info("Performing RAPTOR search for minute {}", i++); raptor.run(); //LOG.info("Finished RAPTOR search in {} milliseconds", System.currentTimeMillis() - roundStartTime); // loop over all states, accumulating mins, maxes, etc. for (TObjectIntIterator<TransitStop> it = raptor.iterator(); it.hasNext();) { it.advance(); int et = it.value() - startTime; // this can happen if the time is left from a previous search at a later start time /*if (et > 120 * 60) continue;*/ TransitStop v = it.key(); if (et < mins.get(v)) mins.put(v, et); if (et > maxs.get(v)) maxs.put(v, et); accumulator.putIfAbsent(v, 0); counts.putIfAbsent(v, 0); accumulator.adjustValue(v, et); counts.increment(v); } } LOG.info("Profile request complete, propagating to the street network"); makeSurfaces(); LOG.info("Profile request finished in {} seconds", (System.currentTimeMillis() - computationStartTime) / 1000.0); } /** find the boarding stops */ private TObjectIntMap<TransitStop> findInitialStops(boolean dest) { double lat = dest ? request.toLat : request.fromLat; double lon = dest ? request.toLon : request.fromLon; QualifiedModeSet modes = dest ? request.accessModes : request.egressModes; RoutingRequest rr = new RoutingRequest(TraverseMode.WALK); rr.dominanceFunction = new DominanceFunction.EarliestArrival(); rr.batch = true; rr.from = new GenericLocation(lat, lon); rr.walkSpeed = request.walkSpeed; rr.to = rr.from; rr.setRoutingContext(graph); rr.dateTime = request.date.toDateMidnight(DateTimeZone.forTimeZone(graph.getTimeZone())).getMillis() / 1000 + request.fromTime; // RoutingRequest dateTime defaults to currentTime. // If elapsed time is not capped, searches are very slow. 
rr.worstTime = (rr.dateTime + request.maxWalkTime * 60); AStar astar = new AStar(); rr.longDistance = true; rr.setNumItineraries(1); ShortestPathTree spt = astar.getShortestPathTree(rr, 5); // timeout in seconds TObjectIntMap<TransitStop> accessTimes = new TObjectIntHashMap<TransitStop>(); for (TransitStop tstop : graph.index.stopVertexForStop.values()) { State s = spt.getState(tstop); if (s != null) { accessTimes.put(tstop, (int) s.getElapsedTimeSeconds()); } } // initialize propagation with direct modes timeSurfaceRangeSet = new TimeSurface.RangeSet(); timeSurfaceRangeSet.min = new TimeSurface(spt, false); timeSurfaceRangeSet.max = new TimeSurface(spt, false); timeSurfaceRangeSet.avg = new TimeSurface(spt, false); rr.cleanup(); return accessTimes; } private void makeSurfaces () { LOG.info("Propagating from transit stops to the street network..."); // Grab a cached map of distances to street intersections from each transit stop StopTreeCache stopTreeCache = graph.index.getStopTreeCache(); // Iterate over all nondominated rides at all clusters for (TransitStop tstop : mins.keySet()) { int lb0 = mins.get(tstop); int ub0 = maxs.get(tstop); int avg0 = (int) (accumulator.get(tstop) / counts.get(tstop)); // Iterate over street intersections in the vicinity of this particular transit stop. // Shift the time range at this transit stop, merging it into that for all reachable street intersections. TObjectIntMap<Vertex> distanceToVertex = stopTreeCache.getDistancesForStop(tstop); for (TObjectIntIterator<Vertex> iter = distanceToVertex.iterator(); iter.hasNext(); ) { iter.advance(); Vertex vertex = iter.key(); // distance in meters over walkspeed in meters per second --> seconds int egressWalkTimeSeconds = (int) (iter.value() / request.walkSpeed); if (egressWalkTimeSeconds > request.maxWalkTime * 60) { continue; } int propagated_min = lb0 + egressWalkTimeSeconds; int propagated_max = ub0 + egressWalkTimeSeconds; // TODO: we can't take the min propagated average and call it an average int propagated_avg = avg0 + egressWalkTimeSeconds; int existing_min = timeSurfaceRangeSet.min.times.get(vertex); int existing_max = timeSurfaceRangeSet.max.times.get(vertex); int existing_avg = timeSurfaceRangeSet.avg.times.get(vertex); // FIXME this is taking the least lower bound and the least upper bound // which is not necessarily wrong but it's a crude way to perform the combination if (existing_min == TimeSurface.UNREACHABLE || existing_min > propagated_min) { timeSurfaceRangeSet.min.times.put(vertex, propagated_min); } if (existing_max == TimeSurface.UNREACHABLE || existing_max > propagated_max) { timeSurfaceRangeSet.max.times.put(vertex, propagated_max); } if (existing_avg == TimeSurface.UNREACHABLE || existing_avg > propagated_avg) { timeSurfaceRangeSet.avg.times.put(vertex, propagated_avg); } } } LOG.info("Done with propagation."); /* Store the results in a field in the router object. */ } }
add a pathparser for finding initial stops in the repeated RAPTOR profile router. Former-commit-id: add2d978ea047840cb4eda4b2de6b55d003bf04b
src/main/java/org/opentripplanner/profile/RepeatedRaptorProfileRouter.java
add a pathparser for finding initial stops in the repeated RAPTOR profile router.
<ide><path>rc/main/java/org/opentripplanner/profile/RepeatedRaptorProfileRouter.java <ide> package org.opentripplanner.profile; <ide> <ide> import com.google.common.collect.Lists; <add> <ide> import gnu.trove.iterator.TObjectIntIterator; <ide> import gnu.trove.map.TObjectIntMap; <ide> import gnu.trove.map.TObjectLongMap; <ide> import gnu.trove.map.hash.TObjectIntHashMap; <ide> import gnu.trove.map.hash.TObjectLongHashMap; <add> <ide> import org.joda.time.DateTimeZone; <ide> import org.opentripplanner.analyst.TimeSurface; <ide> import org.opentripplanner.api.parameter.QualifiedModeSet; <ide> import org.opentripplanner.routing.edgetype.TripPattern; <ide> import org.opentripplanner.routing.graph.Graph; <ide> import org.opentripplanner.routing.graph.Vertex; <add>import org.opentripplanner.routing.pathparser.BasicPathParser; <add>import org.opentripplanner.routing.pathparser.InitialStopSearchPathParser; <add>import org.opentripplanner.routing.pathparser.PathParser; <ide> import org.opentripplanner.routing.spt.DominanceFunction; <ide> import org.opentripplanner.routing.spt.ShortestPathTree; <ide> import org.opentripplanner.routing.trippattern.TripTimeSubset; <ide> TObjectIntMap<TransitStop> accessTimes = findInitialStops(false); <ide> <ide> LOG.info("Found {} initial stops", accessTimes.size()); <del> <add> <ide> Map<TripPattern, TripTimeSubset> timetables = <ide> TripTimeSubset.indexGraph(graph, request.date, request.fromTime, request.toTime + MAX_DURATION); <ide> <ide> rr.walkSpeed = request.walkSpeed; <ide> rr.to = rr.from; <ide> rr.setRoutingContext(graph); <add> rr.rctx.pathParsers = new PathParser[] { new InitialStopSearchPathParser() }; <ide> rr.dateTime = request.date.toDateMidnight(DateTimeZone.forTimeZone(graph.getTimeZone())).getMillis() / 1000 + <ide> request.fromTime; <ide> // RoutingRequest dateTime defaults to currentTime.
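The router above accumulates, per transit stop, a minimum, a maximum, and a running sum/count of elapsed travel times over the repeated one-minute departure searches, later dividing sum by count to get an average. Below is a minimal, self-contained sketch of that accumulation pattern in plain Java. It is illustrative only: the real class uses Trove primitive maps (TObjectIntHashMap with a no-entry sentinel, TObjectLongHashMap) rather than java.util maps, and the class, field, and method names here are assumptions, not part of the OpenTripPlanner code.

import java.util.HashMap;
import java.util.Map;

/** Sketch of the per-stop min/max/average accumulation over repeated departure-minute searches. */
class TravelTimeAccumulator<S> {

    private final Map<S, Integer> mins = new HashMap<>();
    private final Map<S, Integer> maxs = new HashMap<>();
    private final Map<S, Long> sums = new HashMap<>();
    private final Map<S, Integer> counts = new HashMap<>();

    /** Record one elapsed time (seconds) observed at a stop for one departure minute. */
    void accept(S stop, int elapsedSeconds) {
        mins.merge(stop, elapsedSeconds, Math::min);
        maxs.merge(stop, elapsedSeconds, Math::max);
        sums.merge(stop, (long) elapsedSeconds, Long::sum);
        counts.merge(stop, 1, Integer::sum);
    }

    int min(S stop) { return mins.getOrDefault(stop, Integer.MAX_VALUE); }

    int max(S stop) { return maxs.getOrDefault(stop, Integer.MIN_VALUE); }

    /** Average over the departure minutes in which the stop was reached at all. */
    int avg(S stop) {
        int n = counts.getOrDefault(stop, 0);
        return n == 0 ? Integer.MAX_VALUE : (int) (sums.getOrDefault(stop, 0L) / n);
    }
}

As the TODO in makeSurfaces() notes, only the min and max values can safely be propagated to the street network by taking element-wise minima; the propagated "average" obtained this way is an approximation rather than a true average at the street vertex.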
Java
apache-2.0
3a67f2eddec186f0d039aef431c5f43744446c53
0
wcm-io-caravan/caravan-pipeline
/* * #%L * wcm.io * %% * Copyright (C) 2014 wcm.io * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package io.wcm.caravan.pipeline.impl.operators; import java.util.concurrent.TimeUnit; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import rx.Observable; import rx.Observable.OnSubscribe; import rx.Observable.Operator; import rx.Observable.Transformer; import rx.Observer; import com.google.common.base.Stopwatch; import com.google.common.base.Ticker; /** * A class that measures the time spent by a specific {@link Transformer}, {@link Operator} or {@link Observable}. * Usage of this class is limited to observables with a single emission */ public final class PerformanceMetricsTransformer<T> implements Transformer<T, T> { private static final Logger log = LoggerFactory.getLogger(PerformanceMetricsTransformer.class); private final Transformer<T, T> toMeasure; private final Stopwatch subscriptionStopwatch; private final Stopwatch observationStopwatch; private final Stopwatch emissionStopwatch; /** * constructor for unit-tests that allows you to specify a custom ticker. For general usage use * {@link #withSystemTicker(Transformer)} * @param toMeasure the transformer that should be measured * @param ticker the time source */ PerformanceMetricsTransformer(Transformer<T, T> toMeasure, Ticker ticker) { super(); this.toMeasure = toMeasure; subscriptionStopwatch = Stopwatch.createUnstarted(ticker); observationStopwatch = Stopwatch.createUnstarted(ticker); emissionStopwatch = Stopwatch.createUnstarted(ticker); } /** * constructor for unit-tests that allows you to specify a custom ticker. For general usage use * {@link #withSystemTicker(Operator)} * @param toMeasure the operator that should be measured * @param ticker the time source */ PerformanceMetricsTransformer(Operator<T, T> toMeasure, Ticker ticker) { // if the subject to be measured is an operator we wrap it in a trivial transformer this(new Transformer<T, T>() { @Override public Observable<T> call(Observable<T> sourceObservable) { return Observable.create(subscriber -> { // apply the operator to measure to the source observable before subscribing sourceObservable.lift(toMeasure).subscribe(subscriber); }); } }, ticker); } /** * constructor for unit-tests that allows you to specify a custom ticker. 
For general usage use * {@link #withSystemTicker(Observable)} * @param toMeasure the operator that should be measured * @param ticker the time source */ PerformanceMetricsTransformer(Observable<T> toMeasure, Ticker ticker) { // if the subject to be measured is an operator we wrap it in a trivial transformer this(new Transformer<T, T>() { @Override public Observable<T> call(Observable<T> sourceObservable) { return sourceObservable; } }, ticker); } /** * @param toMeasure the transformer that should be measured * @return a PerformanceMetricsTransformer that uses the {@link Ticker#systemTicker()} */ public static <T> PerformanceMetricsTransformer<T> withSystemTicker(Transformer<T, T> toMeasure) { return new PerformanceMetricsTransformer<T>(toMeasure, Ticker.systemTicker()); } /** * @param toMeasure the operator that should be measured * @return a PerformanceMetricsTransformer that uses the {@link Ticker#systemTicker()} */ public static <T> PerformanceMetricsTransformer<T> withSystemTicker(Operator<T, T> toMeasure) { return new PerformanceMetricsTransformer<T>(toMeasure, Ticker.systemTicker()); } /** * @param toMeasure the observable that should be measured * @return a PerformanceMetricsTransformer that uses the {@link Ticker#systemTicker()} */ public static <T> PerformanceMetricsTransformer<T> withSystemTicker(Observable<T> toMeasure) { return new PerformanceMetricsTransformer<T>(toMeasure, Ticker.systemTicker()); } @Override public Observable<T> call(Observable<T> dataSource) { Observable<T> wrappedInputObservable = Observable.create(subscriberToMeasure -> { // 3. this is called after the subscriber to measure has finished its job and subscribes to this input observable if (subscriptionStopwatch.isRunning()) { subscriptionStopwatch.stop(); } // 4. we now start the stop watch that measures how long the actual data source takes between subscription and emission observationStopwatch.start(); dataSource.subscribe(new Observer<T>() { @Override public void onCompleted() { subscriberToMeasure.onCompleted(); } @Override public void onError(Throwable e) { subscriberToMeasure.onError(e); } @Override public void onNext(T emission) { // 5. the source data is available, we now stop the observation stop watch if (observationStopwatch.isRunning()) { observationStopwatch.stop(); } // 6. now start the stop watch to measure the time it takes for the transfomer to handle the data emissionStopwatch.start(); subscriberToMeasure.onNext(emission); } }); }); Observable<T> wrappedOutputObservable = Observable.create(actualOutputSubscriber -> { Observable<T> observableToMeasure = toMeasure.call(wrappedInputObservable); // 1. start the subscription stop watch subscriptionStopwatch.start(); // 2. subscribing here will call onSubscribe function of the transformer that we want to measure observableToMeasure.subscribe(new Observer<T>() { @Override public void onCompleted() { actualOutputSubscriber.onCompleted(); } @Override public void onError(Throwable e) { actualOutputSubscriber.onError(e); } @Override public void onNext(T emission) { // 7. 
the transformer to measure has finished processing the data, so we stop the emission stop watch if (emissionStopwatch.isRunning()) { emissionStopwatch.stop(); } log.info("subscription=" + getSubscriptionMillis() + "ms, observation=" + getObservationMillis() + "ms, emission: " + getEmissionMillis() + "ms"); actualOutputSubscriber.onNext(emission); } }); }); return wrappedOutputObservable.cache(); } /** * Calculate the time spent during the {@link OnSubscribe} function of the {@link Transformer} to measure. Will be 0 * if you are using this class to measure a {@link Operator} or {@link Observable} * @return the time in milliseconds spent subscribing onto the source observable */ public long getSubscriptionMillis() { return this.subscriptionStopwatch.elapsed(TimeUnit.MILLISECONDS); } /** * Calculate the time spent waiting for the source observable to emit the result * @return the time in milliseconds spent waiting for the source observable */ public long getObservationMillis() { return this.observationStopwatch.elapsed(TimeUnit.MILLISECONDS); } /** * Calculate the time spent processing the emission of the source observable * @return the time in milliseconds spent processing the emission */ public long getEmissionMillis() { return this.emissionStopwatch.elapsed(TimeUnit.MILLISECONDS); } }
impl/src/main/java/io/wcm/caravan/pipeline/impl/operators/PerformanceMetricsTransformer.java
/* * #%L * wcm.io * %% * Copyright (C) 2014 wcm.io * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package io.wcm.caravan.pipeline.impl.operators; import io.wcm.caravan.pipeline.JsonPipelineOutput; import java.util.concurrent.TimeUnit; import rx.Observable; import rx.Observable.OnSubscribe; import rx.Observable.Transformer; import rx.Observer; import rx.Subscriber; import com.google.common.base.Stopwatch; import com.google.common.base.Ticker; /** * Prototype for a more rx-java like way to measure how much time a Transformer needs for subscription and handling of * emissions */ final class PerformanceMetricsTransformer implements Transformer<JsonPipelineOutput, JsonPipelineOutput> { private final Transformer<JsonPipelineOutput, JsonPipelineOutput> toMeasure; private final Stopwatch subscriptionStopwatch; private final Stopwatch emissionStopwatch; public PerformanceMetricsTransformer(Transformer<JsonPipelineOutput, JsonPipelineOutput> toMeasure, Ticker ticker) { super(); this.toMeasure = toMeasure; subscriptionStopwatch = Stopwatch.createUnstarted(ticker); emissionStopwatch = Stopwatch.createUnstarted(ticker); } @Override public Observable<JsonPipelineOutput> call(Observable<JsonPipelineOutput> dataSource) { Observable<JsonPipelineOutput> wrappedInputObservable = Observable.create(new OnSubscribe<JsonPipelineOutput>() { @Override public void call(Subscriber<? super JsonPipelineOutput> subscriberToMeasure) { subscriptionStopwatch.stop(); dataSource.subscribe(new Observer<JsonPipelineOutput>() { @Override public void onCompleted() { subscriberToMeasure.onCompleted(); } @Override public void onError(Throwable e) { subscriberToMeasure.onError(e); } @Override public void onNext(JsonPipelineOutput t) { emissionStopwatch.start(); subscriberToMeasure.onNext(t); } }); } }); Observable<JsonPipelineOutput> wrappedOutputObservable = Observable.create(new OnSubscribe<JsonPipelineOutput>() { @Override public void call(Subscriber<? super JsonPipelineOutput> actualOutputSubscriber) { Observable<JsonPipelineOutput> observableToMeasure = toMeasure.call(wrappedInputObservable); subscriptionStopwatch.start(); observableToMeasure.subscribe(new Observer<JsonPipelineOutput>() { @Override public void onCompleted() { actualOutputSubscriber.onCompleted(); } @Override public void onError(Throwable e) { actualOutputSubscriber.onError(e); } @Override public void onNext(JsonPipelineOutput t) { emissionStopwatch.elapsed(TimeUnit.MILLISECONDS); actualOutputSubscriber.onNext(t); } }); } }); return wrappedOutputObservable; } }
extended PerformanceMetricsTransformer to also be able to measure Operators and Observables
impl/src/main/java/io/wcm/caravan/pipeline/impl/operators/PerformanceMetricsTransformer.java
extended PerformanceMetricsTransformer to also be able to measure Operators and Observables
<ide><path>mpl/src/main/java/io/wcm/caravan/pipeline/impl/operators/PerformanceMetricsTransformer.java <ide> */ <ide> package io.wcm.caravan.pipeline.impl.operators; <ide> <del>import io.wcm.caravan.pipeline.JsonPipelineOutput; <del> <ide> import java.util.concurrent.TimeUnit; <add> <add>import org.slf4j.Logger; <add>import org.slf4j.LoggerFactory; <ide> <ide> import rx.Observable; <ide> import rx.Observable.OnSubscribe; <add>import rx.Observable.Operator; <ide> import rx.Observable.Transformer; <ide> import rx.Observer; <del>import rx.Subscriber; <ide> <ide> import com.google.common.base.Stopwatch; <ide> import com.google.common.base.Ticker; <ide> <ide> /** <del> * Prototype for a more rx-java like way to measure how much time a Transformer needs for subscription and handling of <del> * emissions <add> * A class that measures the time spent by a specific {@link Transformer}, {@link Operator} or {@link Observable}. <add> * Usage of this class is limited to observables with a single emission <ide> */ <del>final class PerformanceMetricsTransformer implements Transformer<JsonPipelineOutput, JsonPipelineOutput> { <del> <del> private final Transformer<JsonPipelineOutput, JsonPipelineOutput> toMeasure; <add>public final class PerformanceMetricsTransformer<T> implements Transformer<T, T> { <add> <add> private static final Logger log = LoggerFactory.getLogger(PerformanceMetricsTransformer.class); <add> <add> private final Transformer<T, T> toMeasure; <ide> <ide> private final Stopwatch subscriptionStopwatch; <add> private final Stopwatch observationStopwatch; <ide> private final Stopwatch emissionStopwatch; <ide> <del> public PerformanceMetricsTransformer(Transformer<JsonPipelineOutput, JsonPipelineOutput> toMeasure, Ticker ticker) { <add> /** <add> * constructor for unit-tests that allows you to specify a custom ticker. For general usage use <add> * {@link #withSystemTicker(Transformer)} <add> * @param toMeasure the transformer that should be measured <add> * @param ticker the time source <add> */ <add> PerformanceMetricsTransformer(Transformer<T, T> toMeasure, Ticker ticker) { <ide> super(); <ide> this.toMeasure = toMeasure; <ide> <ide> subscriptionStopwatch = Stopwatch.createUnstarted(ticker); <add> observationStopwatch = Stopwatch.createUnstarted(ticker); <ide> emissionStopwatch = Stopwatch.createUnstarted(ticker); <ide> } <ide> <del> @Override <del> public Observable<JsonPipelineOutput> call(Observable<JsonPipelineOutput> dataSource) { <del> <del> Observable<JsonPipelineOutput> wrappedInputObservable = Observable.create(new OnSubscribe<JsonPipelineOutput>() { <add> /** <add> * constructor for unit-tests that allows you to specify a custom ticker. For general usage use <add> * {@link #withSystemTicker(Operator)} <add> * @param toMeasure the operator that should be measured <add> * @param ticker the time source <add> */ <add> PerformanceMetricsTransformer(Operator<T, T> toMeasure, Ticker ticker) { <add> <add> // if the subject to be measured is an operator we wrap it in a trivial transformer <add> this(new Transformer<T, T>() { <ide> <ide> @Override <del> public void call(Subscriber<? 
super JsonPipelineOutput> subscriberToMeasure) { <del> <del> subscriptionStopwatch.stop(); <del> <del> dataSource.subscribe(new Observer<JsonPipelineOutput>() { <del> <del> @Override <del> public void onCompleted() { <del> subscriberToMeasure.onCompleted(); <del> } <del> <del> @Override <del> public void onError(Throwable e) { <del> subscriberToMeasure.onError(e); <del> } <del> <del> @Override <del> public void onNext(JsonPipelineOutput t) { <del> emissionStopwatch.start(); <del> subscriberToMeasure.onNext(t); <del> } <del> }); <del> <del> } <del> }); <del> <del> Observable<JsonPipelineOutput> wrappedOutputObservable = Observable.create(new OnSubscribe<JsonPipelineOutput>() { <del> <del> @Override <del> public void call(Subscriber<? super JsonPipelineOutput> actualOutputSubscriber) { <del> <del> Observable<JsonPipelineOutput> observableToMeasure = toMeasure.call(wrappedInputObservable); <del> <del> subscriptionStopwatch.start(); <del> observableToMeasure.subscribe(new Observer<JsonPipelineOutput>() { <del> <del> @Override <del> public void onCompleted() { <del> actualOutputSubscriber.onCompleted(); <del> } <del> <del> @Override <del> public void onError(Throwable e) { <del> actualOutputSubscriber.onError(e); <del> } <del> <del> @Override <del> public void onNext(JsonPipelineOutput t) { <del> emissionStopwatch.elapsed(TimeUnit.MILLISECONDS); <del> actualOutputSubscriber.onNext(t); <del> } <add> public Observable<T> call(Observable<T> sourceObservable) { <add> return Observable.create(subscriber -> { <add> // apply the operator to measure to the source observable before subscribing <add> sourceObservable.lift(toMeasure).subscribe(subscriber); <ide> }); <ide> } <add> <add> }, ticker); <add> } <add> <add> /** <add> * constructor for unit-tests that allows you to specify a custom ticker. 
For general usage use <add> * {@link #withSystemTicker(Observable)} <add> * @param toMeasure the operator that should be measured <add> * @param ticker the time source <add> */ <add> PerformanceMetricsTransformer(Observable<T> toMeasure, Ticker ticker) { <add> <add> // if the subject to be measured is an operator we wrap it in a trivial transformer <add> this(new Transformer<T, T>() { <add> <add> @Override <add> public Observable<T> call(Observable<T> sourceObservable) { <add> return sourceObservable; <add> } <add> }, ticker); <add> <add> } <add> <add> /** <add> * @param toMeasure the transformer that should be measured <add> * @return a PerformanceMetricsTransformer that uses the {@link Ticker#systemTicker()} <add> */ <add> public static <T> PerformanceMetricsTransformer<T> withSystemTicker(Transformer<T, T> toMeasure) { <add> return new PerformanceMetricsTransformer<T>(toMeasure, Ticker.systemTicker()); <add> } <add> <add> /** <add> * @param toMeasure the operator that should be measured <add> * @return a PerformanceMetricsTransformer that uses the {@link Ticker#systemTicker()} <add> */ <add> public static <T> PerformanceMetricsTransformer<T> withSystemTicker(Operator<T, T> toMeasure) { <add> return new PerformanceMetricsTransformer<T>(toMeasure, Ticker.systemTicker()); <add> } <add> <add> /** <add> * @param toMeasure the observable that should be measured <add> * @return a PerformanceMetricsTransformer that uses the {@link Ticker#systemTicker()} <add> */ <add> public static <T> PerformanceMetricsTransformer<T> withSystemTicker(Observable<T> toMeasure) { <add> return new PerformanceMetricsTransformer<T>(toMeasure, Ticker.systemTicker()); <add> } <add> <add> @Override <add> public Observable<T> call(Observable<T> dataSource) { <add> <add> Observable<T> wrappedInputObservable = Observable.create(subscriberToMeasure -> { <add> <add> // 3. this is called after the subscriber to measure has finished its job and subscribes to this input observable <add> if (subscriptionStopwatch.isRunning()) { <add> subscriptionStopwatch.stop(); <add> } <add> <add> // 4. we now start the stop watch that measures how long the actual data source takes between subscription and emission <add> observationStopwatch.start(); <add> dataSource.subscribe(new Observer<T>() { <add> <add> @Override <add> public void onCompleted() { <add> subscriberToMeasure.onCompleted(); <add> } <add> <add> @Override <add> public void onError(Throwable e) { <add> subscriberToMeasure.onError(e); <add> } <add> <add> @Override <add> public void onNext(T emission) { <add> <add> // 5. the source data is available, we now stop the observation stop watch <add> if (observationStopwatch.isRunning()) { <add> observationStopwatch.stop(); <add> } <add> <add> // 6. now start the stop watch to measure the time it takes for the transfomer to handle the data <add> emissionStopwatch.start(); <add> subscriberToMeasure.onNext(emission); <add> } <add> }); <ide> }); <ide> <del> return wrappedOutputObservable; <add> Observable<T> wrappedOutputObservable = Observable.create(actualOutputSubscriber -> { <add> <add> Observable<T> observableToMeasure = toMeasure.call(wrappedInputObservable); <add> <add> // 1. start the subscription stop watch <add> subscriptionStopwatch.start(); <add> <add> // 2. 
subscribing here will call onSubscribe function of the transformer that we want to measure <add> observableToMeasure.subscribe(new Observer<T>() { <add> <add> @Override <add> public void onCompleted() { <add> actualOutputSubscriber.onCompleted(); <add> } <add> <add> @Override <add> public void onError(Throwable e) { <add> actualOutputSubscriber.onError(e); <add> } <add> <add> @Override <add> public void onNext(T emission) { <add> // 7. the transformer to measure has finished processing the data, so we stop the emission stop watch <add> if (emissionStopwatch.isRunning()) { <add> emissionStopwatch.stop(); <add> } <add> <add> log.info("subscription=" + getSubscriptionMillis() + "ms, observation=" + getObservationMillis() + "ms, emission: " + getEmissionMillis() + "ms"); <add> <add> actualOutputSubscriber.onNext(emission); <add> } <add> }); <add> }); <add> <add> return wrappedOutputObservable.cache(); <add> } <add> <add> <add> /** <add> * Calculate the time spent during the {@link OnSubscribe} function of the {@link Transformer} to measure. Will be 0 <add> * if you are using this class to measure a {@link Operator} or {@link Observable} <add> * @return the time in milliseconds spent subscribing onto the source observable <add> */ <add> public long getSubscriptionMillis() { <add> return this.subscriptionStopwatch.elapsed(TimeUnit.MILLISECONDS); <add> } <add> <add> /** <add> * Calculate the time spent waiting for the source observable to emit the result <add> * @return the time in milliseconds spent waiting for the source observable <add> */ <add> public long getObservationMillis() { <add> return this.observationStopwatch.elapsed(TimeUnit.MILLISECONDS); <add> } <add> <add> /** <add> * Calculate the time spent processing the emission of the source observable <add> * @return the time in milliseconds spent processing the emission <add> */ <add> public long getEmissionMillis() { <add> return this.emissionStopwatch.elapsed(TimeUnit.MILLISECONDS); <ide> } <ide> }
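To make the measurement flow above concrete, here is a hypothetical usage sketch. Only withSystemTicker(...) and the three getter methods come from the class shown; compose() and subscribe() are the standard RxJava 1.x calls for applying a Transformer, and the 'source' and 'pipelineStep' names are assumptions for illustration. As the class javadoc says, it is only meaningful for observables with a single emission, and the returned observable is cached, so the timings refer to the first subscription.

import rx.Observable;
import rx.Observable.Transformer;

// Assumes this sketch lives in (or imports) the io.wcm.caravan.pipeline.impl.operators package.
class MetricsUsageSketch {

    // 'pipelineStep' stands in for any Transformer<T, T> worth profiling,
    // 'source' for the observable it is normally composed onto.
    static <T> void measureOnce(Observable<T> source, Transformer<T, T> pipelineStep) {
        PerformanceMetricsTransformer<T> metrics =
                PerformanceMetricsTransformer.withSystemTicker(pipelineStep);

        // compose() wires the wrapped transformer into the chain; the stopwatch
        // values are populated once the single expected emission has been processed.
        source.compose(metrics).subscribe(emission ->
                System.out.println("subscription=" + metrics.getSubscriptionMillis()
                        + "ms, observation=" + metrics.getObservationMillis()
                        + "ms, emission=" + metrics.getEmissionMillis() + "ms"));
    }
}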
JavaScript
mit
c63a7bb3acdabeb936c80b80421844fc9a94b010
0
sergiolepore/hexo-plugin-api
/** * waterlock * * defines various options used by waterlock * for more informaiton checkout * * http://waterlock.ninja/documentation */ module.exports.waterlock = { // Base URL // // used by auth methods for callback URI's using oauth and for password // reset links. baseUrl: process.env.BASE_URL || 'http://localhost:1337', // Auth Method(s) // // this can be a single string, an object, or an array of objects for your // chosen auth method(s) you will need to see the individual module's README // file for more information on the attributes necessary. This is an example // of the local authentication method with password reset tokens disabled. authMethod: [ { name:'waterlock-local-auth', passwordReset:{ tokens: false, mail: { protocol: 'SMTP', options:{ service: process.env.MAIL_SERVICE || 'Gmail', auth: { user: process.env.MAIL_USER || '[email protected]', pass: process.env.MAIL_PASS || 'userpass' } }, from: '[email protected]', subject: 'Your password reset!', forwardUrl: process.env.BASE_URL || 'http://localhost:1337' }, template:{ file: '../views/email/reset.jade', vars:{} } }, createOnNotFound: false } ], // JSON Web Tokens // // this provides waterlock with basic information to build your tokens, // these tokens are used for authentication, password reset, // and anything else you can imagine jsonWebTokens:{ secret: process.env.JWT_SECRET || 'oiQWC0ioW4x68a1cIA966A6IM6PbJgKZ', expiry:{ unit: 'days', length: '30' }, audience: 'hexo-plugin-api', subject: 'subject' }, // Post Actions // // Lets waterlock know how to handle different login/logout // attempt outcomes. postActions:{ // post login event login: { // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' success: '/api/v1/user/jwt', // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' failure: 'default' }, //post logout event logout: { // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' success: 'default', // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' failure: 'default' } } };
config/waterlock.js
/** * waterlock * * defines various options used by waterlock * for more informaiton checkout * * http://waterlock.ninja/documentation */ module.exports.waterlock = { // Base URL // // used by auth methods for callback URI's using oauth and for password // reset links. baseUrl: process.env.BASE_URL || 'http://localhost:1337', // Auth Method(s) // // this can be a single string, an object, or an array of objects for your // chosen auth method(s) you will need to see the individual module's README // file for more information on the attributes necessary. This is an example // of the local authentication method with password reset tokens disabled. authMethod: [ { name:'waterlock-local-auth', passwordReset:{ tokens: false, mail: { protocol: 'SMTP', options:{ service: process.env.MAIL_SERVICE || 'Gmail', auth: { user: process.env.MAIL_USER || '[email protected]', pass: process.env.MAIL_PASS || 'userpass' } }, from: '[email protected]', subject: 'Your password reset!', forwardUrl: process.env.BASE_URL || 'http://localhost:1337' }, template:{ file: '../views/email/reset.jade', vars:{} } }, createOnNotFound: false } ], // JSON Web Tokens // // this provides waterlock with basic information to build your tokens, // these tokens are used for authentication, password reset, // and anything else you can imagine jsonWebTokens:{ secret: process.env.JWT_SECRET || 'oiQWC0ioW4x68a1cIA966A6IM6PbJgKZ', expiry:{ unit: 'days', length: '30' }, audience: 'hexo-plugin-api', subject: 'subject' }, // Post Actions // // Lets waterlock know how to handle different login/logout // attempt outcomes. postActions:{ // post login event login: { // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' success: 'default', // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' failure: 'default' }, //post logout event logout: { // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' success: 'default', // This can be any one of the following // // url - 'http://example.com' // relativePath - '/blog/post' // obj - {controller: 'blog', action: 'post'} // string - 'custom json response string' // default - 'default' failure: 'default' } } };
Change Waterlock post login action. Now it will return the JWT for the logged-in user.
config/waterlock.js
Change Waterlock post login action
<ide><path>config/waterlock.js <ide> // obj - {controller: 'blog', action: 'post'} <ide> // string - 'custom json response string' <ide> // default - 'default' <del> success: 'default', <add> success: '/api/v1/user/jwt', <ide> <ide> // This can be any one of the following <ide> //
Java
apache-2.0
f72ae89e4334097c37a41e5d4c8f14b107b209fc
0
jesse-gallagher/frostillic.us-Blog,jesse-gallagher/frostillic.us-Blog,jesse-gallagher/frostillic.us-Blog,jesse-gallagher/frostillic.us-Blog
/** * Copyright © 2016-2018 Jesse Gallagher * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package security; import com.darwino.commons.httpclnt.HttpBase; import com.darwino.commons.httpclnt.HttpClient; import com.darwino.commons.util.StringUtil; import com.darwino.commons.util.io.Base64Util; import com.darwino.j2ee.servlet.authentication.handler.FormAuthHandler; import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; import javax.security.enterprise.AuthenticationException; import javax.security.enterprise.AuthenticationStatus; import javax.security.enterprise.authentication.mechanism.http.HttpAuthenticationMechanism; import javax.security.enterprise.authentication.mechanism.http.HttpMessageContext; import javax.security.enterprise.credential.UsernamePasswordCredential; import javax.security.enterprise.identitystore.CredentialValidationResult; import javax.security.enterprise.identitystore.IdentityStore; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; @ApplicationScoped public class DarwinoHttpAuthenticationMechanism implements HttpAuthenticationMechanism { @Inject IdentityStore identityStore; @Override public AuthenticationStatus validateRequest(HttpServletRequest request, HttpServletResponse response, HttpMessageContext httpMessageContext) throws AuthenticationException { // Check for basic auth first String authHeaderB64 = request.getHeader(HttpBase.HEADER_AUTHORIZATION); if(StringUtil.isNotEmpty(authHeaderB64) && authHeaderB64.startsWith("Basic ")) { String authHeader = new String(Base64Util.decodeBase64(authHeaderB64.substring(authHeaderB64.indexOf(' ') + 1))); int i = authHeader.indexOf(':'); if (i > 0) { String userName = authHeader.substring(0, i); String password = i == authHeader.length()-1 ? "" : authHeader.substring(i+1); CredentialValidationResult result = identityStore.validate(new UsernamePasswordCredential(userName, password)); return httpMessageContext.notifyContainerAboutLogin(result.getCallerPrincipal(), result.getCallerGroups()); } else { return httpMessageContext.responseUnauthorized(); } } // Failing that, check for form auth FormAuthHandler handler = new FormAuthHandler(); try { HttpClient.Authenticator auth = handler.readAuthentication(request, response); if(auth instanceof HttpClient.BasicAuthenticator) { HttpClient.BasicAuthenticator cred = (HttpClient.BasicAuthenticator)auth; CredentialValidationResult result = identityStore.validate(new UsernamePasswordCredential(cred.getUserName(), cred.getPassword())); return httpMessageContext.notifyContainerAboutLogin(result.getCallerPrincipal(), result.getCallerGroups()); } } catch (IOException | ServletException e) { e.printStackTrace(); throw new AuthenticationException(e); } return httpMessageContext.doNothing(); } }
frostillicus-blog/frostillicus-blog-j2ee/src/main/java/security/DarwinoHttpAuthenticationMechanism.java
/** * Copyright © 2016-2018 Jesse Gallagher * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package security; import com.darwino.commons.httpclnt.HttpClient; import com.darwino.j2ee.servlet.authentication.handler.FormAuthHandler; import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; import javax.security.enterprise.AuthenticationException; import javax.security.enterprise.AuthenticationStatus; import javax.security.enterprise.authentication.mechanism.http.HttpAuthenticationMechanism; import javax.security.enterprise.authentication.mechanism.http.HttpMessageContext; import javax.security.enterprise.credential.UsernamePasswordCredential; import javax.security.enterprise.identitystore.CredentialValidationResult; import javax.security.enterprise.identitystore.IdentityStore; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; @ApplicationScoped public class DarwinoHttpAuthenticationMechanism implements HttpAuthenticationMechanism { @Inject IdentityStore identityStore; @Override public AuthenticationStatus validateRequest(HttpServletRequest request, HttpServletResponse response, HttpMessageContext httpMessageContext) throws AuthenticationException { FormAuthHandler handler = new FormAuthHandler(); try { HttpClient.Authenticator auth = handler.readAuthentication(request, response); if(auth instanceof HttpClient.BasicAuthenticator) { HttpClient.BasicAuthenticator cred = (HttpClient.BasicAuthenticator)auth; CredentialValidationResult result = identityStore.validate(new UsernamePasswordCredential(cred.getUserName(), cred.getPassword())); httpMessageContext.notifyContainerAboutLogin(result.getCallerPrincipal(), result.getCallerGroups()); } } catch (IOException | ServletException e) { throw new AuthenticationException(e); } return httpMessageContext.doNothing(); } }
Add HTTP Basic auth support
frostillicus-blog/frostillicus-blog-j2ee/src/main/java/security/DarwinoHttpAuthenticationMechanism.java
Add HTTP Basic auth support
<ide><path>rostillicus-blog/frostillicus-blog-j2ee/src/main/java/security/DarwinoHttpAuthenticationMechanism.java <ide> */ <ide> package security; <ide> <add>import com.darwino.commons.httpclnt.HttpBase; <ide> import com.darwino.commons.httpclnt.HttpClient; <add>import com.darwino.commons.util.StringUtil; <add>import com.darwino.commons.util.io.Base64Util; <ide> import com.darwino.j2ee.servlet.authentication.handler.FormAuthHandler; <ide> <ide> import javax.enterprise.context.ApplicationScoped; <ide> <ide> @ApplicationScoped <ide> public class DarwinoHttpAuthenticationMechanism implements HttpAuthenticationMechanism { <del> <ide> @Inject <ide> IdentityStore identityStore; <ide> <ide> @Override <ide> public AuthenticationStatus validateRequest(HttpServletRequest request, HttpServletResponse response, HttpMessageContext httpMessageContext) throws AuthenticationException { <add> // Check for basic auth first <add> String authHeaderB64 = request.getHeader(HttpBase.HEADER_AUTHORIZATION); <add> if(StringUtil.isNotEmpty(authHeaderB64) && authHeaderB64.startsWith("Basic ")) { <add> String authHeader = new String(Base64Util.decodeBase64(authHeaderB64.substring(authHeaderB64.indexOf(' ') + 1))); <add> int i = authHeader.indexOf(':'); <add> if (i > 0) { <add> String userName = authHeader.substring(0, i); <add> String password = i == authHeader.length()-1 ? "" : authHeader.substring(i+1); <add> CredentialValidationResult result = identityStore.validate(new UsernamePasswordCredential(userName, password)); <add> return httpMessageContext.notifyContainerAboutLogin(result.getCallerPrincipal(), result.getCallerGroups()); <add> } else { <add> return httpMessageContext.responseUnauthorized(); <add> } <add> } <add> <add> // Failing that, check for form auth <ide> FormAuthHandler handler = new FormAuthHandler(); <ide> try { <ide> HttpClient.Authenticator auth = handler.readAuthentication(request, response); <ide> if(auth instanceof HttpClient.BasicAuthenticator) { <ide> HttpClient.BasicAuthenticator cred = (HttpClient.BasicAuthenticator)auth; <ide> CredentialValidationResult result = identityStore.validate(new UsernamePasswordCredential(cred.getUserName(), cred.getPassword())); <del> httpMessageContext.notifyContainerAboutLogin(result.getCallerPrincipal(), result.getCallerGroups()); <add> return httpMessageContext.notifyContainerAboutLogin(result.getCallerPrincipal(), result.getCallerGroups()); <ide> } <ide> } catch (IOException | ServletException e) { <add> e.printStackTrace(); <ide> throw new AuthenticationException(e); <ide> } <add> <ide> return httpMessageContext.doNothing(); <ide> } <ide> }
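The essential new logic in this commit is decoding the "Authorization: Basic" header into a user name and password before falling back to form authentication. Below is a minimal, framework-free sketch of that parsing step using java.util.Base64 instead of the Darwino StringUtil/Base64Util helpers; the class and method names are illustrative, not part of the blog code.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

/** Sketch of splitting an "Authorization: Basic ..." header into user name and password. */
final class BasicAuthHeader {

    /** @return a two-element {user, password} array, or null if the header is absent or not Basic auth */
    static String[] parse(String authorizationHeader) {
        if (authorizationHeader == null || !authorizationHeader.startsWith("Basic ")) {
            return null;
        }
        String encoded = authorizationHeader.substring("Basic ".length()).trim();
        // decode() throws IllegalArgumentException on malformed Base64, which a real mechanism should handle
        String decoded = new String(Base64.getDecoder().decode(encoded), StandardCharsets.UTF_8);
        int colon = decoded.indexOf(':');
        if (colon <= 0) {
            return null; // a non-empty user name is required before the colon
        }
        String user = decoded.substring(0, colon);
        String password = colon == decoded.length() - 1 ? "" : decoded.substring(colon + 1);
        return new String[] { user, password };
    }
}

In the mechanism above, the extracted pair is then handed to the injected IdentityStore as a UsernamePasswordCredential, and the form-based handler is consulted only when no Basic header is present.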
Java
agpl-3.0
e05fc2e14c807679b7be8cb818a045750a57b814
0
TheLanguageArchive/Arbil,TheLanguageArchive/Arbil,TheLanguageArchive/Arbil,TheLanguageArchive/Arbil,TheLanguageArchive/Arbil
package nl.mpi.arbil; import nl.mpi.arbil.data.ImdiTreeObject; import java.awt.AWTEvent; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Dimension; import java.awt.FileDialog; import java.awt.GraphicsEnvironment; import java.awt.Rectangle; import java.awt.Toolkit; import java.awt.event.AWTEventListener; import java.awt.event.KeyEvent; import java.io.File; import java.io.FilenameFilter; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashSet; import java.util.Hashtable; import java.util.Vector; import javax.swing.JDesktopPane; import javax.swing.JEditorPane; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JInternalFrame; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.event.InternalFrameAdapter; import javax.swing.event.InternalFrameEvent; import javax.swing.filechooser.FileFilter; import nl.mpi.arbil.data.ImdiLoader; /** * Document : LinorgWindowManager * Created on : * @author [email protected] */ public class LinorgWindowManager { Hashtable<String, Component[]> windowList = new Hashtable<String, Component[]>(); Hashtable windowStatesHashtable; public JDesktopPane desktopPane; //TODO: this is public for the dialog boxes to use, but will change when the strings are loaded from the resources public JFrame linorgFrame; int nextWindowX = 50; int nextWindowY = 50; int nextWindowWidth = 800; int nextWindowHeight = 600; private Hashtable<String, String> messageDialogQueue = new Hashtable<String, String>(); private boolean messagesCanBeShown = false; boolean showMessageThreadrunning = false; static private LinorgWindowManager singleInstance = null; static synchronized public LinorgWindowManager getSingleInstance() { // System.out.println("LinorgWindowManager getSingleInstance"); if (singleInstance == null) { singleInstance = new LinorgWindowManager(); } return singleInstance; } private LinorgWindowManager() { desktopPane = new JDesktopPane(); desktopPane.setBackground(new java.awt.Color(204, 204, 204)); ArbilDragDrop.getSingleInstance().addTransferHandler(desktopPane); } public void loadGuiState(JFrame linorgFrameLocal) { linorgFrame = linorgFrameLocal; try { // load the saved states windowStatesHashtable = (Hashtable) LinorgSessionStorage.getSingleInstance().loadObject("windowStates"); // set the main window position and size linorgFrame.setExtendedState((Integer) windowStatesHashtable.get("linorgFrameExtendedState")); if (linorgFrame.getExtendedState() == JFrame.ICONIFIED) { // start up iconified is just too confusing to the user linorgFrame.setExtendedState(JFrame.NORMAL); } // if the application was maximised when it was last closed then these values will not be set and this will through setting the size in the catch Object linorgFrameBounds = windowStatesHashtable.get("linorgFrameBounds"); linorgFrame.setBounds((Rectangle) linorgFrameBounds); if (windowStatesHashtable.containsKey("ScreenDeviceCount")) { int screenDeviceCount = ((Integer) windowStatesHashtable.get("ScreenDeviceCount")); if (screenDeviceCount > GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices().length) { linorgFrame.setLocationRelativeTo(null); // make sure the main frame is visible. for instance when a second monitor has been removed. 
Dimension screenDimension = Toolkit.getDefaultToolkit().getScreenSize(); if (linorgFrame.getBounds().intersects(new Rectangle(screenDimension))) { linorgFrame.setBounds(linorgFrame.getBounds().intersection(new Rectangle(screenDimension))); } else { linorgFrame.setBounds(0, 0, 800, 600); linorgFrame.setLocationRelativeTo(null); } } } } catch (Exception ex) { System.out.println("load windowStates failed: " + ex.getMessage()); System.out.println("setting default windowStates"); windowStatesHashtable = new Hashtable(); linorgFrame.setBounds(0, 0, 800, 600); linorgFrame.setLocationRelativeTo(null); linorgFrame.setExtendedState(JFrame.MAXIMIZED_BOTH); } // set the split pane positions loadSplitPlanes(linorgFrame.getContentPane().getComponent(0)); } public void openAboutPage() { LinorgVersion linorgVersion = new LinorgVersion(); String messageString = "Archive Builder\n" + "A local tool for organising linguistic data.\n" + "Max Planck Institute for Psycholinguistics\n" + "Application design and programming by Peter Withers\n" + "Arbil also uses components of the IMDI API and Lamus Type Checker\n" + "Version: " + linorgVersion.currentMajor + "." + linorgVersion.currentMinor + "." + linorgVersion.currentRevision + "\n" + linorgVersion.lastCommitDate + "\n" + "Compile Date: " + linorgVersion.compileDate + "\n"; JOptionPane.showMessageDialog(linorgFrame, messageString, "About Arbil", JOptionPane.PLAIN_MESSAGE); } public void offerUserToSaveChanges() throws Exception { if (ImdiLoader.getSingleInstance().nodesNeedSave()) { if (JOptionPane.OK_OPTION == JOptionPane.showConfirmDialog(LinorgWindowManager.getSingleInstance().linorgFrame, "There are unsaved changes.\nSave now?", "Save Changes", JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE)) { ImdiLoader.getSingleInstance().saveNodesNeedingSave(true); } else { throw new Exception("user canceled save action"); } } } public File showEmptyExportDirectoryDialogue(String titleText) { boolean fileSelectDone = false; try { while (!fileSelectDone) { File[] selectedFiles = LinorgWindowManager.getSingleInstance().showFileSelectBox(titleText + " Destination Directory", true, false, false); if (selectedFiles != null && selectedFiles.length > 0) { File destinationDirectory = selectedFiles[0]; if (!destinationDirectory.exists()/* && parentDirectory.getParentFile().exists()*/) { // create the directory provided that the parent directory exists // ths is here due the the way the mac file select gui leads the user to type in a new directory name destinationDirectory.mkdirs(); } if (!destinationDirectory.exists()) { JOptionPane.showMessageDialog(linorgFrame, "The export directory\n\"" + destinationDirectory + "\"\ndoes not exist.\nPlease select or create a directory.", titleText, JOptionPane.PLAIN_MESSAGE); } else { // if (!createdDirectory) { // String newDirectoryName = JOptionPane.showInputDialog(linorgFrame, "Enter Export Name", titleText, JOptionPane.PLAIN_MESSAGE, null, null, "arbil_export").toString(); // try { // destinationDirectory = new File(parentDirectory.getCanonicalPath() + File.separatorChar + newDirectoryName); // destinationDirectory.mkdir(); // } catch (Exception e) { // JOptionPane.showMessageDialog(LinorgWindowManager.getSingleInstance().linorgFrame, "Could not create the export directory + \'" + newDirectoryName + "\'", titleText, JOptionPane.PLAIN_MESSAGE); // } // } if (destinationDirectory != null && destinationDirectory.exists()) { if (destinationDirectory.list().length == 0) { fileSelectDone = true; return destinationDirectory; } else { if 
(showMessageDialogBox("The selected export directory is not empty.\nTo continue will merge and may overwrite files.\nDo you want to continue?", titleText)) { return destinationDirectory; } //JOptionPane.showMessageDialog(LinorgWindowManager.getSingleInstance().linorgFrame, "The export directory must be empty", titleText, JOptionPane.PLAIN_MESSAGE); } } } } else { fileSelectDone = true; } } } catch (Exception e) { System.out.println("aborting export: " + e.getMessage()); } return null; } public File[] showFileSelectBox(String titleText, boolean directorySelectOnly, boolean multipleSelect, boolean requireMetadataFiles) { // test for os: if mac or file then awt else for other and directory use swing // save/load last directory accoring to the title of the dialogue //Hashtable<String, File> fileSelectLocationsHashtable; File workingDirectory = null; String workingDirectoryPathString = LinorgSessionStorage.getSingleInstance().loadString("fileSelect." + titleText); if (workingDirectoryPathString == null) { workingDirectory = new File(System.getProperty("user.home")); } else { workingDirectory = new File(workingDirectoryPathString); } File lastUsedWorkingDirectory; File[] returnFile; boolean isMac = true; // TODO: set this correctly boolean useAtwSelect = false; //directorySelectOnly && isMac && !multipleSelect; if (useAtwSelect) { if (directorySelectOnly) { System.setProperty("apple.awt.fileDialogForDirectories", "true"); } else { System.setProperty("apple.awt.fileDialogForDirectories", "false"); } FileDialog fileDialog = new FileDialog(linorgFrame); if (requireMetadataFiles) { fileDialog.setFilenameFilter(new FilenameFilter() { public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".imdi"); } }); } fileDialog.setDirectory(workingDirectory.getAbsolutePath()); fileDialog.setVisible(true); String selectedFile = fileDialog.getFile(); lastUsedWorkingDirectory = new File(fileDialog.getDirectory()); if (selectedFile != null) { returnFile = new File[]{new File(selectedFile)}; } else { returnFile = null; } } else { JFileChooser fileChooser = new JFileChooser(); if (requireMetadataFiles) { FileFilter imdiFileFilter = new FileFilter() { public String getDescription() { return "IMDI"; } @Override public boolean accept(File selectedFile) { // the test for exists is unlikey to do anything here, paricularly regarding the Mac dialogues text entry field return (selectedFile.exists() && (selectedFile.isDirectory() || selectedFile.getName().toLowerCase().endsWith(".imdi"))); } }; fileChooser.addChoosableFileFilter(imdiFileFilter); } if (directorySelectOnly) { // this filter is only cosmetic but gives the user an indication of what to select FileFilter imdiFileFilter = new FileFilter() { public String getDescription() { return "Directories"; } @Override public boolean accept(File selectedFile) { return (selectedFile.exists() && selectedFile.isDirectory()); } }; fileChooser.addChoosableFileFilter(imdiFileFilter); } if (directorySelectOnly) { fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); } else { fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); } fileChooser.setCurrentDirectory(workingDirectory); fileChooser.setMultiSelectionEnabled(multipleSelect); if (JFileChooser.APPROVE_OPTION == fileChooser.showDialog(LinorgWindowManager.getSingleInstance().linorgFrame, titleText)) { returnFile = fileChooser.getSelectedFiles(); if (returnFile.length == 0) { returnFile = new File[]{fileChooser.getSelectedFile()}; } } else { returnFile = null; } if (returnFile != null && 
returnFile.length == 1 && !returnFile[0].exists()) { // if the selected file does not exist then the "unusable" mac file select is usually to blame so try to clean up returnFile[0] = returnFile[0].getParentFile(); // if the result still does not exist then abort the select by returning null if (!returnFile[0].exists()) { returnFile = null; } } lastUsedWorkingDirectory = fileChooser.getCurrentDirectory(); } // save last use working directory LinorgSessionStorage.getSingleInstance().saveString("fileSelect." + titleText, lastUsedWorkingDirectory.getAbsolutePath()); return returnFile; } public boolean showMessageDialogBox(String messageString, String messageTitle) { if (messageTitle == null) { messageTitle = "Arbil"; } if (JOptionPane.OK_OPTION == JOptionPane.showConfirmDialog(LinorgWindowManager.getSingleInstance().linorgFrame, messageString, messageTitle, JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE)) { return true; } else { return false; } } public void addMessageDialogToQueue(String messageString, String messageTitle) { if (messageTitle == null) { messageTitle = "Arbil"; } String currentMessage = messageDialogQueue.get(messageTitle); if (currentMessage != null) { messageString = messageString + "\n-------------------------------\n" + currentMessage; } messageDialogQueue.put(messageTitle, messageString); showMessageDialogQueue(); } private synchronized void showMessageDialogQueue() { if (!showMessageThreadrunning) { new Thread("showMessageThread") { public void run() { try { sleep(100); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); } showMessageThreadrunning = true; if (messagesCanBeShown) { while (messageDialogQueue.size() > 0) { String messageTitle = messageDialogQueue.keys().nextElement(); String messageText = messageDialogQueue.remove(messageTitle); if (messageText != null) { JOptionPane.showMessageDialog(LinorgWindowManager.getSingleInstance().linorgFrame, messageText, messageTitle, JOptionPane.PLAIN_MESSAGE); } } } showMessageThreadrunning = false; } }.start(); } } public void openIntroductionPage() { // open the introduction page // TODO: always get this page from the server if available, but also save it for off line use // URL introductionUrl = this.getClass().getResource("/nl/mpi/arbil/resources/html/Introduction.html"); // openUrlWindowOnce("Introduction", introductionUrl); // get remote file to local disk // if local file exists then open that // else open the one in the jar file ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // The features html file has been limited to the version in the jar (not the server), so that it is specific to the version of linorg in the jar. 
// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // String remoteUrl = "http://www.mpi.nl/tg/j2se/jnlp/linorg/Features.html"; // String cachePath = GuiHelper.linorgSessionStorage.updateCache(remoteUrl, true); // System.out.println("cachePath: " + cachePath); // URL destinationUrl = null; // try { // if (new File(cachePath).exists()) { // destinationUrl = new File(cachePath).toURL(); // } // } catch (Exception ex) { // } // if (destinationUrl == null) { // destinationUrl = this.getClass().getResource("/nl/mpi/arbil/resources/html/Features.html"); //// } // System.out.println("destinationUrl: " + destinationUrl); // openUrlWindowOnce("Features/Known Bugs", destinationUrl); try { // load the saved windows Hashtable windowListHashtable = (Hashtable) LinorgSessionStorage.getSingleInstance().loadObject("openWindows"); for (Enumeration windowNamesEnum = windowListHashtable.keys(); windowNamesEnum.hasMoreElements();) { String currentWindowName = windowNamesEnum.nextElement().toString(); System.out.println("currentWindowName: " + currentWindowName); Vector imdiURLs = (Vector) windowListHashtable.get(currentWindowName); // System.out.println("imdiEnumeration: " + imdiEnumeration); ImdiTreeObject[] imdiObjectsArray = new ImdiTreeObject[imdiURLs.size()]; for (int arrayCounter = 0; arrayCounter < imdiObjectsArray.length; arrayCounter++) { try { imdiObjectsArray[arrayCounter] = (ImdiLoader.getSingleInstance().getImdiObject(null, new URI(imdiURLs.elementAt(arrayCounter).toString()))); } catch (URISyntaxException ex) { GuiHelper.linorgBugCatcher.logError(ex); } } openFloatingTable(imdiObjectsArray, currentWindowName); //openFloatingTable(null, currentWindowName); } System.out.println("done loading windowStates"); } catch (Exception ex) { windowStatesHashtable = new Hashtable(); System.out.println("load windowStates failed: " + ex.getMessage()); } if (!TreeHelper.getSingleInstance().locationsHaveBeenAdded()) { System.out.println("no local locations found, showing help window"); LinorgHelp helpComponent = LinorgHelp.getSingleInstance(); if (null == focusWindow(LinorgHelp.helpWindowTitle)) { createWindow(LinorgHelp.helpWindowTitle, helpComponent); } helpComponent.setCurrentPage(LinorgHelp.IntroductionPage); } startKeyListener(); messagesCanBeShown = true; showMessageDialogQueue(); } public void loadSplitPlanes(Component targetComponent) { //System.out.println("loadSplitPlanes: " + targetComponent); if (targetComponent instanceof JSplitPane) { System.out.println("loadSplitPlanes: " + targetComponent.getName()); Object linorgSplitPosition = windowStatesHashtable.get(targetComponent.getName()); if (linorgSplitPosition instanceof Integer) { System.out.println(targetComponent.getName() + ": " + linorgSplitPosition); ((JSplitPane) targetComponent).setDividerLocation((Integer) linorgSplitPosition); } else { if (targetComponent.getName().equals("rightSplitPane")) { ((JSplitPane) targetComponent).setDividerLocation(150); } else { //leftSplitPane leftLocalSplitPane rightSplitPane) ((JSplitPane) targetComponent).setDividerLocation(200); } } for (Component childComponent : ((JSplitPane) targetComponent).getComponents()) { loadSplitPlanes(childComponent); } } if (targetComponent instanceof JPanel) { for (Component childComponent : ((JPanel) targetComponent).getComponents()) { loadSplitPlanes(childComponent); } } } public void saveSplitPlanes(Component targetComponent) { //System.out.println("saveSplitPlanes: " + 
targetComponent); if (targetComponent instanceof JSplitPane) { System.out.println("saveSplitPlanes: " + targetComponent.getName()); windowStatesHashtable.put(targetComponent.getName(), ((JSplitPane) targetComponent).getDividerLocation()); for (Component childComponent : ((JSplitPane) targetComponent).getComponents()) { saveSplitPlanes(childComponent); } } if (targetComponent instanceof JPanel) { for (Component childComponent : ((JPanel) targetComponent).getComponents()) { saveSplitPlanes(childComponent); } } } public void saveWindowStates() { // loop windowList and make a hashtable of window names with a vector of the imdinodes displayed, then save the hashtable try { // collect the main window size and position for saving if (linorgFrame.getExtendedState() != JFrame.MAXIMIZED_BOTH) { windowStatesHashtable.put("linorgFrameBounds", linorgFrame.getBounds()); } windowStatesHashtable.put("ScreenDeviceCount", GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices().length); windowStatesHashtable.put("linorgFrameExtendedState", linorgFrame.getExtendedState()); // collect the split pane positions for saving saveSplitPlanes(linorgFrame.getContentPane().getComponent(0)); // save the collected states LinorgSessionStorage.getSingleInstance().saveObject(windowStatesHashtable, "windowStates"); // save the windows Hashtable windowListHashtable = new Hashtable(); //(Hashtable) windowList.clone(); for (Enumeration windowNamesEnum = windowList.keys(); windowNamesEnum.hasMoreElements();) { String currentWindowName = windowNamesEnum.nextElement().toString(); System.out.println("currentWindowName: " + currentWindowName); // set the value of the windowListHashtable to be the imdi urls rather than the windows Object windowObject = ((Component[]) windowList.get(currentWindowName))[0]; try { if (windowObject != null) { Object currentComponent = ((JInternalFrame) windowObject).getContentPane().getComponent(0); if (currentComponent != null && currentComponent instanceof LinorgSplitPanel) { // if this table has no nodes then don't save it if (0 < ((LinorgSplitPanel) currentComponent).imdiTable.getRowCount()) { // System.out.println("windowObject: " + windowObject); // System.out.println("getContentPane: " + ((JInternalFrame) windowObject).getContentPane()); // System.out.println("getComponent: " + ((JInternalFrame) windowObject).getComponent(0)); // System.out.println("LinorgSplitPanel: " + ((LinorgSplitPanel)((JInternalFrame) windowObject).getContentPane())); // System.out.println("getContentPane: " + ((JInternalFrame) windowObject).getContentPane().getComponent(0)); Vector currentNodesVector = new Vector(); for (String currentUrlString : ((ImdiTableModel) ((LinorgSplitPanel) currentComponent).imdiTable.getModel()).getImdiNodesURLs()) { currentNodesVector.add(currentUrlString); } windowListHashtable.put(currentWindowName, currentNodesVector); System.out.println("saved"); } } } } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println("Exception: " + ex.getMessage()); } } // save the windows LinorgSessionStorage.getSingleInstance().saveObject(windowListHashtable, "openWindows"); System.out.println("saved windowStates"); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println("save windowStates exception: " + ex.getMessage()); } } private String addWindowToList(String windowName, JInternalFrame windowFrame) { int instanceCount = 0; String currentWindowName = windowName; while (windowList.containsKey(currentWindowName)) { currentWindowName = 
windowName + "(" + ++instanceCount + ")"; } JMenuItem windowMenuItem = new JMenuItem(); windowMenuItem.setText(currentWindowName); windowMenuItem.setName(currentWindowName); windowFrame.setName(currentWindowName); windowMenuItem.setActionCommand(currentWindowName); windowMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { try { focusWindow(evt.getActionCommand()); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); } } }); windowFrame.addInternalFrameListener(new InternalFrameAdapter() { @Override public void internalFrameClosed(InternalFrameEvent e) { String windowName = e.getInternalFrame().getName(); Component[] windowAndMenu = (Component[]) windowList.get(windowName); if (ArbilMenuBar.windowMenu != null) { ArbilMenuBar.windowMenu.remove(windowAndMenu[1]); } windowList.remove(windowName); super.internalFrameClosed(e); } }); windowList.put(currentWindowName, new Component[]{windowFrame, windowMenuItem}); if (ArbilMenuBar.windowMenu != null) { ArbilMenuBar.windowMenu.add(windowMenuItem); } return currentWindowName; } public void closeAllWindows() { for (JInternalFrame focusedWindow : desktopPane.getAllFrames()) { if (focusedWindow != null) { String windowName = focusedWindow.getName(); Component[] windowAndMenu = (Component[]) windowList.get(windowName); if (windowAndMenu != null && ArbilMenuBar.windowMenu != null) { ArbilMenuBar.windowMenu.remove(windowAndMenu[1]); } windowList.remove(windowName); desktopPane.remove(focusedWindow); } } desktopPane.repaint(); } public JInternalFrame focusWindow(String windowName) { if (windowList.containsKey(windowName)) { Object windowObject = ((Component[]) windowList.get(windowName))[0]; try { if (windowObject != null) { ((JInternalFrame) windowObject).setIcon(false); ((JInternalFrame) windowObject).setSelected(true); return (JInternalFrame) windowObject; } } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } } return null; } private void startKeyListener() { // desktopPane.addKeyListener(new KeyAdapter() { // // @Override // public void keyPressed(KeyEvent e) { // System.out.println("keyPressed"); // if (e.VK_W == e.getKeyCode()){ // System.out.println("VK_W"); // } // super.keyPressed(e); // } // // }); Toolkit.getDefaultToolkit().addAWTEventListener(new AWTEventListener() { public void eventDispatched(AWTEvent e) { boolean isKeybordRepeat = false; if (e instanceof KeyEvent) { // only consider key release events if (e.getID() == KeyEvent.KEY_RELEASED) { // work around for jvm in linux // due to the bug in the jvm for linux the keyboard repeats are shown as real key events, so we attempt to prevent ludicrous key events being used here KeyEvent nextPress = (KeyEvent) Toolkit.getDefaultToolkit().getSystemEventQueue().peekEvent(KeyEvent.KEY_PRESSED); if (nextPress != null) { // the next key event is at the same time as this event if ((nextPress.getWhen() == ((KeyEvent) e).getWhen())) { // the next key code is the same as this event if (((nextPress.getKeyCode() == ((KeyEvent) e).getKeyCode()))) { isKeybordRepeat = true; } } } // end work around for jvm in linux if (!isKeybordRepeat) { // System.out.println("KeyEvent.paramString: " + ((KeyEvent) e).paramString()); // System.out.println("KeyEvent.getWhen: " + ((KeyEvent) e).getWhen()); if ((((KeyEvent) e).isMetaDown() || ((KeyEvent) e).isControlDown()) && ((KeyEvent) e).getKeyCode() == KeyEvent.VK_W) { JInternalFrame[] windowsToClose; if (((KeyEvent) 
e).isShiftDown()) { windowsToClose = desktopPane.getAllFrames(); } else { windowsToClose = new JInternalFrame[]{desktopPane.getSelectedFrame()}; } for (JInternalFrame focusedWindow : windowsToClose) { if (focusedWindow != null) { String windowName = focusedWindow.getName(); Component[] windowAndMenu = (Component[]) windowList.get(windowName); if (windowAndMenu != null && ArbilMenuBar.windowMenu != null) { ArbilMenuBar.windowMenu.remove(windowAndMenu[1]); } windowList.remove(windowName); desktopPane.remove(focusedWindow); try { JInternalFrame[] allWindows = desktopPane.getAllFrames(); if (allWindows.length > 0) { JInternalFrame topMostWindow = allWindows[0]; if (topMostWindow != null) { System.out.println("topMostWindow: " + topMostWindow); topMostWindow.setIcon(false); topMostWindow.setSelected(true); } } } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } } } desktopPane.repaint(); } if ((((KeyEvent) e).getKeyCode() == KeyEvent.VK_TAB && ((KeyEvent) e).isControlDown())) { // the [meta `] is consumed by the operating system, the only way to enable the back quote key for window switching is to use separate windows and rely on the OS to do the switching // || (((KeyEvent) e).getKeyCode() == KeyEvent.VK_BACK_QUOTE && ((KeyEvent) e).isMetaDown()) try { JInternalFrame[] allWindows = desktopPane.getAllFrames(); int targetLayerInt; if (((KeyEvent) e).isShiftDown()) { allWindows[0].moveToBack(); targetLayerInt = 1; } else { targetLayerInt = allWindows.length - 1; } allWindows[targetLayerInt].setIcon(false); allWindows[targetLayerInt].setSelected(true); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } } if ((((KeyEvent) e).isMetaDown() || ((KeyEvent) e).isControlDown()) && ((KeyEvent) e).getKeyCode() == KeyEvent.VK_F) { JInternalFrame windowToSearch = desktopPane.getSelectedFrame(); //System.out.println(windowToSearch.getContentPane()); for (Component childComponent : windowToSearch.getContentPane().getComponents()) { // loop through all the child components in the window (there will probably only be one) if (childComponent instanceof LinorgSplitPanel) { ((LinorgSplitPanel) childComponent).showSearchPane(); } } } } } } } }, AWTEvent.KEY_EVENT_MASK); } public JInternalFrame createWindow(String windowTitle, Component contentsComponent) { JInternalFrame currentInternalFrame = new javax.swing.JInternalFrame(); currentInternalFrame.setLayout(new BorderLayout()); // GuiHelper.arbilDragDrop.addTransferHandler(currentInternalFrame); currentInternalFrame.add(contentsComponent, BorderLayout.CENTER); windowTitle = addWindowToList(windowTitle, currentInternalFrame); // set the new window size to be fully visible int tempWindowWidth, tempWindowHeight; if (desktopPane.getWidth() > nextWindowWidth) { tempWindowWidth = nextWindowWidth; } else { tempWindowWidth = desktopPane.getWidth() - 50; } if (desktopPane.getHeight() > nextWindowHeight) { tempWindowHeight = nextWindowHeight; } else { tempWindowHeight = desktopPane.getHeight() - 50; } if (tempWindowHeight < 100) { tempWindowHeight = 100; } currentInternalFrame.setSize(tempWindowWidth, tempWindowHeight); currentInternalFrame.setClosable(true); currentInternalFrame.setIconifiable(true); currentInternalFrame.setMaximizable(true); currentInternalFrame.setResizable(true); currentInternalFrame.setTitle(windowTitle); currentInternalFrame.setToolTipText(windowTitle); currentInternalFrame.setName(windowTitle); currentInternalFrame.setVisible(true); // 
selectedFilesFrame.setSize(destinationComp.getWidth(), 300); // selectedFilesFrame.setRequestFocusEnabled(false); // selectedFilesFrame.getContentPane().add(selectedFilesPanel, java.awt.BorderLayout.CENTER); // selectedFilesFrame.setBounds(0, 0, 641, 256); // destinationComp.add(selectedFilesFrame, javax.swing.JLayeredPane.DEFAULT_LAYER); // set the window position so that they are cascaded currentInternalFrame.setLocation(nextWindowX, nextWindowY); nextWindowX = nextWindowX + 10; nextWindowY = nextWindowY + 10; // TODO: it would be nice to use the JInternalFrame's title bar height to increment the position if (nextWindowX + tempWindowWidth > desktopPane.getWidth()) { nextWindowX = 0; } if (nextWindowY + tempWindowHeight > desktopPane.getHeight()) { nextWindowY = 0; } desktopPane.add(currentInternalFrame, 0); try { // prevent the frame focus process consuming mouse events that should be recieved by the jtable etc. currentInternalFrame.setSelected(true); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } return currentInternalFrame; } public JEditorPane openUrlWindowOnce(String frameTitle, URL locationUrl) { JEditorPane htmlDisplay = new JEditorPane(); htmlDisplay.setEditable(false); htmlDisplay.setContentType("text/html"); try { htmlDisplay.setPage(locationUrl); htmlDisplay.addHyperlinkListener(new LinorgHyperlinkListener()); //gridViewInternalFrame.setMaximum(true); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } JInternalFrame existingWindow = focusWindow(frameTitle); if (existingWindow == null) { // return openUrlWindow(frameTitle, htmlDisplay); JScrollPane jScrollPane6; jScrollPane6 = new javax.swing.JScrollPane(); jScrollPane6.setViewportView(htmlDisplay); createWindow(frameTitle, jScrollPane6); } else { ((JScrollPane) existingWindow.getContentPane().getComponent(0)).setViewportView(htmlDisplay); } return htmlDisplay; } public void openSearchTable(ImdiTreeObject[] selectedNodes, String frameTitle) { ImdiTableModel resultsTableModel = new ImdiTableModel(); ImdiTable imdiTable = new ImdiTable(resultsTableModel, frameTitle); LinorgSplitPanel imdiSplitPanel = new LinorgSplitPanel(imdiTable); JInternalFrame searchFrame = this.createWindow(frameTitle, imdiSplitPanel); searchFrame.add(new ImdiNodeSearchPanel(searchFrame, resultsTableModel, selectedNodes), BorderLayout.NORTH); imdiSplitPanel.setSplitDisplay(); imdiSplitPanel.addFocusListener(searchFrame); } public ImdiTableModel openFloatingTableOnce(URI[] rowNodesArray, String frameTitle) { ImdiTreeObject[] tableNodes = new ImdiTreeObject[rowNodesArray.length]; ArrayList<String> fieldPathsToHighlight = new ArrayList<String>(); for (int arrayCounter = 0; arrayCounter < rowNodesArray.length; arrayCounter++) { try { if (rowNodesArray[arrayCounter] != null) { ImdiTreeObject parentNode = ImdiLoader.getSingleInstance().getImdiObject(null, new URI(rowNodesArray[arrayCounter].toString().split("#")[0])); // parentNode.waitTillLoaded(); String fieldPath = rowNodesArray[arrayCounter].getFragment(); String parentNodeFragment = parentNode.nodeTemplate.getParentOfField(fieldPath); URI targetNode; // note that the url has already be encoded and so we must not use the separate parameter version of new URI otherwise it would be encoded again which we do not want if (parentNodeFragment.length() > 0) { targetNode = new URI(rowNodesArray[arrayCounter].toString().split("#")[0] + "#" + parentNodeFragment); } else { targetNode = new 
URI(rowNodesArray[arrayCounter].toString().split("#")[0]); } tableNodes[arrayCounter] = ImdiLoader.getSingleInstance().getImdiObject(null, targetNode); fieldPathsToHighlight.add(fieldPath); } } catch (URISyntaxException ex) { GuiHelper.linorgBugCatcher.logError(ex); } } ImdiTableModel targetTableModel = openFloatingTableOnce(tableNodes, frameTitle); targetTableModel.highlightMatchingFieldPaths(fieldPathsToHighlight.toArray(new String[]{})); return targetTableModel; } public ImdiTableModel openAllChildNodesInFloatingTableOnce(URI[] rowNodesArray, String frameTitle) { HashSet<ImdiTreeObject> tableNodes = new HashSet(); for (int arrayCounter = 0; arrayCounter < rowNodesArray.length; arrayCounter++) { // try { ImdiTreeObject currentNode = ImdiLoader.getSingleInstance().getImdiObject(null, rowNodesArray[arrayCounter]); tableNodes.add(currentNode); for (ImdiTreeObject currentChildNode : currentNode.getAllChildren()) { tableNodes.add(currentChildNode); } // } catch (URISyntaxException ex) { // GuiHelper.linorgBugCatcher.logError(ex); // } } return openFloatingTableOnce(tableNodes.toArray(new ImdiTreeObject[]{}), frameTitle); } public ImdiTableModel openFloatingTableOnce(ImdiTreeObject[] rowNodesArray, String frameTitle) { if (rowNodesArray.length == 1 && rowNodesArray[0] != null && rowNodesArray[0].isInfoLink) { try { if (rowNodesArray[0].getUrlString().toLowerCase().endsWith(".html") || rowNodesArray[0].getUrlString().toLowerCase().endsWith(".txt")) { openUrlWindowOnce(rowNodesArray[0].toString(), rowNodesArray[0].getURI().toURL()); return null; } } catch (MalformedURLException exception) { GuiHelper.linorgBugCatcher.logError(exception); } } // open find a table containing exactly the same nodes as requested or create a new table for (Component[] currentWindow : windowList.values().toArray(new Component[][]{})) { // loop through all the windows for (Component childComponent : ((JInternalFrame) currentWindow[0]).getContentPane().getComponents()) { // loop through all the child components in the window (there will probably only be one) if (childComponent instanceof LinorgSplitPanel) { // only consider components with a LinorgSplitPanel ImdiTableModel currentTableModel = (ImdiTableModel) ((LinorgSplitPanel) childComponent).imdiTable.getModel(); if (currentTableModel.getImdiNodeCount() == rowNodesArray.length) { // first check that the number of nodes in the table matches boolean tableMatches = true; for (ImdiTreeObject currentItem : rowNodesArray) { // compare each node for a verbatim match if (!currentTableModel.containsImdiNode(currentItem)) { // // ignore this window because the nodes do not match tableMatches = false; break; } } if (tableMatches) { // System.out.println("tableMatches"); try { ((JInternalFrame) currentWindow[0]).setIcon(false); ((JInternalFrame) currentWindow[0]).setSelected(true); return currentTableModel; } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); } } } } } } // if through the above process a table containing all and only the nodes requested has not been found then create a new table return openFloatingTable(rowNodesArray, frameTitle); } public ImdiTableModel openFloatingTable(ImdiTreeObject[] rowNodesArray, String frameTitle) { if (frameTitle == null) { if (rowNodesArray.length == 1) { frameTitle = rowNodesArray[0].toString(); } else { frameTitle = "Selection"; } } ImdiTableModel imdiTableModel = new ImdiTableModel(); ImdiTable imdiTable = new ImdiTable(imdiTableModel, frameTitle); LinorgSplitPanel imdiSplitPanel = new LinorgSplitPanel(imdiTable); 
imdiTableModel.addImdiObjects(rowNodesArray); imdiSplitPanel.setSplitDisplay(); JInternalFrame tableFrame = this.createWindow(frameTitle, imdiSplitPanel); imdiSplitPanel.addFocusListener(tableFrame); return imdiTableModel; } }
src/nl/mpi/arbil/LinorgWindowManager.java
package nl.mpi.arbil; import nl.mpi.arbil.data.ImdiTreeObject; import java.awt.AWTEvent; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Dimension; import java.awt.FileDialog; import java.awt.GraphicsEnvironment; import java.awt.Rectangle; import java.awt.Toolkit; import java.awt.event.AWTEventListener; import java.awt.event.KeyEvent; import java.io.File; import java.io.FilenameFilter; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashSet; import java.util.Hashtable; import java.util.Vector; import javax.swing.JDesktopPane; import javax.swing.JEditorPane; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JInternalFrame; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.event.InternalFrameAdapter; import javax.swing.event.InternalFrameEvent; import javax.swing.filechooser.FileFilter; import nl.mpi.arbil.data.ImdiLoader; /** * Document : LinorgWindowManager * Created on : * @author [email protected] */ public class LinorgWindowManager { Hashtable<String, Component[]> windowList = new Hashtable<String, Component[]>(); Hashtable windowStatesHashtable; public JDesktopPane desktopPane; //TODO: this is public for the dialog boxes to use, but will change when the strings are loaded from the resources public JFrame linorgFrame; int nextWindowX = 50; int nextWindowY = 50; int nextWindowWidth = 800; int nextWindowHeight = 600; private Hashtable<String, String> messageDialogQueue = new Hashtable<String, String>(); private boolean messagesCanBeShown = false; boolean showMessageThreadrunning = false; static private LinorgWindowManager singleInstance = null; static synchronized public LinorgWindowManager getSingleInstance() { // System.out.println("LinorgWindowManager getSingleInstance"); if (singleInstance == null) { singleInstance = new LinorgWindowManager(); } return singleInstance; } private LinorgWindowManager() { desktopPane = new JDesktopPane(); desktopPane.setBackground(new java.awt.Color(204, 204, 204)); ArbilDragDrop.getSingleInstance().addTransferHandler(desktopPane); } public void loadGuiState(JFrame linorgFrameLocal) { linorgFrame = linorgFrameLocal; try { // load the saved states windowStatesHashtable = (Hashtable) LinorgSessionStorage.getSingleInstance().loadObject("windowStates"); // set the main window position and size linorgFrame.setExtendedState((Integer) windowStatesHashtable.get("linorgFrameExtendedState")); if (linorgFrame.getExtendedState() == JFrame.ICONIFIED) { // start up iconified is just too confusing to the user linorgFrame.setExtendedState(JFrame.NORMAL); } // if the application was maximised when it was last closed then these values will not be set and this will through setting the size in the catch Object linorgFrameBounds = windowStatesHashtable.get("linorgFrameBounds"); linorgFrame.setBounds((Rectangle) linorgFrameBounds); if (windowStatesHashtable.containsKey("ScreenDeviceCount")) { int screenDeviceCount = ((Integer) windowStatesHashtable.get("ScreenDeviceCount")); if (screenDeviceCount > GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices().length) { linorgFrame.setLocationRelativeTo(null); // make sure the main frame is visible. for instance when a second monitor has been removed. 
Dimension screenDimension = Toolkit.getDefaultToolkit().getScreenSize(); if (linorgFrame.getBounds().intersects(new Rectangle(screenDimension))) { linorgFrame.setBounds(linorgFrame.getBounds().intersection(new Rectangle(screenDimension))); } else { linorgFrame.setBounds(0, 0, 800, 600); linorgFrame.setLocationRelativeTo(null); } } } } catch (Exception ex) { System.out.println("load windowStates failed: " + ex.getMessage()); System.out.println("setting default windowStates"); windowStatesHashtable = new Hashtable(); linorgFrame.setBounds(0, 0, 800, 600); linorgFrame.setLocationRelativeTo(null); linorgFrame.setExtendedState(JFrame.MAXIMIZED_BOTH); } // set the split pane positions loadSplitPlanes(linorgFrame.getContentPane().getComponent(0)); } public void openAboutPage() { LinorgVersion linorgVersion = new LinorgVersion(); String messageString = "Archive Builder\n" + "A local tool for organising linguistic data.\n" + "Max Planck Institute for Psycholinguistics\n" + "Application design and programming by Peter Withers\n" + "Arbil also uses components of the IMDI API and Lamus Type Checker\n" + "Version: " + linorgVersion.currentMajor + "." + linorgVersion.currentMinor + "." + linorgVersion.currentRevision + "\n" + linorgVersion.lastCommitDate + "\n" + "Compile Date: " + linorgVersion.compileDate + "\n"; JOptionPane.showMessageDialog(linorgFrame, messageString, "About Arbil", JOptionPane.PLAIN_MESSAGE); } public void offerUserToSaveChanges() throws Exception { if (ImdiLoader.getSingleInstance().nodesNeedSave()) { if (JOptionPane.OK_OPTION == JOptionPane.showConfirmDialog(LinorgWindowManager.getSingleInstance().linorgFrame, "There are unsaved changes.\nSave now?", "Save Changes", JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE)) { ImdiLoader.getSingleInstance().saveNodesNeedingSave(true); } else { throw new Exception("user canceled save action"); } } } public File showEmptyExportDirectoryDialogue(String titleText) { boolean fileSelectDone = false; try { while (!fileSelectDone) { File[] selectedFiles = LinorgWindowManager.getSingleInstance().showFileSelectBox(titleText + " Destination Directory", true, false, false); if (selectedFiles != null && selectedFiles.length > 0) { File destinationDirectory = selectedFiles[0]; if (!destinationDirectory.exists()/* && parentDirectory.getParentFile().exists()*/) { // create the directory provided that the parent directory exists // ths is here due the the way the mac file select gui leads the user to type in a new directory name destinationDirectory.mkdirs(); } if (!destinationDirectory.exists()) { JOptionPane.showMessageDialog(linorgFrame, "The export directory\n\"" + destinationDirectory + "\"\ndoes not exist.\nPlease select or create a directory.", titleText, JOptionPane.PLAIN_MESSAGE); } else { // if (!createdDirectory) { // String newDirectoryName = JOptionPane.showInputDialog(linorgFrame, "Enter Export Name", titleText, JOptionPane.PLAIN_MESSAGE, null, null, "arbil_export").toString(); // try { // destinationDirectory = new File(parentDirectory.getCanonicalPath() + File.separatorChar + newDirectoryName); // destinationDirectory.mkdir(); // } catch (Exception e) { // JOptionPane.showMessageDialog(LinorgWindowManager.getSingleInstance().linorgFrame, "Could not create the export directory + \'" + newDirectoryName + "\'", titleText, JOptionPane.PLAIN_MESSAGE); // } // } if (destinationDirectory != null && destinationDirectory.exists()) { if (destinationDirectory.list().length == 0) { fileSelectDone = true; return destinationDirectory; } else { if 
(showMessageDialogBox("The selected export directory is not empty.\nTo continue will merge and may overwrite files.\nDo you want to continue?", titleText)) { return destinationDirectory; } //JOptionPane.showMessageDialog(LinorgWindowManager.getSingleInstance().linorgFrame, "The export directory must be empty", titleText, JOptionPane.PLAIN_MESSAGE); } } } } else { fileSelectDone = true; } } } catch (Exception e) { System.out.println("aborting export: " + e.getMessage()); } return null; } public File[] showFileSelectBox(String titleText, boolean directorySelectOnly, boolean multipleSelect, boolean requireMetadataFiles) { // test for os: if mac or file then awt else for other and directory use swing // save/load last directory accoring to the title of the dialogue //Hashtable<String, File> fileSelectLocationsHashtable; File workingDirectory = null; String workingDirectoryPathString = LinorgSessionStorage.getSingleInstance().loadString("fileSelect." + titleText); if (workingDirectoryPathString == null) { workingDirectory = new File(System.getProperty("user.home")); } else { workingDirectory = new File(workingDirectoryPathString); } File lastUsedWorkingDirectory; File[] returnFile; boolean isMac = true; // TODO: set this correctly boolean useAtwSelect = false; //directorySelectOnly && isMac && !multipleSelect; if (useAtwSelect) { if (directorySelectOnly) { System.setProperty("apple.awt.fileDialogForDirectories", "true"); } else { System.setProperty("apple.awt.fileDialogForDirectories", "false"); } FileDialog fileDialog = new FileDialog(linorgFrame); if (requireMetadataFiles) { fileDialog.setFilenameFilter(new FilenameFilter() { public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".imdi"); } }); } fileDialog.setDirectory(workingDirectory.getAbsolutePath()); fileDialog.setVisible(true); String selectedFile = fileDialog.getFile(); lastUsedWorkingDirectory = new File(fileDialog.getDirectory()); if (selectedFile != null) { returnFile = new File[]{new File(selectedFile)}; } else { returnFile = null; } } else { JFileChooser fileChooser = new JFileChooser(); if (requireMetadataFiles) { FileFilter imdiFileFilter = new FileFilter() { public String getDescription() { return "IMDI"; } @Override public boolean accept(File selectedFile) { // the test for exists is unlikey to do anything here, paricularly regarding the Mac dialogues text entry field return (selectedFile.exists() && (selectedFile.isDirectory() || selectedFile.getName().toLowerCase().endsWith(".imdi"))); } }; fileChooser.addChoosableFileFilter(imdiFileFilter); } if (directorySelectOnly) { // this filter is only cosmetic but gives the user an indication of what to select FileFilter imdiFileFilter = new FileFilter() { public String getDescription() { return "Directories"; } @Override public boolean accept(File selectedFile) { return (selectedFile.exists() && selectedFile.isDirectory()); } }; fileChooser.addChoosableFileFilter(imdiFileFilter); } if (directorySelectOnly) { fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); } else { fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); } fileChooser.setCurrentDirectory(workingDirectory); fileChooser.setMultiSelectionEnabled(multipleSelect); if (JFileChooser.APPROVE_OPTION == fileChooser.showDialog(LinorgWindowManager.getSingleInstance().linorgFrame, titleText)) { returnFile = fileChooser.getSelectedFiles(); if (returnFile.length == 0) { returnFile = new File[]{fileChooser.getSelectedFile()}; } } else { returnFile = null; } if (returnFile != null && 
returnFile.length == 1 && !returnFile[0].exists()) { // if the selected file does not exist then the "unusable" mac file select is usually to blame so try to clean up returnFile[0] = returnFile[0].getParentFile(); // if the result still does not exist then abort the select by returning null if (!returnFile[0].exists()) { returnFile = null; } } lastUsedWorkingDirectory = fileChooser.getCurrentDirectory(); } // save last use working directory LinorgSessionStorage.getSingleInstance().saveString("fileSelect." + titleText, lastUsedWorkingDirectory.getAbsolutePath()); return returnFile; } public boolean showMessageDialogBox(String messageString, String messageTitle) { if (messageTitle == null) { messageTitle = "Arbil"; } if (JOptionPane.OK_OPTION == JOptionPane.showConfirmDialog(LinorgWindowManager.getSingleInstance().linorgFrame, messageString, messageTitle, JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE)) { return true; } else { return false; } } public void addMessageDialogToQueue(String messageString, String messageTitle) { if (messageTitle == null) { messageTitle = "Arbil"; } String currentMessage = messageDialogQueue.get(messageTitle); if (currentMessage != null) { messageString = messageString + "\n-------------------------------\n" + currentMessage; } messageDialogQueue.put(messageTitle, messageString); showMessageDialogQueue(); } private synchronized void showMessageDialogQueue() { if (!showMessageThreadrunning) { new Thread("showMessageThread") { public void run() { try { sleep(100); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); } showMessageThreadrunning = true; if (messagesCanBeShown) { while (messageDialogQueue.size() > 0) { String messageTitle = messageDialogQueue.keys().nextElement(); String messageText = messageDialogQueue.remove(messageTitle); if (messageText != null) { JOptionPane.showMessageDialog(LinorgWindowManager.getSingleInstance().linorgFrame, messageText, messageTitle, JOptionPane.PLAIN_MESSAGE); } } } showMessageThreadrunning = false; } }.start(); } } public void openIntroductionPage() { // open the introduction page // TODO: always get this page from the server if available, but also save it for off line use // URL introductionUrl = this.getClass().getResource("/nl/mpi/arbil/resources/html/Introduction.html"); // openUrlWindowOnce("Introduction", introductionUrl); // get remote file to local disk // if local file exists then open that // else open the one in the jar file ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // The features html file has been limited to the version in the jar (not the server), so that it is specific to the version of linorg in the jar. 
// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // String remoteUrl = "http://www.mpi.nl/tg/j2se/jnlp/linorg/Features.html"; // String cachePath = GuiHelper.linorgSessionStorage.updateCache(remoteUrl, true); // System.out.println("cachePath: " + cachePath); // URL destinationUrl = null; // try { // if (new File(cachePath).exists()) { // destinationUrl = new File(cachePath).toURL(); // } // } catch (Exception ex) { // } // if (destinationUrl == null) { // destinationUrl = this.getClass().getResource("/nl/mpi/arbil/resources/html/Features.html"); //// } // System.out.println("destinationUrl: " + destinationUrl); // openUrlWindowOnce("Features/Known Bugs", destinationUrl); try { // load the saved windows Hashtable windowListHashtable = (Hashtable) LinorgSessionStorage.getSingleInstance().loadObject("openWindows"); for (Enumeration windowNamesEnum = windowListHashtable.keys(); windowNamesEnum.hasMoreElements();) { String currentWindowName = windowNamesEnum.nextElement().toString(); System.out.println("currentWindowName: " + currentWindowName); Vector imdiURLs = (Vector) windowListHashtable.get(currentWindowName); // System.out.println("imdiEnumeration: " + imdiEnumeration); ImdiTreeObject[] imdiObjectsArray = new ImdiTreeObject[imdiURLs.size()]; for (int arrayCounter = 0; arrayCounter < imdiObjectsArray.length; arrayCounter++) { try { imdiObjectsArray[arrayCounter] = (ImdiLoader.getSingleInstance().getImdiObject(null, new URI(imdiURLs.elementAt(arrayCounter).toString()))); } catch (URISyntaxException ex) { GuiHelper.linorgBugCatcher.logError(ex); } } openFloatingTable(imdiObjectsArray, currentWindowName); //openFloatingTable(null, currentWindowName); } System.out.println("done loading windowStates"); } catch (Exception ex) { windowStatesHashtable = new Hashtable(); System.out.println("load windowStates failed: " + ex.getMessage()); } if (!TreeHelper.getSingleInstance().locationsHaveBeenAdded()) { System.out.println("no local locations found, showing help window"); LinorgHelp helpComponent = LinorgHelp.getSingleInstance(); if (null == focusWindow(LinorgHelp.helpWindowTitle)) { createWindow(LinorgHelp.helpWindowTitle, helpComponent); } helpComponent.setCurrentPage(LinorgHelp.IntroductionPage); } startKeyListener(); messagesCanBeShown = true; showMessageDialogQueue(); } public void loadSplitPlanes(Component targetComponent) { //System.out.println("loadSplitPlanes: " + targetComponent); if (targetComponent instanceof JSplitPane) { System.out.println("loadSplitPlanes: " + targetComponent.getName()); Object linorgSplitPosition = windowStatesHashtable.get(targetComponent.getName()); if (linorgSplitPosition instanceof Integer) { System.out.println(targetComponent.getName() + ": " + linorgSplitPosition); ((JSplitPane) targetComponent).setDividerLocation((Integer) linorgSplitPosition); } else { if (targetComponent.getName().equals("rightSplitPane")) { ((JSplitPane) targetComponent).setDividerLocation(150); } else { //leftSplitPane leftLocalSplitPane rightSplitPane) ((JSplitPane) targetComponent).setDividerLocation(200); } } for (Component childComponent : ((JSplitPane) targetComponent).getComponents()) { loadSplitPlanes(childComponent); } } if (targetComponent instanceof JPanel) { for (Component childComponent : ((JPanel) targetComponent).getComponents()) { loadSplitPlanes(childComponent); } } } public void saveSplitPlanes(Component targetComponent) { //System.out.println("saveSplitPlanes: " + 
targetComponent); if (targetComponent instanceof JSplitPane) { System.out.println("saveSplitPlanes: " + targetComponent.getName()); windowStatesHashtable.put(targetComponent.getName(), ((JSplitPane) targetComponent).getDividerLocation()); for (Component childComponent : ((JSplitPane) targetComponent).getComponents()) { saveSplitPlanes(childComponent); } } if (targetComponent instanceof JPanel) { for (Component childComponent : ((JPanel) targetComponent).getComponents()) { saveSplitPlanes(childComponent); } } } public void saveWindowStates() { // loop windowList and make a hashtable of window names with a vector of the imdinodes displayed, then save the hashtable try { // collect the main window size and position for saving if (linorgFrame.getExtendedState() != JFrame.MAXIMIZED_BOTH) { windowStatesHashtable.put("linorgFrameBounds", linorgFrame.getBounds()); } windowStatesHashtable.put("ScreenDeviceCount", GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices().length); windowStatesHashtable.put("linorgFrameExtendedState", linorgFrame.getExtendedState()); // collect the split pane positions for saving saveSplitPlanes(linorgFrame.getContentPane().getComponent(0)); // save the collected states LinorgSessionStorage.getSingleInstance().saveObject(windowStatesHashtable, "windowStates"); // save the windows Hashtable windowListHashtable = new Hashtable(); //(Hashtable) windowList.clone(); for (Enumeration windowNamesEnum = windowList.keys(); windowNamesEnum.hasMoreElements();) { String currentWindowName = windowNamesEnum.nextElement().toString(); System.out.println("currentWindowName: " + currentWindowName); // set the value of the windowListHashtable to be the imdi urls rather than the windows Object windowObject = ((Component[]) windowList.get(currentWindowName))[0]; try { if (windowObject != null) { Object currentComponent = ((JInternalFrame) windowObject).getContentPane().getComponent(0); if (currentComponent != null && currentComponent instanceof LinorgSplitPanel) { // if this table has no nodes then don't save it if (0 < ((LinorgSplitPanel) currentComponent).imdiTable.getRowCount()) { // System.out.println("windowObject: " + windowObject); // System.out.println("getContentPane: " + ((JInternalFrame) windowObject).getContentPane()); // System.out.println("getComponent: " + ((JInternalFrame) windowObject).getComponent(0)); // System.out.println("LinorgSplitPanel: " + ((LinorgSplitPanel)((JInternalFrame) windowObject).getContentPane())); // System.out.println("getContentPane: " + ((JInternalFrame) windowObject).getContentPane().getComponent(0)); Vector currentNodesVector = new Vector(); for (String currentUrlString : ((ImdiTableModel) ((LinorgSplitPanel) currentComponent).imdiTable.getModel()).getImdiNodesURLs()) { currentNodesVector.add(currentUrlString); } windowListHashtable.put(currentWindowName, currentNodesVector); System.out.println("saved"); } } } } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println("Exception: " + ex.getMessage()); } } // save the windows LinorgSessionStorage.getSingleInstance().saveObject(windowListHashtable, "openWindows"); System.out.println("saved windowStates"); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println("save windowStates exception: " + ex.getMessage()); } } private String addWindowToList(String windowName, JInternalFrame windowFrame) { int instanceCount = 0; String currentWindowName = windowName; while (windowList.containsKey(currentWindowName)) { currentWindowName = 
windowName + "(" + ++instanceCount + ")"; } JMenuItem windowMenuItem = new JMenuItem(); windowMenuItem.setText(currentWindowName); windowMenuItem.setName(currentWindowName); windowFrame.setName(currentWindowName); windowMenuItem.setActionCommand(currentWindowName); windowMenuItem.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { try { focusWindow(evt.getActionCommand()); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); } } }); windowFrame.addInternalFrameListener(new InternalFrameAdapter() { @Override public void internalFrameClosed(InternalFrameEvent e) { String windowName = e.getInternalFrame().getName(); Component[] windowAndMenu = (Component[]) windowList.get(windowName); if (ArbilMenuBar.windowMenu != null) { ArbilMenuBar.windowMenu.remove(windowAndMenu[1]); } windowList.remove(windowName); super.internalFrameClosed(e); } }); windowList.put(currentWindowName, new Component[]{windowFrame, windowMenuItem}); if (ArbilMenuBar.windowMenu != null) { ArbilMenuBar.windowMenu.add(windowMenuItem); } return currentWindowName; } public JInternalFrame focusWindow(String windowName) { if (windowList.containsKey(windowName)) { Object windowObject = ((Component[]) windowList.get(windowName))[0]; try { if (windowObject != null) { ((JInternalFrame) windowObject).setIcon(false); ((JInternalFrame) windowObject).setSelected(true); return (JInternalFrame) windowObject; } } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } } return null; } private void startKeyListener() { // desktopPane.addKeyListener(new KeyAdapter() { // // @Override // public void keyPressed(KeyEvent e) { // System.out.println("keyPressed"); // if (e.VK_W == e.getKeyCode()){ // System.out.println("VK_W"); // } // super.keyPressed(e); // } // // }); Toolkit.getDefaultToolkit().addAWTEventListener(new AWTEventListener() { public void eventDispatched(AWTEvent e) { boolean isKeybordRepeat = false; if (e instanceof KeyEvent) { // only consider key release events if (e.getID() == KeyEvent.KEY_RELEASED) { // work around for jvm in linux // due to the bug in the jvm for linux the keyboard repeats are shown as real key events, so we attempt to prevent ludicrous key events being used here KeyEvent nextPress = (KeyEvent) Toolkit.getDefaultToolkit().getSystemEventQueue().peekEvent(KeyEvent.KEY_PRESSED); if (nextPress != null) { // the next key event is at the same time as this event if ((nextPress.getWhen() == ((KeyEvent) e).getWhen())) { // the next key code is the same as this event if (((nextPress.getKeyCode() == ((KeyEvent) e).getKeyCode()))) { isKeybordRepeat = true; } } } // end work around for jvm in linux if (!isKeybordRepeat) { // System.out.println("KeyEvent.paramString: " + ((KeyEvent) e).paramString()); // System.out.println("KeyEvent.getWhen: " + ((KeyEvent) e).getWhen()); if ((((KeyEvent) e).isMetaDown() || ((KeyEvent) e).isControlDown()) && ((KeyEvent) e).getKeyCode() == KeyEvent.VK_W) { JInternalFrame[] windowsToClose; if (((KeyEvent) e).isShiftDown()) { windowsToClose = desktopPane.getAllFrames(); } else { windowsToClose = new JInternalFrame[]{desktopPane.getSelectedFrame()}; } for (JInternalFrame focusedWindow : windowsToClose) { if (focusedWindow != null) { String windowName = focusedWindow.getName(); Component[] windowAndMenu = (Component[]) windowList.get(windowName); if (windowAndMenu != null && ArbilMenuBar.windowMenu != null) { ArbilMenuBar.windowMenu.remove(windowAndMenu[1]); } 
windowList.remove(windowName); desktopPane.remove(focusedWindow); try { JInternalFrame[] allWindows = desktopPane.getAllFrames(); if (allWindows.length > 0) { JInternalFrame topMostWindow = allWindows[0]; if (topMostWindow != null) { System.out.println("topMostWindow: " + topMostWindow); topMostWindow.setIcon(false); topMostWindow.setSelected(true); } } } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } } } desktopPane.repaint(); } if ((((KeyEvent) e).getKeyCode() == KeyEvent.VK_TAB && ((KeyEvent) e).isControlDown())) { // the [meta `] is consumed by the operating system, the only way to enable the back quote key for window switching is to use separate windows and rely on the OS to do the switching // || (((KeyEvent) e).getKeyCode() == KeyEvent.VK_BACK_QUOTE && ((KeyEvent) e).isMetaDown()) try { JInternalFrame[] allWindows = desktopPane.getAllFrames(); int targetLayerInt; if (((KeyEvent) e).isShiftDown()) { allWindows[0].moveToBack(); targetLayerInt = 1; } else { targetLayerInt = allWindows.length - 1; } allWindows[targetLayerInt].setIcon(false); allWindows[targetLayerInt].setSelected(true); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } } if ((((KeyEvent) e).isMetaDown() || ((KeyEvent) e).isControlDown()) && ((KeyEvent) e).getKeyCode() == KeyEvent.VK_F) { JInternalFrame windowToSearch = desktopPane.getSelectedFrame(); //System.out.println(windowToSearch.getContentPane()); for (Component childComponent : windowToSearch.getContentPane().getComponents()) { // loop through all the child components in the window (there will probably only be one) if (childComponent instanceof LinorgSplitPanel) { ((LinorgSplitPanel) childComponent).showSearchPane(); } } } } } } } }, AWTEvent.KEY_EVENT_MASK); } public JInternalFrame createWindow(String windowTitle, Component contentsComponent) { JInternalFrame currentInternalFrame = new javax.swing.JInternalFrame(); currentInternalFrame.setLayout(new BorderLayout()); // GuiHelper.arbilDragDrop.addTransferHandler(currentInternalFrame); currentInternalFrame.add(contentsComponent, BorderLayout.CENTER); windowTitle = addWindowToList(windowTitle, currentInternalFrame); // set the new window size to be fully visible int tempWindowWidth, tempWindowHeight; if (desktopPane.getWidth() > nextWindowWidth) { tempWindowWidth = nextWindowWidth; } else { tempWindowWidth = desktopPane.getWidth() - 50; } if (desktopPane.getHeight() > nextWindowHeight) { tempWindowHeight = nextWindowHeight; } else { tempWindowHeight = desktopPane.getHeight() - 50; } if (tempWindowHeight < 100) { tempWindowHeight = 100; } currentInternalFrame.setSize(tempWindowWidth, tempWindowHeight); currentInternalFrame.setClosable(true); currentInternalFrame.setIconifiable(true); currentInternalFrame.setMaximizable(true); currentInternalFrame.setResizable(true); currentInternalFrame.setTitle(windowTitle); currentInternalFrame.setToolTipText(windowTitle); currentInternalFrame.setName(windowTitle); currentInternalFrame.setVisible(true); // selectedFilesFrame.setSize(destinationComp.getWidth(), 300); // selectedFilesFrame.setRequestFocusEnabled(false); // selectedFilesFrame.getContentPane().add(selectedFilesPanel, java.awt.BorderLayout.CENTER); // selectedFilesFrame.setBounds(0, 0, 641, 256); // destinationComp.add(selectedFilesFrame, javax.swing.JLayeredPane.DEFAULT_LAYER); // set the window position so that they are cascaded currentInternalFrame.setLocation(nextWindowX, nextWindowY); nextWindowX = 
nextWindowX + 10; nextWindowY = nextWindowY + 10; // TODO: it would be nice to use the JInternalFrame's title bar height to increment the position if (nextWindowX + tempWindowWidth > desktopPane.getWidth()) { nextWindowX = 0; } if (nextWindowY + tempWindowHeight > desktopPane.getHeight()) { nextWindowY = 0; } desktopPane.add(currentInternalFrame, 0); try { // prevent the frame focus process consuming mouse events that should be recieved by the jtable etc. currentInternalFrame.setSelected(true); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } return currentInternalFrame; } public JEditorPane openUrlWindowOnce(String frameTitle, URL locationUrl) { JEditorPane htmlDisplay = new JEditorPane(); htmlDisplay.setEditable(false); htmlDisplay.setContentType("text/html"); try { htmlDisplay.setPage(locationUrl); htmlDisplay.addHyperlinkListener(new LinorgHyperlinkListener()); //gridViewInternalFrame.setMaximum(true); } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); // System.out.println(ex.getMessage()); } JInternalFrame existingWindow = focusWindow(frameTitle); if (existingWindow == null) { // return openUrlWindow(frameTitle, htmlDisplay); JScrollPane jScrollPane6; jScrollPane6 = new javax.swing.JScrollPane(); jScrollPane6.setViewportView(htmlDisplay); createWindow(frameTitle, jScrollPane6); } else { ((JScrollPane) existingWindow.getContentPane().getComponent(0)).setViewportView(htmlDisplay); } return htmlDisplay; } public void openSearchTable(ImdiTreeObject[] selectedNodes, String frameTitle) { ImdiTableModel resultsTableModel = new ImdiTableModel(); ImdiTable imdiTable = new ImdiTable(resultsTableModel, frameTitle); LinorgSplitPanel imdiSplitPanel = new LinorgSplitPanel(imdiTable); JInternalFrame searchFrame = this.createWindow(frameTitle, imdiSplitPanel); searchFrame.add(new ImdiNodeSearchPanel(searchFrame, resultsTableModel, selectedNodes), BorderLayout.NORTH); imdiSplitPanel.setSplitDisplay(); imdiSplitPanel.addFocusListener(searchFrame); } public ImdiTableModel openFloatingTableOnce(URI[] rowNodesArray, String frameTitle) { ImdiTreeObject[] tableNodes = new ImdiTreeObject[rowNodesArray.length]; ArrayList<String> fieldPathsToHighlight = new ArrayList<String>(); for (int arrayCounter = 0; arrayCounter < rowNodesArray.length; arrayCounter++) { try { if (rowNodesArray[arrayCounter] != null) { ImdiTreeObject parentNode = ImdiLoader.getSingleInstance().getImdiObject(null, new URI(rowNodesArray[arrayCounter].toString().split("#")[0])); // parentNode.waitTillLoaded(); String fieldPath = rowNodesArray[arrayCounter].getFragment(); String parentNodeFragment = parentNode.nodeTemplate.getParentOfField(fieldPath); URI targetNode; // note that the url has already be encoded and so we must not use the separate parameter version of new URI otherwise it would be encoded again which we do not want if (parentNodeFragment.length() > 0) { targetNode = new URI(rowNodesArray[arrayCounter].toString().split("#")[0] + "#" + parentNodeFragment); } else { targetNode = new URI(rowNodesArray[arrayCounter].toString().split("#")[0]); } tableNodes[arrayCounter] = ImdiLoader.getSingleInstance().getImdiObject(null, targetNode); fieldPathsToHighlight.add(fieldPath); } } catch (URISyntaxException ex) { GuiHelper.linorgBugCatcher.logError(ex); } } ImdiTableModel targetTableModel = openFloatingTableOnce(tableNodes, frameTitle); targetTableModel.highlightMatchingFieldPaths(fieldPathsToHighlight.toArray(new String[]{})); return targetTableModel; } public 
ImdiTableModel openAllChildNodesInFloatingTableOnce(URI[] rowNodesArray, String frameTitle) { HashSet<ImdiTreeObject> tableNodes = new HashSet(); for (int arrayCounter = 0; arrayCounter < rowNodesArray.length; arrayCounter++) { // try { ImdiTreeObject currentNode = ImdiLoader.getSingleInstance().getImdiObject(null, rowNodesArray[arrayCounter]); tableNodes.add(currentNode); for (ImdiTreeObject currentChildNode : currentNode.getAllChildren()) { tableNodes.add(currentChildNode); } // } catch (URISyntaxException ex) { // GuiHelper.linorgBugCatcher.logError(ex); // } } return openFloatingTableOnce(tableNodes.toArray(new ImdiTreeObject[]{}), frameTitle); } public ImdiTableModel openFloatingTableOnce(ImdiTreeObject[] rowNodesArray, String frameTitle) { if (rowNodesArray.length == 1 && rowNodesArray[0] != null && rowNodesArray[0].isInfoLink) { try { if (rowNodesArray[0].getUrlString().toLowerCase().endsWith(".html") || rowNodesArray[0].getUrlString().toLowerCase().endsWith(".txt")) { openUrlWindowOnce(rowNodesArray[0].toString(), rowNodesArray[0].getURI().toURL()); return null; } } catch (MalformedURLException exception) { GuiHelper.linorgBugCatcher.logError(exception); } } // open find a table containing exactly the same nodes as requested or create a new table for (Component[] currentWindow : windowList.values().toArray(new Component[][]{})) { // loop through all the windows for (Component childComponent : ((JInternalFrame) currentWindow[0]).getContentPane().getComponents()) { // loop through all the child components in the window (there will probably only be one) if (childComponent instanceof LinorgSplitPanel) { // only consider components with a LinorgSplitPanel ImdiTableModel currentTableModel = (ImdiTableModel) ((LinorgSplitPanel) childComponent).imdiTable.getModel(); if (currentTableModel.getImdiNodeCount() == rowNodesArray.length) { // first check that the number of nodes in the table matches boolean tableMatches = true; for (ImdiTreeObject currentItem : rowNodesArray) { // compare each node for a verbatim match if (!currentTableModel.containsImdiNode(currentItem)) { // // ignore this window because the nodes do not match tableMatches = false; break; } } if (tableMatches) { // System.out.println("tableMatches"); try { ((JInternalFrame) currentWindow[0]).setIcon(false); ((JInternalFrame) currentWindow[0]).setSelected(true); return currentTableModel; } catch (Exception ex) { GuiHelper.linorgBugCatcher.logError(ex); } } } } } } // if through the above process a table containing all and only the nodes requested has not been found then create a new table return openFloatingTable(rowNodesArray, frameTitle); } public ImdiTableModel openFloatingTable(ImdiTreeObject[] rowNodesArray, String frameTitle) { if (frameTitle == null) { if (rowNodesArray.length == 1) { frameTitle = rowNodesArray[0].toString(); } else { frameTitle = "Selection"; } } ImdiTableModel imdiTableModel = new ImdiTableModel(); ImdiTable imdiTable = new ImdiTable(imdiTableModel, frameTitle); LinorgSplitPanel imdiSplitPanel = new LinorgSplitPanel(imdiTable); imdiTableModel.addImdiObjects(rowNodesArray); imdiSplitPanel.setSplitDisplay(); JInternalFrame tableFrame = this.createWindow(frameTitle, imdiSplitPanel); imdiSplitPanel.addFocusListener(tableFrame); return imdiTableModel; } }
Redesigned the tree sort to traverse down the tree and to use low-level methods rather than the unpredictable high-level methods. Used the same blue text indicator for the tree as is used for the table cells to indicate whether a node needs to be saved. Modified the tree renderer to prevent jumping and swapping of node labels and icons in later versions of the JVM. Prevented an occasional deadlock on some operating systems. Removed residual dom id attributes left over from the IMDI API. Added a check on load, with a message telling the user that files need to be saved to remove the dom id attributes (only for IMDI files). Updated the archive handle removal to completely remove the attribute. Corrected a bug when storing which nodes need to be saved. Corrected the node removal from the tree so that multiple nodes are removed correctly. Updated the change-working-directory code so that the user can continue even if the files cannot be moved.
src/nl/mpi/arbil/LinorgWindowManager.java
Redesigned the tree sort to traverse down the tree and to use low-level methods rather than the unpredictable high-level methods. Used the same blue text indicator for the tree as is used for the table cells to indicate whether a node needs to be saved. Modified the tree renderer to prevent jumping and swapping of node labels and icons in later versions of the JVM. Prevented an occasional deadlock on some operating systems. Removed residual dom id attributes left over from the IMDI API. Added a check on load, with a message telling the user that files need to be saved to remove the dom id attributes (only for IMDI files). Updated the archive handle removal to completely remove the attribute. Corrected a bug when storing which nodes need to be saved. Corrected the node removal from the tree so that multiple nodes are removed correctly. Updated the change-working-directory code so that the user can continue even if the files cannot be moved.
<ide><path>rc/nl/mpi/arbil/LinorgWindowManager.java <ide> ArbilMenuBar.windowMenu.add(windowMenuItem); <ide> } <ide> return currentWindowName; <add> } <add> <add> public void closeAllWindows() { <add> for (JInternalFrame focusedWindow : desktopPane.getAllFrames()) { <add> if (focusedWindow != null) { <add> String windowName = focusedWindow.getName(); <add> Component[] windowAndMenu = (Component[]) windowList.get(windowName); <add> if (windowAndMenu != null && ArbilMenuBar.windowMenu != null) { <add> ArbilMenuBar.windowMenu.remove(windowAndMenu[1]); <add> } <add> windowList.remove(windowName); <add> desktopPane.remove(focusedWindow); <add> } <add> } <add> desktopPane.repaint(); <ide> } <ide> <ide> public JInternalFrame focusWindow(String windowName) {
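The createWindow method in the LinorgWindowManager source above sizes each new JInternalFrame to fit the desktop pane and cascades successive windows by ten pixels, wrapping back to the origin once a window would no longer fit. The following is a minimal Java sketch of just that placement rule; the class and method names are hypothetical and none of the Arbil-specific wiring (window lists, menus, error logging) is reproduced.

import java.awt.Point;

// Cascade placement in the spirit of createWindow above: each new window is
// offset by 10 px from the previous one, and the offsets wrap back to (0, 0)
// when the next window would no longer fit inside the desktop pane.
class CascadePlacementSketch {

    private int nextWindowX = 0;
    private int nextWindowY = 0;

    // Returns the top-left location for the next window of the given size.
    Point nextLocation(int windowWidth, int windowHeight, int desktopWidth, int desktopHeight) {
        Point location = new Point(nextWindowX, nextWindowY);
        nextWindowX += 10;
        nextWindowY += 10;
        if (nextWindowX + windowWidth > desktopWidth) {
            nextWindowX = 0;
        }
        if (nextWindowY + windowHeight > desktopHeight) {
            nextWindowY = 0;
        }
        return location;
    }
}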
JavaScript
mit
c1bfe2fafdb4b65e31bfe3239b48a5dd6fd31f94
0
itkoren/machineto
/** * Create a finite state machine * @param {String} current - initialization state for the state machine * @param {Object} machine - the state machine's flow: * { * "state1": { * "event1": [action1, "state2"], * "event2": [action2] * }, * "state2": { * "event3": [ * [action3, context], * "state1" * ] * } * } * @returns {Object} */ (function () { var root = this; function Machineto(current, machine) { return { /** * Send an event to the state machine * @param {String} name - the name of the event * @param {Object} params - the parameters to pass to the action * @returns {Object} */ event: function (name) { var params = (1 < arguments.length) ? Array.prototype.slice.call(arguments, 1) : void 0; /** * Save [action, nextState] in name for further use and return name * @returns {Array} */ function _assign() { name = machine[current][name]; return name; } /** * name[0] or name[0][0] is the function to invoke (if a context is given) * call the function in the context or call it directly with the params * and return an indicator weather to set next state or not * @returns {Boolean} */ function _invoke() { try { (name[0][0] || name[0]).apply(name[0][1], params); return true; } catch(ex) {} return false; } /** * The next state is the new state and the new state is returned * @returns {Object} */ function _updateState() { current = name[1] || current; return current; } // Save [action, nextState] in name for further use // If name is defined // name[0] or name[0][0] is the function to invoke (if a context is given) // call the function in the context or call it directly with the params // The next state is the new state and the new state is returned return (_assign() && _invoke() && _updateState()); } }; } // NodeJS if ("undefined" !== typeof module && module.exports) { module.exports = Machineto; } // AMD / RequireJS else if ("undefined" !== typeof define && define.amd) { define([], function () { return Machineto; }); } // Included directly via <script> tag else { root.Machineto = Machineto; } })();
machineto.js
/** * Create a finite state machine * @param {String} current - initialization state for the state machine * @param {Object} machine - the state machine's flow: * { * "state1": { * "event1": [action1, "state2"], * "event2": [action2] * }, * "state2": { * "event3": [ * [action3, context], * "state1" * ] * } * } * @returns {Object} */ (function () { var root = this; function Machineto(current, machine) { return { /** * Send an event to the state machine * @param {String} name - the name of the event * @param {Object} params - the parameters to pass to the action * @returns {Object} */ event: function (name) { var params = (1 < arguments.length) ? Array.prototype.slice.call(arguments, 1) : void 0; /** * Save [action, nextState] in name for further use and return name * @returns {Array} */ function _assign() { name = machine[current][name]; return name; } /** * name[0] or name[0][0] is the function to invoke (if a context is given) * call the function in the context or call it directly with the params * and return an indicator weather to set next state or not * @returns {Boolean} */ function _invoke() { try { (name[0][0] || name[0]).apply(name[0][1], params); return true; } catch(ex) {} return false; } /** * The next state is the new state and the new state is returned * @returns {Object} */ function _updateState() { current = name[1] || current; return current; } // Save [action, nextState] in name for further use // If name is defined // name[0] or name[0][0] is the function to invoke (if a context is given) // call the function in the context or call it directly with the params // The next state is the new state and the new state is returned return (_assign() && _invoke() && _updateState()); } }; } // NodeJS if ("undefined" !== typeof module && module.exports) { module.exports = Machineto; } // AMD / RequireJS else if ("undefined" !== typeof define && define.amd) { define([], function () { return Machineto; }); } // Included directly via <script> tag else { root.Machineto = Machineto; } })();
plumbing
machineto.js
plumbing
<ide><path>achineto.js <ide> */ <ide> function _assign() { <ide> name = machine[current][name]; <del> <ide> return name; <ide> } <ide> /** <ide> * @returns {Boolean} <ide> */ <ide> function _invoke() { <del> <ide> try { <ide> (name[0][0] || name[0]).apply(name[0][1], params); <ide> return true; <ide> } <ide> catch(ex) {} <del> <ide> return false; <ide> } <ide> /** <ide> */ <ide> function _updateState() { <ide> current = name[1] || current; <del> <ide> return current; <ide> } <ide> // Save [action, nextState] in name for further use
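The machineto.js module above documents its flow table as nested objects mapping state to event to [action, nextState]. To keep all added examples in this document in a single language, here is a rough Java analogue of that dispatch idea; it is a sketch only, the names are invented here, and it is not part of the machineto project.

import java.util.Map;
import java.util.function.Consumer;

// A tiny state machine in the spirit of machineto's flow table:
// current state -> event name -> (action, next state).
class TinyStateMachineSketch {

    // A transition runs an action and optionally moves to a new state;
    // a null nextState keeps the current state, as in machineto.
    record Transition(Consumer<Object[]> action, String nextState) {}

    private String current;
    private final Map<String, Map<String, Transition>> flow;

    TinyStateMachineSketch(String initial, Map<String, Map<String, Transition>> flow) {
        this.current = initial;
        this.flow = flow;
    }

    // Mirrors machineto's event(name, params...): look up the transition for the
    // current state, invoke its action, then update and return the current state.
    String event(String name, Object... params) {
        Transition transition = flow.getOrDefault(current, Map.of()).get(name);
        if (transition == null) {
            return null; // no such event in the current state
        }
        transition.action().accept(params);
        if (transition.nextState() != null) {
            current = transition.nextState();
        }
        return current;
    }
}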
Java
apache-2.0
09638a023da817cd3b84a2ea1bc965cae6c7ca69
0
jeorme/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,nssales/OG-Platform,codeaudit/OG-Platform,nssales/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,McLeodMoores/starling,jerome79/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,jeorme/OG-Platform
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.component.factory.engine; import java.util.LinkedHashMap; import java.util.Map; import java.util.UUID; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; import org.fudgemsg.FudgeContext; import org.joda.beans.BeanBuilder; import org.joda.beans.BeanDefinition; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectBeanBuilder; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import com.google.common.base.Supplier; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.component.ComponentInfo; import com.opengamma.component.ComponentRepository; import com.opengamma.component.factory.AbstractComponentFactory; import com.opengamma.engine.calcnode.CalcNodeSocketConfiguration; import com.opengamma.transport.jaxrs.UriEndPointDescriptionProvider; import com.opengamma.util.GUIDGenerator; import com.opengamma.util.fudgemsg.OpenGammaFudgeContext; import com.opengamma.util.rest.DataConfigurationResource; /** * Component factory providing a managed sub set of the server capabilities. */ @BeanDefinition public class EngineConfigurationComponentFactory extends AbstractComponentFactory { /** * The name of the configuration document published. * <p> * This is used to support servers which publish multiple configurations, for example if they host multiple view processors, or that act as aggregators for a number of other servers at the * installation site. * <p> * This default name may be hard-coded in native code and installation scripts. Changes may cause client tools such as Excel to stop working correctly. */ private static final String DEFAULT_CONFIGURATION_DOCUMENT_ID = "0"; /** * The field name under which the logical server unique identifier is published. * <p> * This property may be set explicitly by calling {@link #setLogicalServerId}, or if omitted will be generated randomly. * <p> * This default name is hard-coded in native code. Changes may cause client tools such as Excel to stop working correctly. */ private static final String LOGICAL_SERVER_UNIQUE_IDENTIFIER = "lsid"; /** * The classifier that the factory should publish under. */ @PropertyDefinition(validate = "notNull") private String _classifier; /** * The Fudge context. */ @PropertyDefinition(validate = "notNull") private FudgeContext _fudgeContext = OpenGammaFudgeContext.getInstance(); /** * The logical server unique identifier. This is defined by the data environment. Clustered servers (that is, they appear suitably identical to any connecting clients) should have the same logical * identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a transient or temporary data environment it must * generate a new logical identifier whenever that environment is flushed. * <p> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary * (for example, in-memory) masters or other data stores. */ @PropertyDefinition private String _logicalServerId /* = createLogicalServerId() */; /** * Creates a random logical server unique identifier. 
This is used if an explicit identifier is not set in the configuration file. * <p> * This is a 24 character string using base-64 characters, created using the algorithm from {@link GUIDGenerator} for uniqueness. * * @return the logical server unique identifier, not null */ protected String createLogicalServerId() { final UUID uuid = GUIDGenerator.generate(); final byte[] bytes = new byte[16]; long x = uuid.getMostSignificantBits(); bytes[0] = (byte) x; bytes[1] = (byte) (x >> 8); bytes[2] = (byte) (x >> 16); bytes[3] = (byte) (x >> 24); bytes[4] = (byte) (x >> 32); bytes[5] = (byte) (x >> 40); bytes[6] = (byte) (x >> 48); bytes[7] = (byte) (x >> 56); x = uuid.getLeastSignificantBits(); bytes[8] = (byte) x; bytes[9] = (byte) (x >> 8); bytes[10] = (byte) (x >> 16); bytes[11] = (byte) (x >> 24); bytes[12] = (byte) (x >> 32); bytes[13] = (byte) (x >> 40); bytes[14] = (byte) (x >> 48); bytes[15] = (byte) (x >> 56); return Base64.encodeBase64String(bytes); } protected void afterPropertiesSet() { if (getLogicalServerId() == null) { setLogicalServerId(createLogicalServerId()); } } protected void buildConfiguration(final ComponentRepository repo, final Map<String, String> configuration, final Map<String, Object> map) { map.put("lsid", getLogicalServerId()); for (final String key : configuration.keySet()) { final String valueStr = configuration.get(key); Object targetValue = valueStr; if (valueStr.contains("::")) { final String type = StringUtils.substringBefore(valueStr, "::"); final String classifier = StringUtils.substringAfter(valueStr, "::"); final ComponentInfo info = repo.findInfo(type, classifier); if (info == null) { throw new IllegalArgumentException("Component not found: " + valueStr); } final Object instance = repo.getInstance(info); if ((instance instanceof CalcNodeSocketConfiguration) || (instance instanceof Supplier)) { targetValue = instance; } else { if (info.getUri() == null) { throw new OpenGammaRuntimeException("Unable to add component to configuration as it has not been published by REST: " + valueStr); } targetValue = new UriEndPointDescriptionProvider(info.getUri().toString()); } } buildMap(map, key, targetValue); } } @Override public void init(final ComponentRepository repo, final LinkedHashMap<String, String> configuration) { afterPropertiesSet(); final Map<String, Object> map = new LinkedHashMap<String, Object>(); buildConfiguration(repo, configuration, map); final Map<String, Object> outer = new LinkedHashMap<String, Object>(); outer.put("0", map); final DataConfigurationResource resource = new DataConfigurationResource(getFudgeContext(), outer); repo.getRestComponents().publishResource(resource); // indicate that all component configuration was used configuration.clear(); } /** * Builds the map, handling dot separate keys. 
* * @param map the map, not null * @param key the key, not null * @param targetValue the target value,not null */ protected void buildMap(final Map<String, Object> map, final String key, final Object targetValue) { if (key.contains(".")) { final String key1 = StringUtils.substringBefore(key, "."); final String key2 = StringUtils.substringAfter(key, "."); @SuppressWarnings("unchecked") Map<String, Object> subMap = (Map<String, Object>) map.get(key1); if (subMap == null) { subMap = new LinkedHashMap<String, Object>(); map.put(key1, subMap); } buildMap(subMap, key2, targetValue); } else { map.put(key, targetValue); } } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code EngineConfigurationComponentFactory}. * * @return the meta-bean, not null */ public static EngineConfigurationComponentFactory.Meta meta() { return EngineConfigurationComponentFactory.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(EngineConfigurationComponentFactory.Meta.INSTANCE); } @Override public EngineConfigurationComponentFactory.Meta metaBean() { return EngineConfigurationComponentFactory.Meta.INSTANCE; } @Override protected Object propertyGet(String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case -281470431: // classifier return getClassifier(); case -917704420: // fudgeContext return getFudgeContext(); case -41854233: // logicalServerId return getLogicalServerId(); } return super.propertyGet(propertyName, quiet); } @Override protected void propertySet(String propertyName, Object newValue, boolean quiet) { switch (propertyName.hashCode()) { case -281470431: // classifier setClassifier((String) newValue); return; case -917704420: // fudgeContext setFudgeContext((FudgeContext) newValue); return; case -41854233: // logicalServerId setLogicalServerId((String) newValue); return; } super.propertySet(propertyName, newValue, quiet); } @Override protected void validate() { JodaBeanUtils.notNull(_classifier, "classifier"); JodaBeanUtils.notNull(_fudgeContext, "fudgeContext"); super.validate(); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { EngineConfigurationComponentFactory other = (EngineConfigurationComponentFactory) obj; return JodaBeanUtils.equal(getClassifier(), other.getClassifier()) && JodaBeanUtils.equal(getFudgeContext(), other.getFudgeContext()) && JodaBeanUtils.equal(getLogicalServerId(), other.getLogicalServerId()) && super.equals(obj); } return false; } @Override public int hashCode() { int hash = 7; hash += hash * 31 + JodaBeanUtils.hashCode(getClassifier()); hash += hash * 31 + JodaBeanUtils.hashCode(getFudgeContext()); hash += hash * 31 + JodaBeanUtils.hashCode(getLogicalServerId()); return hash ^ super.hashCode(); } //----------------------------------------------------------------------- /** * Gets the classifier that the factory should publish under. * * @return the value of the property, not null */ public String getClassifier() { return _classifier; } /** * Sets the classifier that the factory should publish under. * * @param classifier the new value of the property, not null */ public void setClassifier(String classifier) { JodaBeanUtils.notNull(classifier, "classifier"); this._classifier = classifier; } /** * Gets the the {@code classifier} property. 
* * @return the property, not null */ public final Property<String> classifier() { return metaBean().classifier().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the Fudge context. * * @return the value of the property, not null */ public FudgeContext getFudgeContext() { return _fudgeContext; } /** * Sets the Fudge context. * * @param fudgeContext the new value of the property, not null */ public void setFudgeContext(FudgeContext fudgeContext) { JodaBeanUtils.notNull(fudgeContext, "fudgeContext"); this._fudgeContext = fudgeContext; } /** * Gets the the {@code fudgeContext} property. * * @return the property, not null */ public final Property<FudgeContext> fudgeContext() { return metaBean().fudgeContext().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the logical server unique identifier. This is defined by the data environment. Clustered servers (that is, they appear suitably identical to any connecting clients) should have the same * logical identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a transient or temporary data environment it must * generate a new logical identifier whenever that environment is flushed. * <p> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary * (for example, in-memory) masters or other data stores. * * @return the value of the property */ public String getLogicalServerId() { return _logicalServerId; } /** * Sets the logical server unique identifier. This is defined by the data environment. Clustered servers (that is, they appear suitably identical to any connecting clients) should have the same * logical identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a transient or temporary data environment it must * generate a new logical identifier whenever that environment is flushed. * <p> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary * (for example, in-memory) masters or other data stores. * * @param logicalServerId the new value of the property */ public void setLogicalServerId(String logicalServerId) { this._logicalServerId = logicalServerId; } /** * Gets the the {@code logicalServerId} property. identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a * transient or temporary data environment it must generate a new logical identifier whenever that environment is flushed. * <p> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary * (for example, in-memory) masters or other data stores. * * @return the property, not null */ public final Property<String> logicalServerId() { return metaBean().logicalServerId().createProperty(this); } //----------------------------------------------------------------------- /** * The meta-bean for {@code EngineConfigurationComponentFactory}. 
*/ public static class Meta extends AbstractComponentFactory.Meta { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code classifier} property. */ private final MetaProperty<String> _classifier = DirectMetaProperty.ofReadWrite( this, "classifier", EngineConfigurationComponentFactory.class, String.class); /** * The meta-property for the {@code fudgeContext} property. */ private final MetaProperty<FudgeContext> _fudgeContext = DirectMetaProperty.ofReadWrite( this, "fudgeContext", EngineConfigurationComponentFactory.class, FudgeContext.class); /** * The meta-property for the {@code logicalServerId} property. */ private final MetaProperty<String> _logicalServerId = DirectMetaProperty.ofReadWrite( this, "logicalServerId", EngineConfigurationComponentFactory.class, String.class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( this, (DirectMetaPropertyMap) super.metaPropertyMap(), "classifier", "fudgeContext", "logicalServerId"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case -281470431: // classifier return _classifier; case -917704420: // fudgeContext return _fudgeContext; case -41854233: // logicalServerId return _logicalServerId; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends EngineConfigurationComponentFactory> builder() { return new DirectBeanBuilder<EngineConfigurationComponentFactory>(new EngineConfigurationComponentFactory()); } @Override public Class<? extends EngineConfigurationComponentFactory> beanType() { return EngineConfigurationComponentFactory.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return _metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code classifier} property. * * @return the meta-property, not null */ public final MetaProperty<String> classifier() { return _classifier; } /** * The meta-property for the {@code fudgeContext} property. * * @return the meta-property, not null */ public final MetaProperty<FudgeContext> fudgeContext() { return _fudgeContext; } /** * The meta-property for the {@code logicalServerId} property. * * @return the meta-property, not null */ public final MetaProperty<String> logicalServerId() { return _logicalServerId; } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
projects/OG-Component/src/main/java/com/opengamma/component/factory/engine/EngineConfigurationComponentFactory.java
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.component.factory.engine; import java.util.LinkedHashMap; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.fudgemsg.FudgeContext; import org.joda.beans.BeanBuilder; import org.joda.beans.BeanDefinition; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectBeanBuilder; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import com.google.common.base.Supplier; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.component.ComponentInfo; import com.opengamma.component.ComponentRepository; import com.opengamma.component.factory.AbstractComponentFactory; import com.opengamma.engine.calcnode.CalcNodeSocketConfiguration; import com.opengamma.transport.jaxrs.UriEndPointDescriptionProvider; import com.opengamma.util.fudgemsg.OpenGammaFudgeContext; import com.opengamma.util.rest.DataConfigurationResource; /** * Component factory providing a managed sub set of the server capabilities. */ @BeanDefinition public class EngineConfigurationComponentFactory extends AbstractComponentFactory { /** * The classifier that the factory should publish under. */ @PropertyDefinition(validate = "notNull") private String _classifier; /** * The Fudge context. */ @PropertyDefinition(validate = "notNull") private FudgeContext _fudgeContext = OpenGammaFudgeContext.getInstance(); //------------------------------------------------------------------------- @Override public void init(final ComponentRepository repo, final LinkedHashMap<String, String> configuration) { final Map<String, Object> map = new LinkedHashMap<String, Object>(); for (final String key : configuration.keySet()) { final String valueStr = configuration.get(key); Object targetValue = valueStr; if (valueStr.contains("::")) { final String type = StringUtils.substringBefore(valueStr, "::"); final String classifier = StringUtils.substringAfter(valueStr, "::"); final ComponentInfo info = repo.findInfo(type, classifier); if (info == null) { throw new IllegalArgumentException("Component not found: " + valueStr); } final Object instance = repo.getInstance(info); if ((instance instanceof CalcNodeSocketConfiguration) || (instance instanceof Supplier)) { targetValue = instance; } else { if (info.getUri() == null) { throw new OpenGammaRuntimeException("Unable to add component to configuration as it has not been published by REST: " + valueStr); } targetValue = new UriEndPointDescriptionProvider(info.getUri().toString()); } } buildMap(map, key, targetValue); } final Map<String, Object> outer = new LinkedHashMap<String, Object>(); outer.put("0", map); final DataConfigurationResource resource = new DataConfigurationResource(getFudgeContext(), outer); repo.getRestComponents().publishResource(resource); // indicate that all component configuration was used configuration.clear(); } /** * Builds the map, handling dot separate keys. 
* * @param map the map, not null * @param key the key, not null * @param targetValue the target value,not null */ protected void buildMap(final Map<String, Object> map, final String key, final Object targetValue) { if (key.contains(".")) { final String key1 = StringUtils.substringBefore(key, "."); final String key2 = StringUtils.substringAfter(key, "."); @SuppressWarnings("unchecked") Map<String, Object> subMap = (Map<String, Object>) map.get(key1); if (subMap == null) { subMap = new LinkedHashMap<String, Object>(); map.put(key1, subMap); } buildMap(subMap, key2, targetValue); } else { map.put(key, targetValue); } } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code EngineConfigurationComponentFactory}. * @return the meta-bean, not null */ public static EngineConfigurationComponentFactory.Meta meta() { return EngineConfigurationComponentFactory.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(EngineConfigurationComponentFactory.Meta.INSTANCE); } @Override public EngineConfigurationComponentFactory.Meta metaBean() { return EngineConfigurationComponentFactory.Meta.INSTANCE; } @Override protected Object propertyGet(String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case -281470431: // classifier return getClassifier(); case -917704420: // fudgeContext return getFudgeContext(); } return super.propertyGet(propertyName, quiet); } @Override protected void propertySet(String propertyName, Object newValue, boolean quiet) { switch (propertyName.hashCode()) { case -281470431: // classifier setClassifier((String) newValue); return; case -917704420: // fudgeContext setFudgeContext((FudgeContext) newValue); return; } super.propertySet(propertyName, newValue, quiet); } @Override protected void validate() { JodaBeanUtils.notNull(_classifier, "classifier"); JodaBeanUtils.notNull(_fudgeContext, "fudgeContext"); super.validate(); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { EngineConfigurationComponentFactory other = (EngineConfigurationComponentFactory) obj; return JodaBeanUtils.equal(getClassifier(), other.getClassifier()) && JodaBeanUtils.equal(getFudgeContext(), other.getFudgeContext()) && super.equals(obj); } return false; } @Override public int hashCode() { int hash = 7; hash += hash * 31 + JodaBeanUtils.hashCode(getClassifier()); hash += hash * 31 + JodaBeanUtils.hashCode(getFudgeContext()); return hash ^ super.hashCode(); } //----------------------------------------------------------------------- /** * Gets the classifier that the factory should publish under. * @return the value of the property, not null */ public String getClassifier() { return _classifier; } /** * Sets the classifier that the factory should publish under. * @param classifier the new value of the property, not null */ public void setClassifier(String classifier) { JodaBeanUtils.notNull(classifier, "classifier"); this._classifier = classifier; } /** * Gets the the {@code classifier} property. * @return the property, not null */ public final Property<String> classifier() { return metaBean().classifier().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the Fudge context. * @return the value of the property, not null */ public FudgeContext getFudgeContext() { return _fudgeContext; } /** * Sets the Fudge context. 
* @param fudgeContext the new value of the property, not null */ public void setFudgeContext(FudgeContext fudgeContext) { JodaBeanUtils.notNull(fudgeContext, "fudgeContext"); this._fudgeContext = fudgeContext; } /** * Gets the the {@code fudgeContext} property. * @return the property, not null */ public final Property<FudgeContext> fudgeContext() { return metaBean().fudgeContext().createProperty(this); } //----------------------------------------------------------------------- /** * The meta-bean for {@code EngineConfigurationComponentFactory}. */ public static class Meta extends AbstractComponentFactory.Meta { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code classifier} property. */ private final MetaProperty<String> _classifier = DirectMetaProperty.ofReadWrite( this, "classifier", EngineConfigurationComponentFactory.class, String.class); /** * The meta-property for the {@code fudgeContext} property. */ private final MetaProperty<FudgeContext> _fudgeContext = DirectMetaProperty.ofReadWrite( this, "fudgeContext", EngineConfigurationComponentFactory.class, FudgeContext.class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( this, (DirectMetaPropertyMap) super.metaPropertyMap(), "classifier", "fudgeContext"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case -281470431: // classifier return _classifier; case -917704420: // fudgeContext return _fudgeContext; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends EngineConfigurationComponentFactory> builder() { return new DirectBeanBuilder<EngineConfigurationComponentFactory>(new EngineConfigurationComponentFactory()); } @Override public Class<? extends EngineConfigurationComponentFactory> beanType() { return EngineConfigurationComponentFactory.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return _metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code classifier} property. * @return the meta-property, not null */ public final MetaProperty<String> classifier() { return _classifier; } /** * The meta-property for the {@code fudgeContext} property. * @return the meta-property, not null */ public final MetaProperty<FudgeContext> fudgeContext() { return _fudgeContext; } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
[XLS-566] Generate a logical server identifier at startup so clients can detect restarts/changes
projects/OG-Component/src/main/java/com/opengamma/component/factory/engine/EngineConfigurationComponentFactory.java
[XLS-566] Generate a logical server identifier at startup so clients can detect restarts/changes
<ide><path>rojects/OG-Component/src/main/java/com/opengamma/component/factory/engine/EngineConfigurationComponentFactory.java <ide> <ide> import java.util.LinkedHashMap; <ide> import java.util.Map; <del> <add>import java.util.UUID; <add> <add>import org.apache.commons.codec.binary.Base64; <ide> import org.apache.commons.lang.StringUtils; <ide> import org.fudgemsg.FudgeContext; <ide> import org.joda.beans.BeanBuilder; <ide> import com.opengamma.component.factory.AbstractComponentFactory; <ide> import com.opengamma.engine.calcnode.CalcNodeSocketConfiguration; <ide> import com.opengamma.transport.jaxrs.UriEndPointDescriptionProvider; <add>import com.opengamma.util.GUIDGenerator; <ide> import com.opengamma.util.fudgemsg.OpenGammaFudgeContext; <ide> import com.opengamma.util.rest.DataConfigurationResource; <ide> <ide> public class EngineConfigurationComponentFactory extends AbstractComponentFactory { <ide> <ide> /** <add> * The name of the configuration document published. <add> * <p> <add> * This is used to support servers which publish multiple configurations, for example if they host multiple view processors, or that act as aggregators for a number of other servers at the <add> * installation site. <add> * <p> <add> * This default name may be hard-coded in native code and installation scripts. Changes may cause client tools such as Excel to stop working correctly. <add> */ <add> private static final String DEFAULT_CONFIGURATION_DOCUMENT_ID = "0"; <add> <add> /** <add> * The field name under which the logical server unique identifier is published. <add> * <p> <add> * This property may be set explicitly by calling {@link #setLogicalServerId}, or if omitted will be generated randomly. <add> * <p> <add> * This default name is hard-coded in native code. Changes may cause client tools such as Excel to stop working correctly. <add> */ <add> private static final String LOGICAL_SERVER_UNIQUE_IDENTIFIER = "lsid"; <add> <add> /** <ide> * The classifier that the factory should publish under. <ide> */ <ide> @PropertyDefinition(validate = "notNull") <ide> private String _classifier; <add> <ide> /** <ide> * The Fudge context. <ide> */ <ide> @PropertyDefinition(validate = "notNull") <ide> private FudgeContext _fudgeContext = OpenGammaFudgeContext.getInstance(); <ide> <del> //------------------------------------------------------------------------- <del> @Override <del> public void init(final ComponentRepository repo, final LinkedHashMap<String, String> configuration) { <del> final Map<String, Object> map = new LinkedHashMap<String, Object>(); <add> /** <add> * The logical server unique identifier. This is defined by the data environment. Clustered servers (that is, they appear suitably identical to any connecting clients) should have the same logical <add> * identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a transient or temporary data environment it must <add> * generate a new logical identifier whenever that environment is flushed. <add> * <p> <add> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary <add> * (for example, in-memory) masters or other data stores. <add> */ <add> @PropertyDefinition <add> private String _logicalServerId /* = createLogicalServerId() */; <add> <add> /** <add> * Creates a random logical server unique identifier. 
This is used if an explicit identifier is not set in the configuration file. <add> * <p> <add> * This is a 24 character string using base-64 characters, created using the algorithm from {@link GUIDGenerator} for uniqueness. <add> * <add> * @return the logical server unique identifier, not null <add> */ <add> protected String createLogicalServerId() { <add> final UUID uuid = GUIDGenerator.generate(); <add> final byte[] bytes = new byte[16]; <add> long x = uuid.getMostSignificantBits(); <add> bytes[0] = (byte) x; <add> bytes[1] = (byte) (x >> 8); <add> bytes[2] = (byte) (x >> 16); <add> bytes[3] = (byte) (x >> 24); <add> bytes[4] = (byte) (x >> 32); <add> bytes[5] = (byte) (x >> 40); <add> bytes[6] = (byte) (x >> 48); <add> bytes[7] = (byte) (x >> 56); <add> x = uuid.getLeastSignificantBits(); <add> bytes[8] = (byte) x; <add> bytes[9] = (byte) (x >> 8); <add> bytes[10] = (byte) (x >> 16); <add> bytes[11] = (byte) (x >> 24); <add> bytes[12] = (byte) (x >> 32); <add> bytes[13] = (byte) (x >> 40); <add> bytes[14] = (byte) (x >> 48); <add> bytes[15] = (byte) (x >> 56); <add> return Base64.encodeBase64String(bytes); <add> } <add> <add> protected void afterPropertiesSet() { <add> if (getLogicalServerId() == null) { <add> setLogicalServerId(createLogicalServerId()); <add> } <add> } <add> <add> protected void buildConfiguration(final ComponentRepository repo, final Map<String, String> configuration, final Map<String, Object> map) { <add> map.put("lsid", getLogicalServerId()); <ide> for (final String key : configuration.keySet()) { <ide> final String valueStr = configuration.get(key); <ide> Object targetValue = valueStr; <ide> } <ide> buildMap(map, key, targetValue); <ide> } <del> <add> } <add> <add> @Override <add> public void init(final ComponentRepository repo, final LinkedHashMap<String, String> configuration) { <add> afterPropertiesSet(); <add> final Map<String, Object> map = new LinkedHashMap<String, Object>(); <add> buildConfiguration(repo, configuration, map); <ide> final Map<String, Object> outer = new LinkedHashMap<String, Object>(); <ide> outer.put("0", map); <del> <ide> final DataConfigurationResource resource = new DataConfigurationResource(getFudgeContext(), outer); <ide> repo.getRestComponents().publishResource(resource); <del> <ide> // indicate that all component configuration was used <ide> configuration.clear(); <ide> } <ide> <ide> /** <ide> * Builds the map, handling dot separate keys. <del> * <del> * @param map the map, not null <del> * @param key the key, not null <del> * @param targetValue the target value,not null <add> * <add> * @param map the map, not null <add> * @param key the key, not null <add> * @param targetValue the target value,not null <ide> */ <ide> protected void buildMap(final Map<String, Object> map, final String key, final Object targetValue) { <ide> if (key.contains(".")) { <ide> ///CLOVER:OFF <ide> /** <ide> * The meta-bean for {@code EngineConfigurationComponentFactory}. 
<add> * <ide> * @return the meta-bean, not null <ide> */ <ide> public static EngineConfigurationComponentFactory.Meta meta() { <ide> @Override <ide> protected Object propertyGet(String propertyName, boolean quiet) { <ide> switch (propertyName.hashCode()) { <del> case -281470431: // classifier <add> case -281470431: // classifier <ide> return getClassifier(); <del> case -917704420: // fudgeContext <add> case -917704420: // fudgeContext <ide> return getFudgeContext(); <add> case -41854233: // logicalServerId <add> return getLogicalServerId(); <ide> } <ide> return super.propertyGet(propertyName, quiet); <ide> } <ide> @Override <ide> protected void propertySet(String propertyName, Object newValue, boolean quiet) { <ide> switch (propertyName.hashCode()) { <del> case -281470431: // classifier <add> case -281470431: // classifier <ide> setClassifier((String) newValue); <ide> return; <del> case -917704420: // fudgeContext <add> case -917704420: // fudgeContext <ide> setFudgeContext((FudgeContext) newValue); <add> return; <add> case -41854233: // logicalServerId <add> setLogicalServerId((String) newValue); <ide> return; <ide> } <ide> super.propertySet(propertyName, newValue, quiet); <ide> EngineConfigurationComponentFactory other = (EngineConfigurationComponentFactory) obj; <ide> return JodaBeanUtils.equal(getClassifier(), other.getClassifier()) && <ide> JodaBeanUtils.equal(getFudgeContext(), other.getFudgeContext()) && <add> JodaBeanUtils.equal(getLogicalServerId(), other.getLogicalServerId()) && <ide> super.equals(obj); <ide> } <ide> return false; <ide> int hash = 7; <ide> hash += hash * 31 + JodaBeanUtils.hashCode(getClassifier()); <ide> hash += hash * 31 + JodaBeanUtils.hashCode(getFudgeContext()); <add> hash += hash * 31 + JodaBeanUtils.hashCode(getLogicalServerId()); <ide> return hash ^ super.hashCode(); <ide> } <ide> <ide> //----------------------------------------------------------------------- <ide> /** <ide> * Gets the classifier that the factory should publish under. <add> * <ide> * @return the value of the property, not null <ide> */ <ide> public String getClassifier() { <ide> <ide> /** <ide> * Sets the classifier that the factory should publish under. <del> * @param classifier the new value of the property, not null <add> * <add> * @param classifier the new value of the property, not null <ide> */ <ide> public void setClassifier(String classifier) { <ide> JodaBeanUtils.notNull(classifier, "classifier"); <ide> <ide> /** <ide> * Gets the the {@code classifier} property. <add> * <ide> * @return the property, not null <ide> */ <ide> public final Property<String> classifier() { <ide> //----------------------------------------------------------------------- <ide> /** <ide> * Gets the Fudge context. <add> * <ide> * @return the value of the property, not null <ide> */ <ide> public FudgeContext getFudgeContext() { <ide> <ide> /** <ide> * Sets the Fudge context. <del> * @param fudgeContext the new value of the property, not null <add> * <add> * @param fudgeContext the new value of the property, not null <ide> */ <ide> public void setFudgeContext(FudgeContext fudgeContext) { <ide> JodaBeanUtils.notNull(fudgeContext, "fudgeContext"); <ide> <ide> /** <ide> * Gets the the {@code fudgeContext} property. <add> * <ide> * @return the property, not null <ide> */ <ide> public final Property<FudgeContext> fudgeContext() { <ide> <ide> //----------------------------------------------------------------------- <ide> /** <add> * Gets the logical server unique identifier. This is defined by the data environment. 
Clustered servers (that is, they appear suitably identical to any connecting clients) should have the same <add> * logical identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a transient or temporary data environment it must <add> * generate a new logical identifier whenever that environment is flushed. <add> * <p> <add> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary <add> * (for example, in-memory) masters or other data stores. <add> * <add> * @return the value of the property <add> */ <add> public String getLogicalServerId() { <add> return _logicalServerId; <add> } <add> <add> /** <add> * Sets the logical server unique identifier. This is defined by the data environment. Clustered servers (that is, they appear suitably identical to any connecting clients) should have the same <add> * logical identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a transient or temporary data environment it must <add> * generate a new logical identifier whenever that environment is flushed. <add> * <p> <add> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary <add> * (for example, in-memory) masters or other data stores. <add> * <add> * @param logicalServerId the new value of the property <add> */ <add> public void setLogicalServerId(String logicalServerId) { <add> this._logicalServerId = logicalServerId; <add> } <add> <add> /** <add> * Gets the the {@code logicalServerId} property. identifier to reflect this. Any server backed by a unique data environment must have a correspondingly unique identifier. If a server has a <add> * transient or temporary data environment it must generate a new logical identifier whenever that environment is flushed. <add> * <p> <add> * The default behavior, if this is not specified in the configuration file, is to generate a unique identifier at start up. This is suitable for most standard installations which include temporary <add> * (for example, in-memory) masters or other data stores. <add> * <add> * @return the property, not null <add> */ <add> public final Property<String> logicalServerId() { <add> return metaBean().logicalServerId().createProperty(this); <add> } <add> <add> //----------------------------------------------------------------------- <add> /** <ide> * The meta-bean for {@code EngineConfigurationComponentFactory}. <ide> */ <ide> public static class Meta extends AbstractComponentFactory.Meta { <ide> */ <ide> private final MetaProperty<FudgeContext> _fudgeContext = DirectMetaProperty.ofReadWrite( <ide> this, "fudgeContext", EngineConfigurationComponentFactory.class, FudgeContext.class); <add> /** <add> * The meta-property for the {@code logicalServerId} property. <add> */ <add> private final MetaProperty<String> _logicalServerId = DirectMetaProperty.ofReadWrite( <add> this, "logicalServerId", EngineConfigurationComponentFactory.class, String.class); <ide> /** <ide> * The meta-properties. 
<ide> */ <ide> private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( <ide> this, (DirectMetaPropertyMap) super.metaPropertyMap(), <ide> "classifier", <del> "fudgeContext"); <add> "fudgeContext", <add> "logicalServerId"); <ide> <ide> /** <ide> * Restricted constructor. <ide> @Override <ide> protected MetaProperty<?> metaPropertyGet(String propertyName) { <ide> switch (propertyName.hashCode()) { <del> case -281470431: // classifier <add> case -281470431: // classifier <ide> return _classifier; <del> case -917704420: // fudgeContext <add> case -917704420: // fudgeContext <ide> return _fudgeContext; <add> case -41854233: // logicalServerId <add> return _logicalServerId; <ide> } <ide> return super.metaPropertyGet(propertyName); <ide> } <ide> //----------------------------------------------------------------------- <ide> /** <ide> * The meta-property for the {@code classifier} property. <add> * <ide> * @return the meta-property, not null <ide> */ <ide> public final MetaProperty<String> classifier() { <ide> <ide> /** <ide> * The meta-property for the {@code fudgeContext} property. <add> * <ide> * @return the meta-property, not null <ide> */ <ide> public final MetaProperty<FudgeContext> fudgeContext() { <ide> return _fudgeContext; <add> } <add> <add> /** <add> * The meta-property for the {@code logicalServerId} property. <add> * <add> * @return the meta-property, not null <add> */ <add> public final MetaProperty<String> logicalServerId() { <add> return _logicalServerId; <ide> } <ide> <ide> }
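The buildMap helper in the component factory above turns dot-separated configuration keys into nested maps before the configuration document is published. Below is a small stand-alone Java sketch of that recursion and the structure it produces; the method re-implements the idea for illustration only, and the example keys and values are invented, not taken from a real OpenGamma configuration.

import java.util.LinkedHashMap;
import java.util.Map;

// Illustrates how dot-separated keys such as "a.b.c" become nested maps,
// in the spirit of EngineConfigurationComponentFactory.buildMap above.
class BuildMapSketch {

    @SuppressWarnings("unchecked")
    static void put(Map<String, Object> map, String key, Object value) {
        int dot = key.indexOf('.');
        if (dot < 0) {
            map.put(key, value);
            return;
        }
        String head = key.substring(0, dot);
        String tail = key.substring(dot + 1);
        Map<String, Object> subMap = (Map<String, Object>) map
                .computeIfAbsent(head, k -> new LinkedHashMap<String, Object>());
        put(subMap, tail, value);
    }

    public static void main(String[] args) {
        Map<String, Object> configuration = new LinkedHashMap<>();
        put(configuration, "lsid", "hypothetical-logical-server-id");
        put(configuration, "viewProcessor.main.uri", "http://example.invalid/jax");
        put(configuration, "viewProcessor.main.name", "main");
        // {lsid=..., viewProcessor={main={uri=..., name=main}}}
        System.out.println(configuration);
    }
}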
JavaScript
apache-2.0
fad408adef9e7563bc704d2060511467361466c8
0
Sage/carbon,Sage/carbon,Sage/carbon
import React from 'react'; import { StoryHeader, StoryCode, StoryCodeBlock } from '../../../../.storybook/style/storybook-info.styles'; const info = ( <div> <p>Textbox component</p> <StoryHeader>Implementation</StoryHeader> <StoryCodeBlock>import Textbox from {'"carbon-react/lib/components/textbox"'}</StoryCodeBlock> <p> To render a<StoryCode padded>Textbox</StoryCode> </p> <StoryCode padded>{'<Textbox name="myTextbox" />'}</StoryCode> </div> ); export default info;
src/components/textbox/documentation/info.js
import React from 'react'; import { StoryHeader, StoryCode, StoryCodeBlock } from '../../../../.storybook/style/storybook-info.styles'; const info = ( <div> <p>Textbox component </p> <StoryHeader>Implementation</StoryHeader> <StoryCodeBlock>import Textbox from {'"carbon-react/lib/components/textbox"'}</StoryCodeBlock> <p> To render a<StoryCode padded>Textbox</StoryCode> </p> <StoryCode padded>{'<Textbox name="myTextbox" />'}</StoryCode> </div> ); export default info;
spaces
src/components/textbox/documentation/info.js
spaces
<ide><path>rc/components/textbox/documentation/info.js <ide> <ide> const info = ( <ide> <div> <del> <p>Textbox component </p> <add> <p>Textbox component</p> <ide> <ide> <StoryHeader>Implementation</StoryHeader> <ide> <StoryCodeBlock>import Textbox from {'"carbon-react/lib/components/textbox"'}</StoryCodeBlock>
Java
apache-2.0
6344dfeec1d8f364f51cba425a9c704260616c40
0
jtwig/jtwig-versioning
package org.jtwig.version; public class NextVersionFinder { public String nextVersion (String currentVersion) { if (currentVersion == null) { return "1.0"; } else { int indexOfDot = currentVersion.lastIndexOf(".") + 1; String prefix = currentVersion.substring(0, indexOfDot); int number = Integer.valueOf(currentVersion.substring(indexOfDot)); return prefix + (number + 1); } } }
src/main/java/org/jtwig/version/NextVersionFinder.java
package org.jtwig.version; public class NextVersionFinder { public String nextVersion (String currentVersion) { int indexOfDot = currentVersion.lastIndexOf(".") + 1; String prefix = currentVersion.substring(0, indexOfDot); int number = Integer.valueOf(currentVersion.substring(indexOfDot)); return prefix + (number + 1); } }
Handling scenario where no previous version exists
src/main/java/org/jtwig/version/NextVersionFinder.java
Handling scenario where no previous version exists
<ide><path>rc/main/java/org/jtwig/version/NextVersionFinder.java <ide> <ide> public class NextVersionFinder { <ide> public String nextVersion (String currentVersion) { <del> int indexOfDot = currentVersion.lastIndexOf(".") + 1; <del> String prefix = currentVersion.substring(0, indexOfDot); <del> int number = Integer.valueOf(currentVersion.substring(indexOfDot)); <del> <del> return prefix + (number + 1); <add> if (currentVersion == null) { <add> return "1.0"; <add> } else { <add> int indexOfDot = currentVersion.lastIndexOf(".") + 1; <add> String prefix = currentVersion.substring(0, indexOfDot); <add> int number = Integer.valueOf(currentVersion.substring(indexOfDot)); <add> return prefix + (number + 1); <add> } <ide> } <ide> }
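NextVersionFinder above increments the final numeric segment of a dotted version string and, with this commit, falls back to "1.0" when no previous version exists. A short usage sketch restating that behaviour; the demo class is hypothetical and the expected values in the comments simply follow the code shown above.

import org.jtwig.version.NextVersionFinder;

// Quick driver for NextVersionFinder as shown above.
public class NextVersionFinderDemo {
    public static void main(String[] args) {
        NextVersionFinder finder = new NextVersionFinder();
        System.out.println(finder.nextVersion(null));  // 1.0  -> no previous version yet
        System.out.println(finder.nextVersion("1.0")); // 1.1
        System.out.println(finder.nextVersion("2.9")); // 2.10 -> numeric, not lexicographic
    }
}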
JavaScript
mit
e75db68667dcbd31ff620096049af9db89c6c01d
0
TribeMedia/react-redux-universal-hot-example
import React, {Component, PropTypes} from 'react'; import {connect} from 'react-redux'; import Helmet from 'react-helmet'; import {initialize} from 'redux-form'; import {SurveyForm} from 'components'; import request from 'superagent'; @connect( () => ({}), {initialize}) export default class Survey extends Component { static propTypes = { initialize: PropTypes.func.isRequired } handleSubmit = (data) => { /*window.optimizely.push(['trackEvent', 'saveSurvey']);*/ request.get('https://5662157414.log.optimizely.com/event?a=5662157414&d=5662157414&y=false&src=js&x5750250113=5759140016&s5692111775=gc&s5697131616=false&s5688002034=direct&tsent=1462369819.279&n=saveSurvey&u=oeu1462367115866r0.9819769772170281&wxhr=true&time=1462369819.279&f=5750250113&g=5826340299&cx2=255f22b5') .end(function(err, res) { window.alert('Data submitted! ' + JSON.stringify(data)); this.props.initialize('survey', {}); }); request.get('http://abtesting.tribemedia.io:5000/participate?experiment=button_color_out&alternatives=red&alternatives=blue&client_id=12345678-1234-5678-1234-567812345678&kpi=saveSurvey') .end(function(err, res) { request.get('http://abtesting.tribemedia.io:5000/convert?experiment=button_color&client_id=12345678-1234-5678-1234-567812345678&kpi=saveSurvey') .end(function(err, res) { }); }); } handleInitialize = () => { this.props.initialize('survey', { name: window.myvar, email: window.myemail, occupation: 'Redux Wizard', currentlyEmployed: true, sex: 'male' }); //window.optimizely.push(['trackEvent', 'initSurvey']); request.get('https://5662157414.log.optimizely.com/event?a=5662157414&d=5662157414&y=false&src=js&s5692111775=gc&s5697131616=false&s5688002034=direct&tsent=1462367147.538&n=initSurvey&u=oeu1462367115866r0.9819769772170281&wxhr=true&time=1462367147.538&f=5750250113&g=5824140674&cx2=3632d2e0') .end(function(err, res) { }); request.get('http://abtesting.tribemedia.io:5000/participate?experiment=button_color_out&alternatives=red&alternatives=blue&client_id=12345678-1234-5678-1234-567812345678&kpi=initSurvey') .end(function(err, res) { request.get('http://abtesting.tribemedia.io:5000/convert?experiment=button_color&client_id=12345678-1234-5678-1234-567812345678&kpi=initSurvey') .end(function(err, res) { }); }); } render() { return ( <div className="container"> <h1>Survey</h1> <Helmet title="Survey"/> <p> This is an example of a form in redux in which all the state is kept within the redux store. All the components are pure "dumb" components. </p> <p> Things to notice: </p> <ul> <li>No validation errors are shown initially.</li> <li>Validation errors are only shown onBlur</li> <li>Validation errors are hidden onChange when the error is rectified</li> <li><code>valid</code>, <code>invalid</code>, <code>pristine</code> and <code>dirty</code> flags are passed with each change </li> <li><em>Except</em> when you submit the form, in which case they are shown for all invalid fields.</li> <li>If you click the Initialize Form button, the form will be prepopupated with some values and the <code>pristine</code> and <code>dirty</code> flags will be based on those values. </li> </ul> <p> Pardon the use of <code>window.alert()</code>, but I wanted to keep this component stateless. 
</p> <div style={{textAlign: 'center', margin: 15}}> <button className="btn btn-primary" onClick={this.handleInitialize}> <i className="fa fa-pencil"/> Initialize Form </button> </div> <p>The circles to the left of the inputs correspond to flags provided by <code>redux-form</code>: Touched, Visited, Active, and Dirty.</p> <SurveyForm onSubmit={this.handleSubmit}/> </div> ); } }
src/containers/Survey/Survey.js
import React, {Component, PropTypes} from 'react'; import {connect} from 'react-redux'; import Helmet from 'react-helmet'; import {initialize} from 'redux-form'; import {SurveyForm} from 'components'; import request from 'superagent'; @connect( () => ({}), {initialize}) export default class Survey extends Component { static propTypes = { initialize: PropTypes.func.isRequired } handleSubmit = (data) => { /*window.optimizely.push(['trackEvent', 'saveSurvey']);*/ request.get('https://5662157414.log.optimizely.com/event?a=5662157414&d=5662157414&y=false&src=js&x5750250113=5759140016&s5692111775=gc&s5697131616=false&s5688002034=direct&tsent=1462369819.279&n=saveSurvey&u=oeu1462367115866r0.9819769772170281&wxhr=true&time=1462369819.279&f=5750250113&g=5826340299&cx2=255f22b5') .end(function(err, res) { window.alert('Data submitted! ' + JSON.stringify(data)); this.props.initialize('survey', {}); }); } handleInitialize = () => { this.props.initialize('survey', { name: window.myvar, email: window.myemail, occupation: 'Redux Wizard', currentlyEmployed: true, sex: 'male' }); //window.optimizely.push(['trackEvent', 'initSurvey']); request.get('https://5662157414.log.optimizely.com/event?a=5662157414&d=5662157414&y=false&src=js&s5692111775=gc&s5697131616=false&s5688002034=direct&tsent=1462367147.538&n=initSurvey&u=oeu1462367115866r0.9819769772170281&wxhr=true&time=1462367147.538&f=5750250113&g=5824140674&cx2=3632d2e0') .end(function(err, res) { }); } render() { return ( <div className="container"> <h1>Survey</h1> <Helmet title="Survey"/> <p> This is an example of a form in redux in which all the state is kept within the redux store. All the components are pure "dumb" components. </p> <p> Things to notice: </p> <ul> <li>No validation errors are shown initially.</li> <li>Validation errors are only shown onBlur</li> <li>Validation errors are hidden onChange when the error is rectified</li> <li><code>valid</code>, <code>invalid</code>, <code>pristine</code> and <code>dirty</code> flags are passed with each change </li> <li><em>Except</em> when you submit the form, in which case they are shown for all invalid fields.</li> <li>If you click the Initialize Form button, the form will be prepopupated with some values and the <code>pristine</code> and <code>dirty</code> flags will be based on those values. </li> </ul> <p> Pardon the use of <code>window.alert()</code>, but I wanted to keep this component stateless. </p> <div style={{textAlign: 'center', margin: 15}}> <button className="btn btn-primary" onClick={this.handleInitialize}> <i className="fa fa-pencil"/> Initialize Form </button> </div> <p>The circles to the left of the inputs correspond to flags provided by <code>redux-form</code>: Touched, Visited, Active, and Dirty.</p> <SurveyForm onSubmit={this.handleSubmit}/> </div> ); } }
added sixpack
src/containers/Survey/Survey.js
added sixpack
<ide><path>rc/containers/Survey/Survey.js <ide> window.alert('Data submitted! ' + JSON.stringify(data)); <ide> this.props.initialize('survey', {}); <ide> }); <add> <add> request.get('http://abtesting.tribemedia.io:5000/participate?experiment=button_color_out&alternatives=red&alternatives=blue&client_id=12345678-1234-5678-1234-567812345678&kpi=saveSurvey') <add> .end(function(err, res) { <add> request.get('http://abtesting.tribemedia.io:5000/convert?experiment=button_color&client_id=12345678-1234-5678-1234-567812345678&kpi=saveSurvey') <add> .end(function(err, res) { <add> <add> }); <add> }); <ide> } <ide> <ide> handleInitialize = () => { <ide> request.get('https://5662157414.log.optimizely.com/event?a=5662157414&d=5662157414&y=false&src=js&s5692111775=gc&s5697131616=false&s5688002034=direct&tsent=1462367147.538&n=initSurvey&u=oeu1462367115866r0.9819769772170281&wxhr=true&time=1462367147.538&f=5750250113&g=5824140674&cx2=3632d2e0') <ide> .end(function(err, res) { <ide> <add> }); <add> <add> request.get('http://abtesting.tribemedia.io:5000/participate?experiment=button_color_out&alternatives=red&alternatives=blue&client_id=12345678-1234-5678-1234-567812345678&kpi=initSurvey') <add> .end(function(err, res) { <add> request.get('http://abtesting.tribemedia.io:5000/convert?experiment=button_color&client_id=12345678-1234-5678-1234-567812345678&kpi=initSurvey') <add> .end(function(err, res) { <add> <add> }); <ide> }); <ide> } <ide>
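The commit above wires the survey form into a sixpack-style split-testing server by issuing two chained GET requests, participate followed by convert, against abtesting.tribemedia.io. For readers unfamiliar with that call pattern, here is a minimal Java sketch of the same two requests; the host, experiment names, client id, and kpi values are copied from the record (note the diff enrolls in button_color_out but converts button_color), while the use of java.net.http.HttpClient is purely illustrative and not part of the project, which is JavaScript.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SixpackCallSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String base = "http://abtesting.tribemedia.io:5000";
        String clientId = "12345678-1234-5678-1234-567812345678";

        // Step 1: enroll the client in the experiment (alternatives red/blue), as in the diff.
        HttpRequest participate = HttpRequest.newBuilder(URI.create(base
                + "/participate?experiment=button_color_out"
                + "&alternatives=red&alternatives=blue"
                + "&client_id=" + clientId + "&kpi=saveSurvey")).GET().build();
        HttpResponse<String> p = client.send(participate, HttpResponse.BodyHandlers.ofString());
        System.out.println("participate -> " + p.statusCode());

        // Step 2: record the conversion for the same client, as in the nested callback in the diff.
        HttpRequest convert = HttpRequest.newBuilder(URI.create(base
                + "/convert?experiment=button_color"
                + "&client_id=" + clientId + "&kpi=saveSurvey")).GET().build();
        HttpResponse<String> c = client.send(convert, HttpResponse.BodyHandlers.ofString());
        System.out.println("convert -> " + c.statusCode());
    }
}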
Java
bsd-3-clause
e6526f30a26ac727e41659496157d1cd2045cebe
0
UCDenver-ccp/datasource,UCDenver-ccp/datasource
package edu.ucdenver.ccp.datasource.fileparsers.ebi.goa; /* * #%L * Colorado Computational Pharmacology's common module * %% * Copyright (C) 2012 - 2015 Regents of the University of Colorado * %% * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * #L% */ import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import org.apache.log4j.Logger; import edu.ucdenver.ccp.common.file.CharacterEncoding; import edu.ucdenver.ccp.common.file.FileReaderUtil; import edu.ucdenver.ccp.common.file.reader.Line; import edu.ucdenver.ccp.common.string.RegExPatterns; import edu.ucdenver.ccp.common.string.StringConstants; import edu.ucdenver.ccp.common.string.StringUtil; import edu.ucdenver.ccp.datasource.fileparsers.ebi.goa.gaf.GoaGaf2FileRecordReader; import edu.ucdenver.ccp.datasource.fileparsers.idlist.IdListFileFactory; import edu.ucdenver.ccp.datasource.fileparsers.taxonaware.TaxonAwareSingleLineFileRecordReader; import edu.ucdenver.ccp.datasource.identifiers.DataSource; import edu.ucdenver.ccp.datasource.identifiers.DataSourceIdentifier; import edu.ucdenver.ccp.datasource.identifiers.ebi.intact.IntActID; import edu.ucdenver.ccp.datasource.identifiers.ebi.ipi.IpiID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtIsoformID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.taxonomy.NcbiTaxonomyID; import edu.ucdenver.ccp.datasource.identifiers.obo.GeneOntologyID; import edu.ucdenver.ccp.datasource.identifiers.other.RnaCentralId; import edu.ucdenver.ccp.datasource.identifiers.reactome.ReactomeReactionID; import edu.ucdenver.ccp.identifier.publication.DOI; import edu.ucdenver.ccp.identifier.publication.PubMedID; /** * NOTE: This class has been deprecated as the file format that it parses has been * discontinued by UniProt and is no longer made available on their FTP site. * Please use {@link GoaGaf2FileRecordReader} as a replacement. 
* * @author Bill Baumgartner * */ @Deprecated public class GpAssociationGoaUniprotFileParser extends TaxonAwareSingleLineFileRecordReader<GpAssociationGoaUniprotFileData> { private static final Logger logger = Logger.getLogger(GpAssociationGoaUniprotFileParser.class); private final Set<DataSourceIdentifier<?>> taxonSpecificIds; /* @formatter:off */ private static final String HEADER = "!gpa-version: 1.1\n"+ "!\n"+ "!This file contains all GO annotations for proteins in the UniProt KnowledgeBase (UniProtKB).\n"+ "!If a particular protein accession is not annotated with GO, then it will not appear in this file.\n"+ "!\n"+ "!Columns:\n"+ "!\n"+ "! name required? cardinality GAF column #\n"+ "! DB required 1 1\n"+ "! DB_Object_ID required 1 2 / 17\n"+ "! Qualifier required 1 or greater 4\n"+ "! GO ID required 1 5\n"+ "! DB:Reference(s) required 1 or greater 6\n"+ "! ECO evidence code required 1 7 (GO evidence code)\n"+ "! With optional 0 or greater 8\n"+ "! Interacting taxon ID optional 0 or 1 13\n"+ "! Date required 1 14\n"+ "! Assigned_by required 1 15\n"+ "! Annotation Extension optional 0 or greater 16\n"+ "! Annotation Properties optional 0 or 1 n/a\n"+ "!\n"; /* @formatter:on */ public static final CharacterEncoding ENCODING = CharacterEncoding.US_ASCII; public static final String DELIMITER_REGEX = RegExPatterns.TAB; private static final int FILE_COLUMN_COUNT = 10; public static final String COMMENT_INDICATOR = null;// StringConstants.EXCLAMATION_MARK; // public GpAssociationGoaUniprotFileParser(File inputFile, // CharacterEncoding encoding) throws // IOException { // super(inputFile, encoding, COMMENT_INDICATOR, null); // taxonSpecificIds = null; // } // // public GpAssociationGoaUniprotFileParser(File workDirectory, boolean // clean) throws // IOException { // super(workDirectory, ENCODING, COMMENT_INDICATOR, null, null, clean, // null); // taxonSpecificIds = null; // } public GpAssociationGoaUniprotFileParser(File inputFile, CharacterEncoding encoding, File idListDirectory, Set<NcbiTaxonomyID> taxonIds) throws IOException { super(inputFile, encoding, COMMENT_INDICATOR, taxonIds); taxonSpecificIds = loadTaxonSpecificIds(idListDirectory, taxonIds); if (!isLineOfInterest(line)) { advanceToNextLineWithTaxonOfInterest(); } } private Set<DataSourceIdentifier<?>> loadTaxonSpecificIds(File idListDirectory, Set<NcbiTaxonomyID> taxonIds) throws IOException { Set<UniProtID> uniprotIdsForTaxon = IdListFileFactory.getIdListFromFile(idListDirectory, DataSource.UNIPROT, taxonIds, UniProtID.class); Set<IntActID> intactIdsForTaxon = IdListFileFactory.getIdListFromFile(idListDirectory, DataSource.IREFWEB, taxonIds, IntActID.class); Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); if (uniprotIdsForTaxon != null) { ids.addAll(uniprotIdsForTaxon); } if (intactIdsForTaxon != null) { ids.addAll(intactIdsForTaxon); } if (ids.isEmpty()) { return null; } return ids; } @Override protected String getExpectedFileHeader() throws IOException { return HEADER; } @Override protected String getFileHeader() throws IOException { StringBuffer header = new StringBuffer(); Line line = null; while ((line = readLine()).getText().startsWith(StringConstants.EXCLAMATION_MARK)) { System.out.println("header line: " + line.getText()); header.append(line.getText() + "\n"); } // make sure we don't skip the first real data line setNextLine(line); // chop off time stamp int timestampIndex = header.lastIndexOf("!Generated:"); return header.substring(0, timestampIndex); } /** * overriding b/c super.initialize() 
calls readLine() which increments the * reader. We don't want to increment the reader b/c we have already found * the first real data line while validating the header. See call to * {@link #setNextLine(Line)} in {@link #getFileHeader()} above. */ @Override protected void initialize() throws IOException { String fileHeader = getFileHeader(); validateFileHeader(fileHeader); } @Override protected GpAssociationGoaUniprotFileData parseRecordFromLine(Line line) { return parseGpAssociationGoaUniprotFileDataFromLine(line); } public static GpAssociationGoaUniprotFileData parseGpAssociationGoaUniprotFileDataFromLine(Line line) { String[] columns = FileReaderUtil.getColumnsFromLine(line.getText(), DELIMITER_REGEX); if (columns.length < FILE_COLUMN_COUNT) { String message = String.format( "Unable to initialize a new GpAssociationGoaUniprotFileData object. Expected %d columns in the input " + "String[] but there were %d columns. Columns= %s LINE=%s", FILE_COLUMN_COUNT, columns.length, Arrays.toString(columns), line.getText()); Logger.getLogger(GpAssociationGoaUniprotFileParser.class).warn(message); return null; } return initializeNewGpAssociationGoaUniprotFileData(columns, line.getByteOffset(), line.getLineNumber()); } private static GpAssociationGoaUniprotFileData initializeNewGpAssociationGoaUniprotFileData(String[] columns, long byteOffset, long lineNumber) { String database = new String(columns[0]); String databaseObjectIDStr = columns[1]; DataSourceIdentifier<?> databaseObjectID = createDatabaseObjectID(database, databaseObjectIDStr); if (databaseObjectID == null) { logger.warn("Skipping record (" + lineNumber + ") due to null database ID: " + Arrays.toString(columns)); return null; } String qualifier = null; if (!columns[2].isEmpty()) { qualifier = columns[2]; } GeneOntologyID goID = new GeneOntologyID(columns[3]); DataSourceIdentifier<?> dbReference = createDbReferenceIdentifier(columns[4]); if (dbReference == null) { Logger.getLogger(GpAssociationGoaUniprotFileParser.class).error( "Invalid Db reference value " + columns[4] + ". 
Skipping record " + Arrays.toString(columns)); return null; } String evidenceCode = columns[5]; String with = null; if (!columns[6].isEmpty()) { with = columns[6]; } String taxonomyIDStr = columns[7]; NcbiTaxonomyID extraTaxonID = null; if (!taxonomyIDStr.isEmpty()) { extraTaxonID = new NcbiTaxonomyID(taxonomyIDStr); } String date = columns[8]; String assignedBy = columns[9]; String annotationExtension = null; if (columns.length > 10 && !columns[10].isEmpty()) { annotationExtension = columns[10]; } String annotationProperties = null; if (columns.length > 11 && !columns[11].isEmpty()) { annotationProperties = columns[11]; } return new GpAssociationGoaUniprotFileData(database, databaseObjectID, qualifier, goID, dbReference, evidenceCode, with, extraTaxonID, date, assignedBy, annotationExtension, annotationProperties, byteOffset, lineNumber); } private static DataSourceIdentifier<?> createDbReferenceIdentifier(String dbReference) { String reactomePrefix = "Reactome:"; if (dbReference.startsWith("PMID")) { PubMedID id = new PubMedID(dbReference); if (id.getDataElement().intValue() <= 0) { return null; } return id; } else if (dbReference.startsWith("DOI")) { return new DOI(dbReference); } else if (dbReference.startsWith(reactomePrefix)) { return new ReactomeReactionID(dbReference.substring(reactomePrefix.length())); } else if (dbReference.startsWith("GO_REF")) { return new GoRefID(dbReference); } else if (dbReference.startsWith("GOA_REF")) { return new GoaRefID(dbReference); } logger.warn("Unhandled DB Reference ID type: " + dbReference); return null; } private static DataSourceIdentifier<?> createDatabaseObjectID(String database, String databaseObjectIDStr) { try { if (database.equals("IPI")) { return new IpiID(databaseObjectIDStr); } if (database.equals("UniProtKB")) { if (databaseObjectIDStr.contains("-")) { return new UniProtIsoformID(databaseObjectIDStr); } else if (databaseObjectIDStr.contains(":PRO_")) { // we are losing some information here (the protein chain // identifier) // TODO: this should be addressed again in the future. return new UniProtID(databaseObjectIDStr.substring(0, databaseObjectIDStr.indexOf(":"))); } return new UniProtID(databaseObjectIDStr); } if (database.equals("IntAct")) { return new IntActID(databaseObjectIDStr); } if (database.equals("RNAcentral")) { return new RnaCentralId(databaseObjectIDStr); } } catch (IllegalArgumentException e) { logger.warn(e.getMessage()); } logger.warn("Unable to handle database/id pairing -- database: " + database.toString() + " id: " + databaseObjectIDStr); return null; // throw new // IllegalArgumentException(String.format("Unable to handle database type: %s", // database)); } @Override protected NcbiTaxonomyID getLineTaxon(Line line) { if (line != null) { GpAssociationGoaUniprotFileData record = parseRecordFromLine(line); if (record != null) { DataSourceIdentifier<?> databaseObjectID = record.getDatabaseObjectID(); if (databaseObjectID instanceof UniProtID) { UniProtID uniprotId = (UniProtID) databaseObjectID; if (taxonSpecificIds != null && !taxonSpecificIds.isEmpty() && taxonSpecificIds.contains(uniprotId)) { /* * here we have matched the record uniprot id as one of * the ids of interest. We don't know exactly what taxon * it is however so we just return one (arbitrarily) of * the taxon ids of interest. this will ensure this * record is returned. 
*/ return taxonsOfInterest.iterator().next(); } } else if (databaseObjectID instanceof UniProtIsoformID) { UniProtIsoformID isoformId = (UniProtIsoformID) databaseObjectID; String uniprotIdStr = StringUtil.removeSuffixRegex(isoformId.getDataElement(), "-\\d+"); if (taxonSpecificIds != null && !taxonSpecificIds.isEmpty() && taxonSpecificIds.contains(new UniProtID(uniprotIdStr))) { /* * here we have matched the record uniprot id as one of * the ids of interest. We don't know exactly what taxon * it is however so we just return one (arbitrarily) of * the taxon ids of interest. this will ensure this * record is returned. */ return taxonsOfInterest.iterator().next(); } } else if (databaseObjectID instanceof IntActID) { IntActID intactId = (IntActID) databaseObjectID; if (taxonSpecificIds != null && !taxonSpecificIds.isEmpty() && taxonSpecificIds.contains(intactId)) { /* * here we have matched the record intact id as one of * the ids of interest. We don't know exactly what taxon * it is however so we just return one (arbitrarily) of * the taxon ids of interest. this will ensure this * record is returned. */ return taxonsOfInterest.iterator().next(); } } else { logger.warn("Unhandled non-UniProt id in GO data while trying to create a species specific subset: " + databaseObjectID.getDataSource() + " -- " + databaseObjectID); } } } return new NcbiTaxonomyID(0); } }
datasource-fileparsers/src/main/java/edu/ucdenver/ccp/datasource/fileparsers/ebi/goa/GpAssociationGoaUniprotFileParser.java
package edu.ucdenver.ccp.datasource.fileparsers.ebi.goa; /* * #%L * Colorado Computational Pharmacology's common module * %% * Copyright (C) 2012 - 2015 Regents of the University of Colorado * %% * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * #L% */ import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import java.util.zip.GZIPInputStream; import org.apache.log4j.Logger; import edu.ucdenver.ccp.common.download.FtpDownload; import edu.ucdenver.ccp.common.file.CharacterEncoding; import edu.ucdenver.ccp.common.file.FileReaderUtil; import edu.ucdenver.ccp.common.file.reader.Line; import edu.ucdenver.ccp.common.file.reader.StreamLineReader; import edu.ucdenver.ccp.common.ftp.FTPUtil.FileType; import edu.ucdenver.ccp.common.string.RegExPatterns; import edu.ucdenver.ccp.common.string.StringConstants; import edu.ucdenver.ccp.common.string.StringUtil; import edu.ucdenver.ccp.datasource.fileparsers.download.FtpHost; import edu.ucdenver.ccp.datasource.fileparsers.idlist.IdListFileFactory; import edu.ucdenver.ccp.datasource.fileparsers.taxonaware.TaxonAwareSingleLineFileRecordReader; import edu.ucdenver.ccp.datasource.identifiers.DataSource; import edu.ucdenver.ccp.datasource.identifiers.DataSourceIdentifier; import edu.ucdenver.ccp.datasource.identifiers.ebi.intact.IntActID; import edu.ucdenver.ccp.datasource.identifiers.ebi.ipi.IpiID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtIsoformID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.taxonomy.NcbiTaxonomyID; import edu.ucdenver.ccp.datasource.identifiers.obo.GeneOntologyID; import edu.ucdenver.ccp.datasource.identifiers.other.RnaCentralId; import edu.ucdenver.ccp.datasource.identifiers.reactome.ReactomeReactionID; import edu.ucdenver.ccp.identifier.publication.DOI; import edu.ucdenver.ccp.identifier.publication.PubMedID; /** * Parser for the gp_association.goa_uniprot file available here: * 
ftp://ftp.ebi.ac.uk/pub/databases/GO/goa/UNIPROT/ * * @author Bill Baumgartner * */ public class GpAssociationGoaUniprotFileParser extends TaxonAwareSingleLineFileRecordReader<GpAssociationGoaUniprotFileData> { private static final Logger logger = Logger.getLogger(GpAssociationGoaUniprotFileParser.class); private final Set<DataSourceIdentifier<?>> taxonSpecificIds; /* @formatter:off */ private static final String HEADER = "!gpa-version: 1.1\n"+ "!\n"+ "!This file contains all GO annotations for proteins in the UniProt KnowledgeBase (UniProtKB).\n"+ "!If a particular protein accession is not annotated with GO, then it will not appear in this file.\n"+ "!\n"+ "!Columns:\n"+ "!\n"+ "! name required? cardinality GAF column #\n"+ "! DB required 1 1\n"+ "! DB_Object_ID required 1 2 / 17\n"+ "! Qualifier required 1 or greater 4\n"+ "! GO ID required 1 5\n"+ "! DB:Reference(s) required 1 or greater 6\n"+ "! ECO evidence code required 1 7 (GO evidence code)\n"+ "! With optional 0 or greater 8\n"+ "! Interacting taxon ID optional 0 or 1 13\n"+ "! Date required 1 14\n"+ "! Assigned_by required 1 15\n"+ "! Annotation Extension optional 0 or greater 16\n"+ "! Annotation Properties optional 0 or 1 n/a\n"+ "!\n"; /* @formatter:on */ public static final String FTP_FILE_NAME = "gp_association.goa_uniprot.gz"; public static final CharacterEncoding ENCODING = CharacterEncoding.US_ASCII; @FtpDownload(server = FtpHost.GOA_HOST, path = FtpHost.GOA_PATH, filename = FTP_FILE_NAME, filetype = FileType.BINARY) private File goaDataFile; public static final String DELIMITER_REGEX = RegExPatterns.TAB; private static final int FILE_COLUMN_COUNT = 10; public static final String COMMENT_INDICATOR = null;// StringConstants.EXCLAMATION_MARK; // public GpAssociationGoaUniprotFileParser(File inputFile, // CharacterEncoding encoding) throws // IOException { // super(inputFile, encoding, COMMENT_INDICATOR, null); // taxonSpecificIds = null; // } // // public GpAssociationGoaUniprotFileParser(File workDirectory, boolean // clean) throws // IOException { // super(workDirectory, ENCODING, COMMENT_INDICATOR, null, null, clean, // null); // taxonSpecificIds = null; // } public GpAssociationGoaUniprotFileParser(File inputFile, CharacterEncoding encoding, File idListDirectory, Set<NcbiTaxonomyID> taxonIds) throws IOException { super(inputFile, encoding, COMMENT_INDICATOR, taxonIds); taxonSpecificIds = loadTaxonSpecificIds(idListDirectory, taxonIds); if (!isLineOfInterest(line)) { advanceToNextLineWithTaxonOfInterest(); } } private Set<DataSourceIdentifier<?>> loadTaxonSpecificIds(File idListDirectory, Set<NcbiTaxonomyID> taxonIds) throws IOException { Set<UniProtID> uniprotIdsForTaxon = IdListFileFactory.getIdListFromFile(idListDirectory, DataSource.UNIPROT, taxonIds, UniProtID.class); Set<IntActID> intactIdsForTaxon = IdListFileFactory.getIdListFromFile(idListDirectory, DataSource.IREFWEB, taxonIds, IntActID.class); Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); if (uniprotIdsForTaxon != null) { ids.addAll(uniprotIdsForTaxon); } if (intactIdsForTaxon != null) { ids.addAll(intactIdsForTaxon); } if (ids.isEmpty()) { return null; } return ids; } public GpAssociationGoaUniprotFileParser(File workDirectory, boolean clean, File idListDirectory, Set<NcbiTaxonomyID> taxonIds) throws IOException { super(workDirectory, ENCODING, COMMENT_INDICATOR, null, null, clean, taxonIds); taxonSpecificIds = loadTaxonSpecificIds(idListDirectory, taxonIds); if (!isLineOfInterest(line)) { advanceToNextLineWithTaxonOfInterest(); } 
} @Override protected StreamLineReader initializeLineReaderFromDownload(CharacterEncoding encoding, String skipLinePrefix) throws IOException { return new StreamLineReader(new GZIPInputStream(new FileInputStream(goaDataFile)), encoding, skipLinePrefix); } @Override protected String getExpectedFileHeader() throws IOException { return HEADER; } @Override protected String getFileHeader() throws IOException { StringBuffer header = new StringBuffer(); Line line = null; while ((line = readLine()).getText().startsWith(StringConstants.EXCLAMATION_MARK)) { System.out.println("header line: " + line.getText()); header.append(line.getText() + "\n"); } // make sure we don't skip the first real data line setNextLine(line); // chop off time stamp int timestampIndex = header.lastIndexOf("!Generated:"); return header.substring(0, timestampIndex); } /** * overriding b/c super.initialize() calls readLine() which increments the * reader. We don't want to increment the reader b/c we have already found * the first real data line while validating the header. See call to * {@link #setNextLine(Line)} in {@link #getFileHeader()} above. */ @Override protected void initialize() throws IOException { String fileHeader = getFileHeader(); validateFileHeader(fileHeader); } @Override protected GpAssociationGoaUniprotFileData parseRecordFromLine(Line line) { return parseGpAssociationGoaUniprotFileDataFromLine(line); } public static GpAssociationGoaUniprotFileData parseGpAssociationGoaUniprotFileDataFromLine(Line line) { String[] columns = FileReaderUtil.getColumnsFromLine(line.getText(), DELIMITER_REGEX); if (columns.length < FILE_COLUMN_COUNT) { String message = String.format( "Unable to initialize a new GpAssociationGoaUniprotFileData object. Expected %d columns in the input " + "String[] but there were %d columns. Columns= %s LINE=%s", FILE_COLUMN_COUNT, columns.length, Arrays.toString(columns), line.getText()); Logger.getLogger(GpAssociationGoaUniprotFileParser.class).warn(message); return null; } return initializeNewGpAssociationGoaUniprotFileData(columns, line.getByteOffset(), line.getLineNumber()); } private static GpAssociationGoaUniprotFileData initializeNewGpAssociationGoaUniprotFileData(String[] columns, long byteOffset, long lineNumber) { String database = new String(columns[0]); String databaseObjectIDStr = columns[1]; DataSourceIdentifier<?> databaseObjectID = createDatabaseObjectID(database, databaseObjectIDStr); if (databaseObjectID == null) { logger.warn("Skipping record (" + lineNumber + ") due to null database ID: " + Arrays.toString(columns)); return null; } String qualifier = null; if (!columns[2].isEmpty()) { qualifier = columns[2]; } GeneOntologyID goID = new GeneOntologyID(columns[3]); DataSourceIdentifier<?> dbReference = createDbReferenceIdentifier(columns[4]); if (dbReference == null) { Logger.getLogger(GpAssociationGoaUniprotFileParser.class).error( "Invalid Db reference value " + columns[4] + ". 
Skipping record " + Arrays.toString(columns)); return null; } String evidenceCode = columns[5]; String with = null; if (!columns[6].isEmpty()) { with = columns[6]; } String taxonomyIDStr = columns[7]; NcbiTaxonomyID extraTaxonID = null; if (!taxonomyIDStr.isEmpty()) { extraTaxonID = new NcbiTaxonomyID(taxonomyIDStr); } String date = columns[8]; String assignedBy = columns[9]; String annotationExtension = null; if (columns.length > 10 && !columns[10].isEmpty()) { annotationExtension = columns[10]; } String annotationProperties = null; if (columns.length > 11 && !columns[11].isEmpty()) { annotationProperties = columns[11]; } return new GpAssociationGoaUniprotFileData(database, databaseObjectID, qualifier, goID, dbReference, evidenceCode, with, extraTaxonID, date, assignedBy, annotationExtension, annotationProperties, byteOffset, lineNumber); } private static DataSourceIdentifier<?> createDbReferenceIdentifier(String dbReference) { String reactomePrefix = "Reactome:"; if (dbReference.startsWith("PMID")) { PubMedID id = new PubMedID(dbReference); if (id.getDataElement().intValue() <= 0) { return null; } return id; } else if (dbReference.startsWith("DOI")) { return new DOI(dbReference); } else if (dbReference.startsWith(reactomePrefix)) { return new ReactomeReactionID(dbReference.substring(reactomePrefix.length())); } else if (dbReference.startsWith("GO_REF")) { return new GoRefID(dbReference); } else if (dbReference.startsWith("GOA_REF")) { return new GoaRefID(dbReference); } logger.warn("Unhandled DB Reference ID type: " + dbReference); return null; } private static DataSourceIdentifier<?> createDatabaseObjectID(String database, String databaseObjectIDStr) { try { if (database.equals("IPI")) { return new IpiID(databaseObjectIDStr); } if (database.equals("UniProtKB")) { if (databaseObjectIDStr.contains("-")) { return new UniProtIsoformID(databaseObjectIDStr); } else if (databaseObjectIDStr.contains(":PRO_")) { // we are losing some information here (the protein chain // identifier) // TODO: this should be addressed again in the future. return new UniProtID(databaseObjectIDStr.substring(0, databaseObjectIDStr.indexOf(":"))); } return new UniProtID(databaseObjectIDStr); } if (database.equals("IntAct")) { return new IntActID(databaseObjectIDStr); } if (database.equals("RNAcentral")) { return new RnaCentralId(databaseObjectIDStr); } } catch (IllegalArgumentException e) { logger.warn(e.getMessage()); } logger.warn("Unable to handle database/id pairing -- database: " + database.toString() + " id: " + databaseObjectIDStr); return null; // throw new // IllegalArgumentException(String.format("Unable to handle database type: %s", // database)); } @Override protected NcbiTaxonomyID getLineTaxon(Line line) { if (line != null) { GpAssociationGoaUniprotFileData record = parseRecordFromLine(line); if (record != null) { DataSourceIdentifier<?> databaseObjectID = record.getDatabaseObjectID(); if (databaseObjectID instanceof UniProtID) { UniProtID uniprotId = (UniProtID) databaseObjectID; if (taxonSpecificIds != null && !taxonSpecificIds.isEmpty() && taxonSpecificIds.contains(uniprotId)) { /* * here we have matched the record uniprot id as one of * the ids of interest. We don't know exactly what taxon * it is however so we just return one (arbitrarily) of * the taxon ids of interest. this will ensure this * record is returned. 
*/ return taxonsOfInterest.iterator().next(); } } else if (databaseObjectID instanceof UniProtIsoformID) { UniProtIsoformID isoformId = (UniProtIsoformID) databaseObjectID; String uniprotIdStr = StringUtil.removeSuffixRegex(isoformId.getDataElement(), "-\\d+"); if (taxonSpecificIds != null && !taxonSpecificIds.isEmpty() && taxonSpecificIds.contains(new UniProtID(uniprotIdStr))) { /* * here we have matched the record uniprot id as one of * the ids of interest. We don't know exactly what taxon * it is however so we just return one (arbitrarily) of * the taxon ids of interest. this will ensure this * record is returned. */ return taxonsOfInterest.iterator().next(); } } else if (databaseObjectID instanceof IntActID) { IntActID intactId = (IntActID) databaseObjectID; if (taxonSpecificIds != null && !taxonSpecificIds.isEmpty() && taxonSpecificIds.contains(intactId)) { /* * here we have matched the record intact id as one of * the ids of interest. We don't know exactly what taxon * it is however so we just return one (arbitrarily) of * the taxon ids of interest. this will ensure this * record is returned. */ return taxonsOfInterest.iterator().next(); } } else { logger.warn("Unhandled non-UniProt id in GO data while trying to create a species specific subset: " + databaseObjectID.getDataSource() + " -- " + databaseObjectID); } } } return new NcbiTaxonomyID(0); } }
Deprecated the old GOA parser as UniProt has discontinued the file that it parsed. All references to the downloaded file have been removed because it is no longer available.
datasource-fileparsers/src/main/java/edu/ucdenver/ccp/datasource/fileparsers/ebi/goa/GpAssociationGoaUniprotFileParser.java
Deprecated the old GOA parser as UniProt has discontinued the file that it parsed
<ide><path>atasource-fileparsers/src/main/java/edu/ucdenver/ccp/datasource/fileparsers/ebi/goa/GpAssociationGoaUniprotFileParser.java <ide> */ <ide> <ide> import java.io.File; <del>import java.io.FileInputStream; <ide> import java.io.IOException; <ide> import java.util.Arrays; <ide> import java.util.HashSet; <ide> import java.util.Set; <del>import java.util.zip.GZIPInputStream; <ide> <ide> import org.apache.log4j.Logger; <ide> <del>import edu.ucdenver.ccp.common.download.FtpDownload; <ide> import edu.ucdenver.ccp.common.file.CharacterEncoding; <ide> import edu.ucdenver.ccp.common.file.FileReaderUtil; <ide> import edu.ucdenver.ccp.common.file.reader.Line; <del>import edu.ucdenver.ccp.common.file.reader.StreamLineReader; <del>import edu.ucdenver.ccp.common.ftp.FTPUtil.FileType; <ide> import edu.ucdenver.ccp.common.string.RegExPatterns; <ide> import edu.ucdenver.ccp.common.string.StringConstants; <ide> import edu.ucdenver.ccp.common.string.StringUtil; <del>import edu.ucdenver.ccp.datasource.fileparsers.download.FtpHost; <add>import edu.ucdenver.ccp.datasource.fileparsers.ebi.goa.gaf.GoaGaf2FileRecordReader; <ide> import edu.ucdenver.ccp.datasource.fileparsers.idlist.IdListFileFactory; <ide> import edu.ucdenver.ccp.datasource.fileparsers.taxonaware.TaxonAwareSingleLineFileRecordReader; <ide> import edu.ucdenver.ccp.datasource.identifiers.DataSource; <ide> import edu.ucdenver.ccp.identifier.publication.PubMedID; <ide> <ide> /** <del> * Parser for the gp_association.goa_uniprot file available here: <del> * ftp://ftp.ebi.ac.uk/pub/databases/GO/goa/UNIPROT/ <add> * NOTE: This class has been deprecated as the file format that it parses has been <add> * discontinued by UniProt and is no longer made available on their FTP site. <add> * Please use {@link GoaGaf2FileRecordReader} as a replacement. <ide> * <ide> * @author Bill Baumgartner <ide> * <ide> */ <add>@Deprecated <ide> public class GpAssociationGoaUniprotFileParser extends <ide> TaxonAwareSingleLineFileRecordReader<GpAssociationGoaUniprotFileData> { <ide> <ide> <ide> /* @formatter:on */ <ide> <del> public static final String FTP_FILE_NAME = "gp_association.goa_uniprot.gz"; <ide> public static final CharacterEncoding ENCODING = CharacterEncoding.US_ASCII; <del> @FtpDownload(server = FtpHost.GOA_HOST, path = FtpHost.GOA_PATH, filename = FTP_FILE_NAME, filetype = FileType.BINARY) <del> private File goaDataFile; <ide> <ide> public static final String DELIMITER_REGEX = RegExPatterns.TAB; <ide> private static final int FILE_COLUMN_COUNT = 10; <ide> return null; <ide> } <ide> return ids; <del> } <del> <del> public GpAssociationGoaUniprotFileParser(File workDirectory, boolean clean, File idListDirectory, <del> Set<NcbiTaxonomyID> taxonIds) throws IOException { <del> super(workDirectory, ENCODING, COMMENT_INDICATOR, null, null, clean, taxonIds); <del> taxonSpecificIds = loadTaxonSpecificIds(idListDirectory, taxonIds); <del> if (!isLineOfInterest(line)) { <del> advanceToNextLineWithTaxonOfInterest(); <del> } <del> } <del> <del> @Override <del> protected StreamLineReader initializeLineReaderFromDownload(CharacterEncoding encoding, String skipLinePrefix) <del> throws IOException { <del> return new StreamLineReader(new GZIPInputStream(new FileInputStream(goaDataFile)), encoding, skipLinePrefix); <ide> } <ide> <ide> @Override
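For reference, a minimal sketch of how the now-deprecated parser is driven, using the four-argument constructor visible in the source above. The GPA file path, id-list directory, and taxonomy id (9606) are placeholders; the hasNext()/next() iteration is an assumption taken from the TaxonAwareSingleLineFileRecordReader parent class, which is not shown in this record. The deprecation note names GoaGaf2FileRecordReader as the replacement, but its constructor is not shown here, so it is only mentioned, not exercised.

package edu.ucdenver.ccp.datasource.fileparsers.ebi.goa;

import java.io.File;
import java.util.Collections;
import java.util.Set;

import edu.ucdenver.ccp.datasource.identifiers.ncbi.taxonomy.NcbiTaxonomyID;

public class GpAssociationParserUsageSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder inputs: point these at a real gp_association file and id-list directory.
        File gpaFile = new File("/path/to/gp_association.goa_uniprot");
        File idListDir = new File("/path/to/id-lists");
        Set<NcbiTaxonomyID> taxonIds =
                Collections.singleton(new NcbiTaxonomyID(9606)); // 9606 (human) is a placeholder taxon

        // Constructor signature as shown in the parser source above.
        GpAssociationGoaUniprotFileParser parser = new GpAssociationGoaUniprotFileParser(
                gpaFile, GpAssociationGoaUniprotFileParser.ENCODING, idListDir, taxonIds);

        // Assumption: the reader exposes an Iterator-style hasNext()/next() contract
        // via its TaxonAwareSingleLineFileRecordReader parent (not shown in this record).
        while (parser.hasNext()) {
            GpAssociationGoaUniprotFileData record = parser.next();
            // getDatabaseObjectID() is used by the parser's own getLineTaxon() method.
            System.out.println(record.getDatabaseObjectID());
        }
    }
}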
Java
agpl-3.0
b6b1a122c5b404583583479ec087d38b77d07316
0
VladRodionov/bigbase,VladRodionov/bigbase,VladRodionov/bigbase
/******************************************************************************* * Copyright (c) 2013 Vladimir Rodionov. All Rights Reserved * * This code is released under the GNU Affero General Public License. * * See: http://www.fsf.org/licensing/licenses/agpl-3.0.html * * VLADIMIR RODIONOV MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY * OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR * NON-INFRINGEMENT. Vladimir Rodionov SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED * BY LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR * ITS DERIVATIVES. * * Author: Vladimir Rodionov * *******************************************************************************/ package com.koda.integ.hbase.blockcache; import java.io.File; import java.io.IOException; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.CacheStats; import org.apache.hadoop.hbase.io.hfile.Cacheable; import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer; import org.apache.hadoop.util.StringUtils; import com.koda.KodaException; import com.koda.cache.CacheManager; import com.koda.cache.CacheScanner; import com.koda.cache.OffHeapCache; import com.koda.cache.eviction.EvictionListener; import com.koda.cache.eviction.EvictionPolicy; import com.koda.compression.Codec; import com.koda.compression.CodecType; import com.koda.config.CacheConfiguration; import com.koda.integ.hbase.storage.ExtStorage; import com.koda.integ.hbase.storage.ExtStorageManager; import com.koda.integ.hbase.storage.StorageHandle; import com.koda.integ.hbase.util.CacheableSerializer; import com.koda.integ.hbase.util.StorageHandleSerializer; import com.koda.integ.hbase.util.Utils; import com.koda.io.serde.SerDe; import com.koda.persistence.PersistenceMode; import com.koda.persistence.rawfs.RawFSConfiguration; import com.koda.persistence.rawfs.RawFSStore; // TODO: Auto-generated Javadoc /** * An off-heap block cache implementation that is memory-aware, * memory-bound using an LRU eviction algorithm, and concurrent: backed by a. * * {@link OffHeapCache} and with a non-blocking fast eviction giving * constant-time {@link #cacheBlock} and {@link #getBlock} operations.<p> * * Contains three levels of block priority to allow for * scan-resistance and in-memory families. A block is added with an inMemory * flag if necessary, otherwise a block becomes a single access priority. Once * a blocked is accessed again, it changes to multiple access. This is used * to prevent scans from thrashing the cache, adding a least-frequently-used * element to the eviction algorithm.<p> * * Each priority is given its own chunk of the total cache to ensure * fairness during eviction. 
Each priority will retain close to its maximum * size, however, if any priority is not using its entire chunk the others * are able to grow beyond their chunk size.<p> * * Instantiated at a minimum with the total size and average block size. * All sizes are in bytes. The block size is not especially important as this * cache is fully dynamic in its sizing of blocks. It is only used for * pre-allocating data structures.<p> * * The detailed constructor defines the sizes for the three priorities (they * should total to the maximum size defined). It also sets the levels that * trigger and control the eviction thread.<p> * * The acceptable size is the cache size level which triggers the eviction * process to start. It evicts enough blocks to get the size below the * minimum size specified.<p> * * * TODO: * * 1. Block data encoding support (see fb-89 L2) ??? * * 2. Implement: * * Each priority is given its own chunk of the total cache to ensure * fairness during eviction. Each priority will retain close to its maximum * size, however, if any priority is not using its entire chunk the others * are able to grow beyond their chunk size. * * * Notes on Cassandra and SSD. Cassandra (1.1+) has so -called flexible data placement * (mixed storage support) feature, * which allows to place particular CFs into separate mounts (SSD) * http://www.datastax.com/dev/blog/whats-new-in-cassandra-1-1-flexible-data-file-placement * * This is not so efficient as a true (SSD - backed) block cache in HBase * * Some additional links (to use a reference): * http://readwrite.com/2012/04/27/cassandra-11-brings-cache-tuning-mixed-storage-support#awesm=~ofb1zhDSfBqb90 * * * 3. Flexible memory limits. Currently, we have 4 (young, tenured, permanent and external) caches and each cache * relies on its limits to activate eviction. This may result in sub-par usage of an available memory. * + 4. 16 bytes hashed values for 'external' cache keys (MD5 or what?). * + 5. Compression for external storage. * 6. External storage to keep both keys and blocks * Format: * 0..3 total record size * 4..7 key size * 8..11 value size * 12 .. (key size + 12) key data * x .. value data * * * TODO: It seems not all cached data are HFileBlock? */ public class OffHeapBlockCache implements BlockCache, HeapSize { /** The Constant YOUNG_GEN_FACTOR. */ public final static String BLOCK_CACHE_YOUNG_GEN_FACTOR = "offheap.blockcache.young.gen.factor"; /** The Constant BLOCK_CACHE_MEMORY_SIZE. */ public final static String BLOCK_CACHE_MEMORY_SIZE = "offheap.blockcache.size"; public final static String HEAP_BLOCK_CACHE_MEMORY_RATIO = "offheap.blockcache.onheap.ratio"; /** The Constant BLOCK_CACHE_IMPL. */ public final static String BLOCK_CACHE_IMPL = "offheap.blockcache.impl"; /** The Constant EXT_STORAGE_FACTOR. */ public final static String BLOCK_CACHE_EXT_STORAGE_MEMORY_SIZE = "offheap.blockcache.storage.ref.size"; /** The Constant _COMPRESSION. */ public final static String BLOCK_CACHE_COMPRESSION = "offheap.blockcache.compression"; /** The Constant OVERFLOW_TO_EXT_STORAGE_ENABLED. */ public final static String BLOCK_CACHE_OVERFLOW_TO_EXT_STORAGE_ENABLED = "offheap.blockcache.storage.enabled"; /** The Constant EXT_STORAGE_IMPL. */ public final static String BLOCK_CACHE_EXT_STORAGE_IMPL = "offheap.blockcache.storage.impl"; /** The Constant BLOCK_CACHE_ONHEAP_ENABLED. 
*/ public final static String BLOCK_CACHE_ONHEAP_ENABLED = "offheap.blockcache.onheap.enabled"; public final static String BLOCK_CACHE_TEST_MODE = "offheap.blockcache.test.mode"; public final static String BLOCK_CACHE_PERSISTENT = "offheap.blockcache.persistent"; public final static String BLOCK_CACHE_SNAPSHOTS ="offheap.blockcache.snapshots.enabled"; public final static String BLOCK_CACHE_SNAPSHOT_INTERVAL ="offheap.blockcache.snapshots.interval"; public final static String BLOCK_CACHE_DATA_ROOTS = "offheap.blockcache.storage.dir"; /** Default is LRU2Q, possible values: LRU, LFU, RANDOM, FIFO */ public final static String BLOCK_CACHE_EVICTION = "offheap.blockcache.eviction"; public final static String BLOCK_CACHE_BUFFER_SIZE = "offheap.blockcache.nativebuffer.size"; public final static int DEFAULT_BLOCK_CACH_BUFFER_SIZE = 1024*1024; // 1 MB /** The Constant LOG. */ static final Log LOG = LogFactory.getLog(OffHeapBlockCache.class); /** Default Configuration Parameters. */ static final int EXT_STORAGE_REF_SIZE = 50; /** Young generation. */ static final float DEFAULT_YOUNG_FACTOR = 0.5f; /** 10 % of JVM heap size is dedicated to on heap cache */ static final float DEFAULT_HEAP_BLOCK_CACHE_MEMORY_RATIO = 0.1f; /** Statistics thread. */ static final int statThreadPeriod = 60000; /** Main (off - heap) cache. All blocks go to this cache first*/ private OffHeapCache offHeapCache; /** External storage handle cache. */ private OffHeapCache extStorageCache; /** Cache statistics - combined */ private final CacheStats stats; /** On-heap cache stats */ private final CacheStats onHeapStats; /** Off-heap cache stats */ private final CacheStats offHeapStats; /** External cache stats -L3 */ private final CacheStats extStats; /** External references cache stats in RAM*/ private final CacheStats extRefStats; /** Maximum allowable size of cache (block put if size > max, evict). */ private long blockCacheMaxSize; /** Maximum allowable size of external storage cache. */ private long extCacheMaxSize; /** Approximate block size. */ private long blockSize; /** Direct buffer block size */ private int nativeBufferSize = DEFAULT_BLOCK_CACH_BUFFER_SIZE; /** Single access bucket size. */ private float youngGenFactor; /** * Data overflow to external storage. */ private boolean overflowExtEnabled = false; /** * Save Ref cache on shutdown */ private boolean isPersistent = false; @SuppressWarnings("unused") private boolean isSnapshotsEnabled = false; @SuppressWarnings("unused") private long snapshotsInterval = 0; /** external storage (file or network - based). */ private ExtStorage storage; /** The stat thread. */ private StatisticsThread statThread; /** The deserializer. */ private AtomicReference<CacheableDeserializer<Cacheable>> deserializer = new AtomicReference<CacheableDeserializer<Cacheable>>(); /** Fast on-heap cache to store NON-DATA blocks (INDEX, BLOOM etc). */ private OnHeapBlockCache onHeapCache; private boolean testMode = false; private AtomicLong fatalExternalReads = new AtomicLong(0); /** * Instantiates a new off heap block cache. 
* * @param conf the conf */ public OffHeapBlockCache(Configuration conf) { this.blockSize = conf.getInt("hbase.offheapcache.minblocksize", HColumnDescriptor.DEFAULT_BLOCKSIZE); blockCacheMaxSize = conf.getLong(BLOCK_CACHE_MEMORY_SIZE, 0L); if(blockCacheMaxSize == 0L){ throw new RuntimeException("off heap block cache size is not defined"); } nativeBufferSize = conf.getInt(BLOCK_CACHE_BUFFER_SIZE, DEFAULT_BLOCK_CACH_BUFFER_SIZE); extCacheMaxSize = conf.getLong(BLOCK_CACHE_EXT_STORAGE_MEMORY_SIZE, (long) (0.1 * blockCacheMaxSize)); youngGenFactor = conf.getFloat(BLOCK_CACHE_YOUNG_GEN_FACTOR, DEFAULT_YOUNG_FACTOR); overflowExtEnabled = conf.getBoolean(BLOCK_CACHE_OVERFLOW_TO_EXT_STORAGE_ENABLED, false); testMode = conf.getBoolean(BLOCK_CACHE_TEST_MODE, false); isPersistent = conf.getBoolean(BLOCK_CACHE_PERSISTENT, false); isSnapshotsEnabled = conf.getBoolean(BLOCK_CACHE_SNAPSHOTS, false); snapshotsInterval = conf.getInt(BLOCK_CACHE_SNAPSHOT_INTERVAL, 600) * 1000; String[] dataRoots = getDataRoots(conf.get(BLOCK_CACHE_DATA_ROOTS)); if(isPersistent && dataRoots == null){ dataRoots = getHDFSRoots(conf); if(dataRoots == null){ LOG.warn("Data roots are not defined. Set persistent mode to false."); isPersistent = false; } } adjustMaxMemory(); /** Possible values: none, snappy, gzip, lz4, lz4hc */ // TODO: LZ4 is not supported on all platforms // TODO: default compression is LZ4? CodecType codec = CodecType.LZ4; String value = conf.get(BLOCK_CACHE_COMPRESSION); if(value != null){ codec = CodecType.valueOf(value.toUpperCase()); } try { //TODO - Verify we have deep enough copy CacheConfiguration cacheCfg = new CacheConfiguration(); cacheCfg.setCacheName("block-cache"); cacheCfg.setSerDeBufferSize(nativeBufferSize); cacheCfg.setMaxMemory(blockCacheMaxSize); cacheCfg.setCodecType(codec); String evictionPolicy =conf.get(BLOCK_CACHE_EVICTION, "LRU").toUpperCase(); cacheCfg.setEvictionPolicy(evictionPolicy); // Set this only for LRU2Q cacheCfg.setLRU2QInsertPoint(youngGenFactor); setBucketNumber(cacheCfg); CacheManager manager = CacheManager.getInstance(); if(overflowExtEnabled == true){ LOG.info("Overflow to external storage is enabled."); // External storage handle cache CacheConfiguration extStorageCfg = new CacheConfiguration(); extStorageCfg.setCacheName("extStorageCache"); extStorageCfg.setMaxMemory(extCacheMaxSize); extStorageCfg.setCodecType(codec); extStorageCfg.setEvictionPolicy(EvictionPolicy.LRU.toString()); extStorageCfg.setSerDeBufferSize(4096);// small // calculate bucket number // 50 is estimate of a record size int buckets = (extCacheMaxSize / EXT_STORAGE_REF_SIZE) > Integer.MAX_VALUE? Integer.MAX_VALUE -1: (int) (extCacheMaxSize / EXT_STORAGE_REF_SIZE); extStorageCfg.setBucketNumber(buckets); if(isPersistent){ // TODO - this in memory cache has same data dirs as a major cache. 
RawFSConfiguration storeConfig = new RawFSConfiguration(); storeConfig.setStoreName(extStorageCfg.getCacheName()); storeConfig.setDiskStoreImplementation(RawFSStore.class); storeConfig.setDbDataStoreRoots(dataRoots); storeConfig.setPersistenceMode(PersistenceMode.ONDEMAND); storeConfig.setDbCompressionType(CodecType.LZ4); storeConfig.setDbSnapshotInterval(15); //storeConfig.setTotalWorkerThreads(Runtime.getRuntime().availableProcessors() /2); //storeConfig.setTotalIOThreads(1); extStorageCfg.setDataStoreConfiguration(storeConfig); } // This will initiate the load of stored cache data // if persistance is enabled extStorageCache = manager.getCache(extStorageCfg, null); // Initialize external storage storage = ExtStorageManager.getInstance().getStorage(conf, extStorageCache); } else{ LOG.info("Overflow to external storage is disabled."); if(isPersistent){ RawFSConfiguration storeConfig = new RawFSConfiguration(); storeConfig.setStoreName(cacheCfg.getCacheName()); storeConfig.setDiskStoreImplementation(RawFSStore.class); storeConfig.setDbDataStoreRoots(dataRoots); storeConfig.setPersistenceMode(PersistenceMode.ONDEMAND); storeConfig.setDbSnapshotInterval(15); cacheCfg.setDataStoreConfiguration(storeConfig); // Load cache data offHeapCache = manager.getCache(cacheCfg, null); } } if(offHeapCache == null){ offHeapCache = manager.getCache(cacheCfg, null); } } catch (Exception e) { throw new RuntimeException(e); } boolean onHeapEnabled = conf.getBoolean(BLOCK_CACHE_ONHEAP_ENABLED, true); if(onHeapEnabled){ long onHeapCacheSize = calculateOnHeapCacheSize(conf); if(onHeapCacheSize > 0){ onHeapCache = new OnHeapBlockCache(onHeapCacheSize, blockSize, conf); LOG.info("Created fast on-heap cache. Size="+onHeapCacheSize); } else{ LOG.warn("Conflicting configuration options. On-heap cache is disabled."); } } this.stats = new CacheStats(); this.onHeapStats = new CacheStats(); this.offHeapStats = new CacheStats(); this.extStats = new CacheStats(); this.extRefStats = new CacheStats(); EvictionListener listener = new EvictionListener(){ @Override public void evicted(long ptr, Reason reason, long nanoTime) { stats.evict(); stats.evicted(); } }; offHeapCache.setEvictionListener(listener); // Cacheable serializer registration CacheableSerializer serde = new CacheableSerializer(); offHeapCache.getSerDe().registerSerializer(serde); if( extStorageCache != null){ StorageHandleSerializer serde2 = new StorageHandleSerializer(); extStorageCache.getSerDe().registerSerializer(serde2); } // Start statistics thread statThread = new StatisticsThread(this); statThread.start(); } private String[] getHDFSRoots(Configuration conf) { // Use default dfs data directories String str = conf.get("dfs.data.dir"); if(str == null) return null; String[] dirs = str.split(","); for(int i=0 ; i < dirs.length; i++){ dirs[i] = dirs[i].trim() + File.separator + "blockcache"; } return dirs; } private String[] getDataRoots(String roots) { if (roots == null) return null; String[] rts = roots.split(","); String[] retValue = new String[rts.length]; for( int i=0; i < retValue.length; i++){ retValue[i] = rts[i].trim(); } return retValue; } private final boolean isTestMode() { return testMode; } /** * Calculate on heap cache size. 
* * @param conf the conf * @return the long */ private long calculateOnHeapCacheSize(Configuration conf) { float cachePercentage = conf.getFloat( HEAP_BLOCK_CACHE_MEMORY_RATIO, DEFAULT_HEAP_BLOCK_CACHE_MEMORY_RATIO); if (cachePercentage == 0L) { // block cache disabled on heap return 0L; } if (cachePercentage > 1.0) { throw new IllegalArgumentException(HEAP_BLOCK_CACHE_MEMORY_RATIO + " must be between 0.0 and 1.0, and not > 1.0"); } // Calculate the amount of heap to give the heap. MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage(); long cacheSize = (long) (mu.getMax() * cachePercentage); return cacheSize; } /** * Adjust max memory. */ private void adjustMaxMemory() { if(overflowExtEnabled == true && extCacheMaxSize == 0){ blockCacheMaxSize = (long) (0.95 * blockCacheMaxSize); // By default we set back 5% to external ref cache extCacheMaxSize = (long) (0.05 * blockCacheMaxSize); } LOG.info("Block cache max size ="+blockCacheMaxSize+" external cache support (in RAM)="+extCacheMaxSize); } /** * Sets the bucket number. * * @param cfg the new bucket number */ private void setBucketNumber(CacheConfiguration cfg) { long memSize = cfg.getMaxMemory(); float compFactor = getAvgCompression(cfg.getCodecType()); long bSize= (long) (blockSize / compFactor); int bucketNumber = (int) (memSize / bSize); cfg.setBucketNumber(bucketNumber); } /** * Gets the avg compression. * * @param codec the codec * @return the avg compression */ private float getAvgCompression(CodecType codec) { switch(codec) { case NONE: return 1.0f; case SNAPPY: return 2.0f; case LZ4: return 2.0f; case LZ4HC: return 3.0f; case DEFLATE : return 4.0f; default: return 1.0f; } } // BlockCache implementation /** * Get the maximum size of this cache. It returns only max size of a data cache * @return max size in bytes */ public long getMaxSize() { return this.blockCacheMaxSize + (onHeapEnabled()? onHeapCache.getMaxSize(): 0); } /* * TODO: run stats thread * Statistics thread. Periodically prints the cache statistics to the log. */ /** * The Class StatisticsThread. */ static class StatisticsThread extends Thread { /** The cache. */ OffHeapBlockCache cache; /** * Instantiates a new statistics thread. * * @param cache the cache */ public StatisticsThread(OffHeapBlockCache cache) { super("BigBaseBlockCache.StatisticsThread"); setDaemon(true); this.cache = cache; } /* (non-Javadoc) * @see java.lang.Thread#run() */ @Override public void run() { LOG.info(Thread.currentThread().getName()+" started."); while(true){ try { Thread.sleep(statThreadPeriod); } catch (InterruptedException e) {} cache.logStats(); cache.logStatsOffHeap(); cache.logStatsOnHeap(); cache.logStatsExternal(); cache.logStatsOffHeapExt(); } } } /** * Log stats. */ protected void logStats() { // Log size long totalSize = getCurrentSize(); long freeSize = getMaxSize() - totalSize; OffHeapBlockCache.LOG.info("Block cache stats: " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(getMaxSize()) + ", " + "blocks=" + size() +", " + "accesses=" + stats.getRequestCount() + ", " + "hits=" + stats.getHitCount() + ", " + "hitRatio=" + (stats.getRequestCount()>0 ? 
StringUtils.formatPercent(stats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + stats.getRequestCachingCount() + ", " + "cachingHits=" + stats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (stats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(stats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + getEvictedCount() ); } protected void logStatsOffHeap() { // Log size long totalSize = offHeapCache.getTotalAllocatedMemorySize(); long maxSize = offHeapCache.getMemoryLimit(); long freeSize = maxSize - totalSize; OffHeapBlockCache.LOG.info("[L2-BLOCK-CACHE-RAM(OFFHEAP)] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "blocks=" + offHeapCache.size() +", " + "accesses=" + offHeapStats.getRequestCount() + ", " + "hits=" + offHeapStats.getHitCount() + ", " + "hitRatio=" + (offHeapStats.getRequestCount()>0 ? StringUtils.formatPercent(offHeapStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + offHeapStats.getRequestCachingCount() + ", " + "cachingHits=" + offHeapStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (offHeapStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(offHeapStats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + offHeapCache.getEvictedCount() ); } protected void logStatsOffHeapExt() { // Log size long totalSize = extStorageCache.getTotalAllocatedMemorySize(); long maxSize = extStorageCache.getMemoryLimit(); long freeSize = maxSize - totalSize; OffHeapBlockCache.LOG.info("[L3-BLOCK-CACHE-RAM] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "refs=" + extStorageCache.size() +", " + "accesses=" + extRefStats.getRequestCount() + ", " + "hits=" + extRefStats.getHitCount() + ", " + "hitRatio=" + (extRefStats.getRequestCount()>0 ? StringUtils.formatPercent(extRefStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + extRefStats.getRequestCachingCount() + ", " + "cachingHits=" + extRefStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (extRefStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(offHeapStats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + extRefStats.getEvictedCount() ); } protected void logStatsOnHeap() { if(onHeapEnabled() == false) return; // Log size long totalSize = onHeapCache.getCurrentSize(); long maxSize = onHeapCache.getMaxSize(); long freeSize = maxSize - totalSize; OnHeapBlockCache.LOG.info("[L2-BLOCK-CACHE-RAM(HEAP)] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "blocks=" + onHeapCache.size() +", " + "accesses=" + onHeapStats.getRequestCount() + ", " + "hits=" + onHeapStats.getHitCount() + ", " + "hitRatio=" + (onHeapStats.getRequestCount()>0 ? 
StringUtils.formatPercent(onHeapStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + onHeapStats.getRequestCachingCount() + ", " + "cachingHits=" + onHeapStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (onHeapStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(onHeapStats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + onHeapCache.getEvictedCount() ); } protected void logStatsExternal() { if( storage == null) return; // Log size long totalSize = storage.size(); long maxSize = storage.getMaxStorageSize() ; long freeSize = maxSize - totalSize; OnHeapBlockCache.LOG.info("[L3-BLOCK-CACHE-DISK] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "accesses=" + extStats.getRequestCount() + ", " + "hits=" + extStats.getHitCount() + ", " + "hitRatio=" + (extStats.getRequestCount()>0 ? StringUtils.formatPercent(extStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + extStats.getRequestCachingCount() + ", " + "cachingHits=" + extStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (extStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(extStats.getHitCachingRatio(), 2): "0.00") + "%, "); // "\nFATAL READS="+fatalExternalReads.get()); } /** * HeapSize implementation - returns zero if auxCache is disabled. * * @return the long */ /* (non-Javadoc) * @see org.apache.hadoop.hbase.io.HeapSize#heapSize() */ public long heapSize() { return onHeapCache == null? 0: onHeapCache.heapSize(); } /** * Add block to cache. * @param cacheKey The block's cache key. * @param buf The block contents wrapped in a ByteBuffer. * @param inMemory Whether block should be treated as in-memory */ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory){ boolean contains = false; try { String blockName = cacheKey.toString(); contains = offHeapCache.contains(blockName); if ( contains) { // TODO - what does it mean? Can we ignore this? throw new RuntimeException("Cached an already cached block: "+blockName); } // Always cache block to off-heap cache first offHeapCache.put(blockName, buf); if(buf.getBlockType() != BlockType.DATA && onHeapEnabled()){ // Cache on-heap only non-data blocks onHeapCache.cacheBlock(cacheKey, buf, inMemory); } // TODO: Remove test mode if( isTestMode() && isExternalStorageEnabled()){ // FIXME This code disables storage in non-test mode??? byte[] hashed = Utils.hash128(blockName); if( extStorageCache.contains(hashed) == false){ // Store external if we found object in a block cache and not in ext cache // ONLY IN TEST MODE storeExternalWithCodec(blockName, buf, false); } } } catch (Exception e) { LOG.error(e); throw new RuntimeException(e); } } /** * On heap enabled. * * @return true, if successful */ private final boolean onHeapEnabled() { return onHeapCache != null; } /** * Store external with codec. * Format: * 0..3 - total record size (-4) * 4..7 - size of a key in bytes (16 if use hash128) * 8 .. x - key data * x+1 ..x+1- IN_MEMORY flag ( 1- in memory, 0 - not) * x+2 ... block, serialized and compressed * * @param blockName the block name * @param buf the buf * @param inMemory the in memory * @throws IOException Signals that an I/O exception has occurred. 
*/ private void storeExternalWithCodec(String blockName, Cacheable buf, boolean inMemory) throws IOException{ // If external storage is disable - bail out if (overflowExtEnabled == false){ return; } byte[] hashed = Utils.hash128(blockName); ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress() .getBuffer(); deserializer.set(buf.getDeserializer()); SerDe serde = extStorageCache.getSerDe(); Codec codec = extStorageCache.getCompressionCodec(); buffer.clear(); buffer.position(4); // Save key buffer.putInt(hashed.length); buffer.put(hashed); buffer.put(inMemory ? (byte) 1 : (byte) 0); if (buf != null) { serde.writeCompressed(buffer, buf, codec); int pos = buffer.position(); buffer.putInt(0, pos - 4); } else { buffer.putInt(0, 0); } buffer.flip(); StorageHandle handle = storage.storeData(buffer); try { // WE USE byte array as a key extStorageCache.put(hashed, handle.toBytes()); } catch (Exception e) { throw new IOException(e); } } /** * Gets the external storage cache. * * @return the ext storage cache */ public OffHeapCache getExtStorageCache() { return extStorageCache; } /** * Read external with codec. * * @param blockName the block name * @return the cacheable * @throws IOException Signals that an I/O exception has occurred. */ @SuppressWarnings("unused") private Cacheable readExternalWithCodec(String blockName, boolean repeat, boolean caching) throws IOException { if(overflowExtEnabled == false) return null; // Check if we have already this block in external storage cache try { // We use 16 - byte hash for external storage cache byte[] hashed = Utils.hash128(blockName); StorageHandle handle = storage.newStorageHandle(); byte[] data = (byte[])extStorageCache.get(hashed); if( data == null ) { if(repeat == false) extRefStats.miss(caching); return null; } else{ extRefStats.hit(caching); } handle.fromBytes(data); ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress().getBuffer(); SerDe serde = extStorageCache.getSerDe(); Codec codec = extStorageCache.getCompressionCodec(); buffer.clear(); StorageHandle newHandle = storage.getData(handle, buffer); if(buffer.position() > 0) buffer.flip(); int size = buffer.getInt(); if(size == 0) { // BIGBASE-45 // Remove reference from reference cache extStorageCache.remove(hashed); return null; } // Skip key int keySize = buffer.getInt(); buffer.position(8 + keySize); boolean inMemory = buffer.get() == (byte) 1; buffer.limit(size + 4); Cacheable obj = (Cacheable) serde.readCompressed(buffer/*, codec*/); offHeapCache.put(blockName, obj); if( newHandle.equals(handle) == false){ extStorageCache.put(hashed, newHandle); } return obj; } catch (Throwable e) { /*DEBUG*/LOG.error("[readExternalWithCodec]" + blockName); fatalExternalReads.incrementAndGet(); /*DEBUG*/LOG.error("REQUESTS="+extStats.getRequestCount()+" HITS="+extStats.getHitCount()); throw new IOException(e); } } /** * Add block to cache (defaults to not in-memory). * @param cacheKey The block's cache key. * @param buf The object to cache. */ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf){ cacheBlock(cacheKey, buf, false); } /** * Fetch block from cache. * * @param cacheKey Block to fetch. * @param caching Whether this request has caching enabled (used for stats) * @param repeat Whether this is a repeat lookup for the same block * (used to avoid double counting cache misses when doing double-check locking) * @return Block or null if block is not in 2 cache. 
* {@see HFileReaderV2#readBlock(long, long, boolean, boolean, boolean, BlockType)} */ public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat){ try{ String blockName = cacheKey.toString(); Cacheable bb = onHeapEnabled()? (Cacheable) onHeapCache.getBlock(cacheKey, caching, repeat): null; if(bb == null) { if(repeat == false) onHeapStats.miss(caching); bb = (Cacheable)offHeapCache.get(blockName); if(bb == null){ if(repeat == false){ offHeapStats.miss(caching); } } else{ offHeapStats.hit(caching); } } else{ // We need touch dataBlockCache offHeapCache.touch(blockName); onHeapStats.hit(caching); } if( bb == null){ // Try to load from external cache bb = readExternalWithCodec(blockName, repeat, caching); if(bb == null){ if(repeat == false) extStats.miss(caching); } else{ extStats.hit(caching); } } else if( isTestMode() == true && isExternalStorageEnabled()){ byte[] hashed = Utils.hash128(blockName); if(extStorageCache.contains(hashed) == false){ // FIXME: double check 'contains' // Store external if we found object in a block cache and not in ext cache storeExternalWithCodec(blockName, bb, false); } } if(bb == null) { if(repeat == false) { stats.miss(caching); } return null; } stats.hit(caching); return bb; }catch(Exception e) { LOG.error(e); throw new RuntimeException(e); } } private final boolean isExternalStorageEnabled() { return extStorageCache != null; } /** * Evict block from cache. * @param cacheKey Block to evict * @return true if block existed and was evicted, false if not */ public boolean evictBlock(BlockCacheKey cacheKey){ // We ignore this as since eviction is automatic // always return true boolean result = false; try { result = offHeapCache.remove(cacheKey.toString()); } catch (Exception e) { throw new RuntimeException(e); } return result || onHeapEnabled() ? onHeapCache.evictBlock(cacheKey): true; } /** * Evicts all blocks for the given HFile. * * @param hfileName the hfile name * @return the number of blocks evicted */ public int evictBlocksByHfileName(final String hfileName){ // We ignore this as since eviction is automatic // always return '0' - will it breaks anything? // Single threaded // TODO: do we need global lock? // TODO: it seems not fast enough // multiple scanners at the same time has not been tested // thouroghly yet. Runnable r = new Runnable() { public void run() { LOG.info("Evict blocks for file "+hfileName); int scannerNo = 0; long total = 0; long startTime = System.currentTimeMillis(); while (scannerNo < 256) { CacheScanner scanner = offHeapCache.getScanner(scannerNo++, 256); List<String> keys = new ArrayList<String>(); while (scanner.hasNext()) { try { String key = (String) scanner.nextKey(); if (key.startsWith(hfileName)) { keys.add(key); } } catch (Exception e) { LOG.error("Failed evictBlocksByHfileName ", e); break; } } scanner.close(); // Remove all keys for (String key : keys) { try { if (offHeapCache.remove(key)) { total++; } } catch (Exception e) { LOG.error("Failed evictBlocksByHfileName ", e); break; } } } LOG.info(hfileName + " : evicted " + total + " in " + (System.currentTimeMillis() - startTime) + "ms"); if (onHeapEnabled()) { onHeapCache.evictBlocksByHfileName(hfileName); } } }; new Thread(r).start(); return (int) 0; } /** * Get the total statistics for this block cache. * @return Stats */ public CacheStats getStats(){ return this.stats; } /** * Get the on-heap cache statistics for this block cache. 
* @return Stats */ public CacheStats getOnHeapStats() { return onHeapStats; } /** * Get the off-heap cache statistics for this block cache. * @return Stats */ public CacheStats getOffHeapStats() { return offHeapStats; } /** * Get the external cache statistics for this block cache. * @return Stats */ public CacheStats getExtStats() { return extStats; } /** * Gets the external ref cache stats * @return Stats */ public CacheStats getExtRefStats() { return extRefStats; } public OffHeapCache getOffHeapCache() { return offHeapCache; } public OnHeapBlockCache getOnHeapCache() { return onHeapCache; } public ExtStorage getExternalStorage() { return storage; } /** * Shutdown the cache. */ public void shutdown(){ // Shutdown all caches try { offHeapCache.shutdown(); if(extStorageCache != null){ extStorageCache.shutdown(); } if(storage != null){ storage.shutdown(isPersistent); } } catch (KodaException e) { LOG.error(e); } catch (IOException e) { // TODO Auto-generated catch block LOG.error(e); } } /** * Returns the total size of the block cache, in items. * @return size of cache, in bytes */ public final long size(){ // if( storage != null){ // return estimateExtStorageSize(); // } return offHeapCache.size() + (onHeapEnabled()? onHeapCache.size(): 0) ; } @SuppressWarnings("unused") private final long estimateExtStorageSize() { if(storage == null){ return 0; } else{ long extSize = storage.size(); long offHeapSize = offHeapCache.getTotalAllocatedMemorySize(); long items = offHeapCache.size(); if(items == 0) return 0; long avgItemSize = (offHeapSize / items); return extSize / avgItemSize; } } /** * It reports only RAM. * Returns the free size of the block cache, in bytes. * @return free space in cache, in bytes */ public final long getFreeSize(){ return getMaxSize() - getCurrentSize(); } /** * It reports only RAM. * Returns the occupied size of the block cache, in bytes. * @return occupied space in cache, in bytes */ public long getCurrentSize(){ return offHeapCache.getAllocatedMemorySize() + /*(extStorageCache != null? extStorageCache.getAllocatedMemorySize():0) +*/ (onHeapEnabled()? onHeapCache.getCurrentSize(): 0); } /** * It reports only RAM. * Returns the number of evictions that have occurred. * @return number of evictions */ public long getEvictedCount(){ return offHeapCache.getEvictedCount() + //(extStorageCache != null? extStorageCache.getEvictedCount():0); (onHeapEnabled()? onHeapCache.getEvictedCount(): 0); } /** * RAM only * Returns the number of blocks currently cached in the block cache. * @return number of blocks in the cache */ public long getBlockCount() { return size(); } /** * Performs a BlockCache summary and returns a List of BlockCacheColumnFamilySummary objects. * This method could be fairly heavy-weight in that it evaluates the entire HBase file-system * against what is in the RegionServer BlockCache. * <br><br> * The contract of this interface is to return the List in sorted order by Table name, then * ColumnFamily. 
* * @param conf HBaseConfiguration * @return List of BlockCacheColumnFamilySummary * @throws IOException exception */ public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(Configuration conf) throws IOException { // TODO // it seems that this API is not used return new ArrayList<BlockCacheColumnFamilySummary>(); } public void dumpCacheStats() { if(onHeapEnabled()){ LOG.info("On heap stats:"); LOG.info("Size="+ onHeapCache.heapSize()); LOG.info("hits="+onHeapStats.getHitCount() + " miss="+onHeapStats.getMissCount() + " hit ratio="+onHeapStats.getHitRatio()); } LOG.info("Off heap stats:"); LOG.info("Size="+ offHeapCache.getAllocatedMemorySize()); LOG.info("hits="+offHeapStats.getHitCount() + " miss="+offHeapStats.getMissCount() + " hit ratio="+offHeapStats.getHitRatio()); if( storage != null){ LOG.info("External storage stats:"); LOG.info("Size="+ storage.size()); LOG.info("hits="+extStats.getHitCount() + " miss="+extStats.getMissCount() + " hit ratio="+extStats.getHitRatio()); } LOG.info("Overall stats:"); LOG.info("hits="+stats.getHitCount() + " miss="+stats.getMissCount() + " hit ratio="+stats.getHitRatio()); } }
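The storeExternalWithCodec()/readExternalWithCodec() pair in the file above frames every block handed to external storage as a small length-prefixed record (total size, key length, key bytes, IN_MEMORY flag, then the serialized block). A minimal, self-contained sketch of that layout follows; it uses a plain ByteBuffer with made-up key and payload bytes, and the class and method names are illustrative only, not part of the project:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Sketch of the record layout described in storeExternalWithCodec():
// [0..3] total size excluding this header, [4..7] key length, key bytes,
// one IN_MEMORY flag byte, then the serialized (possibly compressed) block.
public class ExtRecordLayoutSketch {
    static ByteBuffer pack(byte[] key, byte[] payload, boolean inMemory) {
        ByteBuffer buf = ByteBuffer.allocate(4 + 4 + key.length + 1 + payload.length);
        buf.position(4);                              // reserve the total-size header
        buf.putInt(key.length);                       // key length
        buf.put(key);                                 // key bytes (hash128 of the block name in the real cache)
        buf.put(inMemory ? (byte) 1 : (byte) 0);      // IN_MEMORY flag
        buf.put(payload);                             // block bytes (serde.writeCompressed in the real cache)
        buf.putInt(0, buf.position() - 4);            // total record size minus the 4-byte header
        buf.flip();
        return buf;
    }

    public static void main(String[] args) {
        byte[] key = new byte[16];                    // stands in for Utils.hash128(blockName)
        byte[] payload = "block-bytes".getBytes(StandardCharsets.UTF_8);
        ByteBuffer rec = pack(key, payload, false);
        System.out.println("total=" + rec.getInt() + " keyLen=" + rec.getInt()); // total=32 keyLen=16
    }
}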
block-cache/src/main/java/com/koda/integ/hbase/blockcache/OffHeapBlockCache.java
/******************************************************************************* * Copyright (c) 2013 Vladimir Rodionov. All Rights Reserved * * This code is released under the GNU Affero General Public License. * * See: http://www.fsf.org/licensing/licenses/agpl-3.0.html * * VLADIMIR RODIONOV MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY * OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR * NON-INFRINGEMENT. Vladimir Rodionov SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED * BY LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR * ITS DERIVATIVES. * * Author: Vladimir Rodionov * *******************************************************************************/ package com.koda.integ.hbase.blockcache; import java.io.File; import java.io.IOException; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.BlockType; import org.apache.hadoop.hbase.io.hfile.CacheStats; import org.apache.hadoop.hbase.io.hfile.Cacheable; import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer; import org.apache.hadoop.util.StringUtils; import com.koda.KodaException; import com.koda.cache.CacheManager; import com.koda.cache.CacheScanner; import com.koda.cache.OffHeapCache; import com.koda.cache.eviction.EvictionListener; import com.koda.cache.eviction.EvictionPolicy; import com.koda.compression.Codec; import com.koda.compression.CodecType; import com.koda.config.CacheConfiguration; import com.koda.integ.hbase.storage.ExtStorage; import com.koda.integ.hbase.storage.ExtStorageManager; import com.koda.integ.hbase.storage.StorageHandle; import com.koda.integ.hbase.util.CacheableSerializer; import com.koda.integ.hbase.util.StorageHandleSerializer; import com.koda.integ.hbase.util.Utils; import com.koda.io.serde.SerDe; import com.koda.persistence.PersistenceMode; import com.koda.persistence.rawfs.RawFSConfiguration; import com.koda.persistence.rawfs.RawFSStore; // TODO: Auto-generated Javadoc /** * An off-heap block cache implementation that is memory-aware, * memory-bound using an LRU eviction algorithm, and concurrent: backed by a. * * {@link OffHeapCache} and with a non-blocking fast eviction giving * constant-time {@link #cacheBlock} and {@link #getBlock} operations.<p> * * Contains three levels of block priority to allow for * scan-resistance and in-memory families. A block is added with an inMemory * flag if necessary, otherwise a block becomes a single access priority. Once * a blocked is accessed again, it changes to multiple access. This is used * to prevent scans from thrashing the cache, adding a least-frequently-used * element to the eviction algorithm.<p> * * Each priority is given its own chunk of the total cache to ensure * fairness during eviction. 
Each priority will retain close to its maximum * size, however, if any priority is not using its entire chunk the others * are able to grow beyond their chunk size.<p> * * Instantiated at a minimum with the total size and average block size. * All sizes are in bytes. The block size is not especially important as this * cache is fully dynamic in its sizing of blocks. It is only used for * pre-allocating data structures.<p> * * The detailed constructor defines the sizes for the three priorities (they * should total to the maximum size defined). It also sets the levels that * trigger and control the eviction thread.<p> * * The acceptable size is the cache size level which triggers the eviction * process to start. It evicts enough blocks to get the size below the * minimum size specified.<p> * * * TODO: * * 1. Block data encoding support (see fb-89 L2) ??? * * 2. Implement: * * Each priority is given its own chunk of the total cache to ensure * fairness during eviction. Each priority will retain close to its maximum * size, however, if any priority is not using its entire chunk the others * are able to grow beyond their chunk size. * * * Notes on Cassandra and SSD. Cassandra (1.1+) has so -called flexible data placement * (mixed storage support) feature, * which allows to place particular CFs into separate mounts (SSD) * http://www.datastax.com/dev/blog/whats-new-in-cassandra-1-1-flexible-data-file-placement * * This is not so efficient as a true (SSD - backed) block cache in HBase * * Some additional links (to use a reference): * http://readwrite.com/2012/04/27/cassandra-11-brings-cache-tuning-mixed-storage-support#awesm=~ofb1zhDSfBqb90 * * * 3. Flexible memory limits. Currently, we have 4 (young, tenured, permanent and external) caches and each cache * relies on its limits to activate eviction. This may result in sub-par usage of an available memory. * + 4. 16 bytes hashed values for 'external' cache keys (MD5 or what?). * + 5. Compression for external storage. * 6. External storage to keep both keys and blocks * Format: * 0..3 total record size * 4..7 key size * 8..11 value size * 12 .. (key size + 12) key data * x .. value data * * * TODO: It seems not all cached data are HFileBlock? */ public class OffHeapBlockCache implements BlockCache, HeapSize { /** The Constant YOUNG_GEN_FACTOR. */ public final static String BLOCK_CACHE_YOUNG_GEN_FACTOR = "offheap.blockcache.young.gen.factor"; /** The Constant BLOCK_CACHE_MEMORY_SIZE. */ public final static String BLOCK_CACHE_MEMORY_SIZE = "offheap.blockcache.size"; public final static String HEAP_BLOCK_CACHE_MEMORY_RATIO = "offheap.blockcache.onheap.ratio"; /** The Constant BLOCK_CACHE_IMPL. */ public final static String BLOCK_CACHE_IMPL = "offheap.blockcache.impl"; /** The Constant EXT_STORAGE_FACTOR. */ public final static String BLOCK_CACHE_EXT_STORAGE_MEMORY_SIZE = "offheap.blockcache.storage.ref.size"; /** The Constant _COMPRESSION. */ public final static String BLOCK_CACHE_COMPRESSION = "offheap.blockcache.compression"; /** The Constant OVERFLOW_TO_EXT_STORAGE_ENABLED. */ public final static String BLOCK_CACHE_OVERFLOW_TO_EXT_STORAGE_ENABLED = "offheap.blockcache.storage.enabled"; /** The Constant EXT_STORAGE_IMPL. */ public final static String BLOCK_CACHE_EXT_STORAGE_IMPL = "offheap.blockcache.storage.impl"; /** The Constant BLOCK_CACHE_ONHEAP_ENABLED. 
*/ public final static String BLOCK_CACHE_ONHEAP_ENABLED = "offheap.blockcache.onheap.enabled"; public final static String BLOCK_CACHE_TEST_MODE = "offheap.blockcache.test.mode"; public final static String BLOCK_CACHE_PERSISTENT = "offheap.blockcache.persistent"; public final static String BLOCK_CACHE_SNAPSHOTS ="offheap.blockcache.snapshots.enabled"; public final static String BLOCK_CACHE_SNAPSHOT_INTERVAL ="offheap.blockcache.snapshots.interval"; public final static String BLOCK_CACHE_DATA_ROOTS = "offheap.blockcache.storage.dir"; /** Default is LRU2Q, possible values: LRU, LFU, RANDOM, FIFO */ public final static String BLOCK_CACHE_EVICTION = "offheap.blockcache.eviction"; public final static String BLOCK_CACHE_BUFFER_SIZE = "offheap.blockcache.nativebuffer.size"; public final static int DEFAULT_BLOCK_CACH_BUFFER_SIZE = 1024*1024; // 1 MB /** The Constant LOG. */ static final Log LOG = LogFactory.getLog(OffHeapBlockCache.class); /** Default Configuration Parameters. */ static final int EXT_STORAGE_REF_SIZE = 50; /** Young generation. */ static final float DEFAULT_YOUNG_FACTOR = 0.5f; /** 10 % of JVM heap size is dedicated to on heap cache */ static final float DEFAULT_HEAP_BLOCK_CACHE_MEMORY_RATIO = 0.1f; /** Statistics thread. */ static final int statThreadPeriod = 60000; /** Main (off - heap) cache. All blocks go to this cache first*/ private OffHeapCache offHeapCache; /** External storage handle cache. */ private OffHeapCache extStorageCache; /** Cache statistics - combined */ private final CacheStats stats; /** On-heap cache stats */ private final CacheStats onHeapStats; /** Off-heap cache stats */ private final CacheStats offHeapStats; /** External cache stats -L3 */ private final CacheStats extStats; /** External references cache stats in RAM*/ private final CacheStats extRefStats; /** Maximum allowable size of cache (block put if size > max, evict). */ private long blockCacheMaxSize; /** Maximum allowable size of external storage cache. */ private long extCacheMaxSize; /** Approximate block size. */ private long blockSize; /** Direct buffer block size */ private int nativeBufferSize = DEFAULT_BLOCK_CACH_BUFFER_SIZE; /** Single access bucket size. */ private float youngGenFactor; /** * Data overflow to external storage. */ private boolean overflowExtEnabled = false; /** * Save Ref cache on shutdown */ private boolean isPersistent = false; @SuppressWarnings("unused") private boolean isSnapshotsEnabled = false; @SuppressWarnings("unused") private long snapshotsInterval = 0; /** external storage (file or network - based). */ private ExtStorage storage; /** The stat thread. */ private StatisticsThread statThread; /** The deserializer. */ private AtomicReference<CacheableDeserializer<Cacheable>> deserializer = new AtomicReference<CacheableDeserializer<Cacheable>>(); /** Fast on-heap cache to store NON-DATA blocks (INDEX, BLOOM etc). */ private OnHeapBlockCache onHeapCache; private boolean testMode = false; private AtomicLong fatalExternalReads = new AtomicLong(0); /** * Instantiates a new off heap block cache. 
* * @param conf the conf */ public OffHeapBlockCache(Configuration conf) { this.blockSize = conf.getInt("hbase.offheapcache.minblocksize", HColumnDescriptor.DEFAULT_BLOCKSIZE); blockCacheMaxSize = conf.getLong(BLOCK_CACHE_MEMORY_SIZE, 0L); if(blockCacheMaxSize == 0L){ throw new RuntimeException("off heap block cache size is not defined"); } nativeBufferSize = conf.getInt(BLOCK_CACHE_BUFFER_SIZE, DEFAULT_BLOCK_CACH_BUFFER_SIZE); extCacheMaxSize = conf.getLong(BLOCK_CACHE_EXT_STORAGE_MEMORY_SIZE, (long) (0.1 * blockCacheMaxSize)); youngGenFactor = conf.getFloat(BLOCK_CACHE_YOUNG_GEN_FACTOR, DEFAULT_YOUNG_FACTOR); overflowExtEnabled = conf.getBoolean(BLOCK_CACHE_OVERFLOW_TO_EXT_STORAGE_ENABLED, false); testMode = conf.getBoolean(BLOCK_CACHE_TEST_MODE, false); isPersistent = conf.getBoolean(BLOCK_CACHE_PERSISTENT, false); isSnapshotsEnabled = conf.getBoolean(BLOCK_CACHE_SNAPSHOTS, false); snapshotsInterval = conf.getInt(BLOCK_CACHE_SNAPSHOT_INTERVAL, 600) * 1000; String[] dataRoots = getDataRoots(conf.get(BLOCK_CACHE_DATA_ROOTS)); if(isPersistent && dataRoots == null){ dataRoots = getHDFSRoots(conf); if(dataRoots == null){ LOG.warn("Data roots are not defined. Set persistent mode to false."); isPersistent = false; } } adjustMaxMemory(); /** Possible values: none, snappy, gzip, lz4, lz4hc */ // TODO: LZ4 is not supported on all platforms // TODO: default compression is LZ4? CodecType codec = CodecType.LZ4; String value = conf.get(BLOCK_CACHE_COMPRESSION); if(value != null){ codec = CodecType.valueOf(value.toUpperCase()); } try { //TODO - Verify we have deep enough copy CacheConfiguration cacheCfg = new CacheConfiguration(); cacheCfg.setCacheName("block-cache"); cacheCfg.setSerDeBufferSize(nativeBufferSize); cacheCfg.setMaxMemory(blockCacheMaxSize); cacheCfg.setCodecType(codec); String evictionPolicy =conf.get(BLOCK_CACHE_EVICTION, "LRU").toUpperCase(); cacheCfg.setEvictionPolicy(evictionPolicy); // Set this only for LRU2Q cacheCfg.setLRU2QInsertPoint(youngGenFactor); setBucketNumber(cacheCfg); CacheManager manager = CacheManager.getInstance(); if(overflowExtEnabled == true){ LOG.info("Overflow to external storage is enabled."); // External storage handle cache CacheConfiguration extStorageCfg = new CacheConfiguration(); extStorageCfg.setCacheName("extStorageCache"); extStorageCfg.setMaxMemory(extCacheMaxSize); extStorageCfg.setCodecType(codec); extStorageCfg.setEvictionPolicy(EvictionPolicy.LRU.toString()); extStorageCfg.setSerDeBufferSize(4096);// small // calculate bucket number // 50 is estimate of a record size int buckets = (extCacheMaxSize / EXT_STORAGE_REF_SIZE) > Integer.MAX_VALUE? Integer.MAX_VALUE -1: (int) (extCacheMaxSize / EXT_STORAGE_REF_SIZE); extStorageCfg.setBucketNumber(buckets); if(isPersistent){ // TODO - this in memory cache has same data dirs as a major cache. 
RawFSConfiguration storeConfig = new RawFSConfiguration(); storeConfig.setStoreName(extStorageCfg.getCacheName()); storeConfig.setDiskStoreImplementation(RawFSStore.class); storeConfig.setDbDataStoreRoots(dataRoots); storeConfig.setPersistenceMode(PersistenceMode.ONDEMAND); storeConfig.setDbCompressionType(CodecType.LZ4); storeConfig.setDbSnapshotInterval(15); //storeConfig.setTotalWorkerThreads(Runtime.getRuntime().availableProcessors() /2); //storeConfig.setTotalIOThreads(1); extStorageCfg.setDataStoreConfiguration(storeConfig); } // This will initiate the load of stored cache data // if persistance is enabled extStorageCache = manager.getCache(extStorageCfg, null); // Initialize external storage storage = ExtStorageManager.getInstance().getStorage(conf, extStorageCache); } else{ LOG.info("Overflow to external storage is disabled."); if(isPersistent){ RawFSConfiguration storeConfig = new RawFSConfiguration(); storeConfig.setStoreName(cacheCfg.getCacheName()); storeConfig.setDiskStoreImplementation(RawFSStore.class); storeConfig.setDbDataStoreRoots(dataRoots); storeConfig.setPersistenceMode(PersistenceMode.ONDEMAND); storeConfig.setDbSnapshotInterval(15); cacheCfg.setDataStoreConfiguration(storeConfig); // Load cache data offHeapCache = manager.getCache(cacheCfg, null); } } if(offHeapCache == null){ offHeapCache = manager.getCache(cacheCfg, null); } } catch (Exception e) { throw new RuntimeException(e); } boolean onHeapEnabled = conf.getBoolean(BLOCK_CACHE_ONHEAP_ENABLED, true); if(onHeapEnabled){ long onHeapCacheSize = calculateOnHeapCacheSize(conf); if(onHeapCacheSize > 0){ onHeapCache = new OnHeapBlockCache(onHeapCacheSize, blockSize, conf); LOG.info("Created fast on-heap cache. Size="+onHeapCacheSize); } else{ LOG.warn("Conflicting configuration options. On-heap cache is disabled."); } } this.stats = new CacheStats(); this.onHeapStats = new CacheStats(); this.offHeapStats = new CacheStats(); this.extStats = new CacheStats(); this.extRefStats = new CacheStats(); EvictionListener listener = new EvictionListener(){ @Override public void evicted(long ptr, Reason reason, long nanoTime) { stats.evict(); stats.evicted(); } }; offHeapCache.setEvictionListener(listener); // Cacheable serializer registration CacheableSerializer serde = new CacheableSerializer(); offHeapCache.getSerDe().registerSerializer(serde); if( extStorageCache != null){ StorageHandleSerializer serde2 = new StorageHandleSerializer(); extStorageCache.getSerDe().registerSerializer(serde2); } // Start statistics thread statThread = new StatisticsThread(this); statThread.start(); } private String[] getHDFSRoots(Configuration conf) { // Use default dfs data directories String str = conf.get("dfs.data.dir"); if(str == null) return null; String[] dirs = str.split(","); for(int i=0 ; i < dirs.length; i++){ dirs[i] = dirs[i].trim() + File.separator + "blockcache"; } return dirs; } private String[] getDataRoots(String roots) { if (roots == null) return null; String[] rts = roots.split(","); String[] retValue = new String[rts.length]; for( int i=0; i < retValue.length; i++){ retValue[i] = rts[i].trim(); } return retValue; } private final boolean isTestMode() { return testMode; } /** * Calculate on heap cache size. 
* * @param conf the conf * @return the long */ private long calculateOnHeapCacheSize(Configuration conf) { float cachePercentage = conf.getFloat( HEAP_BLOCK_CACHE_MEMORY_RATIO, DEFAULT_HEAP_BLOCK_CACHE_MEMORY_RATIO); if (cachePercentage == 0L) { // block cache disabled on heap return 0L; } if (cachePercentage > 1.0) { throw new IllegalArgumentException(HEAP_BLOCK_CACHE_MEMORY_RATIO + " must be between 0.0 and 1.0, and not > 1.0"); } // Calculate the amount of heap to give the heap. MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage(); long cacheSize = (long) (mu.getMax() * cachePercentage); return cacheSize; } /** * Adjust max memory. */ private void adjustMaxMemory() { if(overflowExtEnabled == true && extCacheMaxSize == 0){ blockCacheMaxSize = (long) (0.95 * blockCacheMaxSize); // By default we set back 5% to external ref cache extCacheMaxSize = (long) (0.05 * blockCacheMaxSize); } LOG.info("Block cache max size ="+blockCacheMaxSize+" external cache support (in RAM)="+extCacheMaxSize); } /** * Sets the bucket number. * * @param cfg the new bucket number */ private void setBucketNumber(CacheConfiguration cfg) { long memSize = cfg.getMaxMemory(); float compFactor = getAvgCompression(cfg.getCodecType()); long bSize= (long) (blockSize / compFactor); int bucketNumber = (int) (memSize / bSize); cfg.setBucketNumber(bucketNumber); } /** * Gets the avg compression. * * @param codec the codec * @return the avg compression */ private float getAvgCompression(CodecType codec) { switch(codec) { case NONE: return 1.0f; case SNAPPY: return 2.0f; case LZ4: return 2.0f; case LZ4HC: return 3.0f; case DEFLATE : return 4.0f; default: return 1.0f; } } // BlockCache implementation /** * Get the maximum size of this cache. It returns only max size of a data cache * @return max size in bytes */ public long getMaxSize() { return this.blockCacheMaxSize + (onHeapEnabled()? onHeapCache.getMaxSize(): 0); } /* * TODO: run stats thread * Statistics thread. Periodically prints the cache statistics to the log. */ /** * The Class StatisticsThread. */ static class StatisticsThread extends Thread { /** The cache. */ OffHeapBlockCache cache; /** * Instantiates a new statistics thread. * * @param cache the cache */ public StatisticsThread(OffHeapBlockCache cache) { super("BigBaseBlockCache.StatisticsThread"); setDaemon(true); this.cache = cache; } /* (non-Javadoc) * @see java.lang.Thread#run() */ @Override public void run() { LOG.info(Thread.currentThread().getName()+" started."); while(true){ try { Thread.sleep(statThreadPeriod); } catch (InterruptedException e) {} cache.logStats(); cache.logStatsOffHeap(); cache.logStatsOnHeap(); cache.logStatsExternal(); cache.logStatsOffHeapExt(); } } } /** * Log stats. */ protected void logStats() { // Log size long totalSize = getCurrentSize(); long freeSize = getMaxSize() - totalSize; OffHeapBlockCache.LOG.info("Block cache stats: " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(getMaxSize()) + ", " + "blocks=" + size() +", " + "accesses=" + stats.getRequestCount() + ", " + "hits=" + stats.getHitCount() + ", " + "hitRatio=" + (stats.getRequestCount()>0 ? 
StringUtils.formatPercent(stats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + stats.getRequestCachingCount() + ", " + "cachingHits=" + stats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (stats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(stats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + getEvictedCount() ); } protected void logStatsOffHeap() { // Log size long totalSize = offHeapCache.getTotalAllocatedMemorySize(); long maxSize = offHeapCache.getMemoryLimit(); long freeSize = maxSize - totalSize; OffHeapBlockCache.LOG.info("[L2-BLOCK-CACHE-RAM(OFFHEAP)] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "blocks=" + offHeapCache.size() +", " + "accesses=" + offHeapStats.getRequestCount() + ", " + "hits=" + offHeapStats.getHitCount() + ", " + "hitRatio=" + (offHeapStats.getRequestCount()>0 ? StringUtils.formatPercent(offHeapStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + offHeapStats.getRequestCachingCount() + ", " + "cachingHits=" + offHeapStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (offHeapStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(offHeapStats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + offHeapCache.getEvictedCount() ); } protected void logStatsOffHeapExt() { // Log size long totalSize = extStorageCache.getTotalAllocatedMemorySize(); long maxSize = extStorageCache.getMemoryLimit(); long freeSize = maxSize - totalSize; OffHeapBlockCache.LOG.info("[L3-BLOCK-CACHE-RAM] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "refs=" + extStorageCache.size() +", " + "accesses=" + extRefStats.getRequestCount() + ", " + "hits=" + extRefStats.getHitCount() + ", " + "hitRatio=" + (extRefStats.getRequestCount()>0 ? StringUtils.formatPercent(extRefStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + extRefStats.getRequestCachingCount() + ", " + "cachingHits=" + extRefStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (extRefStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(offHeapStats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + extRefStats.getEvictedCount() ); } protected void logStatsOnHeap() { if(onHeapEnabled() == false) return; // Log size long totalSize = onHeapCache.getCurrentSize(); long maxSize = onHeapCache.getMaxSize(); long freeSize = maxSize - totalSize; OnHeapBlockCache.LOG.info("[L2-BLOCK-CACHE-RAM(HEAP)] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "blocks=" + onHeapCache.size() +", " + "accesses=" + onHeapStats.getRequestCount() + ", " + "hits=" + onHeapStats.getHitCount() + ", " + "hitRatio=" + (onHeapStats.getRequestCount()>0 ? 
StringUtils.formatPercent(onHeapStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + onHeapStats.getRequestCachingCount() + ", " + "cachingHits=" + onHeapStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (onHeapStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(onHeapStats.getHitCachingRatio(), 2): "0.00") + "%, " + "evicted=" + onHeapCache.getEvictedCount() ); } protected void logStatsExternal() { if( storage == null) return; // Log size long totalSize = storage.size(); long maxSize = storage.getMaxStorageSize() ; long freeSize = maxSize - totalSize; OnHeapBlockCache.LOG.info("[L3-BLOCK-CACHE-DISK] : " + "total=" + StringUtils.byteDesc(totalSize) + ", " + "free=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(maxSize) + ", " + "accesses=" + extStats.getRequestCount() + ", " + "hits=" + extStats.getHitCount() + ", " + "hitRatio=" + (extStats.getRequestCount()>0 ? StringUtils.formatPercent(extStats.getHitRatio(), 2): "0.00") + "%, "+ "cachingAccesses=" + extStats.getRequestCachingCount() + ", " + "cachingHits=" + extStats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (extStats.getRequestCachingCount() > 0 ?StringUtils.formatPercent(extStats.getHitCachingRatio(), 2): "0.00") + "%, "); // "\nFATAL READS="+fatalExternalReads.get()); } /** * HeapSize implementation - returns zero if auxCache is disabled. * * @return the long */ /* (non-Javadoc) * @see org.apache.hadoop.hbase.io.HeapSize#heapSize() */ public long heapSize() { return onHeapCache == null? 0: onHeapCache.heapSize(); } /** * Add block to cache. * @param cacheKey The block's cache key. * @param buf The block contents wrapped in a ByteBuffer. * @param inMemory Whether block should be treated as in-memory */ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory){ boolean contains = false; try { String blockName = cacheKey.toString(); contains = offHeapCache.contains(blockName); if ( contains) { // TODO - what does it mean? Can we ignore this? throw new RuntimeException("Cached an already cached block: "+blockName); } // Always cache block to off-heap cache first offHeapCache.put(blockName, buf); if(buf.getBlockType() != BlockType.DATA && onHeapEnabled()){ // Cache on-heap only non-data blocks onHeapCache.cacheBlock(cacheKey, buf, inMemory); } // TODO: Remove test mode if( isTestMode() && isExternalStorageEnabled()){ // FIXME This code disables storage in non-test mode??? byte[] hashed = Utils.hash128(blockName); if( extStorageCache.contains(hashed) == false){ // Store external if we found object in a block cache and not in ext cache // ONLY IN TEST MODE storeExternalWithCodec(blockName, buf, false); } } } catch (Exception e) { LOG.error(e); throw new RuntimeException(e); } } /** * On heap enabled. * * @return true, if successful */ private final boolean onHeapEnabled() { return onHeapCache != null; } /** * Store external with codec. * Format: * 0..3 - total record size (-4) * 4..7 - size of a key in bytes (16 if use hash128) * 8 .. x - key data * x+1 ..x+1- IN_MEMORY flag ( 1- in memory, 0 - not) * x+2 ... block, serialized and compressed * * @param blockName the block name * @param buf the buf * @param inMemory the in memory * @throws IOException Signals that an I/O exception has occurred. 
*/ private void storeExternalWithCodec(String blockName, Cacheable buf, boolean inMemory) throws IOException{ // If external storage is disable - bail out if (overflowExtEnabled == false){ return; } byte[] hashed = Utils.hash128(blockName); ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress() .getBuffer(); deserializer.set(buf.getDeserializer()); SerDe serde = extStorageCache.getSerDe(); Codec codec = extStorageCache.getCompressionCodec(); buffer.clear(); buffer.position(4); // Save key buffer.putInt(hashed.length); buffer.put(hashed); buffer.put(inMemory ? (byte) 1 : (byte) 0); if (buf != null) { serde.writeCompressed(buffer, buf, codec); int pos = buffer.position(); buffer.putInt(0, pos - 4); } else { buffer.putInt(0, 0); } buffer.flip(); StorageHandle handle = storage.storeData(buffer); try { // WE USE byte array as a key extStorageCache.put(hashed, handle.toBytes()); } catch (Exception e) { throw new IOException(e); } } /** * Gets the external storage cache. * * @return the ext storage cache */ public OffHeapCache getExtStorageCache() { return extStorageCache; } /** * Read external with codec. * * @param blockName the block name * @return the cacheable * @throws IOException Signals that an I/O exception has occurred. */ @SuppressWarnings("unused") private Cacheable readExternalWithCodec(String blockName, boolean repeat, boolean caching) throws IOException { if(overflowExtEnabled == false) return null; // Check if we have already this block in external storage cache try { // We use 16 - byte hash for external storage cache byte[] hashed = Utils.hash128(blockName); StorageHandle handle = storage.newStorageHandle(); byte[] data = (byte[])extStorageCache.get(hashed); if( data == null ) { if(repeat == false) extRefStats.miss(caching); return null; } else{ extRefStats.hit(caching); } handle.fromBytes(data); ByteBuffer buffer = extStorageCache.getLocalBufferWithAddress().getBuffer(); SerDe serde = extStorageCache.getSerDe(); Codec codec = extStorageCache.getCompressionCodec(); buffer.clear(); StorageHandle newHandle = storage.getData(handle, buffer); if(buffer.position() > 0) buffer.flip(); int size = buffer.getInt(); if(size == 0) return null; // Skip key int keySize = buffer.getInt(); buffer.position(8 + keySize); boolean inMemory = buffer.get() == (byte) 1; buffer.limit(size + 4); Cacheable obj = (Cacheable) serde.readCompressed(buffer/*, codec*/); offHeapCache.put(blockName, obj); if( newHandle.equals(handle) == false){ extStorageCache.put(hashed, newHandle); } return obj; } catch (Throwable e) { /*DEBUG*/LOG.error("[readExternalWithCodec]" + blockName); fatalExternalReads.incrementAndGet(); /*DEBUG*/LOG.error("REQUESTS="+extStats.getRequestCount()+" HITS="+extStats.getHitCount()); throw new IOException(e); } } /** * Add block to cache (defaults to not in-memory). * @param cacheKey The block's cache key. * @param buf The object to cache. */ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf){ cacheBlock(cacheKey, buf, false); } /** * Fetch block from cache. * * @param cacheKey Block to fetch. * @param caching Whether this request has caching enabled (used for stats) * @param repeat Whether this is a repeat lookup for the same block * (used to avoid double counting cache misses when doing double-check locking) * @return Block or null if block is not in 2 cache. 
* {@see HFileReaderV2#readBlock(long, long, boolean, boolean, boolean, BlockType)} */ public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat){ try{ String blockName = cacheKey.toString(); Cacheable bb = onHeapEnabled()? (Cacheable) onHeapCache.getBlock(cacheKey, caching, repeat): null; if(bb == null) { if(repeat == false) onHeapStats.miss(caching); bb = (Cacheable)offHeapCache.get(blockName); if(bb == null){ if(repeat == false){ offHeapStats.miss(caching); } } else{ offHeapStats.hit(caching); } } else{ // We need touch dataBlockCache offHeapCache.touch(blockName); onHeapStats.hit(caching); } if( bb == null){ // Try to load from external cache bb = readExternalWithCodec(blockName, repeat, caching); if(bb == null){ if(repeat == false) extStats.miss(caching); } else{ extStats.hit(caching); } } else if( isTestMode() == true && isExternalStorageEnabled()){ byte[] hashed = Utils.hash128(blockName); if(extStorageCache.contains(hashed) == false){ // FIXME: double check 'contains' // Store external if we found object in a block cache and not in ext cache storeExternalWithCodec(blockName, bb, false); } } if(bb == null) { if(repeat == false) { stats.miss(caching); } return null; } stats.hit(caching); return bb; }catch(Exception e) { LOG.error(e); throw new RuntimeException(e); } } private final boolean isExternalStorageEnabled() { return extStorageCache != null; } /** * Evict block from cache. * @param cacheKey Block to evict * @return true if block existed and was evicted, false if not */ public boolean evictBlock(BlockCacheKey cacheKey){ // We ignore this as since eviction is automatic // always return true boolean result = false; try { result = offHeapCache.remove(cacheKey.toString()); } catch (Exception e) { throw new RuntimeException(e); } return result || onHeapEnabled() ? onHeapCache.evictBlock(cacheKey): true; } /** * Evicts all blocks for the given HFile. * * @param hfileName the hfile name * @return the number of blocks evicted */ public int evictBlocksByHfileName(final String hfileName){ // We ignore this as since eviction is automatic // always return '0' - will it breaks anything? // Single threaded // TODO: do we need global lock? // TODO: it seems not fast enough // multiple scanners at the same time has not been tested // thouroghly yet. Runnable r = new Runnable() { public void run() { LOG.info("Evict blocks for file "+hfileName); int scannerNo = 0; long total = 0; long startTime = System.currentTimeMillis(); while (scannerNo < 256) { CacheScanner scanner = offHeapCache.getScanner(scannerNo++, 256); List<String> keys = new ArrayList<String>(); while (scanner.hasNext()) { try { String key = (String) scanner.nextKey(); if (key.startsWith(hfileName)) { keys.add(key); } } catch (Exception e) { LOG.error("Failed evictBlocksByHfileName ", e); break; } } scanner.close(); // Remove all keys for (String key : keys) { try { if (offHeapCache.remove(key)) { total++; } } catch (Exception e) { LOG.error("Failed evictBlocksByHfileName ", e); break; } } } LOG.info(hfileName + " : evicted " + total + " in " + (System.currentTimeMillis() - startTime) + "ms"); if (onHeapEnabled()) { onHeapCache.evictBlocksByHfileName(hfileName); } } }; new Thread(r).start(); return (int) 0; } /** * Get the total statistics for this block cache. * @return Stats */ public CacheStats getStats(){ return this.stats; } /** * Get the on-heap cache statistics for this block cache. 
* @return Stats */ public CacheStats getOnHeapStats() { return onHeapStats; } /** * Get the off-heap cache statistics for this block cache. * @return Stats */ public CacheStats getOffHeapStats() { return offHeapStats; } /** * Get the external cache statistics for this block cache. * @return Stats */ public CacheStats getExtStats() { return extStats; } /** * Gets the external ref cache stats * @return Stats */ public CacheStats getExtRefStats() { return extRefStats; } public OffHeapCache getOffHeapCache() { return offHeapCache; } public OnHeapBlockCache getOnHeapCache() { return onHeapCache; } public ExtStorage getExternalStorage() { return storage; } /** * Shutdown the cache. */ public void shutdown(){ // Shutdown all caches try { offHeapCache.shutdown(); if(extStorageCache != null){ extStorageCache.shutdown(); } if(storage != null){ storage.shutdown(isPersistent); } } catch (KodaException e) { LOG.error(e); } catch (IOException e) { // TODO Auto-generated catch block LOG.error(e); } } /** * Returns the total size of the block cache, in items. * @return size of cache, in bytes */ public final long size(){ // if( storage != null){ // return estimateExtStorageSize(); // } return offHeapCache.size() + (onHeapEnabled()? onHeapCache.size(): 0) ; } @SuppressWarnings("unused") private final long estimateExtStorageSize() { if(storage == null){ return 0; } else{ long extSize = storage.size(); long offHeapSize = offHeapCache.getTotalAllocatedMemorySize(); long items = offHeapCache.size(); if(items == 0) return 0; long avgItemSize = (offHeapSize / items); return extSize / avgItemSize; } } /** * It reports only RAM. * Returns the free size of the block cache, in bytes. * @return free space in cache, in bytes */ public final long getFreeSize(){ return getMaxSize() - getCurrentSize(); } /** * It reports only RAM. * Returns the occupied size of the block cache, in bytes. * @return occupied space in cache, in bytes */ public long getCurrentSize(){ return offHeapCache.getAllocatedMemorySize() + /*(extStorageCache != null? extStorageCache.getAllocatedMemorySize():0) +*/ (onHeapEnabled()? onHeapCache.getCurrentSize(): 0); } /** * It reports only RAM. * Returns the number of evictions that have occurred. * @return number of evictions */ public long getEvictedCount(){ return offHeapCache.getEvictedCount() + //(extStorageCache != null? extStorageCache.getEvictedCount():0); (onHeapEnabled()? onHeapCache.getEvictedCount(): 0); } /** * RAM only * Returns the number of blocks currently cached in the block cache. * @return number of blocks in the cache */ public long getBlockCount() { return size(); } /** * Performs a BlockCache summary and returns a List of BlockCacheColumnFamilySummary objects. * This method could be fairly heavy-weight in that it evaluates the entire HBase file-system * against what is in the RegionServer BlockCache. * <br><br> * The contract of this interface is to return the List in sorted order by Table name, then * ColumnFamily. 
* * @param conf HBaseConfiguration * @return List of BlockCacheColumnFamilySummary * @throws IOException exception */ public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(Configuration conf) throws IOException { // TODO // it seems that this API is not used return new ArrayList<BlockCacheColumnFamilySummary>(); } public void dumpCacheStats() { if(onHeapEnabled()){ LOG.info("On heap stats:"); LOG.info("Size="+ onHeapCache.heapSize()); LOG.info("hits="+onHeapStats.getHitCount() + " miss="+onHeapStats.getMissCount() + " hit ratio="+onHeapStats.getHitRatio()); } LOG.info("Off heap stats:"); LOG.info("Size="+ offHeapCache.getAllocatedMemorySize()); LOG.info("hits="+offHeapStats.getHitCount() + " miss="+offHeapStats.getMissCount() + " hit ratio="+offHeapStats.getHitRatio()); if( storage != null){ LOG.info("External storage stats:"); LOG.info("Size="+ storage.size()); LOG.info("hits="+extStats.getHitCount() + " miss="+extStats.getMissCount() + " hit ratio="+extStats.getHitRatio()); } LOG.info("Overall stats:"); LOG.info("hits="+stats.getHitCount() + " miss="+stats.getMissCount() + " hit ratio="+stats.getHitRatio()); } }
Issue#45 improve cache hit ratio for L3-DISK
block-cache/src/main/java/com/koda/integ/hbase/blockcache/OffHeapBlockCache.java
Issue#45 improve cache hit ratio for L3-DISK
<ide><path>block-cache/src/main/java/com/koda/integ/hbase/blockcache/OffHeapBlockCache.java <ide> StorageHandle newHandle = storage.getData(handle, buffer); <ide> if(buffer.position() > 0) buffer.flip(); <ide> int size = buffer.getInt(); <del> if(size == 0) return null; <add> if(size == 0) { <add> // BIGBASE-45 <add> // Remove reference from reference cache <add> extStorageCache.remove(hashed); <add> return null; <add> } <ide> // Skip key <ide> int keySize = buffer.getInt(); <ide> buffer.position(8 + keySize);
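The diff above is the whole of the Issue#45 change: when the external store returns an empty record, the old code simply returned null but left the block's handle in the in-RAM reference cache, so later lookups kept chasing a reference that could never be satisfied. The new code evicts that stale reference on the spot. A minimal sketch of the same pattern, with a plain Map standing in for the off-heap reference cache (all names here are illustrative, not from the project):

import java.util.HashMap;
import java.util.Map;

// Drop a cached reference as soon as the backing store proves it is stale,
// so the next lookup records a clean miss instead of re-reading a dead handle.
class StaleRefCleanupSketch {
    private final Map<String, Long> refCache = new HashMap<>(); // block name -> storage handle
    private final Map<Long, byte[]> store = new HashMap<>();    // storage handle -> data

    byte[] get(String key) {
        Long handle = refCache.get(key);
        if (handle == null) {
            return null;                 // ordinary miss
        }
        byte[] data = store.get(handle);
        if (data == null || data.length == 0) {
            refCache.remove(key);        // BIGBASE-45-style cleanup of the stale reference
            return null;
        }
        return data;
    }
}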
Java
apache-2.0
74efd0e83cbb07b2465733d39feda080b0765672
0
PDX-Flamingo/codonpdx,PDX-Flamingo/codonpdx
package edu.pdx.codonpdx; import javax.servlet.ServletException; import javax.servlet.annotation.MultipartConfig; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.Part; import java.io.*; /** * Created by Robert on 7/7/2014. */ public class ResultsViewServlet extends HttpServlet{ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { PrintWriter out = response.getWriter(); out.println(request.getRequestURI()); String[] URI = request.getRequestURI().split("/"); switch (URI.length < 3 ? "none" : URI[2]) { case "resultsView": if(URI.length == 4) { request.getRequestDispatcher("/resultsView.html").forward(request, response); } else if (URI.length == 5) { request.getRequestDispatcher("/compareTwo.html").forward(request, response); } break; default: } } catch (IOException e) { PrintWriter out = response.getWriter(); out.println(e.getMessage()); } } }
src/edu/pdx/codonpdx/ResultsViewServlet.java
package edu.pdx.codonpdx; import javax.servlet.ServletException; import javax.servlet.annotation.MultipartConfig; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.Part; import java.io.*; /** * Created by Robert on 7/7/2014. */ public class ResultsViewServlet extends HttpServlet{ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { PrintWriter out = response.getWriter(); out.println(request.getRequestURI()); String[] URI = request.getRequestURI().split("/"); switch (URI.length < 3 ? "none" : URI[2]) { case "resultsView": if(URI.length == 4) { out.println("one"); request.getRequestDispatcher("/resultsView.html").forward(request, response); } else if (URI.length == 5) { out.println("two"); request.getRequestDispatcher("/compareTwo.html").forward(request, response); } break; default: } } catch (IOException e) { PrintWriter out = response.getWriter(); out.println(e.getMessage()); } } }
removing debug code
src/edu/pdx/codonpdx/ResultsViewServlet.java
removing debug code
<ide><path>src/edu/pdx/codonpdx/ResultsViewServlet.java <ide> switch (URI.length < 3 ? "none" : URI[2]) { <ide> case "resultsView": <ide> if(URI.length == 4) { <del> out.println("one"); <ide> request.getRequestDispatcher("/resultsView.html").forward(request, response); <ide> } <ide> else if (URI.length == 5) { <del> out.println("two"); <ide> request.getRequestDispatcher("/compareTwo.html").forward(request, response); <ide> } <ide> break;
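For reference, the servlet in this record dispatches purely on how many segments the request URI splits into: one result id routes to resultsView.html, two ids route to compareTwo.html. The snippet below (the context path and ids are made up) shows why those cases come out as lengths 4 and 5:

// Hypothetical URIs, split the same way ResultsViewServlet splits them.
public class UriSplitDemo {
    public static void main(String[] args) {
        String one = "/codonpdx/resultsView/job1";      // ["", "codonpdx", "resultsView", "job1"]
        String two = "/codonpdx/resultsView/job1/job2"; // five segments
        System.out.println(one.split("/").length);      // 4 -> resultsView.html
        System.out.println(two.split("/").length);      // 5 -> compareTwo.html
        System.out.println(one.split("/")[2]);          // "resultsView" -> matches the switch case
    }
}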
Java
apache-2.0
2f744f0da91f551c6674194516c6a02a78116527
0
ChadKillingsworth/closure-compiler,mprobst/closure-compiler,nawawi/closure-compiler,monetate/closure-compiler,nawawi/closure-compiler,tiobe/closure-compiler,google/closure-compiler,monetate/closure-compiler,tiobe/closure-compiler,tdelmas/closure-compiler,Yannic/closure-compiler,google/closure-compiler,tiobe/closure-compiler,tdelmas/closure-compiler,shantanusharma/closure-compiler,tiobe/closure-compiler,shantanusharma/closure-compiler,vobruba-martin/closure-compiler,monetate/closure-compiler,Yannic/closure-compiler,vobruba-martin/closure-compiler,tdelmas/closure-compiler,nawawi/closure-compiler,shantanusharma/closure-compiler,shantanusharma/closure-compiler,mprobst/closure-compiler,google/closure-compiler,tdelmas/closure-compiler,ChadKillingsworth/closure-compiler,Yannic/closure-compiler,vobruba-martin/closure-compiler,ChadKillingsworth/closure-compiler,ChadKillingsworth/closure-compiler,mprobst/closure-compiler,vobruba-martin/closure-compiler,nawawi/closure-compiler,Yannic/closure-compiler,google/closure-compiler,monetate/closure-compiler,mprobst/closure-compiler
/* * Copyright 2006 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.javascript.jscomp.GlobalNamespace.Name; import com.google.javascript.jscomp.GlobalNamespace.Ref; import com.google.javascript.jscomp.Normalize.PropagateConstantAnnotationsOverVars; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.TokenStream; import com.google.javascript.rhino.TypeI; import java.util.List; import java.util.Map; /** * Flattens global objects/namespaces by replacing each '.' with '$' in * their names. This reduces the number of property lookups the browser has * to do and allows the {@link RenameVars} pass to shorten namespaced names. * For example, goog.events.handleEvent() -> goog$events$handleEvent() -> Za(). * * <p>If a global object's name is assigned to more than once, or if a property * is added to the global object in a complex expression, then none of its * properties will be collapsed (for safety/correctness). * * <p>If, after a global object is declared, it is never referenced except when * its properties are read or set, then the object will be removed after its * properties have been collapsed. * * <p>Uninitialized variable stubs are created at a global object's declaration * site for any of its properties that are added late in a local scope. * * <p> Static properties of constructors are always collapsed, unsafely! * For other objects: if, after an object is declared, it is referenced directly * in a way that might create an alias for it, then none of its properties will * be collapsed. * This behavior is a safeguard to prevent the values associated with the * flattened names from getting out of sync with the object's actual property * values. For example, in the following case, an alias a$b, if created, could * easily keep the value 0 even after a.b became 5: * <code> a = {b: 0}; c = a; c.b = 5; </code>. * * <p>This pass doesn't flatten property accesses of the form: a[b]. * * <p>For lots of examples, see the unit test. 
* */ class CollapseProperties implements CompilerPass { // Warnings static final DiagnosticType UNSAFE_NAMESPACE_WARNING = DiagnosticType.warning( "JSC_UNSAFE_NAMESPACE", "incomplete alias created for namespace {0}"); static final DiagnosticType NAMESPACE_REDEFINED_WARNING = DiagnosticType.warning( "JSC_NAMESPACE_REDEFINED", "namespace {0} should not be redefined"); static final DiagnosticType UNSAFE_THIS = DiagnosticType.warning( "JSC_UNSAFE_THIS", "dangerous use of ''this'' in static method {0}"); private AbstractCompiler compiler; /** Global namespace tree */ private List<Name> globalNames; /** Maps names (e.g. "a.b.c") to nodes in the global namespace tree */ private Map<String, Name> nameMap; CollapseProperties(AbstractCompiler compiler) { this.compiler = compiler; } @Override public void process(Node externs, Node root) { GlobalNamespace namespace = new GlobalNamespace(compiler, root); nameMap = namespace.getNameIndex(); globalNames = namespace.getNameForest(); checkNamespaces(); for (Name name : globalNames) { flattenReferencesToCollapsibleDescendantNames(name, name.getBaseName()); } // We collapse property definitions after collapsing property references // because this step can alter the parse tree above property references, // invalidating the node ancestry stored with each reference. for (Name name : globalNames) { collapseDeclarationOfNameAndDescendants(name, name.getBaseName()); } // This shouldn't be necessary, this pass should already be setting new constants as constant. // TODO(b/64256754): Investigate. (new PropagateConstantAnnotationsOverVars(compiler, false)).process(externs, root); } /** * Runs through all namespaces (prefixes of classes and enums), and checks if * any of them have been used in an unsafe way. */ private void checkNamespaces() { for (Name name : nameMap.values()) { if (name.isNamespaceObjectLit() && (name.aliasingGets > 0 || name.localSets + name.globalSets > 1 || name.deleteProps > 0)) { boolean initialized = name.getDeclaration() != null; for (Ref ref : name.getRefs()) { if (ref == name.getDeclaration()) { continue; } if (ref.type == Ref.Type.DELETE_PROP) { if (initialized) { warnAboutNamespaceRedefinition(name, ref); } } else if ( ref.type == Ref.Type.SET_FROM_GLOBAL || ref.type == Ref.Type.SET_FROM_LOCAL) { if (initialized && !isSafeNamespaceReinit(ref)) { warnAboutNamespaceRedefinition(name, ref); } initialized = true; } else if (ref.type == Ref.Type.ALIASING_GET) { warnAboutNamespaceAliasing(name, ref); } } } } } private boolean isSafeNamespaceReinit(Ref ref) { // allow "a = a || {}" or "var a = a || {}" Node valParent = getValueParent(ref); Node val = valParent.getLastChild(); if (val.isOr()) { Node maybeName = val.getFirstChild(); if (ref.node.matchesQualifiedName(maybeName)) { return true; } } return false; } /** * Gets the parent node of the value for any assignment to a Name. * For example, in the assignment * {@code var x = 3;} * the parent would be the NAME node. */ private static Node getValueParent(Ref ref) { // there are four types of declarations: VARs, LETs, CONSTs, and ASSIGNs Node n = ref.node.getParent(); return (n != null && NodeUtil.isNameDeclaration(n)) ? ref.node : ref.node.getParent(); } /** * Reports a warning because a namespace was aliased. 
* * @param nameObj A namespace that is being aliased * @param ref The reference that forced the alias */ private void warnAboutNamespaceAliasing(Name nameObj, Ref ref) { compiler.report( JSError.make(ref.node, UNSAFE_NAMESPACE_WARNING, nameObj.getFullName())); } /** * Reports a warning because a namespace was redefined. * * @param nameObj A namespace that is being redefined * @param ref The reference that set the namespace */ private void warnAboutNamespaceRedefinition(Name nameObj, Ref ref) { compiler.report( JSError.make(ref.node, NAMESPACE_REDEFINED_WARNING, nameObj.getFullName())); } /** * Flattens all references to collapsible properties of a global name except * their initial definitions. Recurs on subnames. * * @param n An object representing a global name * @param alias The flattened name for {@code n} */ private void flattenReferencesToCollapsibleDescendantNames( Name n, String alias) { if (n.props == null || n.isCollapsingExplicitlyDenied()) { return; } for (Name p : n.props) { String propAlias = appendPropForAlias(alias, p.getBaseName()); if (p.canCollapse()) { flattenReferencesTo(p, propAlias); } else if (p.isSimpleStubDeclaration() && !p.isCollapsingExplicitlyDenied()) { flattenSimpleStubDeclaration(p, propAlias); } flattenReferencesToCollapsibleDescendantNames(p, propAlias); } } /** * Flattens a stub declaration. * This is mostly a hack to support legacy users. */ private void flattenSimpleStubDeclaration(Name name, String alias) { Ref ref = Iterables.getOnlyElement(name.getRefs()); Node nameNode = NodeUtil.newName( compiler, alias, ref.node, name.getFullName()); Node varNode = IR.var(nameNode).useSourceInfoIfMissingFrom(nameNode); checkState(ref.node.getParent().isExprResult()); Node parent = ref.node.getParent(); Node grandparent = parent.getParent(); grandparent.replaceChild(parent, varNode); compiler.reportChangeToEnclosingScope(varNode); } /** * Flattens all references to a collapsible property of a global name except * its initial definition. * * @param n A global property name (e.g. "a.b" or "a.b.c.d") * @param alias The flattened name (e.g. "a$b" or "a$b$c$d") */ private void flattenReferencesTo(Name n, String alias) { String originalName = n.getFullName(); for (Ref r : n.getRefs()) { if (r == n.getDeclaration()) { // Declarations are handled separately. continue; } Node rParent = r.node.getParent(); // There are two cases when we shouldn't flatten a reference: // 1) Object literal keys, because duplicate keys show up as refs. // 2) References inside a complex assign. (a = x.y = 0). These are // called TWIN references, because they show up twice in the // reference list. Only collapse the set, not the alias. if (!NodeUtil.isObjectLitKey(r.node) && (r.getTwin() == null || r.isSet())) { flattenNameRef(alias, r.node, rParent, originalName); } } // Flatten all occurrences of a name as a prefix of its subnames. For // example, if {@code n} corresponds to the name "a.b", then "a.b" will be // replaced with "a$b" in all occurrences of "a.b.c", "a.b.c.d", etc. if (n.props != null) { for (Name p : n.props) { flattenPrefixes(alias, p, 1); } } } /** * Flattens all occurrences of a name as a prefix of subnames beginning * with a particular subname. * * @param n A global property name (e.g. "a.b.c.d") * @param alias A flattened prefix name (e.g. "a$b") * @param depth The difference in depth between the property name and * the prefix name (e.g. 
2) */ private void flattenPrefixes(String alias, Name n, int depth) { // Only flatten the prefix of a name declaration if the name being // initialized is fully qualified (i.e. not an object literal key). String originalName = n.getFullName(); Ref decl = n.getDeclaration(); if (decl != null && decl.node != null && decl.node.isGetProp()) { flattenNameRefAtDepth(alias, decl.node, depth, originalName); } for (Ref r : n.getRefs()) { if (r == decl) { // Declarations are handled separately. continue; } // References inside a complex assign (a = x.y = 0) // have twins. We should only flatten one of the twins. if (r.getTwin() == null || r.isSet()) { flattenNameRefAtDepth(alias, r.node, depth, originalName); } } if (n.props != null) { for (Name p : n.props) { flattenPrefixes(alias, p, depth + 1); } } } /** * Flattens a particular prefix of a single name reference. * * @param alias A flattened prefix name (e.g. "a$b") * @param n The node corresponding to a subproperty name (e.g. "a.b.c.d") * @param depth The difference in depth between the property name and * the prefix name (e.g. 2) * @param originalName String version of the property name. */ private void flattenNameRefAtDepth(String alias, Node n, int depth, String originalName) { // This method has to work for both GETPROP chains and, in rare cases, // OBJLIT keys, possibly nested. That's why we check for children before // proceeding. In the OBJLIT case, we don't need to do anything. Token nType = n.getToken(); boolean isQName = nType == Token.NAME || nType == Token.GETPROP; boolean isObjKey = NodeUtil.isObjectLitKey(n); checkState(isObjKey || isQName); if (isQName) { for (int i = 1; i < depth && n.hasChildren(); i++) { n = n.getFirstChild(); } if (n.isGetProp() && n.getFirstChild().isGetProp()) { flattenNameRef(alias, n.getFirstChild(), n, originalName); } } } /** * Replaces a GETPROP a.b.c with a NAME a$b$c. * * @param alias A flattened prefix name (e.g. "a$b") * @param n The GETPROP node corresponding to the original name (e.g. "a.b") * @param parent {@code n}'s parent * @param originalName String version of the property name. */ private void flattenNameRef(String alias, Node n, Node parent, String originalName) { Preconditions.checkArgument( n.isGetProp(), "Expected GETPROP, found %s. Node: %s", n.getToken(), n); // BEFORE: // getprop // getprop // name a // string b // string c // AFTER: // name a$b$c Node ref = NodeUtil.newName(compiler, alias, n, originalName); NodeUtil.copyNameAnnotations(n.getLastChild(), ref); if (parent.isCall() && n == parent.getFirstChild()) { // The node was a call target, we are deliberately flatten these as // we node the "this" isn't provided by the namespace. Mark it as such: parent.putBooleanProp(Node.FREE_CALL, true); } TypeI type = n.getTypeI(); if (type != null) { ref.setTypeI(type); } parent.replaceChild(n, ref); compiler.reportChangeToEnclosingScope(ref); } /** * Collapses definitions of the collapsible properties of a global name. * Recurs on subnames that also represent JavaScript objects with * collapsible properties. * * @param n A node representing a global name * @param alias The flattened name for {@code n} */ private void collapseDeclarationOfNameAndDescendants(Name n, String alias) { boolean canCollapseChildNames = n.canCollapseUnannotatedChildNames(); // Handle this name first so that nested object literals get unrolled. 
if (n.canCollapse()) { updateObjLitOrFunctionDeclaration(n, alias, canCollapseChildNames); } if (n.props == null) { return; } for (Name p : n.props) { // Recur first so that saved node ancestries are intact when needed. collapseDeclarationOfNameAndDescendants( p, appendPropForAlias(alias, p.getBaseName())); if (!p.inExterns && canCollapseChildNames && p.getDeclaration() != null && p.canCollapse() && p.getDeclaration().node != null && p.getDeclaration().node.getParent() != null && p.getDeclaration().node.getParent().isAssign()) { updateSimpleDeclaration( appendPropForAlias(alias, p.getBaseName()), p, p.getDeclaration()); } } } /** * Updates the initial assignment to a collapsible property at global scope * by changing it to a variable declaration (e.g. a.b = 1 -> var a$b = 1). * The property's value may either be a primitive or an object literal or * function whose properties aren't collapsible. * * @param alias The flattened property name (e.g. "a$b") * @param refName The name for the reference being updated. * @param ref An object containing information about the assignment getting * updated */ private void updateSimpleDeclaration(String alias, Name refName, Ref ref) { Node rvalue = ref.node.getNext(); Node parent = ref.node.getParent(); Node grandparent = parent.getParent(); Node greatGrandparent = grandparent.getParent(); if (rvalue != null && rvalue.isFunction()) { checkForHosedThisReferences(rvalue, refName.docInfo, refName); } // Create the new alias node. Node nameNode = NodeUtil.newName(compiler, alias, grandparent.getFirstChild(), refName.getFullName()); NodeUtil.copyNameAnnotations(ref.node.getLastChild(), nameNode); if (grandparent.isExprResult()) { // BEFORE: a.b.c = ...; // exprstmt // assign // getprop // getprop // name a // string b // string c // NODE // AFTER: var a$b$c = ...; // var // name a$b$c // NODE // Remove the r-value (NODE). parent.removeChild(rvalue); nameNode.addChildToFront(rvalue); Node varNode = IR.var(nameNode); greatGrandparent.replaceChild(grandparent, varNode); compiler.reportChangeToEnclosingScope(varNode); } else { // This must be a complex assignment. checkNotNull(ref.getTwin()); // BEFORE: // ... (x.y = 3); // // AFTER: // var x$y; // ... (x$y = 3); Node current = grandparent; Node currentParent = grandparent.getParent(); for (; !currentParent.isScript() && !currentParent.isNormalBlock(); current = currentParent, currentParent = currentParent.getParent()) {} // Create a stub variable declaration right // before the current statement. Node stubVar = IR.var(nameNode.cloneTree()) .useSourceInfoIfMissingFrom(nameNode); currentParent.addChildBefore(stubVar, current); parent.replaceChild(ref.node, nameNode); compiler.reportChangeToEnclosingScope(nameNode); } } /** * Updates the first initialization (a.k.a "declaration") of a global name. * This involves flattening the global name (if it's not just a global * variable name already), collapsing object literal keys into global * variables, declaring stub global variables for properties added later * in a local scope. * * It may seem odd that this function also takes care of declaring stubs * for direct children. The ultimate goal of this function is to eliminate * the global name entirely (when possible), so that "middlemen" namespaces * disappear, and to do that we need to make sure that all the direct children * will be collapsed as well. * * @param n An object representing a global name (e.g. "a", "a.b.c") * @param alias The flattened name for {@code n} (e.g. 
"a", "a$b$c") * @param canCollapseChildNames Whether it's possible to collapse children of * this name. (This is mostly passed for convenience; it's equivalent to * n.canCollapseChildNames()). */ private void updateObjLitOrFunctionDeclaration( Name n, String alias, boolean canCollapseChildNames) { Ref decl = n.getDeclaration(); if (decl == null) { // Some names do not have declarations, because they // are only defined in local scopes. return; } if (decl.getTwin() != null) { // Twin declarations will get handled when normal references // are handled. return; } switch (decl.node.getParent().getToken()) { case ASSIGN: updateObjLitOrFunctionDeclarationAtAssignNode( n, alias, canCollapseChildNames); break; case VAR: case LET: case CONST: updateObjLitOrFunctionDeclarationAtVariableNode(n, canCollapseChildNames); break; case FUNCTION: updateFunctionDeclarationAtFunctionNode(n, canCollapseChildNames); break; default: break; } } /** * Updates the first initialization (a.k.a "declaration") of a global name * that occurs at an ASSIGN node. See comment for * {@link #updateObjLitOrFunctionDeclaration}. * * @param n An object representing a global name (e.g. "a", "a.b.c") * @param alias The flattened name for {@code n} (e.g. "a", "a$b$c") */ private void updateObjLitOrFunctionDeclarationAtAssignNode( Name n, String alias, boolean canCollapseChildNames) { // NOTE: It's important that we don't add additional nodes // (e.g. a var node before the exprstmt) because the exprstmt might be // the child of an if statement that's not inside a block). // All qualified names - even for variables that are initially declared as LETS and CONSTS - // are being declared as VAR statements, but this is not incorrect because // we are only collapsing for global names. Ref ref = n.getDeclaration(); Node rvalue = ref.node.getNext(); Node varNode = new Node(Token.VAR); Node varParent = ref.node.getAncestor(3); Node grandparent = ref.node.getAncestor(2); boolean isObjLit = rvalue.isObjectLit(); boolean insertedVarNode = false; if (isObjLit && n.canEliminate()) { // Eliminate the object literal altogether. varParent.replaceChild(grandparent, varNode); ref.node = null; insertedVarNode = true; compiler.reportChangeToEnclosingScope(varNode); } else if (!n.isSimpleName()) { // Create a VAR node to declare the name. if (rvalue.isFunction()) { checkForHosedThisReferences(rvalue, n.docInfo, n); } compiler.reportChangeToEnclosingScope(rvalue); ref.node.getParent().removeChild(rvalue); Node nameNode = NodeUtil.newName(compiler, alias, ref.node.getAncestor(2), n.getFullName()); JSDocInfo info = NodeUtil.getBestJSDocInfo(ref.node.getParent()); if (ref.node.getLastChild().getBooleanProp(Node.IS_CONSTANT_NAME) || (info != null && info.isConstant())) { nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); } if (info != null) { varNode.setJSDocInfo(info); } varNode.addChildToBack(nameNode); nameNode.addChildToFront(rvalue); varParent.replaceChild(grandparent, varNode); // Update the node ancestry stored in the reference. ref.node = nameNode; insertedVarNode = true; compiler.reportChangeToEnclosingScope(varNode); } if (canCollapseChildNames) { if (isObjLit) { declareVariablesForObjLitValues( n, alias, rvalue, varNode, varNode.getPrevious(), varParent); } addStubsForUndeclaredProperties(n, alias, varParent, varNode); } if (insertedVarNode) { if (!varNode.hasChildren()) { varParent.removeChild(varNode); } } } /** * Warns about any references to "this" in the given FUNCTION. The function * is getting collapsed, so the references will change. 
*/ private void checkForHosedThisReferences(Node function, JSDocInfo docInfo, final Name name) { // A function is getting collapsed. Make sure that if it refers to "this", // it must be a constructor, interface, record, arrow function, or documented with @this. boolean isAllowedToReferenceThis = (docInfo != null && (docInfo.isConstructorOrInterface() || docInfo.hasThisType())) || function.isArrowFunction(); if (!isAllowedToReferenceThis) { NodeTraversal.traverseEs6(compiler, function.getLastChild(), new NodeTraversal.AbstractShallowCallback() { @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isThis()) { compiler.report( JSError.make(n, UNSAFE_THIS, name.getFullName())); } } }); } } /** * Updates the first initialization (a.k.a "declaration") of a global name that occurs at a VAR * node. See comment for {@link #updateObjLitOrFunctionDeclaration}. * * @param n An object representing a global name (e.g. "a") */ private void updateObjLitOrFunctionDeclarationAtVariableNode( Name n, boolean canCollapseChildNames) { if (!canCollapseChildNames) { return; } Ref ref = n.getDeclaration(); String name = ref.node.getString(); Node rvalue = ref.node.getFirstChild(); Node variableNode = ref.node.getParent(); Node grandparent = variableNode.getParent(); boolean isObjLit = rvalue.isObjectLit(); if (isObjLit) { declareVariablesForObjLitValues( n, name, rvalue, variableNode, variableNode.getPrevious(), grandparent); } addStubsForUndeclaredProperties(n, name, grandparent, variableNode); if (isObjLit && n.canEliminate()) { variableNode.removeChild(ref.node); compiler.reportChangeToEnclosingScope(variableNode); if (!variableNode.hasChildren()) { grandparent.removeChild(variableNode); } // Clear out the object reference, since we've eliminated it from the // parse tree. ref.node = null; } } /** * Updates the first initialization (a.k.a "declaration") of a global name * that occurs at a FUNCTION node. See comment for * {@link #updateObjLitOrFunctionDeclaration}. * * @param n An object representing a global name (e.g. "a") */ private void updateFunctionDeclarationAtFunctionNode( Name n, boolean canCollapseChildNames) { if (!canCollapseChildNames || !n.canCollapse()) { return; } Ref ref = n.getDeclaration(); String fnName = ref.node.getString(); addStubsForUndeclaredProperties( n, fnName, ref.node.getAncestor(2), ref.node.getParent()); } /** * Declares global variables to serve as aliases for the values in an object literal, optionally * removing all of the object literal's keys and values. * * @param alias The object literal's flattened name (e.g. "a$b$c") * @param objlit The OBJLIT node * @param varNode The VAR node to which new global variables should be added as children * @param nameToAddAfter The child of {@code varNode} after which new variables should be added * (may be null) * @param varParent {@code varNode}'s parent */ private void declareVariablesForObjLitValues( Name objlitName, String alias, Node objlit, Node varNode, Node nameToAddAfter, Node varParent) { int arbitraryNameCounter = 0; boolean discardKeys = !objlitName.shouldKeepKeys(); for (Node key = objlit.getFirstChild(), nextKey; key != null; key = nextKey) { Node value = key.getFirstChild(); nextKey = key.getNext(); // A computed property, or a get or a set can not be rewritten as a VAR. if (key.isGetterDef() || key.isSetterDef() || key.isComputedProp()) { continue; } // We generate arbitrary names for keys that aren't valid JavaScript // identifiers, since those keys are never referenced. 
(If they were, // this object literal's child names wouldn't be collapsible.) The only // reason that we don't eliminate them entirely is the off chance that // their values are expressions that have side effects. boolean isJsIdentifier = !key.isNumber() && TokenStream.isJSIdentifier(key.getString()); String propName = isJsIdentifier ? key.getString() : String.valueOf(++arbitraryNameCounter); // If the name cannot be collapsed, skip it. String qName = objlitName.getFullName() + '.' + propName; Name p = nameMap.get(qName); if (p != null && !p.canCollapse()) { continue; } String propAlias = appendPropForAlias(alias, propName); Node refNode = null; if (discardKeys) { objlit.removeChild(key); value.detach(); // Don't report a change here because the objlit has already been removed from the tree. } else { // Substitute a reference for the value. refNode = IR.name(propAlias); if (key.getBooleanProp(Node.IS_CONSTANT_NAME)) { refNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); } key.replaceChild(value, refNode); compiler.reportChangeToEnclosingScope(refNode); } // Declare the collapsed name as a variable with the original value. Node nameNode = IR.name(propAlias); nameNode.addChildToFront(value); if (key.getBooleanProp(Node.IS_CONSTANT_NAME)) { nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); } Node newVar = IR.var(nameNode).useSourceInfoIfMissingFromForTree(key); if (nameToAddAfter != null) { varParent.addChildAfter(newVar, nameToAddAfter); } else { varParent.addChildBefore(newVar, varNode); } compiler.reportChangeToEnclosingScope(newVar); nameToAddAfter = newVar; // Update the global name's node ancestry if it hasn't already been // done. (Duplicate keys in an object literal can bring us here twice // for the same global name.) if (isJsIdentifier && p != null) { if (!discardKeys) { Ref newAlias = p.getDeclaration().cloneAndReclassify(Ref.Type.ALIASING_GET); newAlias.node = refNode; p.addRef(newAlias); } p.getDeclaration().node = nameNode; if (value.isFunction()) { checkForHosedThisReferences(value, key.getJSDocInfo(), p); } } } } /** * Adds global variable "stubs" for any properties of a global name that are only set in a local * scope or read but never set. * * @param n An object representing a global name (e.g. "a", "a.b.c") * @param alias The flattened name of the object whose properties we are adding stubs for (e.g. * "a$b$c") * @param parent The node to which new global variables should be added as children * @param addAfter The child of after which new variables should be added */ private void addStubsForUndeclaredProperties(Name n, String alias, Node parent, Node addAfter) { checkState(n.canCollapseUnannotatedChildNames(), n); checkArgument(NodeUtil.isStatementBlock(parent), parent); checkNotNull(addAfter); if (n.props == null) { return; } for (Name p : n.props) { if (p.needsToBeStubbed()) { String propAlias = appendPropForAlias(alias, p.getBaseName()); Node nameNode = IR.name(propAlias); Node newVar = IR.var(nameNode).useSourceInfoIfMissingFromForTree(addAfter); parent.addChildAfter(newVar, addAfter); addAfter = newVar; compiler.reportChangeToEnclosingScope(newVar); // Determine if this is a constant var by checking the first // reference to it. Don't check the declaration, as it might be null. 
if (p.getRefs().get(0).node.getLastChild().getBooleanProp( Node.IS_CONSTANT_NAME)) { nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); compiler.reportChangeToEnclosingScope(nameNode); } } } } private String appendPropForAlias(String root, String prop) { if (prop.indexOf('$') != -1) { // Encode '$' in a property as '$0'. Because '0' cannot be the // start of an identifier, this will never conflict with our // encoding from '.' -> '$'. prop = prop.replace("$", "$0"); } String result = root + '$' + prop; int id = 1; while (nameMap.containsKey(result)) { result = root + '$' + prop + '$' + id; id++; } return result; } }
src/com/google/javascript/jscomp/CollapseProperties.java
/* * Copyright 2006 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.javascript.jscomp.GlobalNamespace.Name; import com.google.javascript.jscomp.GlobalNamespace.Ref; import com.google.javascript.jscomp.Normalize.PropagateConstantAnnotationsOverVars; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.TokenStream; import com.google.javascript.rhino.TypeI; import java.util.List; import java.util.Map; /** * Flattens global objects/namespaces by replacing each '.' with '$' in * their names. This reduces the number of property lookups the browser has * to do and allows the {@link RenameVars} pass to shorten namespaced names. * For example, goog.events.handleEvent() -> goog$events$handleEvent() -> Za(). * * <p>If a global object's name is assigned to more than once, or if a property * is added to the global object in a complex expression, then none of its * properties will be collapsed (for safety/correctness). * * <p>If, after a global object is declared, it is never referenced except when * its properties are read or set, then the object will be removed after its * properties have been collapsed. * * <p>Uninitialized variable stubs are created at a global object's declaration * site for any of its properties that are added late in a local scope. * * <p> Static properties of constructors are always collapsed, unsafely! * For other objects: if, after an object is declared, it is referenced directly * in a way that might create an alias for it, then none of its properties will * be collapsed. * This behavior is a safeguard to prevent the values associated with the * flattened names from getting out of sync with the object's actual property * values. For example, in the following case, an alias a$b, if created, could * easily keep the value 0 even after a.b became 5: * <code> a = {b: 0}; c = a; c.b = 5; </code>. * * <p>This pass doesn't flatten property accesses of the form: a[b]. * * <p>For lots of examples, see the unit test. 
* */ class CollapseProperties implements CompilerPass { // Warnings static final DiagnosticType UNSAFE_NAMESPACE_WARNING = DiagnosticType.warning( "JSC_UNSAFE_NAMESPACE", "incomplete alias created for namespace {0}"); static final DiagnosticType NAMESPACE_REDEFINED_WARNING = DiagnosticType.warning( "JSC_NAMESPACE_REDEFINED", "namespace {0} should not be redefined"); static final DiagnosticType UNSAFE_THIS = DiagnosticType.warning( "JSC_UNSAFE_THIS", "dangerous use of 'this' in static method {0}"); private AbstractCompiler compiler; /** Global namespace tree */ private List<Name> globalNames; /** Maps names (e.g. "a.b.c") to nodes in the global namespace tree */ private Map<String, Name> nameMap; CollapseProperties(AbstractCompiler compiler) { this.compiler = compiler; } @Override public void process(Node externs, Node root) { GlobalNamespace namespace = new GlobalNamespace(compiler, root); nameMap = namespace.getNameIndex(); globalNames = namespace.getNameForest(); checkNamespaces(); for (Name name : globalNames) { flattenReferencesToCollapsibleDescendantNames(name, name.getBaseName()); } // We collapse property definitions after collapsing property references // because this step can alter the parse tree above property references, // invalidating the node ancestry stored with each reference. for (Name name : globalNames) { collapseDeclarationOfNameAndDescendants(name, name.getBaseName()); } // This shouldn't be necessary, this pass should already be setting new constants as constant. // TODO(b/64256754): Investigate. (new PropagateConstantAnnotationsOverVars(compiler, false)).process(externs, root); } /** * Runs through all namespaces (prefixes of classes and enums), and checks if * any of them have been used in an unsafe way. */ private void checkNamespaces() { for (Name name : nameMap.values()) { if (name.isNamespaceObjectLit() && (name.aliasingGets > 0 || name.localSets + name.globalSets > 1 || name.deleteProps > 0)) { boolean initialized = name.getDeclaration() != null; for (Ref ref : name.getRefs()) { if (ref == name.getDeclaration()) { continue; } if (ref.type == Ref.Type.DELETE_PROP) { if (initialized) { warnAboutNamespaceRedefinition(name, ref); } } else if ( ref.type == Ref.Type.SET_FROM_GLOBAL || ref.type == Ref.Type.SET_FROM_LOCAL) { if (initialized && !isSafeNamespaceReinit(ref)) { warnAboutNamespaceRedefinition(name, ref); } initialized = true; } else if (ref.type == Ref.Type.ALIASING_GET) { warnAboutNamespaceAliasing(name, ref); } } } } } private boolean isSafeNamespaceReinit(Ref ref) { // allow "a = a || {}" or "var a = a || {}" Node valParent = getValueParent(ref); Node val = valParent.getLastChild(); if (val.isOr()) { Node maybeName = val.getFirstChild(); if (ref.node.matchesQualifiedName(maybeName)) { return true; } } return false; } /** * Gets the parent node of the value for any assignment to a Name. * For example, in the assignment * {@code var x = 3;} * the parent would be the NAME node. */ private static Node getValueParent(Ref ref) { // there are four types of declarations: VARs, LETs, CONSTs, and ASSIGNs Node n = ref.node.getParent(); return (n != null && NodeUtil.isNameDeclaration(n)) ? ref.node : ref.node.getParent(); } /** * Reports a warning because a namespace was aliased. 
* * @param nameObj A namespace that is being aliased * @param ref The reference that forced the alias */ private void warnAboutNamespaceAliasing(Name nameObj, Ref ref) { compiler.report( JSError.make(ref.node, UNSAFE_NAMESPACE_WARNING, nameObj.getFullName())); } /** * Reports a warning because a namespace was redefined. * * @param nameObj A namespace that is being redefined * @param ref The reference that set the namespace */ private void warnAboutNamespaceRedefinition(Name nameObj, Ref ref) { compiler.report( JSError.make(ref.node, NAMESPACE_REDEFINED_WARNING, nameObj.getFullName())); } /** * Flattens all references to collapsible properties of a global name except * their initial definitions. Recurs on subnames. * * @param n An object representing a global name * @param alias The flattened name for {@code n} */ private void flattenReferencesToCollapsibleDescendantNames( Name n, String alias) { if (n.props == null || n.isCollapsingExplicitlyDenied()) { return; } for (Name p : n.props) { String propAlias = appendPropForAlias(alias, p.getBaseName()); if (p.canCollapse()) { flattenReferencesTo(p, propAlias); } else if (p.isSimpleStubDeclaration() && !p.isCollapsingExplicitlyDenied()) { flattenSimpleStubDeclaration(p, propAlias); } flattenReferencesToCollapsibleDescendantNames(p, propAlias); } } /** * Flattens a stub declaration. * This is mostly a hack to support legacy users. */ private void flattenSimpleStubDeclaration(Name name, String alias) { Ref ref = Iterables.getOnlyElement(name.getRefs()); Node nameNode = NodeUtil.newName( compiler, alias, ref.node, name.getFullName()); Node varNode = IR.var(nameNode).useSourceInfoIfMissingFrom(nameNode); checkState(ref.node.getParent().isExprResult()); Node parent = ref.node.getParent(); Node grandparent = parent.getParent(); grandparent.replaceChild(parent, varNode); compiler.reportChangeToEnclosingScope(varNode); } /** * Flattens all references to a collapsible property of a global name except * its initial definition. * * @param n A global property name (e.g. "a.b" or "a.b.c.d") * @param alias The flattened name (e.g. "a$b" or "a$b$c$d") */ private void flattenReferencesTo(Name n, String alias) { String originalName = n.getFullName(); for (Ref r : n.getRefs()) { if (r == n.getDeclaration()) { // Declarations are handled separately. continue; } Node rParent = r.node.getParent(); // There are two cases when we shouldn't flatten a reference: // 1) Object literal keys, because duplicate keys show up as refs. // 2) References inside a complex assign. (a = x.y = 0). These are // called TWIN references, because they show up twice in the // reference list. Only collapse the set, not the alias. if (!NodeUtil.isObjectLitKey(r.node) && (r.getTwin() == null || r.isSet())) { flattenNameRef(alias, r.node, rParent, originalName); } } // Flatten all occurrences of a name as a prefix of its subnames. For // example, if {@code n} corresponds to the name "a.b", then "a.b" will be // replaced with "a$b" in all occurrences of "a.b.c", "a.b.c.d", etc. if (n.props != null) { for (Name p : n.props) { flattenPrefixes(alias, p, 1); } } } /** * Flattens all occurrences of a name as a prefix of subnames beginning * with a particular subname. * * @param n A global property name (e.g. "a.b.c.d") * @param alias A flattened prefix name (e.g. "a$b") * @param depth The difference in depth between the property name and * the prefix name (e.g. 
2) */ private void flattenPrefixes(String alias, Name n, int depth) { // Only flatten the prefix of a name declaration if the name being // initialized is fully qualified (i.e. not an object literal key). String originalName = n.getFullName(); Ref decl = n.getDeclaration(); if (decl != null && decl.node != null && decl.node.isGetProp()) { flattenNameRefAtDepth(alias, decl.node, depth, originalName); } for (Ref r : n.getRefs()) { if (r == decl) { // Declarations are handled separately. continue; } // References inside a complex assign (a = x.y = 0) // have twins. We should only flatten one of the twins. if (r.getTwin() == null || r.isSet()) { flattenNameRefAtDepth(alias, r.node, depth, originalName); } } if (n.props != null) { for (Name p : n.props) { flattenPrefixes(alias, p, depth + 1); } } } /** * Flattens a particular prefix of a single name reference. * * @param alias A flattened prefix name (e.g. "a$b") * @param n The node corresponding to a subproperty name (e.g. "a.b.c.d") * @param depth The difference in depth between the property name and * the prefix name (e.g. 2) * @param originalName String version of the property name. */ private void flattenNameRefAtDepth(String alias, Node n, int depth, String originalName) { // This method has to work for both GETPROP chains and, in rare cases, // OBJLIT keys, possibly nested. That's why we check for children before // proceeding. In the OBJLIT case, we don't need to do anything. Token nType = n.getToken(); boolean isQName = nType == Token.NAME || nType == Token.GETPROP; boolean isObjKey = NodeUtil.isObjectLitKey(n); checkState(isObjKey || isQName); if (isQName) { for (int i = 1; i < depth && n.hasChildren(); i++) { n = n.getFirstChild(); } if (n.isGetProp() && n.getFirstChild().isGetProp()) { flattenNameRef(alias, n.getFirstChild(), n, originalName); } } } /** * Replaces a GETPROP a.b.c with a NAME a$b$c. * * @param alias A flattened prefix name (e.g. "a$b") * @param n The GETPROP node corresponding to the original name (e.g. "a.b") * @param parent {@code n}'s parent * @param originalName String version of the property name. */ private void flattenNameRef(String alias, Node n, Node parent, String originalName) { Preconditions.checkArgument( n.isGetProp(), "Expected GETPROP, found %s. Node: %s", n.getToken(), n); // BEFORE: // getprop // getprop // name a // string b // string c // AFTER: // name a$b$c Node ref = NodeUtil.newName(compiler, alias, n, originalName); NodeUtil.copyNameAnnotations(n.getLastChild(), ref); if (parent.isCall() && n == parent.getFirstChild()) { // The node was a call target, we are deliberately flatten these as // we node the "this" isn't provided by the namespace. Mark it as such: parent.putBooleanProp(Node.FREE_CALL, true); } TypeI type = n.getTypeI(); if (type != null) { ref.setTypeI(type); } parent.replaceChild(n, ref); compiler.reportChangeToEnclosingScope(ref); } /** * Collapses definitions of the collapsible properties of a global name. * Recurs on subnames that also represent JavaScript objects with * collapsible properties. * * @param n A node representing a global name * @param alias The flattened name for {@code n} */ private void collapseDeclarationOfNameAndDescendants(Name n, String alias) { boolean canCollapseChildNames = n.canCollapseUnannotatedChildNames(); // Handle this name first so that nested object literals get unrolled. 
if (n.canCollapse()) { updateObjLitOrFunctionDeclaration(n, alias, canCollapseChildNames); } if (n.props == null) { return; } for (Name p : n.props) { // Recur first so that saved node ancestries are intact when needed. collapseDeclarationOfNameAndDescendants( p, appendPropForAlias(alias, p.getBaseName())); if (!p.inExterns && canCollapseChildNames && p.getDeclaration() != null && p.canCollapse() && p.getDeclaration().node != null && p.getDeclaration().node.getParent() != null && p.getDeclaration().node.getParent().isAssign()) { updateSimpleDeclaration( appendPropForAlias(alias, p.getBaseName()), p, p.getDeclaration()); } } } /** * Updates the initial assignment to a collapsible property at global scope * by changing it to a variable declaration (e.g. a.b = 1 -> var a$b = 1). * The property's value may either be a primitive or an object literal or * function whose properties aren't collapsible. * * @param alias The flattened property name (e.g. "a$b") * @param refName The name for the reference being updated. * @param ref An object containing information about the assignment getting * updated */ private void updateSimpleDeclaration(String alias, Name refName, Ref ref) { Node rvalue = ref.node.getNext(); Node parent = ref.node.getParent(); Node grandparent = parent.getParent(); Node greatGrandparent = grandparent.getParent(); if (rvalue != null && rvalue.isFunction()) { checkForHosedThisReferences(rvalue, refName.docInfo, refName); } // Create the new alias node. Node nameNode = NodeUtil.newName(compiler, alias, grandparent.getFirstChild(), refName.getFullName()); NodeUtil.copyNameAnnotations(ref.node.getLastChild(), nameNode); if (grandparent.isExprResult()) { // BEFORE: a.b.c = ...; // exprstmt // assign // getprop // getprop // name a // string b // string c // NODE // AFTER: var a$b$c = ...; // var // name a$b$c // NODE // Remove the r-value (NODE). parent.removeChild(rvalue); nameNode.addChildToFront(rvalue); Node varNode = IR.var(nameNode); greatGrandparent.replaceChild(grandparent, varNode); compiler.reportChangeToEnclosingScope(varNode); } else { // This must be a complex assignment. checkNotNull(ref.getTwin()); // BEFORE: // ... (x.y = 3); // // AFTER: // var x$y; // ... (x$y = 3); Node current = grandparent; Node currentParent = grandparent.getParent(); for (; !currentParent.isScript() && !currentParent.isNormalBlock(); current = currentParent, currentParent = currentParent.getParent()) {} // Create a stub variable declaration right // before the current statement. Node stubVar = IR.var(nameNode.cloneTree()) .useSourceInfoIfMissingFrom(nameNode); currentParent.addChildBefore(stubVar, current); parent.replaceChild(ref.node, nameNode); compiler.reportChangeToEnclosingScope(nameNode); } } /** * Updates the first initialization (a.k.a "declaration") of a global name. * This involves flattening the global name (if it's not just a global * variable name already), collapsing object literal keys into global * variables, declaring stub global variables for properties added later * in a local scope. * * It may seem odd that this function also takes care of declaring stubs * for direct children. The ultimate goal of this function is to eliminate * the global name entirely (when possible), so that "middlemen" namespaces * disappear, and to do that we need to make sure that all the direct children * will be collapsed as well. * * @param n An object representing a global name (e.g. "a", "a.b.c") * @param alias The flattened name for {@code n} (e.g. 
"a", "a$b$c") * @param canCollapseChildNames Whether it's possible to collapse children of * this name. (This is mostly passed for convenience; it's equivalent to * n.canCollapseChildNames()). */ private void updateObjLitOrFunctionDeclaration( Name n, String alias, boolean canCollapseChildNames) { Ref decl = n.getDeclaration(); if (decl == null) { // Some names do not have declarations, because they // are only defined in local scopes. return; } if (decl.getTwin() != null) { // Twin declarations will get handled when normal references // are handled. return; } switch (decl.node.getParent().getToken()) { case ASSIGN: updateObjLitOrFunctionDeclarationAtAssignNode( n, alias, canCollapseChildNames); break; case VAR: case LET: case CONST: updateObjLitOrFunctionDeclarationAtVariableNode(n, canCollapseChildNames); break; case FUNCTION: updateFunctionDeclarationAtFunctionNode(n, canCollapseChildNames); break; default: break; } } /** * Updates the first initialization (a.k.a "declaration") of a global name * that occurs at an ASSIGN node. See comment for * {@link #updateObjLitOrFunctionDeclaration}. * * @param n An object representing a global name (e.g. "a", "a.b.c") * @param alias The flattened name for {@code n} (e.g. "a", "a$b$c") */ private void updateObjLitOrFunctionDeclarationAtAssignNode( Name n, String alias, boolean canCollapseChildNames) { // NOTE: It's important that we don't add additional nodes // (e.g. a var node before the exprstmt) because the exprstmt might be // the child of an if statement that's not inside a block). // All qualified names - even for variables that are initially declared as LETS and CONSTS - // are being declared as VAR statements, but this is not incorrect because // we are only collapsing for global names. Ref ref = n.getDeclaration(); Node rvalue = ref.node.getNext(); Node varNode = new Node(Token.VAR); Node varParent = ref.node.getAncestor(3); Node grandparent = ref.node.getAncestor(2); boolean isObjLit = rvalue.isObjectLit(); boolean insertedVarNode = false; if (isObjLit && n.canEliminate()) { // Eliminate the object literal altogether. varParent.replaceChild(grandparent, varNode); ref.node = null; insertedVarNode = true; compiler.reportChangeToEnclosingScope(varNode); } else if (!n.isSimpleName()) { // Create a VAR node to declare the name. if (rvalue.isFunction()) { checkForHosedThisReferences(rvalue, n.docInfo, n); } compiler.reportChangeToEnclosingScope(rvalue); ref.node.getParent().removeChild(rvalue); Node nameNode = NodeUtil.newName(compiler, alias, ref.node.getAncestor(2), n.getFullName()); JSDocInfo info = NodeUtil.getBestJSDocInfo(ref.node.getParent()); if (ref.node.getLastChild().getBooleanProp(Node.IS_CONSTANT_NAME) || (info != null && info.isConstant())) { nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); } if (info != null) { varNode.setJSDocInfo(info); } varNode.addChildToBack(nameNode); nameNode.addChildToFront(rvalue); varParent.replaceChild(grandparent, varNode); // Update the node ancestry stored in the reference. ref.node = nameNode; insertedVarNode = true; compiler.reportChangeToEnclosingScope(varNode); } if (canCollapseChildNames) { if (isObjLit) { declareVariablesForObjLitValues( n, alias, rvalue, varNode, varNode.getPrevious(), varParent); } addStubsForUndeclaredProperties(n, alias, varParent, varNode); } if (insertedVarNode) { if (!varNode.hasChildren()) { varParent.removeChild(varNode); } } } /** * Warns about any references to "this" in the given FUNCTION. The function * is getting collapsed, so the references will change. 
*/ private void checkForHosedThisReferences(Node function, JSDocInfo docInfo, final Name name) { // A function is getting collapsed. Make sure that if it refers to "this", // it must be a constructor, interface, record, arrow function, or documented with @this. boolean isAllowedToReferenceThis = (docInfo != null && (docInfo.isConstructorOrInterface() || docInfo.hasThisType())) || function.isArrowFunction(); if (!isAllowedToReferenceThis) { NodeTraversal.traverseEs6(compiler, function.getLastChild(), new NodeTraversal.AbstractShallowCallback() { @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isThis()) { compiler.report( JSError.make(n, UNSAFE_THIS, name.getFullName())); } } }); } } /** * Updates the first initialization (a.k.a "declaration") of a global name that occurs at a VAR * node. See comment for {@link #updateObjLitOrFunctionDeclaration}. * * @param n An object representing a global name (e.g. "a") */ private void updateObjLitOrFunctionDeclarationAtVariableNode( Name n, boolean canCollapseChildNames) { if (!canCollapseChildNames) { return; } Ref ref = n.getDeclaration(); String name = ref.node.getString(); Node rvalue = ref.node.getFirstChild(); Node variableNode = ref.node.getParent(); Node grandparent = variableNode.getParent(); boolean isObjLit = rvalue.isObjectLit(); if (isObjLit) { declareVariablesForObjLitValues( n, name, rvalue, variableNode, variableNode.getPrevious(), grandparent); } addStubsForUndeclaredProperties(n, name, grandparent, variableNode); if (isObjLit && n.canEliminate()) { variableNode.removeChild(ref.node); compiler.reportChangeToEnclosingScope(variableNode); if (!variableNode.hasChildren()) { grandparent.removeChild(variableNode); } // Clear out the object reference, since we've eliminated it from the // parse tree. ref.node = null; } } /** * Updates the first initialization (a.k.a "declaration") of a global name * that occurs at a FUNCTION node. See comment for * {@link #updateObjLitOrFunctionDeclaration}. * * @param n An object representing a global name (e.g. "a") */ private void updateFunctionDeclarationAtFunctionNode( Name n, boolean canCollapseChildNames) { if (!canCollapseChildNames || !n.canCollapse()) { return; } Ref ref = n.getDeclaration(); String fnName = ref.node.getString(); addStubsForUndeclaredProperties( n, fnName, ref.node.getAncestor(2), ref.node.getParent()); } /** * Declares global variables to serve as aliases for the values in an object literal, optionally * removing all of the object literal's keys and values. * * @param alias The object literal's flattened name (e.g. "a$b$c") * @param objlit The OBJLIT node * @param varNode The VAR node to which new global variables should be added as children * @param nameToAddAfter The child of {@code varNode} after which new variables should be added * (may be null) * @param varParent {@code varNode}'s parent */ private void declareVariablesForObjLitValues( Name objlitName, String alias, Node objlit, Node varNode, Node nameToAddAfter, Node varParent) { int arbitraryNameCounter = 0; boolean discardKeys = !objlitName.shouldKeepKeys(); for (Node key = objlit.getFirstChild(), nextKey; key != null; key = nextKey) { Node value = key.getFirstChild(); nextKey = key.getNext(); // A computed property, or a get or a set can not be rewritten as a VAR. if (key.isGetterDef() || key.isSetterDef() || key.isComputedProp()) { continue; } // We generate arbitrary names for keys that aren't valid JavaScript // identifiers, since those keys are never referenced. 
(If they were, // this object literal's child names wouldn't be collapsible.) The only // reason that we don't eliminate them entirely is the off chance that // their values are expressions that have side effects. boolean isJsIdentifier = !key.isNumber() && TokenStream.isJSIdentifier(key.getString()); String propName = isJsIdentifier ? key.getString() : String.valueOf(++arbitraryNameCounter); // If the name cannot be collapsed, skip it. String qName = objlitName.getFullName() + '.' + propName; Name p = nameMap.get(qName); if (p != null && !p.canCollapse()) { continue; } String propAlias = appendPropForAlias(alias, propName); Node refNode = null; if (discardKeys) { objlit.removeChild(key); value.detach(); // Don't report a change here because the objlit has already been removed from the tree. } else { // Substitute a reference for the value. refNode = IR.name(propAlias); if (key.getBooleanProp(Node.IS_CONSTANT_NAME)) { refNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); } key.replaceChild(value, refNode); compiler.reportChangeToEnclosingScope(refNode); } // Declare the collapsed name as a variable with the original value. Node nameNode = IR.name(propAlias); nameNode.addChildToFront(value); if (key.getBooleanProp(Node.IS_CONSTANT_NAME)) { nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); } Node newVar = IR.var(nameNode).useSourceInfoIfMissingFromForTree(key); if (nameToAddAfter != null) { varParent.addChildAfter(newVar, nameToAddAfter); } else { varParent.addChildBefore(newVar, varNode); } compiler.reportChangeToEnclosingScope(newVar); nameToAddAfter = newVar; // Update the global name's node ancestry if it hasn't already been // done. (Duplicate keys in an object literal can bring us here twice // for the same global name.) if (isJsIdentifier && p != null) { if (!discardKeys) { Ref newAlias = p.getDeclaration().cloneAndReclassify(Ref.Type.ALIASING_GET); newAlias.node = refNode; p.addRef(newAlias); } p.getDeclaration().node = nameNode; if (value.isFunction()) { checkForHosedThisReferences(value, key.getJSDocInfo(), p); } } } } /** * Adds global variable "stubs" for any properties of a global name that are only set in a local * scope or read but never set. * * @param n An object representing a global name (e.g. "a", "a.b.c") * @param alias The flattened name of the object whose properties we are adding stubs for (e.g. * "a$b$c") * @param parent The node to which new global variables should be added as children * @param addAfter The child of after which new variables should be added */ private void addStubsForUndeclaredProperties(Name n, String alias, Node parent, Node addAfter) { checkState(n.canCollapseUnannotatedChildNames(), n); checkArgument(NodeUtil.isStatementBlock(parent), parent); checkNotNull(addAfter); if (n.props == null) { return; } for (Name p : n.props) { if (p.needsToBeStubbed()) { String propAlias = appendPropForAlias(alias, p.getBaseName()); Node nameNode = IR.name(propAlias); Node newVar = IR.var(nameNode).useSourceInfoIfMissingFromForTree(addAfter); parent.addChildAfter(newVar, addAfter); addAfter = newVar; compiler.reportChangeToEnclosingScope(newVar); // Determine if this is a constant var by checking the first // reference to it. Don't check the declaration, as it might be null. 
if (p.getRefs().get(0).node.getLastChild().getBooleanProp( Node.IS_CONSTANT_NAME)) { nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true); compiler.reportChangeToEnclosingScope(nameNode); } } } } private String appendPropForAlias(String root, String prop) { if (prop.indexOf('$') != -1) { // Encode '$' in a property as '$0'. Because '0' cannot be the // start of an identifier, this will never conflict with our // encoding from '.' -> '$'. prop = prop.replace("$", "$0"); } String result = root + '$' + prop; int id = 1; while (nameMap.containsKey(result)) { result = root + '$' + prop + '$' + id; id++; } return result; } }
Fix string escaping in CollapseProperties warning ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=174255979
src/com/google/javascript/jscomp/CollapseProperties.java
Fix string escaping in CollapseProperties warning
<ide><path>rc/com/google/javascript/jscomp/CollapseProperties.java <ide> <ide> static final DiagnosticType UNSAFE_THIS = DiagnosticType.warning( <ide> "JSC_UNSAFE_THIS", <del> "dangerous use of 'this' in static method {0}"); <add> "dangerous use of ''this'' in static method {0}"); <ide> <ide> private AbstractCompiler compiler; <ide>
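For context on the change above: the {0} placeholder in the warning text suggests the diagnostic description is run through java.text.MessageFormat-style formatting, where a lone single quote has special meaning, which is why the patch doubles the quotes around ''this''. The following is a minimal, self-contained sketch of that MessageFormat behavior; the class name QuoteEscapingDemo and the "Foo.bar" argument are illustrative only, and the assumption that the compiler's diagnostic formatting follows MessageFormat semantics is inferred from the doubled quotes in the patch rather than stated in the record.

import java.text.MessageFormat;

/** Minimal sketch (not taken from the commit above) of MessageFormat quote handling. */
public class QuoteEscapingDemo {
  public static void main(String[] args) {
    // A lone quote opens a literal section; the quote characters themselves are
    // dropped from the output, so 'this' renders without any quotes.
    System.out.println(MessageFormat.format(
        "dangerous use of 'this' in static method {0}", "Foo.bar"));
    // Output: dangerous use of this in static method Foo.bar

    // A doubled quote ('') emits one literal quote character, which is what the
    // patched warning text relies on: ''this'' renders as 'this'.
    System.out.println(MessageFormat.format(
        "dangerous use of ''this'' in static method {0}", "Foo.bar"));
    // Output: dangerous use of 'this' in static method Foo.bar
  }
}

A related detail of the same mechanism: if the unescaped quotes had enclosed the {0} placeholder, the placeholder would not have been substituted at all, which is the usual reason message templates of this kind escape quotes by doubling them.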
JavaScript
mit
36cb79884f901bb0bcd151e30051261fb2672718
0
Simpreative/WebSockChat,Simpreative/WebSockChat
var ws = require("nodejs-websocket"); var microtime = require("microtime"); var util = require("util"); var server = ws.createServer(function (connection) { connection.nickname = null; connection.timerout = setTimeout(connection.timeout,10000); connection.timeout = function(){ connection.close(1011,"PING TIMEOUT"); } connection.on("text", function (str) { if(str == "PING"){ clearTimeout(connection.timerout); connection.timerout = setTimeout(connection.timeout,10000); broadcast("PONG"); return; } if (connection.nickname === null) { connection.nickname = str; broadcast("<span style='color: #CE5C00;'>" + str + " 님이 입장하셨습니다</span>"); } else { broadcast("[" + connection.nickname + "] " + str); } } ) connection.on("close", function () { broadcast("<span style='color: #C4A000;'>" + connection.nickname + " 님이 퇴장하셨습니다</span>"); }) }).listen(8000) function broadcast(str) { server.connections.forEach(function (connection) { connection.sendText(str); }) }
ws_server.js
var ws = require("nodejs-websocket"); var microtime = require("microtime"); var util = require("util"); var server = ws.createServer(function (connection) { connection.nickname = null; connection.timerout = setTimeout(connection.timeout,10000); connection.timeout = function(){ connection.close(1011,"PING TIMEOUT"); } connection.on("text", function (str) { if(str == "PING"){ clearTimeout(connection.timerout); connection.timerout = setTimeout(connection.timeout,10000); broadcast("PONG"); return; } if (connection.nickname === null) { connection.nickname = str; broadcast(str + " entered"); } else { broadcast("[" + connection.nickname + "] " + str); } } ) connection.on("close", function () { broadcast(connection.nickname + " left"); }) }).listen(8000) function broadcast(str) { server.connections.forEach(function (connection) { connection.sendText(str); }) }
Chat join/part color
ws_server.js
Chat join/part color
<ide><path>s_server.js <ide> <ide> if (connection.nickname === null) { <ide> connection.nickname = str; <del> broadcast(str + " entered"); <add> broadcast("<span style='color: #CE5C00;'>" + str + " 님이 입장하셨습니다</span>"); <ide> } else { <del> <del> <del> broadcast("[" + connection.nickname + "] " + str); <del> <add> broadcast("[" + connection.nickname + "] " + str); <ide> } <ide> } <ide> ) <ide> connection.on("close", function () { <del> broadcast(connection.nickname + " left"); <add> broadcast("<span style='color: #C4A000;'>" + connection.nickname + " 님이 퇴장하셨습니다</span>"); <ide> }) <ide> }).listen(8000) <ide>
Java
mit
c3507f4b86e860dcc2f1a7ea9d5f79263401dd15
0
rtcTo/rtc2gitcli,reinhapa/rtc2gitcli
/** * File Name: Files.java * * Copyright (c) 2015 BISON Schweiz AG, All Rights Reserved. */ package to.rtc.cli.migrate.util; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; /** * Reads/writes all lines of a file using the default platform line separator. * * @author Patrick Reinhart */ public class Files { /** * Reads all lines of the given <code>file</code> using the given character * set <code>cs</code> and returns them as a list without any of the line * separators. * * @param file * the file being read * @param cs * the character used for reading * @return a list of all read lines (empty lines as empty string) * @throws IOException * if the read operation fails */ public static List<String> readLines(File file, Charset cs) throws IOException { if (file.exists()) { FileInputStream in = new FileInputStream(file); try { BufferedReader br = new BufferedReader(new InputStreamReader( in, cs)); List<String> lines = new ArrayList<String>(); String line = null; while ((line = br.readLine()) != null) { lines.add(line); } return lines; } finally { in.close(); } } else { return new ArrayList<String>(); } } /** * Writes all <code>lines</code> given to the <code>file</code> using the * given character set <code>cs</code> and optionally appends them to an * existing file, if <code>append</code> is set to <code>true</code>. The * default platform line separator will be used. * * @param file * the file being written/appended * @param lines * the lines to be written without any line separators * @param cs * the character set used for writing * @param append * <code>true</code> if a existing file should be appended, * <code>false</code> otherwise * @throws IOException * if the write operation fails */ public static void writeLines(File file, List<String> lines, Charset cs, boolean append) throws IOException { FileOutputStream out = new FileOutputStream(file, append); try { PrintWriter pw = new PrintWriter(new OutputStreamWriter(out, cs)); try { for (String line : lines) { pw.append(line).println(); } } finally { pw.close(); } } finally { out.close(); } } }
rtc2git.cli.extension/src/to/rtc/cli/migrate/util/Files.java
/** * File Name: Files.java * * Copyright (c) 2015 BISON Schweiz AG, All Rights Reserved. */ package to.rtc.cli.migrate.util; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; /** * Reads/writes all lines of a file using the default platform line separator. * * @author Patrick Reinhart */ public class Files { /** * Reads all lines of the given <code>file</code> using the given character set <code>cs</code> and returns them as a list without any of * the line separators. * * @param file the file being read * @param cs the character used for reading * @return a list of all read lines (empty lines as empty string) * @throws IOException if the read operation fails */ public static List<String> readLines(File file, Charset cs) throws IOException { if (file.exists()) { FileInputStream in = new FileInputStream(file); try { BufferedReader br = new BufferedReader(new InputStreamReader(in, cs)); List<String> lines = new ArrayList<String>(); String line = null; while ((line = br.readLine()) != null) { lines.add(line); } return lines; } finally { in.close(); } } else { return new ArrayList<String>(); } } /** * Writes all <code>lines</code> given to the <code>file</code> using the given character set <code>cs</code> and optionally appends them * to an existing file, if <code>append</code> is set to <code>true</code>. The default platform line separator will be used. * * @param file the file being written/appended * @param lines the lines to be written without any line separators * @param cs the character set used for writing * @param append <code>true</code> if a existing file should be appended, <code>false</code> otherwise * @throws IOException if the write operation fails */ public static void writeLines(File file, List<String> lines, Charset cs, boolean append) throws IOException { FileOutputStream out = new FileOutputStream(file, append); try { PrintWriter pw = new PrintWriter(new OutputStreamWriter(out, cs)); try { for (String line : lines) { pw.append(line).println(); } } finally { pw.close(); } } finally { out.close(); } } }
applies Eclipse built-in formatting consistently
rtc2git.cli.extension/src/to/rtc/cli/migrate/util/Files.java
applies Eclipse built-in formatting consistently
<ide><path>tc2git.cli.extension/src/to/rtc/cli/migrate/util/Files.java <ide> */ <ide> public class Files { <ide> <del> /** <del> * Reads all lines of the given <code>file</code> using the given character set <code>cs</code> and returns them as a list without any of <del> * the line separators. <del> * <del> * @param file the file being read <del> * @param cs the character used for reading <del> * @return a list of all read lines (empty lines as empty string) <del> * @throws IOException if the read operation fails <del> */ <del> public static List<String> readLines(File file, Charset cs) throws IOException { <del> if (file.exists()) { <del> FileInputStream in = new FileInputStream(file); <del> try { <del> BufferedReader br = new BufferedReader(new InputStreamReader(in, cs)); <del> List<String> lines = new ArrayList<String>(); <del> String line = null; <del> while ((line = br.readLine()) != null) { <del> lines.add(line); <del> } <del> return lines; <del> } finally { <del> in.close(); <del> } <del> } else { <del> return new ArrayList<String>(); <del> } <del> } <add> /** <add> * Reads all lines of the given <code>file</code> using the given character <add> * set <code>cs</code> and returns them as a list without any of the line <add> * separators. <add> * <add> * @param file <add> * the file being read <add> * @param cs <add> * the character used for reading <add> * @return a list of all read lines (empty lines as empty string) <add> * @throws IOException <add> * if the read operation fails <add> */ <add> public static List<String> readLines(File file, Charset cs) <add> throws IOException { <add> if (file.exists()) { <add> FileInputStream in = new FileInputStream(file); <add> try { <add> BufferedReader br = new BufferedReader(new InputStreamReader( <add> in, cs)); <add> List<String> lines = new ArrayList<String>(); <add> String line = null; <add> while ((line = br.readLine()) != null) { <add> lines.add(line); <add> } <add> return lines; <add> } finally { <add> in.close(); <add> } <add> } else { <add> return new ArrayList<String>(); <add> } <add> } <ide> <del> /** <del> * Writes all <code>lines</code> given to the <code>file</code> using the given character set <code>cs</code> and optionally appends them <del> * to an existing file, if <code>append</code> is set to <code>true</code>. The default platform line separator will be used. <del> * <del> * @param file the file being written/appended <del> * @param lines the lines to be written without any line separators <del> * @param cs the character set used for writing <del> * @param append <code>true</code> if a existing file should be appended, <code>false</code> otherwise <del> * @throws IOException if the write operation fails <del> */ <del> public static void writeLines(File file, List<String> lines, Charset cs, boolean append) throws IOException { <del> FileOutputStream out = new FileOutputStream(file, append); <del> try { <del> PrintWriter pw = new PrintWriter(new OutputStreamWriter(out, cs)); <del> try { <del> for (String line : lines) { <del> pw.append(line).println(); <del> } <del> } finally { <del> pw.close(); <del> } <del> } finally { <del> out.close(); <del> } <del> } <add> /** <add> * Writes all <code>lines</code> given to the <code>file</code> using the <add> * given character set <code>cs</code> and optionally appends them to an <add> * existing file, if <code>append</code> is set to <code>true</code>. The <add> * default platform line separator will be used. 
<add> * <add> * @param file <add> * the file being written/appended <add> * @param lines <add> * the lines to be written without any line separators <add> * @param cs <add> * the character set used for writing <add> * @param append <add> * <code>true</code> if a existing file should be appended, <add> * <code>false</code> otherwise <add> * @throws IOException <add> * if the write operation fails <add> */ <add> public static void writeLines(File file, List<String> lines, Charset cs, <add> boolean append) throws IOException { <add> FileOutputStream out = new FileOutputStream(file, append); <add> try { <add> PrintWriter pw = new PrintWriter(new OutputStreamWriter(out, cs)); <add> try { <add> for (String line : lines) { <add> pw.append(line).println(); <add> } <add> } finally { <add> pw.close(); <add> } <add> } finally { <add> out.close(); <add> } <add> } <ide> }
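A short usage sketch for the Files helper in the record above; the file name .gitignore and the two written lines are illustrative assumptions. writeLines with append set to false overwrites the target using the platform line separator, and readLines returns an empty list when the file does not exist.

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

import to.rtc.cli.migrate.util.Files;

public class FilesUsageSketch {
    public static void main(String[] args) throws Exception {
        File ignoreFile = new File(".gitignore"); // illustrative target file
        List<String> lines = Arrays.asList("/bin", "/target");

        // Overwrite (append = false) the file with the two lines above, encoded as UTF-8.
        Files.writeLines(ignoreFile, lines, StandardCharsets.UTF_8, false);

        // Read the lines back; a missing file would simply yield an empty list.
        for (String line : Files.readLines(ignoreFile, StandardCharsets.UTF_8)) {
            System.out.println(line);
        }
    }
}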
Java
lgpl-2.1
7548fcc2c24eea41c4e22f9e2312dcc308a1f741
0
xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package com.xpn.xwiki.api; import com.sun.image.codec.jpeg.JPEGCodec; import com.sun.image.codec.jpeg.JPEGImageEncoder; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.XWikiException; import com.xpn.xwiki.user.api.XWikiUser; import com.xpn.xwiki.util.Util; import com.xpn.xwiki.plugin.query.XWikiQuery; import com.xpn.xwiki.plugin.query.XWikiCriteria; import com.xpn.xwiki.doc.XWikiDeletedDocument; import com.xpn.xwiki.doc.XWikiDocument; import com.xpn.xwiki.objects.meta.MetaClass; import com.xpn.xwiki.stats.api.XWikiStatsService; import com.xpn.xwiki.stats.impl.DocumentStats; import com.xpn.xwiki.web.Utils; import com.xpn.xwiki.web.XWikiEngineContext; import org.suigeneris.jrcs.diff.delta.Chunk; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.awt.image.BufferedImage; import java.io.IOException; import java.io.OutputStream; import java.lang.Object; import java.util.*; public class XWiki extends Api { protected static final Log LOG = LogFactory.getLog(XWiki.class); private com.xpn.xwiki.XWiki xwiki; /** * @see #getStatsService() */ private StatsService statsService; /** * XWiki API Constructor * * @param xwiki XWiki Main Object to wrap * @param context XWikiContext to wrap */ public XWiki(com.xpn.xwiki.XWiki xwiki, XWikiContext context) { super(context); this.xwiki = xwiki; this.statsService = new StatsService(context); } /** * Priviledge API allowing to access the underlying main XWiki Object * * @return Priviledged Main XWiki Object */ public com.xpn.xwiki.XWiki getXWiki() { if (hasProgrammingRights()) return xwiki; return null; } /** * @return XWiki's version in the format <code>(version).(SVN build number)</code>, or * "Unknown version" if it failed to be retrieved */ public String getVersion() { return xwiki.getVersion(); } /** * API Allowing to access the current request URL being requested * * @return URL * @throws XWikiException */ public String getRequestURL() throws XWikiException { return getXWikiContext().getURLFactory().getRequestURL(getXWikiContext()).toString(); } /** * Loads an Document from the database. Rights are checked before sending back the document. 
* * @param fullname Fullname of the XWiki document to be loaded * @return a Document object or null if it is not accessible * @throws XWikiException */ public Document getDocument(String fullname) throws XWikiException { XWikiDocument doc = xwiki.getDocument(fullname, getXWikiContext()); if (xwiki.getRightService().hasAccessLevel("view", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext()) == false) { return null; } Document newdoc = doc.newDocument(getXWikiContext()); return newdoc; } /** * @return all deleted documents in recycle bin * @param fullname - {@link XWikiDocument#getFullName()} * @param lang - {@link XWikiDocument#getLanguage()} * @throws XWikiException if any error */ public List getDeletedDocuments(String fullname, String lang) throws XWikiException { XWikiDeletedDocument[] dds = xwiki.getDeletedDocuments(fullname, lang, context); if (dds == null || dds.length == 0) { return Collections.EMPTY_LIST; } List result = new ArrayList(dds.length); for (int i = 0; i < dds.length; i++) { result.add(new DeletedDocument(dds[i], context)); } return result; } /** * @return specified documents in recycle bin * @param fullname - {@link XWikiDocument#getFullName()} * @param lang - {@link XWikiDocument#getLanguage()} * @throws XWikiException if any error */ public DeletedDocument getDeletedDocument(String fullname, String lang, String index) throws XWikiException { XWikiDeletedDocument dd = xwiki.getDeletedDocument(fullname, lang, Integer.parseInt(index), context); if (dd == null) { return null; } return new DeletedDocument(dd, context); } /** * Returns wether a document exists or not * * @param fullname Fullname of the XWiki document to be loaded * @return true if the document exists, false if not * @throws XWikiException */ public boolean exists(String fullname) throws XWikiException { return xwiki.exists(fullname, getXWikiContext()); } /** * Verify the rights the current user has on a document. If the document requires rights and the * user is not authenticated he will be redirected to the login page. * * @param docname fullname of the document * @param right right to check ("view", "edit", "admin", "delete") * @return true if it exists */ public boolean checkAccess(String docname, String right) { try { XWikiDocument doc = getXWikiContext().getWiki().getDocument(docname, context); return getXWikiContext().getWiki().checkAccess(right, doc, getXWikiContext()); } catch (XWikiException e) { return false; } } /** * Loads an Document from the database. Rights are checked before sending back the document. 
* * @param web Space to use in case no space is defined in the fullname * @param fullname Fullname or relative name of the document to load * @return a Document object or null if it is not accessible * @throws XWikiException */ public Document getDocument(String web, String fullname) throws XWikiException { XWikiDocument doc = xwiki.getDocument(web, fullname, getXWikiContext()); if (xwiki.getRightService().hasAccessLevel("view", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext()) == false) { return null; } Document newdoc = doc.newDocument(getXWikiContext()); return newdoc; } /** * Load a specific revision of a document * * @param doc Document for which to load a specific revision * @param rev Revision number * @return Specific revision of a document * @throws XWikiException */ public Document getDocument(Document doc, String rev) throws XWikiException { if ((doc == null) || (doc.getDoc() == null)) return null; if (xwiki.getRightService().hasAccessLevel("view", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext()) == false) { // Finally we return null, otherwise showing search result is a real pain return null; } try { XWikiDocument revdoc = xwiki.getDocument(doc.getDoc(), rev, getXWikiContext()); Document newdoc = revdoc.newDocument(getXWikiContext()); return newdoc; } catch (Exception e) { // Can't read versioned document e.printStackTrace(); return null; } } /** * Transform a text in a form compatible text * * @param content text to transform * @return encoded result */ public String getFormEncoded(String content) { return com.xpn.xwiki.XWiki.getFormEncoded(content); } /** * Transform a text in a URL compatible text * * @param content text to transform * @return encoded result */ public String getURLEncoded(String content) { return xwiki.getURLEncoded(content); } /** * Transform a text in a XML compatible text * * @param content text to transform * @return encoded result */ public String getXMLEncoded(String content) { return com.xpn.xwiki.XWiki.getXMLEncoded(content); } /** * API to protect Text from Wiki transformation * @param text * @return escaped text */ public String escapeText(String text) { return Util.escapeText(text); } /** * API to protect URLs from Wiki transformation * @param url * @return encoded URL */ public String escapeURL(String url) { return Util.escapeURL(url); } /** * Output content in the edit content textarea * * @param content content to output * @return the textarea text content */ public String getTextArea(String content) { return com.xpn.xwiki.XWiki.getTextArea(content, getXWikiContext()); } /** * Output content in the edit content htmlarea * * @param content content to output * @return the htmlarea text content */ public String getHTMLArea(String content) { return xwiki.getHTMLArea(content, getXWikiContext()); } /** * Get the list of available classes in the wiki * * @return list of classes names * @throws XWikiException */ public List getClassList() throws XWikiException { return xwiki.getClassList(getXWikiContext()); } /** * Get the global MetaClass object * * @return MetaClass object */ public MetaClass getMetaclass() { return xwiki.getMetaclass(); } /** * Priviledged API allowing to run a search on the database returning a list of data This search * is send to the store engine (Hibernate HQL, JCR XPATH or other) * * @param wheresql Query to be run (HQL, XPath) * @return A list of rows (Object[]) * @throws XWikiException */ public List search(String wheresql) throws XWikiException { if (hasProgrammingRights()) return 
xwiki.search(wheresql, getXWikiContext()); return Collections.EMPTY_LIST; } /** * Priviledged API allowing to run a search on the database returning a list of data This search * is send to the store engine (Hibernate HQL, JCR XPATH or other) * * @param wheresql Query to be run (HQL, XPath) * @param nb return only 'nb' rows * @param start skip the 'start' first elements * @return A list of rows (Object[]) * @throws XWikiException */ public List search(String wheresql, int nb, int start) throws XWikiException { if (hasProgrammingRights()) return xwiki.search(wheresql, nb, start, getXWikiContext()); return Collections.EMPTY_LIST; } /** * API allowing to search for document names matching a query. * Examples: * <ul> * <li>Query: <code>where doc.web='Main' order by doc.creationDate desc</code>. * Result: All the documents in space 'Main' ordered by the creation date from the most * recent</li> * <li>Query: <code>where doc.name like '%sport%' order by doc.name asc</code>. * Result: All the documents containing 'sport' in their name ordered by document * name</li> * <li>Query: <code>where doc.content like '%sport%' order by doc.author</code> * Result: All the documents containing 'sport' in their content ordered by the * author</li> * <li>Query: <code>where doc.creator = 'XWiki.LudovicDubost' order by doc.creationDate * desc</code>. * Result: All the documents with creator LudovicDubost ordered by the creation date * from the most recent</li> * <li>Query: <code>where doc.author = 'XWiki.LudovicDubost' order by doc.date desc</code>. * Result: All the documents with last author LudovicDubost ordered by the last * modification date from the most recent.</li> * <li>Query: <code>,BaseObject as obj where doc.fullName=obj.name and * obj.className='XWiki.XWikiComments' order by doc.date desc</code>. * Result: All the documents with at least one comment ordered by the last modification * date from the most recent</li> * <li>Query: <code>,BaseObject as obj, StringProperty as prop where * doc.fullName=obj.name and obj.className='XWiki.XWikiComments' and obj.id=prop.id.id * and prop.id.name='author' and prop.value='XWiki.LudovicDubost' order by doc.date * desc</code>. * Result: All the documents with at least one comment from LudovicDubost ordered by the * last modification date from the most recent</li> * </ul> * * @param wheresql Query to be run (either starting with ", BaseObject as obj where.." or by * "where ..." * @return List of document names matching (Main.Page1, Main.Page2) * @throws XWikiException */ public List searchDocuments(String wheresql) throws XWikiException { return xwiki.getStore().searchDocumentsNames(wheresql, getXWikiContext()); } /** * API allowing to search for document names matching a query return only a limited number of * elements and skipping the first rows. The query part is the same as searchDocuments * * @param wheresql query to use similar to searchDocuments(wheresql) * @param nb return only 'nb' rows * @param start skip the first 'start' rows * @return List of document names matching * @throws XWikiException * @see List searchDocuments(String where sql) */ public List searchDocuments(String wheresql, int nb, int start) throws XWikiException { return xwiki.getStore().searchDocumentsNames(wheresql, nb, start, getXWikiContext()); } /** * Priviledged API allowing to search for document names matching a query return only a limited * number of elements and skipping the first rows. 
The return values contain the list of columns * spciefied in addition to the document space and name The query part is the same as * searchDocuments * * @param wheresql query to use similar to searchDocuments(wheresql) * @param nb return only 'nb' rows * @param start skip the first 'start' rows * @param selectColumns List of columns to add to the result * @return List of Object[] with the column values of the matching rows * @throws XWikiException */ public List searchDocuments(String wheresql, int nb, int start, String selectColumns) throws XWikiException { if (hasProgrammingRights()) return xwiki.getStore().searchDocumentsNames(wheresql, nb, start, selectColumns, getXWikiContext()); return Collections.EMPTY_LIST; } /** * API allowing to search for documents allowing to have mutliple entries per language * * @param wheresql query to use similar to searchDocuments(wheresql) * @param distinctbylanguage true to return multiple rows per language * @return List of Document object matching * @throws XWikiException */ public List searchDocuments(String wheresql, boolean distinctbylanguage) throws XWikiException { return wrapDocs(xwiki.getStore().searchDocuments(wheresql, distinctbylanguage, getXWikiContext())); } /** * API allowing to search for documents allowing to have mutliple entries per language * * @param wheresql query to use similar to searchDocuments(wheresql) * @param distinctbylanguage true to return multiple rows per language * @return List of Document object matching * @param nb return only 'nb' rows * @param start skip the first 'start' rows * @throws XWikiException */ public List searchDocuments(String wheresql, boolean distinctbylanguage, int nb, int start) throws XWikiException { return wrapDocs(xwiki.getStore().searchDocuments(wheresql, distinctbylanguage, nb, start, getXWikiContext())); } /** * Search documents by passing HQL where clause values as parameters. This allows generating * a Named HQL query which will automatically encode the passed values (like escaping single * quotes). This API is recommended to be used over the other similar methods where the values * are passed inside the where clause and for which you'll need to do the encoding/escpaing * yourself before calling them. * * <p>Example</p> * <pre><code> * #set($orphans = $xwiki.searchDocuments(" where doc.fullName <> ? and (doc.parent = ? or " * + "(doc.parent = ? and doc.web = ?))", * ["${doc.fullName}as", ${doc.fullName}, ${doc.name}, ${doc.web}])) * </code></pre> * * @param parametrizedSqlClause the HQL where clause. For example <code>" where doc.fullName * <> ? and (doc.parent = ? or (doc.parent = ? and doc.web = ?))"</code> * @param nb the number of rows to return. If 0 then all rows are returned * @param start the number of rows to skip. If 0 don't skip any row * @param parameterValues the where clause values that replace the question marks (?) * @return a list of document names * @throws XWikiException in case of error while performing the query */ public List searchDocuments(String parametrizedSqlClause, int nb, int start, List parameterValues) throws XWikiException { return xwiki.getStore().searchDocumentsNames(parametrizedSqlClause, nb, start, parameterValues, getXWikiContext()); } /** * Same as {@link #searchDocuments(String, int, int, java.util.List)} but returns all rows. 
* * @see #searchDocuments(String, int, int, java.util.List) */ public List searchDocuments(String parametrizedSqlClause, List parameterValues) throws XWikiException { return xwiki.getStore().searchDocumentsNames(parametrizedSqlClause, parameterValues, getXWikiContext()); } /** * Function to wrap a list of XWikiDocument into Document objects * * @param docs list of XWikiDocument * @return list of Document objects */ public List wrapDocs(List docs) { List result = new ArrayList(); if (docs != null) { for (Iterator iter = docs.iterator(); iter.hasNext();) { Object obj = iter.next(); try { if (obj instanceof XWikiDocument) { XWikiDocument doc = (XWikiDocument) obj; Document wrappedDoc = doc.newDocument(getXWikiContext()); result.add(wrappedDoc); } else if (obj instanceof Document) { result.add(obj); } else if (obj instanceof String) { Document doc = getDocument(obj.toString()); if(doc != null) { result.add(doc); } } } catch (XWikiException ex) { } } } return result; } /** * API allowing to parse a text content to evaluate velocity scripts * * @param content * @return evaluated content if the content contains velocity scripts */ public String parseContent(String content) { return xwiki.parseContent(content, getXWikiContext()); } /** * API to parse the message being stored in the Context A message can be an error message or an * information message either as text or as a message ID pointing to ApplicationResources The * message is also parse for velocity scripts * * @return Final message */ public String parseMessage() { return xwiki.parseMessage(getXWikiContext()); } /** * API to parse a message A message can be an error message or an information message either as * text or as a message ID pointing to ApplicationResources The message is also parse for * velocity scripts * * @return Final message * @param id * @return the result of the parsed message */ public String parseMessage(String id) { return xwiki.parseMessage(id, getXWikiContext()); } /** * API to get a message A message can be an error message or an information message either as * text or as a message ID pointing to ApplicationResources The message is also parsed for * velocity scripts * * @return Final message * @param id * @return the result of the parsed message */ public String getMessage(String id) { return xwiki.getMessage(id, getXWikiContext()); } /** * API to parse a velocity template provided by the current Skin The template is first looked in * the skin active for the user, the space or the wiki. If the template does not exist in that * skin, the template is looked up in the "parent skin" of the skin * * @param template Template name ("view", "edit", "comment") * @return Evaluated content from the template */ public String parseTemplate(String template) { return xwiki.parseTemplate(template, getXWikiContext()); } /** * API to render a velocity template provided by the current Skin The template is first looked * in the skin active for the user, the space or the wiki. If the template does not exist in * that skin, the template is looked up in the "parent skin" of the skin * * @param template Template name ("view", "edit", "comment") * @return Evaluated content from the template */ public String renderTemplate(String template) { return xwiki.renderTemplate(template, getXWikiContext()); } /** * Designed to include dynamic content, such as Servlets or JSPs, inside Velocity templates; * works by creating a RequestDispatcher, buffering the output, then returning it as a string. 
* * @param url URL of the servlet * @return text result of the servlet */ public String invokeServletAndReturnAsString(String url) { return xwiki.invokeServletAndReturnAsString(url, getXWikiContext()); } /** * Return the URL of the static file provided by the current skin The file is first looked in * the skin active for the user, the space or the wiki. If the file does not exist in that skin, * the file is looked up in the "parent skin" of the skin. The file can be a CSS file, an image * file, a javascript file, etc. * * @param filename Filename to be looked up in the skin (logo.gif, style.css) * @return URL to access this file */ public String getSkinFile(String filename) { return xwiki.getSkinFile(filename, getXWikiContext()); } /** * Return the URL of the static file provided by the current skin The file is first looked in * the skin active for the user, the space or the wiki. If the file does not exist in that skin, * the file is looked up in the "parent skin" of the skin. The file can be a CSS file, an image * file, a javascript file, etc. * * @param filename Filename to be looked up in the skin (logo.gif, style.css) * @param forceSkinAction true to make sure that static files are retrieved through the skin * action, to allow parsing of velocity on CSS files * @return URL to access this file */ public String getSkinFile(String filename, boolean forceSkinAction) { return xwiki.getSkinFile(filename, forceSkinAction, getXWikiContext()); } /** * API to retrieve the current skin for this request and user The skin is first derived from the * request "skin" parameter If this parameter does not exist, the user preference "skin" is * looked up If this parameter does not exist or is empty, the space preference "skin" is looked * up If this parameter does not exist or is empty, the XWiki preference "skin" is looked up If * this parameter does not exist or is empty, the xwiki.cfg parameter xwiki.defaultskin is * looked up If this parameter does not exist or is empty, the xwiki.cfg parameter * xwiki.defaultbaseskin is looked up If this parameter does not exist or is empty, the skin is * "albatross" * * @return The current skin for this request and user */ public String getSkin() { return xwiki.getSkin(getXWikiContext()); } /** * API to retrieve the current skin for this request and user. Each skin has a skin it is based * on. If not the base skin is the xwiki.cfg parameter "xwiki.defaultbaseskin". If this * parameter does not exist or is empty, the base skin is "albatross". * * @return The current baseskin for this request and user */ public String getBaseSkin() { return xwiki.getBaseSkin(getXWikiContext()); } /** * API to access the copyright for this space. The copyright is read in the space preferences. * If it does not exist or is empty it is read from the XWiki preferences. 
* * @return the text for the copyright */ public String getWebCopyright() { return xwiki.getWebCopyright(getXWikiContext()); } /** * API to access an XWiki Preference There can be one preference object per language This * function will find the right preference object associated to the current active language * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getXWikiPreference(String prefname) { return xwiki.getXWikiPreference(prefname, getXWikiContext()); } /** * API to access an XWiki Preference There can be one preference object per language This * function will find the right preference object associated to the current active language * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language */ public String getXWikiPreference(String prefname, String default_value) { return xwiki.getXWikiPreference(prefname, default_value, getXWikiContext()); } /** * API to access an Space Preference There can be one preference object per language This * function will find the right preference object associated to the current active language If * no preference is found it will look in the XWiki Preferences * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getWebPreference(String prefname) { return xwiki.getWebPreference(prefname, getXWikiContext()); } /** * API to access an Space Preference There can be one preference object per language This * function will find the right preference object associated to the current active language If * no preference is found it will look in the XWiki Preferences * * @param prefname Preference name * @param space The space for which this preference is requested * @return The preference for this wiki and the current language */ public String getWebPreferenceFor(String prefname, String space) { return xwiki.getWebPreference(prefname, space, "", getXWikiContext()); } /** * API to access an Space Preference There can be one preference object per language This * function will find the right preference object associated to the current active language If * no preference is found it will look in the XWiki Preferences * * @param prefname Preference name * @param default_value default value to return if the preference does not exist or is empty * @return The preference for this wiki and the current language */ public String getWebPreference(String prefname, String default_value) { return xwiki.getWebPreference(prefname, default_value, getXWikiContext()); } /** * API to access a Skin Preference The skin object is the current user's skin * * @param prefname Preference name * @return The preference for the current skin */ public String getSkinPreference(String prefname) { return xwiki.getSkinPreference(prefname, getXWikiContext()); } /** * API to access a Skin Preference The skin object is the current user's skin * * @param prefname Preference name * @param default_value default value to return if the preference does not exist or is empty * @return The preference for the current skin */ public String getSkinPreference(String prefname, String default_value) { return xwiki.getSkinPreference(prefname, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current 
active * language * * @param prefname Preference name * @param space The space for which this preference is requested * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in long format */ public String getWebPreferenceFor(String prefname, String space, String default_value) { return xwiki.getWebPreference(prefname, space, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in long format */ public long getXWikiPreferenceAsLong(String prefname, long default_value) { return xwiki.getXWikiPreferenceAsLong(prefname, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @return The preference for this wiki and the current language in long format */ public long getXWikiPreferenceAsLong(String prefname) { return xwiki.getXWikiPreferenceAsLong(prefname, getXWikiContext()); } /** * API to access an Web Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in long format */ public long getWebPreferenceAsLong(String prefname, long default_value) { return xwiki.getWebPreferenceAsLong(prefname, default_value, getXWikiContext()); } /** * API to access an Web Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @return The preference for this wiki and the current language in long format */ public long getWebPreferenceAsLong(String prefname) { return xwiki.getWebPreferenceAsLong(prefname, getXWikiContext()); } /** * API to access an XWiki Preference as an int number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in int format */ public int getXWikiPreferenceAsInt(String prefname, int default_value) { return xwiki.getXWikiPreferenceAsInt(prefname, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a int number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @return The preference for this wiki and the current language in int format */ public int getXWikiPreferenceAsInt(String 
prefname) { return xwiki.getXWikiPreferenceAsInt(prefname, getXWikiContext()); } /** * API to access an Web Preference as a int number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in int format */ public int getWebPreferenceAsInt(String prefname, int default_value) { return xwiki.getWebPreferenceAsInt(prefname, default_value, getXWikiContext()); } /** * API to access an Web Preference as a int number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @return The preference for this wiki and the current language in int format */ public int getWebPreferenceAsInt(String prefname) { return xwiki.getWebPreferenceAsInt(prefname, getXWikiContext()); } /** * API to access a User Preference This function will look in the User profile for the * preference If no preference is found it will look in the Space Preferences If no preference * is found it will look in the XWiki Preferences * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getUserPreference(String prefname) { return xwiki.getUserPreference(prefname, getXWikiContext()); } /** * API to access a User Preference from cookie This function will look in the session cookie for * the preference * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getUserPreferenceFromCookie(String prefname) { return xwiki.getUserPreferenceFromCookie(prefname, getXWikiContext()); } /** * First try to find the current language in use from the XWiki context. If none is used * and if the wiki is not multilingual use the default language defined in the XWiki * preferences. If the wiki is multilingual try to get the language passed in the request. * If none was passed try to get it from a cookie. If no language cookie exists then use the * user default language and barring that use the browser's "Accept-Language" header sent in * HTTP request. If none is defined use the default language. 
* * @return the language to use */ public String getLanguagePreference() { return xwiki.getLanguagePreference(getXWikiContext()); } /** * @deprecated use {@link #getLanguagePreference()} instead */ public String getDocLanguagePreference() { return xwiki.getDocLanguagePreference(getXWikiContext()); } /** * API to access the interface language preference for the request Order of evaluation is: * Language of the wiki in mono-lingual mode language request paramater language in context * language user preference language in cookie language accepted by the navigator * * @return the document language preference for the request */ public String getInterfaceLanguagePreference() { return xwiki.getInterfaceLanguagePreference(getXWikiContext()); } /** * API to check if wiki is in multi-wiki mode (virtual) * * @return true for multi-wiki/false for mono-wiki */ public boolean isVirtual() { return xwiki.isVirtual(); } /** * API to check is wiki is multi-lingual * * @return true for multi-lingual/false for mono-lingual */ public boolean isMultiLingual() { return xwiki.isMultiLingual(getXWikiContext()); } /** * Priviledged API to flush the cache of the Wiki installation This flushed the cache of all * wikis, all plugins, all renderers */ public void flushCache() { if (hasProgrammingRights()) xwiki.flushCache(getXWikiContext()); } /** * Priviledged API to reset the rendenring engine This would restore the rendering engine * evaluation loop and take into account new configuration parameters */ public void resetRenderingEngine() { if (hasProgrammingRights()) try { xwiki.resetRenderingEngine(getXWikiContext()); } catch (XWikiException e) { } } /** * Priviledged API to create a new user from the request This API is used by RegisterNewUser * wiki page * * @return true for success/false for failure * @throws XWikiException */ public int createUser() throws XWikiException { return createUser(false, "edit"); } /** * Priviledged API to create a new user from the request This API is used by RegisterNewUser * wiki page This version sends a validation email to the user Configuration of validation email * is in the XWiki Preferences * * @param withValidation true to send the validationemail * @return true for success/false for failure * @throws XWikiException */ public int createUser(boolean withValidation) throws XWikiException { return createUser(withValidation, "edit"); } /** * Priviledged API to create a new user from the request This API is used by RegisterNewUser * wiki page This version sends a validation email to the user Configuration of validation email * is in the XWiki Preferences * * @param withValidation true to send the validation email * @param userRights Rights to set for the user for it's own page(defaults to "edit") * @return true for success/false for failure * @throws XWikiException */ public int createUser(boolean withValidation, String userRights) throws XWikiException { boolean registerRight; try { // So, what's the register right for? This says that if the creator of the page // (Admin) has programming rights, anybody can register. Is this OK? 
if (hasProgrammingRights()) { registerRight = true; } else { registerRight = xwiki.getRightService().hasAccessLevel("register", getXWikiContext().getUser(), "XWiki.XWikiPreferences", getXWikiContext()); } if (registerRight) return xwiki.createUser(withValidation, userRights, getXWikiContext()); return -1; } catch (Exception e) { e.printStackTrace(); return -2; } } /** * Priviledged API to create a new Wiki from an existing wiki This creates the database, copies * to documents from a existing wiki Assigns the admin rights, creates the Wiki identification * page in the main wiki * * @param wikiName Wiki Name to create * @param wikiUrl Wiki URL to accept requests from * @param wikiAdmin Wiki admin user * @param baseWikiName Wiki to copy documents from * @param failOnExist true to fail if the wiki already exists, false to overwrite * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int createNewWiki(String wikiName, String wikiUrl, String wikiAdmin, String baseWikiName, boolean failOnExist) throws XWikiException { return createNewWiki(wikiName, wikiUrl, wikiAdmin, baseWikiName, "", null, failOnExist); } /** * Priviledged API to create a new Wiki from an existing wiki This creates the database, copies * to documents from a existing wiki Assigns the admin rights, creates the Wiki identification * page in the main wiki * * @param wikiName Wiki Name to create * @param wikiUrl Wiki URL to accept requests from * @param wikiAdmin Wiki admin user * @param baseWikiName Wiki to copy documents from * @param description Description of the Wiki * @param failOnExist true to fail if the wiki already exists, false to overwrite * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int createNewWiki(String wikiName, String wikiUrl, String wikiAdmin, String baseWikiName, String description, boolean failOnExist) throws XWikiException { return createNewWiki(wikiName, wikiUrl, wikiAdmin, baseWikiName, description, null, failOnExist); } /** * Priviledged API to create a new Wiki from an existing wiki This creates the database, copies * to documents from a existing wiki Assigns the admin rights, creates the Wiki identification * page in the main wiki Copy is limited to documents of a specified language. 
If a document for * the language is not found, the default language document is used * * @param wikiName Wiki Name to create * @param wikiUrl Wiki URL to accept requests from * @param wikiAdmin Wiki admin user * @param baseWikiName Wiki to copy documents from * @param description Description of the Wiki * @param language Language to copy * @param failOnExist true to fail if the wiki already exists, false to overwrite * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int createNewWiki(String wikiName, String wikiUrl, String wikiAdmin, String baseWikiName, String description, String language, boolean failOnExist) throws XWikiException { if (hasProgrammingRights()) return xwiki.createNewWiki(wikiName, wikiUrl, wikiAdmin, baseWikiName, description, language, failOnExist, getXWikiContext()); return -1; } /** * Priviledged API to validate the return code given by a user in response to an email * validation email The validation information are taken from the request object * * @param withConfirmEmail true to send a account confirmation email/false to not send it * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int validateUser(boolean withConfirmEmail) throws XWikiException { return xwiki.validateUser(withConfirmEmail, getXWikiContext()); } /** * Priviledged API to add a user to the XWiki.XWikiAllGroup * * @param fullwikiname user name to add * @throws XWikiException */ public void addToAllGroup(String fullwikiname) throws XWikiException { if (hasProgrammingRights()) xwiki.setUserDefaultGroup(fullwikiname, getXWikiContext()); } /** * Priviledged API to send a confirmation email to a user * * @param xwikiname user to send the email to * @param password password to put in the mail * @param email email to send to * @param add_message Additional message to send to the user * @param contentfield Preference field to use as a mail template * @throws XWikiException if the mail was not send successfully */ public void sendConfirmationMail(String xwikiname, String password, String email, String add_message, String contentfield) throws XWikiException { if (hasProgrammingRights()) xwiki.sendConfirmationEmail(xwikiname, password, email, add_message, contentfield, getXWikiContext()); } /** * Priviledged API to send a confirmation email to a user * * @param xwikiname user to send the email to * @param password password to put in the mail * @param email email to send to * @param contentfield Preference field to use as a mail template * @throws XWikiException if the mail was not send successfully */ public void sendConfirmationMail(String xwikiname, String password, String email, String contentfield) throws XWikiException { if (hasProgrammingRights()) xwiki.sendConfirmationEmail(xwikiname, password, email, "", contentfield, getXWikiContext()); } /** * Priviledged API to send a message to an email address * * @param sender email of the sender of the message * @param recipient email of the recipient of the message * @param message Message to send * @throws XWikiException if the mail was not send successfully */ public void sendMessage(String sender, String recipient, String message) throws XWikiException { if (hasProgrammingRights()) xwiki.sendMessage(sender, recipient, message, getXWikiContext()); } /** * Priviledged API to send a message to an email address * * @param sender email of the sender of the message * @param recipient 
emails of the recipients of the message * @param message Message to send * @throws XWikiException if the mail was not send successfully */ public void sendMessage(String sender, String[] recipient, String message) throws XWikiException { if (hasProgrammingRights()) xwiki.sendMessage(sender, recipient, message, getXWikiContext()); } /** * Priviledged API to copy a document to another document in the same wiki * * @param docname source document * @param targetdocname target document * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname) throws XWikiException { return this.copyDocument(docname, targetdocname, null, null, null, false, false); } /** * Priviledged API to copy a translation of a document to another document in the same wiki * * @param docname source document * @param targetdocname target document * @param wikilanguage language to copy * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname, String wikilanguage) throws XWikiException { return this.copyDocument(docname, targetdocname, null, null, wikilanguage, false, false); } /** * Priviledged API to copy a translation of a document to another document of the same name in * another wiki * * @param docname source document * @param sourceWiki source wiki * @param targetWiki target wiki * @param wikilanguage language to copy * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String sourceWiki, String targetWiki, String wikilanguage) throws XWikiException { return this.copyDocument(docname, docname, sourceWiki, targetWiki, wikilanguage, true, false); } /** * Priviledged API to copy a translation of a document to another document of the same name in * another wiki additionally resetting the version * * @param docname source document * @param sourceWiki source wiki * @param targetWiki target wiki * @param wikilanguage language to copy * @param reset true to reset versions * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname, String sourceWiki, String targetWiki, String wikilanguage, boolean reset) throws XWikiException { return this.copyDocument(docname, targetdocname, sourceWiki, targetWiki, wikilanguage, reset, false); } /** * Priviledged API to copy a translation of a document to another document of the same name in * another wiki additionally resetting the version and overwriting the previous document * * @param docname source document * @param sourceWiki source wiki * @param targetWiki target wiki * @param wikilanguage language to copy * @param reset true to reset versions * @param force true to overwrite the previous document * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname, String sourceWiki, String targetWiki, String wikilanguage, boolean reset, boolean force) throws XWikiException { if (hasProgrammingRights()) { return xwiki.copyDocument(docname, targetdocname, sourceWiki, targetWiki, wikilanguage, reset, force, true, getXWikiContext()); } return false; } /** * Priviledged API to copy a space to another wiki, optionally deleting all document of the 
* target space * * @param web source Space * @param sourceWiki source Wiki * @param targetWiki target Wiki * @param wikiLanguage language to copy * @param clean true to delete all document of the target space * @return number of copied documents * @throws XWikiException if the space was not copied properly */ public int copyWikiWeb(String web, String sourceWiki, String targetWiki, String wikiLanguage, boolean clean) throws XWikiException { if (hasProgrammingRights()) return xwiki.copyWikiWeb(web, sourceWiki, targetWiki, wikiLanguage, clean, getXWikiContext()); return -1; } /** * API to include a topic into another The topic is rendered fully in the context of itself * * @param topic page name of the topic to include * @return the content of the included page * @throws XWikiException if the include failed */ public String includeTopic(String topic) throws XWikiException { return includeTopic(topic, true); } /** * API to execute a form in the context of an including topic The rendering is evaluated in the * context of the including topic All velocity variables are the one of the including topic This * api is usually called using #includeForm in a page, which modifies the behavior of "Edit this * page" button to direct for Form mode (inline) * * @param topic page name of the form to execute * @return the content of the included page * @throws XWikiException if the include failed */ public String includeForm(String topic) throws XWikiException { return includeForm(topic, true); } /** * API to include a topic into another, optionnaly surrounding the content with {pre}{/pre} to * avoid future wiki rendering The topic is rendered fully in the context of itself * * @param topic page name of the topic to include * @param pre true to add {pre} {/pre} * @return the content of the included page * @throws XWikiException if the include failed */ public String includeTopic(String topic, boolean pre) throws XWikiException { if (pre) return "{pre}" + xwiki.include(topic, false, getXWikiContext()) + "{/pre}"; return xwiki.include(topic, false, getXWikiContext()); } /** * API to execute a form in the context of an including topic, optionnaly surrounding the * content with {pre}{/pre} to avoid future wiki rendering The rendering is evaluated in the * context of the including topic All velocity variables are the one of the including topic This * api is usually called using #includeForm in a page, which modifies the behavior of "Edit this * page" button to direct for Form mode (inline) * * @param topic page name of the form to execute * @param pre true to add {pre} {/pre} * @return the content of the included page * @throws XWikiException if the include failed */ public String includeForm(String topic, boolean pre) throws XWikiException { if (pre) return "{pre}" + xwiki.include(topic, true, getXWikiContext()) + "{/pre}"; return xwiki.include(topic, true, getXWikiContext()); } /** * API to check rights on the current document for the current user * * @param level right to check (view, edit, comment, delete) * @return true if right is granted/false if not */ public boolean hasAccessLevel(String level) { try { return xwiki.getRightService().hasAccessLevel(level, getXWikiContext().getUser(), getXWikiContext().getDoc().getFullName(), getXWikiContext()); } catch (Exception e) { return false; } } /** * v * API to check rights on a document for a given user * * @param level right to check (view, edit, comment, delete) * @param user user for which to check the right * @param docname document on which to check the rights 
* @return true if right is granted/false if not */ public boolean hasAccessLevel(String level, String user, String docname) { try { return xwiki.getRightService() .hasAccessLevel(level, user, docname, getXWikiContext()); } catch (Exception e) { return false; } } /** * API to render a text in the context of a document * * @param text text to render * @param doc the text is evaluated in the content of this document * @return evaluated content * @throws XWikiException if the evaluation went wrong */ public String renderText(String text, Document doc) throws XWikiException { return xwiki.getRenderingEngine().renderText(text, doc.getDoc(), getXWikiContext()); } /** * API to render a chunk (difference between two versions * * @param chunk difference between versions to render * @param doc document to use as a context for rendering * @return resuilt of the rendering */ public String renderChunk(Chunk chunk, Document doc) { return renderChunk(chunk, false, doc); } /** * API to render a chunk (difference between two versions * * @param chunk difference between versions to render * @param doc document to use as a context for rendering * @param source true to render the difference as wiki source and not as wiki rendered text * @return resuilt of the rendering */ public String renderChunk(Chunk chunk, boolean source, Document doc) { StringBuffer buf = new StringBuffer(); chunk.toString(buf, "", "\n"); if (source == true) return buf.toString(); try { return xwiki.getRenderingEngine().renderText(buf.toString(), doc.getDoc(), getXWikiContext()); } catch (Exception e) { return buf.toString(); } } /** * API to list the current spaces in thiswiki * * @return a list for strings reprenseting the spaces * @throws XWikiException if something went wrong */ public List getSpaces() throws XWikiException { return xwiki.getSpaces(getXWikiContext()); } /** * API to list all documents in a space * * @param SpaceName space tolest * @return A list of strings to lest the document * @throws XWikiException if the loading went wrong */ public List getSpaceDocsName(String SpaceName) throws XWikiException { return xwiki.getSpaceDocsName(SpaceName, getXWikiContext()); } /** * API to retrieve a java object with the current date * * @return the current date */ public Date getCurrentDate() { return xwiki.getCurrentDate(); } /** * API to retrieve a java object with the current date * * @return the current date */ public Date getDate() { return xwiki.getCurrentDate(); } /** * API to retrieve the time delta in milliseconds between the current date and the time passed * as parameter. * * @param time * @return delta of the time in milliseconds */ public int getTimeDelta(long time) { return xwiki.getTimeDelta(time); } /** * API to convert a date from a time in milliseconds since 01/01/1970 to a Java Date Object * * @param time time in milliseconds since 1970, 00:00:00 GMT * @return Date object */ public Date getDate(long time) { return xwiki.getDate(time); } /** * API to split a text to an array of texts, according to a separator * * @param str original text * @param sep separator characters. 
The separator is one or more of the separator characters * @return An array of the split text */ public String[] split(String str, String sep) { return xwiki.split(str, sep); } /** * API to retrieve an exception stack trace in a String * * @param e Exception to retrieve the stack trace from * @return Text showing the exception stack trace */ public String printStrackTrace(Throwable e) { return xwiki.printStrackTrace(e); } /** * API to retrieve the current encoding of the wiki engine. The encoding is stored in xwiki.cfg. * Default encoding is ISO-8859-1 * * @return encoding active in this wiki */ public String getEncoding() { return xwiki.getEncoding(); } /** * API to retrieve a NULL object. This is useful in Velocity where there is no real null object * for comparisons * * @return A null Object */ public Object getNull() { return null; } /** * API to retrieve a New Line character. This is useful in Velocity where there is no real new * line character for inclusion in texts * * @return A new line character */ public String getNl() { return "\n"; } /** * API to retrieve the URL of an attached file in a Wiki Document. The URL is generated * differently depending on the environment (Servlet, Portlet, PDF, etc.). The URL generation * can be modified by implementing a new XWikiURLFactory object. For compatibility with any * target environment (and especially the portlet environment) it is important to always use * the URL functions to generate URLs and never hardcode URLs * * @param fullname page name which includes the attached file * @param filename attached filename to create a link for * @return a URL as a string pointing to the filename * @throws XWikiException if the URL could not be generated properly */ public String getAttachmentURL(String fullname, String filename) throws XWikiException { return xwiki.getAttachmentURL(fullname, filename, getXWikiContext()); } /** * API to retrieve the URL of a Wiki Document in view mode. The URL is generated differently * depending on the environment (Servlet, Portlet, PDF, etc.). The URL generation can be * modified by implementing a new XWikiURLFactory object. For compatibility with any target * environment (and especially the portlet environment) it is important to always use the URL * functions to generate URLs and never hardcode URLs * * @param fullname page name which includes the attached file * @return a URL as a string pointing to the wiki document in view mode * @throws XWikiException if the URL could not be generated properly */ public String getURL(String fullname) throws XWikiException { return xwiki.getURL(fullname, "view", getXWikiContext()); } /** * API to retrieve the URL of a Wiki Document in any mode. The URL is generated differently * depending on the environment (Servlet, Portlet, PDF, etc.). The URL generation can be * modified by implementing a new XWikiURLFactory object. For compatibility with any target * environment (and especially the portlet environment) it is important to always use the URL * functions to generate URLs and never hardcode URLs * * @param fullname page name which includes the attached file * @param action mode in which to access the document (view/edit/save/..). Any valid XWiki * action is possible.
* @return a URL as a string pointing to the wiki document in view mode * @throws XWikiException if the URL could not be generated properly */ public String getURL(String fullname, String action) throws XWikiException { return xwiki.getURL(fullname, action, getXWikiContext()); } /** * API to retrieve the URL of an a Wiki Document in any mode, optionally adding a query string * The URL is generated differently depending on the environement (Servlet, Portlet, PDF, etc..) * The URL generation can be modified by implementing a new XWikiURLFactory object The query * string will be modified to be added in the way the environement needs it It is important to * not add the query string parameter manually after a URL Some environements will not accept * this (like the Portlet environement) * * @param fullname page name which includes the attached file * @param action mode in which to access the document (view/edit/save/..). Any valid XWiki * action is possible. * @param querystring Query String to provide in the usual mode (name1=value1&name2=value=2) * including encoding. * @return a URL as a string pointing to the wiki document in view mode * @throws XWikiException if the URL could not be generated properly */ public String getURL(String fullname, String action, String querystring) throws XWikiException { return xwiki.getURL(fullname, action, querystring, getXWikiContext()); } /** * @see #getExoService(String) * @deprecated use {@link #getExoService(String)} instead */ public java.lang.Object getService(String className) throws XWikiException { return getExoService(className); } /** * Privileged API to access an eXo Platform service from the Wiki Engine * * @param className eXo classname to retrieve the service from * @return A object representing the service or null if the user doesn't have programming * rights * @throws XWikiException if the service cannot be loaded * @since 1.1 Beta 1 */ public java.lang.Object getExoService(String className) throws XWikiException { java.lang.Object service = null; if (hasProgrammingRights()) { service = xwiki.getExoService(className); } return service; } /** * @see #getExoPortalService(String) * @deprecated use {@link #getExoPortalService(String)} instead */ public java.lang.Object getPortalService(String className) throws XWikiException { return getExoPortalService(className); } /** * Privileged API to access an eXo Platform Portal service from the Wiki Engine * * @param className eXo classname to retrieve the service from * @return A object representing the service or null if the user doesn't have programming * rights * @throws XWikiException if the service cannot be loaded * @since 1.1 Beta 1 */ public java.lang.Object getExoPortalService(String className) throws XWikiException { java.lang.Object portalService = null; if (hasProgrammingRights()) { portalService = xwiki.getExoPortalService(className); } return portalService; } /** * API to retrieve an List object This is usefull is velocity where you cannot create objects * * @return a java.util.ArrayList object casted to List */ public List getArrayList() { return new ArrayList(); } /** * API to retrieve an Map object This is usefull is velocity where you cannot create objects * * @return a java.util.HashMap object casted to Map */ public Map getHashMap() { return new HashMap(); } public Map getTreeMap() { return new TreeMap(); } /** * API to sort a list over standard comparator. 
Elements need to be mutually comparable and * implement the Comparable interface * * @param list List to sort * @return the sorted list (in the same object) * @see Collections void sort(List list) */ public List sort(List list) { Collections.sort(list); return list; } public Number toNumber(Object o) { try { return new Long(o.toString()); } catch (Exception e) { return null; } } /** * API to generate a random string * * @param size Desired size of the string * @return the generated string */ public String generateRandomString(int size) { return xwiki.generateRandomString(size); } /** * API to output a BufferedImage object into the response output stream. Once this function has * been called, no further action is possible. Users should set $context.setFinished(true) to * avoid template output. The image is output as image/jpeg * * @param image BufferedImage to output * @throws IOException exception if the output fails */ public void outputImage(BufferedImage image) throws IOException { JPEGImageEncoder encoder; OutputStream ostream = getXWikiContext().getResponse().getOutputStream(); encoder = JPEGCodec.createJPEGEncoder(ostream); encoder.encode(image); ostream.flush(); } /** * API to access the current stats for the Wiki for a specific action. It retrieves the number * of times the action was performed for the whole wiki. The statistics module needs to be * activated (xwiki.stats=1 in xwiki.cfg) * * @param action action for which to retrieve statistics (view/save/download) * @return A DocumentStats object with number of actions performed, unique visitors, number of * visits * @deprecated use {@link #getStatsService()} instead */ public DocumentStats getCurrentMonthXWikiStats(String action) { return getXWikiContext().getWiki().getStatsService(getXWikiContext()).getDocMonthStats( "", action, new Date(), getXWikiContext()); } /** * API to retrieve a viewable referer text for a referer. Referers are URLs where users have * clicked on a link to an XWiki page. Search engine referer URLs are transformed to a nicer view * (Google: search query string). For other URLs the http:// part is stripped * * @param referer referer URL to transform * @return A viewable string */ public String getRefererText(String referer) { try { return xwiki.getRefererText(referer, getXWikiContext()); } catch (Exception e) { return ""; } } /** * API to retrieve a viewable referer text for a referer with a maximum length. Referers are URLs * where users have clicked on a link to an XWiki page. Search engine referer URLs are * transformed to a nicer view (Google: search query string). For other URLs the http:// part is * stripped * * @param referer referer URL to transform * @param length Maximum length. "..."
is added to the end of the text * @return A viewable string */ public String getShortRefererText(String referer, int length) { try { return xwiki.getRefererText(referer, getXWikiContext()).substring(0, length); } catch (Exception e) { return xwiki.getRefererText(referer, getXWikiContext()); } } /** * Deprecated API which was retrieving the SQL to represent the fullName Document field * depending on the database used This is not needed anymore and returns 'doc.fullName' for all * databases * * @deprecated * @return "doc.fullName" */ public String getFullNameSQL() { return xwiki.getFullNameSQL(); } /** * API to retrieve a link to the User Name page displayed for the first name and last name of * the user The link will link to the page on the wiki where the user is registered (in virtual * wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user) { return xwiki.getUserName(user, null, getXWikiContext()); } /** * API to retrieve a link to the User Name page displayed with a custom view The link will link * to the page on the wiki where the user is registered (in virtual wiki mode) The formating is * done using the format parameter which can contain velocity scripting and access all * properties of the User profile using variables ($first_name $last_name $email $city) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user, String format) { return xwiki.getUserName(user, format, getXWikiContext()); } /** * API to retrieve a link to the User Name page displayed for the first name and last name of * the user The link will link to the page on the local wiki even if the user is registered on a * different wiki (in virtual wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), null, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, null, getXWikiContext()); } } /** * API to retrieve a link to the User Name page displayed with a custom view The link will link * to the page on the local wiki even if the user is registered on a different wiki (in virtual * wiki mode) The formating is done using the format parameter which can contain velocity * scripting and access all properties of the User profile using variables ($first_name * $last_name $email $city) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user, String format) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), format, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, format, getXWikiContext()); } } /** * API to retrieve a text representing the user with the first name and last name of the user * With the link param set to false it will not link to the user 
page With the link param set to * true, the link will link to the page on the wiki where the user was registered (in virtual * wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user, boolean link) { return xwiki.getUserName(user, null, link, getXWikiContext()); } /** * API to retrieve a text representing the user with a custom view With the link param set to * false it will not link to the user page With the link param set to true, the link will link * to the page on the wiki where the user was registered (in virtual wiki mode) The formating is * done using the format parameter which can contain velocity scripting and access all * properties of the User profile using variables ($first_name $last_name $email $city) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user, String format, boolean link) { return xwiki.getUserName(user, format, link, getXWikiContext()); } /** * API to retrieve a text representing the user with the first name and last name of the user * With the link param set to false it will not link to the user page With the link param set to * true, the link will link to the page on the local wiki even if the user is registered on a * different wiki (in virtual wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user, boolean link) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), null, link, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, null, link, getXWikiContext()); } } /** * API to retrieve a text representing the user with a custom view The formating is done using * the format parameter which can contain velocity scripting and access all properties of the * User profile using variables ($first_name $last_name $email $city) With the link param set to * false it will not link to the user page With the link param set to true, the link will link * to the page on the local wiki even if the user is registered on a different wiki (in virtual * wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user, String format, boolean link) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), format, link, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, format, link, getXWikiContext()); } } public User getUser() { return xwiki.getUser(getXWikiContext()); } public User getUser(String username) { return xwiki.getUser(username, getXWikiContext()); } /** * API allowing to format a date according to 
the default Wiki setting. The date format is * provided in the 'dateformat' parameter of the XWiki Preferences * * @param date date object to format * @return A string with the date formatting from the default Wiki setting */ public String formatDate(Date date) { return xwiki.formatDate(date, null, getXWikiContext()); } /** * API allowing to format a date according to a custom format. The date format is from * java.text.SimpleDateFormat. Example: "dd/MM/yyyy HH:mm:ss" or "d MMM yyyy". If the format is * invalid the default format will be used to show the date * * @param date date to format * @param format format of the date to be used * @return the formatted date * @see java.text.SimpleDateFormat */ public String formatDate(Date date, String format) { return xwiki.formatDate(date, format, getXWikiContext()); } /** * Allows to read the user setting providing the user timezone. All dates will be expressed with this timezone * * @return the timezone */ public String getUserTimeZone() { return xwiki.getUserTimeZone(context); } /** * Returns a plugin from the plugin API. Plugin Rights can be verified. Note that although * this API is a duplicate of {@link #getPlugin(String)} it is used to provide easy access * from Velocity to XWiki plugins. Indeed Velocity has a feature whereby if a class has * a get method, using the dot notation will automatically call the get method for the class. * See http://velocity.apache.org/engine/releases/velocity-1.5/user-guide.html#propertylookuprules. * This allows the following constructs: * <code>$xwiki.pluginName.somePluginMethod()</code> * * @param name Name of the plugin to retrieve (either short or full class name) * @return a plugin object */ public Api get(String name) { return xwiki.getPluginApi(name, getXWikiContext()); } /** * Returns a plugin from the plugin API. Plugin Rights can be verified.
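 * <p>Illustrative sketch from Velocity: the plugin API object can be fetched and checked
 * before use. The plugin name below is a hypothetical placeholder for whatever plugin is
 * actually installed on the wiki:</p>
 * <pre><code>
 * ## "someplugin" is a hypothetical name; substitute an installed plugin
 * #set($plugin = $xwiki.getPlugin("someplugin"))
 * #if($plugin)
 *   ## call methods exposed by that plugin's Api wrapper here
 * #end
 * </code></pre>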
* * @param name Name of the plugin to retrieve (either short of full class name) * @return a plugin object */ public Api getPlugin(String name) { return xwiki.getPluginApi(name, getXWikiContext()); } /** * Returns the recently visited pages for a specific action * * @param action ("view" or "edit") * @param size how many recent actions to retrieve * @return a ArrayList of document names * @deprecated use {@link #getStatsService()} instead */ public java.util.Collection getRecentActions(String action, int size) { XWikiStatsService stats = getXWikiContext().getWiki().getStatsService(getXWikiContext()); if (stats == null) return Collections.EMPTY_LIST; return stats.getRecentActions(action, size, getXWikiContext()); } /** * Returns the Advertisement system from the preferences * * @return "google" or "none" */ public String getAdType() { return xwiki.getAdType(getXWikiContext()); } /** * Returns the Advertisement client ID from the preferences * * @return an Ad affiliate ID */ public String getAdClientId() { return xwiki.getAdClientId(getXWikiContext()); } /** * Retrieves a int from a String * * @param str String to convert to int * @return the int or zero in case of exception */ public int parseInt(String str) { try { return Integer.parseInt(str); } catch (Exception e) { return 0; } } /** * Retrieves a int from a String * * @param str String to convert to int * @return the int or zero in case of exception */ public Integer parseInteger(String str) { return new Integer(parseInt(str)); } /** * Retrieves a long from a String * * @param str String to convert to long * @return the long or zero in case of exception */ public long parseLong(String str) { try { return Long.parseLong(str); } catch (Exception e) { return 0; } } /** * Retrieves a float from a String * * @param str String to convert to float * @return the float or zero in case of exception */ public float parseFloat(String str) { try { return Float.parseFloat(str); } catch (Exception e) { return 0; } } /** * Retrieves a double from a String * * @param str String to convert to double * @return the double or zero in case of exception */ public double parseDouble(String str) { try { return Double.parseDouble(str); } catch (Exception e) { return 0; } } /** * Returns the content of an HTTP/HTTPS URL protected using Basic Authentication * * @param surl url to retrieve * @param username username for the basic authentication * @param password password for the basic authentication * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl, String username, String password) throws IOException { try { return xwiki.getURLContent(surl, username, password, context); } catch (Exception e) { LOG.warn("Failed to retrieve content from [" + surl + "]", e); return ""; } } /** * Returns the content of an HTTP/HTTPS URL * * @param surl url to retrieve * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl) throws IOException { try { return xwiki.getURLContent(surl, context); } catch (Exception e) { LOG.warn("Failed to retrieve content from [" + surl + "]", e); return ""; } } /** * Returns the content of an HTTP/HTTPS URL protected using Basic Authentication * * @param surl url to retrieve * @param username username for the basic authentication * @param password password for the basic authentication * @param timeout manuel timeout in milliseconds * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl, String username, 
String password, int timeout) throws IOException { try { return xwiki.getURLContent(surl, username, password, timeout, xwiki.getHttpUserAgent(context)); } catch (Exception e) { return ""; } } /** * Returns the content of an HTTP/HTTPS URL * * @param surl url to retrieve * @param timeout manuel timeout in milliseconds * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl, int timeout) throws IOException { try { return xwiki.getURLContent(surl, timeout, xwiki.getHttpUserAgent(context)); } catch (Exception e) { return ""; } } /** * Returns the content of an HTTP/HTTPS URL protected using Basic Authentication as Bytes * * @param surl url to retrieve * @param username username for the basic authentication * @param password password for the basic authentication * @return Content of the specified URL * @throws IOException */ public byte[] getURLContentAsBytes(String surl, String username, String password) throws IOException { try { return xwiki.getURLContentAsBytes(surl, username, password, context); } catch (Exception e) { return null; } } /** * Returns the content of an HTTP/HTTPS URL as Bytes * * @param surl url to retrieve * @return Content of the specified URL * @throws IOException */ public byte[] getURLContentAsBytes(String surl) throws IOException { try { return xwiki.getURLContentAsBytes(surl, context); } catch (Exception e) { return null; } } /** * Filters text to be include in = or like clause in SQL * * @param text text to filter * @return filtered text */ public String sqlfilter(String text) { return Utils.SQLFilter(text); } /** * Returns the list of Macros documents in the specified content * * @param defaultweb Default Web to use for relative path names * @param content Content to parse * @return ArrayList of document names */ public List getIncludedMacros(String defaultweb, String content) { return xwiki.getIncludedMacros(defaultweb, content, getXWikiContext()); } /** * returns true if xwiki.readonly is set in the configuration file * * @return the value of xwiki.isReadOnly() * @see com.xpn.xwiki.XWiki */ public boolean isReadOnly() { return xwiki.isReadOnly(); } /** * Priviledged API to set/unset the readonly status of the Wiki After setting this to true no * writing to the database will be performed All Edit buttons will be removed and save actions * disabled This is used for maintenance purposes * * @param ro true to set read-only mode/false to unset */ public void setReadOnly(boolean ro) { if (hasAdminRights()) { xwiki.setReadOnly(ro); } } /** * Priviledge API to regenerate the links/backlinks table Normally links and backlinks are * stored when a page is modified This function will regenerate all the backlinks This function * can be long to run * * @throws XWikiException exception if the generation fails */ public void refreshLinks() throws XWikiException { if (hasAdminRights()) { xwiki.refreshLinks(getXWikiContext()); } } /** * API to check if the backlinks feature is active * Backlinks are activated in xwiki.cfg or in the XWiki Preferences * @return true if the backlinks feature is active * @throws XWikiException exception if the preference could not be retrieved */ public boolean hasBacklinks() throws XWikiException { return xwiki.hasBacklinks(getXWikiContext()); } /** * API to check if the tags feature is active. 
* Tags are activated in xwiki.cfg or in the XWiki Preferences * @return true if the tags feature is active, false otherwise * @throws XWikiException exception if the preference could not be retrieved */ public boolean hasTags() throws XWikiException { return xwiki.hasTags(getXWikiContext()); } /** * API to check if the edit comment feature is active * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean hasEditComment() { return xwiki.hasEditComment(context); } /** * API to check if the edit comment field is shown in the edit form * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean isEditCommentFieldHidden() { return xwiki.isEditCommentFieldHidden(context); } /** * API to check if the edit comment is suggested (prompted once by Javascript if empty) * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean isEditCommentSuggested() { return xwiki.isEditCommentSuggested(context); } /** * API to check if the edit comment is mandatory (prompted by Javascript if empty) * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean isEditCommentMandatory() { return xwiki.isEditCommentMandatory(context); } /** * API to check if the minor edit feature is active * minor edit is activated in xwiki.cfg or in the XWiki Preferences */ public boolean hasMinorEdit() { return xwiki.hasMinorEdit(context); } /** * API to check if the recycle bin feature is active * recycle bin is activated in xwiki.cfg or in the XWiki Preferences */ public boolean hasRecycleBin() { return xwiki.hasRecycleBin(context); } /** * API to rename a page (experimental) Rights are necessary to edit the source and target page * All objects and attachments ID are modified in the process to link to the new page name * * @param doc page to rename * @param newFullName target page name to move the information to * @throws XWikiException exception if the rename fails */ public boolean renamePage(Document doc, String newFullName) { try { if (xwiki.exists(newFullName, getXWikiContext()) && !xwiki.getRightService().hasAccessLevel("delete", getXWikiContext().getUser(), newFullName, getXWikiContext())) return false; if (xwiki.getRightService().hasAccessLevel("edit", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext())) { xwiki.renamePage(doc.getFullName(), newFullName, getXWikiContext()); } } catch (XWikiException e) { return false; } return true; } /** * Retrieves the current editor preference for the request The preference is first looked up in * the user preference and then in the space and wiki preference * * @return "wysiwyg" or "text" */ public String getEditorPreference() { return xwiki.getEditorPreference(getXWikiContext()); } /** * Priviledged API to retrieve an object instanciated from groovy code in a String Groovy * scripts compilation is cached * * @param script script containing a Groovy class definition (public class MyClass { ... }) * @return An object instanciating this class * @throws XWikiException */ public Object parseGroovyFromString(String script) throws XWikiException { if (hasProgrammingRights()) return xwiki.parseGroovyFromString(script, getXWikiContext()); return "groovy_missingrights"; } /** * Priviledged API to retrieve an object instanciated from groovy code in a String Groovy * scripts compilation is cached * * @param fullname // script containing a Groovy class definition (public class MyClass { ... 
}) * @return An object instanciating this class * @throws XWikiException */ public Object parseGroovyFromPage(String fullname, String jarWikiPage) throws XWikiException { XWikiDocument doc = xwiki.getDocument(fullname, getXWikiContext()); if (xwiki.getRightService().hasProgrammingRights(doc, getXWikiContext())) return xwiki.parseGroovyFromString(doc.getContent(), jarWikiPage, getXWikiContext()); return "groovy_missingrights"; } /** * Priviledged API to retrieve an object instanciated from groovy code in a String Groovy * scripts compilation is cached * * @param fullname // script containing a Groovy class definition (public class MyClass { ... }) * @return An object instanciating this class * @throws XWikiException */ public Object parseGroovyFromPage(String fullname) throws XWikiException { XWikiDocument doc = xwiki.getDocument(fullname, getXWikiContext()); if (xwiki.getRightService().hasProgrammingRights(doc, getXWikiContext())) return xwiki.parseGroovyFromString(doc.getContent(), getXWikiContext()); return "groovy_missingrights"; } /** * API to get the macro list from the XWiki Preferences The macro list are the macros available * from the Macro Mapping System * * @return String with each macro on each line */ public String getMacroList() { return xwiki.getMacroList(getXWikiContext()); } /** * API to check if using which toolbars in Wysiwyg editor * * @return a string value */ public String getWysiwygToolbars() { return xwiki.getWysiwygToolbars(getXWikiContext()); } /** * API to create an object from the request The parameters are the ones that are created from * doc.display("field","edit") calls * * @param className XWiki Class Name to create the object from * @return a BaseObject wrapped in an Object * @throws XWikiException exception if the object could not be read */ public com.xpn.xwiki.api.Object getObjectFromRequest(String className) throws XWikiException { return new com.xpn.xwiki.api.Object(xwiki.getObjectFromRequest(className, getXWikiContext()), getXWikiContext()); } /** * API to create an empty document * * @return an XWikiDocument wrapped in a Document */ public Document createDocument() { return new XWikiDocument().newDocument(getXWikiContext()); } /** * API to convert the username depending on the configuration The username can be converted from * email to a valid XWiki page name hidding the email address The username can be then used to * login and link to the right user page * * @param username username to use for login * @return converted wiki page name for this username */ public String convertUsername(String username) { return xwiki.convertUsername(username, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class This field * data can then be used to generate an XWiki Query showing a table with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchColumns(String className, XWikiQuery query) throws XWikiException { return xwiki.displaySearchColumns(className, "", query, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class, optionally * adding a prefix This field data can then be used to generate an XWiki Query showing a table * with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param prefix 
Prefix to add to the field name * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchColumns(String className, String prefix, XWikiQuery query) throws XWikiException { return xwiki.displaySearchColumns(className, prefix, query, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class This field * data can then be used to generate the order element of an XWiki Query showing a table with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchOrder(String className, XWikiQuery query) throws XWikiException { return xwiki.displaySearchOrder(className, "", query, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class, optionally * adding a prefix This field data can then be used to generate the order element of an XWiki Query showing a table * with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param prefix Prefix to add to the field name * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchOrder(String className, String prefix, XWikiQuery query) throws XWikiException { return xwiki.displaySearchOrder(className, prefix, query, getXWikiContext()); } /** * API to display a field in search mode for a specific class without preselected values This * field data can then be used to generate an XWiki Query showing a table with the relevant data * * @param fieldname field name in the class * @param className class name to display the field from * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearch(String fieldname, String className) throws XWikiException { return xwiki.displaySearch(fieldname, className, getXWikiContext()); } /** * API to display a field in search mode for a specific class with preselected values This field * data can then be used to generate an XWiki Query showing a table with the relevant data * * @param fieldname field name in the class * @param className class name to display the field from * @param criteria XWikiCriteria object (usually the XWikiQuery object) to take the preselected * values from * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearch(String fieldname, String className, XWikiCriteria criteria) throws XWikiException { return xwiki.displaySearch(fieldname, className, criteria, getXWikiContext()); } /** * API to display a field in search mode for a specific class with preselected values, * optionally adding a prefix to the field name This field data can then be used to generate an * XWiki Query showing a table with the relevant data * * @param fieldname field name in the class * @param className class name to display the field from * @param prefix prefix to add to the field name * @param criteria XWikiCriteria object (usually the XWikiQuery object) to take the preselected * values from * @return text of the select field * @throws XWikiException exception is a failure occured */ public String 
displaySearch(String fieldname, String className, String prefix, XWikiCriteria criteria) throws XWikiException { return xwiki.displaySearch(fieldname, className, prefix, criteria, getXWikiContext()); } /** * API to run a search from an XWikiQuery Object An XWikiQuery object can be created from a * request using the createQueryFromRequest function * * @param query query to run the search for * @return A list of document names matching the query * @throws XWikiException exception is a failure occured */ public List search(XWikiQuery query) throws XWikiException { return xwiki.search(query, getXWikiContext()); } /** * API to create a query from a request Object The request object is the result of a form * created from the displaySearch() and displaySearchColumns() functions * * @param className class name to create the query from * @return an XWikiQuery object matching the selected values in the request object * @throws XWikiException exception is a failure occured */ public XWikiQuery createQueryFromRequest(String className) throws XWikiException { return xwiki.createQueryFromRequest(className, getXWikiContext()); } /** * API to run a search from an XWikiQuery Object and display it as a HTML table An XWikiQuery * object can be created from a request using the createQueryFromRequest function * * @param query query to run the search for * @return An HTML table showing the result * @throws XWikiException exception is a failure occured */ public String searchAsTable(XWikiQuery query) throws XWikiException { return xwiki.searchAsTable(query, getXWikiContext()); } /** * API to get the Property object from a class based on a property path A property path looks * like XWiki.ArticleClass_fieldname * * @param propPath Property path * @return a PropertyClass object from a BaseClass object */ public com.xpn.xwiki.api.PropertyClass getPropertyClassFromName(String propPath) { return new PropertyClass(xwiki.getPropertyClassFromName(propPath, getXWikiContext()), getXWikiContext()); } /** * Generates a unique page name based on initial page name and already existing pages * * @param name * @return a unique page name */ public String getUniquePageName(String name) { return xwiki.getUniquePageName(name, getXWikiContext()); } /** * Generates a unique page name based on initial page name and already existing pages * * @param space * @param name * @return a unique page name */ public String getUniquePageName(String space, String name) { return xwiki.getUniquePageName(space, name, getXWikiContext()); } /** * Cleans up the page name to make it valid * * @param name * @return A valid page name */ public String clearName(String name) { return xwiki.clearName(name, getXWikiContext()); } /** * Inserts a tooltip using toolTip.js * * @param html HTML viewed * @param message HTML Tooltip message * @param params Parameters in Javascropt added to the tooltip config * @return HTML with working tooltip */ public String addTooltip(String html, String message, String params) { return xwiki.addTooltip(html, message, params, getXWikiContext()); } /** * Inserts a tooltip using toolTip.js * * @param html HTML viewed * @param message HTML Tooltip message * @return HTML with working tooltip */ public String addTooltip(String html, String message) { return xwiki.addTooltip(html, message, getXWikiContext()); } /** * Inserts the tooltip Javascript * * @return */ public String addTooltipJS() { return xwiki.addTooltipJS(getXWikiContext()); } /* * Inserts a Mandatory asterix */ public String addMandatory() { return 
xwiki.addMandatory(getXWikiContext()); } /* * Clear accents */ public String clearAccents(String text) { return Util.noaccents(text); } /** * Get the XWiki Class object defined in the passed Document name. * * <p>Note: This method doesn't require any rights for accessing the passed Document (as * opposed to the {@link com.xpn.xwiki.api.Document#getxWikiClass()} method which * does require to get a Document object first. This is thus useful in cases where * the calling code doesn't have the access right to the specified Document. It is * safe because there are no sensitive data stored in a Class definition. * </p> * * @param documentName the name of the document for which to get the Class object. * For example "XWiki.XWikiPreferences" * @return the XWiki Class object defined in the passed Document name. If the passed Document * name points to a Document with no Class defined then an empty Class object is * returned (i.e. a Class object with no properties). * @throws XWikiException if the passed document name doesn't point to a valid Document */ public Class getClass(String documentName) throws XWikiException { // TODO: The implementation should be done in com.xpn.xwiki.XWiki as this class should // delegate all implementations to that Class. return new Class(xwiki.getDocument(documentName, context).getxWikiClass(), context); } /** * Provides an absolute counter * @param name Counter name * @return String */ public String getCounter(String name) { XWikiEngineContext econtext = context.getEngineContext(); Integer counter = (Integer) econtext.getAttribute(name); if (counter==null) { counter = new Integer(0); } counter = new Integer(counter.intValue() + 1); econtext.setAttribute(name, counter); return counter.toString(); } /** * Check authentication from request and set according persitent login information * If it fails user is unlogged * @return null if failed, non null XWikiUser if sucess * @throws XWikiException */ public XWikiUser checkAuth() throws XWikiException { return context.getWiki().getAuthService().checkAuth(context); } /** * Check authentication from username and password and set according persitent login information * If it fails user is unlogged * @param username username to check * @param password password to check * @param rememberme "1" if you want to remember the login accross navigator restart * @return null if failed, non null XWikiUser if sucess * @throws XWikiException */ public XWikiUser checkAuth(String username, String password, String rememberme) throws XWikiException { return context.getWiki().getAuthService().checkAuth(username, password, rememberme, context); } /** * Add a and b because velocity operations are not always working * @param a * @param b * @return a+b */ public int add(int a, int b) { return a+b; } /** * Add a and b because velocity operations are not working with longs * @param a * @param b * @return a+b */ public long add(long a, long b) { return a+b; } /** * Add a and b because velocity operations are not working with longs * @param a * @param b * @return a+b */ public String add(String a, String b) { long c = Long.parseLong(a) + Long.parseLong(b); return "" + c; } /** * Access statistics api * * @return a StatsService instance that can be used to retrieve different xwiki statistics */ public StatsService getStatsService() { return this.statsService; } }
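// Illustrative usage sketch: this api.XWiki wrapper is the object exposed to Velocity templates
// as $xwiki. A hypothetical server-side caller holding an initialized core com.xpn.xwiki.XWiki
// instance and an XWikiContext could wrap and query it like this (document name chosen only for
// the example, XWikiException handling omitted for brevity):
//
//   XWiki apiWiki = new XWiki(coreWiki, context);
//   if (apiWiki.exists("Main.WebHome")) {
//       Document home = apiWiki.getDocument("Main.WebHome"); // null if the "view" right is denied
//   }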
xwiki-core/src/main/java/com/xpn/xwiki/api/XWiki.java
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package com.xpn.xwiki.api; import com.sun.image.codec.jpeg.JPEGCodec; import com.sun.image.codec.jpeg.JPEGImageEncoder; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.XWikiException; import com.xpn.xwiki.user.api.XWikiUser; import com.xpn.xwiki.util.Util; import com.xpn.xwiki.plugin.query.XWikiQuery; import com.xpn.xwiki.plugin.query.XWikiCriteria; import com.xpn.xwiki.doc.XWikiDeletedDocument; import com.xpn.xwiki.doc.XWikiDocument; import com.xpn.xwiki.objects.meta.MetaClass; import com.xpn.xwiki.stats.api.XWikiStatsService; import com.xpn.xwiki.stats.impl.DocumentStats; import com.xpn.xwiki.web.Utils; import com.xpn.xwiki.web.XWikiEngineContext; import org.suigeneris.jrcs.diff.delta.Chunk; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.awt.image.BufferedImage; import java.io.IOException; import java.io.OutputStream; import java.lang.Object; import java.util.*; public class XWiki extends Api { protected static final Log LOG = LogFactory.getLog(XWiki.class); private com.xpn.xwiki.XWiki xwiki; /** * @see #getStatsService() */ private StatsService statsService; /** * XWiki API Constructor * * @param xwiki XWiki Main Object to wrap * @param context XWikiContext to wrap */ public XWiki(com.xpn.xwiki.XWiki xwiki, XWikiContext context) { super(context); this.xwiki = xwiki; this.statsService = new StatsService(context); } /** * Priviledge API allowing to access the underlying main XWiki Object * * @return Priviledged Main XWiki Object */ public com.xpn.xwiki.XWiki getXWiki() { if (hasProgrammingRights()) return xwiki; return null; } /** * @return XWiki's version in the format <code>(version).(SVN build number)</code>, or * "Unknown version" if it failed to be retrieved */ public String getVersion() { return xwiki.getVersion(); } /** * API Allowing to access the current request URL being requested * * @return URL * @throws XWikiException */ public String getRequestURL() throws XWikiException { return getXWikiContext().getURLFactory().getRequestURL(getXWikiContext()).toString(); } /** * Loads an Document from the database. Rights are checked before sending back the document. 
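 * <p>Minimal illustration (hypothetical page name), assuming the standard <code>$xwiki</code>
 * Velocity binding; the call returns null when the current user lacks the "view" right:</p>
 * <pre><code>
 * #set($page = $xwiki.getDocument("Main.WebHome"))
 * #if($page)
 *   Loaded $page.getFullName()
 * #end
 * </code></pre>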
* * @param fullname Fullname of the XWiki document to be loaded * @return a Document object or null if it is not accessible * @throws XWikiException */ public Document getDocument(String fullname) throws XWikiException { XWikiDocument doc = xwiki.getDocument(fullname, getXWikiContext()); if (xwiki.getRightService().hasAccessLevel("view", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext()) == false) { return null; } Document newdoc = doc.newDocument(getXWikiContext()); return newdoc; } /** * @return all deleted documents in recycle bin * @param fullname - {@link XWikiDocument#getFullName()} * @param lang - {@link XWikiDocument#getLanguage()} * @throws XWikiException if any error */ public List getDeletedDocuments(String fullname, String lang) throws XWikiException { XWikiDeletedDocument[] dds = xwiki.getDeletedDocuments(fullname, lang, context); if (dds == null || dds.length == 0) { return Collections.EMPTY_LIST; } List result = new ArrayList(dds.length); for (int i = 0; i < dds.length; i++) { result.add(new DeletedDocument(dds[i], context)); } return result; } /** * @return specified documents in recycle bin * @param fullname - {@link XWikiDocument#getFullName()} * @param lang - {@link XWikiDocument#getLanguage()} * @throws XWikiException if any error */ public DeletedDocument getDeletedDocument(String fullname, String lang, String index) throws XWikiException { XWikiDeletedDocument dd = xwiki.getDeletedDocument(fullname, lang, Integer.parseInt(index), context); if (dd == null) { return null; } return new DeletedDocument(dd, context); } /** * Returns wether a document exists or not * * @param fullname Fullname of the XWiki document to be loaded * @return true if the document exists, false if not * @throws XWikiException */ public boolean exists(String fullname) throws XWikiException { return xwiki.exists(fullname, getXWikiContext()); } /** * Verify the rights the current user has on a document. If the document requires rights and the * user is not authenticated he will be redirected to the login page. * * @param docname fullname of the document * @param right right to check ("view", "edit", "admin", "delete") * @return true if it exists */ public boolean checkAccess(String docname, String right) { try { XWikiDocument doc = getXWikiContext().getWiki().getDocument(docname, context); return getXWikiContext().getWiki().checkAccess(right, doc, getXWikiContext()); } catch (XWikiException e) { return false; } } /** * Loads an Document from the database. Rights are checked before sending back the document. 
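 * <p>Hedged illustration of how the <code>web</code> parameter acts as a default space (the
 * page names below are placeholders): a relative name is resolved inside <code>web</code>,
 * while a fullname that already carries a space keeps its own space:</p>
 * <pre><code>
 * #set($a = $xwiki.getDocument("Sandbox", "TestPage"))       ## resolves to Sandbox.TestPage
 * #set($b = $xwiki.getDocument("Sandbox", "Other.TestPage")) ## resolves to Other.TestPage
 * </code></pre>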
* * @param web Space to use in case no space is defined in the fullname * @param fullname Fullname or relative name of the document to load * @return a Document object or null if it is not accessible * @throws XWikiException */ public Document getDocument(String web, String fullname) throws XWikiException { XWikiDocument doc = xwiki.getDocument(web, fullname, getXWikiContext()); if (xwiki.getRightService().hasAccessLevel("view", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext()) == false) { return null; } Document newdoc = doc.newDocument(getXWikiContext()); return newdoc; } /** * Load a specific revision of a document * * @param doc Document for which to load a specific revision * @param rev Revision number * @return Specific revision of a document * @throws XWikiException */ public Document getDocument(Document doc, String rev) throws XWikiException { if ((doc == null) || (doc.getDoc() == null)) return null; if (xwiki.getRightService().hasAccessLevel("view", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext()) == false) { // Finally we return null, otherwise showing search result is a real pain return null; } try { XWikiDocument revdoc = xwiki.getDocument(doc.getDoc(), rev, getXWikiContext()); Document newdoc = revdoc.newDocument(getXWikiContext()); return newdoc; } catch (Exception e) { // Can't read versioned document e.printStackTrace(); return null; } } /** * Transform a text in a form compatible text * * @param content text to transform * @return encoded result */ public String getFormEncoded(String content) { return com.xpn.xwiki.XWiki.getFormEncoded(content); } /** * Transform a text in a URL compatible text * * @param content text to transform * @return encoded result */ public String getURLEncoded(String content) { return xwiki.getURLEncoded(content); } /** * Transform a text in a XML compatible text * * @param content text to transform * @return encoded result */ public String getXMLEncoded(String content) { return com.xpn.xwiki.XWiki.getXMLEncoded(content); } /** * API to protect Text from Wiki transformation * @param text * @return escaped text */ public String escapeText(String text) { return Util.escapeText(text); } /** * API to protect URLs from Wiki transformation * @param url * @return encoded URL */ public String escapeURL(String url) { return Util.escapeURL(url); } /** * Output content in the edit content textarea * * @param content content to output * @return the textarea text content */ public String getTextArea(String content) { return com.xpn.xwiki.XWiki.getTextArea(content, getXWikiContext()); } /** * Output content in the edit content htmlarea * * @param content content to output * @return the htmlarea text content */ public String getHTMLArea(String content) { return xwiki.getHTMLArea(content, getXWikiContext()); } /** * Get the list of available classes in the wiki * * @return list of classes names * @throws XWikiException */ public List getClassList() throws XWikiException { return xwiki.getClassList(getXWikiContext()); } /** * Get the global MetaClass object * * @return MetaClass object */ public MetaClass getMetaclass() { return xwiki.getMetaclass(); } /** * Priviledged API allowing to run a search on the database returning a list of data This search * is send to the store engine (Hibernate HQL, JCR XPATH or other) * * @param wheresql Query to be run (HQL, XPath) * @return A list of rows (Object[]) * @throws XWikiException */ public List search(String wheresql) throws XWikiException { if (hasProgrammingRights()) return 
xwiki.search(wheresql, getXWikiContext()); return Collections.EMPTY_LIST; } /** * Priviledged API allowing to run a search on the database returning a list of data This search * is send to the store engine (Hibernate HQL, JCR XPATH or other) * * @param wheresql Query to be run (HQL, XPath) * @param nb return only 'nb' rows * @param start skip the 'start' first elements * @return A list of rows (Object[]) * @throws XWikiException */ public List search(String wheresql, int nb, int start) throws XWikiException { if (hasProgrammingRights()) return xwiki.search(wheresql, nb, start, getXWikiContext()); return Collections.EMPTY_LIST; } /** * API allowing to search for document names matching a query. * Examples: * <ul> * <li>Query: <code>where doc.web='Main' order by doc.creationDate desc</code>. * Result: All the documents in space 'Main' ordered by the creation date from the most * recent</li> * <li>Query: <code>where doc.name like '%sport%' order by doc.name asc</code>. * Result: All the documents containing 'sport' in their name ordered by document * name</li> * <li>Query: <code>where doc.content like '%sport%' order by doc.author</code> * Result: All the documents containing 'sport' in their content ordered by the * author</li> * <li>Query: <code>where doc.creator = 'XWiki.LudovicDubost' order by doc.creationDate * desc</code>. * Result: All the documents with creator LudovicDubost ordered by the creation date * from the most recent</li> * <li>Query: <code>where doc.author = 'XWiki.LudovicDubost' order by doc.date desc</code>. * Result: All the documents with last author LudovicDubost ordered by the last * modification date from the most recent.</li> * <li>Query: <code>,BaseObject as obj where doc.fullName=obj.name and * obj.className='XWiki.XWikiComments' order by doc.date desc</code>. * Result: All the documents with at least one comment ordered by the last modification * date from the most recent</li> * <li>Query: <code>,BaseObject as obj, StringProperty as prop where * doc.fullName=obj.name and obj.className='XWiki.XWikiComments' and obj.id=prop.id.id * and prop.id.name='author' and prop.value='XWiki.LudovicDubost' order by doc.date * desc</code>. * Result: All the documents with at least one comment from LudovicDubost ordered by the * last modification date from the most recent</li> * </ul> * * @param wheresql Query to be run (either starting with ", BaseObject as obj where.." or by * "where ..." * @return List of document names matching (Main.Page1, Main.Page2) * @throws XWikiException */ public List searchDocuments(String wheresql) throws XWikiException { return xwiki.getStore().searchDocumentsNames(wheresql, getXWikiContext()); } /** * API allowing to search for document names matching a query return only a limited number of * elements and skipping the first rows. The query part is the same as searchDocuments * * @param wheresql query to use similar to searchDocuments(wheresql) * @param nb return only 'nb' rows * @param start skip the first 'start' rows * @return List of document names matching * @throws XWikiException * @see List searchDocuments(String where sql) */ public List searchDocuments(String wheresql, int nb, int start) throws XWikiException { return xwiki.getStore().searchDocumentsNames(wheresql, nb, start, getXWikiContext()); } /** * Priviledged API allowing to search for document names matching a query return only a limited * number of elements and skipping the first rows. 
The return values contain the list of columns * spciefied in addition to the document space and name The query part is the same as * searchDocuments * * @param wheresql query to use similar to searchDocuments(wheresql) * @param nb return only 'nb' rows * @param start skip the first 'start' rows * @param selectColumns List of columns to add to the result * @return List of Object[] with the column values of the matching rows * @throws XWikiException */ public List searchDocuments(String wheresql, int nb, int start, String selectColumns) throws XWikiException { if (hasProgrammingRights()) return xwiki.getStore().searchDocumentsNames(wheresql, nb, start, selectColumns, getXWikiContext()); return Collections.EMPTY_LIST; } /** * API allowing to search for documents allowing to have mutliple entries per language * * @param wheresql query to use similar to searchDocuments(wheresql) * @param distinctbylanguage true to return multiple rows per language * @return List of Document object matching * @throws XWikiException */ public List searchDocuments(String wheresql, boolean distinctbylanguage) throws XWikiException { return wrapDocs(xwiki.getStore().searchDocuments(wheresql, getXWikiContext())); } /** * API allowing to search for documents allowing to have mutliple entries per language * * @param wheresql query to use similar to searchDocuments(wheresql) * @param distinctbylanguage true to return multiple rows per language * @return List of Document object matching * @param nb return only 'nb' rows * @param start skip the first 'start' rows * @throws XWikiException */ public List searchDocuments(String wheresql, boolean distinctbylanguage, int nb, int start) throws XWikiException { return wrapDocs(xwiki.getStore().searchDocuments(wheresql, nb, start, getXWikiContext())); } /** * Search documents by passing HQL where clause values as parameters. This allows generating * a Named HQL query which will automatically encode the passed values (like escaping single * quotes). This API is recommended to be used over the other similar methods where the values * are passed inside the where clause and for which you'll need to do the encoding/escpaing * yourself before calling them. * * <p>Example</p> * <pre><code> * #set($orphans = $xwiki.searchDocuments(" where doc.fullName <> ? and (doc.parent = ? or " * + "(doc.parent = ? and doc.web = ?))", * ["${doc.fullName}as", ${doc.fullName}, ${doc.name}, ${doc.web}])) * </code></pre> * * @param parametrizedSqlClause the HQL where clause. For example <code>" where doc.fullName * <> ? and (doc.parent = ? or (doc.parent = ? and doc.web = ?))"</code> * @param nb the number of rows to return. If 0 then all rows are returned * @param start the number of rows to skip. If 0 don't skip any row * @param parameterValues the where clause values that replace the question marks (?) * @return a list of document names * @throws XWikiException in case of error while performing the query */ public List searchDocuments(String parametrizedSqlClause, int nb, int start, List parameterValues) throws XWikiException { return xwiki.getStore().searchDocumentsNames(parametrizedSqlClause, nb, start, parameterValues, getXWikiContext()); } /** * Same as {@link #searchDocuments(String, int, int, java.util.List)} but returns all rows. 
* * @see #searchDocuments(String, int, int, java.util.List) */ public List searchDocuments(String parametrizedSqlClause, List parameterValues) throws XWikiException { return xwiki.getStore().searchDocumentsNames(parametrizedSqlClause, parameterValues, getXWikiContext()); } /** * Function to wrap a list of XWikiDocument into Document objects * * @param docs list of XWikiDocument * @return list of Document objects */ public List wrapDocs(List docs) { List result = new ArrayList(); if (docs != null) { for (Iterator iter = docs.iterator(); iter.hasNext();) { Object obj = iter.next(); try { if (obj instanceof XWikiDocument) { XWikiDocument doc = (XWikiDocument) obj; Document wrappedDoc = doc.newDocument(getXWikiContext()); result.add(wrappedDoc); } else if (obj instanceof Document) { result.add(obj); } else if (obj instanceof String) { Document doc = getDocument(obj.toString()); if(doc != null) { result.add(doc); } } } catch (XWikiException ex) { } } } return result; } /** * API allowing to parse a text content to evaluate velocity scripts * * @param content * @return evaluated content if the content contains velocity scripts */ public String parseContent(String content) { return xwiki.parseContent(content, getXWikiContext()); } /** * API to parse the message being stored in the Context A message can be an error message or an * information message either as text or as a message ID pointing to ApplicationResources The * message is also parse for velocity scripts * * @return Final message */ public String parseMessage() { return xwiki.parseMessage(getXWikiContext()); } /** * API to parse a message A message can be an error message or an information message either as * text or as a message ID pointing to ApplicationResources The message is also parse for * velocity scripts * * @return Final message * @param id * @return the result of the parsed message */ public String parseMessage(String id) { return xwiki.parseMessage(id, getXWikiContext()); } /** * API to get a message A message can be an error message or an information message either as * text or as a message ID pointing to ApplicationResources The message is also parsed for * velocity scripts * * @return Final message * @param id * @return the result of the parsed message */ public String getMessage(String id) { return xwiki.getMessage(id, getXWikiContext()); } /** * API to parse a velocity template provided by the current Skin The template is first looked in * the skin active for the user, the space or the wiki. If the template does not exist in that * skin, the template is looked up in the "parent skin" of the skin * * @param template Template name ("view", "edit", "comment") * @return Evaluated content from the template */ public String parseTemplate(String template) { return xwiki.parseTemplate(template, getXWikiContext()); } /** * API to render a velocity template provided by the current Skin The template is first looked * in the skin active for the user, the space or the wiki. If the template does not exist in * that skin, the template is looked up in the "parent skin" of the skin * * @param template Template name ("view", "edit", "comment") * @return Evaluated content from the template */ public String renderTemplate(String template) { return xwiki.renderTemplate(template, getXWikiContext()); } /** * Designed to include dynamic content, such as Servlets or JSPs, inside Velocity templates; * works by creating a RequestDispatcher, buffering the output, then returning it as a string. 
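 * <p>Illustrative use from a Velocity template (the servlet path is a placeholder):</p>
 * <pre><code>
 * #set($report = $xwiki.invokeServletAndReturnAsString("/status/report.jsp"))
 * </code></pre>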
* * @param url URL of the servlet * @return text result of the servlet */ public String invokeServletAndReturnAsString(String url) { return xwiki.invokeServletAndReturnAsString(url, getXWikiContext()); } /** * Return the URL of the static file provided by the current skin The file is first looked in * the skin active for the user, the space or the wiki. If the file does not exist in that skin, * the file is looked up in the "parent skin" of the skin. The file can be a CSS file, an image * file, a javascript file, etc. * * @param filename Filename to be looked up in the skin (logo.gif, style.css) * @return URL to access this file */ public String getSkinFile(String filename) { return xwiki.getSkinFile(filename, getXWikiContext()); } /** * Return the URL of the static file provided by the current skin The file is first looked in * the skin active for the user, the space or the wiki. If the file does not exist in that skin, * the file is looked up in the "parent skin" of the skin. The file can be a CSS file, an image * file, a javascript file, etc. * * @param filename Filename to be looked up in the skin (logo.gif, style.css) * @param forceSkinAction true to make sure that static files are retrieved through the skin * action, to allow parsing of velocity on CSS files * @return URL to access this file */ public String getSkinFile(String filename, boolean forceSkinAction) { return xwiki.getSkinFile(filename, forceSkinAction, getXWikiContext()); } /** * API to retrieve the current skin for this request and user The skin is first derived from the * request "skin" parameter If this parameter does not exist, the user preference "skin" is * looked up If this parameter does not exist or is empty, the space preference "skin" is looked * up If this parameter does not exist or is empty, the XWiki preference "skin" is looked up If * this parameter does not exist or is empty, the xwiki.cfg parameter xwiki.defaultskin is * looked up If this parameter does not exist or is empty, the xwiki.cfg parameter * xwiki.defaultbaseskin is looked up If this parameter does not exist or is empty, the skin is * "albatross" * * @return The current skin for this request and user */ public String getSkin() { return xwiki.getSkin(getXWikiContext()); } /** * API to retrieve the current skin for this request and user. Each skin has a skin it is based * on. If not the base skin is the xwiki.cfg parameter "xwiki.defaultbaseskin". If this * parameter does not exist or is empty, the base skin is "albatross". * * @return The current baseskin for this request and user */ public String getBaseSkin() { return xwiki.getBaseSkin(getXWikiContext()); } /** * API to access the copyright for this space. The copyright is read in the space preferences. * If it does not exist or is empty it is read from the XWiki preferences. 
* * @return the text for the copyright */ public String getWebCopyright() { return xwiki.getWebCopyright(getXWikiContext()); } /** * API to access an XWiki Preference There can be one preference object per language This * function will find the right preference object associated to the current active language * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getXWikiPreference(String prefname) { return xwiki.getXWikiPreference(prefname, getXWikiContext()); } /** * API to access an XWiki Preference There can be one preference object per language This * function will find the right preference object associated to the current active language * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language */ public String getXWikiPreference(String prefname, String default_value) { return xwiki.getXWikiPreference(prefname, default_value, getXWikiContext()); } /** * API to access an Space Preference There can be one preference object per language This * function will find the right preference object associated to the current active language If * no preference is found it will look in the XWiki Preferences * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getWebPreference(String prefname) { return xwiki.getWebPreference(prefname, getXWikiContext()); } /** * API to access an Space Preference There can be one preference object per language This * function will find the right preference object associated to the current active language If * no preference is found it will look in the XWiki Preferences * * @param prefname Preference name * @param space The space for which this preference is requested * @return The preference for this wiki and the current language */ public String getWebPreferenceFor(String prefname, String space) { return xwiki.getWebPreference(prefname, space, "", getXWikiContext()); } /** * API to access an Space Preference There can be one preference object per language This * function will find the right preference object associated to the current active language If * no preference is found it will look in the XWiki Preferences * * @param prefname Preference name * @param default_value default value to return if the preference does not exist or is empty * @return The preference for this wiki and the current language */ public String getWebPreference(String prefname, String default_value) { return xwiki.getWebPreference(prefname, default_value, getXWikiContext()); } /** * API to access a Skin Preference The skin object is the current user's skin * * @param prefname Preference name * @return The preference for the current skin */ public String getSkinPreference(String prefname) { return xwiki.getSkinPreference(prefname, getXWikiContext()); } /** * API to access a Skin Preference The skin object is the current user's skin * * @param prefname Preference name * @param default_value default value to return if the preference does not exist or is empty * @return The preference for the current skin */ public String getSkinPreference(String prefname, String default_value) { return xwiki.getSkinPreference(prefname, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current 
active * language * * @param prefname Preference name * @param space The space for which this preference is requested * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in long format */ public String getWebPreferenceFor(String prefname, String space, String default_value) { return xwiki.getWebPreference(prefname, space, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in long format */ public long getXWikiPreferenceAsLong(String prefname, long default_value) { return xwiki.getXWikiPreferenceAsLong(prefname, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @return The preference for this wiki and the current language in long format */ public long getXWikiPreferenceAsLong(String prefname) { return xwiki.getXWikiPreferenceAsLong(prefname, getXWikiContext()); } /** * API to access an Web Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in long format */ public long getWebPreferenceAsLong(String prefname, long default_value) { return xwiki.getWebPreferenceAsLong(prefname, default_value, getXWikiContext()); } /** * API to access an Web Preference as a long number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @return The preference for this wiki and the current language in long format */ public long getWebPreferenceAsLong(String prefname) { return xwiki.getWebPreferenceAsLong(prefname, getXWikiContext()); } /** * API to access an XWiki Preference as an int number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in int format */ public int getXWikiPreferenceAsInt(String prefname, int default_value) { return xwiki.getXWikiPreferenceAsInt(prefname, default_value, getXWikiContext()); } /** * API to access an XWiki Preference as a int number There can be one preference object per * language This function will find the right preference object associated to the current active * language * * @param prefname Preference name * @return The preference for this wiki and the current language in int format */ public int getXWikiPreferenceAsInt(String 
prefname) { return xwiki.getXWikiPreferenceAsInt(prefname, getXWikiContext()); } /** * API to access an Web Preference as a int number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @param default_value default value to return if the prefenrece does not exist or is empty * @return The preference for this wiki and the current language in int format */ public int getWebPreferenceAsInt(String prefname, int default_value) { return xwiki.getWebPreferenceAsInt(prefname, default_value, getXWikiContext()); } /** * API to access an Web Preference as a int number There can be one preference object per * language This function will find the right preference object associated to the current active * language If no preference is found it will look for the XWiki Preference * * @param prefname Preference name * @return The preference for this wiki and the current language in int format */ public int getWebPreferenceAsInt(String prefname) { return xwiki.getWebPreferenceAsInt(prefname, getXWikiContext()); } /** * API to access a User Preference This function will look in the User profile for the * preference If no preference is found it will look in the Space Preferences If no preference * is found it will look in the XWiki Preferences * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getUserPreference(String prefname) { return xwiki.getUserPreference(prefname, getXWikiContext()); } /** * API to access a User Preference from cookie This function will look in the session cookie for * the preference * * @param prefname Preference name * @return The preference for this wiki and the current language */ public String getUserPreferenceFromCookie(String prefname) { return xwiki.getUserPreferenceFromCookie(prefname, getXWikiContext()); } /** * First try to find the current language in use from the XWiki context. If none is used * and if the wiki is not multilingual use the default language defined in the XWiki * preferences. If the wiki is multilingual try to get the language passed in the request. * If none was passed try to get it from a cookie. If no language cookie exists then use the * user default language and barring that use the browser's "Accept-Language" header sent in * HTTP request. If none is defined use the default language. 
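 * <p>Typical use from a template (illustrative only):</p>
 * <pre><code>
 * #set($lang = $xwiki.getLanguagePreference())
 * #if($lang == "fr") Bonjour #else Hello #end
 * </code></pre>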
* * @return the language to use */ public String getLanguagePreference() { return xwiki.getLanguagePreference(getXWikiContext()); } /** * @deprecated use {@link #getLanguagePreference()} instead */ public String getDocLanguagePreference() { return xwiki.getDocLanguagePreference(getXWikiContext()); } /** * API to access the interface language preference for the request Order of evaluation is: * Language of the wiki in mono-lingual mode language request paramater language in context * language user preference language in cookie language accepted by the navigator * * @return the document language preference for the request */ public String getInterfaceLanguagePreference() { return xwiki.getInterfaceLanguagePreference(getXWikiContext()); } /** * API to check if wiki is in multi-wiki mode (virtual) * * @return true for multi-wiki/false for mono-wiki */ public boolean isVirtual() { return xwiki.isVirtual(); } /** * API to check is wiki is multi-lingual * * @return true for multi-lingual/false for mono-lingual */ public boolean isMultiLingual() { return xwiki.isMultiLingual(getXWikiContext()); } /** * Priviledged API to flush the cache of the Wiki installation This flushed the cache of all * wikis, all plugins, all renderers */ public void flushCache() { if (hasProgrammingRights()) xwiki.flushCache(getXWikiContext()); } /** * Priviledged API to reset the rendenring engine This would restore the rendering engine * evaluation loop and take into account new configuration parameters */ public void resetRenderingEngine() { if (hasProgrammingRights()) try { xwiki.resetRenderingEngine(getXWikiContext()); } catch (XWikiException e) { } } /** * Priviledged API to create a new user from the request This API is used by RegisterNewUser * wiki page * * @return true for success/false for failure * @throws XWikiException */ public int createUser() throws XWikiException { return createUser(false, "edit"); } /** * Priviledged API to create a new user from the request This API is used by RegisterNewUser * wiki page This version sends a validation email to the user Configuration of validation email * is in the XWiki Preferences * * @param withValidation true to send the validationemail * @return true for success/false for failure * @throws XWikiException */ public int createUser(boolean withValidation) throws XWikiException { return createUser(withValidation, "edit"); } /** * Priviledged API to create a new user from the request This API is used by RegisterNewUser * wiki page This version sends a validation email to the user Configuration of validation email * is in the XWiki Preferences * * @param withValidation true to send the validation email * @param userRights Rights to set for the user for it's own page(defaults to "edit") * @return true for success/false for failure * @throws XWikiException */ public int createUser(boolean withValidation, String userRights) throws XWikiException { boolean registerRight; try { // So, what's the register right for? This says that if the creator of the page // (Admin) has programming rights, anybody can register. Is this OK? 
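            // In the current implementation the check below means: if the author of this
            // document has programming rights, registration is always allowed; otherwise the
            // current user must have been granted the "register" right on
            // XWiki.XWikiPreferences for the call to proceed.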
if (hasProgrammingRights()) { registerRight = true; } else { registerRight = xwiki.getRightService().hasAccessLevel("register", getXWikiContext().getUser(), "XWiki.XWikiPreferences", getXWikiContext()); } if (registerRight) return xwiki.createUser(withValidation, userRights, getXWikiContext()); return -1; } catch (Exception e) { e.printStackTrace(); return -2; } } /** * Priviledged API to create a new Wiki from an existing wiki This creates the database, copies * to documents from a existing wiki Assigns the admin rights, creates the Wiki identification * page in the main wiki * * @param wikiName Wiki Name to create * @param wikiUrl Wiki URL to accept requests from * @param wikiAdmin Wiki admin user * @param baseWikiName Wiki to copy documents from * @param failOnExist true to fail if the wiki already exists, false to overwrite * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int createNewWiki(String wikiName, String wikiUrl, String wikiAdmin, String baseWikiName, boolean failOnExist) throws XWikiException { return createNewWiki(wikiName, wikiUrl, wikiAdmin, baseWikiName, "", null, failOnExist); } /** * Priviledged API to create a new Wiki from an existing wiki This creates the database, copies * to documents from a existing wiki Assigns the admin rights, creates the Wiki identification * page in the main wiki * * @param wikiName Wiki Name to create * @param wikiUrl Wiki URL to accept requests from * @param wikiAdmin Wiki admin user * @param baseWikiName Wiki to copy documents from * @param description Description of the Wiki * @param failOnExist true to fail if the wiki already exists, false to overwrite * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int createNewWiki(String wikiName, String wikiUrl, String wikiAdmin, String baseWikiName, String description, boolean failOnExist) throws XWikiException { return createNewWiki(wikiName, wikiUrl, wikiAdmin, baseWikiName, description, null, failOnExist); } /** * Priviledged API to create a new Wiki from an existing wiki This creates the database, copies * to documents from a existing wiki Assigns the admin rights, creates the Wiki identification * page in the main wiki Copy is limited to documents of a specified language. 
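 * <p>Illustrative call (every name below is a placeholder) creating a French-only wiki copied
 * from an existing template wiki; programming rights are required:</p>
 * <pre><code>
 * #set($code = $xwiki.createNewWiki("sales", "sales.example.com", "XWiki.Admin",
 *                                   "salestemplate", "Sales wiki", "fr", false))
 * </code></pre>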
If a document for * the language is not found, the default language document is used * * @param wikiName Wiki Name to create * @param wikiUrl Wiki URL to accept requests from * @param wikiAdmin Wiki admin user * @param baseWikiName Wiki to copy documents from * @param description Description of the Wiki * @param language Language to copy * @param failOnExist true to fail if the wiki already exists, false to overwrite * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int createNewWiki(String wikiName, String wikiUrl, String wikiAdmin, String baseWikiName, String description, String language, boolean failOnExist) throws XWikiException { if (hasProgrammingRights()) return xwiki.createNewWiki(wikiName, wikiUrl, wikiAdmin, baseWikiName, description, language, failOnExist, getXWikiContext()); return -1; } /** * Priviledged API to validate the return code given by a user in response to an email * validation email The validation information are taken from the request object * * @param withConfirmEmail true to send a account confirmation email/false to not send it * @return Success of Failure code (0 for success, -1 for missing programming rights, > 0 for * other errors * @throws XWikiException */ public int validateUser(boolean withConfirmEmail) throws XWikiException { return xwiki.validateUser(withConfirmEmail, getXWikiContext()); } /** * Priviledged API to add a user to the XWiki.XWikiAllGroup * * @param fullwikiname user name to add * @throws XWikiException */ public void addToAllGroup(String fullwikiname) throws XWikiException { if (hasProgrammingRights()) xwiki.setUserDefaultGroup(fullwikiname, getXWikiContext()); } /** * Priviledged API to send a confirmation email to a user * * @param xwikiname user to send the email to * @param password password to put in the mail * @param email email to send to * @param add_message Additional message to send to the user * @param contentfield Preference field to use as a mail template * @throws XWikiException if the mail was not send successfully */ public void sendConfirmationMail(String xwikiname, String password, String email, String add_message, String contentfield) throws XWikiException { if (hasProgrammingRights()) xwiki.sendConfirmationEmail(xwikiname, password, email, add_message, contentfield, getXWikiContext()); } /** * Priviledged API to send a confirmation email to a user * * @param xwikiname user to send the email to * @param password password to put in the mail * @param email email to send to * @param contentfield Preference field to use as a mail template * @throws XWikiException if the mail was not send successfully */ public void sendConfirmationMail(String xwikiname, String password, String email, String contentfield) throws XWikiException { if (hasProgrammingRights()) xwiki.sendConfirmationEmail(xwikiname, password, email, "", contentfield, getXWikiContext()); } /** * Priviledged API to send a message to an email address * * @param sender email of the sender of the message * @param recipient email of the recipient of the message * @param message Message to send * @throws XWikiException if the mail was not send successfully */ public void sendMessage(String sender, String recipient, String message) throws XWikiException { if (hasProgrammingRights()) xwiki.sendMessage(sender, recipient, message, getXWikiContext()); } /** * Priviledged API to send a message to an email address * * @param sender email of the sender of the message * @param recipient 
emails of the recipients of the message * @param message Message to send * @throws XWikiException if the mail was not send successfully */ public void sendMessage(String sender, String[] recipient, String message) throws XWikiException { if (hasProgrammingRights()) xwiki.sendMessage(sender, recipient, message, getXWikiContext()); } /** * Priviledged API to copy a document to another document in the same wiki * * @param docname source document * @param targetdocname target document * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname) throws XWikiException { return this.copyDocument(docname, targetdocname, null, null, null, false, false); } /** * Priviledged API to copy a translation of a document to another document in the same wiki * * @param docname source document * @param targetdocname target document * @param wikilanguage language to copy * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname, String wikilanguage) throws XWikiException { return this.copyDocument(docname, targetdocname, null, null, wikilanguage, false, false); } /** * Priviledged API to copy a translation of a document to another document of the same name in * another wiki * * @param docname source document * @param sourceWiki source wiki * @param targetWiki target wiki * @param wikilanguage language to copy * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String sourceWiki, String targetWiki, String wikilanguage) throws XWikiException { return this.copyDocument(docname, docname, sourceWiki, targetWiki, wikilanguage, true, false); } /** * Priviledged API to copy a translation of a document to another document of the same name in * another wiki additionally resetting the version * * @param docname source document * @param sourceWiki source wiki * @param targetWiki target wiki * @param wikilanguage language to copy * @param reset true to reset versions * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname, String sourceWiki, String targetWiki, String wikilanguage, boolean reset) throws XWikiException { return this.copyDocument(docname, targetdocname, sourceWiki, targetWiki, wikilanguage, reset, false); } /** * Priviledged API to copy a translation of a document to another document of the same name in * another wiki additionally resetting the version and overwriting the previous document * * @param docname source document * @param sourceWiki source wiki * @param targetWiki target wiki * @param wikilanguage language to copy * @param reset true to reset versions * @param force true to overwrite the previous document * @return true if the copy was sucessfull * @throws XWikiException if the document was not copied properly */ public boolean copyDocument(String docname, String targetdocname, String sourceWiki, String targetWiki, String wikilanguage, boolean reset, boolean force) throws XWikiException { if (hasProgrammingRights()) { return xwiki.copyDocument(docname, targetdocname, sourceWiki, targetWiki, wikilanguage, reset, force, true, getXWikiContext()); } return false; } /** * Priviledged API to copy a space to another wiki, optionally deleting all document of the 
* target space * * @param web source Space * @param sourceWiki source Wiki * @param targetWiki target Wiki * @param wikiLanguage language to copy * @param clean true to delete all document of the target space * @return number of copied documents * @throws XWikiException if the space was not copied properly */ public int copyWikiWeb(String web, String sourceWiki, String targetWiki, String wikiLanguage, boolean clean) throws XWikiException { if (hasProgrammingRights()) return xwiki.copyWikiWeb(web, sourceWiki, targetWiki, wikiLanguage, clean, getXWikiContext()); return -1; } /** * API to include a topic into another The topic is rendered fully in the context of itself * * @param topic page name of the topic to include * @return the content of the included page * @throws XWikiException if the include failed */ public String includeTopic(String topic) throws XWikiException { return includeTopic(topic, true); } /** * API to execute a form in the context of an including topic The rendering is evaluated in the * context of the including topic All velocity variables are the one of the including topic This * api is usually called using #includeForm in a page, which modifies the behavior of "Edit this * page" button to direct for Form mode (inline) * * @param topic page name of the form to execute * @return the content of the included page * @throws XWikiException if the include failed */ public String includeForm(String topic) throws XWikiException { return includeForm(topic, true); } /** * API to include a topic into another, optionnaly surrounding the content with {pre}{/pre} to * avoid future wiki rendering The topic is rendered fully in the context of itself * * @param topic page name of the topic to include * @param pre true to add {pre} {/pre} * @return the content of the included page * @throws XWikiException if the include failed */ public String includeTopic(String topic, boolean pre) throws XWikiException { if (pre) return "{pre}" + xwiki.include(topic, false, getXWikiContext()) + "{/pre}"; return xwiki.include(topic, false, getXWikiContext()); } /** * API to execute a form in the context of an including topic, optionnaly surrounding the * content with {pre}{/pre} to avoid future wiki rendering The rendering is evaluated in the * context of the including topic All velocity variables are the one of the including topic This * api is usually called using #includeForm in a page, which modifies the behavior of "Edit this * page" button to direct for Form mode (inline) * * @param topic page name of the form to execute * @param pre true to add {pre} {/pre} * @return the content of the included page * @throws XWikiException if the include failed */ public String includeForm(String topic, boolean pre) throws XWikiException { if (pre) return "{pre}" + xwiki.include(topic, true, getXWikiContext()) + "{/pre}"; return xwiki.include(topic, true, getXWikiContext()); } /** * API to check rights on the current document for the current user * * @param level right to check (view, edit, comment, delete) * @return true if right is granted/false if not */ public boolean hasAccessLevel(String level) { try { return xwiki.getRightService().hasAccessLevel(level, getXWikiContext().getUser(), getXWikiContext().getDoc().getFullName(), getXWikiContext()); } catch (Exception e) { return false; } } /** * v * API to check rights on a document for a given user * * @param level right to check (view, edit, comment, delete) * @param user user for which to check the right * @param docname document on which to check the rights 
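 * <p>Illustrative check (the user and document names are placeholders):</p>
 * <pre><code>
 * #if($xwiki.hasAccessLevel("edit", "XWiki.JohnDoe", "Main.WebHome"))
 *   ...
 * #end
 * </code></pre>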
* @return true if right is granted/false if not */ public boolean hasAccessLevel(String level, String user, String docname) { try { return xwiki.getRightService() .hasAccessLevel(level, user, docname, getXWikiContext()); } catch (Exception e) { return false; } } /** * API to render a text in the context of a document * * @param text text to render * @param doc the text is evaluated in the content of this document * @return evaluated content * @throws XWikiException if the evaluation went wrong */ public String renderText(String text, Document doc) throws XWikiException { return xwiki.getRenderingEngine().renderText(text, doc.getDoc(), getXWikiContext()); } /** * API to render a chunk (difference between two versions * * @param chunk difference between versions to render * @param doc document to use as a context for rendering * @return resuilt of the rendering */ public String renderChunk(Chunk chunk, Document doc) { return renderChunk(chunk, false, doc); } /** * API to render a chunk (difference between two versions * * @param chunk difference between versions to render * @param doc document to use as a context for rendering * @param source true to render the difference as wiki source and not as wiki rendered text * @return resuilt of the rendering */ public String renderChunk(Chunk chunk, boolean source, Document doc) { StringBuffer buf = new StringBuffer(); chunk.toString(buf, "", "\n"); if (source == true) return buf.toString(); try { return xwiki.getRenderingEngine().renderText(buf.toString(), doc.getDoc(), getXWikiContext()); } catch (Exception e) { return buf.toString(); } } /** * API to list the current spaces in thiswiki * * @return a list for strings reprenseting the spaces * @throws XWikiException if something went wrong */ public List getSpaces() throws XWikiException { return xwiki.getSpaces(getXWikiContext()); } /** * API to list all documents in a space * * @param SpaceName space tolest * @return A list of strings to lest the document * @throws XWikiException if the loading went wrong */ public List getSpaceDocsName(String SpaceName) throws XWikiException { return xwiki.getSpaceDocsName(SpaceName, getXWikiContext()); } /** * API to retrieve a java object with the current date * * @return the current date */ public Date getCurrentDate() { return xwiki.getCurrentDate(); } /** * API to retrieve a java object with the current date * * @return the current date */ public Date getDate() { return xwiki.getCurrentDate(); } /** * API to retrieve the time delta in milliseconds between the current date and the time passed * as parameter. * * @param time * @return delta of the time in milliseconds */ public int getTimeDelta(long time) { return xwiki.getTimeDelta(time); } /** * API to convert a date from a time in milliseconds since 01/01/1970 to a Java Date Object * * @param time time in milliseconds since 1970, 00:00:00 GMT * @return Date object */ public Date getDate(long time) { return xwiki.getDate(time); } /** * API to split a text to an array of texts, according to a separator * * @param str original text * @param sep separator characters. 
The separator is one or more of the separator characters * @return An array of the splitted text */ public String[] split(String str, String sep) { return xwiki.split(str, sep); } /** * API to retrieve an exception stack trace in a String * * @param e Exception to retrieve the stack trace from * @return Text showing the exception stack trace */ public String printStrackTrace(Throwable e) { return xwiki.printStrackTrace(e); } /** * API to retrieve the current encoding of the wiki engine The encoding is stored in xwiki.cfg * Default encoding is ISO-8891-1 * * @return encoding active in this wiki */ public String getEncoding() { return xwiki.getEncoding(); } /** * API to retrieve a NULL object This is usefull in Velocity where there is no real null object * for comparaisons * * @return A null Object */ public Object getNull() { return null; } /** * API to retrieve a New Line character This is usefull in Velocity where there is no real new * line character for inclusion in texts * * @return A new line character */ public String getNl() { return "\n"; } /** * API to retrieve the URL of an attached file in a Wiki Document The URL is generated * differently depending on the environement (Servlet, Portlet, PDF, etc..) The URL generation * can be modified by implementing a new XWikiURLFactory object For compatibility with any * target environement (and especially the portlet environment) It is important to always use * the URL functions to generate URL and never hardcode URLs * * @param fullname page name which includes the attached file * @param filename attached filename to create a link for * @return a URL as a string pointing to the filename * @throws XWikiException if the URL could not be generated properly */ public String getAttachmentURL(String fullname, String filename) throws XWikiException { return xwiki.getAttachmentURL(fullname, filename, getXWikiContext()); } /** * API to retrieve the URL of an a Wiki Document in view mode The URL is generated differently * depending on the environement (Servlet, Portlet, PDF, etc..) The URL generation can be * modified by implementing a new XWikiURLFactory object For compatibility with any target * environement (and especially the portlet environment) It is important to always use the URL * functions to generate URL and never hardcode URLs * * @param fullname page name which includes the attached file * @return a URL as a string pointing to the wiki document in view mode * @throws XWikiException if the URL could not be generated properly */ public String getURL(String fullname) throws XWikiException { return xwiki.getURL(fullname, "view", getXWikiContext()); } /** * API to retrieve the URL of an a Wiki Document in any mode The URL is generated differently * depending on the environement (Servlet, Portlet, PDF, etc..) The URL generation can be * modified by implementing a new XWikiURLFactory object For compatibility with any target * environement (and especially the portlet environment) It is important to always use the URL * functions to generate URL and never hardcode URLs * * @param fullname page name which includes the attached file * @param action mode in which to access the document (view/edit/save/..). Any valid XWiki * action is possible. 
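 * <p>Illustrative call (the document name is a placeholder):</p>
 * <pre><code>
 * #set($editUrl = $xwiki.getURL("Main.WebHome", "edit"))
 * </code></pre>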
* @return a URL as a string pointing to the wiki document in view mode * @throws XWikiException if the URL could not be generated properly */ public String getURL(String fullname, String action) throws XWikiException { return xwiki.getURL(fullname, action, getXWikiContext()); } /** * API to retrieve the URL of an a Wiki Document in any mode, optionally adding a query string * The URL is generated differently depending on the environement (Servlet, Portlet, PDF, etc..) * The URL generation can be modified by implementing a new XWikiURLFactory object The query * string will be modified to be added in the way the environement needs it It is important to * not add the query string parameter manually after a URL Some environements will not accept * this (like the Portlet environement) * * @param fullname page name which includes the attached file * @param action mode in which to access the document (view/edit/save/..). Any valid XWiki * action is possible. * @param querystring Query String to provide in the usual mode (name1=value1&name2=value=2) * including encoding. * @return a URL as a string pointing to the wiki document in view mode * @throws XWikiException if the URL could not be generated properly */ public String getURL(String fullname, String action, String querystring) throws XWikiException { return xwiki.getURL(fullname, action, querystring, getXWikiContext()); } /** * @see #getExoService(String) * @deprecated use {@link #getExoService(String)} instead */ public java.lang.Object getService(String className) throws XWikiException { return getExoService(className); } /** * Privileged API to access an eXo Platform service from the Wiki Engine * * @param className eXo classname to retrieve the service from * @return A object representing the service or null if the user doesn't have programming * rights * @throws XWikiException if the service cannot be loaded * @since 1.1 Beta 1 */ public java.lang.Object getExoService(String className) throws XWikiException { java.lang.Object service = null; if (hasProgrammingRights()) { service = xwiki.getExoService(className); } return service; } /** * @see #getExoPortalService(String) * @deprecated use {@link #getExoPortalService(String)} instead */ public java.lang.Object getPortalService(String className) throws XWikiException { return getExoPortalService(className); } /** * Privileged API to access an eXo Platform Portal service from the Wiki Engine * * @param className eXo classname to retrieve the service from * @return A object representing the service or null if the user doesn't have programming * rights * @throws XWikiException if the service cannot be loaded * @since 1.1 Beta 1 */ public java.lang.Object getExoPortalService(String className) throws XWikiException { java.lang.Object portalService = null; if (hasProgrammingRights()) { portalService = xwiki.getExoPortalService(className); } return portalService; } /** * API to retrieve an List object This is usefull is velocity where you cannot create objects * * @return a java.util.ArrayList object casted to List */ public List getArrayList() { return new ArrayList(); } /** * API to retrieve an Map object This is usefull is velocity where you cannot create objects * * @return a java.util.HashMap object casted to Map */ public Map getHashMap() { return new HashMap(); } public Map getTreeMap() { return new TreeMap(); } /** * API to sort a list over standard comparator. 
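 * <p>Illustrative use together with {@link #getArrayList()} (the values are placeholders):</p>
 * <pre><code>
 * #set($list = $xwiki.getArrayList())
 * #set($ok = $list.add("banana"))
 * #set($ok = $list.add("apple"))
 * #set($sorted = $xwiki.sort($list))  ## [apple, banana]
 * </code></pre>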
Elements need to be mutally comparable and * implement the Comparable interface * * @param list List to sort * @return the sorted list (in the same oject) * @see Collections void sort(List list) */ public List sort(List list) { Collections.sort(list); return list; } public Number toNumber(Object o) { try { return new Long(o.toString()); } catch (Exception e) { return null; } } /** * API to generate a random string * * @param size Desired size of the string * @return the generated string */ public String generateRandomString(int size) { return xwiki.generateRandomString(size); } /** * API to Outpout an BufferedImage object into the response outputstream Once this function has * been called, not further action is possible Users should set $context.setFinished(true) to * avoid template output The image is outpout as image/jpeg * * @param image BufferedImage to output * @throws IOException exception if the output fails */ public void outputImage(BufferedImage image) throws IOException { JPEGImageEncoder encoder; OutputStream ostream = getXWikiContext().getResponse().getOutputStream(); encoder = JPEGCodec.createJPEGEncoder(ostream); encoder.encode(image); ostream.flush(); } /** * API to access the current starts for the Wiki for a specific action It retrieves the number * of times the action was performed for the whole wiki The statistics module need to be * activated (xwiki.stats=1 in xwiki.cfg) * * @param action action for which to retrieve statistics (view/save/download) * @return A DocumentStats object with number of actions performed, unique visitors, number of * visits * @deprecated use {@link #getStatsService()} instead */ public DocumentStats getCurrentMonthXWikiStats(String action) { return getXWikiContext().getWiki().getStatsService(getXWikiContext()).getDocMonthStats( "", action, new Date(), getXWikiContext()); } /** * API to retrieve a viewable referer text for a referer Referers are URL where users have * clicked on a link to an XWiki page Search engine referer URLs are transformed to a nicer view * (Google: search query string) For other URL the http:// part is stripped * * @param referer referer URL to transform * @return A viewable string */ public String getRefererText(String referer) { try { return xwiki.getRefererText(referer, getXWikiContext()); } catch (Exception e) { return ""; } } /** * API to retrieve a viewable referer text for a referer with a maximum length Referers are URL * where users have clicked on a link to an XWiki page Search engine referer URLs are * transformed to a nicer view (Google: search query string) For other URL the http:// part is * stripped * * @param referer referer URL to transform * @param length Maximum length. "..." 
is added to the end of the text * @return A viewable string */ public String getShortRefererText(String referer, int length) { try { return xwiki.getRefererText(referer, getXWikiContext()).substring(0, length); } catch (Exception e) { return xwiki.getRefererText(referer, getXWikiContext()); } } /** * Deprecated API which was retrieving the SQL to represent the fullName Document field * depending on the database used This is not needed anymore and returns 'doc.fullName' for all * databases * * @deprecated * @return "doc.fullName" */ public String getFullNameSQL() { return xwiki.getFullNameSQL(); } /** * API to retrieve a link to the User Name page displayed for the first name and last name of * the user The link will link to the page on the wiki where the user is registered (in virtual * wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user) { return xwiki.getUserName(user, null, getXWikiContext()); } /** * API to retrieve a link to the User Name page displayed with a custom view The link will link * to the page on the wiki where the user is registered (in virtual wiki mode) The formating is * done using the format parameter which can contain velocity scripting and access all * properties of the User profile using variables ($first_name $last_name $email $city) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user, String format) { return xwiki.getUserName(user, format, getXWikiContext()); } /** * API to retrieve a link to the User Name page displayed for the first name and last name of * the user The link will link to the page on the local wiki even if the user is registered on a * different wiki (in virtual wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), null, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, null, getXWikiContext()); } } /** * API to retrieve a link to the User Name page displayed with a custom view The link will link * to the page on the local wiki even if the user is registered on a different wiki (in virtual * wiki mode) The formating is done using the format parameter which can contain velocity * scripting and access all properties of the User profile using variables ($first_name * $last_name $email $city) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user, String format) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), format, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, format, getXWikiContext()); } } /** * API to retrieve a text representing the user with the first name and last name of the user * With the link param set to false it will not link to the user 
page With the link param set to * true, the link will link to the page on the wiki where the user was registered (in virtual * wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user, boolean link) { return xwiki.getUserName(user, null, link, getXWikiContext()); } /** * API to retrieve a text representing the user with a custom view With the link param set to * false it will not link to the user page With the link param set to true, the link will link * to the page on the wiki where the user was registered (in virtual wiki mode) The formating is * done using the format parameter which can contain velocity scripting and access all * properties of the User profile using variables ($first_name $last_name $email $city) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getUserName(String user, String format, boolean link) { return xwiki.getUserName(user, format, link, getXWikiContext()); } /** * API to retrieve a text representing the user with the first name and last name of the user * With the link param set to false it will not link to the user page With the link param set to * true, the link will link to the page on the local wiki even if the user is registered on a * different wiki (in virtual wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user, boolean link) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), null, link, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, null, link, getXWikiContext()); } } /** * API to retrieve a text representing the user with a custom view The formating is done using * the format parameter which can contain velocity scripting and access all properties of the * User profile using variables ($first_name $last_name $email $city) With the link param set to * false it will not link to the user page With the link param set to true, the link will link * to the page on the local wiki even if the user is registered on a different wiki (in virtual * wiki mode) * * @param user Fully qualified username as retrieved from $context.user (XWiki.LudovicDubost) * @param format formatting to be used ("$first_name $last_name", "$first_name") * @param link false to not add an HTML link to the user profile * @return The first name and last name fields surrounded with a link to the user page */ public String getLocalUserName(String user, String format, boolean link) { try { return xwiki.getUserName(user.substring(user.indexOf(":") + 1), format, link, getXWikiContext()); } catch (Exception e) { return xwiki.getUserName(user, format, link, getXWikiContext()); } } public User getUser() { return xwiki.getUser(getXWikiContext()); } public User getUser(String username) { return xwiki.getUser(username, getXWikiContext()); } /** * API allowing to format a date according to 
the default Wiki setting The date format is * provided in the 'dateformat' parameter of the XWiki Preferences * * @param date date object to format * @return A string with the date formating from the default Wiki setting */ public String formatDate(Date date) { return xwiki.formatDate(date, null, getXWikiContext()); } /** * API allowing to format a date according to a custom format The date format is from * java.text.SimpleDateFormat Example: "dd/MM/yyyy HH:mm:ss" or "d MMM yyyy" If the format is * invalid the default format will be used to show the date * * @param date date to format * @param format format of the date to be used * @return the formatted date * @see java.text.SimpleDateFormat */ public String formatDate(Date date, String format) { return xwiki.formatDate(date, format, getXWikiContext()); } /* Allow to read user setting providing the user timezone All dates will be expressed with this timezone @return the timezone */ public String getUserTimeZone() { return xwiki.getUserTimeZone(context); } /** * Returns a plugin from the plugin API. Plugin Rights can be verified. Note that although * this API is a duplicate of {@link #getPlugin(String)} it used to provide an easy access * from Velocity to XWiki plugins. Indeed Velocity has a feature in that if a class has * a get method, using the dot notation will automatically call the get method for the class. * See http://velocity.apache.org/engine/releases/velocity-1.5/user-guide.html#propertylookuprules. * This this allows the following constructs: * <code>$xwiki.pluginName.somePluginMethod()</code> * * @param name Name of the plugin to retrieve (either short of full class name) * @return a plugin object */ public Api get(String name) { return xwiki.getPluginApi(name, getXWikiContext()); } /** * Returns a plugin from the plugin API. Plugin Rights can be verified. 
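 * <p>Illustrative lookup (the plugin name is a placeholder and must match a plugin that is
 * actually installed on the wiki):</p>
 * <pre><code>
 * #set($plugin = $xwiki.getPlugin("lucene"))
 * </code></pre>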
* * @param name Name of the plugin to retrieve (either short of full class name) * @return a plugin object */ public Api getPlugin(String name) { return xwiki.getPluginApi(name, getXWikiContext()); } /** * Returns the recently visited pages for a specific action * * @param action ("view" or "edit") * @param size how many recent actions to retrieve * @return a ArrayList of document names * @deprecated use {@link #getStatsService()} instead */ public java.util.Collection getRecentActions(String action, int size) { XWikiStatsService stats = getXWikiContext().getWiki().getStatsService(getXWikiContext()); if (stats == null) return Collections.EMPTY_LIST; return stats.getRecentActions(action, size, getXWikiContext()); } /** * Returns the Advertisement system from the preferences * * @return "google" or "none" */ public String getAdType() { return xwiki.getAdType(getXWikiContext()); } /** * Returns the Advertisement client ID from the preferences * * @return an Ad affiliate ID */ public String getAdClientId() { return xwiki.getAdClientId(getXWikiContext()); } /** * Retrieves a int from a String * * @param str String to convert to int * @return the int or zero in case of exception */ public int parseInt(String str) { try { return Integer.parseInt(str); } catch (Exception e) { return 0; } } /** * Retrieves a int from a String * * @param str String to convert to int * @return the int or zero in case of exception */ public Integer parseInteger(String str) { return new Integer(parseInt(str)); } /** * Retrieves a long from a String * * @param str String to convert to long * @return the long or zero in case of exception */ public long parseLong(String str) { try { return Long.parseLong(str); } catch (Exception e) { return 0; } } /** * Retrieves a float from a String * * @param str String to convert to float * @return the float or zero in case of exception */ public float parseFloat(String str) { try { return Float.parseFloat(str); } catch (Exception e) { return 0; } } /** * Retrieves a double from a String * * @param str String to convert to double * @return the double or zero in case of exception */ public double parseDouble(String str) { try { return Double.parseDouble(str); } catch (Exception e) { return 0; } } /** * Returns the content of an HTTP/HTTPS URL protected using Basic Authentication * * @param surl url to retrieve * @param username username for the basic authentication * @param password password for the basic authentication * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl, String username, String password) throws IOException { try { return xwiki.getURLContent(surl, username, password, context); } catch (Exception e) { LOG.warn("Failed to retrieve content from [" + surl + "]", e); return ""; } } /** * Returns the content of an HTTP/HTTPS URL * * @param surl url to retrieve * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl) throws IOException { try { return xwiki.getURLContent(surl, context); } catch (Exception e) { LOG.warn("Failed to retrieve content from [" + surl + "]", e); return ""; } } /** * Returns the content of an HTTP/HTTPS URL protected using Basic Authentication * * @param surl url to retrieve * @param username username for the basic authentication * @param password password for the basic authentication * @param timeout manuel timeout in milliseconds * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl, String username, 
String password, int timeout) throws IOException { try { return xwiki.getURLContent(surl, username, password, timeout, xwiki.getHttpUserAgent(context)); } catch (Exception e) { return ""; } } /** * Returns the content of an HTTP/HTTPS URL * * @param surl url to retrieve * @param timeout manuel timeout in milliseconds * @return Content of the specified URL * @throws IOException */ public String getURLContent(String surl, int timeout) throws IOException { try { return xwiki.getURLContent(surl, timeout, xwiki.getHttpUserAgent(context)); } catch (Exception e) { return ""; } } /** * Returns the content of an HTTP/HTTPS URL protected using Basic Authentication as Bytes * * @param surl url to retrieve * @param username username for the basic authentication * @param password password for the basic authentication * @return Content of the specified URL * @throws IOException */ public byte[] getURLContentAsBytes(String surl, String username, String password) throws IOException { try { return xwiki.getURLContentAsBytes(surl, username, password, context); } catch (Exception e) { return null; } } /** * Returns the content of an HTTP/HTTPS URL as Bytes * * @param surl url to retrieve * @return Content of the specified URL * @throws IOException */ public byte[] getURLContentAsBytes(String surl) throws IOException { try { return xwiki.getURLContentAsBytes(surl, context); } catch (Exception e) { return null; } } /** * Filters text to be include in = or like clause in SQL * * @param text text to filter * @return filtered text */ public String sqlfilter(String text) { return Utils.SQLFilter(text); } /** * Returns the list of Macros documents in the specified content * * @param defaultweb Default Web to use for relative path names * @param content Content to parse * @return ArrayList of document names */ public List getIncludedMacros(String defaultweb, String content) { return xwiki.getIncludedMacros(defaultweb, content, getXWikiContext()); } /** * returns true if xwiki.readonly is set in the configuration file * * @return the value of xwiki.isReadOnly() * @see com.xpn.xwiki.XWiki */ public boolean isReadOnly() { return xwiki.isReadOnly(); } /** * Priviledged API to set/unset the readonly status of the Wiki After setting this to true no * writing to the database will be performed All Edit buttons will be removed and save actions * disabled This is used for maintenance purposes * * @param ro true to set read-only mode/false to unset */ public void setReadOnly(boolean ro) { if (hasAdminRights()) { xwiki.setReadOnly(ro); } } /** * Priviledge API to regenerate the links/backlinks table Normally links and backlinks are * stored when a page is modified This function will regenerate all the backlinks This function * can be long to run * * @throws XWikiException exception if the generation fails */ public void refreshLinks() throws XWikiException { if (hasAdminRights()) { xwiki.refreshLinks(getXWikiContext()); } } /** * API to check if the backlinks feature is active * Backlinks are activated in xwiki.cfg or in the XWiki Preferences * @return true if the backlinks feature is active * @throws XWikiException exception if the preference could not be retrieved */ public boolean hasBacklinks() throws XWikiException { return xwiki.hasBacklinks(getXWikiContext()); } /** * API to check if the tags feature is active. 
* Tags are activated in xwiki.cfg or in the XWiki Preferences * @return true if the tags feature is active, false otherwise * @throws XWikiException exception if the preference could not be retrieved */ public boolean hasTags() throws XWikiException { return xwiki.hasTags(getXWikiContext()); } /** * API to check if the edit comment feature is active * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean hasEditComment() { return xwiki.hasEditComment(context); } /** * API to check if the edit comment field is shown in the edit form * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean isEditCommentFieldHidden() { return xwiki.isEditCommentFieldHidden(context); } /** * API to check if the edit comment is suggested (prompted once by Javascript if empty) * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean isEditCommentSuggested() { return xwiki.isEditCommentSuggested(context); } /** * API to check if the edit comment is mandatory (prompted by Javascript if empty) * Edit comments are activated in xwiki.cfg or in the XWiki Preferences * @return */ public boolean isEditCommentMandatory() { return xwiki.isEditCommentMandatory(context); } /** * API to check if the minor edit feature is active * minor edit is activated in xwiki.cfg or in the XWiki Preferences */ public boolean hasMinorEdit() { return xwiki.hasMinorEdit(context); } /** * API to check if the recycle bin feature is active * recycle bin is activated in xwiki.cfg or in the XWiki Preferences */ public boolean hasRecycleBin() { return xwiki.hasRecycleBin(context); } /** * API to rename a page (experimental) Rights are necessary to edit the source and target page * All objects and attachments ID are modified in the process to link to the new page name * * @param doc page to rename * @param newFullName target page name to move the information to * @throws XWikiException exception if the rename fails */ public boolean renamePage(Document doc, String newFullName) { try { if (xwiki.exists(newFullName, getXWikiContext()) && !xwiki.getRightService().hasAccessLevel("delete", getXWikiContext().getUser(), newFullName, getXWikiContext())) return false; if (xwiki.getRightService().hasAccessLevel("edit", getXWikiContext().getUser(), doc.getFullName(), getXWikiContext())) { xwiki.renamePage(doc.getFullName(), newFullName, getXWikiContext()); } } catch (XWikiException e) { return false; } return true; } /** * Retrieves the current editor preference for the request The preference is first looked up in * the user preference and then in the space and wiki preference * * @return "wysiwyg" or "text" */ public String getEditorPreference() { return xwiki.getEditorPreference(getXWikiContext()); } /** * Priviledged API to retrieve an object instanciated from groovy code in a String Groovy * scripts compilation is cached * * @param script script containing a Groovy class definition (public class MyClass { ... }) * @return An object instanciating this class * @throws XWikiException */ public Object parseGroovyFromString(String script) throws XWikiException { if (hasProgrammingRights()) return xwiki.parseGroovyFromString(script, getXWikiContext()); return "groovy_missingrights"; } /** * Priviledged API to retrieve an object instanciated from groovy code in a String Groovy * scripts compilation is cached * * @param fullname // script containing a Groovy class definition (public class MyClass { ... 
}) * @return An object instanciating this class * @throws XWikiException */ public Object parseGroovyFromPage(String fullname, String jarWikiPage) throws XWikiException { XWikiDocument doc = xwiki.getDocument(fullname, getXWikiContext()); if (xwiki.getRightService().hasProgrammingRights(doc, getXWikiContext())) return xwiki.parseGroovyFromString(doc.getContent(), jarWikiPage, getXWikiContext()); return "groovy_missingrights"; } /** * Priviledged API to retrieve an object instanciated from groovy code in a String Groovy * scripts compilation is cached * * @param fullname // script containing a Groovy class definition (public class MyClass { ... }) * @return An object instanciating this class * @throws XWikiException */ public Object parseGroovyFromPage(String fullname) throws XWikiException { XWikiDocument doc = xwiki.getDocument(fullname, getXWikiContext()); if (xwiki.getRightService().hasProgrammingRights(doc, getXWikiContext())) return xwiki.parseGroovyFromString(doc.getContent(), getXWikiContext()); return "groovy_missingrights"; } /** * API to get the macro list from the XWiki Preferences The macro list are the macros available * from the Macro Mapping System * * @return String with each macro on each line */ public String getMacroList() { return xwiki.getMacroList(getXWikiContext()); } /** * API to check if using which toolbars in Wysiwyg editor * * @return a string value */ public String getWysiwygToolbars() { return xwiki.getWysiwygToolbars(getXWikiContext()); } /** * API to create an object from the request The parameters are the ones that are created from * doc.display("field","edit") calls * * @param className XWiki Class Name to create the object from * @return a BaseObject wrapped in an Object * @throws XWikiException exception if the object could not be read */ public com.xpn.xwiki.api.Object getObjectFromRequest(String className) throws XWikiException { return new com.xpn.xwiki.api.Object(xwiki.getObjectFromRequest(className, getXWikiContext()), getXWikiContext()); } /** * API to create an empty document * * @return an XWikiDocument wrapped in a Document */ public Document createDocument() { return new XWikiDocument().newDocument(getXWikiContext()); } /** * API to convert the username depending on the configuration The username can be converted from * email to a valid XWiki page name hidding the email address The username can be then used to * login and link to the right user page * * @param username username to use for login * @return converted wiki page name for this username */ public String convertUsername(String username) { return xwiki.convertUsername(username, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class This field * data can then be used to generate an XWiki Query showing a table with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchColumns(String className, XWikiQuery query) throws XWikiException { return xwiki.displaySearchColumns(className, "", query, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class, optionally * adding a prefix This field data can then be used to generate an XWiki Query showing a table * with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param prefix 
Prefix to add to the field name * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchColumns(String className, String prefix, XWikiQuery query) throws XWikiException { return xwiki.displaySearchColumns(className, prefix, query, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class This field * data can then be used to generate the order element of an XWiki Query showing a table with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchOrder(String className, XWikiQuery query) throws XWikiException { return xwiki.displaySearchOrder(className, "", query, getXWikiContext()); } /** * API to display a select box for the list of available field for a specific class, optionally * adding a prefix This field data can then be used to generate the order element of an XWiki Query showing a table * with the relevant data * * @param className XWiki Class Name to display the list of columns for * @param prefix Prefix to add to the field name * @param query Query to pre-select the currently selected columns * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearchOrder(String className, String prefix, XWikiQuery query) throws XWikiException { return xwiki.displaySearchOrder(className, prefix, query, getXWikiContext()); } /** * API to display a field in search mode for a specific class without preselected values This * field data can then be used to generate an XWiki Query showing a table with the relevant data * * @param fieldname field name in the class * @param className class name to display the field from * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearch(String fieldname, String className) throws XWikiException { return xwiki.displaySearch(fieldname, className, getXWikiContext()); } /** * API to display a field in search mode for a specific class with preselected values This field * data can then be used to generate an XWiki Query showing a table with the relevant data * * @param fieldname field name in the class * @param className class name to display the field from * @param criteria XWikiCriteria object (usually the XWikiQuery object) to take the preselected * values from * @return text of the select field * @throws XWikiException exception is a failure occured */ public String displaySearch(String fieldname, String className, XWikiCriteria criteria) throws XWikiException { return xwiki.displaySearch(fieldname, className, criteria, getXWikiContext()); } /** * API to display a field in search mode for a specific class with preselected values, * optionally adding a prefix to the field name This field data can then be used to generate an * XWiki Query showing a table with the relevant data * * @param fieldname field name in the class * @param className class name to display the field from * @param prefix prefix to add to the field name * @param criteria XWikiCriteria object (usually the XWikiQuery object) to take the preselected * values from * @return text of the select field * @throws XWikiException exception is a failure occured */ public String 
displaySearch(String fieldname, String className, String prefix, XWikiCriteria criteria) throws XWikiException { return xwiki.displaySearch(fieldname, className, prefix, criteria, getXWikiContext()); } /** * API to run a search from an XWikiQuery Object An XWikiQuery object can be created from a * request using the createQueryFromRequest function * * @param query query to run the search for * @return A list of document names matching the query * @throws XWikiException exception is a failure occured */ public List search(XWikiQuery query) throws XWikiException { return xwiki.search(query, getXWikiContext()); } /** * API to create a query from a request Object The request object is the result of a form * created from the displaySearch() and displaySearchColumns() functions * * @param className class name to create the query from * @return an XWikiQuery object matching the selected values in the request object * @throws XWikiException exception is a failure occured */ public XWikiQuery createQueryFromRequest(String className) throws XWikiException { return xwiki.createQueryFromRequest(className, getXWikiContext()); } /** * API to run a search from an XWikiQuery Object and display it as a HTML table An XWikiQuery * object can be created from a request using the createQueryFromRequest function * * @param query query to run the search for * @return An HTML table showing the result * @throws XWikiException exception is a failure occured */ public String searchAsTable(XWikiQuery query) throws XWikiException { return xwiki.searchAsTable(query, getXWikiContext()); } /** * API to get the Property object from a class based on a property path A property path looks * like XWiki.ArticleClass_fieldname * * @param propPath Property path * @return a PropertyClass object from a BaseClass object */ public com.xpn.xwiki.api.PropertyClass getPropertyClassFromName(String propPath) { return new PropertyClass(xwiki.getPropertyClassFromName(propPath, getXWikiContext()), getXWikiContext()); } /** * Generates a unique page name based on initial page name and already existing pages * * @param name * @return a unique page name */ public String getUniquePageName(String name) { return xwiki.getUniquePageName(name, getXWikiContext()); } /** * Generates a unique page name based on initial page name and already existing pages * * @param space * @param name * @return a unique page name */ public String getUniquePageName(String space, String name) { return xwiki.getUniquePageName(space, name, getXWikiContext()); } /** * Cleans up the page name to make it valid * * @param name * @return A valid page name */ public String clearName(String name) { return xwiki.clearName(name, getXWikiContext()); } /** * Inserts a tooltip using toolTip.js * * @param html HTML viewed * @param message HTML Tooltip message * @param params Parameters in Javascropt added to the tooltip config * @return HTML with working tooltip */ public String addTooltip(String html, String message, String params) { return xwiki.addTooltip(html, message, params, getXWikiContext()); } /** * Inserts a tooltip using toolTip.js * * @param html HTML viewed * @param message HTML Tooltip message * @return HTML with working tooltip */ public String addTooltip(String html, String message) { return xwiki.addTooltip(html, message, getXWikiContext()); } /** * Inserts the tooltip Javascript * * @return */ public String addTooltipJS() { return xwiki.addTooltipJS(getXWikiContext()); } /* * Inserts a Mandatory asterix */ public String addMandatory() { return 
xwiki.addMandatory(getXWikiContext()); } /* * Clear accents */ public String clearAccents(String text) { return Util.noaccents(text); } /** * Get the XWiki Class object defined in the passed Document name. * * <p>Note: This method doesn't require any rights for accessing the passed Document (as * opposed to the {@link com.xpn.xwiki.api.Document#getxWikiClass()} method which * does require to get a Document object first. This is thus useful in cases where * the calling code doesn't have the access right to the specified Document. It is * safe because there are no sensitive data stored in a Class definition. * </p> * * @param documentName the name of the document for which to get the Class object. * For example "XWiki.XWikiPreferences" * @return the XWiki Class object defined in the passed Document name. If the passed Document * name points to a Document with no Class defined then an empty Class object is * returned (i.e. a Class object with no properties). * @throws XWikiException if the passed document name doesn't point to a valid Document */ public Class getClass(String documentName) throws XWikiException { // TODO: The implementation should be done in com.xpn.xwiki.XWiki as this class should // delegate all implementations to that Class. return new Class(xwiki.getDocument(documentName, context).getxWikiClass(), context); } /** * Provides an absolute counter * @param name Counter name * @return String */ public String getCounter(String name) { XWikiEngineContext econtext = context.getEngineContext(); Integer counter = (Integer) econtext.getAttribute(name); if (counter==null) { counter = new Integer(0); } counter = new Integer(counter.intValue() + 1); econtext.setAttribute(name, counter); return counter.toString(); } /** * Check authentication from request and set according persitent login information * If it fails user is unlogged * @return null if failed, non null XWikiUser if sucess * @throws XWikiException */ public XWikiUser checkAuth() throws XWikiException { return context.getWiki().getAuthService().checkAuth(context); } /** * Check authentication from username and password and set according persitent login information * If it fails user is unlogged * @param username username to check * @param password password to check * @param rememberme "1" if you want to remember the login accross navigator restart * @return null if failed, non null XWikiUser if sucess * @throws XWikiException */ public XWikiUser checkAuth(String username, String password, String rememberme) throws XWikiException { return context.getWiki().getAuthService().checkAuth(username, password, rememberme, context); } /** * Add a and b because velocity operations are not always working * @param a * @param b * @return a+b */ public int add(int a, int b) { return a+b; } /** * Add a and b because velocity operations are not working with longs * @param a * @param b * @return a+b */ public long add(long a, long b) { return a+b; } /** * Add a and b because velocity operations are not working with longs * @param a * @param b * @return a+b */ public String add(String a, String b) { long c = Long.parseLong(a) + Long.parseLong(b); return "" + c; } /** * Access statistics api * * @return a StatsService instance that can be used to retrieve different xwiki statistics */ public StatsService getStatsService() { return this.statsService; } }
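The lenient number helpers in the API class above (parseInt, parseLong, parseFloat, parseDouble) all follow the same pattern: swallow the parse exception and fall back to zero, which is convenient when the methods are called from Velocity templates where exception handling is awkward. A minimal, self-contained sketch of that pattern follows; the class and method names are illustrative, not part of the XWiki API.

public class LenientParseDemo {
    // Parse an int, returning 0 instead of throwing on malformed input.
    static int parseIntOrZero(String str) {
        try {
            return Integer.parseInt(str);
        } catch (NumberFormatException e) {
            return 0;
        }
    }

    public static void main(String[] args) {
        System.out.println(parseIntOrZero("42"));   // 42
        System.out.println(parseIntOrZero("oops")); // 0, no exception reaches the template
    }
}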
XWIKI-1930: XWiki.searchDocuments(String, boolean, int, int) does not use "distinctbylanguage" parameter git-svn-id: cfa2b40e478804c47c05d0f328c574ec5aa2b82e@6314 f329d543-caf0-0310-9063-dda96c69346f
xwiki-core/src/main/java/com/xpn/xwiki/api/XWiki.java
XWIKI-1930: XWiki.searchDocuments(String, boolean, int, int) does not use "distinctbylanguage" parameter
<ide><path>wiki-core/src/main/java/com/xpn/xwiki/api/XWiki.java <ide> public List searchDocuments(String wheresql, boolean distinctbylanguage) <ide> throws XWikiException <ide> { <del> return wrapDocs(xwiki.getStore().searchDocuments(wheresql, getXWikiContext())); <add> return wrapDocs(xwiki.getStore().searchDocuments(wheresql, distinctbylanguage, getXWikiContext())); <ide> } <ide> <ide> /** <ide> public List searchDocuments(String wheresql, boolean distinctbylanguage, int nb, int start) <ide> throws XWikiException <ide> { <del> return wrapDocs(xwiki.getStore().searchDocuments(wheresql, nb, start, getXWikiContext())); <add> return wrapDocs(xwiki.getStore().searchDocuments(wheresql, distinctbylanguage, nb, start, getXWikiContext())); <ide> } <ide> <ide> /**
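Both hunks in the diff above fix the same mistake: the searchDocuments overloads accepted a distinctbylanguage flag but the delegating call to the store dropped it, so callers always got the default behaviour. A small illustrative sketch of that "wrapper drops a parameter" pattern and its fix, in plain Java rather than the actual XWiki classes:

import java.util.Collections;
import java.util.List;

public class ForwardFlagDemo {
    static class Store {
        List<String> search(String whereSql, boolean distinctByLanguage) {
            // A real store would honour the flag in its query.
            return Collections.emptyList();
        }
    }

    private final Store store = new Store();

    // Buggy wrapper: the caller's flag is silently ignored.
    List<String> searchDocumentsBuggy(String whereSql, boolean distinctByLanguage) {
        return store.search(whereSql, false);
    }

    // Fixed wrapper, mirroring the patch: forward the caller's flag to the store.
    List<String> searchDocuments(String whereSql, boolean distinctByLanguage) {
        return store.search(whereSql, distinctByLanguage);
    }

    public static void main(String[] args) {
        new ForwardFlagDemo().searchDocuments("doc.language = ''", true);
    }
}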
Java
epl-1.0
504235193c38e4123065f9d5789350308cb50907
0
riuvshin/che-plugins,riuvshin/che-plugins,riuvshin/che-plugins,riuvshin/che-plugins
/******************************************************************************* * Copyright (c) 2012-2015 Codenvy, S.A. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Codenvy, S.A. - initial API and implementation *******************************************************************************/ package org.eclipse.che.ide.ext.git.server.nativegit; import org.eclipse.che.api.core.ServerException; import org.eclipse.che.api.git.CredentialsProvider; import org.eclipse.che.api.git.GitException; import org.eclipse.che.api.git.UserCredential; import org.eclipse.che.api.git.shared.GitUser; import org.eclipse.che.api.user.server.dao.PreferenceDao; import org.eclipse.che.commons.env.EnvironmentContext; import org.eclipse.che.commons.user.User; import javax.inject.Inject; import javax.inject.Named; import javax.inject.Singleton; import java.net.URI; import java.net.URISyntaxException; import java.util.Map; import static com.google.common.base.Strings.isNullOrEmpty; import static org.eclipse.che.dto.server.DtoFactory.newDto; /** * Credentials provider for Che * * @author Alexander Garagatyi * @author Valeriy Svydenko */ @Singleton public class CheAccessTokenCredentialProvider implements CredentialsProvider { private static String OAUTH_PROVIDER_NAME = "che"; private final String cheHostName; private PreferenceDao preferenceDao; @Inject public CheAccessTokenCredentialProvider(@Named("api.endpoint") String apiEndPoint, PreferenceDao preferenceDao) throws URISyntaxException { this.preferenceDao = preferenceDao; this.cheHostName = new URI(apiEndPoint).getHost(); } @Override public UserCredential getUserCredential() throws GitException { String token = EnvironmentContext.getCurrent() .getUser() .getToken(); if (token != null) { return new UserCredential(token, "x-che", OAUTH_PROVIDER_NAME); } return null; } @Override public GitUser getUser() throws GitException { User user = EnvironmentContext.getCurrent().getUser(); GitUser gitUser = newDto(GitUser.class); if (user.isTemporary()) { gitUser.setEmail("[email protected]"); gitUser.setName("Anonymous"); } else { String name = null; String email = null; try { Map<String, String> preferences = preferenceDao.getPreferences(EnvironmentContext.getCurrent().getUser().getId(), "git.committer.\\w+"); name = preferences.get("git.committer.name"); email = preferences.get("git.committer.email"); } catch (ServerException e) { //ignored } gitUser.setName(isNullOrEmpty(name) ? "Anonymous" : name); gitUser.setEmail(isNullOrEmpty(email) ? "[email protected]" : email); } return gitUser; } @Override public String getId() { return OAUTH_PROVIDER_NAME; } @Override public boolean canProvideCredentials(String url) { return url.contains(cheHostName); } }
plugin-git/che-plugin-git-provider-che/src/main/java/org/eclipse/che/ide/ext/git/server/nativegit/CheAccessTokenCredentialProvider.java
/******************************************************************************* * Copyright (c) 2012-2015 Codenvy, S.A. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Codenvy, S.A. - initial API and implementation *******************************************************************************/ package org.eclipse.che.ide.ext.git.server.nativegit; import org.eclipse.che.api.core.ServerException; import org.eclipse.che.api.git.CredentialsProvider; import org.eclipse.che.api.git.GitException; import org.eclipse.che.api.git.UserCredential; import org.eclipse.che.api.git.shared.GitUser; import org.eclipse.che.api.user.server.dao.PreferenceDao; import org.eclipse.che.commons.env.EnvironmentContext; import org.eclipse.che.commons.user.User; import javax.inject.Inject; import javax.inject.Named; import javax.inject.Singleton; import java.net.URI; import java.net.URISyntaxException; import java.util.Map; import static com.google.common.base.Strings.isNullOrEmpty; import static org.eclipse.che.dto.server.DtoFactory.newDto; /** * Credentials provider for Che * * @author Alexander Garagatyi * @author Valeriy Svydenko */ @Singleton public class CheAccessTokenCredentialProvider implements CredentialsProvider { private final String cheHostName; private PreferenceDao preferenceDao; private static String OAUTH_PROVIDER_NAME = "che"; @Inject public CheAccessTokenCredentialProvider(@Named("api.endpoint") String apiEndPoint, PreferenceDao preferenceDao) throws URISyntaxException { this.preferenceDao = preferenceDao; this.cheHostName = new URI(apiEndPoint).getHost(); } @Override public UserCredential getUserCredential() throws GitException { String token = EnvironmentContext.getCurrent() .getUser() .getToken(); if (token != null) { return new UserCredential(token, "x-che", OAUTH_PROVIDER_NAME); } return null; } @Override public GitUser getUser() throws GitException { User user = EnvironmentContext.getCurrent().getUser(); GitUser gitUser = newDto(GitUser.class); if (user.isTemporary()) { gitUser.setEmail("[email protected]"); gitUser.setName("Anonymous"); } else { String name = null; String email = null; try { Map<String, String> preferences = preferenceDao.getPreferences(EnvironmentContext.getCurrent().getUser().getId(), "git.committer.\\w+"); name = preferences.get("git.committer.name"); email = preferences.get("git.committer.email"); } catch (ServerException e) { //ignored } gitUser.setName(isNullOrEmpty(name) ? "Anonymous" : name); gitUser.setEmail(isNullOrEmpty(email) ? "[email protected]" : email); } return gitUser; } @Override public String getId() { return OAUTH_PROVIDER_NAME; } @Override public boolean canProvideCredentials(String url) { return url.contains(cheHostName); } }
IDEX-3038; change naming from codenvy to che;
plugin-git/che-plugin-git-provider-che/src/main/java/org/eclipse/che/ide/ext/git/server/nativegit/CheAccessTokenCredentialProvider.java
IDEX-3038; change naming from codenvy to che;
<ide><path>lugin-git/che-plugin-git-provider-che/src/main/java/org/eclipse/che/ide/ext/git/server/nativegit/CheAccessTokenCredentialProvider.java <ide> */ <ide> @Singleton <ide> public class CheAccessTokenCredentialProvider implements CredentialsProvider { <add> <add> private static String OAUTH_PROVIDER_NAME = "che"; <ide> private final String cheHostName; <ide> private PreferenceDao preferenceDao; <del> private static String OAUTH_PROVIDER_NAME = "che"; <ide> <ide> @Inject <ide> public CheAccessTokenCredentialProvider(@Named("api.endpoint") String apiEndPoint,
JavaScript
mit
5f3535f577c94ea60c1407ac1be31156493d1927
0
joegarb/joegarbcom,joegarb/joegarbcom
var gulp = require('gulp'), concat = require('gulp-concat'), runSequence = require('run-sequence'), del = require('del'), rename = require('gulp-rename'); gulp.task('clean', function() { return del(['dist']); }); gulp.task('build:js', function() { // Bundle JS files // todo: cache busting return gulp.src([ 'src/js/*.js', 'bower_components/angular-css/angular-css.min.js' ]).pipe(concat('bundle.js')) .pipe(gulp.dest('dist')); }); gulp.task('build:htaccess:prod', function() { // Use the production version of the .htaccess gulp.src('src/.htaccess.production') .pipe(rename('.htaccess')) .pipe(gulp.dest('dist')); }); gulp.task('build:htaccess', function() { // Just copy the .htaccess since it is already the dev/staging version gulp.src('src/.htaccess') .pipe(gulp.dest('dist')); }); gulp.task('build:static', function() { // Copy static content to the output folder gulp.src([ 'src/**', // Exclude JS files already bundled and copied '!src/js{,/**}', ]).pipe(gulp.dest('dist')); }); gulp.task('build:prod', (done) => { runSequence( 'clean', 'build:js', 'build:htaccess:prod', 'build:static', done) }); gulp.task('build', (done) => { runSequence( 'clean', 'build:js', 'build:htaccess', 'build:static', done) }); gulp.task('default', ['build']);
gulpfile.js
var gulp = require('gulp'), concat = require('gulp-concat'), runSequence = require('run-sequence'), del = require('del'), rename = require('gulp-rename'); gulp.task('clean', function() { return del(['dist']); }); gulp.task('build:js', function() { // Bundle JS files // todo: cache busting return gulp.src([ 'src/js/*.js', 'bower_components/angular-css/angular-css.min.js' ]).pipe(concat('bundle.js')) .pipe(gulp.dest('dist')); }); gulp.task('build:htaccess:prod', function() { // Use the production version of the .htaccess gulp.src('src/.htaccess.production') .pipe(rename('.htaccess')) .pipe(gulp.dest('dist')); }); gulp.task('build:htaccess', function() { // Just copy the .htaccess since it is already the dev/staging version gulp.src('src/.htaccess') .pipe(gulp.dest('dist')); }); gulp.task('build:static', function() { // Copy static content to the output folder gulp.src([ 'src/**', // Exclude JS files already bundled and copied '!src/js{,/**}', ]).pipe(gulp.dest('dist')); }); gulp.task('build:prod', (done) => { runSequence( 'clean', 'build:js', 'build:htaccess:prod', 'build:static', done) }); gulp.task('build', (done) => { runSequence( 'clean', 'build:js', 'build:htaccess', 'build:static', done) }); gulp.task('default', ['build']);
Untabify
gulpfile.js
Untabify
<ide><path>ulpfile.js <ide> var gulp = require('gulp'), <del> concat = require('gulp-concat'), <del> runSequence = require('run-sequence'), <del> del = require('del'), <del> rename = require('gulp-rename'); <add> concat = require('gulp-concat'), <add> runSequence = require('run-sequence'), <add> del = require('del'), <add> rename = require('gulp-rename'); <ide> <ide> gulp.task('clean', function() { <del> return del(['dist']); <add> return del(['dist']); <ide> }); <ide> <ide> gulp.task('build:js', function() { <del> // Bundle JS files <del> // todo: cache busting <add> // Bundle JS files <add> // todo: cache busting <ide> return gulp.src([ <del> 'src/js/*.js', <del> 'bower_components/angular-css/angular-css.min.js' <del> ]).pipe(concat('bundle.js')) <add> 'src/js/*.js', <add> 'bower_components/angular-css/angular-css.min.js' <add> ]).pipe(concat('bundle.js')) <ide> .pipe(gulp.dest('dist')); <ide> }); <ide> <ide> gulp.task('build:htaccess:prod', function() { <del> // Use the production version of the .htaccess <del> gulp.src('src/.htaccess.production') <del> .pipe(rename('.htaccess')) <del> .pipe(gulp.dest('dist')); <add> // Use the production version of the .htaccess <add> gulp.src('src/.htaccess.production') <add> .pipe(rename('.htaccess')) <add> .pipe(gulp.dest('dist')); <ide> }); <ide> <ide> gulp.task('build:htaccess', function() { <del> // Just copy the .htaccess since it is already the dev/staging version <del> gulp.src('src/.htaccess') <del> .pipe(gulp.dest('dist')); <add> // Just copy the .htaccess since it is already the dev/staging version <add> gulp.src('src/.htaccess') <add> .pipe(gulp.dest('dist')); <ide> }); <ide> <ide> gulp.task('build:static', function() { <del> // Copy static content to the output folder <del> gulp.src([ <del> 'src/**', <del> // Exclude JS files already bundled and copied <del> '!src/js{,/**}', <del> ]).pipe(gulp.dest('dist')); <add> // Copy static content to the output folder <add> gulp.src([ <add> 'src/**', <add> // Exclude JS files already bundled and copied <add> '!src/js{,/**}', <add> ]).pipe(gulp.dest('dist')); <ide> }); <ide> <ide> gulp.task('build:prod', (done) => { <del> runSequence( <del> 'clean', <del> 'build:js', <del> 'build:htaccess:prod', <del> 'build:static', <del> done) <add> runSequence( <add> 'clean', <add> 'build:js', <add> 'build:htaccess:prod', <add> 'build:static', <add> done) <ide> }); <ide> <ide> gulp.task('build', (done) => { <del> runSequence( <del> 'clean', <del> 'build:js', <del> 'build:htaccess', <del> 'build:static', <del> done) <add> runSequence( <add> 'clean', <add> 'build:js', <add> 'build:htaccess', <add> 'build:static', <add> done) <ide> }); <ide> <ide> gulp.task('default', ['build']);
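The diff above only swaps tab indentation for spaces in the gulpfile. For illustration, a minimal stand-alone sketch of the same untabify transformation, written in Java to match the other examples here; the file path argument and the four-space width are assumptions, not part of the original commit.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class UntabifyDemo {
    public static void main(String[] args) throws Exception {
        Path file = Paths.get(args[0]);              // file to convert, supplied by the caller
        String text = Files.readString(file);
        Files.writeString(file, text.replace("\t", "    ")); // one tab -> four spaces
    }
}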
JavaScript
mit
3b818c117a8396610de28e3bb752c3300a97576a
0
fffej/codekatas,fffej/codekatas,fffej/codekatas
"use strict"; var assert = require('assert'); var Yahtzee = function() { var dice = []; for (var i=0;i<5;++i) { dice.push(1 + (Math.random() * 6 | 0)); } return { dice: function() { return dice; } }; }; var categories = []; describe('yahtzee', function() { describe('five dice', function() { it('should consist of 5 values between 1 and 6', function() { var yahtzee = new Yahtzee(); assert(yahtzee); assert.equal(5, yahtzee.dice().length); for(var d=0;d<5;d++) { assert(yahtzee.dice()[d] > 0 && yahtzee.dice()[d] < 7); } }); }); describe('categories', function() { it('should have 15 categories', function() { assert.equal(15, categories.length); }); }); });
yahtzee/yahtzee.js
"use strict"; var assert = require('assert'); var Yahtzee = function() { var dice = []; for (var i=0;i<5;++i) { dice.push(1 + (Math.random() * 6 | 0)); } return { dice: function() { return dice; } }; }; describe('yahtzee', function() { describe('five dice', function() { it('should consist of 5 values between 1 and 6', function() { var yahtzee = new Yahtzee(); assert(yahtzee); assert.equal(5, yahtzee.dice().length); for(var d=0;d<5;d++) { assert(yahtzee.dice()[d] > 0 && yahtzee.dice()[d] < 7); } }); }); describe('categories', function() { it('should have 15 categories', function() { assert.equal(15, categories.length); }); }); });
define categories
yahtzee/yahtzee.js
define categories
<ide><path>ahtzee/yahtzee.js <ide> } <ide> }; <ide> }; <add> <add>var categories = []; <ide> <ide> describe('yahtzee', function() { <ide> describe('five dice', function() {
Java
mit
error: pathspec 'src/main/java/com/sunny/grokkingalgorithms/ctc/c2/DeleteMiddleOfLinkedListGivenReference.java' did not match any file(s) known to git
1730d47b34df26acb12d12c67f5d34f16e0cc238
1
sunnydas/grokkingalgos,sunnydas/grokkingalgos
package com.sunny.grokkingalgorithms.ctc.c2; import com.sunny.grokkingalgorithms.ctc.c2.util.LinkedListUtil; import com.sunny.grokkingalgorithms.ctc.c2.util.Node; /** * Created by sundas on 3/14/2018. */ public class DeleteMiddleOfLinkedListGivenReference { /* Given reference to a node , delete it from the linked list.If it is last element you will not be delete it. a->b->c->d delete c after deletion a->b->d */ /** * * @param middle */ public static void deleteNodeFromMiddle(Node middle){ if(middle != null && middle.next != null){ /* Copy next node's value into middle which is essentailly line overwriting existing middle's value and then delete next */ middle.data = middle.next.data; middle.next = middle.next.next; } } /** * * @param root * @return */ public static Node findMiddleOfLinkedList(Node root){ /* Once again we use the fast runner apporach. The fast runner covers twice as much noes as slow runner so when fast runner will be at the end, slow runner will be in the middle */ Node slowRunner = root; Node fastRunner = root; while(fastRunner != null && slowRunner != null){ slowRunner = slowRunner.next; fastRunner = fastRunner.next; if(fastRunner != null){ fastRunner = fastRunner.next; } } //Now slow runner is at the middle return slowRunner; } /** * * @param args */ public static void main(String[] args) { int[] input = new int[]{1,2,3,4,5,6,7,8}; Node root = LinkedListUtil.createLinkedList(input); LinkedListUtil.printLinkedList(root); Node middle = findMiddleOfLinkedList(root); System.out.println(middle.data); deleteNodeFromMiddle(middle); LinkedListUtil.printLinkedList(root); } }
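findMiddleOfLinkedList above relies on the classic fast/slow runner idea its comment describes: the fast pointer moves two nodes per step, the slow pointer one, so the slow pointer sits at the middle when the fast pointer runs off the end. A self-contained version of just that idea, without the repository's Node/LinkedListUtil helpers (names here are illustrative):

public class FastSlowRunnerDemo {
    static class Node {
        int data;
        Node next;
        Node(int data) { this.data = data; }
    }

    static Node middle(Node head) {
        Node slow = head, fast = head;
        while (fast != null && fast.next != null) {
            slow = slow.next;       // one step
            fast = fast.next.next;  // two steps
        }
        return slow;                // middle node (second of the two middles for even lengths)
    }

    public static void main(String[] args) {
        Node head = new Node(1);
        head.next = new Node(2);
        head.next.next = new Node(3);
        head.next.next.next = new Node(4);
        head.next.next.next.next = new Node(5);
        System.out.println(middle(head).data); // 5 nodes -> prints 3
    }
}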
src/main/java/com/sunny/grokkingalgorithms/ctc/c2/DeleteMiddleOfLinkedListGivenReference.java
Delete node from middle of linked list.
src/main/java/com/sunny/grokkingalgorithms/ctc/c2/DeleteMiddleOfLinkedListGivenReference.java
Delete node from middle of linked list.
<ide><path>rc/main/java/com/sunny/grokkingalgorithms/ctc/c2/DeleteMiddleOfLinkedListGivenReference.java <add>package com.sunny.grokkingalgorithms.ctc.c2; <add> <add>import com.sunny.grokkingalgorithms.ctc.c2.util.LinkedListUtil; <add>import com.sunny.grokkingalgorithms.ctc.c2.util.Node; <add> <add>/** <add> * Created by sundas on 3/14/2018. <add> */ <add>public class DeleteMiddleOfLinkedListGivenReference { <add> <add> /* <add> Given reference to a node , delete it from the linked list.If it is last element you <add> will not be delete it. <add> <add> a->b->c->d <add> delete c <add> after deletion <add> a->b->d <add> */ <add> <add> /** <add> * <add> * @param middle <add> */ <add> public static void deleteNodeFromMiddle(Node middle){ <add> if(middle != null <add> && middle.next != null){ <add> /* <add> Copy next node's value into middle which is essentailly line overwriting existing middle's value and then <add> delete next <add> */ <add> middle.data = middle.next.data; <add> middle.next = middle.next.next; <add> } <add> } <add> <add> /** <add> * <add> * @param root <add> * @return <add> */ <add> public static Node findMiddleOfLinkedList(Node root){ <add> /* <add> Once again we use the fast runner apporach. The fast runner covers twice as much noes as slow <add> runner so when fast runner will be at the end, slow runner will be in the middle <add> */ <add> Node slowRunner = root; <add> Node fastRunner = root; <add> while(fastRunner != null && slowRunner != null){ <add> slowRunner = slowRunner.next; <add> fastRunner = fastRunner.next; <add> if(fastRunner != null){ <add> fastRunner = fastRunner.next; <add> } <add> } <add> //Now slow runner is at the middle <add> return slowRunner; <add> } <add> <add> /** <add> * <add> * @param args <add> */ <add> public static void main(String[] args) { <add> int[] input = new int[]{1,2,3,4,5,6,7,8}; <add> Node root = LinkedListUtil.createLinkedList(input); <add> LinkedListUtil.printLinkedList(root); <add> Node middle = findMiddleOfLinkedList(root); <add> System.out.println(middle.data); <add> deleteNodeFromMiddle(middle); <add> LinkedListUtil.printLinkedList(root); <add> } <add>}
Java
apache-2.0
7a356fec6840201f85a71292c80d02fc27c98a09
0
PathVisio/pathvisio,PathVisio/pathvisio,PathVisio/pathvisio,markwoon/pathvisio,markwoon/pathvisio,markwoon/pathvisio,markwoon/pathvisio,PathVisio/pathvisio
// PathVisio, // a tool for data visualization and analysis using Biological Pathways // Copyright 2006-2007 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.pathvisio.model; import java.awt.Color; import java.awt.geom.Point2D; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.xml.XMLConstants; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.ValidatorHandler; import org.jdom.Attribute; import org.jdom.Content; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.Namespace; import org.jdom.input.JDOMParseException; import org.jdom.input.SAXBuilder; import org.jdom.output.Format; import org.jdom.output.SAXOutputter; import org.jdom.output.XMLOutputter; import org.pathvisio.debug.Logger; import org.pathvisio.model.GraphLink.GraphIdContainer; import org.pathvisio.model.PathwayElement.MAnchor; import org.pathvisio.model.PathwayElement.MPoint; import org.xml.sax.SAXException; /** * class responsible for interaction with Gpml format. * Contains all gpml-specific constants, * and should be the only class (apart from svgFormat) * that needs to import jdom * * @author Martijn * */ public class GpmlFormat implements PathwayImporter, PathwayExporter { /** * The factor that is used to convert pixel coordinates * to the GPML model coordinates. E.g. if you want to convert the * width from pixels to GPML model coordinates you use: * * double mWidth = width * pixel2model; */ public static final double pixel2model = 15; public static final Namespace GPML = Namespace.getNamespace("http://genmapp.org/GPML/2007"); public static final Namespace RDF = Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); public static final Namespace RDFS = Namespace.getNamespace("rdfs", "http://www.w3.org/2000/01/rdf-schema#"); public static final Namespace BIOPAX = Namespace.getNamespace("bp", "http://www.biopax.org/release/biopax-level2.owl#"); public static final Namespace OWL = Namespace.getNamespace("owl", "http://www.w3.org/2002/07/owl#"); /** * name of resource containing the gpml schema definition */ final private static String xsdFile = "GPML.xsd"; private static class AttributeInfo { /** * xsd validated type. Note that in the current implementation * we don't do anything with restrictions, only with the * base type. 
*/ public String schemaType; /** * default value for the attribute */ public String def; // default /** * use of the attribute: can be "required" or "optional" */ public String use; AttributeInfo (String _schemaType, String _def, String _use) { schemaType = _schemaType; def = _def; use = _use; } } static final Map<String, AttributeInfo> attributeInfo = initAttributeInfo(); static Map<String, AttributeInfo> initAttributeInfo() { Map<String, AttributeInfo> result = new HashMap<String, AttributeInfo>(); // IMPORTANT: this array has been generated from the xsd with // an automated perl script. Don't edit this directly, use the perl script instead. /* START OF AUTO-GENERATED CONTENT */ result.put("PublicationXref@ID", new AttributeInfo ("xsd:string", null, "required")); result.put("PublicationXref@Database", new AttributeInfo ("xsd:string", null, "required")); result.put("Comment@Source", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway.Graphics@BoardWidth", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Pathway.Graphics@BoardHeight", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Pathway.Graphics@WindowWidth", new AttributeInfo ("gpml:Dimension", "18000", "optional")); result.put("Pathway.Graphics@WindowHeight", new AttributeInfo ("gpml:Dimension", "12000", "optional")); result.put("Pathway@Name", new AttributeInfo ("gpml:NameType", null, "required")); result.put("Pathway@Organism", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Data-Source", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Version", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Author", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Maintainer", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Email", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Copyright", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Last-Modified", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("DataNode.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("DataNode.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("DataNode.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("DataNode.Graphics@Color", new AttributeInfo ("gpml:ColorType", null, "optional")); result.put("DataNode.Xref@Database", new AttributeInfo ("gpml:DatabaseType", null, "required")); result.put("DataNode.Xref@ID", new AttributeInfo ("gpml:NameType", null, "required")); result.put("DataNode@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("DataNode@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("DataNode@TextLabel", new AttributeInfo ("xsd:string", null, "required")); result.put("DataNode@BackpageHead", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode@GenMAPP-Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode@Type", new AttributeInfo ("gpml:DataNodeType", "Unknown", "optional")); result.put("Line.Graphics.Point@x", new AttributeInfo ("xsd:float", null, "required")); result.put("Line.Graphics.Point@y", new AttributeInfo ("xsd:float", null, 
"required")); result.put("Line.Graphics.Point@relX", new AttributeInfo ("xsd:float", "0", "optional")); result.put("Line.Graphics.Point@relY", new AttributeInfo ("xsd:float", "0", "optional")); result.put("Line.Graphics.Point@GraphRef", new AttributeInfo ("xsd:IDREF", null, "optional")); result.put("Line.Graphics.Point@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Line.Graphics.Point@Head", new AttributeInfo ("xsd:string", "Line", "optional")); result.put("Line.Graphics.Point@ArrowHead", new AttributeInfo ("xsd:string", "Line", "optional")); result.put("Line.Graphics.Anchor@position", new AttributeInfo ("xsd:float", null, "required")); result.put("Line.Graphics.Anchor@Shape", new AttributeInfo ("xsd:string", "LigandRound", "required")); result.put("Line.Graphics.Anchor@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Line.Graphics@Color", new AttributeInfo ("gpml:ColorType", "Black", "optional")); result.put("Line.Graphics.Segment@direction", new AttributeInfo ("gpml:string", null, "required")); result.put("Line.Graphics.Segment@length", new AttributeInfo ("gpml:float", null, "required")); result.put("Line.Graphics@ConnectorType", new AttributeInfo ("gpml:string", "Straight", "optional")); result.put("Line@Style", new AttributeInfo ("xsd:string", "Solid", "optional")); result.put("Label.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Label.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Label.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Label.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Label.Graphics@Color", new AttributeInfo ("gpml:ColorType", null, "optional")); result.put("Label.Graphics@FontName", new AttributeInfo ("xsd:string", "Arial", "optional")); result.put("Label.Graphics@FontStyle", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontDecoration", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontStrikethru", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontWeight", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontSize", new AttributeInfo ("xsd:nonNegativeInteger", "12", "optional")); result.put("Label@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Label@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Label@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("Label@TextLabel", new AttributeInfo ("xsd:string", null, "required")); result.put("Label@Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("Label@GenMAPP-Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("Label@Outline", new AttributeInfo ("xsd:string", "None", "optional")); result.put("Link.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Link.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Link.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Link.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Link.Graphics@Color", new AttributeInfo ("gpml:ColorType", null, "optional")); result.put("Link.Graphics@FontName", new AttributeInfo ("xsd:string", "Arial", "optional")); result.put("Link.Graphics@FontStyle", new 
AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontDecoration", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontStrikethru", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontWeight", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontSize", new AttributeInfo ("xsd:nonNegativeInteger", "12", "optional")); result.put("Link@Href", new AttributeInfo ("xsd:string", null, "optional")); result.put("Link@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Link@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Link@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("Link@TextLabel", new AttributeInfo ("xsd:string", null, "required")); result.put("Link@GenMAPP-Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("Shape.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Shape.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Shape.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Shape.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Shape.Graphics@Color", new AttributeInfo ("gpml:ColorType", "Black", "optional")); result.put("Shape.Graphics@Rotation", new AttributeInfo ("gpml:RotationType", "Top", "optional")); result.put("Shape.Graphics@FillColor", new AttributeInfo ("gpml:ColorType", "Transparent", "optional")); result.put("Shape@Type", new AttributeInfo ("xsd:string", null, "required")); result.put("Shape@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Shape@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Shape@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("Shape@Style", new AttributeInfo ("gpml:StyleType", "Solid", "optional")); result.put("Group@GroupId", new AttributeInfo ("xsd:ID", null, "required")); result.put("Group@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Group@TextLabel", new AttributeInfo("xsd:string", null, "optional")); result.put("Group@Style", new AttributeInfo ("gpml:GroupStyleType", "None", "optional")); result.put("InfoBox@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("InfoBox@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Legend@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Legend@CenterY", new AttributeInfo ("xsd:float", null, "required")); /* END OF AUTO-GENERATED CONTENT */ return result; }; /** * Sets a certain attribute value, * Does a basic check for some types, * throws an exception when you're trying to set an invalid value * If you're trying to set a default value, or an optional value to null, * the attribute is omitted, * leading to a leaner xml output. 
* * @param tag used for lookup in the defaults table * @param name used for lookup in the defaults table * @param el jdom element where this attribute belongs in * @param value value you wan't to check and set */ private static void setAttribute(String tag, String name, Element el, String value) throws ConverterException { String key = tag + "@" + name; if (!attributeInfo.containsKey(key)) throw new ConverterException("Trying to set invalid attribute " + key); AttributeInfo aInfo = attributeInfo.get(key); boolean isDefault = false; // here we start seeing if the attribute is equal to the // default value // if so, we can leave out the attribute from the jdom // altogether if (aInfo.use.equals("optional")) { if (aInfo.schemaType.equals("xsd:string") || aInfo.schemaType.equals("xsd:ID")) { if ((aInfo.def == null && value == null) || (aInfo.def != null && aInfo.def.equals(value)) || (aInfo.def == null && value != null && value.equals(""))) isDefault = true; } else if (aInfo.schemaType.equals("xsd:float") || aInfo.schemaType.equals("Dimension")) { Double x = Double.parseDouble(aInfo.def); Double y = Double.parseDouble(value); if (Math.abs(x - y) < 1e-6) isDefault = true; } } if (!isDefault) el.setAttribute(name, value); } /** * Gets a certain attribute value, * replaces it with a suitable default under certain conditions. * * @param tag used for lookup in the defaults table * @param name used for lookup in the defaults table * @param el jdom element to get the attribute from * @throws ConverterException */ private static String getAttribute(String tag, String name, Element el) throws ConverterException { String key = tag + "@" + name; if (!attributeInfo.containsKey(key)) throw new ConverterException("Trying to get invalid attribute " + key); AttributeInfo aInfo = attributeInfo.get(key); String result = ((el == null) ? aInfo.def : el.getAttributeValue(name, aInfo.def)); return result; } /** * The GPML xsd implies a certain ordering for children of the pathway element. * (e.g. DataNode always comes before LineShape, etc.) * * This Comparator can sort jdom Elements so that they are in the correct order * for the xsd. * * @author Martijn.vanIersel */ private static class ByElementName implements Comparator<Element> { // hashmap for quick lookups during sorting private HashMap<String, Integer> elementOrdering; // correctly ordered list of tag names, which are loaded into the hashmap in // the constructor. private final String[] elements = new String[] { "Comment", "BiopaxRef", "Graphics", "DataNode", "Line", "Label", "Shape", "Group", "InfoBox", "Legend", "Biopax" }; /* * Constructor */ public ByElementName() { elementOrdering = new HashMap<String, Integer>(); for (int i = 0; i < elements.length; ++i) { elementOrdering.put (elements[i], new Integer(i)); } } /* * As a comparison measure, returns difference of index of element names of a and b * in elements array. E.g: * Comment -> index 1 in elements array * Graphics -> index 2 in elements array. 
* If a.getName() is Comment and b.getName() is Graphics, returns 1-2 -> -1 */ public int compare(Element a, Element b) { return ((Integer)elementOrdering.get(a.getName())).intValue() - ((Integer)elementOrdering.get(b.getName())).intValue(); } } public static Document createJdom(Pathway data) throws ConverterException { Document doc = new Document(); Namespace ns = GPML; Element root = new Element("Pathway", ns); doc.setRootElement(root); List<Element> elementList = new ArrayList<Element>(); List<PathwayElement> pathwayElements = data.getDataObjects(); Collections.sort(pathwayElements); for (PathwayElement o : pathwayElements) { if (o.getObjectType() == ObjectType.MAPPINFO) { setAttribute("Pathway", "Name", root, o.getMapInfoName()); setAttribute("Pathway", "Data-Source", root, o.getMapInfoDataSource()); setAttribute("Pathway", "Version", root, o.getVersion()); setAttribute("Pathway", "Author", root, o.getAuthor()); setAttribute("Pathway", "Maintainer", root, o.getMaintainer()); setAttribute("Pathway", "Email", root, o.getEmail()); setAttribute("Pathway", "Copyright", root, o.getCopyright()); setAttribute("Pathway", "Last-Modified", root, o.getLastModified()); setAttribute("Pathway", "Organism", root, o.getOrganism()); updateComments(o, root); updateBiopaxRef(o, root); Element graphics = new Element("Graphics", ns); root.addContent(graphics); double[] size = o.getMBoardSize(); setAttribute("Pathway.Graphics", "BoardWidth", graphics, "" +size[0]); setAttribute("Pathway.Graphics", "BoardHeight", graphics, "" + size[1]); setAttribute("Pathway.Graphics", "WindowWidth", graphics, "" + o.getWindowWidth()); setAttribute("Pathway.Graphics", "WindowHeight", graphics, "" + o.getWindowHeight()); } else { Element e = createJdomElement(o, ns); if (e != null) elementList.add(e); } } // now sort the generated elements in the order defined by the xsd Collections.sort(elementList, new ByElementName()); for (Element e : elementList) { root.addContent(e); } return doc; } public static PathwayElement mapElement(Element e) throws ConverterException { return mapElement (e, null); } /** Create a single PathwayElement based on a piece of Jdom tree. Used also by Patch utility Pathway p may be null */ public static PathwayElement mapElement(Element e, Pathway p) throws ConverterException { String tag = e.getName(); int ot = ObjectType.getTagMapping(tag); if (ot == -1) { // do nothing. 
This could be caused by // tags <comment> or <graphics> that appear // as subtags of <pathway> return null; } PathwayElement o = PathwayElement.createPathwayElement(ot); if (p != null) { p.add (o); } switch (o.getObjectType()) { case ObjectType.DATANODE: mapShapeData(o, e, "DataNode"); mapColor(o, e); mapComments(o, e); mapDataNode(o, e); mapGraphId(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.LABEL: mapShapeData(o, e, "Label"); mapColor(o, e); mapLabelData(o, e); mapComments(o, e); mapGraphId(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.LINE: mapLineData(o, e); mapColor(o, e); mapComments(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.MAPPINFO: mapMappInfoData(o, e); mapBiopaxRef(o, e); mapComments(o, e); break; case ObjectType.SHAPE: mapShapeData(o, e, "Shape"); mapShapeColor (o, e); mapColor(o, e); mapComments(o, e); mapShapeType(o, e); mapGraphId(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.LEGEND: mapSimpleCenter(o, e); break; case ObjectType.INFOBOX: mapSimpleCenter (o, e); break; case ObjectType.GROUP: mapGroupRef(o, e); mapGroup (o, e); mapComments(o, e); mapBiopaxRef(o, e); break; case ObjectType.BIOPAX: mapBiopax(o, e); break; default: throw new ConverterException("Invalid ObjectType'" + tag + "'"); } return o; } private static void mapLineData(PathwayElement o, Element e) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); List<MPoint> mPoints = new ArrayList<MPoint>(); String startType = null; String endType = null; List<Element> pointElements = graphics.getChildren("Point", e.getNamespace()); for(int i = 0; i < pointElements.size(); i++) { Element pe = pointElements.get(i); MPoint mp = o.new MPoint( Double.parseDouble(getAttribute("Line.Graphics.Point", "x", pe)), Double.parseDouble(getAttribute("Line.Graphics.Point", "y", pe)) ); mPoints.add(mp); String ref = getAttribute("Line.Graphics.Point", "GraphRef", pe); if (ref == null) ref = ""; mp.setGraphRef(ref); if(i == 0) { startType = getAttribute("Line.Graphics.Point", "ArrowHead", pe); endType = getAttribute("Line.Graphics.Point", "Head", pe); } else if(i == pointElements.size() - 1) { /** read deprecated Head attribute for backwards compatibility. If an arrowhead attribute is present on the other point, it overrides this one. */ if (pe.getAttributeValue("ArrowHead") != null) { endType = getAttribute("Line.Graphics.Point", "ArrowHead", pe); } } } o.setMPoints(mPoints); String style = getAttribute("Line", "Style", e); o.setLineStyle ((style.equals("Solid")) ? LineStyle.SOLID : LineStyle.DASHED); o.setStartLineType (LineType.fromName(startType)); o.setEndLineType (LineType.fromName(endType)); String connType = getAttribute("Line.Graphics", "ConnectorType", graphics); o.setConnectorType(ConnectorType.fromName(connType)); //Map anchors List<Element> anchors = graphics.getChildren("Anchor", e.getNamespace()); for(Element ae : anchors) { double position = Double.parseDouble(getAttribute("Line.Graphics.Anchor", "position", ae)); MAnchor anchor = o.addMAnchor(position); mapGraphId(anchor, ae); String shape = getAttribute("Line.Graphics.Anchor", "Shape", ae); if(shape != null) { anchor.setShape(AnchorType.fromName(shape)); } } } private static void updateLineData(PathwayElement o, Element e) throws ConverterException { if(e != null) { setAttribute("Line", "Style", e, o.getLineStyle() == LineStyle.SOLID ? 
"Solid" : "Broken"); Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); List<MPoint> mPoints = o.getMPoints(); for(int i = 0; i < mPoints.size(); i++) { MPoint mp = mPoints.get(i); Element pe = new Element("Point", e.getNamespace()); jdomGraphics.addContent(pe); setAttribute("Line.Graphics.Point", "x", pe, Double.toString(mp.getX())); setAttribute("Line.Graphics.Point", "y", pe, Double.toString(mp.getY())); if (mp.getGraphRef() != null && !mp.getGraphRef().equals("")) { setAttribute("Line.Graphics.Point", "GraphRef", pe, mp.getGraphRef()); setAttribute("Line.Graphics.Point", "relX", pe, Double.toString(mp.getRelX())); setAttribute("Line.Graphics.Point", "relY", pe, Double.toString(mp.getRelY())); } if(i == 0) { setAttribute("Line.Graphics.Point", "ArrowHead", pe, o.getStartLineType().getName()); } else if(i == mPoints.size() - 1) { setAttribute("Line.Graphics.Point", "ArrowHead", pe, o.getEndLineType().getName()); } } for(MAnchor anchor : o.getMAnchors()) { Element ae = new Element("Anchor", e.getNamespace()); setAttribute("Line.Graphics.Anchor", "position", ae, Double.toString(anchor.getPosition())); setAttribute("Line.Graphics.Anchor", "Shape", ae, anchor.getShape().getName()); updateGraphId(anchor, ae); jdomGraphics.addContent(ae); } ConnectorType ctype = o.getConnectorType(); setAttribute("Line.Graphics", "ConnectorType", jdomGraphics, ctype.getName()); } } private static void mapColor(PathwayElement o, Element e) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); String scol = getAttribute(e.getName() + ".Graphics", "Color", graphics); o.setColor (gmmlString2Color(scol)); } private static void mapShapeColor(PathwayElement o, Element e) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); String scol = getAttribute("Shape.Graphics", "FillColor", graphics); if(scol.equals("Transparent")) { o.setTransparent (true); } else { o.setTransparent (false); o.setFillColor (gmmlString2Color(scol)); } } private static void updateColor(PathwayElement o, Element e) throws ConverterException { if(e != null) { Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); if(jdomGraphics != null) { setAttribute(e.getName() + ".Graphics", "Color", jdomGraphics, color2HexBin(o.getColor())); } } } private static void updateShapeColor(PathwayElement o, Element e) { if(e != null) { Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); if(jdomGraphics != null) { if (o.isTransparent()) jdomGraphics.setAttribute("FillColor", "Transparent"); else jdomGraphics.setAttribute("FillColor", color2HexBin(o.getFillColor())); } } } private static void mapComments(PathwayElement o, Element e) throws ConverterException { for (Object f : e.getChildren("Comment", e.getNamespace())) { o.addComment(((Element)f).getText(), getAttribute("Comment", "Source", (Element)f)); } } private static void updateComments(PathwayElement o, Element e) throws ConverterException { if(e != null) { for (PathwayElement.Comment c : o.getComments()) { Element f = new Element ("Comment", e.getNamespace()); f.setText (c.getComment()); setAttribute("Comment", "Source", f, c.getSource()); e.addContent(f); } } } private static void mapGraphId (GraphIdContainer o, Element e) { String id = e.getAttributeValue("GraphId"); if((id == null || id.equals("")) && o.getGmmlData() != null) { id = o.getGmmlData().getUniqueGraphId(); } o.setGraphId (id); } private static void updateGraphId (GraphIdContainer o, Element e) { String id = o.getGraphId(); // id has to be 
unique! if (id != null && !id.equals("")) { e.setAttribute("GraphId", o.getGraphId()); } } private static void mapGroupRef (PathwayElement o, Element e) { String id = e.getAttributeValue("GroupRef"); if(id != null && !id.equals("")) { o.setGroupRef (id); } } private static void updateGroupRef (PathwayElement o, Element e) { String id = o.getGroupRef(); if (id != null && !id.equals("")) { e.setAttribute("GroupRef", o.getGroupRef()); } } private static void mapGroup (PathwayElement o, Element e) throws ConverterException { //ID String id = e.getAttributeValue("GroupId"); if((id == null || id.equals("")) && o.getParent() != null) {id = o.getParent().getUniqueGroupId();} o.setGroupId (id); //GraphId mapGraphId(o, e); //Style o.setGroupStyle(GroupStyle.fromGpmlName(getAttribute("Group", "Style", e))); //Label o.setTextLabel (getAttribute("Group", "TextLabel", e)); } private static void updateGroup (PathwayElement o, Element e) throws ConverterException { //ID String id = o.createGroupId(); if (id != null && !id.equals("")) {e.setAttribute("GroupId", o.createGroupId());} //GraphId updateGraphId(o, e); //Style setAttribute("Group", "Style", e, GroupStyle.toGpmlName(o.getGroupStyle())); //Label setAttribute ("Group", "TextLabel", e, o.getTextLabel()); } private static void mapDataNode(PathwayElement o, Element e) throws ConverterException { o.setTextLabel (getAttribute("DataNode", "TextLabel", e)); o.setGenMappXref (getAttribute("DataNode", "GenMAPP-Xref", e)); o.setDataNodeType (getAttribute("DataNode", "Type", e)); o.setBackpageHead (getAttribute("DataNode", "BackpageHead", e)); Element xref = e.getChild ("Xref", e.getNamespace()); o.setGeneID (getAttribute("DataNode.Xref", "ID", xref)); o.setDataSource (DataSource.getByFullName (getAttribute("DataNode.Xref", "Database", xref))); } private static void updateDataNode(PathwayElement o, Element e) throws ConverterException { if(e != null) { setAttribute ("DataNode", "TextLabel", e, o.getTextLabel()); setAttribute ("DataNode", "GenMAPP-Xref", e, o.getGenMappXref()); setAttribute ("DataNode", "Type", e, o.getDataNodeType()); setAttribute ("DataNode", "BackpageHead", e, o.getBackpageHead()); Element xref = e.getChild("Xref", e.getNamespace()); String database = o.getDataSource() == null ? "" : o.getDataSource().getFullName(); setAttribute ("DataNode.Xref", "Database", xref, database == null ? 
"" : database); setAttribute ("DataNode.Xref", "ID", xref, o.getGeneID()); } } private static void mapSimpleCenter(PathwayElement o, Element e) { o.setMCenterX (Double.parseDouble(e.getAttributeValue("CenterX"))); o.setMCenterY (Double.parseDouble(e.getAttributeValue("CenterY"))); } private static void updateSimpleCenter(PathwayElement o, Element e) { if(e != null) { e.setAttribute("CenterX", Double.toString(o.getMCenterX())); e.setAttribute("CenterY", Double.toString(o.getMCenterY())); } } private static void mapShapeData(PathwayElement o, Element e, String base) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); o.setMCenterX (Double.parseDouble(getAttribute(base + ".Graphics", "CenterX", graphics))); o.setMCenterY (Double.parseDouble(getAttribute(base + ".Graphics", "CenterY", graphics))); o.setMWidth (Double.parseDouble(getAttribute(base + ".Graphics", "Width", graphics))); o.setMHeight (Double.parseDouble(getAttribute(base + ".Graphics", "Height", graphics))); } private static void updateShapeData(PathwayElement o, Element e, String base) throws ConverterException { if(e != null) { Element graphics = e.getChild("Graphics", e.getNamespace()); if(graphics !=null) { setAttribute(base + ".Graphics", "CenterX", graphics, "" + o.getMCenterX()); setAttribute(base + ".Graphics", "CenterY", graphics, "" + o.getMCenterY()); setAttribute(base + ".Graphics", "Width", graphics, "" + o.getMWidth()); setAttribute(base + ".Graphics", "Height", graphics, "" + o.getMHeight()); } } } private static void mapShapeType(PathwayElement o, Element e) throws ConverterException { o.setShapeType (ShapeType.fromGpmlName(getAttribute("Shape", "Type", e))); String style = getAttribute ("Shape", "Style", e); o.setLineStyle ((style.equals("Solid")) ? LineStyle.SOLID : LineStyle.DASHED); Element graphics = e.getChild("Graphics", e.getNamespace()); String rotation = getAttribute("Shape.Graphics", "Rotation", graphics); double result; if (rotation.equals("Top")) { result = 0.0; } else if (rotation.equals("Right")) { result = 0.5 * Math.PI; } else if (rotation.equals("Bottom")) { result = Math.PI; } else if (rotation.equals("Left")) { result = 1.5 * Math.PI; } else { result = Double.parseDouble(rotation); } o.setRotation (result); } private static void updateShapeType(PathwayElement o, Element e) throws ConverterException { if(e != null) { e.setAttribute("Type", o.getShapeType().getName()); setAttribute("Line", "Style", e, o.getLineStyle() == LineStyle.SOLID ? 
"Solid" : "Broken"); Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); if(jdomGraphics !=null) { jdomGraphics.setAttribute("Rotation", Double.toString(o.getRotation())); } } } private static void mapLabelData(PathwayElement o, Element e) throws ConverterException { o.setTextLabel (getAttribute("Label", "TextLabel", e)); Element graphics = e.getChild("Graphics", e.getNamespace()); o.setMFontSize (Integer.parseInt(graphics.getAttributeValue("FontSize"))); String fontWeight = getAttribute("Label.Graphics", "FontWeight", graphics); String fontStyle = getAttribute("Label.Graphics", "FontStyle", graphics); String fontDecoration = getAttribute("Label.Graphics", "FontDecoration", graphics); String fontStrikethru = getAttribute("Label.Graphics", "FontStrikethru", graphics); o.setBold (fontWeight != null && fontWeight.equals("Bold")); o.setItalic (fontStyle != null && fontStyle.equals("Italic")); o.setUnderline (fontDecoration != null && fontDecoration.equals("Underline")); o.setStrikethru (fontStrikethru != null && fontStrikethru.equals("Strikethru")); o.setFontName (getAttribute("Label.Graphics", "FontName", graphics)); String xref = getAttribute("Label", "Xref", e); if (xref == null) xref = ""; o.setGenMappXref(xref); String outline = getAttribute("Label", "Outline", e); o.setOutline (OutlineType.fromTag (outline)); } private static void updateLabelData(PathwayElement o, Element e) throws ConverterException { if(e != null) { setAttribute("Label", "TextLabel", e, o.getTextLabel()); setAttribute("Label", "Xref", e, o.getGenMappXref() == null ? "" : o.getGenMappXref()); setAttribute("Label", "Outline", e, o.getOutline().getTag()); Element graphics = e.getChild("Graphics", e.getNamespace()); if(graphics !=null) { setAttribute("Label.Graphics", "FontName", graphics, o.getFontName() == null ? "" : o.getFontName()); setAttribute("Label.Graphics", "FontWeight", graphics, o.isBold() ? "Bold" : "Normal"); setAttribute("Label.Graphics", "FontStyle", graphics, o.isItalic() ? "Italic" : "Normal"); setAttribute("Label.Graphics", "FontDecoration", graphics, o.isUnderline() ? "Underline" : "Normal"); setAttribute("Label.Graphics", "FontStrikethru", graphics, o.isStrikethru() ? 
"Strikethru" : "Normal"); setAttribute("Label.Graphics", "FontSize", graphics, Integer.toString((int)o.getMFontSize())); } } } private static void mapMappInfoData(PathwayElement o, Element e) throws ConverterException { o.setMapInfoName (getAttribute("Pathway", "Name", e)); o.setOrganism (getAttribute("Pathway", "Organism", e)); o.setMapInfoDataSource (getAttribute("Pathway", "Data-Source", e)); o.setVersion (getAttribute("Pathway", "Version", e)); o.setAuthor (getAttribute("Pathway", "Author", e)); o.setMaintainer (getAttribute("Pathway", "Maintainer", e)); o.setEmail (getAttribute("Pathway", "Email", e)); o.setLastModified (getAttribute("Pathway", "Last-Modified", e)); o.setCopyright (getAttribute("Pathway", "Copyright", e)); Element g = e.getChild("Graphics", e.getNamespace()); //Board size will be calculated // o.setMBoardWidth (Double.parseDouble(getAttribute("Pathway.Graphics", "BoardWidth", g))); // o.setMBoardHeight (Double.parseDouble(getAttribute("Pathway.Graphics", "BoardHeight", g))); o.setWindowWidth (Double.parseDouble(getAttribute("Pathway.Graphics", "WindowWidth", g))); o.setWindowHeight (Double.parseDouble(getAttribute("Pathway.Graphics", "WindowHeight", g))); } private static void mapBiopax(PathwayElement o, Element e) throws ConverterException { //this method clones all content, //getContent will leave them attached to the parent, which we don't want //We can safely remove them, since the JDOM element isn't used anymore after this method Element root = new Element("RDF", RDF); root.addNamespaceDeclaration(RDFS); root.addNamespaceDeclaration(RDF); root.addNamespaceDeclaration(OWL); root.addNamespaceDeclaration(BIOPAX); root.setAttribute(new Attribute("base", GPML.getURI() + "#", Namespace.XML_NAMESPACE)); //Element owl = new Element("Ontology", OWL); //owl.setAttribute(new Attribute("about", "", RDF)); //Element imp = new Element("imports", OWL); //imp.setAttribute(new Attribute("resource", BIOPAX.getURI(), RDF)); //owl.addContent(imp); //root.addContent(owl); root.addContent(e.cloneContent()); Document bp = new Document(root); o.setBiopax(bp); } private static void updateBiopax(PathwayElement o, Element e) throws ConverterException { Document bp = o.getBiopax(); if(e != null && bp != null) { List<Content> content = bp.getRootElement().cloneContent(); for(Content c : content) { if(c instanceof Element) { Element elm = (Element)c; if(elm.getNamespace().equals(BIOPAX)) { e.addContent(c); } else if(elm.getName().equals("RDF") && elm.getNamespace().equals(RDF)) { for(Object ce : elm.getChildren()) { if(((Element)ce).getNamespace().equals(BIOPAX)) { e.addContent((Element)ce); } } } else { Logger.log.info("Skipped non-biopax element" + c); } } } } } private static void mapBiopaxRef(PathwayElement o, Element e) throws ConverterException { for (Object f : e.getChildren("BiopaxRef", e.getNamespace())) { o.addBiopaxRef(((Element)f).getText()); } } private static void updateBiopaxRef(PathwayElement o, Element e) throws ConverterException { if(e != null) { for (String ref : o.getBiopaxRefs()) { Element f = new Element ("BiopaxRef", e.getNamespace()); f.setText (ref); e.addContent(f); } } } static public Element createJdomElement(PathwayElement o, Namespace ns) throws ConverterException { Element e = null; switch (o.getObjectType()) { case ObjectType.DATANODE: e = new Element("DataNode", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); e.addContent(new Element("Xref", ns)); updateDataNode(o, e); updateColor(o, e); updateShapeData(o, e, 
"DataNode"); updateGraphId(o, e); updateGroupRef(o, e); break; case ObjectType.SHAPE: e = new Element ("Shape", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); updateShapeColor(o, e); updateColor(o, e); updateShapeData(o, e, "Shape"); updateShapeType(o, e); updateGraphId(o, e); updateGroupRef(o, e); break; case ObjectType.LINE: e = new Element("Line", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); updateLineData(o, e); updateColor(o, e); updateGroupRef(o, e); break; case ObjectType.LABEL: e = new Element("Label", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); updateLabelData(o, e); updateColor(o, e); updateShapeData(o, e, "Label"); updateGraphId(o, e); updateGroupRef(o, e); break; case ObjectType.LEGEND: e = new Element ("Legend", ns); updateSimpleCenter (o, e); break; case ObjectType.INFOBOX: e = new Element ("InfoBox", ns); updateSimpleCenter (o, e); break; case ObjectType.GROUP: e = new Element ("Group", ns); updateGroup (o, e); updateGroupRef(o, e); updateComments(o, e); updateBiopaxRef(o, e); break; case ObjectType.BIOPAX: e = new Element ("Biopax", ns); updateBiopax(o, e); break; } if (e == null) { throw new ConverterException ("Error creating jdom element with objectType " + o.getObjectType()); } return e; } /** * Converts a string containing either a named color (as specified in gpml) or a hexbinary number * to an {@link Color} object * @param strColor */ public static Color gmmlString2Color(String strColor) { if(colorMappings.contains(strColor)) { double[] color = (double[])rgbMappings.get(colorMappings.indexOf(strColor)); return new Color((int)(255*color[0]),(int)(255*color[1]),(int)(255*color[2])); } else { try { strColor = padding(strColor, 6, '0'); int red = Integer.valueOf(strColor.substring(0,2),16); int green = Integer.valueOf(strColor.substring(2,4),16); int blue = Integer.valueOf(strColor.substring(4,6),16); return new Color(red,green,blue); } catch (Exception e) { Logger.log.error("while converting color: " + "Color " + strColor + " is not valid, element color is set to black", e); } } return new Color(0,0,0); } /** * Converts an {@link Color} object to a hexbinary string * @param color */ public static String color2HexBin(Color color) { String red = padding(Integer.toBinaryString(color.getRed()), 8, '0'); String green = padding(Integer.toBinaryString(color.getGreen()), 8, '0'); String blue = padding(Integer.toBinaryString(color.getBlue()), 8, '0'); String hexBinary = Integer.toHexString(Integer.valueOf(red + green + blue, 2)); return padding(hexBinary, 6, '0'); } /** * Prepends character c x-times to the input string to make it length n * @param s String to pad * @param n Number of characters of the resulting string * @param c character to append * @return string of length n or larger (if given string s > n) */ public static String padding(String s, int n, char c) { while(s.length() < n) { s = c + s; } return s; } public static final List<double[]> rgbMappings = Arrays.asList(new double[][] { {0, 1, 1}, // aqua {0, 0, 0}, // black {0, 0, 1}, // blue {1, 0, 1}, // fuchsia {.5, .5, .5,}, // gray {0, .5, 0}, // green {0, 1, 0}, // lime {.5, 0, 0}, // maroon {0, 0, .5}, // navy {.5, .5, 0}, // olive {.5, 0, .5}, // purple {1, 0, 0}, // red {.75, .75, .75},// silver {0, .5, .5}, // teal {1, 1, 1}, // white {1, 1, 0}, // yellow {0, 0, 0} // transparent (actually irrelevant) }); public static final List<String> colorMappings = 
Arrays.asList(new String[]{ "Aqua", "Black", "Blue", "Fuchsia", "Gray", "Green", "Lime", "Maroon", "Navy", "Olive", "Purple", "Red", "Silver", "Teal", "White", "Yellow", "Transparent" }); public void doImport(File file, Pathway pathway) throws ConverterException { readFromXml(pathway, file, true); } public void doExport(File file, Pathway pathway) throws ConverterException { writeToXml(pathway, file, true); } public String[] getExtensions() { return new String[] { "gpml", "xml" }; } public String getName() { return "GPML file"; } /** * Writes the JDOM document to the file specified * @param file the file to which the JDOM document should be saved * @param validate if true, validate the dom structure before writing to file. If there is a validation error, * or the xsd is not in the classpath, an exception will be thrown. */ static public void writeToXml(Pathway pwy, File file, boolean validate) throws ConverterException { Document doc = createJdom(pwy); //Validate the JDOM document if (validate) validateDocument(doc); // Get the XML code XMLOutputter xmlcode = new XMLOutputter(Format.getPrettyFormat()); Format f = xmlcode.getFormat(); f.setEncoding("ISO-8859-1"); f.setTextMode(Format.TextMode.PRESERVE); xmlcode.setFormat(f); //Open a filewriter try { FileWriter writer = new FileWriter(file); //Send XML code to the filewriter xmlcode.output(doc, writer); } catch (IOException ie) { throw new ConverterException(ie); } } /** * Writes the JDOM document to the outputstream specified * @param out the outputstream to which the JDOM document should be writed * @param validate if true, validate the dom structure before writing. If there is a validation error, * or the xsd is not in the classpath, an exception will be thrown. * @throws ConverterException */ static public void writeToXml(Pathway pwy, OutputStream out, boolean validate) throws ConverterException { Document doc = createJdom(pwy); //Validate the JDOM document if (validate) validateDocument(doc); // Get the XML code XMLOutputter xmlcode = new XMLOutputter(Format.getPrettyFormat()); Format f = xmlcode.getFormat(); f.setEncoding("ISO-8859-1"); f.setTextMode(Format.TextMode.PRESERVE); xmlcode.setFormat(f); try { //Send XML code to the outputstream xmlcode.output(doc, out); } catch (IOException ie) { throw new ConverterException(ie); } } static public void readFromXml(Pathway pwy, File file, boolean validate) throws ConverterException { FileReader inf; try { inf = new FileReader (file); } catch (FileNotFoundException e) { throw new ConverterException (e); } readFromXml (pwy, inf, validate); } static public void readFromXml(Pathway pwy, Reader in, boolean validate) throws ConverterException { // Start XML processing SAXBuilder builder = new SAXBuilder(false); // no validation when reading the xml file // try to read the file; if an error occurs, catch the exception and print feedback try { Logger.log.trace ("Build JDOM tree"); // build JDOM tree Document doc = builder.build(in); Logger.log.trace ("Start Validation"); if (validate) validateDocument(doc); // Copy the pathway information to a VPathway Element root = doc.getRootElement(); Logger.log.trace ("Copy map elements"); mapElement(root, pwy); // MappInfo // Iterate over direct children of the root element for (Object e : root.getChildren()) { mapElement((Element)e, pwy); } Logger.log.trace ("End copying map elements"); //Convert absolute point coordinates of linked points to //relative coordinates convertPointCoordinates(pwy); } catch(JDOMParseException pe) { throw new ConverterException 
(pe); } catch(JDOMException e) { throw new ConverterException (e); } catch(IOException e) { throw new ConverterException (e); } catch(NullPointerException e) { throw new ConverterException (e); } catch(IllegalArgumentException e) { throw new ConverterException (e); } catch(Exception e) { //Make all types of exceptions a ConverterException throw new ConverterException (e); } } private static void convertPointCoordinates(Pathway pathway) throws ConverterException { for(PathwayElement pe : pathway.getDataObjects()) { if(pe.getObjectType() == ObjectType.LINE) { String sr = pe.getStartGraphRef(); String er = pe.getEndGraphRef(); if(sr != null && !"".equals(sr)) { GraphIdContainer idc = pathway.getGraphIdContainer(sr); Point2D relative = idc.toRelativeCoordinate( new Point2D.Double( pe.getMStart().getRawX(), pe.getMStart().getRawY() ) ); pe.getMStart().setRelativePosition(relative.getX(), relative.getY()); } if(er != null && !"".equals(er)) { GraphIdContainer idc = pathway.getGraphIdContainer(er); Point2D relative = idc.toRelativeCoordinate( new Point2D.Double( pe.getMEnd().getRawX(), pe.getMEnd().getRawY() ) ); pe.getMEnd().setRelativePosition(relative.getX(), relative.getY()); } } } } /** * validates a JDOM document against the xml-schema definition specified by 'xsdFile' * @param doc the document to validate */ public static void validateDocument(Document doc) throws ConverterException { ClassLoader cl = Pathway.class.getClassLoader(); InputStream is = cl.getResourceAsStream(xsdFile); if(is != null) { Schema schema; try { SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); StreamSource ss = new StreamSource (is); schema = factory.newSchema(ss); ValidatorHandler vh = schema.newValidatorHandler(); SAXOutputter so = new SAXOutputter(vh); so.output(doc); // If no errors occur, the file is valid according to the gpml xml schema definition Logger.log.info("Document is valid according to the xml schema definition '" + xsdFile.toString() + "'"); } catch (SAXException se) { Logger.log.error("Could not parse the xml-schema definition", se); throw new ConverterException (se); } catch (JDOMException je) { Logger.log.error("Document is invalid according to the xml-schema definition!: " + je.getMessage(), je); XMLOutputter xmlcode = new XMLOutputter(Format.getPrettyFormat()); Logger.log.error("The invalid XML code:\n" + xmlcode.outputString(doc)); throw new ConverterException (je); } } else { Logger.log.error("Document is not validated because the xml schema definition '" + xsdFile + "' could not be found in classpath"); throw new ConverterException ("Document is not validated because the xml schema definition '" + xsdFile + "' could not be found in classpath"); } } }
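// --------------------------------------------------------------------------
// Illustrative sketch (not taken from the file above): the colour handling in
// gmmlString2Color / color2HexBin / padding amounts to left-padding a six-digit
// hex triplet and converting it to and from java.awt.Color. Class and method
// names below are hypothetical; the real code additionally resolves named GPML
// colours through rgbMappings/colorMappings and builds the hex string via a
// binary-string detour, but the observable round-trip is the same.
// --------------------------------------------------------------------------
import java.awt.Color;

public class GpmlColorSketch {

    // left-pad s with c until it is at least n characters long
    static String pad(String s, int n, char c) {
        StringBuilder b = new StringBuilder(s);
        while (b.length() < n) b.insert(0, c);
        return b.toString();
    }

    // "ff0080" -> Color(255, 0, 128); malformed input falls back to black,
    // mirroring the error handling described in gmmlString2Color
    static Color fromHex(String hex) {
        try {
            hex = pad(hex, 6, '0');
            return new Color(
                    Integer.valueOf(hex.substring(0, 2), 16),
                    Integer.valueOf(hex.substring(2, 4), 16),
                    Integer.valueOf(hex.substring(4, 6), 16));
        } catch (Exception e) {
            return new Color(0, 0, 0);
        }
    }

    // Color(255, 0, 128) -> "ff0080": each channel zero-padded to two hex digits
    static String toHex(Color c) {
        return pad(Integer.toHexString(c.getRed()), 2, '0')
                + pad(Integer.toHexString(c.getGreen()), 2, '0')
                + pad(Integer.toHexString(c.getBlue()), 2, '0');
    }

    public static void main(String[] args) {
        System.out.println(toHex(fromHex("ff0080"))); // prints ff0080
    }
}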
src/core/org/pathvisio/model/GpmlFormat.java
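// --------------------------------------------------------------------------
// Hedged usage sketch for the importer/exporter entry points defined in
// GpmlFormat.java: readFromXml and writeToXml are the static helpers shown in
// the source, and both validate against GPML.xsd when the flag is true. The
// no-argument Pathway constructor and the file names are assumptions made for
// illustration, not something confirmed by this file.
// --------------------------------------------------------------------------
import java.io.File;

import org.pathvisio.model.ConverterException;
import org.pathvisio.model.GpmlFormat;
import org.pathvisio.model.Pathway;

public class GpmlRoundTripSketch {
    public static void main(String[] args) throws ConverterException {
        Pathway pathway = new Pathway();                              // assumed no-arg constructor
        GpmlFormat.readFromXml(pathway, new File("in.gpml"), true);   // parse + validate
        GpmlFormat.writeToXml(pathway, new File("out.gpml"), true);   // validate + write
    }
}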
// PathVisio, // a tool for data visualization and analysis using Biological Pathways // Copyright 2006-2007 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.pathvisio.model; import java.awt.Color; import java.awt.geom.Point2D; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.xml.XMLConstants; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.ValidatorHandler; import org.jdom.Attribute; import org.jdom.Content; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.Namespace; import org.jdom.input.JDOMParseException; import org.jdom.input.SAXBuilder; import org.jdom.output.Format; import org.jdom.output.SAXOutputter; import org.jdom.output.XMLOutputter; import org.pathvisio.debug.Logger; import org.pathvisio.model.GraphLink.GraphIdContainer; import org.pathvisio.model.PathwayElement.MAnchor; import org.pathvisio.model.PathwayElement.MPoint; import org.xml.sax.SAXException; /** * class responsible for interaction with Gpml format. * Contains all gpml-specific constants, * and should be the only class (apart from svgFormat) * that needs to import jdom * * @author Martijn * */ public class GpmlFormat implements PathwayImporter, PathwayExporter { /** * The factor that is used to convert pixel coordinates * to the GPML model coordinates. E.g. if you want to convert the * width from pixels to GPML model coordinates you use: * * double mWidth = width * pixel2model; */ public static final double pixel2model = 15; public static final Namespace GPML = Namespace.getNamespace("http://genmapp.org/GPML/2007"); public static final Namespace RDF = Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"); public static final Namespace RDFS = Namespace.getNamespace("rdfs", "http://www.w3.org/2000/01/rdf-schema#"); public static final Namespace BIOPAX = Namespace.getNamespace("bp", "http://www.biopax.org/release/biopax-level2.owl#"); public static final Namespace OWL = Namespace.getNamespace("owl", "http://www.w3.org/2002/07/owl#"); /** * name of resource containing the gpml schema definition */ final private static String xsdFile = "GPML.xsd"; private static class AttributeInfo { /** * xsd validated type. Note that in the current implementation * we don't do anything with restrictions, only with the * base type. 
*/ public String schemaType; /** * default value for the attribute */ public String def; // default /** * use of the attribute: can be "required" or "optional" */ public String use; AttributeInfo (String _schemaType, String _def, String _use) { schemaType = _schemaType; def = _def; use = _use; } } static final Map<String, AttributeInfo> attributeInfo = initAttributeInfo(); static Map<String, AttributeInfo> initAttributeInfo() { Map<String, AttributeInfo> result = new HashMap<String, AttributeInfo>(); // IMPORTANT: this array has been generated from the xsd with // an automated perl script. Don't edit this directly, use the perl script instead. /* START OF AUTO-GENERATED CONTENT */ result.put("PublicationXref@ID", new AttributeInfo ("xsd:string", null, "required")); result.put("PublicationXref@Database", new AttributeInfo ("xsd:string", null, "required")); result.put("Comment@Source", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway.Graphics@BoardWidth", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Pathway.Graphics@BoardHeight", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Pathway.Graphics@WindowWidth", new AttributeInfo ("gpml:Dimension", "18000", "optional")); result.put("Pathway.Graphics@WindowHeight", new AttributeInfo ("gpml:Dimension", "12000", "optional")); result.put("Pathway@Name", new AttributeInfo ("gpml:NameType", null, "required")); result.put("Pathway@Organism", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Data-Source", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Version", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Author", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Maintainer", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Email", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Copyright", new AttributeInfo ("xsd:string", null, "optional")); result.put("Pathway@Last-Modified", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("DataNode.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("DataNode.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("DataNode.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("DataNode.Graphics@Color", new AttributeInfo ("gpml:ColorType", null, "optional")); result.put("DataNode.Xref@Database", new AttributeInfo ("gpml:DatabaseType", null, "required")); result.put("DataNode.Xref@ID", new AttributeInfo ("gpml:NameType", null, "required")); result.put("DataNode@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("DataNode@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("DataNode@TextLabel", new AttributeInfo ("xsd:string", null, "required")); result.put("DataNode@BackpageHead", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode@GenMAPP-Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("DataNode@Type", new AttributeInfo ("gpml:DataNodeType", "Unknown", "optional")); result.put("Line.Graphics.Point@x", new AttributeInfo ("xsd:float", null, "required")); result.put("Line.Graphics.Point@y", new AttributeInfo ("xsd:float", null, 
"required")); result.put("Line.Graphics.Point@relX", new AttributeInfo ("xsd:float", "0", "optional")); result.put("Line.Graphics.Point@relY", new AttributeInfo ("xsd:float", "0", "optional")); result.put("Line.Graphics.Point@GraphRef", new AttributeInfo ("xsd:IDREF", null, "optional")); result.put("Line.Graphics.Point@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Line.Graphics.Point@Head", new AttributeInfo ("xsd:string", "Line", "optional")); result.put("Line.Graphics.Point@ArrowHead", new AttributeInfo ("xsd:string", "Line", "optional")); result.put("Line.Graphics.Anchor@position", new AttributeInfo ("xsd:float", null, "required")); result.put("Line.Graphics.Anchor@Shape", new AttributeInfo ("xsd:string", "LigandRound", "required")); result.put("Line.Graphics.Anchor@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Line.Graphics@Color", new AttributeInfo ("gpml:ColorType", "Black", "optional")); result.put("Line.Graphics.Segment@direction", new AttributeInfo ("gpml:string", null, "required")); result.put("Line.Graphics.Segment@length", new AttributeInfo ("gpml:float", null, "required")); result.put("Line.Graphics@ConnectorType", new AttributeInfo ("gpml:string", "Straight", "optional")); result.put("Line@Style", new AttributeInfo ("xsd:string", "Solid", "optional")); result.put("Label.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Label.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Label.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Label.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Label.Graphics@Color", new AttributeInfo ("gpml:ColorType", null, "optional")); result.put("Label.Graphics@FontName", new AttributeInfo ("xsd:string", "Arial", "optional")); result.put("Label.Graphics@FontStyle", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontDecoration", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontStrikethru", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontWeight", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Label.Graphics@FontSize", new AttributeInfo ("xsd:nonNegativeInteger", "12", "optional")); result.put("Label@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Label@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Label@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("Label@TextLabel", new AttributeInfo ("xsd:string", null, "required")); result.put("Label@Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("Label@GenMAPP-Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("Label@Outline", new AttributeInfo ("xsd:string", "None", "optional")); result.put("Link.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Link.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Link.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Link.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Link.Graphics@Color", new AttributeInfo ("gpml:ColorType", null, "optional")); result.put("Link.Graphics@FontName", new AttributeInfo ("xsd:string", "Arial", "optional")); result.put("Link.Graphics@FontStyle", new 
AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontDecoration", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontStrikethru", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontWeight", new AttributeInfo ("xsd:string", "Normal", "optional")); result.put("Link.Graphics@FontSize", new AttributeInfo ("xsd:nonNegativeInteger", "12", "optional")); result.put("Link@Href", new AttributeInfo ("xsd:string", null, "optional")); result.put("Link@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Link@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Link@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("Link@TextLabel", new AttributeInfo ("xsd:string", null, "required")); result.put("Link@GenMAPP-Xref", new AttributeInfo ("xsd:string", null, "optional")); result.put("Shape.Graphics@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Shape.Graphics@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Shape.Graphics@Width", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Shape.Graphics@Height", new AttributeInfo ("gpml:Dimension", null, "required")); result.put("Shape.Graphics@Color", new AttributeInfo ("gpml:ColorType", "Black", "optional")); result.put("Shape.Graphics@Rotation", new AttributeInfo ("gpml:RotationType", "Top", "optional")); result.put("Shape.Graphics@FillColor", new AttributeInfo ("gpml:ColorType", "Transparent", "optional")); result.put("Shape@Type", new AttributeInfo ("xsd:string", null, "required")); result.put("Shape@GraphId", new AttributeInfo ("xsd:ID", null, "optional")); result.put("Shape@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Shape@ObjectType", new AttributeInfo ("gpml:ObjectType", "Annotation", "optional")); result.put("Shape@Style", new AttributeInfo ("gpml:StyleType", "Solid", "optional")); result.put("Group@GroupId", new AttributeInfo ("xsd:ID", null, "required")); result.put("Group@GroupRef", new AttributeInfo ("xsd:string", null, "optional")); result.put("Group@TextLabel", new AttributeInfo("xsd:string", null, "optional")); result.put("Group@Style", new AttributeInfo ("gpml:GroupStyleType", "None", "optional")); result.put("InfoBox@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("InfoBox@CenterY", new AttributeInfo ("xsd:float", null, "required")); result.put("Legend@CenterX", new AttributeInfo ("xsd:float", null, "required")); result.put("Legend@CenterY", new AttributeInfo ("xsd:float", null, "required")); /* END OF AUTO-GENERATED CONTENT */ return result; }; /** * Sets a certain attribute value, * Does a basic check for some types, * throws an exception when you're trying to set an invalid value * If you're trying to set a default value, or an optional value to null, * the attribute is omitted, * leading to a leaner xml output. 
* * @param tag used for lookup in the defaults table * @param name used for lookup in the defaults table * @param el jdom element where this attribute belongs in * @param value value you wan't to check and set */ private static void setAttribute(String tag, String name, Element el, String value) throws ConverterException { String key = tag + "@" + name; if (!attributeInfo.containsKey(key)) throw new ConverterException("Trying to set invalid attribute " + key); AttributeInfo aInfo = attributeInfo.get(key); boolean isDefault = false; // here we start seeing if the attribute is equal to the // default value // if so, we can leave out the attribute from the jdom // altogether if (aInfo.use.equals("optional")) { if (aInfo.schemaType.equals("xsd:string") || aInfo.schemaType.equals("xsd:ID")) { if ((aInfo.def == null && value == null) || (aInfo.def != null && aInfo.def.equals(value)) || (aInfo.def == null && value != null && value.equals(""))) isDefault = true; } else if (aInfo.schemaType.equals("xsd:float") || aInfo.schemaType.equals("Dimension")) { Double x = Double.parseDouble(aInfo.def); Double y = Double.parseDouble(value); if (Math.abs(x - y) < 1e-6) isDefault = true; } } if (!isDefault) el.setAttribute(name, value); } /** * Gets a certain attribute value, * replaces it with a suitable default under certain conditions. * * @param tag used for lookup in the defaults table * @param name used for lookup in the defaults table * @param el jdom element to get the attribute from * @throws ConverterException */ private static String getAttribute(String tag, String name, Element el) throws ConverterException { String key = tag + "@" + name; if (!attributeInfo.containsKey(key)) throw new ConverterException("Trying to get invalid attribute " + key); AttributeInfo aInfo = attributeInfo.get(key); String result = el.getAttributeValue(name, aInfo.def); return result; } /** * The GPML xsd implies a certain ordering for children of the pathway element. * (e.g. DataNode always comes before LineShape, etc.) * * This Comparator can sort jdom Elements so that they are in the correct order * for the xsd. * * @author Martijn.vanIersel */ private static class ByElementName implements Comparator<Element> { // hashmap for quick lookups during sorting private HashMap<String, Integer> elementOrdering; // correctly ordered list of tag names, which are loaded into the hashmap in // the constructor. private final String[] elements = new String[] { "Comment", "BiopaxRef", "Graphics", "DataNode", "Line", "Label", "Shape", "Group", "InfoBox", "Legend", "Biopax" }; /* * Constructor */ public ByElementName() { elementOrdering = new HashMap<String, Integer>(); for (int i = 0; i < elements.length; ++i) { elementOrdering.put (elements[i], new Integer(i)); } } /* * As a comparison measure, returns difference of index of element names of a and b * in elements array. E.g: * Comment -> index 1 in elements array * Graphics -> index 2 in elements array. 
* If a.getName() is Comment and b.getName() is Graphics, returns 1-2 -> -1 */ public int compare(Element a, Element b) { return ((Integer)elementOrdering.get(a.getName())).intValue() - ((Integer)elementOrdering.get(b.getName())).intValue(); } } public static Document createJdom(Pathway data) throws ConverterException { Document doc = new Document(); Namespace ns = GPML; Element root = new Element("Pathway", ns); doc.setRootElement(root); List<Element> elementList = new ArrayList<Element>(); List<PathwayElement> pathwayElements = data.getDataObjects(); Collections.sort(pathwayElements); for (PathwayElement o : pathwayElements) { if (o.getObjectType() == ObjectType.MAPPINFO) { setAttribute("Pathway", "Name", root, o.getMapInfoName()); setAttribute("Pathway", "Data-Source", root, o.getMapInfoDataSource()); setAttribute("Pathway", "Version", root, o.getVersion()); setAttribute("Pathway", "Author", root, o.getAuthor()); setAttribute("Pathway", "Maintainer", root, o.getMaintainer()); setAttribute("Pathway", "Email", root, o.getEmail()); setAttribute("Pathway", "Copyright", root, o.getCopyright()); setAttribute("Pathway", "Last-Modified", root, o.getLastModified()); setAttribute("Pathway", "Organism", root, o.getOrganism()); updateComments(o, root); updateBiopaxRef(o, root); Element graphics = new Element("Graphics", ns); root.addContent(graphics); double[] size = o.getMBoardSize(); setAttribute("Pathway.Graphics", "BoardWidth", graphics, "" +size[0]); setAttribute("Pathway.Graphics", "BoardHeight", graphics, "" + size[1]); setAttribute("Pathway.Graphics", "WindowWidth", graphics, "" + o.getWindowWidth()); setAttribute("Pathway.Graphics", "WindowHeight", graphics, "" + o.getWindowHeight()); } else { Element e = createJdomElement(o, ns); if (e != null) elementList.add(e); } } // now sort the generated elements in the order defined by the xsd Collections.sort(elementList, new ByElementName()); for (Element e : elementList) { root.addContent(e); } return doc; } public static PathwayElement mapElement(Element e) throws ConverterException { return mapElement (e, null); } /** Create a single PathwayElement based on a piece of Jdom tree. Used also by Patch utility Pathway p may be null */ public static PathwayElement mapElement(Element e, Pathway p) throws ConverterException { String tag = e.getName(); int ot = ObjectType.getTagMapping(tag); if (ot == -1) { // do nothing. 
This could be caused by // tags <comment> or <graphics> that appear // as subtags of <pathway> return null; } PathwayElement o = PathwayElement.createPathwayElement(ot); if (p != null) { p.add (o); } switch (o.getObjectType()) { case ObjectType.DATANODE: mapShapeData(o, e, "DataNode"); mapColor(o, e); mapComments(o, e); mapDataNode(o, e); mapGraphId(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.LABEL: mapShapeData(o, e, "Label"); mapColor(o, e); mapLabelData(o, e); mapComments(o, e); mapGraphId(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.LINE: mapLineData(o, e); mapColor(o, e); mapComments(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.MAPPINFO: mapMappInfoData(o, e); mapBiopaxRef(o, e); mapComments(o, e); break; case ObjectType.SHAPE: mapShapeData(o, e, "Shape"); mapShapeColor (o, e); mapColor(o, e); mapComments(o, e); mapShapeType(o, e); mapGraphId(o, e); mapGroupRef(o, e); mapBiopaxRef(o, e); break; case ObjectType.LEGEND: mapSimpleCenter(o, e); break; case ObjectType.INFOBOX: mapSimpleCenter (o, e); break; case ObjectType.GROUP: mapGroupRef(o, e); mapGroup (o, e); mapComments(o, e); mapBiopaxRef(o, e); break; case ObjectType.BIOPAX: mapBiopax(o, e); break; default: throw new ConverterException("Invalid ObjectType'" + tag + "'"); } return o; } private static void mapLineData(PathwayElement o, Element e) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); List<MPoint> mPoints = new ArrayList<MPoint>(); String startType = null; String endType = null; List<Element> pointElements = graphics.getChildren("Point", e.getNamespace()); for(int i = 0; i < pointElements.size(); i++) { Element pe = pointElements.get(i); MPoint mp = o.new MPoint( Double.parseDouble(getAttribute("Line.Graphics.Point", "x", pe)), Double.parseDouble(getAttribute("Line.Graphics.Point", "y", pe)) ); mPoints.add(mp); String ref = getAttribute("Line.Graphics.Point", "GraphRef", pe); if (ref == null) ref = ""; mp.setGraphRef(ref); if(i == 0) { startType = getAttribute("Line.Graphics.Point", "ArrowHead", pe); endType = getAttribute("Line.Graphics.Point", "Head", pe); } else if(i == pointElements.size() - 1) { /** read deprecated Head attribute for backwards compatibility. If an arrowhead attribute is present on the other point, it overrides this one. */ if (pe.getAttributeValue("ArrowHead") != null) { endType = getAttribute("Line.Graphics.Point", "ArrowHead", pe); } } } o.setMPoints(mPoints); String style = getAttribute("Line", "Style", e); o.setLineStyle ((style.equals("Solid")) ? LineStyle.SOLID : LineStyle.DASHED); o.setStartLineType (LineType.fromName(startType)); o.setEndLineType (LineType.fromName(endType)); String connType = getAttribute("Line.Graphics", "ConnectorType", graphics); o.setConnectorType(ConnectorType.fromName(connType)); //Map anchors List<Element> anchors = graphics.getChildren("Anchor", e.getNamespace()); for(Element ae : anchors) { double position = Double.parseDouble(getAttribute("Line.Graphics.Anchor", "position", ae)); MAnchor anchor = o.addMAnchor(position); mapGraphId(anchor, ae); String shape = getAttribute("Line.Graphics.Anchor", "Shape", ae); if(shape != null) { anchor.setShape(AnchorType.fromName(shape)); } } } private static void updateLineData(PathwayElement o, Element e) throws ConverterException { if(e != null) { setAttribute("Line", "Style", e, o.getLineStyle() == LineStyle.SOLID ? 
"Solid" : "Broken"); Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); List<MPoint> mPoints = o.getMPoints(); for(int i = 0; i < mPoints.size(); i++) { MPoint mp = mPoints.get(i); Element pe = new Element("Point", e.getNamespace()); jdomGraphics.addContent(pe); setAttribute("Line.Graphics.Point", "x", pe, Double.toString(mp.getX())); setAttribute("Line.Graphics.Point", "y", pe, Double.toString(mp.getY())); if (mp.getGraphRef() != null && !mp.getGraphRef().equals("")) { setAttribute("Line.Graphics.Point", "GraphRef", pe, mp.getGraphRef()); setAttribute("Line.Graphics.Point", "relX", pe, Double.toString(mp.getRelX())); setAttribute("Line.Graphics.Point", "relY", pe, Double.toString(mp.getRelY())); } if(i == 0) { setAttribute("Line.Graphics.Point", "ArrowHead", pe, o.getStartLineType().getName()); } else if(i == mPoints.size() - 1) { setAttribute("Line.Graphics.Point", "ArrowHead", pe, o.getEndLineType().getName()); } } for(MAnchor anchor : o.getMAnchors()) { Element ae = new Element("Anchor", e.getNamespace()); setAttribute("Line.Graphics.Anchor", "position", ae, Double.toString(anchor.getPosition())); setAttribute("Line.Graphics.Anchor", "Shape", ae, anchor.getShape().getName()); updateGraphId(anchor, ae); jdomGraphics.addContent(ae); } ConnectorType ctype = o.getConnectorType(); setAttribute("Line.Graphics", "ConnectorType", jdomGraphics, ctype.getName()); } } private static void mapColor(PathwayElement o, Element e) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); String scol = getAttribute(e.getName() + ".Graphics", "Color", graphics); o.setColor (gmmlString2Color(scol)); } private static void mapShapeColor(PathwayElement o, Element e) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); String scol = getAttribute("Shape.Graphics", "FillColor", graphics); if(scol.equals("Transparent")) { o.setTransparent (true); } else { o.setTransparent (false); o.setFillColor (gmmlString2Color(scol)); } } private static void updateColor(PathwayElement o, Element e) throws ConverterException { if(e != null) { Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); if(jdomGraphics != null) { setAttribute(e.getName() + ".Graphics", "Color", jdomGraphics, color2HexBin(o.getColor())); } } } private static void updateShapeColor(PathwayElement o, Element e) { if(e != null) { Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); if(jdomGraphics != null) { if (o.isTransparent()) jdomGraphics.setAttribute("FillColor", "Transparent"); else jdomGraphics.setAttribute("FillColor", color2HexBin(o.getFillColor())); } } } private static void mapComments(PathwayElement o, Element e) throws ConverterException { for (Object f : e.getChildren("Comment", e.getNamespace())) { o.addComment(((Element)f).getText(), getAttribute("Comment", "Source", (Element)f)); } } private static void updateComments(PathwayElement o, Element e) throws ConverterException { if(e != null) { for (PathwayElement.Comment c : o.getComments()) { Element f = new Element ("Comment", e.getNamespace()); f.setText (c.getComment()); setAttribute("Comment", "Source", f, c.getSource()); e.addContent(f); } } } private static void mapGraphId (GraphIdContainer o, Element e) { String id = e.getAttributeValue("GraphId"); if((id == null || id.equals("")) && o.getGmmlData() != null) { id = o.getGmmlData().getUniqueGraphId(); } o.setGraphId (id); } private static void updateGraphId (GraphIdContainer o, Element e) { String id = o.getGraphId(); // id has to be 
unique! if (id != null && !id.equals("")) { e.setAttribute("GraphId", o.getGraphId()); } } private static void mapGroupRef (PathwayElement o, Element e) { String id = e.getAttributeValue("GroupRef"); if(id != null && !id.equals("")) { o.setGroupRef (id); } } private static void updateGroupRef (PathwayElement o, Element e) { String id = o.getGroupRef(); if (id != null && !id.equals("")) { e.setAttribute("GroupRef", o.getGroupRef()); } } private static void mapGroup (PathwayElement o, Element e) throws ConverterException { //ID String id = e.getAttributeValue("GroupId"); if((id == null || id.equals("")) && o.getParent() != null) {id = o.getParent().getUniqueGroupId();} o.setGroupId (id); //GraphId mapGraphId(o, e); //Style o.setGroupStyle(GroupStyle.fromGpmlName(getAttribute("Group", "Style", e))); //Label o.setTextLabel (getAttribute("Group", "TextLabel", e)); } private static void updateGroup (PathwayElement o, Element e) throws ConverterException { //ID String id = o.createGroupId(); if (id != null && !id.equals("")) {e.setAttribute("GroupId", o.createGroupId());} //GraphId updateGraphId(o, e); //Style setAttribute("Group", "Style", e, GroupStyle.toGpmlName(o.getGroupStyle())); //Label setAttribute ("Group", "TextLabel", e, o.getTextLabel()); } private static void mapDataNode(PathwayElement o, Element e) throws ConverterException { o.setTextLabel (getAttribute("DataNode", "TextLabel", e)); o.setGenMappXref (getAttribute("DataNode", "GenMAPP-Xref", e)); o.setDataNodeType (getAttribute("DataNode", "Type", e)); o.setBackpageHead (getAttribute("DataNode", "BackpageHead", e)); Element xref = e.getChild ("Xref", e.getNamespace()); o.setGeneID (getAttribute("DataNode.Xref", "ID", xref)); o.setDataSource (DataSource.getByFullName (getAttribute("DataNode.Xref", "Database", xref))); } private static void updateDataNode(PathwayElement o, Element e) throws ConverterException { if(e != null) { setAttribute ("DataNode", "TextLabel", e, o.getTextLabel()); setAttribute ("DataNode", "GenMAPP-Xref", e, o.getGenMappXref()); setAttribute ("DataNode", "Type", e, o.getDataNodeType()); setAttribute ("DataNode", "BackpageHead", e, o.getBackpageHead()); Element xref = e.getChild("Xref", e.getNamespace()); String database = o.getDataSource() == null ? "" : o.getDataSource().getFullName(); setAttribute ("DataNode.Xref", "Database", xref, database == null ? 
"" : database); setAttribute ("DataNode.Xref", "ID", xref, o.getGeneID()); } } private static void mapSimpleCenter(PathwayElement o, Element e) { o.setMCenterX (Double.parseDouble(e.getAttributeValue("CenterX"))); o.setMCenterY (Double.parseDouble(e.getAttributeValue("CenterY"))); } private static void updateSimpleCenter(PathwayElement o, Element e) { if(e != null) { e.setAttribute("CenterX", Double.toString(o.getMCenterX())); e.setAttribute("CenterY", Double.toString(o.getMCenterY())); } } private static void mapShapeData(PathwayElement o, Element e, String base) throws ConverterException { Element graphics = e.getChild("Graphics", e.getNamespace()); o.setMCenterX (Double.parseDouble(getAttribute(base + ".Graphics", "CenterX", graphics))); o.setMCenterY (Double.parseDouble(getAttribute(base + ".Graphics", "CenterY", graphics))); o.setMWidth (Double.parseDouble(getAttribute(base + ".Graphics", "Width", graphics))); o.setMHeight (Double.parseDouble(getAttribute(base + ".Graphics", "Height", graphics))); } private static void updateShapeData(PathwayElement o, Element e, String base) throws ConverterException { if(e != null) { Element graphics = e.getChild("Graphics", e.getNamespace()); if(graphics !=null) { setAttribute(base + ".Graphics", "CenterX", graphics, "" + o.getMCenterX()); setAttribute(base + ".Graphics", "CenterY", graphics, "" + o.getMCenterY()); setAttribute(base + ".Graphics", "Width", graphics, "" + o.getMWidth()); setAttribute(base + ".Graphics", "Height", graphics, "" + o.getMHeight()); } } } private static void mapShapeType(PathwayElement o, Element e) throws ConverterException { o.setShapeType (ShapeType.fromGpmlName(getAttribute("Shape", "Type", e))); String style = getAttribute ("Shape", "Style", e); o.setLineStyle ((style.equals("Solid")) ? LineStyle.SOLID : LineStyle.DASHED); Element graphics = e.getChild("Graphics", e.getNamespace()); String rotation = getAttribute("Shape.Graphics", "Rotation", graphics); double result; if (rotation.equals("Top")) { result = 0.0; } else if (rotation.equals("Right")) { result = 0.5 * Math.PI; } else if (rotation.equals("Bottom")) { result = Math.PI; } else if (rotation.equals("Left")) { result = 1.5 * Math.PI; } else { result = Double.parseDouble(rotation); } o.setRotation (result); } private static void updateShapeType(PathwayElement o, Element e) throws ConverterException { if(e != null) { e.setAttribute("Type", o.getShapeType().getName()); setAttribute("Line", "Style", e, o.getLineStyle() == LineStyle.SOLID ? 
"Solid" : "Broken"); Element jdomGraphics = e.getChild("Graphics", e.getNamespace()); if(jdomGraphics !=null) { jdomGraphics.setAttribute("Rotation", Double.toString(o.getRotation())); } } } private static void mapLabelData(PathwayElement o, Element e) throws ConverterException { o.setTextLabel (getAttribute("Label", "TextLabel", e)); Element graphics = e.getChild("Graphics", e.getNamespace()); o.setMFontSize (Integer.parseInt(graphics.getAttributeValue("FontSize"))); String fontWeight = getAttribute("Label.Graphics", "FontWeight", graphics); String fontStyle = getAttribute("Label.Graphics", "FontStyle", graphics); String fontDecoration = getAttribute("Label.Graphics", "FontDecoration", graphics); String fontStrikethru = getAttribute("Label.Graphics", "FontStrikethru", graphics); o.setBold (fontWeight != null && fontWeight.equals("Bold")); o.setItalic (fontStyle != null && fontStyle.equals("Italic")); o.setUnderline (fontDecoration != null && fontDecoration.equals("Underline")); o.setStrikethru (fontStrikethru != null && fontStrikethru.equals("Strikethru")); o.setFontName (getAttribute("Label.Graphics", "FontName", graphics)); String xref = getAttribute("Label", "Xref", e); if (xref == null) xref = ""; o.setGenMappXref(xref); String outline = getAttribute("Label", "Outline", e); o.setOutline (OutlineType.fromTag (outline)); } private static void updateLabelData(PathwayElement o, Element e) throws ConverterException { if(e != null) { setAttribute("Label", "TextLabel", e, o.getTextLabel()); setAttribute("Label", "Xref", e, o.getGenMappXref() == null ? "" : o.getGenMappXref()); setAttribute("Label", "Outline", e, o.getOutline().getTag()); Element graphics = e.getChild("Graphics", e.getNamespace()); if(graphics !=null) { setAttribute("Label.Graphics", "FontName", graphics, o.getFontName() == null ? "" : o.getFontName()); setAttribute("Label.Graphics", "FontWeight", graphics, o.isBold() ? "Bold" : "Normal"); setAttribute("Label.Graphics", "FontStyle", graphics, o.isItalic() ? "Italic" : "Normal"); setAttribute("Label.Graphics", "FontDecoration", graphics, o.isUnderline() ? "Underline" : "Normal"); setAttribute("Label.Graphics", "FontStrikethru", graphics, o.isStrikethru() ? 
"Strikethru" : "Normal"); setAttribute("Label.Graphics", "FontSize", graphics, Integer.toString((int)o.getMFontSize())); } } } private static void mapMappInfoData(PathwayElement o, Element e) throws ConverterException { o.setMapInfoName (getAttribute("Pathway", "Name", e)); o.setOrganism (getAttribute("Pathway", "Organism", e)); o.setMapInfoDataSource (getAttribute("Pathway", "Data-Source", e)); o.setVersion (getAttribute("Pathway", "Version", e)); o.setAuthor (getAttribute("Pathway", "Author", e)); o.setMaintainer (getAttribute("Pathway", "Maintainer", e)); o.setEmail (getAttribute("Pathway", "Email", e)); o.setLastModified (getAttribute("Pathway", "Last-Modified", e)); o.setCopyright (getAttribute("Pathway", "Copyright", e)); Element g = e.getChild("Graphics", e.getNamespace()); //Board size will be calculated // o.setMBoardWidth (Double.parseDouble(getAttribute("Pathway.Graphics", "BoardWidth", g))); // o.setMBoardHeight (Double.parseDouble(getAttribute("Pathway.Graphics", "BoardHeight", g))); o.setWindowWidth (Double.parseDouble(getAttribute("Pathway.Graphics", "WindowWidth", g))); o.setWindowHeight (Double.parseDouble(getAttribute("Pathway.Graphics", "WindowHeight", g))); } private static void mapBiopax(PathwayElement o, Element e) throws ConverterException { //this method clones all content, //getContent will leave them attached to the parent, which we don't want //We can safely remove them, since the JDOM element isn't used anymore after this method Element root = new Element("RDF", RDF); root.addNamespaceDeclaration(RDFS); root.addNamespaceDeclaration(RDF); root.addNamespaceDeclaration(OWL); root.addNamespaceDeclaration(BIOPAX); root.setAttribute(new Attribute("base", GPML.getURI() + "#", Namespace.XML_NAMESPACE)); //Element owl = new Element("Ontology", OWL); //owl.setAttribute(new Attribute("about", "", RDF)); //Element imp = new Element("imports", OWL); //imp.setAttribute(new Attribute("resource", BIOPAX.getURI(), RDF)); //owl.addContent(imp); //root.addContent(owl); root.addContent(e.cloneContent()); Document bp = new Document(root); o.setBiopax(bp); } private static void updateBiopax(PathwayElement o, Element e) throws ConverterException { Document bp = o.getBiopax(); if(e != null && bp != null) { List<Content> content = bp.getRootElement().cloneContent(); for(Content c : content) { if(c instanceof Element) { Element elm = (Element)c; if(elm.getNamespace().equals(BIOPAX)) { e.addContent(c); } else if(elm.getName().equals("RDF") && elm.getNamespace().equals(RDF)) { for(Object ce : elm.getChildren()) { if(((Element)ce).getNamespace().equals(BIOPAX)) { e.addContent((Element)ce); } } } else { Logger.log.info("Skipped non-biopax element" + c); } } } } } private static void mapBiopaxRef(PathwayElement o, Element e) throws ConverterException { for (Object f : e.getChildren("BiopaxRef", e.getNamespace())) { o.addBiopaxRef(((Element)f).getText()); } } private static void updateBiopaxRef(PathwayElement o, Element e) throws ConverterException { if(e != null) { for (String ref : o.getBiopaxRefs()) { Element f = new Element ("BiopaxRef", e.getNamespace()); f.setText (ref); e.addContent(f); } } } static public Element createJdomElement(PathwayElement o, Namespace ns) throws ConverterException { Element e = null; switch (o.getObjectType()) { case ObjectType.DATANODE: e = new Element("DataNode", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); e.addContent(new Element("Xref", ns)); updateDataNode(o, e); updateColor(o, e); updateShapeData(o, e, 
"DataNode"); updateGraphId(o, e); updateGroupRef(o, e); break; case ObjectType.SHAPE: e = new Element ("Shape", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); updateShapeColor(o, e); updateColor(o, e); updateShapeData(o, e, "Shape"); updateShapeType(o, e); updateGraphId(o, e); updateGroupRef(o, e); break; case ObjectType.LINE: e = new Element("Line", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); updateLineData(o, e); updateColor(o, e); updateGroupRef(o, e); break; case ObjectType.LABEL: e = new Element("Label", ns); updateComments(o, e); updateBiopaxRef(o, e); e.addContent(new Element("Graphics", ns)); updateLabelData(o, e); updateColor(o, e); updateShapeData(o, e, "Label"); updateGraphId(o, e); updateGroupRef(o, e); break; case ObjectType.LEGEND: e = new Element ("Legend", ns); updateSimpleCenter (o, e); break; case ObjectType.INFOBOX: e = new Element ("InfoBox", ns); updateSimpleCenter (o, e); break; case ObjectType.GROUP: e = new Element ("Group", ns); updateGroup (o, e); updateGroupRef(o, e); updateComments(o, e); updateBiopaxRef(o, e); break; case ObjectType.BIOPAX: e = new Element ("Biopax", ns); updateBiopax(o, e); break; } if (e == null) { throw new ConverterException ("Error creating jdom element with objectType " + o.getObjectType()); } return e; } /** * Converts a string containing either a named color (as specified in gpml) or a hexbinary number * to an {@link Color} object * @param strColor */ public static Color gmmlString2Color(String strColor) { if(colorMappings.contains(strColor)) { double[] color = (double[])rgbMappings.get(colorMappings.indexOf(strColor)); return new Color((int)(255*color[0]),(int)(255*color[1]),(int)(255*color[2])); } else { try { strColor = padding(strColor, 6, '0'); int red = Integer.valueOf(strColor.substring(0,2),16); int green = Integer.valueOf(strColor.substring(2,4),16); int blue = Integer.valueOf(strColor.substring(4,6),16); return new Color(red,green,blue); } catch (Exception e) { Logger.log.error("while converting color: " + "Color " + strColor + " is not valid, element color is set to black", e); } } return new Color(0,0,0); } /** * Converts an {@link Color} object to a hexbinary string * @param color */ public static String color2HexBin(Color color) { String red = padding(Integer.toBinaryString(color.getRed()), 8, '0'); String green = padding(Integer.toBinaryString(color.getGreen()), 8, '0'); String blue = padding(Integer.toBinaryString(color.getBlue()), 8, '0'); String hexBinary = Integer.toHexString(Integer.valueOf(red + green + blue, 2)); return padding(hexBinary, 6, '0'); } /** * Prepends character c x-times to the input string to make it length n * @param s String to pad * @param n Number of characters of the resulting string * @param c character to append * @return string of length n or larger (if given string s > n) */ public static String padding(String s, int n, char c) { while(s.length() < n) { s = c + s; } return s; } public static final List<double[]> rgbMappings = Arrays.asList(new double[][] { {0, 1, 1}, // aqua {0, 0, 0}, // black {0, 0, 1}, // blue {1, 0, 1}, // fuchsia {.5, .5, .5,}, // gray {0, .5, 0}, // green {0, 1, 0}, // lime {.5, 0, 0}, // maroon {0, 0, .5}, // navy {.5, .5, 0}, // olive {.5, 0, .5}, // purple {1, 0, 0}, // red {.75, .75, .75},// silver {0, .5, .5}, // teal {1, 1, 1}, // white {1, 1, 0}, // yellow {0, 0, 0} // transparent (actually irrelevant) }); public static final List<String> colorMappings = 
Arrays.asList(new String[]{ "Aqua", "Black", "Blue", "Fuchsia", "Gray", "Green", "Lime", "Maroon", "Navy", "Olive", "Purple", "Red", "Silver", "Teal", "White", "Yellow", "Transparent" }); public void doImport(File file, Pathway pathway) throws ConverterException { readFromXml(pathway, file, true); } public void doExport(File file, Pathway pathway) throws ConverterException { writeToXml(pathway, file, true); } public String[] getExtensions() { return new String[] { "gpml", "xml" }; } public String getName() { return "GPML file"; } /** * Writes the JDOM document to the file specified * @param file the file to which the JDOM document should be saved * @param validate if true, validate the dom structure before writing to file. If there is a validation error, * or the xsd is not in the classpath, an exception will be thrown. */ static public void writeToXml(Pathway pwy, File file, boolean validate) throws ConverterException { Document doc = createJdom(pwy); //Validate the JDOM document if (validate) validateDocument(doc); // Get the XML code XMLOutputter xmlcode = new XMLOutputter(Format.getPrettyFormat()); Format f = xmlcode.getFormat(); f.setEncoding("ISO-8859-1"); f.setTextMode(Format.TextMode.PRESERVE); xmlcode.setFormat(f); //Open a filewriter try { FileWriter writer = new FileWriter(file); //Send XML code to the filewriter xmlcode.output(doc, writer); } catch (IOException ie) { throw new ConverterException(ie); } } /** * Writes the JDOM document to the outputstream specified * @param out the outputstream to which the JDOM document should be writed * @param validate if true, validate the dom structure before writing. If there is a validation error, * or the xsd is not in the classpath, an exception will be thrown. * @throws ConverterException */ static public void writeToXml(Pathway pwy, OutputStream out, boolean validate) throws ConverterException { Document doc = createJdom(pwy); //Validate the JDOM document if (validate) validateDocument(doc); // Get the XML code XMLOutputter xmlcode = new XMLOutputter(Format.getPrettyFormat()); Format f = xmlcode.getFormat(); f.setEncoding("ISO-8859-1"); f.setTextMode(Format.TextMode.PRESERVE); xmlcode.setFormat(f); try { //Send XML code to the outputstream xmlcode.output(doc, out); } catch (IOException ie) { throw new ConverterException(ie); } } static public void readFromXml(Pathway pwy, File file, boolean validate) throws ConverterException { FileReader inf; try { inf = new FileReader (file); } catch (FileNotFoundException e) { throw new ConverterException (e); } readFromXml (pwy, inf, validate); } static public void readFromXml(Pathway pwy, Reader in, boolean validate) throws ConverterException { // Start XML processing SAXBuilder builder = new SAXBuilder(false); // no validation when reading the xml file // try to read the file; if an error occurs, catch the exception and print feedback try { Logger.log.trace ("Build JDOM tree"); // build JDOM tree Document doc = builder.build(in); Logger.log.trace ("Start Validation"); if (validate) validateDocument(doc); // Copy the pathway information to a VPathway Element root = doc.getRootElement(); Logger.log.trace ("Copy map elements"); mapElement(root, pwy); // MappInfo // Iterate over direct children of the root element for (Object e : root.getChildren()) { mapElement((Element)e, pwy); } Logger.log.trace ("End copying map elements"); //Convert absolute point coordinates of linked points to //relative coordinates convertPointCoordinates(pwy); } catch(JDOMParseException pe) { throw new ConverterException 
(pe); } catch(JDOMException e) { throw new ConverterException (e); } catch(IOException e) { throw new ConverterException (e); } catch(NullPointerException e) { throw new ConverterException (e); } catch(IllegalArgumentException e) { throw new ConverterException (e); } catch(Exception e) { //Make all types of exceptions a ConverterException throw new ConverterException (e); } } private static void convertPointCoordinates(Pathway pathway) throws ConverterException { for(PathwayElement pe : pathway.getDataObjects()) { if(pe.getObjectType() == ObjectType.LINE) { String sr = pe.getStartGraphRef(); String er = pe.getEndGraphRef(); if(sr != null && !"".equals(sr)) { GraphIdContainer idc = pathway.getGraphIdContainer(sr); Point2D relative = idc.toRelativeCoordinate( new Point2D.Double( pe.getMStart().getRawX(), pe.getMStart().getRawY() ) ); pe.getMStart().setRelativePosition(relative.getX(), relative.getY()); } if(er != null && !"".equals(er)) { GraphIdContainer idc = pathway.getGraphIdContainer(er); Point2D relative = idc.toRelativeCoordinate( new Point2D.Double( pe.getMEnd().getRawX(), pe.getMEnd().getRawY() ) ); pe.getMEnd().setRelativePosition(relative.getX(), relative.getY()); } } } } /** * validates a JDOM document against the xml-schema definition specified by 'xsdFile' * @param doc the document to validate */ public static void validateDocument(Document doc) throws ConverterException { ClassLoader cl = Pathway.class.getClassLoader(); InputStream is = cl.getResourceAsStream(xsdFile); if(is != null) { Schema schema; try { SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); StreamSource ss = new StreamSource (is); schema = factory.newSchema(ss); ValidatorHandler vh = schema.newValidatorHandler(); SAXOutputter so = new SAXOutputter(vh); so.output(doc); // If no errors occur, the file is valid according to the gpml xml schema definition Logger.log.info("Document is valid according to the xml schema definition '" + xsdFile.toString() + "'"); } catch (SAXException se) { Logger.log.error("Could not parse the xml-schema definition", se); throw new ConverterException (se); } catch (JDOMException je) { Logger.log.error("Document is invalid according to the xml-schema definition!: " + je.getMessage(), je); XMLOutputter xmlcode = new XMLOutputter(Format.getPrettyFormat()); Logger.log.error("The invalid XML code:\n" + xmlcode.outputString(doc)); throw new ConverterException (je); } } else { Logger.log.error("Document is not validated because the xml schema definition '" + xsdFile + "' could not be found in classpath"); throw new ConverterException ("Document is not validated because the xml schema definition '" + xsdFile + "' could not be found in classpath"); } } }
Fixes #440
src/core/org/pathvisio/model/GpmlFormat.java
Fixes #440
<ide><path>src/core/org/pathvisio/model/GpmlFormat.java <ide> if (!attributeInfo.containsKey(key)) <ide> throw new ConverterException("Trying to get invalid attribute " + key); <ide> AttributeInfo aInfo = attributeInfo.get(key); <del> String result = el.getAttributeValue(name, aInfo.def); <add> String result = ((el == null) ? aInfo.def : el.getAttributeValue(name, aInfo.def)); <ide> return result; <ide> } <ide>
JavaScript
bsd-3-clause
e1c5f56eee2e910853473e40d98544cf79a47e6b
0
Designist/Parks,Designist/Parks,Designist/Parks
;(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ if (typeof window.Sheetsee === 'undefined') window.Sheetsee = {}; window.Sheetsee = require('sheetsee-core'); var extend = require('lodash.assign'); extend(window.Sheetsee, require('sheetsee-maps'), require('sheetsee-tables')); module.exports = Sheetsee; },{"lodash.assign":3,"sheetsee-core":28,"sheetsee-maps":29,"sheetsee-tables":56}],2:[function(require,module,exports){ /*! ICanHaz.js version 0.10.2 -- by @HenrikJoreteg More info at: http://icanhazjs.com */ (function () { /* mustache.js — Logic-less templates in JavaScript See http://mustache.github.com/ for more info. */ var Mustache = function () { var _toString = Object.prototype.toString; Array.isArray = Array.isArray || function (obj) { return _toString.call(obj) == "[object Array]"; } var _trim = String.prototype.trim, trim; if (_trim) { trim = function (text) { return text == null ? "" : _trim.call(text); } } else { var trimLeft, trimRight; // IE doesn't match non-breaking spaces with \s. if ((/\S/).test("\xA0")) { trimLeft = /^[\s\xA0]+/; trimRight = /[\s\xA0]+$/; } else { trimLeft = /^\s+/; trimRight = /\s+$/; } trim = function (text) { return text == null ? "" : text.toString().replace(trimLeft, "").replace(trimRight, ""); } } var escapeMap = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': '&quot;', "'": '&#39;' }; function escapeHTML(string) { return String(string).replace(/&(?!\w+;)|[<>"']/g, function (s) { return escapeMap[s] || s; }); } var regexCache = {}; var Renderer = function () {}; Renderer.prototype = { otag: "{{", ctag: "}}", pragmas: {}, buffer: [], pragmas_implemented: { "IMPLICIT-ITERATOR": true }, context: {}, render: function (template, context, partials, in_recursion) { // reset buffer & set context if (!in_recursion) { this.context = context; this.buffer = []; // TODO: make this non-lazy } // fail fast if (!this.includes("", template)) { if (in_recursion) { return template; } else { this.send(template); return; } } // get the pragmas together template = this.render_pragmas(template); // render the template var html = this.render_section(template, context, partials); // render_section did not find any sections, we still need to render the tags if (html === false) { html = this.render_tags(template, context, partials, in_recursion); } if (in_recursion) { return html; } else { this.sendLines(html); } }, /* Sends parsed lines */ send: function (line) { if (line !== "") { this.buffer.push(line); } }, sendLines: function (text) { if (text) { var lines = text.split("\n"); for (var i = 0; i < lines.length; i++) { this.send(lines[i]); } } }, /* Looks for %PRAGMAS */ render_pragmas: function (template) { // no pragmas if (!this.includes("%", template)) { return template; } var that = this; var regex = this.getCachedRegex("render_pragmas", function (otag, ctag) { return new RegExp(otag + "%([\\w-]+) ?([\\w]+=[\\w]+)?" 
+ ctag, "g"); }); return template.replace(regex, function (match, pragma, options) { if (!that.pragmas_implemented[pragma]) { throw({message: "This implementation of mustache doesn't understand the '" + pragma + "' pragma"}); } that.pragmas[pragma] = {}; if (options) { var opts = options.split("="); that.pragmas[pragma][opts[0]] = opts[1]; } return ""; // ignore unknown pragmas silently }); }, /* Tries to find a partial in the curent scope and render it */ render_partial: function (name, context, partials) { name = trim(name); if (!partials || partials[name] === undefined) { throw({message: "unknown_partial '" + name + "'"}); } if (!context || typeof context[name] != "object") { return this.render(partials[name], context, partials, true); } return this.render(partials[name], context[name], partials, true); }, /* Renders inverted (^) and normal (#) sections */ render_section: function (template, context, partials) { if (!this.includes("#", template) && !this.includes("^", template)) { // did not render anything, there were no sections return false; } var that = this; var regex = this.getCachedRegex("render_section", function (otag, ctag) { // This regex matches _the first_ section ({{#foo}}{{/foo}}), and captures the remainder return new RegExp( "^([\\s\\S]*?)" + // all the crap at the beginning that is not {{*}} ($1) otag + // {{ "(\\^|\\#)\\s*(.+)\\s*" + // #foo (# == $2, foo == $3) ctag + // }} "\n*([\\s\\S]*?)" + // between the tag ($2). leading newlines are dropped otag + // {{ "\\/\\s*\\3\\s*" + // /foo (backreference to the opening tag). ctag + // }} "\\s*([\\s\\S]*)$", // everything else in the string ($4). leading whitespace is dropped. "g"); }); // for each {{#foo}}{{/foo}} section do... return template.replace(regex, function (match, before, type, name, content, after) { // before contains only tags, no sections var renderedBefore = before ? that.render_tags(before, context, partials, true) : "", // after may contain both sections and tags, so use full rendering function renderedAfter = after ? that.render(after, context, partials, true) : "", // will be computed below renderedContent, value = that.find(name, context); if (type === "^") { // inverted section if (!value || Array.isArray(value) && value.length === 0) { // false or empty list, render it renderedContent = that.render(content, context, partials, true); } else { renderedContent = ""; } } else if (type === "#") { // normal section if (Array.isArray(value)) { // Enumerable, Let's loop! renderedContent = that.map(value, function (row) { return that.render(content, that.create_context(row), partials, true); }).join(""); } else if (that.is_object(value)) { // Object, Use it as subcontext! renderedContent = that.render(content, that.create_context(value), partials, true); } else if (typeof value == "function") { // higher order section renderedContent = value.call(context, content, function (text) { return that.render(text, context, partials, true); }); } else if (value) { // boolean section renderedContent = that.render(content, context, partials, true); } else { renderedContent = ""; } } return renderedBefore + renderedContent + renderedAfter; }); }, /* Replace {{foo}} and friends with values from our view */ render_tags: function (template, context, partials, in_recursion) { // tit for tat var that = this; var new_regex = function () { return that.getCachedRegex("render_tags", function (otag, ctag) { return new RegExp(otag + "(=|!|>|&|\\{|%)?([^#\\^]+?)\\1?" 
+ ctag + "+", "g"); }); }; var regex = new_regex(); var tag_replace_callback = function (match, operator, name) { switch(operator) { case "!": // ignore comments return ""; case "=": // set new delimiters, rebuild the replace regexp that.set_delimiters(name); regex = new_regex(); return ""; case ">": // render partial return that.render_partial(name, context, partials); case "{": // the triple mustache is unescaped case "&": // & operator is an alternative unescape method return that.find(name, context); default: // escape the value return escapeHTML(that.find(name, context)); } }; var lines = template.split("\n"); for(var i = 0; i < lines.length; i++) { lines[i] = lines[i].replace(regex, tag_replace_callback, this); if (!in_recursion) { this.send(lines[i]); } } if (in_recursion) { return lines.join("\n"); } }, set_delimiters: function (delimiters) { var dels = delimiters.split(" "); this.otag = this.escape_regex(dels[0]); this.ctag = this.escape_regex(dels[1]); }, escape_regex: function (text) { // thank you Simon Willison if (!arguments.callee.sRE) { var specials = [ '/', '.', '*', '+', '?', '|', '(', ')', '[', ']', '{', '}', '\\' ]; arguments.callee.sRE = new RegExp( '(\\' + specials.join('|\\') + ')', 'g' ); } return text.replace(arguments.callee.sRE, '\\$1'); }, /* find `name` in current `context`. That is find me a value from the view object */ find: function (name, context) { name = trim(name); // Checks whether a value is thruthy or false or 0 function is_kinda_truthy(bool) { return bool === false || bool === 0 || bool; } var value; // check for dot notation eg. foo.bar if (name.match(/([a-z_]+)\./ig)) { var childValue = this.walk_context(name, context); if (is_kinda_truthy(childValue)) { value = childValue; } } else { if (is_kinda_truthy(context[name])) { value = context[name]; } else if (is_kinda_truthy(this.context[name])) { value = this.context[name]; } } if (typeof value == "function") { return value.apply(context); } if (value !== undefined) { return value; } // silently ignore unkown variables return ""; }, walk_context: function (name, context) { var path = name.split('.'); // if the var doesn't exist in current context, check the top level context var value_context = (context[path[0]] != undefined) ? context : this.context; var value = value_context[path.shift()]; while (value != undefined && path.length > 0) { value_context = value; value = value[path.shift()]; } // if the value is a function, call it, binding the correct context if (typeof value == "function") { return value.apply(value_context); } return value; }, // Utility methods /* includes tag */ includes: function (needle, haystack) { return haystack.indexOf(this.otag + needle) != -1; }, // by @langalex, support for arrays of strings create_context: function (_context) { if (this.is_object(_context)) { return _context; } else { var iterator = "."; if (this.pragmas["IMPLICIT-ITERATOR"]) { iterator = this.pragmas["IMPLICIT-ITERATOR"].iterator; } var ctx = {}; ctx[iterator] = _context; return ctx; } }, is_object: function (a) { return a && typeof a == "object"; }, /* Why, why, why? Because IE. Cry, cry cry. 
*/ map: function (array, fn) { if (typeof array.map == "function") { return array.map(fn); } else { var r = []; var l = array.length; for(var i = 0; i < l; i++) { r.push(fn(array[i])); } return r; } }, getCachedRegex: function (name, generator) { var byOtag = regexCache[this.otag]; if (!byOtag) { byOtag = regexCache[this.otag] = {}; } var byCtag = byOtag[this.ctag]; if (!byCtag) { byCtag = byOtag[this.ctag] = {}; } var regex = byCtag[name]; if (!regex) { regex = byCtag[name] = generator(this.otag, this.ctag); } return regex; } }; return({ name: "mustache.js", version: "0.4.0", /* Turns a template and view into HTML */ to_html: function (template, view, partials, send_fun) { var renderer = new Renderer(); if (send_fun) { renderer.send = send_fun; } renderer.render(template, view || {}, partials); if (!send_fun) { return renderer.buffer.join("\n"); } } }); }(); /*! ICanHaz.js -- by @HenrikJoreteg */ /*global */ (function () { function trim(stuff) { if (''.trim) return stuff.trim(); else return stuff.replace(/^\s+/, '').replace(/\s+$/, ''); } // Establish the root object, `window` in the browser, or `global` on the server. var root = this; var ich = { VERSION: "0.10.2", templates: {}, // grab jquery or zepto if it's there $: (typeof window !== 'undefined') ? window.jQuery || window.Zepto || null : null, // public function for adding templates // can take a name and template string arguments // or can take an object with name/template pairs // We're enforcing uniqueness to avoid accidental template overwrites. // If you want a different template, it should have a different name. addTemplate: function (name, templateString) { if (typeof name === 'object') { for (var template in name) { this.addTemplate(template, name[template]); } return; } if (ich[name]) { console.error("Invalid name: " + name + "."); } else if (ich.templates[name]) { console.error("Template \"" + name + " \" exists"); } else { ich.templates[name] = templateString; ich[name] = function (data, raw) { data = data || {}; var result = Mustache.to_html(ich.templates[name], data, ich.templates); return (ich.$ && !raw) ? ich.$(trim(result)) : result; }; } }, // clears all retrieval functions and empties cache clearAll: function () { for (var key in ich.templates) { delete ich[key]; } ich.templates = {}; }, // clears/grabs refresh: function () { ich.clearAll(); ich.grabTemplates(); }, // grabs templates from the DOM and caches them. // Loop through and add templates. // Whitespace at beginning and end of all templates inside <script> tags will // be trimmed. If you want whitespace around a partial, add it in the parent, // not the partial. Or do it explicitly using <br/> or &nbsp; grabTemplates: function () { var i, l, scripts = document.getElementsByTagName('script'), script, trash = []; for (i = 0, l = scripts.length; i < l; i++) { script = scripts[i]; if (script && script.innerHTML && script.id && (script.type === "text/html" || script.type === "text/x-icanhaz")) { ich.addTemplate(script.id, trim(script.innerHTML)); trash.unshift(script); } } for (i = 0, l = trash.length; i < l; i++) { trash[i].parentNode.removeChild(trash[i]); } } }; // Export the ICanHaz object for **Node.js**, with // backwards-compatibility for the old `require()` API. If we're in // the browser, add `ich` as a global object via a string identifier, // for Closure Compiler "advanced" mode. 
if (typeof exports !== 'undefined') { if (typeof module !== 'undefined' && module.exports) { exports = module.exports = ich; } exports.ich = ich; } else { root['ich'] = ich; } if (typeof document !== 'undefined') { if (ich.$) { ich.$(function () { ich.grabTemplates(); }); } else { document.addEventListener('DOMContentLoaded', function () { ich.grabTemplates(); }, true); } } })(); })(); },{}],3:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var baseCreateCallback = require('lodash._basecreatecallback'), keys = require('lodash.keys'), objectTypes = require('lodash._objecttypes'); /** * Assigns own enumerable properties of source object(s) to the destination * object. Subsequent sources will overwrite property assignments of previous * sources. If a callback is provided it will be executed to produce the * assigned values. The callback is bound to `thisArg` and invoked with two * arguments; (objectValue, sourceValue). * * @static * @memberOf _ * @type Function * @alias extend * @category Objects * @param {Object} object The destination object. * @param {...Object} [source] The source objects. * @param {Function} [callback] The function to customize assigning values. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the destination object. * @example * * _.assign({ 'name': 'moe' }, { 'age': 40 }); * // => { 'name': 'moe', 'age': 40 } * * var defaults = _.partialRight(_.assign, function(a, b) { * return typeof a == 'undefined' ? b : a; * }); * * var food = { 'name': 'apple' }; * defaults(food, { 'name': 'banana', 'type': 'fruit' }); * // => { 'name': 'apple', 'type': 'fruit' } */ var assign = function(object, source, guard) { var index, iterable = object, result = iterable; if (!iterable) return result; var args = arguments, argsIndex = 0, argsLength = typeof guard == 'number' ? 2 : args.length; if (argsLength > 3 && typeof args[argsLength - 2] == 'function') { var callback = baseCreateCallback(args[--argsLength - 1], args[argsLength--], 2); } else if (argsLength > 2 && typeof args[argsLength - 1] == 'function') { callback = args[--argsLength]; } while (++argsIndex < argsLength) { iterable = args[argsIndex]; if (iterable && objectTypes[typeof iterable]) { var ownIndex = -1, ownProps = objectTypes[typeof iterable] && keys(iterable), length = ownProps ? ownProps.length : 0; while (++ownIndex < length) { index = ownProps[ownIndex]; result[index] = callback ? 
callback(result[index], iterable[index]) : iterable[index]; } } } return result }; module.exports = assign; },{"lodash._basecreatecallback":4,"lodash._objecttypes":23,"lodash.keys":24}],4:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var bind = require('lodash.bind'), identity = require('lodash.identity'), setBindData = require('lodash._setbinddata'), support = require('lodash.support'); /** Used to detected named functions */ var reFuncName = /^function[ \n\r\t]+\w/; /** Used to detect functions containing a `this` reference */ var reThis = /\bthis\b/; /** Native method shortcuts */ var fnToString = Function.prototype.toString; /** * The base implementation of `_.createCallback` without support for creating * "_.pluck" or "_.where" style callbacks. * * @private * @param {*} [func=identity] The value to convert to a callback. * @param {*} [thisArg] The `this` binding of the created callback. * @param {number} [argCount] The number of arguments the callback accepts. * @returns {Function} Returns a callback function. */ function baseCreateCallback(func, thisArg, argCount) { if (typeof func != 'function') { return identity; } // exit early if there is no `thisArg` if (typeof thisArg == 'undefined') { return func; } var bindData = func.__bindData__ || (support.funcNames && !func.name); if (typeof bindData == 'undefined') { var source = reThis && fnToString.call(func); if (!support.funcNames && source && !reFuncName.test(source)) { bindData = true; } if (support.funcNames || !bindData) { // checks if `func` references the `this` keyword and stores the result bindData = !support.funcDecomp || reThis.test(source); setBindData(func, bindData); } } // exit early if there are no `this` references or `func` is bound if (bindData !== true && (bindData && bindData[1] & 1)) { return func; } switch (argCount) { case 1: return function(value) { return func.call(thisArg, value); }; case 2: return function(a, b) { return func.call(thisArg, a, b); }; case 3: return function(value, index, collection) { return func.call(thisArg, value, index, collection); }; case 4: return function(accumulator, value, index, collection) { return func.call(thisArg, accumulator, value, index, collection); }; } return bind(func, thisArg); } module.exports = baseCreateCallback; },{"lodash._setbinddata":5,"lodash.bind":13,"lodash.identity":20,"lodash.support":21}],5:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var getObject = require('lodash._getobject'), noop = require('lodash._noop'), reNative = require('lodash._renative'), releaseObject = require('lodash._releaseobject'); /** Used for native method references */ var objectProto = Object.prototype; var defineProperty = (function() { try { var o = {}, func = reNative.test(func = Object.defineProperty) && func, 
result = func(o, o, o) && func; } catch(e) { } return result; }()); /** * Sets `this` binding data on a given function. * * @private * @param {Function} func The function to set data on. * @param {*} value The value to set. */ var setBindData = !defineProperty ? noop : function(func, value) { var descriptor = getObject(); descriptor.value = value; defineProperty(func, '__bindData__', descriptor); releaseObject(descriptor); }; module.exports = setBindData; },{"lodash._getobject":6,"lodash._noop":8,"lodash._releaseobject":9,"lodash._renative":12}],6:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var objectPool = require('lodash._objectpool'); /** * Gets an object from the object pool or creates a new one if the pool is empty. * * @private * @returns {Object} The object from the pool. */ function getObject() { return objectPool.pop() || { 'array': null, 'cache': null, 'configurable': false, 'criteria': null, 'enumerable': false, 'false': false, 'index': 0, 'leading': false, 'maxWait': 0, 'null': false, 'number': null, 'object': null, 'push': null, 'string': null, 'trailing': false, 'true': false, 'undefined': false, 'value': null, 'writable': false }; } module.exports = getObject; },{"lodash._objectpool":7}],7:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used to pool arrays and objects used internally */ var objectPool = []; module.exports = objectPool; },{}],8:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** * A no-operation function. * * @private */ function noop() { // no operation performed } module.exports = noop; },{}],9:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var maxPoolSize = require('lodash._maxpoolsize'), objectPool = require('lodash._objectpool'); /** * Releases the given object back to the object pool. * * @private * @param {Object} [object] The object to release. 
*/ function releaseObject(object) { var cache = object.cache; if (cache) { releaseObject(cache); } object.array = object.cache = object.criteria = object.object = object.number = object.string = object.value = null; if (objectPool.length < maxPoolSize) { objectPool.push(object); } } module.exports = releaseObject; },{"lodash._maxpoolsize":10,"lodash._objectpool":11}],10:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used as the max size of the `arrayPool` and `objectPool` */ var maxPoolSize = 40; module.exports = maxPoolSize; },{}],11:[function(require,module,exports){ module.exports=require(7) },{}],12:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used for native method references */ var objectProto = Object.prototype; /** Used to detect if a method is native */ var reNative = RegExp('^' + String(objectProto.valueOf) .replace(/[.*+?^${}()|[\]\\]/g, '\\$&') .replace(/valueOf|for [^\]]+/g, '.+?') + '$' ); module.exports = reNative; },{}],13:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var createBound = require('lodash._createbound'), reNative = require('lodash._renative'); /** * Used for `Array` method references. * * Normally `Array.prototype` would suffice, however, using an array literal * avoids issues in Narwhal. */ var arrayRef = []; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeSlice = arrayRef.slice; /** * Creates a function that, when called, invokes `func` with the `this` * binding of `thisArg` and prepends any additional `bind` arguments to those * provided to the bound function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to bind. * @param {*} [thisArg] The `this` binding of `func`. * @param {...*} [arg] Arguments to be partially applied. * @returns {Function} Returns the new bound function. * @example * * var func = function(greeting) { * return greeting + ' ' + this.name; * }; * * func = _.bind(func, { 'name': 'moe' }, 'hi'); * func(); * // => 'hi moe' */ function bind(func, thisArg) { return arguments.length > 2 ? 
createBound(func, 17, nativeSlice.call(arguments, 2), null, thisArg) : createBound(func, 1, null, null, thisArg); } module.exports = bind; },{"lodash._createbound":14,"lodash._renative":19}],14:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var createObject = require('lodash._createobject'), isFunction = require('lodash.isfunction'), isObject = require('lodash.isobject'), reNative = require('lodash._renative'), setBindData = require('lodash._setbinddata'), support = require('lodash.support'); /** * Used for `Array` method references. * * Normally `Array.prototype` would suffice, however, using an array literal * avoids issues in Narwhal. */ var arrayRef = []; /** Used for native method references */ var objectProto = Object.prototype; /** Native method shortcuts */ var push = arrayRef.push, toString = objectProto.toString, unshift = arrayRef.unshift; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeBind = reNative.test(nativeBind = toString.bind) && nativeBind, nativeSlice = arrayRef.slice; /** * Creates a function that, when called, either curries or invokes `func` * with an optional `this` binding and partially applied arguments. * * @private * @param {Function|string} func The function or method name to reference. * @param {number} bitmask The bitmask of method flags to compose. * The bitmask may be composed of the following flags: * 1 - `_.bind` * 2 - `_.bindKey` * 4 - `_.curry` * 8 - `_.curry` (bound) * 16 - `_.partial` * 32 - `_.partialRight` * @param {Array} [partialArgs] An array of arguments to prepend to those * provided to the new function. * @param {Array} [partialRightArgs] An array of arguments to append to those * provided to the new function. * @param {*} [thisArg] The `this` binding of `func`. * @param {number} [arity] The arity of `func`. * @returns {Function} Returns the new bound function. 
*/ function createBound(func, bitmask, partialArgs, partialRightArgs, thisArg, arity) { var isBind = bitmask & 1, isBindKey = bitmask & 2, isCurry = bitmask & 4, isCurryBound = bitmask & 8, isPartial = bitmask & 16, isPartialRight = bitmask & 32, key = func; if (!isBindKey && !isFunction(func)) { throw new TypeError; } if (isPartial && !partialArgs.length) { bitmask &= ~16; isPartial = partialArgs = false; } if (isPartialRight && !partialRightArgs.length) { bitmask &= ~32; isPartialRight = partialRightArgs = false; } var bindData = func && func.__bindData__; if (bindData) { if (isBind && !(bindData[1] & 1)) { bindData[4] = thisArg; } if (!isBind && bindData[1] & 1) { bitmask |= 8; } if (isCurry && !(bindData[1] & 4)) { bindData[5] = arity; } if (isPartial) { push.apply(bindData[2] || (bindData[2] = []), partialArgs); } if (isPartialRight) { push.apply(bindData[3] || (bindData[3] = []), partialRightArgs); } bindData[1] |= bitmask; return createBound.apply(null, bindData); } // use `Function#bind` if it exists and is fast // (in V8 `Function#bind` is slower except when partially applied) if (isBind && !(isBindKey || isCurry || isPartialRight) && (support.fastBind || (nativeBind && isPartial))) { if (isPartial) { var args = [thisArg]; push.apply(args, partialArgs); } var bound = isPartial ? nativeBind.apply(func, args) : nativeBind.call(func, thisArg); } else { bound = function() { // `Function#bind` spec // http://es5.github.io/#x15.3.4.5 var args = arguments, thisBinding = isBind ? thisArg : this; if (isCurry || isPartial || isPartialRight) { args = nativeSlice.call(args); if (isPartial) { unshift.apply(args, partialArgs); } if (isPartialRight) { push.apply(args, partialRightArgs); } if (isCurry && args.length < arity) { bitmask |= 16 & ~32; return createBound(func, (isCurryBound ? bitmask : bitmask & ~3), args, null, thisArg, arity); } } if (isBindKey) { func = thisBinding[key]; } if (this instanceof bound) { // ensure `new bound` is an instance of `func` thisBinding = createObject(func.prototype); // mimic the constructor's `return` behavior // http://es5.github.io/#x13.2.2 var result = func.apply(thisBinding, args); return isObject(result) ? result : thisBinding; } return func.apply(thisBinding, args); }; } setBindData(bound, nativeSlice.call(arguments)); return bound; } module.exports = createBound; },{"lodash._createobject":15,"lodash._renative":19,"lodash._setbinddata":5,"lodash.isfunction":17,"lodash.isobject":18,"lodash.support":21}],15:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var isObject = require('lodash.isobject'), noop = require('lodash._noop'), reNative = require('lodash._renative'); /** Used for native method references */ var objectProto = Object.prototype; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeCreate = reNative.test(nativeCreate = Object.create) && nativeCreate; /** * Creates a new object with the specified `prototype`. * * @private * @param {Object} prototype The prototype object. * @returns {Object} Returns the new object. */ function createObject(prototype) { return isObject(prototype) ? 
nativeCreate(prototype) : {}; } module.exports = createObject; },{"lodash._noop":16,"lodash._renative":19,"lodash.isobject":18}],16:[function(require,module,exports){ module.exports=require(8) },{}],17:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** * Checks if `value` is a function. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true */ function isFunction(value) { return typeof value == 'function'; } module.exports = isFunction; },{}],18:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var objectTypes = require('lodash._objecttypes'); /** * Checks if `value` is the language type of Object. * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(1); * // => false */ function isObject(value) { // check if the value is the ECMAScript language type of Object // http://es5.github.io/#x8 // and avoid a V8 bug // http://code.google.com/p/v8/issues/detail?id=2291 return !!(value && objectTypes[typeof value]); } module.exports = isObject; },{"lodash._objecttypes":23}],19:[function(require,module,exports){ module.exports=require(12) },{}],20:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** * This method returns the first argument provided to it. * * @static * @memberOf _ * @category Utilities * @param {*} value Any value. * @returns {*} Returns `value`. * @example * * var moe = { 'name': 'moe' }; * moe === _.identity(moe); * // => true */ function identity(value) { return value; } module.exports = identity; },{}],21:[function(require,module,exports){ var global=typeof self !== "undefined" ? self : typeof window !== "undefined" ? 
window : {};/** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var reNative = require('lodash._renative'); /** Used to detect functions containing a `this` reference */ var reThis = /\bthis\b/; /** Used for native method references */ var objectProto = Object.prototype; /** Native method shortcuts */ var toString = objectProto.toString; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeBind = reNative.test(nativeBind = toString.bind) && nativeBind; /** Detect various environments */ var isIeOpera = reNative.test(global.attachEvent), isV8 = nativeBind && !/\n|true/.test(nativeBind + isIeOpera); /** * An object used to flag environments features. * * @static * @memberOf _ * @type Object */ var support = {}; /** * Detect if `Function#bind` exists and is inferred to be fast (all but V8). * * @memberOf _.support * @type boolean */ support.fastBind = nativeBind && !isV8; /** * Detect if functions can be decompiled by `Function#toString` * (all but PS3 and older Opera mobile browsers & avoided in Windows 8 apps). * * @memberOf _.support * @type boolean */ support.funcDecomp = !reNative.test(global.WinRTError) && reThis.test(function() { return this; }); /** * Detect if `Function#name` is supported (all but IE). * * @memberOf _.support * @type boolean */ support.funcNames = typeof Function.name == 'string'; module.exports = support; },{"lodash._renative":22}],22:[function(require,module,exports){ module.exports=require(12) },{}],23:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used to determine if values are of the language type Object */ var objectTypes = { 'boolean': false, 'function': true, 'object': true, 'number': false, 'string': false, 'undefined': false }; module.exports = objectTypes; },{}],24:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var isObject = require('lodash.isobject'), reNative = require('lodash._renative'), shimKeys = require('lodash._shimkeys'); /** Used for native method references */ var objectProto = Object.prototype; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeKeys = reNative.test(nativeKeys = Object.keys) && nativeKeys; /** * Creates an array composed of the own enumerable property names of an object. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns an array of property names. 
* @example * * _.keys({ 'one': 1, 'two': 2, 'three': 3 }); * // => ['one', 'two', 'three'] (property order is not guaranteed across environments) */ var keys = !nativeKeys ? shimKeys : function(object) { if (!isObject(object)) { return []; } return nativeKeys(object); }; module.exports = keys; },{"lodash._renative":25,"lodash._shimkeys":26,"lodash.isobject":27}],25:[function(require,module,exports){ module.exports=require(12) },{}],26:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var objectTypes = require('lodash._objecttypes'); /** Used for native method references */ var objectProto = Object.prototype; /** Native method shortcuts */ var hasOwnProperty = objectProto.hasOwnProperty; /** * A fallback implementation of `Object.keys` which produces an array of the * given object's own enumerable property names. * * @private * @type Function * @param {Object} object The object to inspect. * @returns {Array} Returns an array of property names. */ var shimKeys = function(object) { var index, iterable = object, result = []; if (!iterable) return result; if (!(objectTypes[typeof object])) return result; for (index in iterable) { if (hasOwnProperty.call(iterable, index)) { result.push(index); } } return result }; module.exports = shimKeys; },{"lodash._objecttypes":23}],27:[function(require,module,exports){ module.exports=require(18) },{"lodash._objecttypes":23}],28:[function(require,module,exports){ var ich = require('icanhaz') module.exports.ich = ich module.exports.getKeywordCount = function(data, keyword) { var group = [] data.forEach(function (d) { for(var key in d) { var value = d[key].toString().toLowerCase() if (value.match(keyword.toLowerCase())) group.push(d) } }) return group.length if (group = []) return "0" } module.exports.getKeyword = function(data, keyword) { var group = [] data.forEach(function (d) { for(var key in d) { var value = d[key].toString().toLowerCase() if (value.match(keyword.toLowerCase())) group.push(d) } }) return group if (group = []) return "no matches" } module.exports.getColumnTotal = function(data, column) { var total = [] data.forEach(function (d) { if (d[column] === "") return total.push(+d[column]) }) return total.reduce(function(a,b) { return a + b }) } module.exports.getColumnAverage = function(data, column) { var total = getColumnTotal(data, column) var average = total / data.length return average } module.exports.getMax = function(data, column) { var result = [] data.forEach(function (element){ if (result.length === 0) return result.push(element) else { if (element[column].valueOf() > result[0][column].valueOf()) { result.length = 0 return result.push(element) } if (element[column].valueOf() === result[0][column].valueOf()) { return result.push(element) } } }) return result } module.exports.getMin = function(data, column) { var result = [] data.forEach(function (element){ if (result.length === 0) return result.push(element) else { if (element[column].valueOf() < result[0][column].valueOf()) { result.length = 0 return result.push(element) } if (element[column].valueOf() === result[0][column].valueOf()) { return result.push(element) } } }) return result } // out of the data, 
// out of the data, filter something from a category
module.exports.getMatches = function (data, filter, category) {
  var matches = []
  data.forEach(function (element) {
    var projectType = element[category].toString().toLowerCase()
    if (projectType === filter.toLowerCase()) matches.push(element)
  })
  return matches
}

module.exports.mostFrequent = function(data, category) {
  var count = {}
  for (var i = 0; i < data.length; i++) {
    if (!count[data[i][category]]) {
      count[data[i][category]] = 0
    }
    count[data[i][category]]++
  }
  var sortable = []
  for (var category in count) {
    sortable.push([category, count[category]])
  }
  sortable.sort(function(a, b) {return b[1] - a[1]})
  return sortable // returns array of arrays, in order
}

// thank you! http://james.padolsey.com/javascript/deep-copying-of-objects-and-arrays/
module.exports.deepCopy = function deepCopy(obj) {
  if (Object.prototype.toString.call(obj) === '[object Array]') {
    var out = [], i = 0, len = obj.length;
    for ( ; i < len; i++ ) {
      out[i] = deepCopy(obj[i]);
    }
    return out;
  }
  if (typeof obj === 'object') {
    var out = {}, i;
    for ( i in obj ) {
      out[i] = deepCopy(obj[i]);
    }
    return out;
  }
  return obj;
}

module.exports.getOccurance = function(data, category) {
  var occuranceCount = {}
  for (var i = 0; i < data.length; i++) {
    if (!occuranceCount[data[i][category]]) {
      occuranceCount[data[i][category]] = 0
    }
    occuranceCount[data[i][category]]++
  }
  return occuranceCount // returns object, keys alphabetical
}

module.exports.makeColorArrayOfObject = function(data, colors, category) {
  var keys = Object.keys(data)
  var counter = 1
  return keys.map(function(key){
    // cycle through the palette, starting at colors[1]
    var colorIndex = counter % colors.length
    var h = {units: data[key], hexcolor: colors[colorIndex]}
    h[category] = key
    counter++
    return h
  })
}

module.exports.makeArrayOfObject = function(data) {
  var keys = Object.keys(data)
  return keys.map(function(key){
    // var h = {label: key, units: data[key], hexcolor: "#FDBDBD"}
    var h = {label: key, units: data[key]}
    return h
  })
}

},{"icanhaz":2}],29:[function(require,module,exports){
var mapbox = require('mapbox.js')
var ich = require('icanhaz')

module.exports.buildOptionObject = buildOptionObject
function buildOptionObject(optionsJSON, lineItem) {
  var newObj = {}
  optionsJSON.forEach(function(option) {
    newObj[option] = lineItem[option]
  })
  return newObj
}

module.exports.makeupOptionObject = makeupOptionObject
// Declared as a named function so createGeoJSON below can call it directly.
function makeupOptionObject(lineItem) {
  var options = []
  for (var i in lineItem) {
    options.push(i);
  }
  return options
}

module.exports.createGeoJSON = function(data, optionsJSON) {
  var geoJSON = []
  data.forEach(function(lineItem){
    var hasGeo = confirmGeo(lineItem)
    if (hasGeo && !lineItem.lat && !lineItem.long) handleLatLong(lineItem)
    if (lineItem.linestring || lineItem.multipolygon) hasGeo = true
    if (!hasGeo) return
    if (!optionsJSON) {
      optionsJSON = makeupOptionObject(lineItem)
      var optionObj = buildOptionObject(optionsJSON, lineItem)
    } else {
      optionObj = buildOptionObject(optionsJSON, lineItem)
    }
    var type = determineType(lineItem)
    if (lineItem.polygon || lineItem.multipolygon || lineItem.linestring) {
      var shapeFeature = shapeJSON(lineItem, type, optionObj)
      geoJSON.push(shapeFeature)
    } else {
      var pointFeature = pointJSON(lineItem, type, optionObj)
      geoJSON.push(pointFeature)
    }
  })
  return geoJSON
}

module.exports.confirmGeo = confirmGeo
function confirmGeo(lineItem) {
  var hasGeo = false
  if (lineItem.lat && lineItem.long || lineItem.polygon) hasGeo = true
  if (lineItem.latitude && lineItem.longitude ||
lineItem.polygon) hasGeo = true if (lineItem.geolatitude && lineItem.geolongitude || lineItem.polygon) hasGeo = true return hasGeo } module.exports.handleLatLong = handleLatLong function handleLatLong(lineItem) { if (lineItem.latitude && lineItem.longitude || lineItem.polygon) { lineItem.lat = lineItem.latitude lineItem.long = lineItem.longitude delete lineItem.latitude delete lineItem.longitude return lineItem } if (lineItem.geolatitude && lineItem.geolongitude || lineItem.polygon) { lineItem.lat = lineItem.geolatitude lineItem.long = lineItem.geolongitude delete lineItem.geolatitude delete lineItem.geolongitude return lineItem } } module.exports.pointJSON = pointJSON function pointJSON(lineItem, type, optionObj) { var lowercaseType = type.toLowerCase() var pointFeature = { type: "Feature", "geometry": { "type": type, "coordinates": [+lineItem.long, +lineItem.lat] }, "properties": { "marker-size": "small", "marker-color": lineItem.hexcolor }, "opts": optionObj } return pointFeature } module.exports.shapeJSON = shapeJSON function shapeJSON(lineItem, type, optionObj) { var lowercaseType = type.toLowerCase() var coords if (type !== "LineString") { coords = JSON.parse( "[[" + lineItem[lowercaseType] + "]]" ) } else { coords = JSON.parse("[" + lineItem[lowercaseType] + "]") } var shapeFeature = { type: "Feature", "geometry": { "type": type, "coordinates": coords }, "properties": { "fillColor": lineItem.hexcolor, "color": lineItem.hexcolor }, "opts": optionObj } return shapeFeature } module.exports.determineType = determineType function determineType(lineItem) { var type = "" if (lineItem.lat && lineItem.long) type = "Point" if (lineItem.polygon) type = "Polygon" if (lineItem.multipolygon) type = "MultiPolygon" if (lineItem.linestring) type = "LineString" return type } module.exports.loadMap = function(mapDiv) { var map = L.mapbox.map(mapDiv) map.touchZoom.disable() map.doubleClickZoom.disable() map.scrollWheelZoom.disable() return map } module.exports.addTileLayer = function(map, tileLayer) { var layer = L.mapbox.tileLayer(tileLayer) layer.addTo(map) } module.exports.makePopupTemplate = makePopupTemplate function makePopupTemplate(geoJSON) { var allOptions = geoJSON[0].opts var keys = [] for (var i in allOptions) keys.push(i) var mustacheKeys = mustachify(keys) var template = {} template.name = "popup" + Math.random() template.template = templateString(mustacheKeys) return template } module.exports.templateString = templateString function templateString(mustacheKeys) { var template = "<ul>" var counter = mustacheKeys.length mustacheKeys.forEach(function(key) { counter-- if (counter === 0) template = template.concat(key, "</ul>") else template = template.concat(key) }) return template } module.exports.mustachify = mustachify function mustachify(array) { var newArray = [] array.forEach(function(item) { item = "<li><b>" + item + ":</b> {{" + item + "}}</li>" newArray.push(item) }) return newArray } module.exports.addMarkerLayer = function(geoJSON, map, template, clusterMarkers) { if (!template) { template = makePopupTemplate(geoJSON) ich.addTemplate(template.name, template.template) } else { var template = {"template": template} template.name = "popup" + Math.random() ich.addTemplate(template.name, template.template) } var features = { "type": "FeatureCollection", "features": geoJSON } var layer = L.geoJson(features, { pointToLayer: L.mapbox.marker.style, style: function(feature) { return feature.properties } }) var bounds = layer.getBounds() // check option and Leaflet extension var cluster = 
clusterMarkers && 'MarkerClusterGroup' in L if (cluster) { var clusterGroup = new L.MarkerClusterGroup() } map.fitBounds(bounds) layer.eachLayer(function(marker) { var popupContent = ich[template.name](marker.feature.opts) marker.bindPopup(popupContent.html(), {closeButton: false}) if (cluster) { clusterGroup.addLayer(marker) } }) if (cluster) { map.addLayer(clusterGroup) } else { layer.addTo(map) } return layer } },{"icanhaz":2,"mapbox.js":31}],30:[function(require,module,exports){ // Copyright (C) 2010 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview * Implements RFC 3986 for parsing/formatting URIs. * * @author [email protected] * \@provides URI * \@overrides window */ var URI = (function () { /** * creates a uri from the string form. The parser is relaxed, so special * characters that aren't escaped but don't cause ambiguities will not cause * parse failures. * * @return {URI|null} */ function parse(uriStr) { var m = ('' + uriStr).match(URI_RE_); if (!m) { return null; } return new URI( nullIfAbsent(m[1]), nullIfAbsent(m[2]), nullIfAbsent(m[3]), nullIfAbsent(m[4]), nullIfAbsent(m[5]), nullIfAbsent(m[6]), nullIfAbsent(m[7])); } /** * creates a uri from the given parts. * * @param scheme {string} an unencoded scheme such as "http" or null * @param credentials {string} unencoded user credentials or null * @param domain {string} an unencoded domain name or null * @param port {number} a port number in [1, 32768]. * -1 indicates no port, as does null. * @param path {string} an unencoded path * @param query {Array.<string>|string|null} a list of unencoded cgi * parameters where even values are keys and odds the corresponding values * or an unencoded query. * @param fragment {string} an unencoded fragment without the "#" or null. * @return {URI} */ function create(scheme, credentials, domain, port, path, query, fragment) { var uri = new URI( encodeIfExists2(scheme, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_), encodeIfExists2( credentials, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_), encodeIfExists(domain), port > 0 ? port.toString() : null, encodeIfExists2(path, URI_DISALLOWED_IN_PATH_), null, encodeIfExists(fragment)); if (query) { if ('string' === typeof query) { uri.setRawQuery(query.replace(/[^?&=0-9A-Za-z_\-~.%]/g, encodeOne)); } else { uri.setAllParameters(query); } } return uri; } function encodeIfExists(unescapedPart) { if ('string' == typeof unescapedPart) { return encodeURIComponent(unescapedPart); } return null; }; /** * if unescapedPart is non null, then escapes any characters in it that aren't * valid characters in a url and also escapes any special characters that * appear in extra. * * @param unescapedPart {string} * @param extra {RegExp} a character set of characters in [\01-\177]. * @return {string|null} null iff unescapedPart == null. 
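 *
 * Illustrative example (added for clarity, not part of the original Caja docs):
 * encodeIfExists2('a/b?c', URI_DISALLOWED_IN_PATH_) yields 'a/b%3Fc', while a
 * null unescapedPart yields null.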
*/ function encodeIfExists2(unescapedPart, extra) { if ('string' == typeof unescapedPart) { return encodeURI(unescapedPart).replace(extra, encodeOne); } return null; }; /** converts a character in [\01-\177] to its url encoded equivalent. */ function encodeOne(ch) { var n = ch.charCodeAt(0); return '%' + '0123456789ABCDEF'.charAt((n >> 4) & 0xf) + '0123456789ABCDEF'.charAt(n & 0xf); } /** * {@updoc * $ normPath('foo/./bar') * # 'foo/bar' * $ normPath('./foo') * # 'foo' * $ normPath('foo/.') * # 'foo' * $ normPath('foo//bar') * # 'foo/bar' * } */ function normPath(path) { return path.replace(/(^|\/)\.(?:\/|$)/g, '$1').replace(/\/{2,}/g, '/'); } var PARENT_DIRECTORY_HANDLER = new RegExp( '' // A path break + '(/|^)' // followed by a non .. path element // (cannot be . because normPath is used prior to this RegExp) + '(?:[^./][^/]*|\\.{2,}(?:[^./][^/]*)|\\.{3,}[^/]*)' // followed by .. followed by a path break. + '/\\.\\.(?:/|$)'); var PARENT_DIRECTORY_HANDLER_RE = new RegExp(PARENT_DIRECTORY_HANDLER); var EXTRA_PARENT_PATHS_RE = /^(?:\.\.\/)*(?:\.\.$)?/; /** * Normalizes its input path and collapses all . and .. sequences except for * .. sequences that would take it above the root of the current parent * directory. * {@updoc * $ collapse_dots('foo/../bar') * # 'bar' * $ collapse_dots('foo/./bar') * # 'foo/bar' * $ collapse_dots('foo/../bar/./../../baz') * # 'baz' * $ collapse_dots('../foo') * # '../foo' * $ collapse_dots('../foo').replace(EXTRA_PARENT_PATHS_RE, '') * # 'foo' * } */ function collapse_dots(path) { if (path === null) { return null; } var p = normPath(path); // Only /../ left to flatten var r = PARENT_DIRECTORY_HANDLER_RE; // We replace with $1 which matches a / before the .. because this // guarantees that: // (1) we have at most 1 / between the adjacent place, // (2) always have a slash if there is a preceding path section, and // (3) we never turn a relative path into an absolute path. for (var q; (q = p.replace(r, '$1')) != p; p = q) {}; return p; } /** * resolves a relative url string to a base uri. * @return {URI} */ function resolve(baseUri, relativeUri) { // there are several kinds of relative urls: // 1. //foo - replaces everything from the domain on. foo is a domain name // 2. foo - replaces the last part of the path, the whole query and fragment // 3. /foo - replaces the the path, the query and fragment // 4. ?foo - replace the query and fragment // 5. 
#foo - replace the fragment only var absoluteUri = baseUri.clone(); // we satisfy these conditions by looking for the first part of relativeUri // that is not blank and applying defaults to the rest var overridden = relativeUri.hasScheme(); if (overridden) { absoluteUri.setRawScheme(relativeUri.getRawScheme()); } else { overridden = relativeUri.hasCredentials(); } if (overridden) { absoluteUri.setRawCredentials(relativeUri.getRawCredentials()); } else { overridden = relativeUri.hasDomain(); } if (overridden) { absoluteUri.setRawDomain(relativeUri.getRawDomain()); } else { overridden = relativeUri.hasPort(); } var rawPath = relativeUri.getRawPath(); var simplifiedPath = collapse_dots(rawPath); if (overridden) { absoluteUri.setPort(relativeUri.getPort()); simplifiedPath = simplifiedPath && simplifiedPath.replace(EXTRA_PARENT_PATHS_RE, ''); } else { overridden = !!rawPath; if (overridden) { // resolve path properly if (simplifiedPath.charCodeAt(0) !== 0x2f /* / */) { // path is relative var absRawPath = collapse_dots(absoluteUri.getRawPath() || '') .replace(EXTRA_PARENT_PATHS_RE, ''); var slash = absRawPath.lastIndexOf('/') + 1; simplifiedPath = collapse_dots( (slash ? absRawPath.substring(0, slash) : '') + collapse_dots(rawPath)) .replace(EXTRA_PARENT_PATHS_RE, ''); } } else { simplifiedPath = simplifiedPath && simplifiedPath.replace(EXTRA_PARENT_PATHS_RE, ''); if (simplifiedPath !== rawPath) { absoluteUri.setRawPath(simplifiedPath); } } } if (overridden) { absoluteUri.setRawPath(simplifiedPath); } else { overridden = relativeUri.hasQuery(); } if (overridden) { absoluteUri.setRawQuery(relativeUri.getRawQuery()); } else { overridden = relativeUri.hasFragment(); } if (overridden) { absoluteUri.setRawFragment(relativeUri.getRawFragment()); } return absoluteUri; } /** * a mutable URI. * * This class contains setters and getters for the parts of the URI. * The <tt>getXYZ</tt>/<tt>setXYZ</tt> methods return the decoded part -- so * <code>uri.parse('/foo%20bar').getPath()</code> will return the decoded path, * <tt>/foo bar</tt>. * * <p>The raw versions of fields are available too. * <code>uri.parse('/foo%20bar').getRawPath()</code> will return the raw path, * <tt>/foo%20bar</tt>. Use the raw setters with care, since * <code>URI::toString</code> is not guaranteed to return a valid url if a * raw setter was used. * * <p>All setters return <tt>this</tt> and so may be chained, a la * <code>uri.parse('/foo').setFragment('part').toString()</code>. * * <p>You should not use this constructor directly -- please prefer the factory * functions {@link uri.parse}, {@link uri.create}, {@link uri.resolve} * instead.</p> * * <p>The parameters are all raw (assumed to be properly escaped) parts, and * any (but not all) may be null. Undefined is not allowed.</p> * * @constructor */ function URI( rawScheme, rawCredentials, rawDomain, port, rawPath, rawQuery, rawFragment) { this.scheme_ = rawScheme; this.credentials_ = rawCredentials; this.domain_ = rawDomain; this.port_ = port; this.path_ = rawPath; this.query_ = rawQuery; this.fragment_ = rawFragment; /** * @type {Array|null} */ this.paramCache_ = null; } /** returns the string form of the url. 
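 * For example (illustrative, not from the original docs), a parsed URI
 * round-trips unchanged: URI.parse('http://example.com:8080/a?b=1#c').toString()
 * returns the same string, because each raw component is re-emitted as-is.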
*/ URI.prototype.toString = function () { var out = []; if (null !== this.scheme_) { out.push(this.scheme_, ':'); } if (null !== this.domain_) { out.push('//'); if (null !== this.credentials_) { out.push(this.credentials_, '@'); } out.push(this.domain_); if (null !== this.port_) { out.push(':', this.port_.toString()); } } if (null !== this.path_) { out.push(this.path_); } if (null !== this.query_) { out.push('?', this.query_); } if (null !== this.fragment_) { out.push('#', this.fragment_); } return out.join(''); }; URI.prototype.clone = function () { return new URI(this.scheme_, this.credentials_, this.domain_, this.port_, this.path_, this.query_, this.fragment_); }; URI.prototype.getScheme = function () { // HTML5 spec does not require the scheme to be lowercased but // all common browsers except Safari lowercase the scheme. return this.scheme_ && decodeURIComponent(this.scheme_).toLowerCase(); }; URI.prototype.getRawScheme = function () { return this.scheme_; }; URI.prototype.setScheme = function (newScheme) { this.scheme_ = encodeIfExists2( newScheme, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_); return this; }; URI.prototype.setRawScheme = function (newScheme) { this.scheme_ = newScheme ? newScheme : null; return this; }; URI.prototype.hasScheme = function () { return null !== this.scheme_; }; URI.prototype.getCredentials = function () { return this.credentials_ && decodeURIComponent(this.credentials_); }; URI.prototype.getRawCredentials = function () { return this.credentials_; }; URI.prototype.setCredentials = function (newCredentials) { this.credentials_ = encodeIfExists2( newCredentials, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_); return this; }; URI.prototype.setRawCredentials = function (newCredentials) { this.credentials_ = newCredentials ? newCredentials : null; return this; }; URI.prototype.hasCredentials = function () { return null !== this.credentials_; }; URI.prototype.getDomain = function () { return this.domain_ && decodeURIComponent(this.domain_); }; URI.prototype.getRawDomain = function () { return this.domain_; }; URI.prototype.setDomain = function (newDomain) { return this.setRawDomain(newDomain && encodeURIComponent(newDomain)); }; URI.prototype.setRawDomain = function (newDomain) { this.domain_ = newDomain ? newDomain : null; // Maintain the invariant that paths must start with a slash when the URI // is not path-relative. return this.setRawPath(this.path_); }; URI.prototype.hasDomain = function () { return null !== this.domain_; }; URI.prototype.getPort = function () { return this.port_ && decodeURIComponent(this.port_); }; URI.prototype.setPort = function (newPort) { if (newPort) { newPort = Number(newPort); if (newPort !== (newPort & 0xffff)) { throw new Error('Bad port number ' + newPort); } this.port_ = '' + newPort; } else { this.port_ = null; } return this; }; URI.prototype.hasPort = function () { return null !== this.port_; }; URI.prototype.getPath = function () { return this.path_ && decodeURIComponent(this.path_); }; URI.prototype.getRawPath = function () { return this.path_; }; URI.prototype.setPath = function (newPath) { return this.setRawPath(encodeIfExists2(newPath, URI_DISALLOWED_IN_PATH_)); }; URI.prototype.setRawPath = function (newPath) { if (newPath) { newPath = String(newPath); this.path_ = // Paths must start with '/' unless this is a path-relative URL. (!this.domain_ || /^\//.test(newPath)) ? 
newPath : '/' + newPath; } else { this.path_ = null; } return this; }; URI.prototype.hasPath = function () { return null !== this.path_; }; URI.prototype.getQuery = function () { // From http://www.w3.org/Addressing/URL/4_URI_Recommentations.html // Within the query string, the plus sign is reserved as shorthand notation // for a space. return this.query_ && decodeURIComponent(this.query_).replace(/\+/g, ' '); }; URI.prototype.getRawQuery = function () { return this.query_; }; URI.prototype.setQuery = function (newQuery) { this.paramCache_ = null; this.query_ = encodeIfExists(newQuery); return this; }; URI.prototype.setRawQuery = function (newQuery) { this.paramCache_ = null; this.query_ = newQuery ? newQuery : null; return this; }; URI.prototype.hasQuery = function () { return null !== this.query_; }; /** * sets the query given a list of strings of the form * [ key0, value0, key1, value1, ... ]. * * <p><code>uri.setAllParameters(['a', 'b', 'c', 'd']).getQuery()</code> * will yield <code>'a=b&c=d'</code>. */ URI.prototype.setAllParameters = function (params) { if (typeof params === 'object') { if (!(params instanceof Array) && (params instanceof Object || Object.prototype.toString.call(params) !== '[object Array]')) { var newParams = []; var i = -1; for (var k in params) { var v = params[k]; if ('string' === typeof v) { newParams[++i] = k; newParams[++i] = v; } } params = newParams; } } this.paramCache_ = null; var queryBuf = []; var separator = ''; for (var j = 0; j < params.length;) { var k = params[j++]; var v = params[j++]; queryBuf.push(separator, encodeURIComponent(k.toString())); separator = '&'; if (v) { queryBuf.push('=', encodeURIComponent(v.toString())); } } this.query_ = queryBuf.join(''); return this; }; URI.prototype.checkParameterCache_ = function () { if (!this.paramCache_) { var q = this.query_; if (!q) { this.paramCache_ = []; } else { var cgiParams = q.split(/[&\?]/); var out = []; var k = -1; for (var i = 0; i < cgiParams.length; ++i) { var m = cgiParams[i].match(/^([^=]*)(?:=(.*))?$/); // From http://www.w3.org/Addressing/URL/4_URI_Recommentations.html // Within the query string, the plus sign is reserved as shorthand // notation for a space. out[++k] = decodeURIComponent(m[1]).replace(/\+/g, ' '); out[++k] = decodeURIComponent(m[2] || '').replace(/\+/g, ' '); } this.paramCache_ = out; } } }; /** * sets the values of the named cgi parameters. * * <p>So, <code>uri.parse('foo?a=b&c=d&e=f').setParameterValues('c', ['new']) * </code> yields <tt>foo?a=b&c=new&e=f</tt>.</p> * * @param key {string} * @param values {Array.<string>} the new values. If values is a single string * then it will be treated as the sole value. 
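 *
 * <p>For instance (illustrative example, not from the original docs),
 * <code>uri.parse('foo?a=b').setParameterValues('a', 'z').getQuery()</code>
 * yields <tt>a=z</tt>.</p>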
*/ URI.prototype.setParameterValues = function (key, values) { // be nice and avoid subtle bugs where [] operator on string performs charAt // on some browsers and crashes on IE if (typeof values === 'string') { values = [ values ]; } this.checkParameterCache_(); var newValueIndex = 0; var pc = this.paramCache_; var params = []; for (var i = 0, k = 0; i < pc.length; i += 2) { if (key === pc[i]) { if (newValueIndex < values.length) { params.push(key, values[newValueIndex++]); } } else { params.push(pc[i], pc[i + 1]); } } while (newValueIndex < values.length) { params.push(key, values[newValueIndex++]); } this.setAllParameters(params); return this; }; URI.prototype.removeParameter = function (key) { return this.setParameterValues(key, []); }; /** * returns the parameters specified in the query part of the uri as a list of * keys and values like [ key0, value0, key1, value1, ... ]. * * @return {Array.<string>} */ URI.prototype.getAllParameters = function () { this.checkParameterCache_(); return this.paramCache_.slice(0, this.paramCache_.length); }; /** * returns the value<b>s</b> for a given cgi parameter as a list of decoded * query parameter values. * @return {Array.<string>} */ URI.prototype.getParameterValues = function (paramNameUnescaped) { this.checkParameterCache_(); var values = []; for (var i = 0; i < this.paramCache_.length; i += 2) { if (paramNameUnescaped === this.paramCache_[i]) { values.push(this.paramCache_[i + 1]); } } return values; }; /** * returns a map of cgi parameter names to (non-empty) lists of values. * @return {Object.<string,Array.<string>>} */ URI.prototype.getParameterMap = function (paramNameUnescaped) { this.checkParameterCache_(); var paramMap = {}; for (var i = 0; i < this.paramCache_.length; i += 2) { var key = this.paramCache_[i++], value = this.paramCache_[i++]; if (!(key in paramMap)) { paramMap[key] = [value]; } else { paramMap[key].push(value); } } return paramMap; }; /** * returns the first value for a given cgi parameter or null if the given * parameter name does not appear in the query string. * If the given parameter name does appear, but has no '<tt>=</tt>' following * it, then the empty string will be returned. * @return {string|null} */ URI.prototype.getParameterValue = function (paramNameUnescaped) { this.checkParameterCache_(); for (var i = 0; i < this.paramCache_.length; i += 2) { if (paramNameUnescaped === this.paramCache_[i]) { return this.paramCache_[i + 1]; } } return null; }; URI.prototype.getFragment = function () { return this.fragment_ && decodeURIComponent(this.fragment_); }; URI.prototype.getRawFragment = function () { return this.fragment_; }; URI.prototype.setFragment = function (newFragment) { this.fragment_ = newFragment ? encodeURIComponent(newFragment) : null; return this; }; URI.prototype.setRawFragment = function (newFragment) { this.fragment_ = newFragment ? newFragment : null; return this; }; URI.prototype.hasFragment = function () { return null !== this.fragment_; }; function nullIfAbsent(matchPart) { return ('string' == typeof matchPart) && (matchPart.length > 0) ? matchPart : null; } /** * a regular expression for breaking a URI into its component parts. * * <p>http://www.gbiv.com/protocols/uri/rfc/rfc3986.html#RFC2234 says * As the "first-match-wins" algorithm is identical to the "greedy" * disambiguation method used by POSIX regular expressions, it is natural and * commonplace to use a regular expression for parsing the potential five * components of a URI reference. 
* * <p>The following line is the regular expression for breaking-down a * well-formed URI reference into its components. * * <pre> * ^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))? * 12 3 4 5 6 7 8 9 * </pre> * * <p>The numbers in the second line above are only to assist readability; they * indicate the reference points for each subexpression (i.e., each paired * parenthesis). We refer to the value matched for subexpression <n> as $<n>. * For example, matching the above expression to * <pre> * http://www.ics.uci.edu/pub/ietf/uri/#Related * </pre> * results in the following subexpression matches: * <pre> * $1 = http: * $2 = http * $3 = //www.ics.uci.edu * $4 = www.ics.uci.edu * $5 = /pub/ietf/uri/ * $6 = <undefined> * $7 = <undefined> * $8 = #Related * $9 = Related * </pre> * where <undefined> indicates that the component is not present, as is the * case for the query component in the above example. Therefore, we can * determine the value of the five components as * <pre> * scheme = $2 * authority = $4 * path = $5 * query = $7 * fragment = $9 * </pre> * * <p>msamuel: I have modified the regular expression slightly to expose the * credentials, domain, and port separately from the authority. * The modified version yields * <pre> * $1 = http scheme * $2 = <undefined> credentials -\ * $3 = www.ics.uci.edu domain | authority * $4 = <undefined> port -/ * $5 = /pub/ietf/uri/ path * $6 = <undefined> query without ? * $7 = Related fragment without # * </pre> */ var URI_RE_ = new RegExp( "^" + "(?:" + "([^:/?#]+)" + // scheme ":)?" + "(?://" + "(?:([^/?#]*)@)?" + // credentials "([^/?#:@]*)" + // domain "(?::([0-9]+))?" + // port ")?" + "([^?#]+)?" + // path "(?:\\?([^#]*))?" + // query "(?:#(.*))?" + // fragment "$" ); var URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_ = /[#\/\?@]/g; var URI_DISALLOWED_IN_PATH_ = /[\#\?]/g; URI.parse = parse; URI.create = create; URI.resolve = resolve; URI.collapse_dots = collapse_dots; // Visible for testing. // lightweight string-based api for loadModuleMaker URI.utils = { mimeTypeOf: function (uri) { var uriObj = parse(uri); if (/\.html$/.test(uriObj.getPath())) { return 'text/html'; } else { return 'application/javascript'; } }, resolve: function (base, uri) { if (base) { return resolve(parse(base), parse(uri)).toString(); } else { return '' + uri; } } }; return URI; })(); // Copyright Google Inc. 
// Licensed under the Apache Licence Version 2.0 // Autogenerated at Mon Feb 25 13:05:42 EST 2013 // @overrides window // @provides html4 var html4 = {}; html4.atype = { 'NONE': 0, 'URI': 1, 'URI_FRAGMENT': 11, 'SCRIPT': 2, 'STYLE': 3, 'HTML': 12, 'ID': 4, 'IDREF': 5, 'IDREFS': 6, 'GLOBAL_NAME': 7, 'LOCAL_NAME': 8, 'CLASSES': 9, 'FRAME_TARGET': 10, 'MEDIA_QUERY': 13 }; html4[ 'atype' ] = html4.atype; html4.ATTRIBS = { '*::class': 9, '*::dir': 0, '*::draggable': 0, '*::hidden': 0, '*::id': 4, '*::inert': 0, '*::itemprop': 0, '*::itemref': 6, '*::itemscope': 0, '*::lang': 0, '*::onblur': 2, '*::onchange': 2, '*::onclick': 2, '*::ondblclick': 2, '*::onfocus': 2, '*::onkeydown': 2, '*::onkeypress': 2, '*::onkeyup': 2, '*::onload': 2, '*::onmousedown': 2, '*::onmousemove': 2, '*::onmouseout': 2, '*::onmouseover': 2, '*::onmouseup': 2, '*::onreset': 2, '*::onscroll': 2, '*::onselect': 2, '*::onsubmit': 2, '*::onunload': 2, '*::spellcheck': 0, '*::style': 3, '*::title': 0, '*::translate': 0, 'a::accesskey': 0, 'a::coords': 0, 'a::href': 1, 'a::hreflang': 0, 'a::name': 7, 'a::onblur': 2, 'a::onfocus': 2, 'a::shape': 0, 'a::tabindex': 0, 'a::target': 10, 'a::type': 0, 'area::accesskey': 0, 'area::alt': 0, 'area::coords': 0, 'area::href': 1, 'area::nohref': 0, 'area::onblur': 2, 'area::onfocus': 2, 'area::shape': 0, 'area::tabindex': 0, 'area::target': 10, 'audio::controls': 0, 'audio::loop': 0, 'audio::mediagroup': 5, 'audio::muted': 0, 'audio::preload': 0, 'bdo::dir': 0, 'blockquote::cite': 1, 'br::clear': 0, 'button::accesskey': 0, 'button::disabled': 0, 'button::name': 8, 'button::onblur': 2, 'button::onfocus': 2, 'button::tabindex': 0, 'button::type': 0, 'button::value': 0, 'canvas::height': 0, 'canvas::width': 0, 'caption::align': 0, 'col::align': 0, 'col::char': 0, 'col::charoff': 0, 'col::span': 0, 'col::valign': 0, 'col::width': 0, 'colgroup::align': 0, 'colgroup::char': 0, 'colgroup::charoff': 0, 'colgroup::span': 0, 'colgroup::valign': 0, 'colgroup::width': 0, 'command::checked': 0, 'command::command': 5, 'command::disabled': 0, 'command::icon': 1, 'command::label': 0, 'command::radiogroup': 0, 'command::type': 0, 'data::value': 0, 'del::cite': 1, 'del::datetime': 0, 'details::open': 0, 'dir::compact': 0, 'div::align': 0, 'dl::compact': 0, 'fieldset::disabled': 0, 'font::color': 0, 'font::face': 0, 'font::size': 0, 'form::accept': 0, 'form::action': 1, 'form::autocomplete': 0, 'form::enctype': 0, 'form::method': 0, 'form::name': 7, 'form::novalidate': 0, 'form::onreset': 2, 'form::onsubmit': 2, 'form::target': 10, 'h1::align': 0, 'h2::align': 0, 'h3::align': 0, 'h4::align': 0, 'h5::align': 0, 'h6::align': 0, 'hr::align': 0, 'hr::noshade': 0, 'hr::size': 0, 'hr::width': 0, 'iframe::align': 0, 'iframe::frameborder': 0, 'iframe::height': 0, 'iframe::marginheight': 0, 'iframe::marginwidth': 0, 'iframe::width': 0, 'img::align': 0, 'img::alt': 0, 'img::border': 0, 'img::height': 0, 'img::hspace': 0, 'img::ismap': 0, 'img::name': 7, 'img::src': 1, 'img::usemap': 11, 'img::vspace': 0, 'img::width': 0, 'input::accept': 0, 'input::accesskey': 0, 'input::align': 0, 'input::alt': 0, 'input::autocomplete': 0, 'input::checked': 0, 'input::disabled': 0, 'input::inputmode': 0, 'input::ismap': 0, 'input::list': 5, 'input::max': 0, 'input::maxlength': 0, 'input::min': 0, 'input::multiple': 0, 'input::name': 8, 'input::onblur': 2, 'input::onchange': 2, 'input::onfocus': 2, 'input::onselect': 2, 'input::placeholder': 0, 'input::readonly': 0, 'input::required': 0, 'input::size': 0, 'input::src': 1, 
'input::step': 0, 'input::tabindex': 0, 'input::type': 0, 'input::usemap': 11, 'input::value': 0, 'ins::cite': 1, 'ins::datetime': 0, 'label::accesskey': 0, 'label::for': 5, 'label::onblur': 2, 'label::onfocus': 2, 'legend::accesskey': 0, 'legend::align': 0, 'li::type': 0, 'li::value': 0, 'map::name': 7, 'menu::compact': 0, 'menu::label': 0, 'menu::type': 0, 'meter::high': 0, 'meter::low': 0, 'meter::max': 0, 'meter::min': 0, 'meter::value': 0, 'ol::compact': 0, 'ol::reversed': 0, 'ol::start': 0, 'ol::type': 0, 'optgroup::disabled': 0, 'optgroup::label': 0, 'option::disabled': 0, 'option::label': 0, 'option::selected': 0, 'option::value': 0, 'output::for': 6, 'output::name': 8, 'p::align': 0, 'pre::width': 0, 'progress::max': 0, 'progress::min': 0, 'progress::value': 0, 'q::cite': 1, 'select::autocomplete': 0, 'select::disabled': 0, 'select::multiple': 0, 'select::name': 8, 'select::onblur': 2, 'select::onchange': 2, 'select::onfocus': 2, 'select::required': 0, 'select::size': 0, 'select::tabindex': 0, 'source::type': 0, 'table::align': 0, 'table::bgcolor': 0, 'table::border': 0, 'table::cellpadding': 0, 'table::cellspacing': 0, 'table::frame': 0, 'table::rules': 0, 'table::summary': 0, 'table::width': 0, 'tbody::align': 0, 'tbody::char': 0, 'tbody::charoff': 0, 'tbody::valign': 0, 'td::abbr': 0, 'td::align': 0, 'td::axis': 0, 'td::bgcolor': 0, 'td::char': 0, 'td::charoff': 0, 'td::colspan': 0, 'td::headers': 6, 'td::height': 0, 'td::nowrap': 0, 'td::rowspan': 0, 'td::scope': 0, 'td::valign': 0, 'td::width': 0, 'textarea::accesskey': 0, 'textarea::autocomplete': 0, 'textarea::cols': 0, 'textarea::disabled': 0, 'textarea::inputmode': 0, 'textarea::name': 8, 'textarea::onblur': 2, 'textarea::onchange': 2, 'textarea::onfocus': 2, 'textarea::onselect': 2, 'textarea::placeholder': 0, 'textarea::readonly': 0, 'textarea::required': 0, 'textarea::rows': 0, 'textarea::tabindex': 0, 'textarea::wrap': 0, 'tfoot::align': 0, 'tfoot::char': 0, 'tfoot::charoff': 0, 'tfoot::valign': 0, 'th::abbr': 0, 'th::align': 0, 'th::axis': 0, 'th::bgcolor': 0, 'th::char': 0, 'th::charoff': 0, 'th::colspan': 0, 'th::headers': 6, 'th::height': 0, 'th::nowrap': 0, 'th::rowspan': 0, 'th::scope': 0, 'th::valign': 0, 'th::width': 0, 'thead::align': 0, 'thead::char': 0, 'thead::charoff': 0, 'thead::valign': 0, 'tr::align': 0, 'tr::bgcolor': 0, 'tr::char': 0, 'tr::charoff': 0, 'tr::valign': 0, 'track::default': 0, 'track::kind': 0, 'track::label': 0, 'track::srclang': 0, 'ul::compact': 0, 'ul::type': 0, 'video::controls': 0, 'video::height': 0, 'video::loop': 0, 'video::mediagroup': 5, 'video::muted': 0, 'video::poster': 1, 'video::preload': 0, 'video::width': 0 }; html4[ 'ATTRIBS' ] = html4.ATTRIBS; html4.eflags = { 'OPTIONAL_ENDTAG': 1, 'EMPTY': 2, 'CDATA': 4, 'RCDATA': 8, 'UNSAFE': 16, 'FOLDABLE': 32, 'SCRIPT': 64, 'STYLE': 128, 'VIRTUALIZED': 256 }; html4[ 'eflags' ] = html4.eflags; html4.ELEMENTS = { 'a': 0, 'abbr': 0, 'acronym': 0, 'address': 0, 'applet': 272, 'area': 2, 'article': 0, 'aside': 0, 'audio': 0, 'b': 0, 'base': 274, 'basefont': 274, 'bdi': 0, 'bdo': 0, 'big': 0, 'blockquote': 0, 'body': 305, 'br': 2, 'button': 0, 'canvas': 0, 'caption': 0, 'center': 0, 'cite': 0, 'code': 0, 'col': 2, 'colgroup': 1, 'command': 2, 'data': 0, 'datalist': 0, 'dd': 1, 'del': 0, 'details': 0, 'dfn': 0, 'dialog': 272, 'dir': 0, 'div': 0, 'dl': 0, 'dt': 1, 'em': 0, 'fieldset': 0, 'figcaption': 0, 'figure': 0, 'font': 0, 'footer': 0, 'form': 0, 'frame': 274, 'frameset': 272, 'h1': 0, 'h2': 0, 'h3': 0, 'h4': 0, 'h5': 0, 'h6': 0, 
'head': 305, 'header': 0, 'hgroup': 0, 'hr': 2, 'html': 305, 'i': 0, 'iframe': 4, 'img': 2, 'input': 2, 'ins': 0, 'isindex': 274, 'kbd': 0, 'keygen': 274, 'label': 0, 'legend': 0, 'li': 1, 'link': 274, 'map': 0, 'mark': 0, 'menu': 0, 'meta': 274, 'meter': 0, 'nav': 0, 'nobr': 0, 'noembed': 276, 'noframes': 276, 'noscript': 276, 'object': 272, 'ol': 0, 'optgroup': 0, 'option': 1, 'output': 0, 'p': 1, 'param': 274, 'pre': 0, 'progress': 0, 'q': 0, 's': 0, 'samp': 0, 'script': 84, 'section': 0, 'select': 0, 'small': 0, 'source': 2, 'span': 0, 'strike': 0, 'strong': 0, 'style': 148, 'sub': 0, 'summary': 0, 'sup': 0, 'table': 0, 'tbody': 1, 'td': 1, 'textarea': 8, 'tfoot': 1, 'th': 1, 'thead': 1, 'time': 0, 'title': 280, 'tr': 1, 'track': 2, 'tt': 0, 'u': 0, 'ul': 0, 'var': 0, 'video': 0, 'wbr': 2 }; html4[ 'ELEMENTS' ] = html4.ELEMENTS; html4.ELEMENT_DOM_INTERFACES = { 'a': 'HTMLAnchorElement', 'abbr': 'HTMLElement', 'acronym': 'HTMLElement', 'address': 'HTMLElement', 'applet': 'HTMLAppletElement', 'area': 'HTMLAreaElement', 'article': 'HTMLElement', 'aside': 'HTMLElement', 'audio': 'HTMLAudioElement', 'b': 'HTMLElement', 'base': 'HTMLBaseElement', 'basefont': 'HTMLBaseFontElement', 'bdi': 'HTMLElement', 'bdo': 'HTMLElement', 'big': 'HTMLElement', 'blockquote': 'HTMLQuoteElement', 'body': 'HTMLBodyElement', 'br': 'HTMLBRElement', 'button': 'HTMLButtonElement', 'canvas': 'HTMLCanvasElement', 'caption': 'HTMLTableCaptionElement', 'center': 'HTMLElement', 'cite': 'HTMLElement', 'code': 'HTMLElement', 'col': 'HTMLTableColElement', 'colgroup': 'HTMLTableColElement', 'command': 'HTMLCommandElement', 'data': 'HTMLElement', 'datalist': 'HTMLDataListElement', 'dd': 'HTMLElement', 'del': 'HTMLModElement', 'details': 'HTMLDetailsElement', 'dfn': 'HTMLElement', 'dialog': 'HTMLDialogElement', 'dir': 'HTMLDirectoryElement', 'div': 'HTMLDivElement', 'dl': 'HTMLDListElement', 'dt': 'HTMLElement', 'em': 'HTMLElement', 'fieldset': 'HTMLFieldSetElement', 'figcaption': 'HTMLElement', 'figure': 'HTMLElement', 'font': 'HTMLFontElement', 'footer': 'HTMLElement', 'form': 'HTMLFormElement', 'frame': 'HTMLFrameElement', 'frameset': 'HTMLFrameSetElement', 'h1': 'HTMLHeadingElement', 'h2': 'HTMLHeadingElement', 'h3': 'HTMLHeadingElement', 'h4': 'HTMLHeadingElement', 'h5': 'HTMLHeadingElement', 'h6': 'HTMLHeadingElement', 'head': 'HTMLHeadElement', 'header': 'HTMLElement', 'hgroup': 'HTMLElement', 'hr': 'HTMLHRElement', 'html': 'HTMLHtmlElement', 'i': 'HTMLElement', 'iframe': 'HTMLIFrameElement', 'img': 'HTMLImageElement', 'input': 'HTMLInputElement', 'ins': 'HTMLModElement', 'isindex': 'HTMLUnknownElement', 'kbd': 'HTMLElement', 'keygen': 'HTMLKeygenElement', 'label': 'HTMLLabelElement', 'legend': 'HTMLLegendElement', 'li': 'HTMLLIElement', 'link': 'HTMLLinkElement', 'map': 'HTMLMapElement', 'mark': 'HTMLElement', 'menu': 'HTMLMenuElement', 'meta': 'HTMLMetaElement', 'meter': 'HTMLMeterElement', 'nav': 'HTMLElement', 'nobr': 'HTMLElement', 'noembed': 'HTMLElement', 'noframes': 'HTMLElement', 'noscript': 'HTMLElement', 'object': 'HTMLObjectElement', 'ol': 'HTMLOListElement', 'optgroup': 'HTMLOptGroupElement', 'option': 'HTMLOptionElement', 'output': 'HTMLOutputElement', 'p': 'HTMLParagraphElement', 'param': 'HTMLParamElement', 'pre': 'HTMLPreElement', 'progress': 'HTMLProgressElement', 'q': 'HTMLQuoteElement', 's': 'HTMLElement', 'samp': 'HTMLElement', 'script': 'HTMLScriptElement', 'section': 'HTMLElement', 'select': 'HTMLSelectElement', 'small': 'HTMLElement', 'source': 'HTMLSourceElement', 'span': 'HTMLSpanElement', 
'strike': 'HTMLElement', 'strong': 'HTMLElement', 'style': 'HTMLStyleElement', 'sub': 'HTMLElement', 'summary': 'HTMLElement', 'sup': 'HTMLElement', 'table': 'HTMLTableElement', 'tbody': 'HTMLTableSectionElement', 'td': 'HTMLTableDataCellElement', 'textarea': 'HTMLTextAreaElement', 'tfoot': 'HTMLTableSectionElement', 'th': 'HTMLTableHeaderCellElement', 'thead': 'HTMLTableSectionElement', 'time': 'HTMLTimeElement', 'title': 'HTMLTitleElement', 'tr': 'HTMLTableRowElement', 'track': 'HTMLTrackElement', 'tt': 'HTMLElement', 'u': 'HTMLElement', 'ul': 'HTMLUListElement', 'var': 'HTMLElement', 'video': 'HTMLVideoElement', 'wbr': 'HTMLElement' }; html4[ 'ELEMENT_DOM_INTERFACES' ] = html4.ELEMENT_DOM_INTERFACES; html4.ueffects = { 'NOT_LOADED': 0, 'SAME_DOCUMENT': 1, 'NEW_DOCUMENT': 2 }; html4[ 'ueffects' ] = html4.ueffects; html4.URIEFFECTS = { 'a::href': 2, 'area::href': 2, 'blockquote::cite': 0, 'command::icon': 1, 'del::cite': 0, 'form::action': 2, 'img::src': 1, 'input::src': 1, 'ins::cite': 0, 'q::cite': 0, 'video::poster': 1 }; html4[ 'URIEFFECTS' ] = html4.URIEFFECTS; html4.ltypes = { 'UNSANDBOXED': 2, 'SANDBOXED': 1, 'DATA': 0 }; html4[ 'ltypes' ] = html4.ltypes; html4.LOADERTYPES = { 'a::href': 2, 'area::href': 2, 'blockquote::cite': 2, 'command::icon': 1, 'del::cite': 2, 'form::action': 2, 'img::src': 1, 'input::src': 1, 'ins::cite': 2, 'q::cite': 2, 'video::poster': 1 }; html4[ 'LOADERTYPES' ] = html4.LOADERTYPES; // Copyright (C) 2006 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview * An HTML sanitizer that can satisfy a variety of security policies. * * <p> * The HTML sanitizer is built around a SAX parser and HTML element and * attributes schemas. * * If the cssparser is loaded, inline styles are sanitized using the * css property and value schemas. Else they are remove during * sanitization. * * If it exists, uses parseCssDeclarations, sanitizeCssProperty, cssSchema * * @author [email protected] * @author [email protected] * \@requires html4, URI * \@overrides window * \@provides html, html_sanitize */ // The Turkish i seems to be a non-issue, but abort in case it is. if ('I'.toLowerCase() !== 'i') { throw 'I/i problem'; } /** * \@namespace */ var html = (function(html4) { // For closure compiler var parseCssDeclarations, sanitizeCssProperty, cssSchema; if ('undefined' !== typeof window) { parseCssDeclarations = window['parseCssDeclarations']; sanitizeCssProperty = window['sanitizeCssProperty']; cssSchema = window['cssSchema']; } // The keys of this object must be 'quoted' or JSCompiler will mangle them! // This is a partial list -- lookupEntity() uses the host browser's parser // (when available) to implement full entity lookup. // Note that entities are in general case-sensitive; the uppercase ones are // explicitly defined by HTML5 (presumably as compatibility). var ENTITIES = { 'lt': '<', 'LT': '<', 'gt': '>', 'GT': '>', 'amp': '&', 'AMP': '&', 'quot': '"', 'apos': '\'', 'nbsp': '\240' }; // Patterns for types of entity/character reference names. 
var decimalEscapeRe = /^#(\d+)$/; var hexEscapeRe = /^#x([0-9A-Fa-f]+)$/; // contains every entity per http://www.w3.org/TR/2011/WD-html5-20110113/named-character-references.html var safeEntityNameRe = /^[A-Za-z][A-za-z0-9]+$/; // Used as a hook to invoke the browser's entity parsing. <textarea> is used // because its content is parsed for entities but not tags. // TODO(kpreid): This retrieval is a kludge and leads to silent loss of // functionality if the document isn't available. var entityLookupElement = ('undefined' !== typeof window && window['document']) ? window['document'].createElement('textarea') : null; /** * Decodes an HTML entity. * * {\@updoc * $ lookupEntity('lt') * # '<' * $ lookupEntity('GT') * # '>' * $ lookupEntity('amp') * # '&' * $ lookupEntity('nbsp') * # '\xA0' * $ lookupEntity('apos') * # "'" * $ lookupEntity('quot') * # '"' * $ lookupEntity('#xa') * # '\n' * $ lookupEntity('#10') * # '\n' * $ lookupEntity('#x0a') * # '\n' * $ lookupEntity('#010') * # '\n' * $ lookupEntity('#x00A') * # '\n' * $ lookupEntity('Pi') // Known failure * # '\u03A0' * $ lookupEntity('pi') // Known failure * # '\u03C0' * } * * @param {string} name the content between the '&' and the ';'. * @return {string} a single unicode code-point as a string. */ function lookupEntity(name) { // TODO: entity lookup as specified by HTML5 actually depends on the // presence of the ";". if (ENTITIES.hasOwnProperty(name)) { return ENTITIES[name]; } var m = name.match(decimalEscapeRe); if (m) { return String.fromCharCode(parseInt(m[1], 10)); } else if (!!(m = name.match(hexEscapeRe))) { return String.fromCharCode(parseInt(m[1], 16)); } else if (entityLookupElement && safeEntityNameRe.test(name)) { entityLookupElement.innerHTML = '&' + name + ';'; var text = entityLookupElement.textContent; ENTITIES[name] = text; return text; } else { return '&' + name + ';'; } } function decodeOneEntity(_, name) { return lookupEntity(name); } var nulRe = /\0/g; function stripNULs(s) { return s.replace(nulRe, ''); } var ENTITY_RE_1 = /&(#[0-9]+|#[xX][0-9A-Fa-f]+|\w+);/g; var ENTITY_RE_2 = /^(#[0-9]+|#[xX][0-9A-Fa-f]+|\w+);/; /** * The plain text of a chunk of HTML CDATA which possibly containing. * * {\@updoc * $ unescapeEntities('') * # '' * $ unescapeEntities('hello World!') * # 'hello World!' * $ unescapeEntities('1 &lt; 2 &amp;&AMP; 4 &gt; 3&#10;') * # '1 < 2 && 4 > 3\n' * $ unescapeEntities('&lt;&lt <- unfinished entity&gt;') * # '<&lt <- unfinished entity>' * $ unescapeEntities('/foo?bar=baz&copy=true') // & often unescaped in URLS * # '/foo?bar=baz&copy=true' * $ unescapeEntities('pi=&pi;&#x3c0;, Pi=&Pi;\u03A0') // FIXME: known failure * # 'pi=\u03C0\u03c0, Pi=\u03A0\u03A0' * } * * @param {string} s a chunk of HTML CDATA. It must not start or end inside * an HTML entity. */ function unescapeEntities(s) { return s.replace(ENTITY_RE_1, decodeOneEntity); } var ampRe = /&/g; var looseAmpRe = /&([^a-z#]|#(?:[^0-9x]|x(?:[^0-9a-f]|$)|$)|$)/gi; var ltRe = /[<]/g; var gtRe = />/g; var quotRe = /\"/g; /** * Escapes HTML special characters in attribute values. * * {\@updoc * $ escapeAttrib('') * # '' * $ escapeAttrib('"<<&==&>>"') // Do not just escape the first occurrence. * # '&#34;&lt;&lt;&amp;&#61;&#61;&amp;&gt;&gt;&#34;' * $ escapeAttrib('Hello <World>!') * # 'Hello &lt;World&gt;!' * } */ function escapeAttrib(s) { return ('' + s).replace(ampRe, '&amp;').replace(ltRe, '&lt;') .replace(gtRe, '&gt;').replace(quotRe, '&#34;'); } /** * Escape entities in RCDATA that can be escaped without changing the meaning. 
* {\@updoc * $ normalizeRCData('1 < 2 &&amp; 3 > 4 &amp;& 5 &lt; 7&8') * # '1 &lt; 2 &amp;&amp; 3 &gt; 4 &amp;&amp; 5 &lt; 7&amp;8' * } */ function normalizeRCData(rcdata) { return rcdata .replace(looseAmpRe, '&amp;$1') .replace(ltRe, '&lt;') .replace(gtRe, '&gt;'); } // TODO(felix8a): validate sanitizer regexs against the HTML5 grammar at // http://www.whatwg.org/specs/web-apps/current-work/multipage/syntax.html // http://www.whatwg.org/specs/web-apps/current-work/multipage/parsing.html // http://www.whatwg.org/specs/web-apps/current-work/multipage/tokenization.html // http://www.whatwg.org/specs/web-apps/current-work/multipage/tree-construction.html // We initially split input so that potentially meaningful characters // like '<' and '>' are separate tokens, using a fast dumb process that // ignores quoting. Then we walk that token stream, and when we see a // '<' that's the start of a tag, we use ATTR_RE to extract tag // attributes from the next token. That token will never have a '>' // character. However, it might have an unbalanced quote character, and // when we see that, we combine additional tokens to balance the quote. var ATTR_RE = new RegExp( '^\\s*' + '([-.:\\w]+)' + // 1 = Attribute name '(?:' + ( '\\s*(=)\\s*' + // 2 = Is there a value? '(' + ( // 3 = Attribute value // TODO(felix8a): maybe use backref to match quotes '(\")[^\"]*(\"|$)' + // 4, 5 = Double-quoted string '|' + '(\')[^\']*(\'|$)' + // 6, 7 = Single-quoted string '|' + // Positive lookahead to prevent interpretation of // <foo a= b=c> as <foo a='b=c'> // TODO(felix8a): might be able to drop this case '(?=[a-z][-\\w]*\\s*=)' + '|' + // Unquoted value that isn't an attribute name // (since we didn't match the positive lookahead above) '[^\"\'\\s]*' ) + ')' ) + ')?', 'i'); // false on IE<=8, true on most other browsers var splitWillCapture = ('a,b'.split(/(,)/).length === 3); // bitmask for tags with special parsing, like <script> and <textarea> var EFLAGS_TEXT = html4.eflags['CDATA'] | html4.eflags['RCDATA']; /** * Given a SAX-like event handler, produce a function that feeds those * events and a parameter to the event handler. * * The event handler has the form:{@code * { * // Name is an upper-case HTML tag name. Attribs is an array of * // alternating upper-case attribute names, and attribute values. The * // attribs array is reused by the parser. Param is the value passed to * // the saxParser. * startTag: function (name, attribs, param) { ... }, * endTag: function (name, param) { ... }, * pcdata: function (text, param) { ... }, * rcdata: function (text, param) { ... }, * cdata: function (text, param) { ... }, * startDoc: function (param) { ... }, * endDoc: function (param) { ... } * }} * * @param {Object} handler a record containing event handlers. * @return {function(string, Object)} A function that takes a chunk of HTML * and a parameter. The parameter is passed on to the handler methods. 
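 *
 * Minimal usage sketch (illustrative only, not part of the original docs;
 * it relies solely on the handler contract described above):
 * <pre>
 * var names = [];
 * var parser = makeSaxParser({
 *   startTag: function (name, attribs, param) { param.push(name); }
 * });
 * parser('<b>hi<\/b>', names);  // names is now ['b']
 * </pre>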
*/ function makeSaxParser(handler) { // Accept quoted or unquoted keys (Closure compat) var hcopy = { cdata: handler.cdata || handler['cdata'], comment: handler.comment || handler['comment'], endDoc: handler.endDoc || handler['endDoc'], endTag: handler.endTag || handler['endTag'], pcdata: handler.pcdata || handler['pcdata'], rcdata: handler.rcdata || handler['rcdata'], startDoc: handler.startDoc || handler['startDoc'], startTag: handler.startTag || handler['startTag'] }; return function(htmlText, param) { return parse(htmlText, hcopy, param); }; } // Parsing strategy is to split input into parts that might be lexically // meaningful (every ">" becomes a separate part), and then recombine // parts if we discover they're in a different context. // TODO(felix8a): Significant performance regressions from -legacy, // tested on // Chrome 18.0 // Firefox 11.0 // IE 6, 7, 8, 9 // Opera 11.61 // Safari 5.1.3 // Many of these are unusual patterns that are linearly slower and still // pretty fast (eg 1ms to 5ms), so not necessarily worth fixing. // TODO(felix8a): "<script> && && && ... <\/script>" is slower on all // browsers. The hotspot is htmlSplit. // TODO(felix8a): "<p title='>>>>...'><\/p>" is slower on all browsers. // This is partly htmlSplit, but the hotspot is parseTagAndAttrs. // TODO(felix8a): "<a><\/a><a><\/a>..." is slower on IE9. // "<a>1<\/a><a>1<\/a>..." is faster, "<a><\/a>2<a><\/a>2..." is faster. // TODO(felix8a): "<p<p<p..." is slower on IE[6-8] var continuationMarker = {}; function parse(htmlText, handler, param) { var m, p, tagName; var parts = htmlSplit(htmlText); var state = { noMoreGT: false, noMoreEndComments: false }; parseCPS(handler, parts, 0, state, param); } function continuationMaker(h, parts, initial, state, param) { return function () { parseCPS(h, parts, initial, state, param); }; } function parseCPS(h, parts, initial, state, param) { try { if (h.startDoc && initial == 0) { h.startDoc(param); } var m, p, tagName; for (var pos = initial, end = parts.length; pos < end;) { var current = parts[pos++]; var next = parts[pos]; switch (current) { case '&': if (ENTITY_RE_2.test(next)) { if (h.pcdata) { h.pcdata('&' + next, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } pos++; } else { if (h.pcdata) { h.pcdata("&amp;", param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<\/': if (m = /^([-\w:]+)[^\'\"]*/.exec(next)) { if (m[0].length === next.length && parts[pos + 1] === '>') { // fast case, no attribute parsing needed pos += 2; tagName = m[1].toLowerCase(); if (h.endTag) { h.endTag(tagName, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } else { // slow case, need to parse attributes // TODO(felix8a): do we really care about misparsing this? 
pos = parseEndTag( parts, pos, h, param, continuationMarker, state); } } else { if (h.pcdata) { h.pcdata('&lt;/', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<': if (m = /^([-\w:]+)\s*\/?/.exec(next)) { if (m[0].length === next.length && parts[pos + 1] === '>') { // fast case, no attribute parsing needed pos += 2; tagName = m[1].toLowerCase(); if (h.startTag) { h.startTag(tagName, [], param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } // tags like <script> and <textarea> have special parsing var eflags = html4.ELEMENTS[tagName]; if (eflags & EFLAGS_TEXT) { var tag = { name: tagName, next: pos, eflags: eflags }; pos = parseText( parts, tag, h, param, continuationMarker, state); } } else { // slow case, need to parse attributes pos = parseStartTag( parts, pos, h, param, continuationMarker, state); } } else { if (h.pcdata) { h.pcdata('&lt;', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<\!--': // The pathological case is n copies of '<\!--' without '-->', and // repeated failure to find '-->' is quadratic. We avoid that by // remembering when search for '-->' fails. if (!state.noMoreEndComments) { // A comment <\!--x--> is split into three tokens: // '<\!--', 'x--', '>' // We want to find the next '>' token that has a preceding '--'. // pos is at the 'x--'. for (p = pos + 1; p < end; p++) { if (parts[p] === '>' && /--$/.test(parts[p - 1])) { break; } } if (p < end) { if (h.comment) { var comment = parts.slice(pos, p).join(''); h.comment( comment.substr(0, comment.length - 2), param, continuationMarker, continuationMaker(h, parts, p + 1, state, param)); } pos = p + 1; } else { state.noMoreEndComments = true; } } if (state.noMoreEndComments) { if (h.pcdata) { h.pcdata('&lt;!--', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<\!': if (!/^\w/.test(next)) { if (h.pcdata) { h.pcdata('&lt;!', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } else { // similar to noMoreEndComment logic if (!state.noMoreGT) { for (p = pos + 1; p < end; p++) { if (parts[p] === '>') { break; } } if (p < end) { pos = p + 1; } else { state.noMoreGT = true; } } if (state.noMoreGT) { if (h.pcdata) { h.pcdata('&lt;!', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } } break; case '<?': // similar to noMoreEndComment logic if (!state.noMoreGT) { for (p = pos + 1; p < end; p++) { if (parts[p] === '>') { break; } } if (p < end) { pos = p + 1; } else { state.noMoreGT = true; } } if (state.noMoreGT) { if (h.pcdata) { h.pcdata('&lt;?', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '>': if (h.pcdata) { h.pcdata("&gt;", param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } break; case '': break; default: if (h.pcdata) { h.pcdata(current, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } break; } } if (h.endDoc) { h.endDoc(param); } } catch (e) { if (e !== continuationMarker) { throw e; } } } // Split str into parts for the html parser. function htmlSplit(str) { // can't hoist this out of the function because of the re.exec loop. 
var re = /(<\/|<\!--|<[!?]|[&<>])/g; str += ''; if (splitWillCapture) { return str.split(re); } else { var parts = []; var lastPos = 0; var m; while ((m = re.exec(str)) !== null) { parts.push(str.substring(lastPos, m.index)); parts.push(m[0]); lastPos = m.index + m[0].length; } parts.push(str.substring(lastPos)); return parts; } } function parseEndTag(parts, pos, h, param, continuationMarker, state) { var tag = parseTagAndAttrs(parts, pos); // drop unclosed tags if (!tag) { return parts.length; } if (h.endTag) { h.endTag(tag.name, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } return tag.next; } function parseStartTag(parts, pos, h, param, continuationMarker, state) { var tag = parseTagAndAttrs(parts, pos); // drop unclosed tags if (!tag) { return parts.length; } if (h.startTag) { h.startTag(tag.name, tag.attrs, param, continuationMarker, continuationMaker(h, parts, tag.next, state, param)); } // tags like <script> and <textarea> have special parsing if (tag.eflags & EFLAGS_TEXT) { return parseText(parts, tag, h, param, continuationMarker, state); } else { return tag.next; } } var endTagRe = {}; // Tags like <script> and <textarea> are flagged as CDATA or RCDATA, // which means everything is text until we see the correct closing tag. function parseText(parts, tag, h, param, continuationMarker, state) { var end = parts.length; if (!endTagRe.hasOwnProperty(tag.name)) { endTagRe[tag.name] = new RegExp('^' + tag.name + '(?:[\\s\\/]|$)', 'i'); } var re = endTagRe[tag.name]; var first = tag.next; var p = tag.next + 1; for (; p < end; p++) { if (parts[p - 1] === '<\/' && re.test(parts[p])) { break; } } if (p < end) { p -= 1; } var buf = parts.slice(first, p).join(''); if (tag.eflags & html4.eflags['CDATA']) { if (h.cdata) { h.cdata(buf, param, continuationMarker, continuationMaker(h, parts, p, state, param)); } } else if (tag.eflags & html4.eflags['RCDATA']) { if (h.rcdata) { h.rcdata(normalizeRCData(buf), param, continuationMarker, continuationMaker(h, parts, p, state, param)); } } else { throw new Error('bug'); } return p; } // at this point, parts[pos-1] is either "<" or "<\/". function parseTagAndAttrs(parts, pos) { var m = /^([-\w:]+)/.exec(parts[pos]); var tag = {}; tag.name = m[1].toLowerCase(); tag.eflags = html4.ELEMENTS[tag.name]; var buf = parts[pos].substr(m[0].length); // Find the next '>'. We optimistically assume this '>' is not in a // quoted context, and further down we fix things up if it turns out to // be quoted. var p = pos + 1; var end = parts.length; for (; p < end; p++) { if (parts[p] === '>') { break; } buf += parts[p]; } if (end <= p) { return void 0; } var attrs = []; while (buf !== '') { m = ATTR_RE.exec(buf); if (!m) { // No attribute found: skip garbage buf = buf.replace(/^[\s\S][^a-z\s]*/, ''); } else if ((m[4] && !m[5]) || (m[6] && !m[7])) { // Unterminated quote: slurp to the next unquoted '>' var quote = m[4] || m[6]; var sawQuote = false; var abuf = [buf, parts[p++]]; for (; p < end; p++) { if (sawQuote) { if (parts[p] === '>') { break; } } else if (0 <= parts[p].indexOf(quote)) { sawQuote = true; } abuf.push(parts[p]); } // Slurp failed: lose the garbage if (end <= p) { break; } // Otherwise retry attribute parsing buf = abuf.join(''); continue; } else { // We have an attribute var aName = m[1].toLowerCase(); var aValue = m[2] ? 
decodeValue(m[3]) : ''; attrs.push(aName, aValue); buf = buf.substr(m[0].length); } } tag.attrs = attrs; tag.next = p + 1; return tag; } function decodeValue(v) { var q = v.charCodeAt(0); if (q === 0x22 || q === 0x27) { // " or ' v = v.substr(1, v.length - 2); } return unescapeEntities(stripNULs(v)); } /** * Returns a function that strips unsafe tags and attributes from html. * @param {function(string, Array.<string>): ?Array.<string>} tagPolicy * A function that takes (tagName, attribs[]), where tagName is a key in * html4.ELEMENTS and attribs is an array of alternating attribute names * and values. It should return a record (as follows), or null to delete * the element. It's okay for tagPolicy to modify the attribs array, * but the same array is reused, so it should not be held between calls. * Record keys: * attribs: (required) Sanitized attributes array. * tagName: Replacement tag name. * @return {function(string, Array)} A function that sanitizes a string of * HTML and appends result strings to the second argument, an array. */ function makeHtmlSanitizer(tagPolicy) { var stack; var ignoring; var emit = function (text, out) { if (!ignoring) { out.push(text); } }; return makeSaxParser({ 'startDoc': function(_) { stack = []; ignoring = false; }, 'startTag': function(tagNameOrig, attribs, out) { if (ignoring) { return; } if (!html4.ELEMENTS.hasOwnProperty(tagNameOrig)) { return; } var eflagsOrig = html4.ELEMENTS[tagNameOrig]; if (eflagsOrig & html4.eflags['FOLDABLE']) { return; } var decision = tagPolicy(tagNameOrig, attribs); if (!decision) { ignoring = !(eflagsOrig & html4.eflags['EMPTY']); return; } else if (typeof decision !== 'object') { throw new Error('tagPolicy did not return object (old API?)'); } if ('attribs' in decision) { attribs = decision['attribs']; } else { throw new Error('tagPolicy gave no attribs'); } var eflagsRep; var tagNameRep; if ('tagName' in decision) { tagNameRep = decision['tagName']; eflagsRep = html4.ELEMENTS[tagNameRep]; } else { tagNameRep = tagNameOrig; eflagsRep = eflagsOrig; } // TODO(mikesamuel): relying on tagPolicy not to insert unsafe // attribute names. // If this is an optional-end-tag element and either this element or its // previous like sibling was rewritten, then insert a close tag to // preserve structure. 
if (eflagsOrig & html4.eflags['OPTIONAL_ENDTAG']) { var onStack = stack[stack.length - 1]; if (onStack && onStack.orig === tagNameOrig && (onStack.rep !== tagNameRep || tagNameOrig !== tagNameRep)) { out.push('<\/', onStack.rep, '>'); } } if (!(eflagsOrig & html4.eflags['EMPTY'])) { stack.push({orig: tagNameOrig, rep: tagNameRep}); } out.push('<', tagNameRep); for (var i = 0, n = attribs.length; i < n; i += 2) { var attribName = attribs[i], value = attribs[i + 1]; if (value !== null && value !== void 0) { out.push(' ', attribName, '="', escapeAttrib(value), '"'); } } out.push('>'); if ((eflagsOrig & html4.eflags['EMPTY']) && !(eflagsRep & html4.eflags['EMPTY'])) { // replacement is non-empty, synthesize end tag out.push('<\/', tagNameRep, '>'); } }, 'endTag': function(tagName, out) { if (ignoring) { ignoring = false; return; } if (!html4.ELEMENTS.hasOwnProperty(tagName)) { return; } var eflags = html4.ELEMENTS[tagName]; if (!(eflags & (html4.eflags['EMPTY'] | html4.eflags['FOLDABLE']))) { var index; if (eflags & html4.eflags['OPTIONAL_ENDTAG']) { for (index = stack.length; --index >= 0;) { var stackElOrigTag = stack[index].orig; if (stackElOrigTag === tagName) { break; } if (!(html4.ELEMENTS[stackElOrigTag] & html4.eflags['OPTIONAL_ENDTAG'])) { // Don't pop non optional end tags looking for a match. return; } } } else { for (index = stack.length; --index >= 0;) { if (stack[index].orig === tagName) { break; } } } if (index < 0) { return; } // Not opened. for (var i = stack.length; --i > index;) { var stackElRepTag = stack[i].rep; if (!(html4.ELEMENTS[stackElRepTag] & html4.eflags['OPTIONAL_ENDTAG'])) { out.push('<\/', stackElRepTag, '>'); } } if (index < stack.length) { tagName = stack[index].rep; } stack.length = index; out.push('<\/', tagName, '>'); } }, 'pcdata': emit, 'rcdata': emit, 'cdata': emit, 'endDoc': function(out) { for (; stack.length; stack.length--) { out.push('<\/', stack[stack.length - 1].rep, '>'); } } }); } var ALLOWED_URI_SCHEMES = /^(?:https?|mailto|data)$/i; function safeUri(uri, effect, ltype, hints, naiveUriRewriter) { if (!naiveUriRewriter) { return null; } try { var parsed = URI.parse('' + uri); if (parsed) { if (!parsed.hasScheme() || ALLOWED_URI_SCHEMES.test(parsed.getScheme())) { var safe = naiveUriRewriter(parsed, effect, ltype, hints); return safe ? safe.toString() : null; } } } catch (e) { return null; } return null; } function log(logger, tagName, attribName, oldValue, newValue) { if (!attribName) { logger(tagName + " removed", { change: "removed", tagName: tagName }); } if (oldValue !== newValue) { var changed = "changed"; if (oldValue && !newValue) { changed = "removed"; } else if (!oldValue && newValue) { changed = "added"; } logger(tagName + "." + attribName + " " + changed, { change: changed, tagName: tagName, attribName: attribName, oldValue: oldValue, newValue: newValue }); } } function lookupAttribute(map, tagName, attribName) { var attribKey; attribKey = tagName + '::' + attribName; if (map.hasOwnProperty(attribKey)) { return map[attribKey]; } attribKey = '*::' + attribName; if (map.hasOwnProperty(attribKey)) { return map[attribKey]; } return void 0; } function getAttributeType(tagName, attribName) { return lookupAttribute(html4.ATTRIBS, tagName, attribName); } function getLoaderType(tagName, attribName) { return lookupAttribute(html4.LOADERTYPES, tagName, attribName); } function getUriEffect(tagName, attribName) { return lookupAttribute(html4.URIEFFECTS, tagName, attribName); } /** * Sanitizes attributes on an HTML tag. 
* @param {string} tagName An HTML tag name in lowercase. * @param {Array.<?string>} attribs An array of alternating names and values. * @param {?function(?string): ?string} opt_naiveUriRewriter A transform to * apply to URI attributes; it can return a new string value, or null to * delete the attribute. If unspecified, URI attributes are deleted. * @param {function(?string): ?string} opt_nmTokenPolicy A transform to apply * to attributes containing HTML names, element IDs, and space-separated * lists of classes; it can return a new string value, or null to delete * the attribute. If unspecified, these attributes are kept unchanged. * @return {Array.<?string>} The sanitized attributes as a list of alternating * names and values, where a null value means to omit the attribute. */ function sanitizeAttribs(tagName, attribs, opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) { // TODO(felix8a): it's obnoxious that domado duplicates much of this // TODO(felix8a): maybe consistently enforce constraints like target= for (var i = 0; i < attribs.length; i += 2) { var attribName = attribs[i]; var value = attribs[i + 1]; var oldValue = value; var atype = null, attribKey; if ((attribKey = tagName + '::' + attribName, html4.ATTRIBS.hasOwnProperty(attribKey)) || (attribKey = '*::' + attribName, html4.ATTRIBS.hasOwnProperty(attribKey))) { atype = html4.ATTRIBS[attribKey]; } if (atype !== null) { switch (atype) { case html4.atype['NONE']: break; case html4.atype['SCRIPT']: value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['STYLE']: if ('undefined' === typeof parseCssDeclarations) { value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; } var sanitizedDeclarations = []; parseCssDeclarations( value, { declaration: function (property, tokens) { var normProp = property.toLowerCase(); var schema = cssSchema[normProp]; if (!schema) { return; } sanitizeCssProperty( normProp, schema, tokens, opt_naiveUriRewriter ? function (url) { return safeUri( url, html4.ueffects.SAME_DOCUMENT, html4.ltypes.SANDBOXED, { "TYPE": "CSS", "CSS_PROP": normProp }, opt_naiveUriRewriter); } : null); sanitizedDeclarations.push(property + ': ' + tokens.join(' ')); } }); value = sanitizedDeclarations.length > 0 ? sanitizedDeclarations.join(' ; ') : null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['ID']: case html4.atype['IDREF']: case html4.atype['IDREFS']: case html4.atype['GLOBAL_NAME']: case html4.atype['LOCAL_NAME']: case html4.atype['CLASSES']: value = opt_nmTokenPolicy ? opt_nmTokenPolicy(value) : value; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['URI']: value = safeUri(value, getUriEffect(tagName, attribName), getLoaderType(tagName, attribName), { "TYPE": "MARKUP", "XML_ATTR": attribName, "XML_TAG": tagName }, opt_naiveUriRewriter); if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['URI_FRAGMENT']: if (value && '#' === value.charAt(0)) { value = value.substring(1); // remove the leading '#' value = opt_nmTokenPolicy ? 
opt_nmTokenPolicy(value) : value; if (value !== null && value !== void 0) { value = '#' + value; // restore the leading '#' } } else { value = null; } if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; default: value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; } } else { value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } } attribs[i + 1] = value; } return attribs; } /** * Creates a tag policy that omits all tags marked UNSAFE in html4-defs.js * and applies the default attribute sanitizer with the supplied policy for * URI attributes and NMTOKEN attributes. * @param {?function(?string): ?string} opt_naiveUriRewriter A transform to * apply to URI attributes. If not given, URI attributes are deleted. * @param {function(?string): ?string} opt_nmTokenPolicy A transform to apply * to attributes containing HTML names, element IDs, and space-separated * lists of classes. If not given, such attributes are left unchanged. * @return {function(string, Array.<?string>)} A tagPolicy suitable for * passing to html.sanitize. */ function makeTagPolicy( opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) { return function(tagName, attribs) { if (!(html4.ELEMENTS[tagName] & html4.eflags['UNSAFE'])) { return { 'attribs': sanitizeAttribs(tagName, attribs, opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) }; } else { if (opt_logger) { log(opt_logger, tagName, undefined, undefined, undefined); } } }; } /** * Sanitizes HTML tags and attributes according to a given policy. * @param {string} inputHtml The HTML to sanitize. * @param {function(string, Array.<?string>)} tagPolicy A function that * decides which tags to accept and sanitizes their attributes (see * makeHtmlSanitizer above for details). * @return {string} The sanitized HTML. */ function sanitizeWithPolicy(inputHtml, tagPolicy) { var outputArray = []; makeHtmlSanitizer(tagPolicy)(inputHtml, outputArray); return outputArray.join(''); } /** * Strips unsafe tags and attributes from HTML. * @param {string} inputHtml The HTML to sanitize. * @param {?function(?string): ?string} opt_naiveUriRewriter A transform to * apply to URI attributes. If not given, URI attributes are deleted. * @param {function(?string): ?string} opt_nmTokenPolicy A transform to apply * to attributes containing HTML names, element IDs, and space-separated * lists of classes. If not given, such attributes are left unchanged. */ function sanitize(inputHtml, opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) { var tagPolicy = makeTagPolicy( opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger); return sanitizeWithPolicy(inputHtml, tagPolicy); } // Export both quoted and unquoted names for Closure linkage. 
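// --- Added usage sketch (illustrative only, not part of the original Caja code;
// the example strings and the exampleSanitizeUsage name are hypothetical). It
// assumes the `sanitize` helper defined above in this closure. ---
function exampleSanitizeUsage() {
  // Keep URI attributes by passing an identity rewriter; without one, every
  // href/src would be dropped by sanitizeAttribs.
  var keepUri = function (uri) { return uri; };
  var dirty = '<a href="http://example.com/" onclick="steal()">hi</a>' +
              '<script>steal()</script>';
  // <script> is flagged UNSAFE and removed along with its body; onclick is a
  // SCRIPT-typed attribute and is stripped; the link itself survives.
  return sanitize(dirty, keepUri);
  // Expected result, roughly: '<a href="http://example.com/">hi</a>'
}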
var html = {}; html.escapeAttrib = html['escapeAttrib'] = escapeAttrib; html.makeHtmlSanitizer = html['makeHtmlSanitizer'] = makeHtmlSanitizer; html.makeSaxParser = html['makeSaxParser'] = makeSaxParser; html.makeTagPolicy = html['makeTagPolicy'] = makeTagPolicy; html.normalizeRCData = html['normalizeRCData'] = normalizeRCData; html.sanitize = html['sanitize'] = sanitize; html.sanitizeAttribs = html['sanitizeAttribs'] = sanitizeAttribs; html.sanitizeWithPolicy = html['sanitizeWithPolicy'] = sanitizeWithPolicy; html.unescapeEntities = html['unescapeEntities'] = unescapeEntities; return html; })(html4); var html_sanitize = html['sanitize']; // Loosen restrictions of Caja's // html-sanitizer to allow for styling html4.ATTRIBS['*::style'] = 0; html4.ELEMENTS['style'] = 0; html4.ATTRIBS['a::target'] = 0; html4.ELEMENTS['video'] = 0; html4.ATTRIBS['video::src'] = 0; html4.ATTRIBS['video::poster'] = 0; html4.ATTRIBS['video::controls'] = 0; html4.ELEMENTS['audio'] = 0; html4.ATTRIBS['audio::src'] = 0; html4.ATTRIBS['video::autoplay'] = 0; html4.ATTRIBS['video::controls'] = 0; if (typeof module !== 'undefined') { module.exports = html_sanitize; } },{}],31:[function(require,module,exports){ require('./leaflet'); require('./mapbox'); },{"./leaflet":32,"./mapbox":33}],32:[function(require,module,exports){ window.L = require('leaflet/dist/leaflet-src'); },{"leaflet/dist/leaflet-src":36}],33:[function(require,module,exports){ // Hardcode image path, because Leaflet's autodetection // fails, because mapbox.js is not named leaflet.js window.L.Icon.Default.imagePath = '//api.tiles.mapbox.com/mapbox.js/' + 'v' + require('./package.json').version + '/images'; L.mapbox = module.exports = { VERSION: require('./package.json').version, geocoder: require('./src/geocoder'), marker: require('./src/marker'), tileLayer: require('./src/tile_layer'), shareControl: require('./src/share_control'), legendControl: require('./src/legend_control'), geocoderControl: require('./src/geocoder_control'), gridControl: require('./src/grid_control'), gridLayer: require('./src/grid_layer'), markerLayer: require('./src/marker_layer'), map: require('./src/map'), config: require('./src/config'), sanitize: require('./src/sanitize'), template: require('mustache').to_html }; },{"./package.json":38,"./src/config":39,"./src/geocoder":40,"./src/geocoder_control":41,"./src/grid_control":43,"./src/grid_layer":44,"./src/legend_control":45,"./src/map":47,"./src/marker":48,"./src/marker_layer":49,"./src/sanitize":51,"./src/share_control":52,"./src/tile_layer":53,"mustache":37}],34:[function(require,module,exports){ function xhr(url, callback, cors) { if (typeof window.XMLHttpRequest === 'undefined') { return callback(Error('Browser not supported')); } if (typeof cors === 'undefined') { var m = url.match(/^\s*https?:\/\/[^\/]*/); cors = m && (m[0] !== location.protocol + '//' + location.domain + (location.port ? ':' + location.port : '')); } var x; function isSuccessful(status) { return status >= 200 && status < 300 || status === 304; } if (cors && ( // IE7-9 Quirks & Compatibility typeof window.XDomainRequest === 'object' || // IE9 Standards mode typeof window.XDomainRequest === 'function' )) { // IE8-10 x = new window.XDomainRequest(); } else { x = new window.XMLHttpRequest(); } function loaded() { if ( // XDomainRequest x.status === undefined || // modern browsers isSuccessful(x.status)) callback.call(x, null, x); else callback.call(x, x, null); } // Both `onreadystatechange` and `onload` can fire. 
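// Added note (illustrative): callers use the Node-style (err, response)
// signature, e.g. xhr('https://example.com/data.json', function (err, resp) {
//   if (!err) { console.log(resp.responseText); }
// }); when the third argument is omitted, CORS is auto-detected from the URL.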
`onreadystatechange` // has [been supported for longer](http://stackoverflow.com/a/9181508/229001). if ('onload' in x) { x.onload = loaded; } else { x.onreadystatechange = function readystate() { if (x.readyState === 4) { loaded(); } }; } // Call the callback with the XMLHttpRequest object as an error and prevent // it from ever being called again by reassigning it to `noop` x.onerror = function error(evt) { callback.call(this, evt, null); callback = function() { }; }; // IE9 must have onprogress be set to a unique function. x.onprogress = function() { }; x.ontimeout = function(evt) { callback.call(this, evt, null); callback = function() { }; }; x.onabort = function(evt) { callback.call(this, evt, null); callback = function() { }; }; // GET is the only supported HTTP Verb by XDomainRequest and is the // only one supported here. x.open('GET', url, true); // Send the request. Sending data is not supported. x.send(null); return xhr; } if (typeof module !== 'undefined') module.exports = xhr; },{}],35:[function(require,module,exports){ /*! JSON v3.2.6 | http://bestiejs.github.io/json3 | Copyright 2012-2013, Kit Cambridge | http://kit.mit-license.org */ ;(function (window) { // Convenience aliases. var getClass = {}.toString, isProperty, forEach, undef; // Detect the `define` function exposed by asynchronous module loaders. The // strict `define` check is necessary for compatibility with `r.js`. var isLoader = typeof define === "function" && define.amd; // Detect native implementations. var nativeJSON = typeof JSON == "object" && JSON; // Set up the JSON 3 namespace, preferring the CommonJS `exports` object if // available. var JSON3 = typeof exports == "object" && exports && !exports.nodeType && exports; if (JSON3 && nativeJSON) { // Explicitly delegate to the native `stringify` and `parse` // implementations in CommonJS environments. JSON3.stringify = nativeJSON.stringify; JSON3.parse = nativeJSON.parse; } else { // Export for web browsers, JavaScript engines, and asynchronous module // loaders, using the global `JSON` object if available. JSON3 = window.JSON = nativeJSON || {}; } // Test the `Date#getUTC*` methods. Based on work by @Yaffle. var isExtended = new Date(-3509827334573292); try { // The `getUTCFullYear`, `Month`, and `Date` methods return nonsensical // results for certain dates in Opera >= 10.53. isExtended = isExtended.getUTCFullYear() == -109252 && isExtended.getUTCMonth() === 0 && isExtended.getUTCDate() === 1 && // Safari < 2.0.2 stores the internal millisecond time value correctly, // but clips the values returned by the date methods to the range of // signed 32-bit integers ([-2 ** 31, 2 ** 31 - 1]). isExtended.getUTCHours() == 10 && isExtended.getUTCMinutes() == 37 && isExtended.getUTCSeconds() == 6 && isExtended.getUTCMilliseconds() == 708; } catch (exception) {} // Internal: Determines whether the native `JSON.stringify` and `parse` // implementations are spec-compliant. Based on work by Ken Snyder. function has(name) { if (has[name] !== undef) { // Return cached feature test result. return has[name]; } var isSupported; if (name == "bug-string-char-index") { // IE <= 7 doesn't support accessing string characters using square // bracket notation. IE 8 only supports this for primitives. isSupported = "a"[0] != "a"; } else if (name == "json") { // Indicates whether both `JSON.stringify` and `JSON.parse` are // supported. 
isSupported = has("json-stringify") && has("json-parse"); } else { var value, serialized = '{"a":[1,true,false,null,"\\u0000\\b\\n\\f\\r\\t"]}'; // Test `JSON.stringify`. if (name == "json-stringify") { var stringify = JSON3.stringify, stringifySupported = typeof stringify == "function" && isExtended; if (stringifySupported) { // A test function object with a custom `toJSON` method. (value = function () { return 1; }).toJSON = value; try { stringifySupported = // Firefox 3.1b1 and b2 serialize string, number, and boolean // primitives as object literals. stringify(0) === "0" && // FF 3.1b1, b2, and JSON 2 serialize wrapped primitives as object // literals. stringify(new Number()) === "0" && stringify(new String()) == '""' && // FF 3.1b1, 2 throw an error if the value is `null`, `undefined`, or // does not define a canonical JSON representation (this applies to // objects with `toJSON` properties as well, *unless* they are nested // within an object or array). stringify(getClass) === undef && // IE 8 serializes `undefined` as `"undefined"`. Safari <= 5.1.7 and // FF 3.1b3 pass this test. stringify(undef) === undef && // Safari <= 5.1.7 and FF 3.1b3 throw `Error`s and `TypeError`s, // respectively, if the value is omitted entirely. stringify() === undef && // FF 3.1b1, 2 throw an error if the given value is not a number, // string, array, object, Boolean, or `null` literal. This applies to // objects with custom `toJSON` methods as well, unless they are nested // inside object or array literals. YUI 3.0.0b1 ignores custom `toJSON` // methods entirely. stringify(value) === "1" && stringify([value]) == "[1]" && // Prototype <= 1.6.1 serializes `[undefined]` as `"[]"` instead of // `"[null]"`. stringify([undef]) == "[null]" && // YUI 3.0.0b1 fails to serialize `null` literals. stringify(null) == "null" && // FF 3.1b1, 2 halts serialization if an array contains a function: // `[1, true, getClass, 1]` serializes as "[1,true,],". FF 3.1b3 // elides non-JSON values from objects and arrays, unless they // define custom `toJSON` methods. stringify([undef, getClass, null]) == "[null,null,null]" && // Simple serialization test. FF 3.1b1 uses Unicode escape sequences // where character escape codes are expected (e.g., `\b` => `\u0008`). stringify({ "a": [value, true, false, null, "\x00\b\n\f\r\t"] }) == serialized && // FF 3.1b1 and b2 ignore the `filter` and `width` arguments. stringify(null, value) === "1" && stringify([1, 2], null, 1) == "[\n 1,\n 2\n]" && // JSON 2, Prototype <= 1.7, and older WebKit builds incorrectly // serialize extended years. stringify(new Date(-8.64e15)) == '"-271821-04-20T00:00:00.000Z"' && // The milliseconds are optional in ES 5, but required in 5.1. stringify(new Date(8.64e15)) == '"+275760-09-13T00:00:00.000Z"' && // Firefox <= 11.0 incorrectly serializes years prior to 0 as negative // four-digit years instead of six-digit years. Credits: @Yaffle. stringify(new Date(-621987552e5)) == '"-000001-01-01T00:00:00.000Z"' && // Safari <= 5.1.5 and Opera >= 10.53 incorrectly serialize millisecond // values less than 1000. Credits: @Yaffle. stringify(new Date(-1)) == '"1969-12-31T23:59:59.999Z"'; } catch (exception) { stringifySupported = false; } } isSupported = stringifySupported; } // Test `JSON.parse`. if (name == "json-parse") { var parse = JSON3.parse; if (typeof parse == "function") { try { // FF 3.1b1, b2 will throw an exception if a bare literal is provided. // Conforming implementations should also coerce the initial argument to // a string prior to parsing. 
if (parse("0") === 0 && !parse(false)) { // Simple parsing test. value = parse(serialized); var parseSupported = value["a"].length == 5 && value["a"][0] === 1; if (parseSupported) { try { // Safari <= 5.1.2 and FF 3.1b1 allow unescaped tabs in strings. parseSupported = !parse('"\t"'); } catch (exception) {} if (parseSupported) { try { // FF 4.0 and 4.0.1 allow leading `+` signs and leading // decimal points. FF 4.0, 4.0.1, and IE 9-10 also allow // certain octal literals. parseSupported = parse("01") !== 1; } catch (exception) {} } if (parseSupported) { try { // FF 4.0, 4.0.1, and Rhino 1.7R3-R4 allow trailing decimal // points. These environments, along with FF 3.1b1 and 2, // also allow trailing commas in JSON objects and arrays. parseSupported = parse("1.") !== 1; } catch (exception) {} } } } } catch (exception) { parseSupported = false; } } isSupported = parseSupported; } } return has[name] = !!isSupported; } if (!has("json")) { // Common `[[Class]]` name aliases. var functionClass = "[object Function]"; var dateClass = "[object Date]"; var numberClass = "[object Number]"; var stringClass = "[object String]"; var arrayClass = "[object Array]"; var booleanClass = "[object Boolean]"; // Detect incomplete support for accessing string characters by index. var charIndexBuggy = has("bug-string-char-index"); // Define additional utility methods if the `Date` methods are buggy. if (!isExtended) { var floor = Math.floor; // A mapping between the months of the year and the number of days between // January 1st and the first of the respective month. var Months = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]; // Internal: Calculates the number of days between the Unix epoch and the // first day of the given month. var getDay = function (year, month) { return Months[month] + 365 * (year - 1970) + floor((year - 1969 + (month = +(month > 1))) / 4) - floor((year - 1901 + month) / 100) + floor((year - 1601 + month) / 400); }; } // Internal: Determines if a property is a direct property of the given // object. Delegates to the native `Object#hasOwnProperty` method. if (!(isProperty = {}.hasOwnProperty)) { isProperty = function (property) { var members = {}, constructor; if ((members.__proto__ = null, members.__proto__ = { // The *proto* property cannot be set multiple times in recent // versions of Firefox and SeaMonkey. "toString": 1 }, members).toString != getClass) { // Safari <= 2.0.3 doesn't implement `Object#hasOwnProperty`, but // supports the mutable *proto* property. isProperty = function (property) { // Capture and break the object's prototype chain (see section 8.6.2 // of the ES 5.1 spec). The parenthesized expression prevents an // unsafe transformation by the Closure Compiler. var original = this.__proto__, result = property in (this.__proto__ = null, this); // Restore the original prototype chain. this.__proto__ = original; return result; }; } else { // Capture a reference to the top-level `Object` constructor. constructor = members.constructor; // Use the `constructor` property to simulate `Object#hasOwnProperty` in // other environments. isProperty = function (property) { var parent = (this.constructor || constructor).prototype; return property in this && !(property in parent && this[property] === parent[property]); }; } members = null; return isProperty.call(this, property); }; } // Internal: A set of primitive types used by `isHostType`. 
var PrimitiveTypes = { 'boolean': 1, 'number': 1, 'string': 1, 'undefined': 1 }; // Internal: Determines if the given object `property` value is a // non-primitive. var isHostType = function (object, property) { var type = typeof object[property]; return type == 'object' ? !!object[property] : !PrimitiveTypes[type]; }; // Internal: Normalizes the `for...in` iteration algorithm across // environments. Each enumerated key is yielded to a `callback` function. forEach = function (object, callback) { var size = 0, Properties, members, property; // Tests for bugs in the current environment's `for...in` algorithm. The // `valueOf` property inherits the non-enumerable flag from // `Object.prototype` in older versions of IE, Netscape, and Mozilla. (Properties = function () { this.valueOf = 0; }).prototype.valueOf = 0; // Iterate over a new instance of the `Properties` class. members = new Properties(); for (property in members) { // Ignore all properties inherited from `Object.prototype`. if (isProperty.call(members, property)) { size++; } } Properties = members = null; // Normalize the iteration algorithm. if (!size) { // A list of non-enumerable properties inherited from `Object.prototype`. members = ["valueOf", "toString", "toLocaleString", "propertyIsEnumerable", "isPrototypeOf", "hasOwnProperty", "constructor"]; // IE <= 8, Mozilla 1.0, and Netscape 6.2 ignore shadowed non-enumerable // properties. forEach = function (object, callback) { var isFunction = getClass.call(object) == functionClass, property, length; var hasProperty = !isFunction && typeof object.constructor != 'function' && isHostType(object, 'hasOwnProperty') ? object.hasOwnProperty : isProperty; for (property in object) { // Gecko <= 1.0 enumerates the `prototype` property of functions under // certain conditions; IE does not. if (!(isFunction && property == "prototype") && hasProperty.call(object, property)) { callback(property); } } // Manually invoke the callback for each non-enumerable property. for (length = members.length; property = members[--length]; hasProperty.call(object, property) && callback(property)); }; } else if (size == 2) { // Safari <= 2.0.4 enumerates shadowed properties twice. forEach = function (object, callback) { // Create a set of iterated properties. var members = {}, isFunction = getClass.call(object) == functionClass, property; for (property in object) { // Store each property name to prevent double enumeration. The // `prototype` property of functions is not enumerated due to cross- // environment inconsistencies. if (!(isFunction && property == "prototype") && !isProperty.call(members, property) && (members[property] = 1) && isProperty.call(object, property)) { callback(property); } } }; } else { // No bugs detected; use the standard `for...in` algorithm. forEach = function (object, callback) { var isFunction = getClass.call(object) == functionClass, property, isConstructor; for (property in object) { if (!(isFunction && property == "prototype") && isProperty.call(object, property) && !(isConstructor = property === "constructor")) { callback(property); } } // Manually invoke the callback for the `constructor` property due to // cross-environment inconsistencies. if (isConstructor || isProperty.call(object, (property = "constructor"))) { callback(property); } }; } return forEach(object, callback); }; // Public: Serializes a JavaScript `value` as a JSON string. 
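// --- Added usage sketch (illustrative; the example values and the
// exampleJson3Usage name are hypothetical). JSON3 mirrors the native API, so
// the same calls work whether this shim or a native JSON object is active. ---
function exampleJson3Usage() {
  var text = JSON3.stringify({ a: [1, true, null] }, null, 2); // pretty-printed with 2-space indent
  var back = JSON3.parse(text);                                // -> { a: [1, true, null] }
  return back.a.length === 3;
}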
The optional // `filter` argument may specify either a function that alters how object and // array members are serialized, or an array of strings and numbers that // indicates which properties should be serialized. The optional `width` // argument may be either a string or number that specifies the indentation // level of the output. if (!has("json-stringify")) { // Internal: A map of control characters and their escaped equivalents. var Escapes = { 92: "\\\\", 34: '\\"', 8: "\\b", 12: "\\f", 10: "\\n", 13: "\\r", 9: "\\t" }; // Internal: Converts `value` into a zero-padded string such that its // length is at least equal to `width`. The `width` must be <= 6. var leadingZeroes = "000000"; var toPaddedString = function (width, value) { // The `|| 0` expression is necessary to work around a bug in // Opera <= 7.54u2 where `0 == -0`, but `String(-0) !== "0"`. return (leadingZeroes + (value || 0)).slice(-width); }; // Internal: Double-quotes a string `value`, replacing all ASCII control // characters (characters with code unit values between 0 and 31) with // their escaped equivalents. This is an implementation of the // `Quote(value)` operation defined in ES 5.1 section 15.12.3. var unicodePrefix = "\\u00"; var quote = function (value) { var result = '"', index = 0, length = value.length, isLarge = length > 10 && charIndexBuggy, symbols; if (isLarge) { symbols = value.split(""); } for (; index < length; index++) { var charCode = value.charCodeAt(index); // If the character is a control character, append its Unicode or // shorthand escape sequence; otherwise, append the character as-is. switch (charCode) { case 8: case 9: case 10: case 12: case 13: case 34: case 92: result += Escapes[charCode]; break; default: if (charCode < 32) { result += unicodePrefix + toPaddedString(2, charCode.toString(16)); break; } result += isLarge ? symbols[index] : charIndexBuggy ? value.charAt(index) : value[index]; } } return result + '"'; }; // Internal: Recursively serializes an object. Implements the // `Str(key, holder)`, `JO(value)`, and `JA(value)` operations. var serialize = function (property, object, callback, properties, whitespace, indentation, stack) { var value, className, year, month, date, time, hours, minutes, seconds, milliseconds, results, element, index, length, prefix, result; try { // Necessary for host object support. value = object[property]; } catch (exception) {} if (typeof value == "object" && value) { className = getClass.call(value); if (className == dateClass && !isProperty.call(value, "toJSON")) { if (value > -1 / 0 && value < 1 / 0) { // Dates are serialized according to the `Date#toJSON` method // specified in ES 5.1 section 15.9.5.44. See section 15.9.1.15 // for the ISO 8601 date time string format. if (getDay) { // Manually compute the year, month, date, hours, minutes, // seconds, and milliseconds if the `getUTC*` methods are // buggy. Adapted from @Yaffle's `date-shim` project. date = floor(value / 864e5); for (year = floor(date / 365.2425) + 1970 - 1; getDay(year + 1, 0) <= date; year++); for (month = floor((date - getDay(year, 0)) / 30.42); getDay(year, month + 1) <= date; month++); date = 1 + date - getDay(year, month); // The `time` value specifies the time within the day (see ES // 5.1 section 15.9.1.2). The formula `(A % B + B) % B` is used // to compute `A modulo B`, as the `%` operator does not // correspond to the `modulo` operation for negative numbers. 
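// Added worked example: for value = -1 (one millisecond before the epoch),
// plain -1 % 864e5 evaluates to -1, while (-1 % 864e5 + 864e5) % 864e5
// evaluates to 86399999, i.e. 23:59:59.999 within the day.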
time = (value % 864e5 + 864e5) % 864e5; // The hours, minutes, seconds, and milliseconds are obtained by // decomposing the time within the day. See section 15.9.1.10. hours = floor(time / 36e5) % 24; minutes = floor(time / 6e4) % 60; seconds = floor(time / 1e3) % 60; milliseconds = time % 1e3; } else { year = value.getUTCFullYear(); month = value.getUTCMonth(); date = value.getUTCDate(); hours = value.getUTCHours(); minutes = value.getUTCMinutes(); seconds = value.getUTCSeconds(); milliseconds = value.getUTCMilliseconds(); } // Serialize extended years correctly. value = (year <= 0 || year >= 1e4 ? (year < 0 ? "-" : "+") + toPaddedString(6, year < 0 ? -year : year) : toPaddedString(4, year)) + "-" + toPaddedString(2, month + 1) + "-" + toPaddedString(2, date) + // Months, dates, hours, minutes, and seconds should have two // digits; milliseconds should have three. "T" + toPaddedString(2, hours) + ":" + toPaddedString(2, minutes) + ":" + toPaddedString(2, seconds) + // Milliseconds are optional in ES 5.0, but required in 5.1. "." + toPaddedString(3, milliseconds) + "Z"; } else { value = null; } } else if (typeof value.toJSON == "function" && ((className != numberClass && className != stringClass && className != arrayClass) || isProperty.call(value, "toJSON"))) { // Prototype <= 1.6.1 adds non-standard `toJSON` methods to the // `Number`, `String`, `Date`, and `Array` prototypes. JSON 3 // ignores all `toJSON` methods on these objects unless they are // defined directly on an instance. value = value.toJSON(property); } } if (callback) { // If a replacement function was provided, call it to obtain the value // for serialization. value = callback.call(object, property, value); } if (value === null) { return "null"; } className = getClass.call(value); if (className == booleanClass) { // Booleans are represented literally. return "" + value; } else if (className == numberClass) { // JSON numbers must be finite. `Infinity` and `NaN` are serialized as // `"null"`. return value > -1 / 0 && value < 1 / 0 ? "" + value : "null"; } else if (className == stringClass) { // Strings are double-quoted and escaped. return quote("" + value); } // Recursively serialize objects and arrays. if (typeof value == "object") { // Check for cyclic structures. This is a linear search; performance // is inversely proportional to the number of unique nested objects. for (length = stack.length; length--;) { if (stack[length] === value) { // Cyclic structures cannot be serialized by `JSON.stringify`. throw TypeError(); } } // Add the object to the stack of traversed objects. stack.push(value); results = []; // Save the current indentation level and indent one additional level. prefix = indentation; indentation += whitespace; if (className == arrayClass) { // Recursively serialize array elements. for (index = 0, length = value.length; index < length; index++) { element = serialize(index, value, callback, properties, whitespace, indentation, stack); results.push(element === undef ? "null" : element); } result = results.length ? (whitespace ? "[\n" + indentation + results.join(",\n" + indentation) + "\n" + prefix + "]" : ("[" + results.join(",") + "]")) : "[]"; } else { // Recursively serialize object members. Members are selected from // either a user-specified list of property names, or the object // itself. 
forEach(properties || value, function (property) { var element = serialize(property, value, callback, properties, whitespace, indentation, stack); if (element !== undef) { // According to ES 5.1 section 15.12.3: "If `gap` {whitespace} // is not the empty string, let `member` {quote(property) + ":"} // be the concatenation of `member` and the `space` character." // The "`space` character" refers to the literal space // character, not the `space` {width} argument provided to // `JSON.stringify`. results.push(quote(property) + ":" + (whitespace ? " " : "") + element); } }); result = results.length ? (whitespace ? "{\n" + indentation + results.join(",\n" + indentation) + "\n" + prefix + "}" : ("{" + results.join(",") + "}")) : "{}"; } // Remove the object from the traversed object stack. stack.pop(); return result; } }; // Public: `JSON.stringify`. See ES 5.1 section 15.12.3. JSON3.stringify = function (source, filter, width) { var whitespace, callback, properties, className; if (typeof filter == "function" || typeof filter == "object" && filter) { if ((className = getClass.call(filter)) == functionClass) { callback = filter; } else if (className == arrayClass) { // Convert the property names array into a makeshift set. properties = {}; for (var index = 0, length = filter.length, value; index < length; value = filter[index++], ((className = getClass.call(value)), className == stringClass || className == numberClass) && (properties[value] = 1)); } } if (width) { if ((className = getClass.call(width)) == numberClass) { // Convert the `width` to an integer and create a string containing // `width` number of space characters. if ((width -= width % 1) > 0) { for (whitespace = "", width > 10 && (width = 10); whitespace.length < width; whitespace += " "); } } else if (className == stringClass) { whitespace = width.length <= 10 ? width : width.slice(0, 10); } } // Opera <= 7.54u2 discards the values associated with empty string keys // (`""`) only if they are used directly within an object member list // (e.g., `!("" in { "": 1})`). return serialize("", (value = {}, value[""] = source, value), callback, properties, whitespace, "", []); }; } // Public: Parses a JSON source string. if (!has("json-parse")) { var fromCharCode = String.fromCharCode; // Internal: A map of escaped control characters and their unescaped // equivalents. var Unescapes = { 92: "\\", 34: '"', 47: "/", 98: "\b", 116: "\t", 110: "\n", 102: "\f", 114: "\r" }; // Internal: Stores the parser state. var Index, Source; // Internal: Resets the parser state and throws a `SyntaxError`. var abort = function() { Index = Source = null; throw SyntaxError(); }; // Internal: Returns the next token, or `"$"` if the parser has reached // the end of the source string. A token may be a string, number, `null` // literal, or Boolean literal. var lex = function () { var source = Source, length = source.length, value, begin, position, isSigned, charCode; while (Index < length) { charCode = source.charCodeAt(Index); switch (charCode) { case 9: case 10: case 13: case 32: // Skip whitespace tokens, including tabs, carriage returns, line // feeds, and space characters. Index++; break; case 123: case 125: case 91: case 93: case 58: case 44: // Parse a punctuator token (`{`, `}`, `[`, `]`, `:`, or `,`) at // the current position. value = charIndexBuggy ? source.charAt(Index) : source[Index]; Index++; return value; case 34: // `"` delimits a JSON string; advance to the next character and // begin parsing the string. 
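// Added example: lexing the source '"hi"' yields the token "@hi"; get() later
// strips that leading "@" when reviving the string value.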
String tokens are prefixed with the // sentinel `@` character to distinguish them from punctuators and // end-of-string tokens. for (value = "@", Index++; Index < length;) { charCode = source.charCodeAt(Index); if (charCode < 32) { // Unescaped ASCII control characters (those with a code unit // less than the space character) are not permitted. abort(); } else if (charCode == 92) { // A reverse solidus (`\`) marks the beginning of an escaped // control character (including `"`, `\`, and `/`) or Unicode // escape sequence. charCode = source.charCodeAt(++Index); switch (charCode) { case 92: case 34: case 47: case 98: case 116: case 110: case 102: case 114: // Revive escaped control characters. value += Unescapes[charCode]; Index++; break; case 117: // `\u` marks the beginning of a Unicode escape sequence. // Advance to the first character and validate the // four-digit code point. begin = ++Index; for (position = Index + 4; Index < position; Index++) { charCode = source.charCodeAt(Index); // A valid sequence comprises four hexdigits (case- // insensitive) that form a single hexadecimal value. if (!(charCode >= 48 && charCode <= 57 || charCode >= 97 && charCode <= 102 || charCode >= 65 && charCode <= 70)) { // Invalid Unicode escape sequence. abort(); } } // Revive the escaped character. value += fromCharCode("0x" + source.slice(begin, Index)); break; default: // Invalid escape sequence. abort(); } } else { if (charCode == 34) { // An unescaped double-quote character marks the end of the // string. break; } charCode = source.charCodeAt(Index); begin = Index; // Optimize for the common case where a string is valid. while (charCode >= 32 && charCode != 92 && charCode != 34) { charCode = source.charCodeAt(++Index); } // Append the string as-is. value += source.slice(begin, Index); } } if (source.charCodeAt(Index) == 34) { // Advance to the next character and return the revived string. Index++; return value; } // Unterminated string. abort(); default: // Parse numbers and literals. begin = Index; // Advance past the negative sign, if one is specified. if (charCode == 45) { isSigned = true; charCode = source.charCodeAt(++Index); } // Parse an integer or floating-point value. if (charCode >= 48 && charCode <= 57) { // Leading zeroes are interpreted as octal literals. if (charCode == 48 && ((charCode = source.charCodeAt(Index + 1)), charCode >= 48 && charCode <= 57)) { // Illegal octal literal. abort(); } isSigned = false; // Parse the integer component. for (; Index < length && ((charCode = source.charCodeAt(Index)), charCode >= 48 && charCode <= 57); Index++); // Floats cannot contain a leading decimal point; however, this // case is already accounted for by the parser. if (source.charCodeAt(Index) == 46) { position = ++Index; // Parse the decimal component. for (; position < length && ((charCode = source.charCodeAt(position)), charCode >= 48 && charCode <= 57); position++); if (position == Index) { // Illegal trailing decimal. abort(); } Index = position; } // Parse exponents. The `e` denoting the exponent is // case-insensitive. charCode = source.charCodeAt(Index); if (charCode == 101 || charCode == 69) { charCode = source.charCodeAt(++Index); // Skip past the sign following the exponent, if one is // specified. if (charCode == 43 || charCode == 45) { Index++; } // Parse the exponential component. for (position = Index; position < length && ((charCode = source.charCodeAt(position)), charCode >= 48 && charCode <= 57); position++); if (position == Index) { // Illegal empty exponent. 
abort(); } Index = position; } // Coerce the parsed value to a JavaScript number. return +source.slice(begin, Index); } // A negative sign may only precede numbers. if (isSigned) { abort(); } // `true`, `false`, and `null` literals. if (source.slice(Index, Index + 4) == "true") { Index += 4; return true; } else if (source.slice(Index, Index + 5) == "false") { Index += 5; return false; } else if (source.slice(Index, Index + 4) == "null") { Index += 4; return null; } // Unrecognized token. abort(); } } // Return the sentinel `$` character if the parser has reached the end // of the source string. return "$"; }; // Internal: Parses a JSON `value` token. var get = function (value) { var results, hasMembers; if (value == "$") { // Unexpected end of input. abort(); } if (typeof value == "string") { if ((charIndexBuggy ? value.charAt(0) : value[0]) == "@") { // Remove the sentinel `@` character. return value.slice(1); } // Parse object and array literals. if (value == "[") { // Parses a JSON array, returning a new JavaScript array. results = []; for (;; hasMembers || (hasMembers = true)) { value = lex(); // A closing square bracket marks the end of the array literal. if (value == "]") { break; } // If the array literal contains elements, the current token // should be a comma separating the previous element from the // next. if (hasMembers) { if (value == ",") { value = lex(); if (value == "]") { // Unexpected trailing `,` in array literal. abort(); } } else { // A `,` must separate each array element. abort(); } } // Elisions and leading commas are not permitted. if (value == ",") { abort(); } results.push(get(value)); } return results; } else if (value == "{") { // Parses a JSON object, returning a new JavaScript object. results = {}; for (;; hasMembers || (hasMembers = true)) { value = lex(); // A closing curly brace marks the end of the object literal. if (value == "}") { break; } // If the object literal contains members, the current token // should be a comma separator. if (hasMembers) { if (value == ",") { value = lex(); if (value == "}") { // Unexpected trailing `,` in object literal. abort(); } } else { // A `,` must separate each object member. abort(); } } // Leading commas are not permitted, object property names must be // double-quoted strings, and a `:` must separate each property // name and value. if (value == "," || typeof value != "string" || (charIndexBuggy ? value.charAt(0) : value[0]) != "@" || lex() != ":") { abort(); } results[value.slice(1)] = get(lex()); } return results; } // Unexpected token encountered. abort(); } return value; }; // Internal: Updates a traversed object member. var update = function(source, property, callback) { var element = walk(source, property, callback); if (element === undef) { delete source[property]; } else { source[property] = element; } }; // Internal: Recursively traverses a parsed JSON object, invoking the // `callback` function for each value. This is an implementation of the // `Walk(holder, name)` operation defined in ES 5.1 section 15.12.2. var walk = function (source, property, callback) { var value = source[property], length; if (typeof value == "object" && value) { // `forEach` can't be used to traverse an array in Opera <= 8.54 // because its `Object#hasOwnProperty` implementation returns `false` // for array indices (e.g., `![1, 2, 3].hasOwnProperty("0")`). 
if (getClass.call(value) == arrayClass) { for (length = value.length; length--;) { update(value, length, callback); } } else { forEach(value, function (property) { update(value, property, callback); }); } } return callback.call(source, property, value); }; // Public: `JSON.parse`. See ES 5.1 section 15.12.2. JSON3.parse = function (source, callback) { var result, value; Index = 0; Source = "" + source; result = get(lex()); // If a JSON string contains multiple tokens, it is invalid. if (lex() != "$") { abort(); } // Reset the parser state. Index = Source = null; return callback && getClass.call(callback) == functionClass ? walk((value = {}, value[""] = result, value), "", callback) : result; }; } } // Export for asynchronous module loaders. if (isLoader) { define(function () { return JSON3; }); } }(this)); },{}],36:[function(require,module,exports){ /* Leaflet, a JavaScript library for mobile-friendly interactive maps. http://leafletjs.com (c) 2010-2013, Vladimir Agafonkin (c) 2010-2011, CloudMade */ (function (window, document, undefined) { var oldL = window.L, L = {}; L.version = '0.6.2'; // define Leaflet for Node module pattern loaders, including Browserify if (typeof module === 'object' && typeof module.exports === 'object') { module.exports = L; // define Leaflet as an AMD module } else if (typeof define === 'function' && define.amd) { define(L); } // define Leaflet as a global L variable, saving the original L to restore later if needed L.noConflict = function () { window.L = oldL; return this; }; window.L = L; /* * L.Util contains various utility functions used throughout Leaflet code. */ L.Util = { extend: function (dest) { // (Object[, Object, ...]) -> var sources = Array.prototype.slice.call(arguments, 1), i, j, len, src; for (j = 0, len = sources.length; j < len; j++) { src = sources[j] || {}; for (i in src) { if (src.hasOwnProperty(i)) { dest[i] = src[i]; } } } return dest; }, bind: function (fn, obj) { // (Function, Object) -> Function var args = arguments.length > 2 ? Array.prototype.slice.call(arguments, 2) : null; return function () { return fn.apply(obj, args || arguments); }; }, stamp: (function () { var lastId = 0, key = '_leaflet_id'; return function (obj) { obj[key] = obj[key] || ++lastId; return obj[key]; }; }()), invokeEach: function (obj, method, context) { var i, args; if (typeof obj === 'object') { args = Array.prototype.slice.call(arguments, 3); for (i in obj) { method.apply(context, [i, obj[i]].concat(args)); } return true; } return false; }, limitExecByInterval: function (fn, time, context) { var lock, execOnUnlock; return function wrapperFn() { var args = arguments; if (lock) { execOnUnlock = true; return; } lock = true; setTimeout(function () { lock = false; if (execOnUnlock) { wrapperFn.apply(context, args); execOnUnlock = false; } }, time); fn.apply(context, args); }; }, falseFn: function () { return false; }, formatNum: function (num, digits) { var pow = Math.pow(10, digits || 5); return Math.round(num * pow) / pow; }, trim: function (str) { return str.trim ? str.trim() : str.replace(/^\s+|\s+$/g, ''); }, splitWords: function (str) { return L.Util.trim(str).split(/\s+/); }, setOptions: function (obj, options) { obj.options = L.extend({}, obj.options, options); return obj.options; }, getParamString: function (obj, existingUrl, uppercase) { var params = []; for (var i in obj) { params.push(encodeURIComponent(uppercase ? i.toUpperCase() : i) + '=' + encodeURIComponent(obj[i])); } return ((!existingUrl || existingUrl.indexOf('?') === -1) ? '?' 
: '&') + params.join('&'); }, template: function (str, data) { return str.replace(/\{ *([\w_]+) *\}/g, function (str, key) { var value = data[key]; if (value === undefined) { throw new Error('No value provided for variable ' + str); } else if (typeof value === 'function') { value = value(data); } return value; }); }, isArray: function (obj) { return (Object.prototype.toString.call(obj) === '[object Array]'); }, emptyImageUrl: 'data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs=' }; (function () { // inspired by http://paulirish.com/2011/requestanimationframe-for-smart-animating/ function getPrefixed(name) { var i, fn, prefixes = ['webkit', 'moz', 'o', 'ms']; for (i = 0; i < prefixes.length && !fn; i++) { fn = window[prefixes[i] + name]; } return fn; } var lastTime = 0; function timeoutDefer(fn) { var time = +new Date(), timeToCall = Math.max(0, 16 - (time - lastTime)); lastTime = time + timeToCall; return window.setTimeout(fn, timeToCall); } var requestFn = window.requestAnimationFrame || getPrefixed('RequestAnimationFrame') || timeoutDefer; var cancelFn = window.cancelAnimationFrame || getPrefixed('CancelAnimationFrame') || getPrefixed('CancelRequestAnimationFrame') || function (id) { window.clearTimeout(id); }; L.Util.requestAnimFrame = function (fn, context, immediate, element) { fn = L.bind(fn, context); if (immediate && requestFn === timeoutDefer) { fn(); } else { return requestFn.call(window, fn, element); } }; L.Util.cancelAnimFrame = function (id) { if (id) { cancelFn.call(window, id); } }; }()); // shortcuts for most used utility functions L.extend = L.Util.extend; L.bind = L.Util.bind; L.stamp = L.Util.stamp; L.setOptions = L.Util.setOptions; /* * L.Class powers the OOP facilities of the library. * Thanks to John Resig and Dean Edwards for inspiration! 
*/ L.Class = function () {}; L.Class.extend = function (props) { // extended class with the new prototype var NewClass = function () { // call the constructor if (this.initialize) { this.initialize.apply(this, arguments); } // call all constructor hooks if (this._initHooks) { this.callInitHooks(); } }; // instantiate class without calling constructor var F = function () {}; F.prototype = this.prototype; var proto = new F(); proto.constructor = NewClass; NewClass.prototype = proto; //inherit parent's statics for (var i in this) { if (this.hasOwnProperty(i) && i !== 'prototype') { NewClass[i] = this[i]; } } // mix static properties into the class if (props.statics) { L.extend(NewClass, props.statics); delete props.statics; } // mix includes into the prototype if (props.includes) { L.Util.extend.apply(null, [proto].concat(props.includes)); delete props.includes; } // merge options if (props.options && proto.options) { props.options = L.extend({}, proto.options, props.options); } // mix given properties into the prototype L.extend(proto, props); proto._initHooks = []; var parent = this; // jshint camelcase: false NewClass.__super__ = parent.prototype; // add method for calling all hooks proto.callInitHooks = function () { if (this._initHooksCalled) { return; } if (parent.prototype.callInitHooks) { parent.prototype.callInitHooks.call(this); } this._initHooksCalled = true; for (var i = 0, len = proto._initHooks.length; i < len; i++) { proto._initHooks[i].call(this); } }; return NewClass; }; // method for adding properties to prototype L.Class.include = function (props) { L.extend(this.prototype, props); }; // merge new default options to the Class L.Class.mergeOptions = function (options) { L.extend(this.prototype.options, options); }; // add a constructor hook L.Class.addInitHook = function (fn) { // (Function) || (String, args...) var args = Array.prototype.slice.call(arguments, 1); var init = typeof fn === 'function' ? fn : function () { this[fn].apply(this, args); }; this.prototype._initHooks = this.prototype._initHooks || []; this.prototype._initHooks.push(init); }; /* * L.Mixin.Events is used to add custom events functionality to Leaflet classes. 
*/ var eventsKey = '_leaflet_events'; L.Mixin = {}; L.Mixin.Events = { addEventListener: function (types, fn, context) { // (String, Function[, Object]) or (Object[, Object]) // types can be a map of types/handlers if (L.Util.invokeEach(types, this.addEventListener, this, fn, context)) { return this; } var events = this[eventsKey] = this[eventsKey] || {}, contextId = context && L.stamp(context), i, len, event, type, indexKey, indexLenKey, typeIndex; // types can be a string of space-separated words types = L.Util.splitWords(types); for (i = 0, len = types.length; i < len; i++) { event = { action: fn, context: context || this }; type = types[i]; if (context) { // store listeners of a particular context in a separate hash (if it has an id) // gives a major performance boost when removing thousands of map layers indexKey = type + '_idx'; indexLenKey = indexKey + '_len'; typeIndex = events[indexKey] = events[indexKey] || {}; if (!typeIndex[contextId]) { typeIndex[contextId] = []; // keep track of the number of keys in the index to quickly check if it's empty events[indexLenKey] = (events[indexLenKey] || 0) + 1; } typeIndex[contextId].push(event); } else { events[type] = events[type] || []; events[type].push(event); } } return this; }, hasEventListeners: function (type) { // (String) -> Boolean var events = this[eventsKey]; return !!events && ((type in events && events[type].length > 0) || (type + '_idx' in events && events[type + '_idx_len'] > 0)); }, removeEventListener: function (types, fn, context) { // ([String, Function, Object]) or (Object[, Object]) if (!this[eventsKey]) { return this; } if (!types) { return this.clearAllEventListeners(); } if (L.Util.invokeEach(types, this.removeEventListener, this, fn, context)) { return this; } var events = this[eventsKey], contextId = context && L.stamp(context), i, len, type, listeners, j, indexKey, indexLenKey, typeIndex, removed; types = L.Util.splitWords(types); for (i = 0, len = types.length; i < len; i++) { type = types[i]; indexKey = type + '_idx'; indexLenKey = indexKey + '_len'; typeIndex = events[indexKey]; if (!fn) { // clear all listeners for a type if function isn't specified delete events[type]; delete events[indexKey]; } else { listeners = context && typeIndex ? 
typeIndex[contextId] : events[type]; if (listeners) { for (j = listeners.length - 1; j >= 0; j--) { if ((listeners[j].action === fn) && (!context || (listeners[j].context === context))) { removed = listeners.splice(j, 1); // set the old action to a no-op, because it is possible // that the listener is being iterated over as part of a dispatch removed[0].action = L.Util.falseFn; } } if (context && typeIndex && (listeners.length === 0)) { delete typeIndex[contextId]; events[indexLenKey]--; } } } } return this; }, clearAllEventListeners: function () { delete this[eventsKey]; return this; }, fireEvent: function (type, data) { // (String[, Object]) if (!this.hasEventListeners(type)) { return this; } var event = L.Util.extend({}, data, { type: type, target: this }); var events = this[eventsKey], listeners, i, len, typeIndex, contextId; if (events[type]) { // make sure adding/removing listeners inside other listeners won't cause infinite loop listeners = events[type].slice(); for (i = 0, len = listeners.length; i < len; i++) { listeners[i].action.call(listeners[i].context || this, event); } } // fire event for the context-indexed listeners as well typeIndex = events[type + '_idx']; for (contextId in typeIndex) { listeners = typeIndex[contextId].slice(); if (listeners) { for (i = 0, len = listeners.length; i < len; i++) { listeners[i].action.call(listeners[i].context || this, event); } } } return this; }, addOneTimeEventListener: function (types, fn, context) { if (L.Util.invokeEach(types, this.addOneTimeEventListener, this, fn, context)) { return this; } var handler = L.bind(function () { this .removeEventListener(types, fn, context) .removeEventListener(types, handler, context); }, this); return this .addEventListener(types, fn, context) .addEventListener(types, handler, context); } }; L.Mixin.Events.on = L.Mixin.Events.addEventListener; L.Mixin.Events.off = L.Mixin.Events.removeEventListener; L.Mixin.Events.once = L.Mixin.Events.addOneTimeEventListener; L.Mixin.Events.fire = L.Mixin.Events.fireEvent; /* * L.Browser handles different browser and feature detections for internal Leaflet use. */ (function () { var ie = !!window.ActiveXObject, ie6 = ie && !window.XMLHttpRequest, ie7 = ie && !document.querySelector, ielt9 = ie && !document.addEventListener, // terrible browser detection to work around Safari / iOS / Android browser bugs ua = navigator.userAgent.toLowerCase(), webkit = ua.indexOf('webkit') !== -1, chrome = ua.indexOf('chrome') !== -1, phantomjs = ua.indexOf('phantom') !== -1, android = ua.indexOf('android') !== -1, android23 = ua.search('android [23]') !== -1, mobile = typeof orientation !== undefined + '', msTouch = window.navigator && window.navigator.msPointerEnabled && window.navigator.msMaxTouchPoints, retina = ('devicePixelRatio' in window && window.devicePixelRatio > 1) || ('matchMedia' in window && window.matchMedia('(min-resolution:144dpi)') && window.matchMedia('(min-resolution:144dpi)').matches), doc = document.documentElement, ie3d = ie && ('transition' in doc.style), webkit3d = ('WebKitCSSMatrix' in window) && ('m11' in new window.WebKitCSSMatrix()), gecko3d = 'MozPerspective' in doc.style, opera3d = 'OTransition' in doc.style, any3d = !window.L_DISABLE_3D && (ie3d || webkit3d || gecko3d || opera3d) && !phantomjs; // PhantomJS has 'ontouchstart' in document.documentElement, but doesn't actually support touch. 
// https://github.com/Leaflet/Leaflet/pull/1434#issuecomment-13843151 var touch = !window.L_NO_TOUCH && !phantomjs && (function () { var startName = 'ontouchstart'; // IE10+ (We simulate these into touch* events in L.DomEvent and L.DomEvent.MsTouch) or WebKit, etc. if (msTouch || (startName in doc)) { return true; } // Firefox/Gecko var div = document.createElement('div'), supported = false; if (!div.setAttribute) { return false; } div.setAttribute(startName, 'return;'); if (typeof div[startName] === 'function') { supported = true; } div.removeAttribute(startName); div = null; return supported; }()); L.Browser = { ie: ie, ie6: ie6, ie7: ie7, ielt9: ielt9, webkit: webkit, android: android, android23: android23, chrome: chrome, ie3d: ie3d, webkit3d: webkit3d, gecko3d: gecko3d, opera3d: opera3d, any3d: any3d, mobile: mobile, mobileWebkit: mobile && webkit, mobileWebkit3d: mobile && webkit3d, mobileOpera: mobile && window.opera, touch: touch, msTouch: msTouch, retina: retina }; }()); /* * L.Point represents a point with x and y coordinates. */ L.Point = function (/*Number*/ x, /*Number*/ y, /*Boolean*/ round) { this.x = (round ? Math.round(x) : x); this.y = (round ? Math.round(y) : y); }; L.Point.prototype = { clone: function () { return new L.Point(this.x, this.y); }, // non-destructive, returns a new point add: function (point) { return this.clone()._add(L.point(point)); }, // destructive, used directly for performance in situations where it's safe to modify existing point _add: function (point) { this.x += point.x; this.y += point.y; return this; }, subtract: function (point) { return this.clone()._subtract(L.point(point)); }, _subtract: function (point) { this.x -= point.x; this.y -= point.y; return this; }, divideBy: function (num) { return this.clone()._divideBy(num); }, _divideBy: function (num) { this.x /= num; this.y /= num; return this; }, multiplyBy: function (num) { return this.clone()._multiplyBy(num); }, _multiplyBy: function (num) { this.x *= num; this.y *= num; return this; }, round: function () { return this.clone()._round(); }, _round: function () { this.x = Math.round(this.x); this.y = Math.round(this.y); return this; }, floor: function () { return this.clone()._floor(); }, _floor: function () { this.x = Math.floor(this.x); this.y = Math.floor(this.y); return this; }, distanceTo: function (point) { point = L.point(point); var x = point.x - this.x, y = point.y - this.y; return Math.sqrt(x * x + y * y); }, equals: function (point) { point = L.point(point); return point.x === this.x && point.y === this.y; }, contains: function (point) { point = L.point(point); return Math.abs(point.x) <= Math.abs(this.x) && Math.abs(point.y) <= Math.abs(this.y); }, toString: function () { return 'Point(' + L.Util.formatNum(this.x) + ', ' + L.Util.formatNum(this.y) + ')'; } }; L.point = function (x, y, round) { if (x instanceof L.Point) { return x; } if (L.Util.isArray(x)) { return new L.Point(x[0], x[1]); } if (x === undefined || x === null) { return x; } return new L.Point(x, y, round); }; /* * L.Bounds represents a rectangular area on the screen in pixel coordinates. */ L.Bounds = function (a, b) { //(Point, Point) or Point[] if (!a) { return; } var points = b ? 
[a, b] : a; for (var i = 0, len = points.length; i < len; i++) { this.extend(points[i]); } }; L.Bounds.prototype = { // extend the bounds to contain the given point extend: function (point) { // (Point) point = L.point(point); if (!this.min && !this.max) { this.min = point.clone(); this.max = point.clone(); } else { this.min.x = Math.min(point.x, this.min.x); this.max.x = Math.max(point.x, this.max.x); this.min.y = Math.min(point.y, this.min.y); this.max.y = Math.max(point.y, this.max.y); } return this; }, getCenter: function (round) { // (Boolean) -> Point return new L.Point( (this.min.x + this.max.x) / 2, (this.min.y + this.max.y) / 2, round); }, getBottomLeft: function () { // -> Point return new L.Point(this.min.x, this.max.y); }, getTopRight: function () { // -> Point return new L.Point(this.max.x, this.min.y); }, getSize: function () { return this.max.subtract(this.min); }, contains: function (obj) { // (Bounds) or (Point) -> Boolean var min, max; if (typeof obj[0] === 'number' || obj instanceof L.Point) { obj = L.point(obj); } else { obj = L.bounds(obj); } if (obj instanceof L.Bounds) { min = obj.min; max = obj.max; } else { min = max = obj; } return (min.x >= this.min.x) && (max.x <= this.max.x) && (min.y >= this.min.y) && (max.y <= this.max.y); }, intersects: function (bounds) { // (Bounds) -> Boolean bounds = L.bounds(bounds); var min = this.min, max = this.max, min2 = bounds.min, max2 = bounds.max, xIntersects = (max2.x >= min.x) && (min2.x <= max.x), yIntersects = (max2.y >= min.y) && (min2.y <= max.y); return xIntersects && yIntersects; }, isValid: function () { return !!(this.min && this.max); } }; L.bounds = function (a, b) { // (Bounds) or (Point, Point) or (Point[]) if (!a || a instanceof L.Bounds) { return a; } return new L.Bounds(a, b); }; /* * L.Transformation is an utility class to perform simple point transformations through a 2d-matrix. */ L.Transformation = function (a, b, c, d) { this._a = a; this._b = b; this._c = c; this._d = d; }; L.Transformation.prototype = { transform: function (point, scale) { // (Point, Number) -> Point return this._transform(point.clone(), scale); }, // destructive transform (faster) _transform: function (point, scale) { scale = scale || 1; point.x = scale * (this._a * point.x + this._b); point.y = scale * (this._c * point.y + this._d); return point; }, untransform: function (point, scale) { scale = scale || 1; return new L.Point( (point.x / scale - this._b) / this._a, (point.y / scale - this._d) / this._c); } }; /* * L.DomUtil contains various utility functions for working with DOM. */ L.DomUtil = { get: function (id) { return (typeof id === 'string' ? document.getElementById(id) : id); }, getStyle: function (el, style) { var value = el.style[style]; if (!value && el.currentStyle) { value = el.currentStyle[style]; } if ((!value || value === 'auto') && document.defaultView) { var css = document.defaultView.getComputedStyle(el, null); value = css ? css[style] : null; } return value === 'auto' ? 
null : value; }, getViewportOffset: function (element) { var top = 0, left = 0, el = element, docBody = document.body, docEl = document.documentElement, pos, ie7 = L.Browser.ie7; do { top += el.offsetTop || 0; left += el.offsetLeft || 0; //add borders top += parseInt(L.DomUtil.getStyle(el, 'borderTopWidth'), 10) || 0; left += parseInt(L.DomUtil.getStyle(el, 'borderLeftWidth'), 10) || 0; pos = L.DomUtil.getStyle(el, 'position'); if (el.offsetParent === docBody && pos === 'absolute') { break; } if (pos === 'fixed') { top += docBody.scrollTop || docEl.scrollTop || 0; left += docBody.scrollLeft || docEl.scrollLeft || 0; break; } if (pos === 'relative' && !el.offsetLeft) { var width = L.DomUtil.getStyle(el, 'width'), maxWidth = L.DomUtil.getStyle(el, 'max-width'), r = el.getBoundingClientRect(); if (width !== 'none' || maxWidth !== 'none') { left += r.left + el.clientLeft; } //calculate full y offset since we're breaking out of the loop top += r.top + (docBody.scrollTop || docEl.scrollTop || 0); break; } el = el.offsetParent; } while (el); el = element; do { if (el === docBody) { break; } top -= el.scrollTop || 0; left -= el.scrollLeft || 0; // webkit (and ie <= 7) handles RTL scrollLeft different to everyone else // https://code.google.com/p/closure-library/source/browse/trunk/closure/goog/style/bidi.js if (!L.DomUtil.documentIsLtr() && (L.Browser.webkit || ie7)) { left += el.scrollWidth - el.clientWidth; // ie7 shows the scrollbar by default and provides clientWidth counting it, so we // need to add it back in if it is visible; scrollbar is on the left as we are RTL if (ie7 && L.DomUtil.getStyle(el, 'overflow-y') !== 'hidden' && L.DomUtil.getStyle(el, 'overflow') !== 'hidden') { left += 17; } } el = el.parentNode; } while (el); return new L.Point(left, top); }, documentIsLtr: function () { if (!L.DomUtil._docIsLtrCached) { L.DomUtil._docIsLtrCached = true; L.DomUtil._docIsLtr = L.DomUtil.getStyle(document.body, 'direction') === 'ltr'; } return L.DomUtil._docIsLtr; }, create: function (tagName, className, container) { var el = document.createElement(tagName); el.className = className; if (container) { container.appendChild(el); } return el; }, hasClass: function (el, name) { return (el.className.length > 0) && new RegExp('(^|\\s)' + name + '(\\s|$)').test(el.className); }, addClass: function (el, name) { if (!L.DomUtil.hasClass(el, name)) { el.className += (el.className ? ' ' : '') + name; } }, removeClass: function (el, name) { el.className = L.Util.trim((' ' + el.className + ' ').replace(' ' + name + ' ', ' ')); }, setOpacity: function (el, value) { if ('opacity' in el.style) { el.style.opacity = value; } else if ('filter' in el.style) { var filter = false, filterName = 'DXImageTransform.Microsoft.Alpha'; // filters collection throws an error if we try to retrieve a filter that doesn't exist try { filter = el.filters.item(filterName); } catch (e) { // don't set opacity to 1 if we haven't already set an opacity, // it isn't needed and breaks transparent pngs. 
if (value === 1) { return; } } value = Math.round(value * 100); if (filter) { filter.Enabled = (value !== 100); filter.Opacity = value; } else { el.style.filter += ' progid:' + filterName + '(opacity=' + value + ')'; } } }, testProp: function (props) { var style = document.documentElement.style; for (var i = 0; i < props.length; i++) { if (props[i] in style) { return props[i]; } } return false; }, getTranslateString: function (point) { // on WebKit browsers (Chrome/Safari/iOS Safari/Android) using translate3d instead of translate // makes animation smoother as it ensures HW accel is used. Firefox 13 doesn't care // (same speed either way), Opera 12 doesn't support translate3d var is3d = L.Browser.webkit3d, open = 'translate' + (is3d ? '3d' : '') + '(', close = (is3d ? ',0' : '') + ')'; return open + point.x + 'px,' + point.y + 'px' + close; }, getScaleString: function (scale, origin) { var preTranslateStr = L.DomUtil.getTranslateString(origin.add(origin.multiplyBy(-1 * scale))), scaleStr = ' scale(' + scale + ') '; return preTranslateStr + scaleStr; }, setPosition: function (el, point, disable3D) { // (HTMLElement, Point[, Boolean]) // jshint camelcase: false el._leaflet_pos = point; if (!disable3D && L.Browser.any3d) { el.style[L.DomUtil.TRANSFORM] = L.DomUtil.getTranslateString(point); // workaround for Android 2/3 stability (https://github.com/CloudMade/Leaflet/issues/69) if (L.Browser.mobileWebkit3d) { el.style.WebkitBackfaceVisibility = 'hidden'; } } else { el.style.left = point.x + 'px'; el.style.top = point.y + 'px'; } }, getPosition: function (el) { // this method is only used for elements previously positioned using setPosition, // so it's safe to cache the position for performance // jshint camelcase: false return el._leaflet_pos; } }; // prefix style property names L.DomUtil.TRANSFORM = L.DomUtil.testProp( ['transform', 'WebkitTransform', 'OTransform', 'MozTransform', 'msTransform']); // webkitTransition comes first because some browser versions that drop vendor prefix don't do // the same for the transitionend event, in particular the Android 4.1 stock browser L.DomUtil.TRANSITION = L.DomUtil.testProp( ['webkitTransition', 'transition', 'OTransition', 'MozTransition', 'msTransition']); L.DomUtil.TRANSITION_END = L.DomUtil.TRANSITION === 'webkitTransition' || L.DomUtil.TRANSITION === 'OTransition' ? 
L.DomUtil.TRANSITION + 'End' : 'transitionend'; (function () { var userSelectProperty = L.DomUtil.testProp( ['userSelect', 'WebkitUserSelect', 'OUserSelect', 'MozUserSelect', 'msUserSelect']); var userDragProperty = L.DomUtil.testProp( ['userDrag', 'WebkitUserDrag', 'OUserDrag', 'MozUserDrag', 'msUserDrag']); L.extend(L.DomUtil, { disableTextSelection: function () { if (userSelectProperty) { var style = document.documentElement.style; this._userSelect = style[userSelectProperty]; style[userSelectProperty] = 'none'; } else { L.DomEvent.on(window, 'selectstart', L.DomEvent.stop); } }, enableTextSelection: function () { if (userSelectProperty) { document.documentElement.style[userSelectProperty] = this._userSelect; delete this._userSelect; } else { L.DomEvent.off(window, 'selectstart', L.DomEvent.stop); } }, disableImageDrag: function () { if (userDragProperty) { var style = document.documentElement.style; this._userDrag = style[userDragProperty]; style[userDragProperty] = 'none'; } else { L.DomEvent.on(window, 'dragstart', L.DomEvent.stop); } }, enableImageDrag: function () { if (userDragProperty) { document.documentElement.style[userDragProperty] = this._userDrag; delete this._userDrag; } else { L.DomEvent.off(window, 'dragstart', L.DomEvent.stop); } } }); })(); /* * L.LatLng represents a geographical point with latitude and longitude coordinates. */ L.LatLng = function (rawLat, rawLng) { // (Number, Number) var lat = parseFloat(rawLat), lng = parseFloat(rawLng); if (isNaN(lat) || isNaN(lng)) { throw new Error('Invalid LatLng object: (' + rawLat + ', ' + rawLng + ')'); } this.lat = lat; this.lng = lng; }; L.extend(L.LatLng, { DEG_TO_RAD: Math.PI / 180, RAD_TO_DEG: 180 / Math.PI, MAX_MARGIN: 1.0E-9 // max margin of error for the "equals" check }); L.LatLng.prototype = { equals: function (obj) { // (LatLng) -> Boolean if (!obj) { return false; } obj = L.latLng(obj); var margin = Math.max( Math.abs(this.lat - obj.lat), Math.abs(this.lng - obj.lng)); return margin <= L.LatLng.MAX_MARGIN; }, toString: function (precision) { // (Number) -> String return 'LatLng(' + L.Util.formatNum(this.lat, precision) + ', ' + L.Util.formatNum(this.lng, precision) + ')'; }, // Haversine distance formula, see http://en.wikipedia.org/wiki/Haversine_formula // TODO move to projection code, LatLng shouldn't know about Earth distanceTo: function (other) { // (LatLng) -> Number other = L.latLng(other); var R = 6378137, // earth radius in meters d2r = L.LatLng.DEG_TO_RAD, dLat = (other.lat - this.lat) * d2r, dLon = (other.lng - this.lng) * d2r, lat1 = this.lat * d2r, lat2 = other.lat * d2r, sin1 = Math.sin(dLat / 2), sin2 = Math.sin(dLon / 2); var a = sin1 * sin1 + sin2 * sin2 * Math.cos(lat1) * Math.cos(lat2); return R * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); }, wrap: function (a, b) { // (Number, Number) -> LatLng var lng = this.lng; a = a || -180; b = b || 180; lng = (lng + b) % (b - a) + (lng < a || lng === b ? b : a); return new L.LatLng(this.lat, lng); } }; L.latLng = function (a, b) { // (LatLng) or ([Number, Number]) or (Number, Number) if (a instanceof L.LatLng) { return a; } if (L.Util.isArray(a)) { return new L.LatLng(a[0], a[1]); } if (a === undefined || a === null) { return a; } if (typeof a === 'object' && 'lat' in a) { return new L.LatLng(a.lat, 'lng' in a ? a.lng : a.lon); } return new L.LatLng(a, b); }; /* * L.LatLngBounds represents a rectangular area on the map in geographical coordinates. 
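 *
 * A minimal usage sketch (the coordinates below are illustrative placeholders,
 * not part of the library):
 *
 *   var bounds = L.latLngBounds([40.712, -74.227], [40.774, -74.125]); // southWest, northEast
 *   bounds.contains([40.75, -74.2]); // true
 *   bounds.pad(0.1);                 // new bounds grown by 10% on each side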
*/ L.LatLngBounds = function (southWest, northEast) { // (LatLng, LatLng) or (LatLng[]) if (!southWest) { return; } var latlngs = northEast ? [southWest, northEast] : southWest; for (var i = 0, len = latlngs.length; i < len; i++) { this.extend(latlngs[i]); } }; L.LatLngBounds.prototype = { // extend the bounds to contain the given point or bounds extend: function (obj) { // (LatLng) or (LatLngBounds) if (!obj) { return this; } if (typeof obj[0] === 'number' || typeof obj[0] === 'string' || obj instanceof L.LatLng) { obj = L.latLng(obj); } else { obj = L.latLngBounds(obj); } if (obj instanceof L.LatLng) { if (!this._southWest && !this._northEast) { this._southWest = new L.LatLng(obj.lat, obj.lng); this._northEast = new L.LatLng(obj.lat, obj.lng); } else { this._southWest.lat = Math.min(obj.lat, this._southWest.lat); this._southWest.lng = Math.min(obj.lng, this._southWest.lng); this._northEast.lat = Math.max(obj.lat, this._northEast.lat); this._northEast.lng = Math.max(obj.lng, this._northEast.lng); } } else if (obj instanceof L.LatLngBounds) { this.extend(obj._southWest); this.extend(obj._northEast); } return this; }, // extend the bounds by a percentage pad: function (bufferRatio) { // (Number) -> LatLngBounds var sw = this._southWest, ne = this._northEast, heightBuffer = Math.abs(sw.lat - ne.lat) * bufferRatio, widthBuffer = Math.abs(sw.lng - ne.lng) * bufferRatio; return new L.LatLngBounds( new L.LatLng(sw.lat - heightBuffer, sw.lng - widthBuffer), new L.LatLng(ne.lat + heightBuffer, ne.lng + widthBuffer)); }, getCenter: function () { // -> LatLng return new L.LatLng( (this._southWest.lat + this._northEast.lat) / 2, (this._southWest.lng + this._northEast.lng) / 2); }, getSouthWest: function () { return this._southWest; }, getNorthEast: function () { return this._northEast; }, getNorthWest: function () { return new L.LatLng(this.getNorth(), this.getWest()); }, getSouthEast: function () { return new L.LatLng(this.getSouth(), this.getEast()); }, getWest: function () { return this._southWest.lng; }, getSouth: function () { return this._southWest.lat; }, getEast: function () { return this._northEast.lng; }, getNorth: function () { return this._northEast.lat; }, contains: function (obj) { // (LatLngBounds) or (LatLng) -> Boolean if (typeof obj[0] === 'number' || obj instanceof L.LatLng) { obj = L.latLng(obj); } else { obj = L.latLngBounds(obj); } var sw = this._southWest, ne = this._northEast, sw2, ne2; if (obj instanceof L.LatLngBounds) { sw2 = obj.getSouthWest(); ne2 = obj.getNorthEast(); } else { sw2 = ne2 = obj; } return (sw2.lat >= sw.lat) && (ne2.lat <= ne.lat) && (sw2.lng >= sw.lng) && (ne2.lng <= ne.lng); }, intersects: function (bounds) { // (LatLngBounds) bounds = L.latLngBounds(bounds); var sw = this._southWest, ne = this._northEast, sw2 = bounds.getSouthWest(), ne2 = bounds.getNorthEast(), latIntersects = (ne2.lat >= sw.lat) && (sw2.lat <= ne.lat), lngIntersects = (ne2.lng >= sw.lng) && (sw2.lng <= ne.lng); return latIntersects && lngIntersects; }, toBBoxString: function () { return [this.getWest(), this.getSouth(), this.getEast(), this.getNorth()].join(','); }, equals: function (bounds) { // (LatLngBounds) if (!bounds) { return false; } bounds = L.latLngBounds(bounds); return this._southWest.equals(bounds.getSouthWest()) && this._northEast.equals(bounds.getNorthEast()); }, isValid: function () { return !!(this._southWest && this._northEast); } }; //TODO International date line? 
L.latLngBounds = function (a, b) { // (LatLngBounds) or (LatLng, LatLng) if (!a || a instanceof L.LatLngBounds) { return a; } return new L.LatLngBounds(a, b); }; /* * L.Projection contains various geographical projections used by CRS classes. */ L.Projection = {}; /* * Spherical Mercator is the most popular map projection, used by EPSG:3857 CRS used by default. */ L.Projection.SphericalMercator = { MAX_LATITUDE: 85.0511287798, project: function (latlng) { // (LatLng) -> Point var d = L.LatLng.DEG_TO_RAD, max = this.MAX_LATITUDE, lat = Math.max(Math.min(max, latlng.lat), -max), x = latlng.lng * d, y = lat * d; y = Math.log(Math.tan((Math.PI / 4) + (y / 2))); return new L.Point(x, y); }, unproject: function (point) { // (Point, Boolean) -> LatLng var d = L.LatLng.RAD_TO_DEG, lng = point.x * d, lat = (2 * Math.atan(Math.exp(point.y)) - (Math.PI / 2)) * d; return new L.LatLng(lat, lng); } }; /* * Simple equirectangular (Plate Carree) projection, used by CRS like EPSG:4326 and Simple. */ L.Projection.LonLat = { project: function (latlng) { return new L.Point(latlng.lng, latlng.lat); }, unproject: function (point) { return new L.LatLng(point.y, point.x); } }; /* * L.CRS is a base object for all defined CRS (Coordinate Reference Systems) in Leaflet. */ L.CRS = { latLngToPoint: function (latlng, zoom) { // (LatLng, Number) -> Point var projectedPoint = this.projection.project(latlng), scale = this.scale(zoom); return this.transformation._transform(projectedPoint, scale); }, pointToLatLng: function (point, zoom) { // (Point, Number[, Boolean]) -> LatLng var scale = this.scale(zoom), untransformedPoint = this.transformation.untransform(point, scale); return this.projection.unproject(untransformedPoint); }, project: function (latlng) { return this.projection.project(latlng); }, scale: function (zoom) { return 256 * Math.pow(2, zoom); } }; /* * A simple CRS that can be used for flat non-Earth maps like panoramas or game maps. */ L.CRS.Simple = L.extend({}, L.CRS, { projection: L.Projection.LonLat, transformation: new L.Transformation(1, 0, -1, 0), scale: function (zoom) { return Math.pow(2, zoom); } }); /* * L.CRS.EPSG3857 (Spherical Mercator) is the most common CRS for web mapping * and is used by Leaflet by default. */ L.CRS.EPSG3857 = L.extend({}, L.CRS, { code: 'EPSG:3857', projection: L.Projection.SphericalMercator, transformation: new L.Transformation(0.5 / Math.PI, 0.5, -0.5 / Math.PI, 0.5), project: function (latlng) { // (LatLng) -> Point var projectedPoint = this.projection.project(latlng), earthRadius = 6378137; return projectedPoint.multiplyBy(earthRadius); } }); L.CRS.EPSG900913 = L.extend({}, L.CRS.EPSG3857, { code: 'EPSG:900913' }); /* * L.CRS.EPSG4326 is a CRS popular among advanced GIS specialists. */ L.CRS.EPSG4326 = L.extend({}, L.CRS, { code: 'EPSG:4326', projection: L.Projection.LonLat, transformation: new L.Transformation(1 / 360, 0.5, -1 / 360, 0.5) }); /* * L.Map is the central class of the API - it is used to create a map. 
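 *
 * A minimal usage sketch (the 'map' element id, tile URL template and
 * coordinates are placeholders, not part of the library):
 *
 *   var map = L.map('map', {center: [51.505, -0.09], zoom: 13});
 *   L.tileLayer('http://{s}.tile.example.com/{z}/{x}/{y}.png', {maxZoom: 18}).addTo(map);
 *   map.on('click', function (e) { console.log(e.latlng); });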
*/ L.Map = L.Class.extend({ includes: L.Mixin.Events, options: { crs: L.CRS.EPSG3857, /* center: LatLng, zoom: Number, layers: Array, */ fadeAnimation: L.DomUtil.TRANSITION && !L.Browser.android23, trackResize: true, markerZoomAnimation: L.DomUtil.TRANSITION && L.Browser.any3d }, initialize: function (id, options) { // (HTMLElement or String, Object) options = L.setOptions(this, options); this._initContainer(id); this._initLayout(); this._initEvents(); if (options.maxBounds) { this.setMaxBounds(options.maxBounds); } if (options.center && options.zoom !== undefined) { this.setView(L.latLng(options.center), options.zoom, {reset: true}); } this._handlers = []; this._layers = {}; this._zoomBoundLayers = {}; this._tileLayersNum = 0; this.callInitHooks(); this._addLayers(options.layers); }, // public methods that modify map state // replaced by animation-powered implementation in Map.PanAnimation.js setView: function (center, zoom) { this._resetView(L.latLng(center), this._limitZoom(zoom)); return this; }, setZoom: function (zoom, options) { return this.setView(this.getCenter(), zoom, {zoom: options}); }, zoomIn: function (delta, options) { return this.setZoom(this._zoom + (delta || 1), options); }, zoomOut: function (delta, options) { return this.setZoom(this._zoom - (delta || 1), options); }, setZoomAround: function (latlng, zoom, options) { var scale = this.getZoomScale(zoom), viewHalf = this.getSize().divideBy(2), containerPoint = latlng instanceof L.Point ? latlng : this.latLngToContainerPoint(latlng), centerOffset = containerPoint.subtract(viewHalf).multiplyBy(1 - 1 / scale), newCenter = this.containerPointToLatLng(viewHalf.add(centerOffset)); return this.setView(newCenter, zoom, {zoom: options}); }, fitBounds: function (bounds, options) { options = options || {}; bounds = bounds.getBounds ? 
bounds.getBounds() : L.latLngBounds(bounds); var paddingTL = L.point(options.paddingTopLeft || options.padding || [0, 0]), paddingBR = L.point(options.paddingBottomRight || options.padding || [0, 0]), zoom = this.getBoundsZoom(bounds, false, paddingTL.add(paddingBR)), paddingOffset = paddingBR.subtract(paddingTL).divideBy(2), swPoint = this.project(bounds.getSouthWest(), zoom), nePoint = this.project(bounds.getNorthEast(), zoom), center = this.unproject(swPoint.add(nePoint).divideBy(2).add(paddingOffset), zoom); return this.setView(center, zoom, options); }, fitWorld: function (options) { return this.fitBounds([[-90, -180], [90, 180]], options); }, panTo: function (center, options) { // (LatLng) return this.setView(center, this._zoom, {pan: options}); }, panBy: function (offset) { // (Point) // replaced with animated panBy in Map.Animation.js this.fire('movestart'); this._rawPanBy(L.point(offset)); this.fire('move'); return this.fire('moveend'); }, setMaxBounds: function (bounds) { bounds = L.latLngBounds(bounds); this.options.maxBounds = bounds; if (!bounds) { this._boundsMinZoom = null; this.off('moveend', this._panInsideMaxBounds, this); return this; } var minZoom = this.getBoundsZoom(bounds, true); this._boundsMinZoom = minZoom; if (this._loaded) { if (this._zoom < minZoom) { this.setView(bounds.getCenter(), minZoom); } else { this.panInsideBounds(bounds); } } this.on('moveend', this._panInsideMaxBounds, this); return this; }, panInsideBounds: function (bounds) { bounds = L.latLngBounds(bounds); var viewBounds = this.getPixelBounds(), viewSw = viewBounds.getBottomLeft(), viewNe = viewBounds.getTopRight(), sw = this.project(bounds.getSouthWest()), ne = this.project(bounds.getNorthEast()), dx = 0, dy = 0; if (viewNe.y < ne.y) { // north dy = Math.ceil(ne.y - viewNe.y); } if (viewNe.x > ne.x) { // east dx = Math.floor(ne.x - viewNe.x); } if (viewSw.y > sw.y) { // south dy = Math.floor(sw.y - viewSw.y); } if (viewSw.x < sw.x) { // west dx = Math.ceil(sw.x - viewSw.x); } if (dx || dy) { return this.panBy([dx, dy]); } return this; }, addLayer: function (layer) { // TODO method is too big, refactor var id = L.stamp(layer); if (this._layers[id]) { return this; } this._layers[id] = layer; // TODO getMaxZoom, getMinZoom in ILayer (instead of options) if (layer.options && (!isNaN(layer.options.maxZoom) || !isNaN(layer.options.minZoom))) { this._zoomBoundLayers[id] = layer; this._updateZoomLevels(); } // TODO looks ugly, refactor!!! 
if (this.options.zoomAnimation && L.TileLayer && (layer instanceof L.TileLayer)) { this._tileLayersNum++; this._tileLayersToLoad++; layer.on('load', this._onTileLayerLoad, this); } if (this._loaded) { this._layerAdd(layer); } return this; }, removeLayer: function (layer) { var id = L.stamp(layer); if (!this._layers[id]) { return; } if (this._loaded) { layer.onRemove(this); this.fire('layerremove', {layer: layer}); } delete this._layers[id]; if (this._zoomBoundLayers[id]) { delete this._zoomBoundLayers[id]; this._updateZoomLevels(); } // TODO looks ugly, refactor if (this.options.zoomAnimation && L.TileLayer && (layer instanceof L.TileLayer)) { this._tileLayersNum--; this._tileLayersToLoad--; layer.off('load', this._onTileLayerLoad, this); } return this; }, hasLayer: function (layer) { if (!layer) { return false; } return (L.stamp(layer) in this._layers); }, eachLayer: function (method, context) { for (var i in this._layers) { method.call(context, this._layers[i]); } return this; }, invalidateSize: function (options) { options = L.extend({ animate: false, pan: true }, options === true ? {animate: true} : options); var oldSize = this.getSize(); this._sizeChanged = true; if (this.options.maxBounds) { this.setMaxBounds(this.options.maxBounds); } if (!this._loaded) { return this; } var newSize = this.getSize(), offset = oldSize.subtract(newSize).divideBy(2).round(); if (!offset.x && !offset.y) { return this; } if (options.animate && options.pan) { this.panBy(offset); } else { if (options.pan) { this._rawPanBy(offset); } this.fire('move'); // make sure moveend is not fired too often on resize clearTimeout(this._sizeTimer); this._sizeTimer = setTimeout(L.bind(this.fire, this, 'moveend'), 200); } return this.fire('resize', { oldSize: oldSize, newSize: newSize }); }, // TODO handler.addTo addHandler: function (name, HandlerClass) { if (!HandlerClass) { return; } var handler = this[name] = new HandlerClass(this); this._handlers.push(handler); if (this.options[name]) { handler.enable(); } return this; }, remove: function () { if (this._loaded) { this.fire('unload'); } this._initEvents('off'); delete this._container._leaflet; this._clearPanes(); if (this._clearControlPos) { this._clearControlPos(); } this._clearHandlers(); return this; }, // public methods for getting map state getCenter: function () { // (Boolean) -> LatLng this._checkIfLoaded(); if (!this._moved()) { return this._initialCenter; } return this.layerPointToLatLng(this._getCenterLayerPoint()); }, getZoom: function () { return this._zoom; }, getBounds: function () { var bounds = this.getPixelBounds(), sw = this.unproject(bounds.getBottomLeft()), ne = this.unproject(bounds.getTopRight()); return new L.LatLngBounds(sw, ne); }, getMinZoom: function () { var z1 = this.options.minZoom || 0, z2 = this._layersMinZoom || 0, z3 = this._boundsMinZoom || 0; return Math.max(z1, z2, z3); }, getMaxZoom: function () { var z1 = this.options.maxZoom === undefined ? Infinity : this.options.maxZoom, z2 = this._layersMaxZoom === undefined ? Infinity : this._layersMaxZoom; return Math.min(z1, z2); }, getBoundsZoom: function (bounds, inside, padding) { // (LatLngBounds[, Boolean, Point]) -> Number bounds = L.latLngBounds(bounds); var zoom = this.getMinZoom() - (inside ? 
1 : 0), maxZoom = this.getMaxZoom(), size = this.getSize(), nw = bounds.getNorthWest(), se = bounds.getSouthEast(), zoomNotFound = true, boundsSize; padding = L.point(padding || [0, 0]); do { zoom++; boundsSize = this.project(se, zoom).subtract(this.project(nw, zoom)).add(padding); zoomNotFound = !inside ? size.contains(boundsSize) : boundsSize.x < size.x || boundsSize.y < size.y; } while (zoomNotFound && zoom <= maxZoom); if (zoomNotFound && inside) { return null; } return inside ? zoom : zoom - 1; }, getSize: function () { if (!this._size || this._sizeChanged) { this._size = new L.Point( this._container.clientWidth, this._container.clientHeight); this._sizeChanged = false; } return this._size.clone(); }, getPixelBounds: function () { var topLeftPoint = this._getTopLeftPoint(); return new L.Bounds(topLeftPoint, topLeftPoint.add(this.getSize())); }, getPixelOrigin: function () { this._checkIfLoaded(); return this._initialTopLeftPoint; }, getPanes: function () { return this._panes; }, getContainer: function () { return this._container; }, // TODO replace with universal implementation after refactoring projections getZoomScale: function (toZoom) { var crs = this.options.crs; return crs.scale(toZoom) / crs.scale(this._zoom); }, getScaleZoom: function (scale) { return this._zoom + (Math.log(scale) / Math.LN2); }, // conversion methods project: function (latlng, zoom) { // (LatLng[, Number]) -> Point zoom = zoom === undefined ? this._zoom : zoom; return this.options.crs.latLngToPoint(L.latLng(latlng), zoom); }, unproject: function (point, zoom) { // (Point[, Number]) -> LatLng zoom = zoom === undefined ? this._zoom : zoom; return this.options.crs.pointToLatLng(L.point(point), zoom); }, layerPointToLatLng: function (point) { // (Point) var projectedPoint = L.point(point).add(this.getPixelOrigin()); return this.unproject(projectedPoint); }, latLngToLayerPoint: function (latlng) { // (LatLng) var projectedPoint = this.project(L.latLng(latlng))._round(); return projectedPoint._subtract(this.getPixelOrigin()); }, containerPointToLayerPoint: function (point) { // (Point) return L.point(point).subtract(this._getMapPanePos()); }, layerPointToContainerPoint: function (point) { // (Point) return L.point(point).add(this._getMapPanePos()); }, containerPointToLatLng: function (point) { var layerPoint = this.containerPointToLayerPoint(L.point(point)); return this.layerPointToLatLng(layerPoint); }, latLngToContainerPoint: function (latlng) { return this.layerPointToContainerPoint(this.latLngToLayerPoint(L.latLng(latlng))); }, mouseEventToContainerPoint: function (e) { // (MouseEvent) return L.DomEvent.getMousePosition(e, this._container); }, mouseEventToLayerPoint: function (e) { // (MouseEvent) return this.containerPointToLayerPoint(this.mouseEventToContainerPoint(e)); }, mouseEventToLatLng: function (e) { // (MouseEvent) return this.layerPointToLatLng(this.mouseEventToLayerPoint(e)); }, // map initialization methods _initContainer: function (id) { var container = this._container = L.DomUtil.get(id); if (!container) { throw new Error('Map container not found.'); } else if (container._leaflet) { throw new Error('Map container is already initialized.'); } container._leaflet = true; }, _initLayout: function () { var container = this._container; L.DomUtil.addClass(container, 'leaflet-container' + (L.Browser.touch ? ' leaflet-touch' : '') + (L.Browser.retina ? ' leaflet-retina' : '') + (this.options.fadeAnimation ? 
' leaflet-fade-anim' : '')); var position = L.DomUtil.getStyle(container, 'position'); if (position !== 'absolute' && position !== 'relative' && position !== 'fixed') { container.style.position = 'relative'; } this._initPanes(); if (this._initControlPos) { this._initControlPos(); } }, _initPanes: function () { var panes = this._panes = {}; this._mapPane = panes.mapPane = this._createPane('leaflet-map-pane', this._container); this._tilePane = panes.tilePane = this._createPane('leaflet-tile-pane', this._mapPane); panes.objectsPane = this._createPane('leaflet-objects-pane', this._mapPane); panes.shadowPane = this._createPane('leaflet-shadow-pane'); panes.overlayPane = this._createPane('leaflet-overlay-pane'); panes.markerPane = this._createPane('leaflet-marker-pane'); panes.popupPane = this._createPane('leaflet-popup-pane'); var zoomHide = ' leaflet-zoom-hide'; if (!this.options.markerZoomAnimation) { L.DomUtil.addClass(panes.markerPane, zoomHide); L.DomUtil.addClass(panes.shadowPane, zoomHide); L.DomUtil.addClass(panes.popupPane, zoomHide); } }, _createPane: function (className, container) { return L.DomUtil.create('div', className, container || this._panes.objectsPane); }, _clearPanes: function () { this._container.removeChild(this._mapPane); }, _addLayers: function (layers) { layers = layers ? (L.Util.isArray(layers) ? layers : [layers]) : []; for (var i = 0, len = layers.length; i < len; i++) { this.addLayer(layers[i]); } }, // private methods that modify map state _resetView: function (center, zoom, preserveMapOffset, afterZoomAnim) { var zoomChanged = (this._zoom !== zoom); if (!afterZoomAnim) { this.fire('movestart'); if (zoomChanged) { this.fire('zoomstart'); } } this._zoom = zoom; this._initialCenter = center; this._initialTopLeftPoint = this._getNewTopLeftPoint(center); if (!preserveMapOffset) { L.DomUtil.setPosition(this._mapPane, new L.Point(0, 0)); } else { this._initialTopLeftPoint._add(this._getMapPanePos()); } this._tileLayersToLoad = this._tileLayersNum; var loading = !this._loaded; this._loaded = true; if (loading) { this.fire('load'); this.eachLayer(this._layerAdd, this); } this.fire('viewreset', {hard: !preserveMapOffset}); this.fire('move'); if (zoomChanged || afterZoomAnim) { this.fire('zoomend'); } this.fire('moveend', {hard: !preserveMapOffset}); }, _rawPanBy: function (offset) { L.DomUtil.setPosition(this._mapPane, this._getMapPanePos().subtract(offset)); }, _getZoomSpan: function () { return this.getMaxZoom() - this.getMinZoom(); }, _updateZoomLevels: function () { var i, minZoom = Infinity, maxZoom = -Infinity, oldZoomSpan = this._getZoomSpan(); for (i in this._zoomBoundLayers) { var layer = this._zoomBoundLayers[i]; if (!isNaN(layer.options.minZoom)) { minZoom = Math.min(minZoom, layer.options.minZoom); } if (!isNaN(layer.options.maxZoom)) { maxZoom = Math.max(maxZoom, layer.options.maxZoom); } } if (i === undefined) { // we have no tilelayers this._layersMaxZoom = this._layersMinZoom = undefined; } else { this._layersMaxZoom = maxZoom; this._layersMinZoom = minZoom; } if (oldZoomSpan !== this._getZoomSpan()) { this.fire('zoomlevelschange'); } }, _panInsideMaxBounds: function () { this.panInsideBounds(this.options.maxBounds); }, _checkIfLoaded: function () { if (!this._loaded) { throw new Error('Set map center and zoom first.'); } }, // map events _initEvents: function (onOff) { if (!L.DomEvent) { return; } onOff = onOff || 'on'; L.DomEvent[onOff](this._container, 'click', this._onMouseClick, this); var events = ['dblclick', 'mousedown', 'mouseup', 'mouseenter', 
'mouseleave', 'mousemove', 'contextmenu'], i, len; for (i = 0, len = events.length; i < len; i++) { L.DomEvent[onOff](this._container, events[i], this._fireMouseEvent, this); } if (this.options.trackResize) { L.DomEvent[onOff](window, 'resize', this._onResize, this); } }, _onResize: function () { L.Util.cancelAnimFrame(this._resizeRequest); this._resizeRequest = L.Util.requestAnimFrame( this.invalidateSize, this, false, this._container); }, _onMouseClick: function (e) { // jshint camelcase: false if (!this._loaded || (!e._simulated && this.dragging && this.dragging.moved()) || e._leaflet_stop) { return; } this.fire('preclick'); this._fireMouseEvent(e); }, _fireMouseEvent: function (e) { // jshint camelcase: false if (!this._loaded || e._leaflet_stop) { return; } var type = e.type; type = (type === 'mouseenter' ? 'mouseover' : (type === 'mouseleave' ? 'mouseout' : type)); if (!this.hasEventListeners(type)) { return; } if (type === 'contextmenu') { L.DomEvent.preventDefault(e); } var containerPoint = this.mouseEventToContainerPoint(e), layerPoint = this.containerPointToLayerPoint(containerPoint), latlng = this.layerPointToLatLng(layerPoint); this.fire(type, { latlng: latlng, layerPoint: layerPoint, containerPoint: containerPoint, originalEvent: e }); }, _onTileLayerLoad: function () { this._tileLayersToLoad--; if (this._tileLayersNum && !this._tileLayersToLoad) { this.fire('tilelayersload'); } }, _clearHandlers: function () { for (var i = 0, len = this._handlers.length; i < len; i++) { this._handlers[i].disable(); } }, whenReady: function (callback, context) { if (this._loaded) { callback.call(context || this, this); } else { this.on('load', callback, context); } return this; }, _layerAdd: function (layer) { layer.onAdd(this); this.fire('layeradd', {layer: layer}); }, // private methods for getting map state _getMapPanePos: function () { return L.DomUtil.getPosition(this._mapPane); }, _moved: function () { var pos = this._getMapPanePos(); return pos && !pos.equals([0, 0]); }, _getTopLeftPoint: function () { return this.getPixelOrigin().subtract(this._getMapPanePos()); }, _getNewTopLeftPoint: function (center, zoom) { var viewHalf = this.getSize()._divideBy(2); // TODO round on display, not calculation to increase precision? return this.project(center, zoom)._subtract(viewHalf)._round(); }, _latLngToNewLayerPoint: function (latlng, newZoom, newCenter) { var topLeft = this._getNewTopLeftPoint(newCenter, newZoom).add(this._getMapPanePos()); return this.project(latlng, newZoom)._subtract(topLeft); }, // layer point of the current center _getCenterLayerPoint: function () { return this.containerPointToLayerPoint(this.getSize()._divideBy(2)); }, // offset of the specified place to the current center in pixels _getCenterOffset: function (latlng) { return this.latLngToLayerPoint(latlng).subtract(this._getCenterLayerPoint()); }, _limitZoom: function (zoom) { var min = this.getMinZoom(), max = this.getMaxZoom(); return Math.max(min, Math.min(max, zoom)); } }); L.map = function (id, options) { return new L.Map(id, options); }; /* * Mercator projection that takes into account that the Earth is not a perfect sphere. * Less popular than spherical mercator; used by projections like EPSG:3395. 
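 *
 * A hedged sketch of opting into this CRS (assumes a 'map' container element
 * and a tile service that actually serves EPSG:3395 tiles):
 *
 *   var map = L.map('map', {crs: L.CRS.EPSG3395, center: [60, 30], zoom: 5});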
*/ L.Projection.Mercator = { MAX_LATITUDE: 85.0840591556, R_MINOR: 6356752.314245179, R_MAJOR: 6378137, project: function (latlng) { // (LatLng) -> Point var d = L.LatLng.DEG_TO_RAD, max = this.MAX_LATITUDE, lat = Math.max(Math.min(max, latlng.lat), -max), r = this.R_MAJOR, r2 = this.R_MINOR, x = latlng.lng * d * r, y = lat * d, tmp = r2 / r, eccent = Math.sqrt(1.0 - tmp * tmp), con = eccent * Math.sin(y); con = Math.pow((1 - con) / (1 + con), eccent * 0.5); var ts = Math.tan(0.5 * ((Math.PI * 0.5) - y)) / con; y = -r * Math.log(ts); return new L.Point(x, y); }, unproject: function (point) { // (Point, Boolean) -> LatLng var d = L.LatLng.RAD_TO_DEG, r = this.R_MAJOR, r2 = this.R_MINOR, lng = point.x * d / r, tmp = r2 / r, eccent = Math.sqrt(1 - (tmp * tmp)), ts = Math.exp(- point.y / r), phi = (Math.PI / 2) - 2 * Math.atan(ts), numIter = 15, tol = 1e-7, i = numIter, dphi = 0.1, con; while ((Math.abs(dphi) > tol) && (--i > 0)) { con = eccent * Math.sin(phi); dphi = (Math.PI / 2) - 2 * Math.atan(ts * Math.pow((1.0 - con) / (1.0 + con), 0.5 * eccent)) - phi; phi += dphi; } return new L.LatLng(phi * d, lng); } }; L.CRS.EPSG3395 = L.extend({}, L.CRS, { code: 'EPSG:3395', projection: L.Projection.Mercator, transformation: (function () { var m = L.Projection.Mercator, r = m.R_MAJOR, r2 = m.R_MINOR; return new L.Transformation(0.5 / (Math.PI * r), 0.5, -0.5 / (Math.PI * r2), 0.5); }()) }); /* * L.TileLayer is used for standard xyz-numbered tile layers. */ L.TileLayer = L.Class.extend({ includes: L.Mixin.Events, options: { minZoom: 0, maxZoom: 18, tileSize: 256, subdomains: 'abc', errorTileUrl: '', attribution: '', zoomOffset: 0, opacity: 1, /* (undefined works too) zIndex: null, tms: false, continuousWorld: false, noWrap: false, zoomReverse: false, detectRetina: false, reuseTiles: false, bounds: false, */ unloadInvisibleTiles: L.Browser.mobile, updateWhenIdle: L.Browser.mobile }, initialize: function (url, options) { options = L.setOptions(this, options); // detecting retina displays, adjusting tileSize and zoom levels if (options.detectRetina && L.Browser.retina && options.maxZoom > 0) { options.tileSize = Math.floor(options.tileSize / 2); options.zoomOffset++; if (options.minZoom > 0) { options.minZoom--; } this.options.maxZoom--; } if (options.bounds) { options.bounds = L.latLngBounds(options.bounds); } this._url = url; var subdomains = this.options.subdomains; if (typeof subdomains === 'string') { this.options.subdomains = subdomains.split(''); } }, onAdd: function (map) { this._map = map; this._animated = map._zoomAnimated; // create a container div for tiles this._initContainer(); // create an image to clone for tiles this._createTileProto(); // set up events map.on({ 'viewreset': this._reset, 'moveend': this._update }, this); if (this._animated) { map.on({ 'zoomanim': this._animateZoom, 'zoomend': this._endZoomAnim }, this); } if (!this.options.updateWhenIdle) { this._limitedUpdate = L.Util.limitExecByInterval(this._update, 150, this); map.on('move', this._limitedUpdate, this); } this._reset(); this._update(); }, addTo: function (map) { map.addLayer(this); return this; }, onRemove: function (map) { this._container.parentNode.removeChild(this._container); map.off({ 'viewreset': this._reset, 'moveend': this._update }, this); if (this._animated) { map.off({ 'zoomanim': this._animateZoom, 'zoomend': this._endZoomAnim }, this); } if (!this.options.updateWhenIdle) { map.off('move', this._limitedUpdate, this); } this._container = null; this._map = null; }, bringToFront: function () { var pane = 
this._map._panes.tilePane; if (this._container) { pane.appendChild(this._container); this._setAutoZIndex(pane, Math.max); } return this; }, bringToBack: function () { var pane = this._map._panes.tilePane; if (this._container) { pane.insertBefore(this._container, pane.firstChild); this._setAutoZIndex(pane, Math.min); } return this; }, getAttribution: function () { return this.options.attribution; }, getContainer: function () { return this._container; }, setOpacity: function (opacity) { this.options.opacity = opacity; if (this._map) { this._updateOpacity(); } return this; }, setZIndex: function (zIndex) { this.options.zIndex = zIndex; this._updateZIndex(); return this; }, setUrl: function (url, noRedraw) { this._url = url; if (!noRedraw) { this.redraw(); } return this; }, redraw: function () { if (this._map) { this._reset({hard: true}); this._update(); } return this; }, _updateZIndex: function () { if (this._container && this.options.zIndex !== undefined) { this._container.style.zIndex = this.options.zIndex; } }, _setAutoZIndex: function (pane, compare) { var layers = pane.children, edgeZIndex = -compare(Infinity, -Infinity), // -Infinity for max, Infinity for min zIndex, i, len; for (i = 0, len = layers.length; i < len; i++) { if (layers[i] !== this._container) { zIndex = parseInt(layers[i].style.zIndex, 10); if (!isNaN(zIndex)) { edgeZIndex = compare(edgeZIndex, zIndex); } } } this.options.zIndex = this._container.style.zIndex = (isFinite(edgeZIndex) ? edgeZIndex : 0) + compare(1, -1); }, _updateOpacity: function () { var i, tiles = this._tiles; if (L.Browser.ielt9) { for (i in tiles) { L.DomUtil.setOpacity(tiles[i], this.options.opacity); } } else { L.DomUtil.setOpacity(this._container, this.options.opacity); } }, _initContainer: function () { var tilePane = this._map._panes.tilePane; if (!this._container) { this._container = L.DomUtil.create('div', 'leaflet-layer'); this._updateZIndex(); if (this._animated) { var className = 'leaflet-tile-container leaflet-zoom-animated'; this._bgBuffer = L.DomUtil.create('div', className, this._container); this._tileContainer = L.DomUtil.create('div', className, this._container); } else { this._tileContainer = this._container; } tilePane.appendChild(this._container); if (this.options.opacity < 1) { this._updateOpacity(); } } }, _reset: function (e) { for (var key in this._tiles) { this.fire('tileunload', {tile: this._tiles[key]}); } this._tiles = {}; this._tilesToLoad = 0; if (this.options.reuseTiles) { this._unusedTiles = []; } this._tileContainer.innerHTML = ''; if (this._animated && e && e.hard) { this._clearBgBuffer(); } this._initContainer(); }, _update: function () { if (!this._map) { return; } var bounds = this._map.getPixelBounds(), zoom = this._map.getZoom(), tileSize = this.options.tileSize; if (zoom > this.options.maxZoom || zoom < this.options.minZoom) { return; } var tileBounds = L.bounds( bounds.min.divideBy(tileSize)._floor(), bounds.max.divideBy(tileSize)._floor()); this._addTilesFromCenterOut(tileBounds); if (this.options.unloadInvisibleTiles || this.options.reuseTiles) { this._removeOtherTiles(tileBounds); } }, _addTilesFromCenterOut: function (bounds) { var queue = [], center = bounds.getCenter(); var j, i, point; for (j = bounds.min.y; j <= bounds.max.y; j++) { for (i = bounds.min.x; i <= bounds.max.x; i++) { point = new L.Point(i, j); if (this._tileShouldBeLoaded(point)) { queue.push(point); } } } var tilesToLoad = queue.length; if (tilesToLoad === 0) { return; } // load tiles in order of their distance to center queue.sort(function 
(a, b) { return a.distanceTo(center) - b.distanceTo(center); }); var fragment = document.createDocumentFragment(); // if its the first batch of tiles to load if (!this._tilesToLoad) { this.fire('loading'); } this._tilesToLoad += tilesToLoad; for (i = 0; i < tilesToLoad; i++) { this._addTile(queue[i], fragment); } this._tileContainer.appendChild(fragment); }, _tileShouldBeLoaded: function (tilePoint) { if ((tilePoint.x + ':' + tilePoint.y) in this._tiles) { return false; // already loaded } var options = this.options; if (!options.continuousWorld) { var limit = this._getWrapTileNum(); // don't load if exceeds world bounds if ((options.noWrap && (tilePoint.x < 0 || tilePoint.x >= limit)) || tilePoint.y < 0 || tilePoint.y >= limit) { return false; } } if (options.bounds) { var tileSize = options.tileSize, nwPoint = tilePoint.multiplyBy(tileSize), sePoint = nwPoint.add([tileSize, tileSize]), nw = this._map.unproject(nwPoint), se = this._map.unproject(sePoint); // TODO temporary hack, will be removed after refactoring projections // https://github.com/Leaflet/Leaflet/issues/1618 if (!options.continuousWorld && !options.noWrap) { nw = nw.wrap(); se = se.wrap(); } if (!options.bounds.intersects([nw, se])) { return false; } } return true; }, _removeOtherTiles: function (bounds) { var kArr, x, y, key; for (key in this._tiles) { kArr = key.split(':'); x = parseInt(kArr[0], 10); y = parseInt(kArr[1], 10); // remove tile if it's out of bounds if (x < bounds.min.x || x > bounds.max.x || y < bounds.min.y || y > bounds.max.y) { this._removeTile(key); } } }, _removeTile: function (key) { var tile = this._tiles[key]; this.fire('tileunload', {tile: tile, url: tile.src}); if (this.options.reuseTiles) { L.DomUtil.removeClass(tile, 'leaflet-tile-loaded'); this._unusedTiles.push(tile); } else if (tile.parentNode === this._tileContainer) { this._tileContainer.removeChild(tile); } // for https://github.com/CloudMade/Leaflet/issues/137 if (!L.Browser.android) { tile.onload = null; tile.src = L.Util.emptyImageUrl; } delete this._tiles[key]; }, _addTile: function (tilePoint, container) { var tilePos = this._getTilePos(tilePoint); // get unused tile - or create a new tile var tile = this._getTile(); /* Chrome 20 layouts much faster with top/left (verify with timeline, frames) Android 4 browser has display issues with top/left and requires transform instead Android 2 browser requires top/left or tiles disappear on load or first drag (reappear after zoom) https://github.com/CloudMade/Leaflet/issues/866 (other browsers don't currently care) - see debug/hacks/jitter.html for an example */ L.DomUtil.setPosition(tile, tilePos, L.Browser.chrome || L.Browser.android23); this._tiles[tilePoint.x + ':' + tilePoint.y] = tile; this._loadTile(tile, tilePoint); if (tile.parentNode !== this._tileContainer) { container.appendChild(tile); } }, _getZoomForUrl: function () { var options = this.options, zoom = this._map.getZoom(); if (options.zoomReverse) { zoom = options.maxZoom - zoom; } return zoom + options.zoomOffset; }, _getTilePos: function (tilePoint) { var origin = this._map.getPixelOrigin(), tileSize = this.options.tileSize; return tilePoint.multiplyBy(tileSize).subtract(origin); }, // image-specific code (override to implement e.g. 
Canvas or SVG tile layer) getTileUrl: function (tilePoint) { return L.Util.template(this._url, L.extend({ s: this._getSubdomain(tilePoint), z: tilePoint.z, x: tilePoint.x, y: tilePoint.y }, this.options)); }, _getWrapTileNum: function () { // TODO refactor, limit is not valid for non-standard projections return Math.pow(2, this._getZoomForUrl()); }, _adjustTilePoint: function (tilePoint) { var limit = this._getWrapTileNum(); // wrap tile coordinates if (!this.options.continuousWorld && !this.options.noWrap) { tilePoint.x = ((tilePoint.x % limit) + limit) % limit; } if (this.options.tms) { tilePoint.y = limit - tilePoint.y - 1; } tilePoint.z = this._getZoomForUrl(); }, _getSubdomain: function (tilePoint) { var index = Math.abs(tilePoint.x + tilePoint.y) % this.options.subdomains.length; return this.options.subdomains[index]; }, _createTileProto: function () { var img = this._tileImg = L.DomUtil.create('img', 'leaflet-tile'); img.style.width = img.style.height = this.options.tileSize + 'px'; img.galleryimg = 'no'; }, _getTile: function () { if (this.options.reuseTiles && this._unusedTiles.length > 0) { var tile = this._unusedTiles.pop(); this._resetTile(tile); return tile; } return this._createTile(); }, // Override if data stored on a tile needs to be cleaned up before reuse _resetTile: function (/*tile*/) {}, _createTile: function () { var tile = this._tileImg.cloneNode(false); tile.onselectstart = tile.onmousemove = L.Util.falseFn; if (L.Browser.ielt9 && this.options.opacity !== undefined) { L.DomUtil.setOpacity(tile, this.options.opacity); } return tile; }, _loadTile: function (tile, tilePoint) { tile._layer = this; tile.onload = this._tileOnLoad; tile.onerror = this._tileOnError; this._adjustTilePoint(tilePoint); tile.src = this.getTileUrl(tilePoint); }, _tileLoaded: function () { this._tilesToLoad--; if (!this._tilesToLoad) { this.fire('load'); if (this._animated) { // clear scaled tiles after all new tiles are loaded (for performance) clearTimeout(this._clearBgBufferTimer); this._clearBgBufferTimer = setTimeout(L.bind(this._clearBgBuffer, this), 500); } } }, _tileOnLoad: function () { var layer = this._layer; //Only if we are loading an actual image if (this.src !== L.Util.emptyImageUrl) { L.DomUtil.addClass(this, 'leaflet-tile-loaded'); layer.fire('tileload', { tile: this, url: this.src }); } layer._tileLoaded(); }, _tileOnError: function () { var layer = this._layer; layer.fire('tileerror', { tile: this, url: this.src }); var newUrl = layer.options.errorTileUrl; if (newUrl) { this.src = newUrl; } layer._tileLoaded(); } }); L.tileLayer = function (url, options) { return new L.TileLayer(url, options); }; /* * L.TileLayer.WMS is used for putting WMS tile layers on the map. 
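 *
 * A minimal usage sketch (the WMS endpoint URL and layer name are
 * placeholders; 'map' is an existing L.Map instance):
 *
 *   L.tileLayer.wms('http://example.com/wms', {
 *       layers: 'some_layer',
 *       format: 'image/png',
 *       transparent: true
 *   }).addTo(map);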
*/ L.TileLayer.WMS = L.TileLayer.extend({ defaultWmsParams: { service: 'WMS', request: 'GetMap', version: '1.1.1', layers: '', styles: '', format: 'image/jpeg', transparent: false }, initialize: function (url, options) { // (String, Object) this._url = url; var wmsParams = L.extend({}, this.defaultWmsParams), tileSize = options.tileSize || this.options.tileSize; if (options.detectRetina && L.Browser.retina) { wmsParams.width = wmsParams.height = tileSize * 2; } else { wmsParams.width = wmsParams.height = tileSize; } for (var i in options) { // all keys that are not TileLayer options go to WMS params if (!this.options.hasOwnProperty(i) && i !== 'crs') { wmsParams[i] = options[i]; } } this.wmsParams = wmsParams; L.setOptions(this, options); }, onAdd: function (map) { this._crs = this.options.crs || map.options.crs; var projectionKey = parseFloat(this.wmsParams.version) >= 1.3 ? 'crs' : 'srs'; this.wmsParams[projectionKey] = this._crs.code; L.TileLayer.prototype.onAdd.call(this, map); }, getTileUrl: function (tilePoint, zoom) { // (Point, Number) -> String var map = this._map, tileSize = this.options.tileSize, nwPoint = tilePoint.multiplyBy(tileSize), sePoint = nwPoint.add([tileSize, tileSize]), nw = this._crs.project(map.unproject(nwPoint, zoom)), se = this._crs.project(map.unproject(sePoint, zoom)), bbox = [nw.x, se.y, se.x, nw.y].join(','), url = L.Util.template(this._url, {s: this._getSubdomain(tilePoint)}); return url + L.Util.getParamString(this.wmsParams, url, true) + '&BBOX=' + bbox; }, setParams: function (params, noRedraw) { L.extend(this.wmsParams, params); if (!noRedraw) { this.redraw(); } return this; } }); L.tileLayer.wms = function (url, options) { return new L.TileLayer.WMS(url, options); }; /* * L.TileLayer.Canvas is a class that you can use as a base for creating * dynamically drawn Canvas-based tile layers. */ L.TileLayer.Canvas = L.TileLayer.extend({ options: { async: false }, initialize: function (options) { L.setOptions(this, options); }, redraw: function () { for (var i in this._tiles) { this._redrawTile(this._tiles[i]); } return this; }, _redrawTile: function (tile) { this.drawTile(tile, tile._tilePoint, this._map._zoom); }, _createTileProto: function () { var proto = this._canvasProto = L.DomUtil.create('canvas', 'leaflet-tile'); proto.width = proto.height = this.options.tileSize; }, _createTile: function () { var tile = this._canvasProto.cloneNode(false); tile.onselectstart = tile.onmousemove = L.Util.falseFn; return tile; }, _loadTile: function (tile, tilePoint) { tile._layer = this; tile._tilePoint = tilePoint; this._redrawTile(tile); if (!this.options.async) { this.tileDrawn(tile); } }, drawTile: function (/*tile, tilePoint*/) { // override with rendering code }, tileDrawn: function (tile) { this._tileOnLoad.call(tile); } }); L.tileLayer.canvas = function (options) { return new L.TileLayer.Canvas(options); }; /* * L.ImageOverlay is used to overlay images over the map (to specific geographical bounds). 
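 *
 * A minimal usage sketch (the image URL and bounds are placeholders; 'map' is
 * an existing L.Map instance):
 *
 *   var imageBounds = [[40.712216, -74.22655], [40.773941, -74.12544]];
 *   L.imageOverlay('http://example.com/overlay.png', imageBounds, {opacity: 0.8}).addTo(map);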
*/ L.ImageOverlay = L.Class.extend({ includes: L.Mixin.Events, options: { opacity: 1 }, initialize: function (url, bounds, options) { // (String, LatLngBounds, Object) this._url = url; this._bounds = L.latLngBounds(bounds); L.setOptions(this, options); }, onAdd: function (map) { this._map = map; if (!this._image) { this._initImage(); } map._panes.overlayPane.appendChild(this._image); map.on('viewreset', this._reset, this); if (map.options.zoomAnimation && L.Browser.any3d) { map.on('zoomanim', this._animateZoom, this); } this._reset(); }, onRemove: function (map) { map.getPanes().overlayPane.removeChild(this._image); map.off('viewreset', this._reset, this); if (map.options.zoomAnimation) { map.off('zoomanim', this._animateZoom, this); } }, addTo: function (map) { map.addLayer(this); return this; }, setOpacity: function (opacity) { this.options.opacity = opacity; this._updateOpacity(); return this; }, // TODO remove bringToFront/bringToBack duplication from TileLayer/Path bringToFront: function () { if (this._image) { this._map._panes.overlayPane.appendChild(this._image); } return this; }, bringToBack: function () { var pane = this._map._panes.overlayPane; if (this._image) { pane.insertBefore(this._image, pane.firstChild); } return this; }, _initImage: function () { this._image = L.DomUtil.create('img', 'leaflet-image-layer'); if (this._map.options.zoomAnimation && L.Browser.any3d) { L.DomUtil.addClass(this._image, 'leaflet-zoom-animated'); } else { L.DomUtil.addClass(this._image, 'leaflet-zoom-hide'); } this._updateOpacity(); //TODO createImage util method to remove duplication L.extend(this._image, { galleryimg: 'no', onselectstart: L.Util.falseFn, onmousemove: L.Util.falseFn, onload: L.bind(this._onImageLoad, this), src: this._url }); }, _animateZoom: function (e) { var map = this._map, image = this._image, scale = map.getZoomScale(e.zoom), nw = this._bounds.getNorthWest(), se = this._bounds.getSouthEast(), topLeft = map._latLngToNewLayerPoint(nw, e.zoom, e.center), size = map._latLngToNewLayerPoint(se, e.zoom, e.center)._subtract(topLeft), origin = topLeft._add(size._multiplyBy((1 / 2) * (1 - 1 / scale))); image.style[L.DomUtil.TRANSFORM] = L.DomUtil.getTranslateString(origin) + ' scale(' + scale + ') '; }, _reset: function () { var image = this._image, topLeft = this._map.latLngToLayerPoint(this._bounds.getNorthWest()), size = this._map.latLngToLayerPoint(this._bounds.getSouthEast())._subtract(topLeft); L.DomUtil.setPosition(image, topLeft); image.style.width = size.x + 'px'; image.style.height = size.y + 'px'; }, _onImageLoad: function () { this.fire('load'); }, _updateOpacity: function () { L.DomUtil.setOpacity(this._image, this.options.opacity); } }); L.imageOverlay = function (url, bounds, options) { return new L.ImageOverlay(url, bounds, options); }; /* * L.Icon is an image-based icon class that you can use with L.Marker for custom markers. 
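 *
 * A minimal usage sketch (the icon image path and pixel sizes are placeholders;
 * 'map' is an existing L.Map instance):
 *
 *   var icon = L.icon({
 *       iconUrl: 'my-icon.png',
 *       iconSize: [38, 95],
 *       iconAnchor: [22, 94],
 *       popupAnchor: [-3, -76]
 *   });
 *   L.marker([51.5, -0.09], {icon: icon}).addTo(map);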
*/ L.Icon = L.Class.extend({ options: { /* iconUrl: (String) (required) iconRetinaUrl: (String) (optional, used for retina devices if detected) iconSize: (Point) (can be set through CSS) iconAnchor: (Point) (centered by default, can be set in CSS with negative margins) popupAnchor: (Point) (if not specified, popup opens in the anchor point) shadowUrl: (String) (no shadow by default) shadowRetinaUrl: (String) (optional, used for retina devices if detected) shadowSize: (Point) shadowAnchor: (Point) */ className: '' }, initialize: function (options) { L.setOptions(this, options); }, createIcon: function (oldIcon) { return this._createIcon('icon', oldIcon); }, createShadow: function (oldIcon) { return this._createIcon('shadow', oldIcon); }, _createIcon: function (name, oldIcon) { var src = this._getIconUrl(name); if (!src) { if (name === 'icon') { throw new Error('iconUrl not set in Icon options (see the docs).'); } return null; } var img; if (!oldIcon || oldIcon.tagName !== 'IMG') { img = this._createImg(src); } else { img = this._createImg(src, oldIcon); } this._setIconStyles(img, name); return img; }, _setIconStyles: function (img, name) { var options = this.options, size = L.point(options[name + 'Size']), anchor; if (name === 'shadow') { anchor = L.point(options.shadowAnchor || options.iconAnchor); } else { anchor = L.point(options.iconAnchor); } if (!anchor && size) { anchor = size.divideBy(2, true); } img.className = 'leaflet-marker-' + name + ' ' + options.className; if (anchor) { img.style.marginLeft = (-anchor.x) + 'px'; img.style.marginTop = (-anchor.y) + 'px'; } if (size) { img.style.width = size.x + 'px'; img.style.height = size.y + 'px'; } }, _createImg: function (src, el) { if (!L.Browser.ie6) { if (!el) { el = document.createElement('img'); } el.src = src; } else { if (!el) { el = document.createElement('div'); } el.style.filter = 'progid:DXImageTransform.Microsoft.AlphaImageLoader(src="' + src + '")'; } return el; }, _getIconUrl: function (name) { if (L.Browser.retina && this.options[name + 'RetinaUrl']) { return this.options[name + 'RetinaUrl']; } return this.options[name + 'Url']; } }); L.icon = function (options) { return new L.Icon(options); }; /* * L.Icon.Default is the blue marker icon used by default in Leaflet. */ L.Icon.Default = L.Icon.extend({ options: { iconSize: [25, 41], iconAnchor: [12, 41], popupAnchor: [1, -34], shadowSize: [41, 41] }, _getIconUrl: function (name) { var key = name + 'Url'; if (this.options[key]) { return this.options[key]; } if (L.Browser.retina && name === 'icon') { name += '-2x'; } var path = L.Icon.Default.imagePath; if (!path) { throw new Error('Couldn\'t autodetect L.Icon.Default.imagePath, set it manually.'); } return path + '/marker-' + name + '.png'; } }); L.Icon.Default.imagePath = (function () { var scripts = document.getElementsByTagName('script'), leafletRe = /[\/^]leaflet[\-\._]?([\w\-\._]*)\.js\??/; var i, len, src, matches, path; for (i = 0, len = scripts.length; i < len; i++) { src = scripts[i].src; matches = src.match(leafletRe); if (matches) { path = src.split(leafletRe)[0]; return (path ? path + '/' : '') + 'images'; } } }()); /* * L.Marker is used to display clickable/draggable icons on the map. 
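 *
 * A minimal usage sketch (coordinates and the existing `map` instance are placeholders):
 *
 *   var marker = L.marker([51.5, -0.09], {title: 'Hello'}).addTo(map);
 *   marker.on('click', function (e) { console.log(e.latlng); });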
*/ L.Marker = L.Class.extend({ includes: L.Mixin.Events, options: { icon: new L.Icon.Default(), title: '', clickable: true, draggable: false, keyboard: true, zIndexOffset: 0, opacity: 1, riseOnHover: false, riseOffset: 250 }, initialize: function (latlng, options) { L.setOptions(this, options); this._latlng = L.latLng(latlng); }, onAdd: function (map) { this._map = map; map.on('viewreset', this.update, this); this._initIcon(); this.update(); if (map.options.zoomAnimation && map.options.markerZoomAnimation) { map.on('zoomanim', this._animateZoom, this); } }, addTo: function (map) { map.addLayer(this); return this; }, onRemove: function (map) { if (this.dragging) { this.dragging.disable(); } this._removeIcon(); this._removeShadow(); this.fire('remove'); map.off({ 'viewreset': this.update, 'zoomanim': this._animateZoom }, this); this._map = null; }, getLatLng: function () { return this._latlng; }, setLatLng: function (latlng) { this._latlng = L.latLng(latlng); this.update(); return this.fire('move', { latlng: this._latlng }); }, setZIndexOffset: function (offset) { this.options.zIndexOffset = offset; this.update(); return this; }, setIcon: function (icon) { this.options.icon = icon; if (this._map) { this._initIcon(); this.update(); } return this; }, update: function () { if (this._icon) { var pos = this._map.latLngToLayerPoint(this._latlng).round(); this._setPos(pos); } return this; }, _initIcon: function () { var options = this.options, map = this._map, animation = (map.options.zoomAnimation && map.options.markerZoomAnimation), classToAdd = animation ? 'leaflet-zoom-animated' : 'leaflet-zoom-hide'; var icon = options.icon.createIcon(this._icon), addIcon = false; // if we're not reusing the icon, remove the old one and init new one if (icon !== this._icon) { if (this._icon) { this._removeIcon(); } addIcon = true; if (options.title) { icon.title = options.title; } } L.DomUtil.addClass(icon, classToAdd); if (options.keyboard) { icon.tabIndex = '0'; } this._icon = icon; this._initInteraction(); if (options.riseOnHover) { L.DomEvent .on(icon, 'mouseover', this._bringToFront, this) .on(icon, 'mouseout', this._resetZIndex, this); } var newShadow = options.icon.createShadow(this._shadow), addShadow = false; if (newShadow !== this._shadow) { this._removeShadow(); addShadow = true; if (newShadow) { L.DomUtil.addClass(newShadow, classToAdd); } } this._shadow = newShadow; if (options.opacity < 1) { this._updateOpacity(); } var panes = this._map._panes; if (addIcon) { panes.markerPane.appendChild(this._icon); } if (newShadow && addShadow) { panes.shadowPane.appendChild(this._shadow); } }, _removeIcon: function () { if (this.options.riseOnHover) { L.DomEvent .off(this._icon, 'mouseover', this._bringToFront) .off(this._icon, 'mouseout', this._resetZIndex); } this._map._panes.markerPane.removeChild(this._icon); this._icon = null; }, _removeShadow: function () { if (this._shadow) { this._map._panes.shadowPane.removeChild(this._shadow); } this._shadow = null; }, _setPos: function (pos) { L.DomUtil.setPosition(this._icon, pos); if (this._shadow) { L.DomUtil.setPosition(this._shadow, pos); } this._zIndex = pos.y + this.options.zIndexOffset; this._resetZIndex(); }, _updateZIndex: function (offset) { this._icon.style.zIndex = this._zIndex + offset; }, _animateZoom: function (opt) { var pos = this._map._latLngToNewLayerPoint(this._latlng, opt.zoom, opt.center); this._setPos(pos); }, _initInteraction: function () { if (!this.options.clickable) { return; } // TODO refactor into something shared with Map/Path/etc. 
to DRY it up var icon = this._icon, events = ['dblclick', 'mousedown', 'mouseover', 'mouseout', 'contextmenu']; L.DomUtil.addClass(icon, 'leaflet-clickable'); L.DomEvent.on(icon, 'click', this._onMouseClick, this); L.DomEvent.on(icon, 'keypress', this._onKeyPress, this); for (var i = 0; i < events.length; i++) { L.DomEvent.on(icon, events[i], this._fireMouseEvent, this); } if (L.Handler.MarkerDrag) { this.dragging = new L.Handler.MarkerDrag(this); if (this.options.draggable) { this.dragging.enable(); } } }, _onMouseClick: function (e) { var wasDragged = this.dragging && this.dragging.moved(); if (this.hasEventListeners(e.type) || wasDragged) { L.DomEvent.stopPropagation(e); } if (wasDragged) { return; } if ((!this.dragging || !this.dragging._enabled) && this._map.dragging && this._map.dragging.moved()) { return; } this.fire(e.type, { originalEvent: e, latlng: this._latlng }); }, _onKeyPress: function (e) { if (e.keyCode === 13) { this.fire('click', { originalEvent: e, latlng: this._latlng }); } }, _fireMouseEvent: function (e) { this.fire(e.type, { originalEvent: e, latlng: this._latlng }); // TODO proper custom event propagation // this line will always be called if marker is in a FeatureGroup if (e.type === 'contextmenu' && this.hasEventListeners(e.type)) { L.DomEvent.preventDefault(e); } if (e.type !== 'mousedown') { L.DomEvent.stopPropagation(e); } else { L.DomEvent.preventDefault(e); } }, setOpacity: function (opacity) { this.options.opacity = opacity; if (this._map) { this._updateOpacity(); } }, _updateOpacity: function () { L.DomUtil.setOpacity(this._icon, this.options.opacity); if (this._shadow) { L.DomUtil.setOpacity(this._shadow, this.options.opacity); } }, _bringToFront: function () { this._updateZIndex(this.options.riseOffset); }, _resetZIndex: function () { this._updateZIndex(0); } }); L.marker = function (latlng, options) { return new L.Marker(latlng, options); }; /* * L.DivIcon is a lightweight HTML-based icon class (as opposed to the image-based L.Icon) * to use with L.Marker. */ L.DivIcon = L.Icon.extend({ options: { iconSize: [12, 12], // also can be set through CSS /* iconAnchor: (Point) popupAnchor: (Point) html: (String) bgPos: (Point) */ className: 'leaflet-div-icon', html: false }, createIcon: function (oldIcon) { var div = (oldIcon && oldIcon.tagName === 'DIV') ? oldIcon : document.createElement('div'), options = this.options; if (options.html !== false) { div.innerHTML = options.html; } else { div.innerHTML = ''; } if (options.bgPos) { div.style.backgroundPosition = (-options.bgPos.x) + 'px ' + (-options.bgPos.y) + 'px'; } this._setIconStyles(div, 'icon'); return div; }, createShadow: function () { return null; } }); L.divIcon = function (options) { return new L.DivIcon(options); }; /* * L.Popup is used for displaying popups on the map. 
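 *
 * A minimal usage sketch for a standalone popup (coordinates, content and the existing
 * `map` instance are placeholders):
 *
 *   L.popup({maxWidth: 200})
 *       .setLatLng([51.5, -0.09])
 *       .setContent('Standalone popup content')
 *       .openOn(map);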
*/ L.Map.mergeOptions({ closePopupOnClick: true }); L.Popup = L.Class.extend({ includes: L.Mixin.Events, options: { minWidth: 50, maxWidth: 300, maxHeight: null, autoPan: true, closeButton: true, offset: [0, 7], autoPanPadding: [5, 5], keepInView: false, className: '', zoomAnimation: true }, initialize: function (options, source) { L.setOptions(this, options); this._source = source; this._animated = L.Browser.any3d && this.options.zoomAnimation; this._isOpen = false; }, onAdd: function (map) { this._map = map; if (!this._container) { this._initLayout(); } this._updateContent(); var animFade = map.options.fadeAnimation; if (animFade) { L.DomUtil.setOpacity(this._container, 0); } map._panes.popupPane.appendChild(this._container); map.on(this._getEvents(), this); this._update(); if (animFade) { L.DomUtil.setOpacity(this._container, 1); } this.fire('open'); map.fire('popupopen', {popup: this}); if (this._source) { this._source.fire('popupopen', {popup: this}); } }, addTo: function (map) { map.addLayer(this); return this; }, openOn: function (map) { map.openPopup(this); return this; }, onRemove: function (map) { map._panes.popupPane.removeChild(this._container); L.Util.falseFn(this._container.offsetWidth); // force reflow map.off(this._getEvents(), this); if (map.options.fadeAnimation) { L.DomUtil.setOpacity(this._container, 0); } this._map = null; this.fire('close'); map.fire('popupclose', {popup: this}); if (this._source) { this._source.fire('popupclose', {popup: this}); } }, setLatLng: function (latlng) { this._latlng = L.latLng(latlng); this._update(); return this; }, setContent: function (content) { this._content = content; this._update(); return this; }, _getEvents: function () { var events = { viewreset: this._updatePosition }; if (this._animated) { events.zoomanim = this._zoomAnimation; } if ('closeOnClick' in this.options ? this.options.closeOnClick : this._map.options.closePopupOnClick) { events.preclick = this._close; } if (this.options.keepInView) { events.moveend = this._adjustPan; } return events; }, _close: function () { if (this._map) { this._map.closePopup(this); } }, _initLayout: function () { var prefix = 'leaflet-popup', containerClass = prefix + ' ' + this.options.className + ' leaflet-zoom-' + (this._animated ? 
'animated' : 'hide'), container = this._container = L.DomUtil.create('div', containerClass), closeButton; if (this.options.closeButton) { closeButton = this._closeButton = L.DomUtil.create('a', prefix + '-close-button', container); closeButton.href = '#close'; closeButton.innerHTML = '&#215;'; L.DomEvent.disableClickPropagation(closeButton); L.DomEvent.on(closeButton, 'click', this._onCloseButtonClick, this); } var wrapper = this._wrapper = L.DomUtil.create('div', prefix + '-content-wrapper', container); L.DomEvent.disableClickPropagation(wrapper); this._contentNode = L.DomUtil.create('div', prefix + '-content', wrapper); L.DomEvent.on(this._contentNode, 'mousewheel', L.DomEvent.stopPropagation); L.DomEvent.on(wrapper, 'contextmenu', L.DomEvent.stopPropagation); this._tipContainer = L.DomUtil.create('div', prefix + '-tip-container', container); this._tip = L.DomUtil.create('div', prefix + '-tip', this._tipContainer); }, _update: function () { if (!this._map) { return; } this._container.style.visibility = 'hidden'; this._updateContent(); this._updateLayout(); this._updatePosition(); this._container.style.visibility = ''; this._adjustPan(); }, _updateContent: function () { if (!this._content) { return; } if (typeof this._content === 'string') { this._contentNode.innerHTML = this._content; } else { while (this._contentNode.hasChildNodes()) { this._contentNode.removeChild(this._contentNode.firstChild); } this._contentNode.appendChild(this._content); } this.fire('contentupdate'); }, _updateLayout: function () { var container = this._contentNode, style = container.style; style.width = ''; style.whiteSpace = 'nowrap'; var width = container.offsetWidth; width = Math.min(width, this.options.maxWidth); width = Math.max(width, this.options.minWidth); style.width = (width + 1) + 'px'; style.whiteSpace = ''; style.height = ''; var height = container.offsetHeight, maxHeight = this.options.maxHeight, scrolledClass = 'leaflet-popup-scrolled'; if (maxHeight && height > maxHeight) { style.height = maxHeight + 'px'; L.DomUtil.addClass(container, scrolledClass); } else { L.DomUtil.removeClass(container, scrolledClass); } this._containerWidth = this._container.offsetWidth; }, _updatePosition: function () { if (!this._map) { return; } var pos = this._map.latLngToLayerPoint(this._latlng), animated = this._animated, offset = L.point(this.options.offset); if (animated) { L.DomUtil.setPosition(this._container, pos); } this._containerBottom = -offset.y - (animated ? 0 : pos.y); this._containerLeft = -Math.round(this._containerWidth / 2) + offset.x + (animated ? 
0 : pos.x); // bottom position the popup in case the height of the popup changes (images loading etc) this._container.style.bottom = this._containerBottom + 'px'; this._container.style.left = this._containerLeft + 'px'; }, _zoomAnimation: function (opt) { var pos = this._map._latLngToNewLayerPoint(this._latlng, opt.zoom, opt.center); L.DomUtil.setPosition(this._container, pos); }, _adjustPan: function () { if (!this.options.autoPan) { return; } var map = this._map, containerHeight = this._container.offsetHeight, containerWidth = this._containerWidth, layerPos = new L.Point(this._containerLeft, -containerHeight - this._containerBottom); if (this._animated) { layerPos._add(L.DomUtil.getPosition(this._container)); } var containerPos = map.layerPointToContainerPoint(layerPos), padding = L.point(this.options.autoPanPadding), size = map.getSize(), dx = 0, dy = 0; if (containerPos.x + containerWidth > size.x) { // right dx = containerPos.x + containerWidth - size.x + padding.x; } if (containerPos.x - dx < 0) { // left dx = containerPos.x - padding.x; } if (containerPos.y + containerHeight > size.y) { // bottom dy = containerPos.y + containerHeight - size.y + padding.y; } if (containerPos.y - dy < 0) { // top dy = containerPos.y - padding.y; } if (dx || dy) { map .fire('autopanstart') .panBy([dx, dy]); } }, _onCloseButtonClick: function (e) { this._close(); L.DomEvent.stop(e); } }); L.popup = function (options, source) { return new L.Popup(options, source); }; L.Map.include({ openPopup: function (popup, latlng, options) { // (Popup) or (String || HTMLElement, LatLng[, Object]) this.closePopup(); if (!(popup instanceof L.Popup)) { var content = popup; popup = new L.Popup(options) .setLatLng(latlng) .setContent(content); } popup._isOpen = true; this._popup = popup; return this.addLayer(popup); }, closePopup: function (popup) { if (!popup || popup === this._popup) { popup = this._popup; this._popup = null; } if (popup) { this.removeLayer(popup); popup._isOpen = false; } return this; } }); /* * Popup extension to L.Marker, adding popup-related methods. 
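 *
 * A minimal usage sketch (coordinates, content and the existing `map` instance are placeholders):
 *
 *   L.marker([51.5, -0.09]).addTo(map)
 *       .bindPopup('<b>Hello!</b>')
 *       .openPopup();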
*/ L.Marker.include({ openPopup: function () { if (this._popup && this._map && !this._map.hasLayer(this._popup)) { this._popup.setLatLng(this._latlng); this._map.openPopup(this._popup); } return this; }, closePopup: function () { if (this._popup) { this._popup._close(); } return this; }, togglePopup: function () { if (this._popup) { if (this._popup._isOpen) { this.closePopup(); } else { this.openPopup(); } } return this; }, bindPopup: function (content, options) { var anchor = L.point(this.options.icon.options.popupAnchor || [0, 0]); anchor = anchor.add(L.Popup.prototype.options.offset); if (options && options.offset) { anchor = anchor.add(options.offset); } options = L.extend({offset: anchor}, options); if (!this._popup) { this .on('click', this.togglePopup, this) .on('remove', this.closePopup, this) .on('move', this._movePopup, this); } if (content instanceof L.Popup) { L.setOptions(content, options); this._popup = content; } else { this._popup = new L.Popup(options, this) .setContent(content); } return this; }, setPopupContent: function (content) { if (this._popup) { this._popup.setContent(content); } return this; }, unbindPopup: function () { if (this._popup) { this._popup = null; this .off('click', this.togglePopup) .off('remove', this.closePopup) .off('move', this._movePopup); } return this; }, _movePopup: function (e) { this._popup.setLatLng(e.latlng); } }); /* * L.LayerGroup is a class to combine several layers into one so that * you can manipulate the group (e.g. add/remove it) as one layer. */ L.LayerGroup = L.Class.extend({ initialize: function (layers) { this._layers = {}; var i, len; if (layers) { for (i = 0, len = layers.length; i < len; i++) { this.addLayer(layers[i]); } } }, addLayer: function (layer) { var id = this.getLayerId(layer); this._layers[id] = layer; if (this._map) { this._map.addLayer(layer); } return this; }, removeLayer: function (layer) { var id = layer in this._layers ? layer : this.getLayerId(layer); if (this._map && this._layers[id]) { this._map.removeLayer(this._layers[id]); } delete this._layers[id]; return this; }, hasLayer: function (layer) { if (!layer) { return false; } return (layer in this._layers || this.getLayerId(layer) in this._layers); }, clearLayers: function () { this.eachLayer(this.removeLayer, this); return this; }, invoke: function (methodName) { var args = Array.prototype.slice.call(arguments, 1), i, layer; for (i in this._layers) { layer = this._layers[i]; if (layer[methodName]) { layer[methodName].apply(layer, args); } } return this; }, onAdd: function (map) { this._map = map; this.eachLayer(map.addLayer, map); }, onRemove: function (map) { this.eachLayer(map.removeLayer, map); this._map = null; }, addTo: function (map) { map.addLayer(this); return this; }, eachLayer: function (method, context) { for (var i in this._layers) { method.call(context, this._layers[i]); } return this; }, getLayer: function (id) { return this._layers[id]; }, getLayers: function () { var layers = []; for (var i in this._layers) { layers.push(this._layers[i]); } return layers; }, setZIndex: function (zIndex) { return this.invoke('setZIndex', zIndex); }, getLayerId: function (layer) { return L.stamp(layer); } }); L.layerGroup = function (layers) { return new L.LayerGroup(layers); }; /* * L.FeatureGroup extends L.LayerGroup by introducing mouse events and additional methods * shared between a group of interactive layers (like vectors or markers). 
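 *
 * A minimal usage sketch (coordinates and the existing `map` instance are placeholders):
 *
 *   var group = L.featureGroup([L.marker([51.5, -0.09]), L.marker([51.51, -0.1])])
 *       .bindPopup('Part of the group')
 *       .addTo(map);
 *   map.fitBounds(group.getBounds());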
*/ L.FeatureGroup = L.LayerGroup.extend({ includes: L.Mixin.Events, statics: { EVENTS: 'click dblclick mouseover mouseout mousemove contextmenu popupopen popupclose' }, addLayer: function (layer) { if (this.hasLayer(layer)) { return this; } layer.on(L.FeatureGroup.EVENTS, this._propagateEvent, this); L.LayerGroup.prototype.addLayer.call(this, layer); if (this._popupContent && layer.bindPopup) { layer.bindPopup(this._popupContent, this._popupOptions); } return this.fire('layeradd', {layer: layer}); }, removeLayer: function (layer) { if (layer in this._layers) { layer = this._layers[layer]; } layer.off(L.FeatureGroup.EVENTS, this._propagateEvent, this); L.LayerGroup.prototype.removeLayer.call(this, layer); if (this._popupContent) { this.invoke('unbindPopup'); } return this.fire('layerremove', {layer: layer}); }, bindPopup: function (content, options) { this._popupContent = content; this._popupOptions = options; return this.invoke('bindPopup', content, options); }, setStyle: function (style) { return this.invoke('setStyle', style); }, bringToFront: function () { return this.invoke('bringToFront'); }, bringToBack: function () { return this.invoke('bringToBack'); }, getBounds: function () { var bounds = new L.LatLngBounds(); this.eachLayer(function (layer) { bounds.extend(layer instanceof L.Marker ? layer.getLatLng() : layer.getBounds()); }); return bounds; }, _propagateEvent: function (e) { if (!e.layer) { e.layer = e.target; } e.target = this; this.fire(e.type, e); } }); L.featureGroup = function (layers) { return new L.FeatureGroup(layers); }; /* * L.Path is a base class for rendering vector paths on a map. Inherited by Polyline, Circle, etc. */ L.Path = L.Class.extend({ includes: [L.Mixin.Events], statics: { // how much to extend the clip area around the map view // (relative to its size, e.g. 0.5 is half the screen in each direction) // set it so that SVG element doesn't exceed 1280px (vectors flicker on dragend if it is) CLIP_PADDING: L.Browser.mobile ? 
Math.max(0, Math.min(0.5, (1280 / Math.max(window.innerWidth, window.innerHeight) - 1) / 2)) : 0.5 }, options: { stroke: true, color: '#0033ff', dashArray: null, weight: 5, opacity: 0.5, fill: false, fillColor: null, //same as color by default fillOpacity: 0.2, clickable: true }, initialize: function (options) { L.setOptions(this, options); }, onAdd: function (map) { this._map = map; if (!this._container) { this._initElements(); this._initEvents(); } this.projectLatlngs(); this._updatePath(); if (this._container) { this._map._pathRoot.appendChild(this._container); } this.fire('add'); map.on({ 'viewreset': this.projectLatlngs, 'moveend': this._updatePath }, this); }, addTo: function (map) { map.addLayer(this); return this; }, onRemove: function (map) { map._pathRoot.removeChild(this._container); // Need to fire remove event before we set _map to null as the event hooks might need the object this.fire('remove'); this._map = null; if (L.Browser.vml) { this._container = null; this._stroke = null; this._fill = null; } map.off({ 'viewreset': this.projectLatlngs, 'moveend': this._updatePath }, this); }, projectLatlngs: function () { // do all projection stuff here }, setStyle: function (style) { L.setOptions(this, style); if (this._container) { this._updateStyle(); } return this; }, redraw: function () { if (this._map) { this.projectLatlngs(); this._updatePath(); } return this; } }); L.Map.include({ _updatePathViewport: function () { var p = L.Path.CLIP_PADDING, size = this.getSize(), panePos = L.DomUtil.getPosition(this._mapPane), min = panePos.multiplyBy(-1)._subtract(size.multiplyBy(p)._round()), max = min.add(size.multiplyBy(1 + p * 2)._round()); this._pathViewport = new L.Bounds(min, max); } }); /* * Extends L.Path with SVG-specific rendering code. */ L.Path.SVG_NS = 'http://www.w3.org/2000/svg'; L.Browser.svg = !!(document.createElementNS && document.createElementNS(L.Path.SVG_NS, 'svg').createSVGRect); L.Path = L.Path.extend({ statics: { SVG: L.Browser.svg }, bringToFront: function () { var root = this._map._pathRoot, path = this._container; if (path && root.lastChild !== path) { root.appendChild(path); } return this; }, bringToBack: function () { var root = this._map._pathRoot, path = this._container, first = root.firstChild; if (path && first !== path) { root.insertBefore(path, first); } return this; }, getPathString: function () { // form path string here }, _createElement: function (name) { return document.createElementNS(L.Path.SVG_NS, name); }, _initElements: function () { this._map._initPathRoot(); this._initPath(); this._initStyle(); }, _initPath: function () { this._container = this._createElement('g'); this._path = this._createElement('path'); this._container.appendChild(this._path); }, _initStyle: function () { if (this.options.stroke) { this._path.setAttribute('stroke-linejoin', 'round'); this._path.setAttribute('stroke-linecap', 'round'); } if (this.options.fill) { this._path.setAttribute('fill-rule', 'evenodd'); } if (this.options.pointerEvents) { this._path.setAttribute('pointer-events', this.options.pointerEvents); } if (!this.options.clickable && !this.options.pointerEvents) { this._path.setAttribute('pointer-events', 'none'); } this._updateStyle(); }, _updateStyle: function () { if (this.options.stroke) { this._path.setAttribute('stroke', this.options.color); this._path.setAttribute('stroke-opacity', this.options.opacity); this._path.setAttribute('stroke-width', this.options.weight); if (this.options.dashArray) { this._path.setAttribute('stroke-dasharray', 
this.options.dashArray); } else { this._path.removeAttribute('stroke-dasharray'); } } else { this._path.setAttribute('stroke', 'none'); } if (this.options.fill) { this._path.setAttribute('fill', this.options.fillColor || this.options.color); this._path.setAttribute('fill-opacity', this.options.fillOpacity); } else { this._path.setAttribute('fill', 'none'); } }, _updatePath: function () { var str = this.getPathString(); if (!str) { // fix webkit empty string parsing bug str = 'M0 0'; } this._path.setAttribute('d', str); }, // TODO remove duplication with L.Map _initEvents: function () { if (this.options.clickable) { if (L.Browser.svg || !L.Browser.vml) { this._path.setAttribute('class', 'leaflet-clickable'); } L.DomEvent.on(this._container, 'click', this._onMouseClick, this); var events = ['dblclick', 'mousedown', 'mouseover', 'mouseout', 'mousemove', 'contextmenu']; for (var i = 0; i < events.length; i++) { L.DomEvent.on(this._container, events[i], this._fireMouseEvent, this); } } }, _onMouseClick: function (e) { if (this._map.dragging && this._map.dragging.moved()) { return; } this._fireMouseEvent(e); }, _fireMouseEvent: function (e) { if (!this.hasEventListeners(e.type)) { return; } var map = this._map, containerPoint = map.mouseEventToContainerPoint(e), layerPoint = map.containerPointToLayerPoint(containerPoint), latlng = map.layerPointToLatLng(layerPoint); this.fire(e.type, { latlng: latlng, layerPoint: layerPoint, containerPoint: containerPoint, originalEvent: e }); if (e.type === 'contextmenu') { L.DomEvent.preventDefault(e); } if (e.type !== 'mousemove') { L.DomEvent.stopPropagation(e); } } }); L.Map.include({ _initPathRoot: function () { if (!this._pathRoot) { this._pathRoot = L.Path.prototype._createElement('svg'); this._panes.overlayPane.appendChild(this._pathRoot); if (this.options.zoomAnimation && L.Browser.any3d) { this._pathRoot.setAttribute('class', ' leaflet-zoom-animated'); this.on({ 'zoomanim': this._animatePathZoom, 'zoomend': this._endPathZoom }); } else { this._pathRoot.setAttribute('class', ' leaflet-zoom-hide'); } this.on('moveend', this._updateSvgViewport); this._updateSvgViewport(); } }, _animatePathZoom: function (e) { var scale = this.getZoomScale(e.zoom), offset = this._getCenterOffset(e.center)._multiplyBy(-scale)._add(this._pathViewport.min); this._pathRoot.style[L.DomUtil.TRANSFORM] = L.DomUtil.getTranslateString(offset) + ' scale(' + scale + ') '; this._pathZooming = true; }, _endPathZoom: function () { this._pathZooming = false; }, _updateSvgViewport: function () { if (this._pathZooming) { // Do not update SVGs while a zoom animation is going on otherwise the animation will break. // When the zoom animation ends we will be updated again anyway // This fixes the case where you do a momentum move and zoom while the move is still ongoing. return; } this._updatePathViewport(); var vp = this._pathViewport, min = vp.min, max = vp.max, width = max.x - min.x, height = max.y - min.y, root = this._pathRoot, pane = this._panes.overlayPane; // Hack to make flicker on drag end on mobile webkit less irritating if (L.Browser.mobileWebkit) { pane.removeChild(root); } L.DomUtil.setPosition(root, min); root.setAttribute('width', width); root.setAttribute('height', height); root.setAttribute('viewBox', [min.x, min.y, width, height].join(' ')); if (L.Browser.mobileWebkit) { pane.appendChild(root); } } }); /* * Popup extension to L.Path (polylines, polygons, circles), adding popup-related methods. 
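 *
 * A minimal usage sketch (coordinates, radius and the existing `map` instance are placeholders):
 *
 *   L.circle([51.508, -0.11], 500, {color: 'red'}).addTo(map)
 *       .bindPopup('I am a circle.');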
*/ L.Path.include({ bindPopup: function (content, options) { if (content instanceof L.Popup) { this._popup = content; } else { if (!this._popup || options) { this._popup = new L.Popup(options, this); } this._popup.setContent(content); } if (!this._popupHandlersAdded) { this .on('click', this._openPopup, this) .on('remove', this.closePopup, this); this._popupHandlersAdded = true; } return this; }, unbindPopup: function () { if (this._popup) { this._popup = null; this .off('click', this._openPopup) .off('remove', this.closePopup); this._popupHandlersAdded = false; } return this; }, openPopup: function (latlng) { if (this._popup) { // open the popup from one of the path's points if not specified latlng = latlng || this._latlng || this._latlngs[Math.floor(this._latlngs.length / 2)]; this._openPopup({latlng: latlng}); } return this; }, closePopup: function () { if (this._popup) { this._popup._close(); } return this; }, _openPopup: function (e) { this._popup.setLatLng(e.latlng); this._map.openPopup(this._popup); } }); /* * Vector rendering for IE6-8 through VML. * Thanks to Dmitry Baranovsky and his Raphael library for inspiration! */ L.Browser.vml = !L.Browser.svg && (function () { try { var div = document.createElement('div'); div.innerHTML = '<v:shape adj="1"/>'; var shape = div.firstChild; shape.style.behavior = 'url(#default#VML)'; return shape && (typeof shape.adj === 'object'); } catch (e) { return false; } }()); L.Path = L.Browser.svg || !L.Browser.vml ? L.Path : L.Path.extend({ statics: { VML: true, CLIP_PADDING: 0.02 }, _createElement: (function () { try { document.namespaces.add('lvml', 'urn:schemas-microsoft-com:vml'); return function (name) { return document.createElement('<lvml:' + name + ' class="lvml">'); }; } catch (e) { return function (name) { return document.createElement( '<' + name + ' xmlns="urn:schemas-microsoft.com:vml" class="lvml">'); }; } }()), _initPath: function () { var container = this._container = this._createElement('shape'); L.DomUtil.addClass(container, 'leaflet-vml-shape'); if (this.options.clickable) { L.DomUtil.addClass(container, 'leaflet-clickable'); } container.coordsize = '1 1'; this._path = this._createElement('path'); container.appendChild(this._path); this._map._pathRoot.appendChild(container); }, _initStyle: function () { this._updateStyle(); }, _updateStyle: function () { var stroke = this._stroke, fill = this._fill, options = this.options, container = this._container; container.stroked = options.stroke; container.filled = options.fill; if (options.stroke) { if (!stroke) { stroke = this._stroke = this._createElement('stroke'); stroke.endcap = 'round'; container.appendChild(stroke); } stroke.weight = options.weight + 'px'; stroke.color = options.color; stroke.opacity = options.opacity; if (options.dashArray) { stroke.dashStyle = options.dashArray instanceof Array ? 
options.dashArray.join(' ') : options.dashArray.replace(/( *, *)/g, ' '); } else { stroke.dashStyle = ''; } } else if (stroke) { container.removeChild(stroke); this._stroke = null; } if (options.fill) { if (!fill) { fill = this._fill = this._createElement('fill'); container.appendChild(fill); } fill.color = options.fillColor || options.color; fill.opacity = options.fillOpacity; } else if (fill) { container.removeChild(fill); this._fill = null; } }, _updatePath: function () { var style = this._container.style; style.display = 'none'; this._path.v = this.getPathString() + ' '; // the space fixes IE empty path string bug style.display = ''; } }); L.Map.include(L.Browser.svg || !L.Browser.vml ? {} : { _initPathRoot: function () { if (this._pathRoot) { return; } var root = this._pathRoot = document.createElement('div'); root.className = 'leaflet-vml-container'; this._panes.overlayPane.appendChild(root); this.on('moveend', this._updatePathViewport); this._updatePathViewport(); } }); /* * Vector rendering for all browsers that support canvas. */ L.Browser.canvas = (function () { return !!document.createElement('canvas').getContext; }()); L.Path = (L.Path.SVG && !window.L_PREFER_CANVAS) || !L.Browser.canvas ? L.Path : L.Path.extend({ statics: { //CLIP_PADDING: 0.02, // not sure if there's a need to set it to a small value CANVAS: true, SVG: false }, redraw: function () { if (this._map) { this.projectLatlngs(); this._requestUpdate(); } return this; }, setStyle: function (style) { L.setOptions(this, style); if (this._map) { this._updateStyle(); this._requestUpdate(); } return this; }, onRemove: function (map) { map .off('viewreset', this.projectLatlngs, this) .off('moveend', this._updatePath, this); if (this.options.clickable) { this._map.off('click', this._onClick, this); this._map.off('mousemove', this._onMouseMove, this); } this._requestUpdate(); this._map = null; }, _requestUpdate: function () { if (this._map && !L.Path._updateRequest) { L.Path._updateRequest = L.Util.requestAnimFrame(this._fireMapMoveEnd, this._map); } }, _fireMapMoveEnd: function () { L.Path._updateRequest = null; this.fire('moveend'); }, _initElements: function () { this._map._initPathRoot(); this._ctx = this._map._canvasCtx; }, _updateStyle: function () { var options = this.options; if (options.stroke) { this._ctx.lineWidth = options.weight; this._ctx.strokeStyle = options.color; } if (options.fill) { this._ctx.fillStyle = options.fillColor || options.color; } }, _drawPath: function () { var i, j, len, len2, point, drawMethod; this._ctx.beginPath(); for (i = 0, len = this._parts.length; i < len; i++) { for (j = 0, len2 = this._parts[i].length; j < len2; j++) { point = this._parts[i][j]; drawMethod = (j === 0 ? 
'move' : 'line') + 'To'; this._ctx[drawMethod](point.x, point.y); } // TODO refactor ugly hack if (this instanceof L.Polygon) { this._ctx.closePath(); } } }, _checkIfEmpty: function () { return !this._parts.length; }, _updatePath: function () { if (this._checkIfEmpty()) { return; } var ctx = this._ctx, options = this.options; this._drawPath(); ctx.save(); this._updateStyle(); if (options.fill) { ctx.globalAlpha = options.fillOpacity; ctx.fill(); } if (options.stroke) { ctx.globalAlpha = options.opacity; ctx.stroke(); } ctx.restore(); // TODO optimization: 1 fill/stroke for all features with equal style instead of 1 for each feature }, _initEvents: function () { if (this.options.clickable) { // TODO dblclick this._map.on('mousemove', this._onMouseMove, this); this._map.on('click', this._onClick, this); } }, _onClick: function (e) { if (this._containsPoint(e.layerPoint)) { this.fire('click', e); } }, _onMouseMove: function (e) { if (!this._map || this._map._animatingZoom) { return; } // TODO don't do on each move if (this._containsPoint(e.layerPoint)) { this._ctx.canvas.style.cursor = 'pointer'; this._mouseInside = true; this.fire('mouseover', e); } else if (this._mouseInside) { this._ctx.canvas.style.cursor = ''; this._mouseInside = false; this.fire('mouseout', e); } } }); L.Map.include((L.Path.SVG && !window.L_PREFER_CANVAS) || !L.Browser.canvas ? {} : { _initPathRoot: function () { var root = this._pathRoot, ctx; if (!root) { root = this._pathRoot = document.createElement('canvas'); root.style.position = 'absolute'; ctx = this._canvasCtx = root.getContext('2d'); ctx.lineCap = 'round'; ctx.lineJoin = 'round'; this._panes.overlayPane.appendChild(root); if (this.options.zoomAnimation) { this._pathRoot.className = 'leaflet-zoom-animated'; this.on('zoomanim', this._animatePathZoom); this.on('zoomend', this._endPathZoom); } this.on('moveend', this._updateCanvasViewport); this._updateCanvasViewport(); } }, _updateCanvasViewport: function () { // don't redraw while zooming. See _updateSvgViewport for more details if (this._pathZooming) { return; } this._updatePathViewport(); var vp = this._pathViewport, min = vp.min, size = vp.max.subtract(min), root = this._pathRoot; //TODO check if this works properly on mobile webkit L.DomUtil.setPosition(root, min); root.width = size.x; root.height = size.y; root.getContext('2d').translate(-min.x, -min.y); } }); /* * L.LineUtil contains different utility functions for line segments * and polylines (clipping, simplification, distances, etc.) */ /*jshint bitwise:false */ // allow bitwise oprations for this file L.LineUtil = { // Simplify polyline with vertex reduction and Douglas-Peucker simplification. // Improves rendering performance dramatically by lessening the number of points to draw. 
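	// A rough usage sketch (the layer-space points below are made up):
	//   var pts = [new L.Point(0, 0), new L.Point(1, 1), new L.Point(2, 0), new L.Point(50, 60)];
	//   L.LineUtil.simplify(pts, 3); // returns a new, reduced array of points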
simplify: function (/*Point[]*/ points, /*Number*/ tolerance) { if (!tolerance || !points.length) { return points.slice(); } var sqTolerance = tolerance * tolerance; // stage 1: vertex reduction points = this._reducePoints(points, sqTolerance); // stage 2: Douglas-Peucker simplification points = this._simplifyDP(points, sqTolerance); return points; }, // distance from a point to a segment between two points pointToSegmentDistance: function (/*Point*/ p, /*Point*/ p1, /*Point*/ p2) { return Math.sqrt(this._sqClosestPointOnSegment(p, p1, p2, true)); }, closestPointOnSegment: function (/*Point*/ p, /*Point*/ p1, /*Point*/ p2) { return this._sqClosestPointOnSegment(p, p1, p2); }, // Douglas-Peucker simplification, see http://en.wikipedia.org/wiki/Douglas-Peucker_algorithm _simplifyDP: function (points, sqTolerance) { var len = points.length, ArrayConstructor = typeof Uint8Array !== undefined + '' ? Uint8Array : Array, markers = new ArrayConstructor(len); markers[0] = markers[len - 1] = 1; this._simplifyDPStep(points, markers, sqTolerance, 0, len - 1); var i, newPoints = []; for (i = 0; i < len; i++) { if (markers[i]) { newPoints.push(points[i]); } } return newPoints; }, _simplifyDPStep: function (points, markers, sqTolerance, first, last) { var maxSqDist = 0, index, i, sqDist; for (i = first + 1; i <= last - 1; i++) { sqDist = this._sqClosestPointOnSegment(points[i], points[first], points[last], true); if (sqDist > maxSqDist) { index = i; maxSqDist = sqDist; } } if (maxSqDist > sqTolerance) { markers[index] = 1; this._simplifyDPStep(points, markers, sqTolerance, first, index); this._simplifyDPStep(points, markers, sqTolerance, index, last); } }, // reduce points that are too close to each other to a single point _reducePoints: function (points, sqTolerance) { var reducedPoints = [points[0]]; for (var i = 1, prev = 0, len = points.length; i < len; i++) { if (this._sqDist(points[i], points[prev]) > sqTolerance) { reducedPoints.push(points[i]); prev = i; } } if (prev < len - 1) { reducedPoints.push(points[len - 1]); } return reducedPoints; }, // Cohen-Sutherland line clipping algorithm. // Used to avoid rendering parts of a polyline that are not currently visible. clipSegment: function (a, b, bounds, useLastCode) { var codeA = useLastCode ? 
this._lastCode : this._getBitCode(a, bounds), codeB = this._getBitCode(b, bounds), codeOut, p, newCode; // save 2nd code to avoid calculating it on the next segment this._lastCode = codeB; while (true) { // if a,b is inside the clip window (trivial accept) if (!(codeA | codeB)) { return [a, b]; // if a,b is outside the clip window (trivial reject) } else if (codeA & codeB) { return false; // other cases } else { codeOut = codeA || codeB; p = this._getEdgeIntersection(a, b, codeOut, bounds); newCode = this._getBitCode(p, bounds); if (codeOut === codeA) { a = p; codeA = newCode; } else { b = p; codeB = newCode; } } } }, _getEdgeIntersection: function (a, b, code, bounds) { var dx = b.x - a.x, dy = b.y - a.y, min = bounds.min, max = bounds.max; if (code & 8) { // top return new L.Point(a.x + dx * (max.y - a.y) / dy, max.y); } else if (code & 4) { // bottom return new L.Point(a.x + dx * (min.y - a.y) / dy, min.y); } else if (code & 2) { // right return new L.Point(max.x, a.y + dy * (max.x - a.x) / dx); } else if (code & 1) { // left return new L.Point(min.x, a.y + dy * (min.x - a.x) / dx); } }, _getBitCode: function (/*Point*/ p, bounds) { var code = 0; if (p.x < bounds.min.x) { // left code |= 1; } else if (p.x > bounds.max.x) { // right code |= 2; } if (p.y < bounds.min.y) { // bottom code |= 4; } else if (p.y > bounds.max.y) { // top code |= 8; } return code; }, // square distance (to avoid unnecessary Math.sqrt calls) _sqDist: function (p1, p2) { var dx = p2.x - p1.x, dy = p2.y - p1.y; return dx * dx + dy * dy; }, // return closest point on segment or distance to that point _sqClosestPointOnSegment: function (p, p1, p2, sqDist) { var x = p1.x, y = p1.y, dx = p2.x - x, dy = p2.y - y, dot = dx * dx + dy * dy, t; if (dot > 0) { t = ((p.x - x) * dx + (p.y - y) * dy) / dot; if (t > 1) { x = p2.x; y = p2.y; } else if (t > 0) { x += dx * t; y += dy * t; } } dx = p.x - x; dy = p.y - y; return sqDist ? dx * dx + dy * dy : new L.Point(x, y); } }; /* * L.Polyline is used to display polylines on a map. 
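 *
 * A minimal usage sketch (coordinates and the existing `map` instance are placeholders):
 *
 *   var line = L.polyline([[45.51, -122.68], [37.77, -122.43], [34.04, -118.2]], {color: 'red'}).addTo(map);
 *   map.fitBounds(line.getBounds());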
*/ L.Polyline = L.Path.extend({ initialize: function (latlngs, options) { L.Path.prototype.initialize.call(this, options); this._latlngs = this._convertLatLngs(latlngs); }, options: { // how much to simplify the polyline on each zoom level // more = better performance and smoother look, less = more accurate smoothFactor: 1.0, noClip: false }, projectLatlngs: function () { this._originalPoints = []; for (var i = 0, len = this._latlngs.length; i < len; i++) { this._originalPoints[i] = this._map.latLngToLayerPoint(this._latlngs[i]); } }, getPathString: function () { for (var i = 0, len = this._parts.length, str = ''; i < len; i++) { str += this._getPathPartStr(this._parts[i]); } return str; }, getLatLngs: function () { return this._latlngs; }, setLatLngs: function (latlngs) { this._latlngs = this._convertLatLngs(latlngs); return this.redraw(); }, addLatLng: function (latlng) { this._latlngs.push(L.latLng(latlng)); return this.redraw(); }, spliceLatLngs: function () { // (Number index, Number howMany) var removed = [].splice.apply(this._latlngs, arguments); this._convertLatLngs(this._latlngs, true); this.redraw(); return removed; }, closestLayerPoint: function (p) { var minDistance = Infinity, parts = this._parts, p1, p2, minPoint = null; for (var j = 0, jLen = parts.length; j < jLen; j++) { var points = parts[j]; for (var i = 1, len = points.length; i < len; i++) { p1 = points[i - 1]; p2 = points[i]; var sqDist = L.LineUtil._sqClosestPointOnSegment(p, p1, p2, true); if (sqDist < minDistance) { minDistance = sqDist; minPoint = L.LineUtil._sqClosestPointOnSegment(p, p1, p2); } } } if (minPoint) { minPoint.distance = Math.sqrt(minDistance); } return minPoint; }, getBounds: function () { return new L.LatLngBounds(this.getLatLngs()); }, _convertLatLngs: function (latlngs, overwrite) { var i, len, target = overwrite ? latlngs : []; for (i = 0, len = latlngs.length; i < len; i++) { if (L.Util.isArray(latlngs[i]) && typeof latlngs[i][0] !== 'number') { return; } target[i] = L.latLng(latlngs[i]); } return target; }, _initEvents: function () { L.Path.prototype._initEvents.call(this); }, _getPathPartStr: function (points) { var round = L.Path.VML; for (var j = 0, len2 = points.length, str = '', p; j < len2; j++) { p = points[j]; if (round) { p._round(); } str += (j ? 'L' : 'M') + p.x + ' ' + p.y; } return str; }, _clipPoints: function () { var points = this._originalPoints, len = points.length, i, k, segment; if (this.options.noClip) { this._parts = [points]; return; } this._parts = []; var parts = this._parts, vp = this._map._pathViewport, lu = L.LineUtil; for (i = 0, k = 0; i < len - 1; i++) { segment = lu.clipSegment(points[i], points[i + 1], vp, i); if (!segment) { continue; } parts[k] = parts[k] || []; parts[k].push(segment[0]); // if segment goes out of screen, or it's the last one, it's the end of the line part if ((segment[1] !== points[i + 1]) || (i === len - 2)) { parts[k].push(segment[1]); k++; } } }, // simplify each clipped part of the polyline _simplifyPoints: function () { var parts = this._parts, lu = L.LineUtil; for (var i = 0, len = parts.length; i < len; i++) { parts[i] = lu.simplify(parts[i], this.options.smoothFactor); } }, _updatePath: function () { if (!this._map) { return; } this._clipPoints(); this._simplifyPoints(); L.Path.prototype._updatePath.call(this); } }); L.polyline = function (latlngs, options) { return new L.Polyline(latlngs, options); }; /* * L.PolyUtil contains utility functions for polygons (clipping, etc.). 
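 *
 * These are internal helpers; a rough sketch of how the renderer calls them
 * (`layerPoints` is assumed to be an array of L.Point in layer space, `viewport` an L.Bounds):
 *
 *   var clipped = L.PolyUtil.clipPolygon(layerPoints, viewport);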
*/ /*jshint bitwise:false */ // allow bitwise operations here L.PolyUtil = {}; /* * Sutherland-Hodgeman polygon clipping algorithm. * Used to avoid rendering parts of a polygon that are not currently visible. */ L.PolyUtil.clipPolygon = function (points, bounds) { var clippedPoints, edges = [1, 4, 2, 8], i, j, k, a, b, len, edge, p, lu = L.LineUtil; for (i = 0, len = points.length; i < len; i++) { points[i]._code = lu._getBitCode(points[i], bounds); } // for each edge (left, bottom, right, top) for (k = 0; k < 4; k++) { edge = edges[k]; clippedPoints = []; for (i = 0, len = points.length, j = len - 1; i < len; j = i++) { a = points[i]; b = points[j]; // if a is inside the clip window if (!(a._code & edge)) { // if b is outside the clip window (a->b goes out of screen) if (b._code & edge) { p = lu._getEdgeIntersection(b, a, edge, bounds); p._code = lu._getBitCode(p, bounds); clippedPoints.push(p); } clippedPoints.push(a); // else if b is inside the clip window (a->b enters the screen) } else if (!(b._code & edge)) { p = lu._getEdgeIntersection(b, a, edge, bounds); p._code = lu._getBitCode(p, bounds); clippedPoints.push(p); } } points = clippedPoints; } return points; }; /* * L.Polygon is used to display polygons on a map. */ L.Polygon = L.Polyline.extend({ options: { fill: true }, initialize: function (latlngs, options) { var i, len, hole; L.Polyline.prototype.initialize.call(this, latlngs, options); if (latlngs && L.Util.isArray(latlngs[0]) && (typeof latlngs[0][0] !== 'number')) { this._latlngs = this._convertLatLngs(latlngs[0]); this._holes = latlngs.slice(1); for (i = 0, len = this._holes.length; i < len; i++) { hole = this._holes[i] = this._convertLatLngs(this._holes[i]); if (hole[0].equals(hole[hole.length - 1])) { hole.pop(); } } } // filter out last point if its equal to the first one latlngs = this._latlngs; if (latlngs.length >= 2 && latlngs[0].equals(latlngs[latlngs.length - 1])) { latlngs.pop(); } }, projectLatlngs: function () { L.Polyline.prototype.projectLatlngs.call(this); // project polygon holes points // TODO move this logic to Polyline to get rid of duplication this._holePoints = []; if (!this._holes) { return; } var i, j, len, len2; for (i = 0, len = this._holes.length; i < len; i++) { this._holePoints[i] = []; for (j = 0, len2 = this._holes[i].length; j < len2; j++) { this._holePoints[i][j] = this._map.latLngToLayerPoint(this._holes[i][j]); } } }, _clipPoints: function () { var points = this._originalPoints, newParts = []; this._parts = [points].concat(this._holePoints); if (this.options.noClip) { return; } for (var i = 0, len = this._parts.length; i < len; i++) { var clipped = L.PolyUtil.clipPolygon(this._parts[i], this._map._pathViewport); if (clipped.length) { newParts.push(clipped); } } this._parts = newParts; }, _getPathPartStr: function (points) { var str = L.Polyline.prototype._getPathPartStr.call(this, points); return str + (L.Browser.svg ? 'z' : 'x'); } }); L.polygon = function (latlngs, options) { return new L.Polygon(latlngs, options); }; /* * Contains L.MultiPolyline and L.MultiPolygon layers. 
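 *
 * A minimal usage sketch (coordinates and the existing `map` instance are placeholders):
 *
 *   L.multiPolyline([
 *       [[51.50, -0.10], [51.51, -0.12]],
 *       [[51.49, -0.08], [51.48, -0.06]]
 *   ], {color: 'blue'}).addTo(map);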
*/ (function () { function createMulti(Klass) { return L.FeatureGroup.extend({ initialize: function (latlngs, options) { this._layers = {}; this._options = options; this.setLatLngs(latlngs); }, setLatLngs: function (latlngs) { var i = 0, len = latlngs.length; this.eachLayer(function (layer) { if (i < len) { layer.setLatLngs(latlngs[i++]); } else { this.removeLayer(layer); } }, this); while (i < len) { this.addLayer(new Klass(latlngs[i++], this._options)); } return this; } }); } L.MultiPolyline = createMulti(L.Polyline); L.MultiPolygon = createMulti(L.Polygon); L.multiPolyline = function (latlngs, options) { return new L.MultiPolyline(latlngs, options); }; L.multiPolygon = function (latlngs, options) { return new L.MultiPolygon(latlngs, options); }; }()); /* * L.Rectangle extends Polygon and creates a rectangle when passed a LatLngBounds object. */ L.Rectangle = L.Polygon.extend({ initialize: function (latLngBounds, options) { L.Polygon.prototype.initialize.call(this, this._boundsToLatLngs(latLngBounds), options); }, setBounds: function (latLngBounds) { this.setLatLngs(this._boundsToLatLngs(latLngBounds)); }, _boundsToLatLngs: function (latLngBounds) { latLngBounds = L.latLngBounds(latLngBounds); return [ latLngBounds.getSouthWest(), latLngBounds.getNorthWest(), latLngBounds.getNorthEast(), latLngBounds.getSouthEast() ]; } }); L.rectangle = function (latLngBounds, options) { return new L.Rectangle(latLngBounds, options); }; /* * L.Circle is a circle overlay (with a certain radius in meters). */ L.Circle = L.Path.extend({ initialize: function (latlng, radius, options) { L.Path.prototype.initialize.call(this, options); this._latlng = L.latLng(latlng); this._mRadius = radius; }, options: { fill: true }, setLatLng: function (latlng) { this._latlng = L.latLng(latlng); return this.redraw(); }, setRadius: function (radius) { this._mRadius = radius; return this.redraw(); }, projectLatlngs: function () { var lngRadius = this._getLngRadius(), latlng = this._latlng, pointLeft = this._map.latLngToLayerPoint([latlng.lat, latlng.lng - lngRadius]); this._point = this._map.latLngToLayerPoint(latlng); this._radius = Math.max(this._point.x - pointLeft.x, 1); }, getBounds: function () { var lngRadius = this._getLngRadius(), latRadius = (this._mRadius / 40075017) * 360, latlng = this._latlng; return new L.LatLngBounds( [latlng.lat - latRadius, latlng.lng - lngRadius], [latlng.lat + latRadius, latlng.lng + lngRadius]); }, getLatLng: function () { return this._latlng; }, getPathString: function () { var p = this._point, r = this._radius; if (this._checkIfEmpty()) { return ''; } if (L.Browser.svg) { return 'M' + p.x + ',' + (p.y - r) + 'A' + r + ',' + r + ',0,1,1,' + (p.x - 0.1) + ',' + (p.y - r) + ' z'; } else { p._round(); r = Math.round(r); return 'AL ' + p.x + ',' + p.y + ' ' + r + ',' + r + ' 0,' + (65535 * 360); } }, getRadius: function () { return this._mRadius; }, // TODO Earth hardcoded, move into projection code! _getLatRadius: function () { return (this._mRadius / 40075017) * 360; }, _getLngRadius: function () { return this._getLatRadius() / Math.cos(L.LatLng.DEG_TO_RAD * this._latlng.lat); }, _checkIfEmpty: function () { if (!this._map) { return false; } var vp = this._map._pathViewport, r = this._radius, p = this._point; return p.x - r > vp.max.x || p.y - r > vp.max.y || p.x + r < vp.min.x || p.y + r < vp.min.y; } }); L.circle = function (latlng, radius, options) { return new L.Circle(latlng, radius, options); }; /* * L.CircleMarker is a circle overlay with a permanent pixel radius. 
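 *
 * A minimal usage sketch (coordinates and the existing `map` instance are placeholders):
 *
 *   L.circleMarker([51.508, -0.11], {radius: 8, color: '#f03', fillOpacity: 0.5}).addTo(map);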
*/ L.CircleMarker = L.Circle.extend({ options: { radius: 10, weight: 2 }, initialize: function (latlng, options) { L.Circle.prototype.initialize.call(this, latlng, null, options); this._radius = this.options.radius; }, projectLatlngs: function () { this._point = this._map.latLngToLayerPoint(this._latlng); }, _updateStyle : function () { L.Circle.prototype._updateStyle.call(this); this.setRadius(this.options.radius); }, setRadius: function (radius) { this.options.radius = this._radius = radius; return this.redraw(); } }); L.circleMarker = function (latlng, options) { return new L.CircleMarker(latlng, options); }; /* * Extends L.Polyline to be able to manually detect clicks on Canvas-rendered polylines. */ L.Polyline.include(!L.Path.CANVAS ? {} : { _containsPoint: function (p, closed) { var i, j, k, len, len2, dist, part, w = this.options.weight / 2; if (L.Browser.touch) { w += 10; // polyline click tolerance on touch devices } for (i = 0, len = this._parts.length; i < len; i++) { part = this._parts[i]; for (j = 0, len2 = part.length, k = len2 - 1; j < len2; k = j++) { if (!closed && (j === 0)) { continue; } dist = L.LineUtil.pointToSegmentDistance(p, part[k], part[j]); if (dist <= w) { return true; } } } return false; } }); /* * Extends L.Polygon to be able to manually detect clicks on Canvas-rendered polygons. */ L.Polygon.include(!L.Path.CANVAS ? {} : { _containsPoint: function (p) { var inside = false, part, p1, p2, i, j, k, len, len2; // TODO optimization: check if within bounds first if (L.Polyline.prototype._containsPoint.call(this, p, true)) { // click on polygon border return true; } // ray casting algorithm for detecting if point is in polygon for (i = 0, len = this._parts.length; i < len; i++) { part = this._parts[i]; for (j = 0, len2 = part.length, k = len2 - 1; j < len2; k = j++) { p1 = part[j]; p2 = part[k]; if (((p1.y > p.y) !== (p2.y > p.y)) && (p.x < (p2.x - p1.x) * (p.y - p1.y) / (p2.y - p1.y) + p1.x)) { inside = !inside; } } } return inside; } }); /* * Extends L.Circle with Canvas-specific code. */ L.Circle.include(!L.Path.CANVAS ? {} : { _drawPath: function () { var p = this._point; this._ctx.beginPath(); this._ctx.arc(p.x, p.y, this._radius, 0, Math.PI * 2, false); }, _containsPoint: function (p) { var center = this._point, w2 = this.options.stroke ? this.options.weight / 2 : 0; return (p.distanceTo(center) <= this._radius + w2); } }); /* * CircleMarker canvas specific drawing parts. */ L.CircleMarker.include(!L.Path.CANVAS ? {} : { _updateStyle: function () { L.Path.prototype._updateStyle.call(this); } }); /* * L.GeoJSON turns any GeoJSON data into a Leaflet layer. */ L.GeoJSON = L.FeatureGroup.extend({ initialize: function (geojson, options) { L.setOptions(this, options); this._layers = {}; if (geojson) { this.addData(geojson); } }, addData: function (geojson) { var features = L.Util.isArray(geojson) ? 
geojson : geojson.features, i, len; if (features) { for (i = 0, len = features.length; i < len; i++) { // Only add this if geometry or geometries are set and not null if (features[i].geometries || features[i].geometry || features[i].features) { this.addData(features[i]); } } return this; } var options = this.options; if (options.filter && !options.filter(geojson)) { return; } var layer = L.GeoJSON.geometryToLayer(geojson, options.pointToLayer, options.coordsToLatLng); layer.feature = L.GeoJSON.asFeature(geojson); layer.defaultOptions = layer.options; this.resetStyle(layer); if (options.onEachFeature) { options.onEachFeature(geojson, layer); } return this.addLayer(layer); }, resetStyle: function (layer) { var style = this.options.style; if (style) { // reset any custom styles L.Util.extend(layer.options, layer.defaultOptions); this._setLayerStyle(layer, style); } }, setStyle: function (style) { this.eachLayer(function (layer) { this._setLayerStyle(layer, style); }, this); }, _setLayerStyle: function (layer, style) { if (typeof style === 'function') { style = style(layer.feature); } if (layer.setStyle) { layer.setStyle(style); } } }); L.extend(L.GeoJSON, { geometryToLayer: function (geojson, pointToLayer, coordsToLatLng) { var geometry = geojson.type === 'Feature' ? geojson.geometry : geojson, coords = geometry.coordinates, layers = [], latlng, latlngs, i, len, layer; coordsToLatLng = coordsToLatLng || this.coordsToLatLng; switch (geometry.type) { case 'Point': latlng = coordsToLatLng(coords); return pointToLayer ? pointToLayer(geojson, latlng) : new L.Marker(latlng); case 'MultiPoint': for (i = 0, len = coords.length; i < len; i++) { latlng = coordsToLatLng(coords[i]); layer = pointToLayer ? pointToLayer(geojson, latlng) : new L.Marker(latlng); layers.push(layer); } return new L.FeatureGroup(layers); case 'LineString': latlngs = this.coordsToLatLngs(coords, 0, coordsToLatLng); return new L.Polyline(latlngs); case 'Polygon': latlngs = this.coordsToLatLngs(coords, 1, coordsToLatLng); return new L.Polygon(latlngs); case 'MultiLineString': latlngs = this.coordsToLatLngs(coords, 1, coordsToLatLng); return new L.MultiPolyline(latlngs); case 'MultiPolygon': latlngs = this.coordsToLatLngs(coords, 2, coordsToLatLng); return new L.MultiPolygon(latlngs); case 'GeometryCollection': for (i = 0, len = geometry.geometries.length; i < len; i++) { layer = this.geometryToLayer({ geometry: geometry.geometries[i], type: 'Feature', properties: geojson.properties }, pointToLayer, coordsToLatLng); layers.push(layer); } return new L.FeatureGroup(layers); default: throw new Error('Invalid GeoJSON object.'); } }, coordsToLatLng: function (coords) { // (Array[, Boolean]) -> LatLng return new L.LatLng(coords[1], coords[0]); }, coordsToLatLngs: function (coords, levelsDeep, coordsToLatLng) { // (Array[, Number, Function]) -> Array var latlng, i, len, latlngs = []; for (i = 0, len = coords.length; i < len; i++) { latlng = levelsDeep ? this.coordsToLatLngs(coords[i], levelsDeep - 1, coordsToLatLng) : (coordsToLatLng || this.coordsToLatLng)(coords[i]); latlngs.push(latlng); } return latlngs; }, latLngToCoords: function (latLng) { return [latLng.lng, latLng.lat]; }, latLngsToCoords: function (latLngs) { var coords = []; for (var i = 0, len = latLngs.length; i < len; i++) { coords.push(L.GeoJSON.latLngToCoords(latLngs[i])); } return coords; }, getFeature: function (layer, newGeometry) { return layer.feature ? 
L.extend({}, layer.feature, {geometry: newGeometry}) : L.GeoJSON.asFeature(newGeometry); }, asFeature: function (geoJSON) { if (geoJSON.type === 'Feature') { return geoJSON; } return { type: 'Feature', properties: {}, geometry: geoJSON }; } }); var PointToGeoJSON = { toGeoJSON: function () { return L.GeoJSON.getFeature(this, { type: 'Point', coordinates: L.GeoJSON.latLngToCoords(this.getLatLng()) }); } }; L.Marker.include(PointToGeoJSON); L.Circle.include(PointToGeoJSON); L.CircleMarker.include(PointToGeoJSON); L.Polyline.include({ toGeoJSON: function () { return L.GeoJSON.getFeature(this, { type: 'LineString', coordinates: L.GeoJSON.latLngsToCoords(this.getLatLngs()) }); } }); L.Polygon.include({ toGeoJSON: function () { var coords = [L.GeoJSON.latLngsToCoords(this.getLatLngs())], i, len, hole; coords[0].push(coords[0][0]); if (this._holes) { for (i = 0, len = this._holes.length; i < len; i++) { hole = L.GeoJSON.latLngsToCoords(this._holes[i]); hole.push(hole[0]); coords.push(hole); } } return L.GeoJSON.getFeature(this, { type: 'Polygon', coordinates: coords }); } }); (function () { function includeMulti(Klass, type) { Klass.include({ toGeoJSON: function () { var coords = []; this.eachLayer(function (layer) { coords.push(layer.toGeoJSON().geometry.coordinates); }); return L.GeoJSON.getFeature(this, { type: type, coordinates: coords }); } }); } includeMulti(L.MultiPolyline, 'MultiLineString'); includeMulti(L.MultiPolygon, 'MultiPolygon'); }()); L.LayerGroup.include({ toGeoJSON: function () { var features = []; this.eachLayer(function (layer) { if (layer.toGeoJSON) { features.push(L.GeoJSON.asFeature(layer.toGeoJSON())); } }); return { type: 'FeatureCollection', features: features }; } }); L.geoJson = function (geojson, options) { return new L.GeoJSON(geojson, options); }; /* * L.DomEvent contains functions for working with DOM events. */ L.DomEvent = { /* inspired by John Resig, Dean Edwards and YUI addEvent implementations */ addListener: function (obj, type, fn, context) { // (HTMLElement, String, Function[, Object]) var id = L.stamp(fn), key = '_leaflet_' + type + id, handler, originalHandler, newType; if (obj[key]) { return this; } handler = function (e) { return fn.call(context || obj, e || L.DomEvent._getEvent()); }; if (L.Browser.msTouch && type.indexOf('touch') === 0) { return this.addMsTouchListener(obj, type, handler, id); } if (L.Browser.touch && (type === 'dblclick') && this.addDoubleTapListener) { this.addDoubleTapListener(obj, handler, id); } if ('addEventListener' in obj) { if (type === 'mousewheel') { obj.addEventListener('DOMMouseScroll', handler, false); obj.addEventListener(type, handler, false); } else if ((type === 'mouseenter') || (type === 'mouseleave')) { originalHandler = handler; newType = (type === 'mouseenter' ? 
'mouseover' : 'mouseout'); handler = function (e) { if (!L.DomEvent._checkMouse(obj, e)) { return; } return originalHandler(e); }; obj.addEventListener(newType, handler, false); } else if (type === 'click' && L.Browser.android) { originalHandler = handler; handler = function (e) { return L.DomEvent._filterClick(e, originalHandler); }; obj.addEventListener(type, handler, false); } else { obj.addEventListener(type, handler, false); } } else if ('attachEvent' in obj) { obj.attachEvent('on' + type, handler); } obj[key] = handler; return this; }, removeListener: function (obj, type, fn) { // (HTMLElement, String, Function) var id = L.stamp(fn), key = '_leaflet_' + type + id, handler = obj[key]; if (!handler) { return this; } if (L.Browser.msTouch && type.indexOf('touch') === 0) { this.removeMsTouchListener(obj, type, id); } else if (L.Browser.touch && (type === 'dblclick') && this.removeDoubleTapListener) { this.removeDoubleTapListener(obj, id); } else if ('removeEventListener' in obj) { if (type === 'mousewheel') { obj.removeEventListener('DOMMouseScroll', handler, false); obj.removeEventListener(type, handler, false); } else if ((type === 'mouseenter') || (type === 'mouseleave')) { obj.removeEventListener((type === 'mouseenter' ? 'mouseover' : 'mouseout'), handler, false); } else { obj.removeEventListener(type, handler, false); } } else if ('detachEvent' in obj) { obj.detachEvent('on' + type, handler); } obj[key] = null; return this; }, stopPropagation: function (e) { if (e.stopPropagation) { e.stopPropagation(); } else { e.cancelBubble = true; } return this; }, disableClickPropagation: function (el) { var stop = L.DomEvent.stopPropagation; for (var i = L.Draggable.START.length - 1; i >= 0; i--) { L.DomEvent.addListener(el, L.Draggable.START[i], stop); } return L.DomEvent .addListener(el, 'click', L.DomEvent._fakeStop) .addListener(el, 'dblclick', stop); }, preventDefault: function (e) { if (e.preventDefault) { e.preventDefault(); } else { e.returnValue = false; } return this; }, stop: function (e) { return L.DomEvent.preventDefault(e).stopPropagation(e); }, getMousePosition: function (e, container) { var body = document.body, docEl = document.documentElement, x = e.pageX ? e.pageX : e.clientX + body.scrollLeft + docEl.scrollLeft, y = e.pageY ? e.pageY : e.clientY + body.scrollTop + docEl.scrollTop, pos = new L.Point(x, y); return (container ? 
pos._subtract(L.DomUtil.getViewportOffset(container)) : pos); }, getWheelDelta: function (e) { var delta = 0; if (e.wheelDelta) { delta = e.wheelDelta / 120; } if (e.detail) { delta = -e.detail / 3; } return delta; }, _fakeStop: function stop(e) { // fakes stopPropagation by setting a special event flag checked in Map mouse events handler // jshint camelcase: false e._leaflet_stop = true; }, // check if element really left/entered the event target (for mouseenter/mouseleave) _checkMouse: function (el, e) { var related = e.relatedTarget; if (!related) { return true; } try { while (related && (related !== el)) { related = related.parentNode; } } catch (err) { return false; } return (related !== el); }, _getEvent: function () { // evil magic for IE /*jshint noarg:false */ var e = window.event; if (!e) { var caller = arguments.callee.caller; while (caller) { e = caller['arguments'][0]; if (e && window.Event === e.constructor) { break; } caller = caller.caller; } } return e; }, // this is a horrible workaround for a bug in Android where a single touch triggers two click events _filterClick: function (e, handler) { var timeStamp = (e.timeStamp || e.originalEvent.timeStamp), elapsed = L.DomEvent._lastClick && (timeStamp - L.DomEvent._lastClick); // are they closer together than 1000ms yet more than 100ms? // Android typically triggers them ~300ms apart while multiple listeners // on the same event should be triggered far faster; // or check if click is simulated on the element, and if it is, reject any non-simulated events if ((elapsed && elapsed > 100 && elapsed < 1000) || (e.target._simulatedClick && !e._simulated)) { L.DomEvent.stop(e); return; } L.DomEvent._lastClick = timeStamp; return handler(e); } }; L.DomEvent.on = L.DomEvent.addListener; L.DomEvent.off = L.DomEvent.removeListener; /* * L.Draggable allows you to add dragging capabilities to any element. Supports mobile devices too. */ L.Draggable = L.Class.extend({ includes: L.Mixin.Events, statics: { START: L.Browser.touch ? ['touchstart', 'mousedown'] : ['mousedown'], END: { mousedown: 'mouseup', touchstart: 'touchend', MSPointerDown: 'touchend' }, MOVE: { mousedown: 'mousemove', touchstart: 'touchmove', MSPointerDown: 'touchmove' } }, initialize: function (element, dragStartTarget) { this._element = element; this._dragStartTarget = dragStartTarget || element; }, enable: function () { if (this._enabled) { return; } for (var i = L.Draggable.START.length - 1; i >= 0; i--) { L.DomEvent.on(this._dragStartTarget, L.Draggable.START[i], this._onDown, this); } this._enabled = true; }, disable: function () { if (!this._enabled) { return; } for (var i = L.Draggable.START.length - 1; i >= 0; i--) { L.DomEvent.off(this._dragStartTarget, L.Draggable.START[i], this._onDown, this); } this._enabled = false; this._moved = false; }, _onDown: function (e) { if (e.shiftKey || ((e.which !== 1) && (e.button !== 1) && !e.touches)) { return; } L.DomEvent .stopPropagation(e); if (L.Draggable._disabled) { return; } L.DomUtil.disableImageDrag(); L.DomUtil.disableTextSelection(); var first = e.touches ? 
e.touches[0] : e, el = first.target; // if touching a link, highlight it if (L.Browser.touch && el.tagName.toLowerCase() === 'a') { L.DomUtil.addClass(el, 'leaflet-active'); } this._moved = false; if (this._moving) { return; } this._startPoint = new L.Point(first.clientX, first.clientY); this._startPos = this._newPos = L.DomUtil.getPosition(this._element); L.DomEvent .on(document, L.Draggable.MOVE[e.type], this._onMove, this) .on(document, L.Draggable.END[e.type], this._onUp, this); }, _onMove: function (e) { if (e.touches && e.touches.length > 1) { return; } var first = (e.touches && e.touches.length === 1 ? e.touches[0] : e), newPoint = new L.Point(first.clientX, first.clientY), offset = newPoint.subtract(this._startPoint); if (!offset.x && !offset.y) { return; } L.DomEvent.preventDefault(e); if (!this._moved) { this.fire('dragstart'); this._moved = true; this._startPos = L.DomUtil.getPosition(this._element).subtract(offset); if (!L.Browser.touch) { L.DomUtil.addClass(document.body, 'leaflet-dragging'); } } this._newPos = this._startPos.add(offset); this._moving = true; L.Util.cancelAnimFrame(this._animRequest); this._animRequest = L.Util.requestAnimFrame(this._updatePosition, this, true, this._dragStartTarget); }, _updatePosition: function () { this.fire('predrag'); L.DomUtil.setPosition(this._element, this._newPos); this.fire('drag'); }, _onUp: function () { if (!L.Browser.touch) { L.DomUtil.removeClass(document.body, 'leaflet-dragging'); } for (var i in L.Draggable.MOVE) { L.DomEvent .off(document, L.Draggable.MOVE[i], this._onMove) .off(document, L.Draggable.END[i], this._onUp); } L.DomUtil.enableImageDrag(); L.DomUtil.enableTextSelection(); if (this._moved) { // ensure drag is not fired after dragend L.Util.cancelAnimFrame(this._animRequest); this.fire('dragend'); } this._moving = false; } }); /* L.Handler is a base class for handler classes that are used internally to inject interaction features like dragging to classes like Map and Marker. */ L.Handler = L.Class.extend({ initialize: function (map) { this._map = map; }, enable: function () { if (this._enabled) { return; } this._enabled = true; this.addHooks(); }, disable: function () { if (!this._enabled) { return; } this._enabled = false; this.removeHooks(); }, enabled: function () { return !!this._enabled; } }); /* * L.Handler.MapDrag is used to make the map draggable (with panning inertia), enabled by default. */ L.Map.mergeOptions({ dragging: true, inertia: !L.Browser.android23, inertiaDeceleration: 3400, // px/s^2 inertiaMaxSpeed: Infinity, // px/s inertiaThreshold: L.Browser.touch ? 
32 : 18, // ms easeLinearity: 0.25, // TODO refactor, move to CRS worldCopyJump: false }); L.Map.Drag = L.Handler.extend({ addHooks: function () { if (!this._draggable) { var map = this._map; this._draggable = new L.Draggable(map._mapPane, map._container); this._draggable.on({ 'dragstart': this._onDragStart, 'drag': this._onDrag, 'dragend': this._onDragEnd }, this); if (map.options.worldCopyJump) { this._draggable.on('predrag', this._onPreDrag, this); map.on('viewreset', this._onViewReset, this); } } this._draggable.enable(); }, removeHooks: function () { this._draggable.disable(); }, moved: function () { return this._draggable && this._draggable._moved; }, _onDragStart: function () { var map = this._map; if (map._panAnim) { map._panAnim.stop(); } map .fire('movestart') .fire('dragstart'); if (map.options.inertia) { this._positions = []; this._times = []; } }, _onDrag: function () { if (this._map.options.inertia) { var time = this._lastTime = +new Date(), pos = this._lastPos = this._draggable._newPos; this._positions.push(pos); this._times.push(time); if (time - this._times[0] > 200) { this._positions.shift(); this._times.shift(); } } this._map .fire('move') .fire('drag'); }, _onViewReset: function () { // TODO fix hardcoded Earth values var pxCenter = this._map.getSize()._divideBy(2), pxWorldCenter = this._map.latLngToLayerPoint([0, 0]); this._initialWorldOffset = pxWorldCenter.subtract(pxCenter).x; this._worldWidth = this._map.project([0, 180]).x; }, _onPreDrag: function () { // TODO refactor to be able to adjust map pane position after zoom var worldWidth = this._worldWidth, halfWidth = Math.round(worldWidth / 2), dx = this._initialWorldOffset, x = this._draggable._newPos.x, newX1 = (x - halfWidth + dx) % worldWidth + halfWidth - dx, newX2 = (x + halfWidth + dx) % worldWidth - halfWidth - dx, newX = Math.abs(newX1 + dx) < Math.abs(newX2 + dx) ? newX1 : newX2; this._draggable._newPos.x = newX; }, _onDragEnd: function () { var map = this._map, options = map.options, delay = +new Date() - this._lastTime, noInertia = !options.inertia || delay > options.inertiaThreshold || !this._positions[0]; map.fire('dragend'); if (noInertia) { map.fire('moveend'); } else { var direction = this._lastPos.subtract(this._positions[0]), duration = (this._lastTime + delay - this._times[0]) / 1000, ease = options.easeLinearity, speedVector = direction.multiplyBy(ease / duration), speed = speedVector.distanceTo([0, 0]), limitedSpeed = Math.min(options.inertiaMaxSpeed, speed), limitedSpeedVector = speedVector.multiplyBy(limitedSpeed / speed), decelerationDuration = limitedSpeed / (options.inertiaDeceleration * ease), offset = limitedSpeedVector.multiplyBy(-decelerationDuration / 2).round(); if (!offset.x || !offset.y) { map.fire('moveend'); } else { L.Util.requestAnimFrame(function () { map.panBy(offset, { duration: decelerationDuration, easeLinearity: ease, noMoveStart: true }); }); } } } }); L.Map.addInitHook('addHandler', 'dragging', L.Map.Drag); /* * L.Handler.DoubleClickZoom is used to handle double-click zoom on the map, enabled by default. 
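 *
 * Illustrative usage sketch (assumes a map container with id 'map'): the handler is
 * installed on every map as map.doubleClickZoom, so it can be switched off via the
 * option below or toggled at runtime.
 *
 *   var map = L.map('map', { doubleClickZoom: false }); // opt out when creating the map
 *   map.doubleClickZoom.enable();                       // re-enable the handler later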
*/ L.Map.mergeOptions({ doubleClickZoom: true }); L.Map.DoubleClickZoom = L.Handler.extend({ addHooks: function () { this._map.on('dblclick', this._onDoubleClick); }, removeHooks: function () { this._map.off('dblclick', this._onDoubleClick); }, _onDoubleClick: function (e) { this.setZoomAround(e.containerPoint, this._zoom + 1); } }); L.Map.addInitHook('addHandler', 'doubleClickZoom', L.Map.DoubleClickZoom); /* * L.Handler.ScrollWheelZoom is used by L.Map to enable mouse scroll wheel zoom on the map. */ L.Map.mergeOptions({ scrollWheelZoom: true }); L.Map.ScrollWheelZoom = L.Handler.extend({ addHooks: function () { L.DomEvent.on(this._map._container, 'mousewheel', this._onWheelScroll, this); L.DomEvent.on(this._map._container, 'MozMousePixelScroll', L.DomEvent.preventDefault); this._delta = 0; }, removeHooks: function () { L.DomEvent.off(this._map._container, 'mousewheel', this._onWheelScroll); L.DomEvent.off(this._map._container, 'MozMousePixelScroll', L.DomEvent.preventDefault); }, _onWheelScroll: function (e) { var delta = L.DomEvent.getWheelDelta(e); this._delta += delta; this._lastMousePos = this._map.mouseEventToContainerPoint(e); if (!this._startTime) { this._startTime = +new Date(); } var left = Math.max(40 - (+new Date() - this._startTime), 0); clearTimeout(this._timer); this._timer = setTimeout(L.bind(this._performZoom, this), left); L.DomEvent.preventDefault(e); L.DomEvent.stopPropagation(e); }, _performZoom: function () { var map = this._map, delta = this._delta, zoom = map.getZoom(); delta = delta > 0 ? Math.ceil(delta) : Math.floor(delta); delta = Math.max(Math.min(delta, 4), -4); delta = map._limitZoom(zoom + delta) - zoom; this._delta = 0; this._startTime = null; if (!delta) { return; } map.setZoomAround(this._lastMousePos, zoom + delta); } }); L.Map.addInitHook('addHandler', 'scrollWheelZoom', L.Map.ScrollWheelZoom); /* * Extends the event handling code with double tap support for mobile browsers. */ L.extend(L.DomEvent, { _touchstart: L.Browser.msTouch ? 'MSPointerDown' : 'touchstart', _touchend: L.Browser.msTouch ? 'MSPointerUp' : 'touchend', // inspired by Zepto touch code by Thomas Fuchs addDoubleTapListener: function (obj, handler, id) { var last, doubleTap = false, delay = 250, touch, pre = '_leaflet_', touchstart = this._touchstart, touchend = this._touchend, trackedTouches = []; function onTouchStart(e) { var count; if (L.Browser.msTouch) { trackedTouches.push(e.pointerId); count = trackedTouches.length; } else { count = e.touches.length; } if (count > 1) { return; } var now = Date.now(), delta = now - (last || now); touch = e.touches ? e.touches[0] : e; doubleTap = (delta > 0 && delta <= delay); last = now; } function onTouchEnd(e) { if (L.Browser.msTouch) { var idx = trackedTouches.indexOf(e.pointerId); if (idx === -1) { return; } trackedTouches.splice(idx, 1); } if (doubleTap) { if (L.Browser.msTouch) { // work around .type being readonly with MSPointer* events var newTouch = { }, prop; // jshint forin:false for (var i in touch) { prop = touch[i]; if (typeof prop === 'function') { newTouch[i] = prop.bind(touch); } else { newTouch[i] = prop; } } touch = newTouch; } touch.type = 'dblclick'; handler(touch); last = null; } } obj[pre + touchstart + id] = onTouchStart; obj[pre + touchend + id] = onTouchEnd; // on msTouch we need to listen on the document, otherwise a drag starting on the map and moving off screen // will not come through to us, so we will lose track of how many touches are ongoing var endElement = L.Browser.msTouch ? 
document.documentElement : obj; obj.addEventListener(touchstart, onTouchStart, false); endElement.addEventListener(touchend, onTouchEnd, false); if (L.Browser.msTouch) { endElement.addEventListener('MSPointerCancel', onTouchEnd, false); } return this; }, removeDoubleTapListener: function (obj, id) { var pre = '_leaflet_'; obj.removeEventListener(this._touchstart, obj[pre + this._touchstart + id], false); (L.Browser.msTouch ? document.documentElement : obj).removeEventListener( this._touchend, obj[pre + this._touchend + id], false); if (L.Browser.msTouch) { document.documentElement.removeEventListener('MSPointerCancel', obj[pre + this._touchend + id], false); } return this; } }); /* * Extends L.DomEvent to provide touch support for Internet Explorer and Windows-based devices. */ L.extend(L.DomEvent, { _msTouches: [], _msDocumentListener: false, // Provides a touch events wrapper for msPointer events. // Based on changes by veproza https://github.com/CloudMade/Leaflet/pull/1019 addMsTouchListener: function (obj, type, handler, id) { switch (type) { case 'touchstart': return this.addMsTouchListenerStart(obj, type, handler, id); case 'touchend': return this.addMsTouchListenerEnd(obj, type, handler, id); case 'touchmove': return this.addMsTouchListenerMove(obj, type, handler, id); default: throw 'Unknown touch event type'; } }, addMsTouchListenerStart: function (obj, type, handler, id) { var pre = '_leaflet_', touches = this._msTouches; var cb = function (e) { var alreadyInArray = false; for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { alreadyInArray = true; break; } } if (!alreadyInArray) { touches.push(e); } e.touches = touches.slice(); e.changedTouches = [e]; handler(e); }; obj[pre + 'touchstart' + id] = cb; obj.addEventListener('MSPointerDown', cb, false); // need to also listen for end events to keep the _msTouches list accurate // this needs to be on the body and never go away if (!this._msDocumentListener) { var internalCb = function (e) { for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { touches.splice(i, 1); break; } } }; //We listen on the documentElement as any drags that end by moving the touch off the screen get fired there document.documentElement.addEventListener('MSPointerUp', internalCb, false); document.documentElement.addEventListener('MSPointerCancel', internalCb, false); this._msDocumentListener = true; } return this; }, addMsTouchListenerMove: function (obj, type, handler, id) { var pre = '_leaflet_', touches = this._msTouches; function cb(e) { // don't fire touch moves when mouse isn't down if (e.pointerType === e.MSPOINTER_TYPE_MOUSE && e.buttons === 0) { return; } for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { touches[i] = e; break; } } e.touches = touches.slice(); e.changedTouches = [e]; handler(e); } obj[pre + 'touchmove' + id] = cb; obj.addEventListener('MSPointerMove', cb, false); return this; }, addMsTouchListenerEnd: function (obj, type, handler, id) { var pre = '_leaflet_', touches = this._msTouches; var cb = function (e) { for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { touches.splice(i, 1); break; } } e.touches = touches.slice(); e.changedTouches = [e]; handler(e); }; obj[pre + 'touchend' + id] = cb; obj.addEventListener('MSPointerUp', cb, false); obj.addEventListener('MSPointerCancel', cb, false); return this; }, removeMsTouchListener: function (obj, type, id) { var pre = '_leaflet_', cb = obj[pre + type + 
id]; switch (type) { case 'touchstart': obj.removeEventListener('MSPointerDown', cb, false); break; case 'touchmove': obj.removeEventListener('MSPointerMove', cb, false); break; case 'touchend': obj.removeEventListener('MSPointerUp', cb, false); obj.removeEventListener('MSPointerCancel', cb, false); break; } return this; } }); /* * L.Handler.TouchZoom is used by L.Map to add pinch zoom on supported mobile browsers. */ L.Map.mergeOptions({ touchZoom: L.Browser.touch && !L.Browser.android23 }); L.Map.TouchZoom = L.Handler.extend({ addHooks: function () { L.DomEvent.on(this._map._container, 'touchstart', this._onTouchStart, this); }, removeHooks: function () { L.DomEvent.off(this._map._container, 'touchstart', this._onTouchStart, this); }, _onTouchStart: function (e) { var map = this._map; if (!e.touches || e.touches.length !== 2 || map._animatingZoom || this._zooming) { return; } var p1 = map.mouseEventToLayerPoint(e.touches[0]), p2 = map.mouseEventToLayerPoint(e.touches[1]), viewCenter = map._getCenterLayerPoint(); this._startCenter = p1.add(p2)._divideBy(2); this._startDist = p1.distanceTo(p2); this._moved = false; this._zooming = true; this._centerOffset = viewCenter.subtract(this._startCenter); if (map._panAnim) { map._panAnim.stop(); } L.DomEvent .on(document, 'touchmove', this._onTouchMove, this) .on(document, 'touchend', this._onTouchEnd, this); L.DomEvent.preventDefault(e); }, _onTouchMove: function (e) { var map = this._map; if (!e.touches || e.touches.length !== 2 || !this._zooming) { return; } var p1 = map.mouseEventToLayerPoint(e.touches[0]), p2 = map.mouseEventToLayerPoint(e.touches[1]); this._scale = p1.distanceTo(p2) / this._startDist; this._delta = p1._add(p2)._divideBy(2)._subtract(this._startCenter); if (this._scale === 1) { return; } if (!this._moved) { L.DomUtil.addClass(map._mapPane, 'leaflet-touching'); map .fire('movestart') .fire('zoomstart'); this._moved = true; } L.Util.cancelAnimFrame(this._animRequest); this._animRequest = L.Util.requestAnimFrame( this._updateOnMove, this, true, this._map._container); L.DomEvent.preventDefault(e); }, _updateOnMove: function () { var map = this._map, origin = this._getScaleOrigin(), center = map.layerPointToLatLng(origin), zoom = map.getScaleZoom(this._scale); map._animateZoom(center, zoom, this._startCenter, this._scale, this._delta); }, _onTouchEnd: function () { if (!this._moved || !this._zooming) { this._zooming = false; return; } var map = this._map; this._zooming = false; L.DomUtil.removeClass(map._mapPane, 'leaflet-touching'); L.Util.cancelAnimFrame(this._animRequest); L.DomEvent .off(document, 'touchmove', this._onTouchMove) .off(document, 'touchend', this._onTouchEnd); var origin = this._getScaleOrigin(), center = map.layerPointToLatLng(origin), oldZoom = map.getZoom(), floatZoomDelta = map.getScaleZoom(this._scale) - oldZoom, roundZoomDelta = (floatZoomDelta > 0 ? Math.ceil(floatZoomDelta) : Math.floor(floatZoomDelta)), zoom = map._limitZoom(oldZoom + roundZoomDelta), scale = map.getZoomScale(zoom) / this._scale; map._animateZoom(center, zoom, origin, scale); }, _getScaleOrigin: function () { var centerOffset = this._centerOffset.subtract(this._delta).divideBy(this._scale); return this._startCenter.add(centerOffset); } }); L.Map.addInitHook('addHandler', 'touchZoom', L.Map.TouchZoom); /* * L.Map.Tap is used to enable mobile hacks like quick taps and long hold. 
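 *
 * Illustrative usage sketch (assumes a map container with id 'map'): the behaviour is
 * driven by the 'tap' and 'tapTolerance' options merged below; the handler instance
 * only exists on touch-capable browsers.
 *
 *   var map = L.map('map', { tapTolerance: 20 }); // allow 20px of movement per tap
 *   if (map.tap) { map.tap.disable(); }           // turn tap simulation off where present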
*/ L.Map.mergeOptions({ tap: true, tapTolerance: 15 }); L.Map.Tap = L.Handler.extend({ addHooks: function () { L.DomEvent.on(this._map._container, 'touchstart', this._onDown, this); }, removeHooks: function () { L.DomEvent.off(this._map._container, 'touchstart', this._onDown, this); }, _onDown: function (e) { if (!e.touches) { return; } L.DomEvent.preventDefault(e); this._fireClick = true; // don't simulate click or track longpress if more than 1 touch if (e.touches.length > 1) { this._fireClick = false; clearTimeout(this._holdTimeout); return; } var first = e.touches[0], el = first.target; this._startPos = this._newPos = new L.Point(first.clientX, first.clientY); // if touching a link, highlight it if (el.tagName.toLowerCase() === 'a') { L.DomUtil.addClass(el, 'leaflet-active'); } // simulate long hold but setting a timeout this._holdTimeout = setTimeout(L.bind(function () { if (this._isTapValid()) { this._fireClick = false; this._onUp(); this._simulateEvent('contextmenu', first); } }, this), 1000); L.DomEvent .on(document, 'touchmove', this._onMove, this) .on(document, 'touchend', this._onUp, this); }, _onUp: function (e) { clearTimeout(this._holdTimeout); L.DomEvent .off(document, 'touchmove', this._onMove, this) .off(document, 'touchend', this._onUp, this); if (this._fireClick && e && e.changedTouches) { var first = e.changedTouches[0], el = first.target; if (el.tagName.toLowerCase() === 'a') { L.DomUtil.removeClass(el, 'leaflet-active'); } // simulate click if the touch didn't move too much if (this._isTapValid()) { this._simulateEvent('click', first); } } }, _isTapValid: function () { return this._newPos.distanceTo(this._startPos) <= this._map.options.tapTolerance; }, _onMove: function (e) { var first = e.touches[0]; this._newPos = new L.Point(first.clientX, first.clientY); }, _simulateEvent: function (type, e) { var simulatedEvent = document.createEvent('MouseEvents'); simulatedEvent._simulated = true; e.target._simulatedClick = true; simulatedEvent.initMouseEvent( type, true, true, window, 1, e.screenX, e.screenY, e.clientX, e.clientY, false, false, false, false, 0, null); e.target.dispatchEvent(simulatedEvent); } }); if (L.Browser.touch && !L.Browser.msTouch) { L.Map.addInitHook('addHandler', 'tap', L.Map.Tap); } /* * L.Handler.ShiftDragZoom is used to add shift-drag zoom interaction to the map * (zoom to a selected bounding box), enabled by default. 
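 *
 * Illustrative usage sketch (assumes a map instance named map): the handler fires
 * 'boxzoomstart' when a shift-drag begins and 'boxzoomend' with the selected bounds.
 *
 *   map.on('boxzoomend', function (e) {
 *     console.log('zoomed to bounds', e.boxZoomBounds);
 *   });
 *   map.boxZoom.disable(); // or turn shift-drag zooming off entirely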
*/ L.Map.mergeOptions({ boxZoom: true }); L.Map.BoxZoom = L.Handler.extend({ initialize: function (map) { this._map = map; this._container = map._container; this._pane = map._panes.overlayPane; }, addHooks: function () { L.DomEvent.on(this._container, 'mousedown', this._onMouseDown, this); }, removeHooks: function () { L.DomEvent.off(this._container, 'mousedown', this._onMouseDown); }, _onMouseDown: function (e) { if (!e.shiftKey || ((e.which !== 1) && (e.button !== 1))) { return false; } L.DomUtil.disableTextSelection(); L.DomUtil.disableImageDrag(); this._startLayerPoint = this._map.mouseEventToLayerPoint(e); this._box = L.DomUtil.create('div', 'leaflet-zoom-box', this._pane); L.DomUtil.setPosition(this._box, this._startLayerPoint); //TODO refactor: move cursor to styles this._container.style.cursor = 'crosshair'; L.DomEvent .on(document, 'mousemove', this._onMouseMove, this) .on(document, 'mouseup', this._onMouseUp, this) .on(document, 'keydown', this._onKeyDown, this); this._map.fire('boxzoomstart'); }, _onMouseMove: function (e) { var startPoint = this._startLayerPoint, box = this._box, layerPoint = this._map.mouseEventToLayerPoint(e), offset = layerPoint.subtract(startPoint), newPos = new L.Point( Math.min(layerPoint.x, startPoint.x), Math.min(layerPoint.y, startPoint.y)); L.DomUtil.setPosition(box, newPos); // TODO refactor: remove hardcoded 4 pixels box.style.width = (Math.max(0, Math.abs(offset.x) - 4)) + 'px'; box.style.height = (Math.max(0, Math.abs(offset.y) - 4)) + 'px'; }, _finish: function () { this._pane.removeChild(this._box); this._container.style.cursor = ''; L.DomUtil.enableTextSelection(); L.DomUtil.enableImageDrag(); L.DomEvent .off(document, 'mousemove', this._onMouseMove) .off(document, 'mouseup', this._onMouseUp) .off(document, 'keydown', this._onKeyDown); }, _onMouseUp: function (e) { this._finish(); var map = this._map, layerPoint = map.mouseEventToLayerPoint(e); if (this._startLayerPoint.equals(layerPoint)) { return; } var bounds = new L.LatLngBounds( map.layerPointToLatLng(this._startLayerPoint), map.layerPointToLatLng(layerPoint)); map.fitBounds(bounds); map.fire('boxzoomend', { boxZoomBounds: bounds }); }, _onKeyDown: function (e) { if (e.keyCode === 27) { this._finish(); } } }); L.Map.addInitHook('addHandler', 'boxZoom', L.Map.BoxZoom); /* * L.Map.Keyboard is handling keyboard interaction with the map, enabled by default. 
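 *
 * Illustrative usage sketch (assumes a map container with id 'map'): the step sizes come
 * from the options merged below, and the handler instance lives at map.keyboard.
 *
 *   var map = L.map('map', { keyboardPanOffset: 150 }); // pan 150px per arrow-key press
 *   map.keyboard.disable();                             // stop listening to the keyboard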
*/ L.Map.mergeOptions({ keyboard: true, keyboardPanOffset: 80, keyboardZoomOffset: 1 }); L.Map.Keyboard = L.Handler.extend({ keyCodes: { left: [37], right: [39], down: [40], up: [38], zoomIn: [187, 107, 61], zoomOut: [189, 109, 173] }, initialize: function (map) { this._map = map; this._setPanOffset(map.options.keyboardPanOffset); this._setZoomOffset(map.options.keyboardZoomOffset); }, addHooks: function () { var container = this._map._container; // make the container focusable by tabbing if (container.tabIndex === -1) { container.tabIndex = '0'; } L.DomEvent .on(container, 'focus', this._onFocus, this) .on(container, 'blur', this._onBlur, this) .on(container, 'mousedown', this._onMouseDown, this); this._map .on('focus', this._addHooks, this) .on('blur', this._removeHooks, this); }, removeHooks: function () { this._removeHooks(); var container = this._map._container; L.DomEvent .off(container, 'focus', this._onFocus, this) .off(container, 'blur', this._onBlur, this) .off(container, 'mousedown', this._onMouseDown, this); this._map .off('focus', this._addHooks, this) .off('blur', this._removeHooks, this); }, _onMouseDown: function () { if (this._focused) { return; } var body = document.body, docEl = document.documentElement, top = body.scrollTop || docEl.scrollTop, left = body.scrollTop || docEl.scrollLeft; this._map._container.focus(); window.scrollTo(left, top); }, _onFocus: function () { this._focused = true; this._map.fire('focus'); }, _onBlur: function () { this._focused = false; this._map.fire('blur'); }, _setPanOffset: function (pan) { var keys = this._panKeys = {}, codes = this.keyCodes, i, len; for (i = 0, len = codes.left.length; i < len; i++) { keys[codes.left[i]] = [-1 * pan, 0]; } for (i = 0, len = codes.right.length; i < len; i++) { keys[codes.right[i]] = [pan, 0]; } for (i = 0, len = codes.down.length; i < len; i++) { keys[codes.down[i]] = [0, pan]; } for (i = 0, len = codes.up.length; i < len; i++) { keys[codes.up[i]] = [0, -1 * pan]; } }, _setZoomOffset: function (zoom) { var keys = this._zoomKeys = {}, codes = this.keyCodes, i, len; for (i = 0, len = codes.zoomIn.length; i < len; i++) { keys[codes.zoomIn[i]] = zoom; } for (i = 0, len = codes.zoomOut.length; i < len; i++) { keys[codes.zoomOut[i]] = -zoom; } }, _addHooks: function () { L.DomEvent.on(document, 'keydown', this._onKeyDown, this); }, _removeHooks: function () { L.DomEvent.off(document, 'keydown', this._onKeyDown, this); }, _onKeyDown: function (e) { var key = e.keyCode, map = this._map; if (key in this._panKeys) { if (map._panAnim && map._panAnim._inProgress) { return; } map.panBy(this._panKeys[key]); if (map.options.maxBounds) { map.panInsideBounds(map.options.maxBounds); } } else if (key in this._zoomKeys) { map.setZoom(map.getZoom() + this._zoomKeys[key]); } else { return; } L.DomEvent.stop(e); } }); L.Map.addInitHook('addHandler', 'keyboard', L.Map.Keyboard); /* * L.Handler.MarkerDrag is used internally by L.Marker to make the markers draggable. 
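 *
 * Illustrative usage sketch (assumes a map instance named map): the handler is attached
 * automatically when a marker is created with the 'draggable' option.
 *
 *   var marker = L.marker([51.5, -0.09], { draggable: true }).addTo(map);
 *   marker.on('dragend', function () {
 *     console.log('marker dropped at', marker.getLatLng());
 *   });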
*/ L.Handler.MarkerDrag = L.Handler.extend({ initialize: function (marker) { this._marker = marker; }, addHooks: function () { var icon = this._marker._icon; if (!this._draggable) { this._draggable = new L.Draggable(icon, icon); } this._draggable .on('dragstart', this._onDragStart, this) .on('drag', this._onDrag, this) .on('dragend', this._onDragEnd, this); this._draggable.enable(); }, removeHooks: function () { this._draggable .off('dragstart', this._onDragStart, this) .off('drag', this._onDrag, this) .off('dragend', this._onDragEnd, this); this._draggable.disable(); }, moved: function () { return this._draggable && this._draggable._moved; }, _onDragStart: function () { this._marker .closePopup() .fire('movestart') .fire('dragstart'); }, _onDrag: function () { var marker = this._marker, shadow = marker._shadow, iconPos = L.DomUtil.getPosition(marker._icon), latlng = marker._map.layerPointToLatLng(iconPos); // update shadow position if (shadow) { L.DomUtil.setPosition(shadow, iconPos); } marker._latlng = latlng; marker .fire('move', {latlng: latlng}) .fire('drag'); }, _onDragEnd: function () { this._marker .fire('moveend') .fire('dragend'); } }); /* * L.Control is a base class for implementing map controls. Handles positioning. * All other controls extend from this class. */ L.Control = L.Class.extend({ options: { position: 'topright' }, initialize: function (options) { L.setOptions(this, options); }, getPosition: function () { return this.options.position; }, setPosition: function (position) { var map = this._map; if (map) { map.removeControl(this); } this.options.position = position; if (map) { map.addControl(this); } return this; }, getContainer: function () { return this._container; }, addTo: function (map) { this._map = map; var container = this._container = this.onAdd(map), pos = this.getPosition(), corner = map._controlCorners[pos]; L.DomUtil.addClass(container, 'leaflet-control'); if (pos.indexOf('bottom') !== -1) { corner.insertBefore(container, corner.firstChild); } else { corner.appendChild(container); } return this; }, removeFrom: function (map) { var pos = this.getPosition(), corner = map._controlCorners[pos]; corner.removeChild(this._container); this._map = null; if (this.onRemove) { this.onRemove(map); } return this; } }); L.control = function (options) { return new L.Control(options); }; // adds control-related methods to L.Map L.Map.include({ addControl: function (control) { control.addTo(this); return this; }, removeControl: function (control) { control.removeFrom(this); return this; }, _initControlPos: function () { var corners = this._controlCorners = {}, l = 'leaflet-', container = this._controlContainer = L.DomUtil.create('div', l + 'control-container', this._container); function createCorner(vSide, hSide) { var className = l + vSide + ' ' + l + hSide; corners[vSide + hSide] = L.DomUtil.create('div', className, container); } createCorner('top', 'left'); createCorner('top', 'right'); createCorner('bottom', 'left'); createCorner('bottom', 'right'); }, _clearControlPos: function () { this._container.removeChild(this._controlContainer); } }); /* * L.Control.Zoom is used for the default zoom buttons on the map. 
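 *
 * Illustrative usage sketch (assumes a map container with id 'map'): the control is added
 * automatically unless the 'zoomControl' option is false, in which case it can be placed
 * manually at another corner.
 *
 *   var map = L.map('map', { zoomControl: false });
 *   L.control.zoom({ position: 'bottomright' }).addTo(map);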
*/ L.Control.Zoom = L.Control.extend({ options: { position: 'topleft' }, onAdd: function (map) { var zoomName = 'leaflet-control-zoom', container = L.DomUtil.create('div', zoomName + ' leaflet-bar'); this._map = map; this._zoomInButton = this._createButton( '+', 'Zoom in', zoomName + '-in', container, this._zoomIn, this); this._zoomOutButton = this._createButton( '-', 'Zoom out', zoomName + '-out', container, this._zoomOut, this); map.on('zoomend zoomlevelschange', this._updateDisabled, this); return container; }, onRemove: function (map) { map.off('zoomend zoomlevelschange', this._updateDisabled, this); }, _zoomIn: function (e) { this._map.zoomIn(e.shiftKey ? 3 : 1); }, _zoomOut: function (e) { this._map.zoomOut(e.shiftKey ? 3 : 1); }, _createButton: function (html, title, className, container, fn, context) { var link = L.DomUtil.create('a', className, container); link.innerHTML = html; link.href = '#'; link.title = title; var stop = L.DomEvent.stopPropagation; L.DomEvent .on(link, 'click', stop) .on(link, 'mousedown', stop) .on(link, 'dblclick', stop) .on(link, 'click', L.DomEvent.preventDefault) .on(link, 'click', fn, context); return link; }, _updateDisabled: function () { var map = this._map, className = 'leaflet-disabled'; L.DomUtil.removeClass(this._zoomInButton, className); L.DomUtil.removeClass(this._zoomOutButton, className); if (map._zoom === map.getMinZoom()) { L.DomUtil.addClass(this._zoomOutButton, className); } if (map._zoom === map.getMaxZoom()) { L.DomUtil.addClass(this._zoomInButton, className); } } }); L.Map.mergeOptions({ zoomControl: true }); L.Map.addInitHook(function () { if (this.options.zoomControl) { this.zoomControl = new L.Control.Zoom(); this.addControl(this.zoomControl); } }); L.control.zoom = function (options) { return new L.Control.Zoom(options); }; /* * L.Control.Attribution is used for displaying attribution on the map (added by default). 
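 *
 * Illustrative usage sketch (assumes a map instance named map): attribution entries are
 * reference-counted per layer, and the default control is reachable as map.attributionControl.
 *
 *   map.attributionControl.setPrefix('Powered by Leaflet');
 *   map.attributionControl.addAttribution('Map data from OpenStreetMap contributors');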
*/ L.Control.Attribution = L.Control.extend({ options: { position: 'bottomright', prefix: '<a href="http://leafletjs.com" title="A JS library for interactive maps">Leaflet</a>' }, initialize: function (options) { L.setOptions(this, options); this._attributions = {}; }, onAdd: function (map) { this._container = L.DomUtil.create('div', 'leaflet-control-attribution'); L.DomEvent.disableClickPropagation(this._container); map .on('layeradd', this._onLayerAdd, this) .on('layerremove', this._onLayerRemove, this); this._update(); return this._container; }, onRemove: function (map) { map .off('layeradd', this._onLayerAdd) .off('layerremove', this._onLayerRemove); }, setPrefix: function (prefix) { this.options.prefix = prefix; this._update(); return this; }, addAttribution: function (text) { if (!text) { return; } if (!this._attributions[text]) { this._attributions[text] = 0; } this._attributions[text]++; this._update(); return this; }, removeAttribution: function (text) { if (!text) { return; } if (this._attributions[text]) { this._attributions[text]--; this._update(); } return this; }, _update: function () { if (!this._map) { return; } var attribs = []; for (var i in this._attributions) { if (this._attributions[i]) { attribs.push(i); } } var prefixAndAttribs = []; if (this.options.prefix) { prefixAndAttribs.push(this.options.prefix); } if (attribs.length) { prefixAndAttribs.push(attribs.join(', ')); } this._container.innerHTML = prefixAndAttribs.join(' | '); }, _onLayerAdd: function (e) { if (e.layer.getAttribution) { this.addAttribution(e.layer.getAttribution()); } }, _onLayerRemove: function (e) { if (e.layer.getAttribution) { this.removeAttribution(e.layer.getAttribution()); } } }); L.Map.mergeOptions({ attributionControl: true }); L.Map.addInitHook(function () { if (this.options.attributionControl) { this.attributionControl = (new L.Control.Attribution()).addTo(this); } }); L.control.attribution = function (options) { return new L.Control.Attribution(options); }; /* * L.Control.Scale is used for displaying metric/imperial scale on the map. */ L.Control.Scale = L.Control.extend({ options: { position: 'bottomleft', maxWidth: 100, metric: true, imperial: true, updateWhenIdle: false }, onAdd: function (map) { this._map = map; var className = 'leaflet-control-scale', container = L.DomUtil.create('div', className), options = this.options; this._addScales(options, className, container); map.on(options.updateWhenIdle ? 'moveend' : 'move', this._update, this); map.whenReady(this._update, this); return container; }, onRemove: function (map) { map.off(this.options.updateWhenIdle ? 
'moveend' : 'move', this._update, this); }, _addScales: function (options, className, container) { if (options.metric) { this._mScale = L.DomUtil.create('div', className + '-line', container); } if (options.imperial) { this._iScale = L.DomUtil.create('div', className + '-line', container); } }, _update: function () { var bounds = this._map.getBounds(), centerLat = bounds.getCenter().lat, halfWorldMeters = 6378137 * Math.PI * Math.cos(centerLat * Math.PI / 180), dist = halfWorldMeters * (bounds.getNorthEast().lng - bounds.getSouthWest().lng) / 180, size = this._map.getSize(), options = this.options, maxMeters = 0; if (size.x > 0) { maxMeters = dist * (options.maxWidth / size.x); } this._updateScales(options, maxMeters); }, _updateScales: function (options, maxMeters) { if (options.metric && maxMeters) { this._updateMetric(maxMeters); } if (options.imperial && maxMeters) { this._updateImperial(maxMeters); } }, _updateMetric: function (maxMeters) { var meters = this._getRoundNum(maxMeters); this._mScale.style.width = this._getScaleWidth(meters / maxMeters) + 'px'; this._mScale.innerHTML = meters < 1000 ? meters + ' m' : (meters / 1000) + ' km'; }, _updateImperial: function (maxMeters) { var maxFeet = maxMeters * 3.2808399, scale = this._iScale, maxMiles, miles, feet; if (maxFeet > 5280) { maxMiles = maxFeet / 5280; miles = this._getRoundNum(maxMiles); scale.style.width = this._getScaleWidth(miles / maxMiles) + 'px'; scale.innerHTML = miles + ' mi'; } else { feet = this._getRoundNum(maxFeet); scale.style.width = this._getScaleWidth(feet / maxFeet) + 'px'; scale.innerHTML = feet + ' ft'; } }, _getScaleWidth: function (ratio) { return Math.round(this.options.maxWidth * ratio) - 10; }, _getRoundNum: function (num) { var pow10 = Math.pow(10, (Math.floor(num) + '').length - 1), d = num / pow10; d = d >= 10 ? 10 : d >= 5 ? 5 : d >= 3 ? 3 : d >= 2 ? 2 : 1; return pow10 * d; } }); L.control.scale = function (options) { return new L.Control.Scale(options); }; /* * L.Control.Layers is a control to allow users to switch between different layers on the map. 
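 *
 * Illustrative usage sketch (assumes a map instance named map and layers 'osm' and
 * 'cities' created elsewhere): base layers render as radio buttons, overlays as checkboxes.
 *
 *   L.control.layers(
 *     { 'OpenStreetMap': osm }, // base layers: only one visible at a time
 *     { 'Cities': cities },     // overlays: toggled independently
 *     { collapsed: false }
 *   ).addTo(map);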
*/ L.Control.Layers = L.Control.extend({ options: { collapsed: true, position: 'topright', autoZIndex: true }, initialize: function (baseLayers, overlays, options) { L.setOptions(this, options); this._layers = {}; this._lastZIndex = 0; this._handlingClick = false; for (var i in baseLayers) { this._addLayer(baseLayers[i], i); } for (i in overlays) { this._addLayer(overlays[i], i, true); } }, onAdd: function (map) { this._initLayout(); this._update(); map .on('layeradd', this._onLayerChange, this) .on('layerremove', this._onLayerChange, this); return this._container; }, onRemove: function (map) { map .off('layeradd', this._onLayerChange) .off('layerremove', this._onLayerChange); }, addBaseLayer: function (layer, name) { this._addLayer(layer, name); this._update(); return this; }, addOverlay: function (layer, name) { this._addLayer(layer, name, true); this._update(); return this; }, removeLayer: function (layer) { var id = L.stamp(layer); delete this._layers[id]; this._update(); return this; }, _initLayout: function () { var className = 'leaflet-control-layers', container = this._container = L.DomUtil.create('div', className); //Makes this work on IE10 Touch devices by stopping it from firing a mouseout event when the touch is released container.setAttribute('aria-haspopup', true); if (!L.Browser.touch) { L.DomEvent.disableClickPropagation(container); L.DomEvent.on(container, 'mousewheel', L.DomEvent.stopPropagation); } else { L.DomEvent.on(container, 'click', L.DomEvent.stopPropagation); } var form = this._form = L.DomUtil.create('form', className + '-list'); if (this.options.collapsed) { if (!L.Browser.android) { L.DomEvent .on(container, 'mouseover', this._expand, this) .on(container, 'mouseout', this._collapse, this); } var link = this._layersLink = L.DomUtil.create('a', className + '-toggle', container); link.href = '#'; link.title = 'Layers'; if (L.Browser.touch) { L.DomEvent .on(link, 'click', L.DomEvent.stop) .on(link, 'click', this._expand, this); } else { L.DomEvent.on(link, 'focus', this._expand, this); } this._map.on('click', this._collapse, this); // TODO keyboard accessibility } else { this._expand(); } this._baseLayersList = L.DomUtil.create('div', className + '-base', form); this._separator = L.DomUtil.create('div', className + '-separator', form); this._overlaysList = L.DomUtil.create('div', className + '-overlays', form); container.appendChild(form); }, _addLayer: function (layer, name, overlay) { var id = L.stamp(layer); this._layers[id] = { layer: layer, name: name, overlay: overlay }; if (this.options.autoZIndex && layer.setZIndex) { this._lastZIndex++; layer.setZIndex(this._lastZIndex); } }, _update: function () { if (!this._container) { return; } this._baseLayersList.innerHTML = ''; this._overlaysList.innerHTML = ''; var baseLayersPresent = false, overlaysPresent = false, i, obj; for (i in this._layers) { obj = this._layers[i]; this._addItem(obj); overlaysPresent = overlaysPresent || obj.overlay; baseLayersPresent = baseLayersPresent || !obj.overlay; } this._separator.style.display = overlaysPresent && baseLayersPresent ? '' : 'none'; }, _onLayerChange: function (e) { var obj = this._layers[L.stamp(e.layer)]; if (!obj) { return; } if (!this._handlingClick) { this._update(); } var type = obj.overlay ? (e.type === 'layeradd' ? 'overlayadd' : 'overlayremove') : (e.type === 'layeradd' ? 
'baselayerchange' : null); if (type) { this._map.fire(type, obj); } }, // IE7 bugs out if you create a radio dynamically, so you have to do it this hacky way (see http://bit.ly/PqYLBe) _createRadioElement: function (name, checked) { var radioHtml = '<input type="radio" class="leaflet-control-layers-selector" name="' + name + '"'; if (checked) { radioHtml += ' checked="checked"'; } radioHtml += '/>'; var radioFragment = document.createElement('div'); radioFragment.innerHTML = radioHtml; return radioFragment.firstChild; }, _addItem: function (obj) { var label = document.createElement('label'), input, checked = this._map.hasLayer(obj.layer); if (obj.overlay) { input = document.createElement('input'); input.type = 'checkbox'; input.className = 'leaflet-control-layers-selector'; input.defaultChecked = checked; } else { input = this._createRadioElement('leaflet-base-layers', checked); } input.layerId = L.stamp(obj.layer); L.DomEvent.on(input, 'click', this._onInputClick, this); var name = document.createElement('span'); name.innerHTML = ' ' + obj.name; label.appendChild(input); label.appendChild(name); var container = obj.overlay ? this._overlaysList : this._baseLayersList; container.appendChild(label); return label; }, _onInputClick: function () { var i, input, obj, inputs = this._form.getElementsByTagName('input'), inputsLen = inputs.length; this._handlingClick = true; for (i = 0; i < inputsLen; i++) { input = inputs[i]; obj = this._layers[input.layerId]; if (input.checked && !this._map.hasLayer(obj.layer)) { this._map.addLayer(obj.layer); } else if (!input.checked && this._map.hasLayer(obj.layer)) { this._map.removeLayer(obj.layer); } } this._handlingClick = false; }, _expand: function () { L.DomUtil.addClass(this._container, 'leaflet-control-layers-expanded'); }, _collapse: function () { this._container.className = this._container.className.replace(' leaflet-control-layers-expanded', ''); } }); L.control.layers = function (baseLayers, overlays, options) { return new L.Control.Layers(baseLayers, overlays, options); }; /* * L.PosAnimation is used by Leaflet internally for pan animations. 
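 *
 * Illustrative usage sketch (assumes 'el' is an absolutely positioned element already
 * placed with L.DomUtil.setPosition): the animation fires 'start', 'step' and 'end'
 * events while sliding the element to the target point.
 *
 *   var anim = new L.PosAnimation();
 *   anim.on('end', function () { console.log('pan animation finished'); });
 *   anim.run(el, new L.Point(250, 100), 0.4); // duration is given in seconds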
*/ L.PosAnimation = L.Class.extend({ includes: L.Mixin.Events, run: function (el, newPos, duration, easeLinearity) { // (HTMLElement, Point[, Number, Number]) this.stop(); this._el = el; this._inProgress = true; this._newPos = newPos; this.fire('start'); el.style[L.DomUtil.TRANSITION] = 'all ' + (duration || 0.25) + 's cubic-bezier(0,0,' + (easeLinearity || 0.5) + ',1)'; L.DomEvent.on(el, L.DomUtil.TRANSITION_END, this._onTransitionEnd, this); L.DomUtil.setPosition(el, newPos); // toggle reflow, Chrome flickers for some reason if you don't do this L.Util.falseFn(el.offsetWidth); // there's no native way to track value updates of transitioned properties, so we imitate this this._stepTimer = setInterval(L.bind(this._onStep, this), 50); }, stop: function () { if (!this._inProgress) { return; } // if we just removed the transition property, the element would jump to its final position, // so we need to make it stay at the current position L.DomUtil.setPosition(this._el, this._getPos()); this._onTransitionEnd(); L.Util.falseFn(this._el.offsetWidth); // force reflow in case we are about to start a new animation }, _onStep: function () { // jshint camelcase: false // make L.DomUtil.getPosition return intermediate position value during animation this._el._leaflet_pos = this._getPos(); this.fire('step'); }, // you can't easily get intermediate values of properties animated with CSS3 Transitions, // we need to parse computed style (in case of transform it returns matrix string) _transformRe: /([-+]?(?:\d*\.)?\d+)\D*, ([-+]?(?:\d*\.)?\d+)\D*\)/, _getPos: function () { var left, top, matches, el = this._el, style = window.getComputedStyle(el); if (L.Browser.any3d) { matches = style[L.DomUtil.TRANSFORM].match(this._transformRe); left = matches ? parseFloat(matches[1]) : 0; top = matches ? parseFloat(matches[2]) : 0; } else { left = parseFloat(style.left); top = parseFloat(style.top); } return new L.Point(left, top, true); }, _onTransitionEnd: function () { L.DomEvent.off(this._el, L.DomUtil.TRANSITION_END, this._onTransitionEnd, this); if (!this._inProgress) { return; } this._inProgress = false; this._el.style[L.DomUtil.TRANSITION] = ''; // jshint camelcase: false // make sure L.DomUtil.getPosition returns the final position value after animation this._el._leaflet_pos = this._newPos; clearInterval(this._stepTimer); this.fire('step').fire('end'); } }); /* * Extends L.Map to handle panning animations. */ L.Map.include({ setView: function (center, zoom, options) { zoom = this._limitZoom(zoom); center = L.latLng(center); options = options || {}; if (this._panAnim) { this._panAnim.stop(); } if (this._loaded && !options.reset && options !== true) { if (options.animate !== undefined) { options.zoom = L.extend({animate: options.animate}, options.zoom); options.pan = L.extend({animate: options.animate}, options.pan); } // try animating pan or zoom var animated = (this._zoom !== zoom) ? 
this._tryAnimatedZoom && this._tryAnimatedZoom(center, zoom, options.zoom) : this._tryAnimatedPan(center, options.pan); if (animated) { // prevent resize handler call, the view will refresh after animation anyway clearTimeout(this._sizeTimer); return this; } } // animation didn't start, just reset the map view this._resetView(center, zoom); return this; }, panBy: function (offset, options) { offset = L.point(offset).round(); options = options || {}; if (!offset.x && !offset.y) { return this; } if (!this._panAnim) { this._panAnim = new L.PosAnimation(); this._panAnim.on({ 'step': this._onPanTransitionStep, 'end': this._onPanTransitionEnd }, this); } // don't fire movestart if animating inertia if (!options.noMoveStart) { this.fire('movestart'); } // animate pan unless animate: false specified if (options.animate !== false) { L.DomUtil.addClass(this._mapPane, 'leaflet-pan-anim'); var newPos = this._getMapPanePos().subtract(offset); this._panAnim.run(this._mapPane, newPos, options.duration || 0.25, options.easeLinearity); } else { this._rawPanBy(offset); this.fire('move').fire('moveend'); } return this; }, _onPanTransitionStep: function () { this.fire('move'); }, _onPanTransitionEnd: function () { L.DomUtil.removeClass(this._mapPane, 'leaflet-pan-anim'); this.fire('moveend'); }, _tryAnimatedPan: function (center, options) { // difference between the new and current centers in pixels var offset = this._getCenterOffset(center)._floor(); // don't animate too far unless animate: true specified in options if ((options && options.animate) !== true && !this.getSize().contains(offset)) { return false; } this.panBy(offset, options); return true; } }); /* * L.PosAnimation fallback implementation that powers Leaflet pan animations * in browsers that don't support CSS3 Transitions. */ L.PosAnimation = L.DomUtil.TRANSITION ? L.PosAnimation : L.PosAnimation.extend({ run: function (el, newPos, duration, easeLinearity) { // (HTMLElement, Point[, Number, Number]) this.stop(); this._el = el; this._inProgress = true; this._duration = duration || 0.25; this._easeOutPower = 1 / Math.max(easeLinearity || 0.5, 0.2); this._startPos = L.DomUtil.getPosition(el); this._offset = newPos.subtract(this._startPos); this._startTime = +new Date(); this.fire('start'); this._animate(); }, stop: function () { if (!this._inProgress) { return; } this._step(); this._complete(); }, _animate: function () { // animation loop this._animId = L.Util.requestAnimFrame(this._animate, this); this._step(); }, _step: function () { var elapsed = (+new Date()) - this._startTime, duration = this._duration * 1000; if (elapsed < duration) { this._runFrame(this._easeOut(elapsed / duration)); } else { this._runFrame(1); this._complete(); } }, _runFrame: function (progress) { var pos = this._startPos.add(this._offset.multiplyBy(progress)); L.DomUtil.setPosition(this._el, pos); this.fire('step'); }, _complete: function () { L.Util.cancelAnimFrame(this._animId); this._inProgress = false; this.fire('end'); }, _easeOut: function (t) { return 1 - Math.pow(1 - t, this._easeOutPower); } }); /* * Extends L.Map to handle zoom animations. 
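 *
 * Illustrative usage sketch (assumes map containers with ids 'map' and 'map2'): the
 * animation can be disabled outright or limited to small zoom jumps via the options below.
 *
 *   var mapA = L.map('map',  { zoomAnimation: false });      // always snap to the new zoom
 *   var mapB = L.map('map2', { zoomAnimationThreshold: 2 }); // animate only small zoom jumps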
*/ L.Map.mergeOptions({ zoomAnimation: true, zoomAnimationThreshold: 4 }); if (L.DomUtil.TRANSITION) { L.Map.addInitHook(function () { // don't animate on browsers without hardware-accelerated transitions or old Android/Opera this._zoomAnimated = this.options.zoomAnimation && L.DomUtil.TRANSITION && L.Browser.any3d && !L.Browser.android23 && !L.Browser.mobileOpera; // zoom transitions run with the same duration for all layers, so if one of transitionend events // happens after starting zoom animation (propagating to the map pane), we know that it ended globally if (this._zoomAnimated) { L.DomEvent.on(this._mapPane, L.DomUtil.TRANSITION_END, this._catchTransitionEnd, this); } }); } L.Map.include(!L.DomUtil.TRANSITION ? {} : { _catchTransitionEnd: function () { if (this._animatingZoom) { this._onZoomTransitionEnd(); } }, _tryAnimatedZoom: function (center, zoom, options) { if (this._animatingZoom) { return true; } options = options || {}; // don't animate if disabled, not supported or zoom difference is too large if (!this._zoomAnimated || options.animate === false || Math.abs(zoom - this._zoom) > this.options.zoomAnimationThreshold) { return false; } // offset is the pixel coords of the zoom origin relative to the current center var scale = this.getZoomScale(zoom), offset = this._getCenterOffset(center)._divideBy(1 - 1 / scale), origin = this._getCenterLayerPoint()._add(offset); // don't animate if the zoom origin isn't within one screen from the current center, unless forced if (options.animate !== true && !this.getSize().contains(offset)) { return false; } this .fire('movestart') .fire('zoomstart'); this._animateZoom(center, zoom, origin, scale, null, true); return true; }, _animateZoom: function (center, zoom, origin, scale, delta, backwards) { this._animatingZoom = true; // put transform transition on all layers with leaflet-zoom-animated class L.DomUtil.addClass(this._mapPane, 'leaflet-zoom-anim'); // remember what center/zoom to set after animation this._animateToCenter = center; this._animateToZoom = zoom; // disable any dragging during animation if (L.Draggable) { L.Draggable._disabled = true; } this.fire('zoomanim', { center: center, zoom: zoom, origin: origin, scale: scale, delta: delta, backwards: backwards }); }, _onZoomTransitionEnd: function () { this._animatingZoom = false; L.DomUtil.removeClass(this._mapPane, 'leaflet-zoom-anim'); this._resetView(this._animateToCenter, this._animateToZoom, true, true); if (L.Draggable) { L.Draggable._disabled = false; } } }); /* Zoom animation logic for L.TileLayer. */ L.TileLayer.include({ _animateZoom: function (e) { if (!this._animating) { this._animating = true; this._prepareBgBuffer(); } var bg = this._bgBuffer, transform = L.DomUtil.TRANSFORM, initialTransform = e.delta ? L.DomUtil.getTranslateString(e.delta) : bg.style[transform], scaleStr = L.DomUtil.getScaleString(e.scale, e.origin); bg.style[transform] = e.backwards ? 
scaleStr + ' ' + initialTransform : initialTransform + ' ' + scaleStr; }, _endZoomAnim: function () { var front = this._tileContainer, bg = this._bgBuffer; front.style.visibility = ''; front.parentNode.appendChild(front); // Bring to fore // force reflow L.Util.falseFn(bg.offsetWidth); this._animating = false; }, _clearBgBuffer: function () { var map = this._map; if (map && !map._animatingZoom && !map.touchZoom._zooming) { this._bgBuffer.innerHTML = ''; this._bgBuffer.style[L.DomUtil.TRANSFORM] = ''; } }, _prepareBgBuffer: function () { var front = this._tileContainer, bg = this._bgBuffer; // if foreground layer doesn't have many tiles but bg layer does, // keep the existing bg layer and just zoom it some more var bgLoaded = this._getLoadedTilesPercentage(bg), frontLoaded = this._getLoadedTilesPercentage(front); if (bg && bgLoaded > 0.5 && frontLoaded < 0.5) { front.style.visibility = 'hidden'; this._stopLoadingImages(front); return; } // prepare the buffer to become the front tile pane bg.style.visibility = 'hidden'; bg.style[L.DomUtil.TRANSFORM] = ''; // switch out the current layer to be the new bg layer (and vice-versa) this._tileContainer = bg; bg = this._bgBuffer = front; this._stopLoadingImages(bg); //prevent bg buffer from clearing right after zoom clearTimeout(this._clearBgBufferTimer); }, _getLoadedTilesPercentage: function (container) { var tiles = container.getElementsByTagName('img'), i, len, count = 0; for (i = 0, len = tiles.length; i < len; i++) { if (tiles[i].complete) { count++; } } return count / len; }, // stops loading all tiles in the background layer _stopLoadingImages: function (container) { var tiles = Array.prototype.slice.call(container.getElementsByTagName('img')), i, len, tile; for (i = 0, len = tiles.length; i < len; i++) { tile = tiles[i]; if (!tile.complete) { tile.onload = L.Util.falseFn; tile.onerror = L.Util.falseFn; tile.src = L.Util.emptyImageUrl; tile.parentNode.removeChild(tile); } } } }); /* * Provides L.Map with convenient shortcuts for using browser geolocation features. */ L.Map.include({ _defaultLocateOptions: { watch: false, setView: false, maxZoom: Infinity, timeout: 10000, maximumAge: 0, enableHighAccuracy: false }, locate: function (/*Object*/ options) { options = this._locateOptions = L.extend(this._defaultLocateOptions, options); if (!navigator.geolocation) { this._handleGeolocationError({ code: 0, message: 'Geolocation not supported.' }); return this; } var onResponse = L.bind(this._handleGeolocationResponse, this), onError = L.bind(this._handleGeolocationError, this); if (options.watch) { this._locationWatchId = navigator.geolocation.watchPosition(onResponse, onError, options); } else { navigator.geolocation.getCurrentPosition(onResponse, onError, options); } return this; }, stopLocate: function () { if (navigator.geolocation) { navigator.geolocation.clearWatch(this._locationWatchId); } if (this._locateOptions) { this._locateOptions.setView = false; } return this; }, _handleGeolocationError: function (error) { var c = error.code, message = error.message || (c === 1 ? 'permission denied' : (c === 2 ? 'position unavailable' : 'timeout')); if (this._locateOptions.setView && !this._loaded) { this.fitWorld(); } this.fire('locationerror', { code: c, message: 'Geolocation error: ' + message + '.' 
}); }, _handleGeolocationResponse: function (pos) { var lat = pos.coords.latitude, lng = pos.coords.longitude, latlng = new L.LatLng(lat, lng), latAccuracy = 180 * pos.coords.accuracy / 40075017, lngAccuracy = latAccuracy / Math.cos(L.LatLng.DEG_TO_RAD * lat), bounds = L.latLngBounds( [lat - latAccuracy, lng - lngAccuracy], [lat + latAccuracy, lng + lngAccuracy]), options = this._locateOptions; if (options.setView) { var zoom = Math.min(this.getBoundsZoom(bounds), options.maxZoom); this.setView(latlng, zoom); } var data = { latlng: latlng, bounds: bounds, }; for (var i in pos.coords) { if (typeof pos.coords[i] === 'number') { data[i] = pos.coords[i]; } } this.fire('locationfound', data); } }); }(window, document)); },{}],37:[function(require,module,exports){ /*! * mustache.js - Logic-less {{mustache}} templates with JavaScript * http://github.com/janl/mustache.js */ /*global define: false*/ (function (root, factory) { if (typeof exports === "object" && exports) { factory(exports); // CommonJS } else { var mustache = {}; factory(mustache); if (typeof define === "function" && define.amd) { define(mustache); // AMD } else { root.Mustache = mustache; // <script> } } }(this, function (mustache) { var whiteRe = /\s*/; var spaceRe = /\s+/; var nonSpaceRe = /\S/; var eqRe = /\s*=/; var curlyRe = /\s*\}/; var tagRe = /#|\^|\/|>|\{|&|=|!/; // Workaround for https://issues.apache.org/jira/browse/COUCHDB-577 // See https://github.com/janl/mustache.js/issues/189 var RegExp_test = RegExp.prototype.test; function testRegExp(re, string) { return RegExp_test.call(re, string); } function isWhitespace(string) { return !testRegExp(nonSpaceRe, string); } var Object_toString = Object.prototype.toString; var isArray = Array.isArray || function (object) { return Object_toString.call(object) === '[object Array]'; }; function isFunction(object) { return typeof object === 'function'; } function escapeRegExp(string) { return string.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g, "\\$&"); } var entityMap = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': '&quot;', "'": '&#39;', "/": '&#x2F;' }; function escapeHtml(string) { return String(string).replace(/[&<>"'\/]/g, function (s) { return entityMap[s]; }); } function Scanner(string) { this.string = string; this.tail = string; this.pos = 0; } /** * Returns `true` if the tail is empty (end of string). */ Scanner.prototype.eos = function () { return this.tail === ""; }; /** * Tries to match the given regular expression at the current position. * Returns the matched text if it can match, the empty string otherwise. */ Scanner.prototype.scan = function (re) { var match = this.tail.match(re); if (match && match.index === 0) { var string = match[0]; this.tail = this.tail.substring(string.length); this.pos += string.length; return string; } return ""; }; /** * Skips all text until the given regular expression can be matched. Returns * the skipped string, which is the entire tail if no match can be made. */ Scanner.prototype.scanUntil = function (re) { var index = this.tail.search(re), match; switch (index) { case -1: match = this.tail; this.tail = ""; break; case 0: match = ""; break; default: match = this.tail.substring(0, index); this.tail = this.tail.substring(index); } this.pos += match.length; return match; }; function Context(view, parent) { this.view = view == null ? {} : view; this.parent = parent; this._cache = { '.': this.view }; } Context.make = function (view) { return (view instanceof Context) ? 
view : new Context(view); }; Context.prototype.push = function (view) { return new Context(view, this); }; Context.prototype.lookup = function (name) { var value; if (name in this._cache) { value = this._cache[name]; } else { var context = this; while (context) { if (name.indexOf('.') > 0) { value = context.view; var names = name.split('.'), i = 0; while (value != null && i < names.length) { value = value[names[i++]]; } } else { value = context.view[name]; } if (value != null) break; context = context.parent; } this._cache[name] = value; } if (isFunction(value)) { value = value.call(this.view); } return value; }; function Writer() { this.clearCache(); } Writer.prototype.clearCache = function () { this._cache = {}; this._partialCache = {}; }; Writer.prototype.compile = function (template, tags) { var fn = this._cache[template]; if (!fn) { var tokens = mustache.parse(template, tags); fn = this._cache[template] = this.compileTokens(tokens, template); } return fn; }; Writer.prototype.compilePartial = function (name, template, tags) { var fn = this.compile(template, tags); this._partialCache[name] = fn; return fn; }; Writer.prototype.getPartial = function (name) { if (!(name in this._partialCache) && this._loadPartial) { this.compilePartial(name, this._loadPartial(name)); } return this._partialCache[name]; }; Writer.prototype.compileTokens = function (tokens, template) { var self = this; return function (view, partials) { if (partials) { if (isFunction(partials)) { self._loadPartial = partials; } else { for (var name in partials) { self.compilePartial(name, partials[name]); } } } return renderTokens(tokens, self, Context.make(view), template); }; }; Writer.prototype.render = function (template, view, partials) { return this.compile(template)(view, partials); }; /** * Low-level function that renders the given `tokens` using the given `writer` * and `context`. The `template` string is only needed for templates that use * higher-order sections to extract the portion of the original template that * was contained in that section. */ function renderTokens(tokens, writer, context, template) { var buffer = ''; // This function is used to render an artbitrary template // in the current context by higher-order functions. function subRender(template) { return writer.render(template, context); } var token, tokenValue, value; for (var i = 0, len = tokens.length; i < len; ++i) { token = tokens[i]; tokenValue = token[1]; switch (token[0]) { case '#': value = context.lookup(tokenValue); if (typeof value === 'object' || typeof value === 'string') { if (isArray(value)) { for (var j = 0, jlen = value.length; j < jlen; ++j) { buffer += renderTokens(token[4], writer, context.push(value[j]), template); } } else if (value) { buffer += renderTokens(token[4], writer, context.push(value), template); } } else if (isFunction(value)) { var text = template == null ? null : template.slice(token[3], token[5]); value = value.call(context.view, text, subRender); if (value != null) buffer += value; } else if (value) { buffer += renderTokens(token[4], writer, context, template); } break; case '^': value = context.lookup(tokenValue); // Use JavaScript's definition of falsy. Include empty arrays. 
// See https://github.com/janl/mustache.js/issues/186 if (!value || (isArray(value) && value.length === 0)) { buffer += renderTokens(token[4], writer, context, template); } break; case '>': value = writer.getPartial(tokenValue); if (isFunction(value)) buffer += value(context); break; case '&': value = context.lookup(tokenValue); if (value != null) buffer += value; break; case 'name': value = context.lookup(tokenValue); if (value != null) buffer += mustache.escape(value); break; case 'text': buffer += tokenValue; break; } } return buffer; } /** * Forms the given array of `tokens` into a nested tree structure where * tokens that represent a section have two additional items: 1) an array of * all tokens that appear in that section and 2) the index in the original * template that represents the end of that section. */ function nestTokens(tokens) { var tree = []; var collector = tree; var sections = []; var token; for (var i = 0, len = tokens.length; i < len; ++i) { token = tokens[i]; switch (token[0]) { case '#': case '^': sections.push(token); collector.push(token); collector = token[4] = []; break; case '/': var section = sections.pop(); section[5] = token[2]; collector = sections.length > 0 ? sections[sections.length - 1][4] : tree; break; default: collector.push(token); } } return tree; } /** * Combines the values of consecutive text tokens in the given `tokens` array * to a single token. */ function squashTokens(tokens) { var squashedTokens = []; var token, lastToken; for (var i = 0, len = tokens.length; i < len; ++i) { token = tokens[i]; if (token) { if (token[0] === 'text' && lastToken && lastToken[0] === 'text') { lastToken[1] += token[1]; lastToken[3] = token[3]; } else { lastToken = token; squashedTokens.push(token); } } } return squashedTokens; } function escapeTags(tags) { return [ new RegExp(escapeRegExp(tags[0]) + "\\s*"), new RegExp("\\s*" + escapeRegExp(tags[1])) ]; } /** * Breaks up the given `template` string into a tree of token objects. If * `tags` is given here it must be an array with two string values: the * opening and closing tags used in the template (e.g. ["<%", "%>"]). Of * course, the default is to use mustaches (i.e. Mustache.tags). */ function parseTemplate(template, tags) { template = template || ''; tags = tags || mustache.tags; if (typeof tags === 'string') tags = tags.split(spaceRe); if (tags.length !== 2) throw new Error('Invalid tags: ' + tags.join(', ')); var tagRes = escapeTags(tags); var scanner = new Scanner(template); var sections = []; // Stack to hold section tokens var tokens = []; // Buffer to hold the tokens var spaces = []; // Indices of whitespace tokens on the current line var hasTag = false; // Is there a {{tag}} on the current line? var nonSpace = false; // Is there a non-space char on the current line? // Strips all whitespace tokens array for the current line // if there was a {{#tag}} on it and otherwise only space. function stripSpace() { if (hasTag && !nonSpace) { while (spaces.length) { delete tokens[spaces.pop()]; } } else { spaces = []; } hasTag = false; nonSpace = false; } var start, type, value, chr, token, openSection; while (!scanner.eos()) { start = scanner.pos; // Match any text between tags. value = scanner.scanUntil(tagRes[0]); if (value) { for (var i = 0, len = value.length; i < len; ++i) { chr = value.charAt(i); if (isWhitespace(chr)) { spaces.push(tokens.length); } else { nonSpace = true; } tokens.push(['text', chr, start, start + 1]); start += 1; // Check for whitespace on the current line. 
if (chr == '\n') stripSpace(); } } // Match the opening tag. if (!scanner.scan(tagRes[0])) break; hasTag = true; // Get the tag type. type = scanner.scan(tagRe) || 'name'; scanner.scan(whiteRe); // Get the tag value. if (type === '=') { value = scanner.scanUntil(eqRe); scanner.scan(eqRe); scanner.scanUntil(tagRes[1]); } else if (type === '{') { value = scanner.scanUntil(new RegExp('\\s*' + escapeRegExp('}' + tags[1]))); scanner.scan(curlyRe); scanner.scanUntil(tagRes[1]); type = '&'; } else { value = scanner.scanUntil(tagRes[1]); } // Match the closing tag. if (!scanner.scan(tagRes[1])) throw new Error('Unclosed tag at ' + scanner.pos); token = [type, value, start, scanner.pos]; tokens.push(token); if (type === '#' || type === '^') { sections.push(token); } else if (type === '/') { // Check section nesting. openSection = sections.pop(); if (!openSection) { throw new Error('Unopened section "' + value + '" at ' + start); } if (openSection[1] !== value) { throw new Error('Unclosed section "' + openSection[1] + '" at ' + start); } } else if (type === 'name' || type === '{' || type === '&') { nonSpace = true; } else if (type === '=') { // Set the tags for the next time around. tags = value.split(spaceRe); if (tags.length !== 2) { throw new Error('Invalid tags at ' + start + ': ' + tags.join(', ')); } tagRes = escapeTags(tags); } } // Make sure there are no open sections when we're done. openSection = sections.pop(); if (openSection) { throw new Error('Unclosed section "' + openSection[1] + '" at ' + scanner.pos); } return nestTokens(squashTokens(tokens)); } mustache.name = "mustache.js"; mustache.version = "0.7.3"; mustache.tags = ["{{", "}}"]; mustache.Scanner = Scanner; mustache.Context = Context; mustache.Writer = Writer; mustache.parse = parseTemplate; // Export the escaping function so that the user may override it. // See https://github.com/janl/mustache.js/issues/244 mustache.escape = escapeHtml; // All Mustache.* functions use this writer. var defaultWriter = new Writer(); /** * Clears all cached templates and partials in the default writer. */ mustache.clearCache = function () { return defaultWriter.clearCache(); }; /** * Compiles the given `template` to a reusable function using the default * writer. */ mustache.compile = function (template, tags) { return defaultWriter.compile(template, tags); }; /** * Compiles the partial with the given `name` and `template` to a reusable * function using the default writer. */ mustache.compilePartial = function (name, template, tags) { return defaultWriter.compilePartial(name, template, tags); }; /** * Compiles the given array of tokens (the output of a parse) to a reusable * function using the default writer. */ mustache.compileTokens = function (tokens, template) { return defaultWriter.compileTokens(tokens, template); }; /** * Renders the `template` with the given `view` and `partials` using the * default writer. */ mustache.render = function (template, view, partials) { return defaultWriter.render(template, view, partials); }; // This is here for backwards compatibility with 0.4.x. 
mustache.to_html = function (template, view, partials, send) { var result = mustache.render(template, view, partials); if (isFunction(send)) { send(result); } else { return result; } }; })); },{}],38:[function(require,module,exports){ module.exports={ "author": { "name": "MapBox" }, "name": "mapbox.js", "description": "mapbox javascript api", "version": "1.3.1", "homepage": "http://mapbox.com/", "repository": { "type": "git", "url": "git://github.com/mapbox/mapbox.js.git" }, "main": "index.js", "dependencies": { "leaflet": "0.6.2", "mustache": "~0.7.2", "corslite": "0.0.3", "json3": "~3.2.5" }, "scripts": { "test": "mocha-phantomjs test/index.html" }, "devDependencies": { "leaflet-hash": "git://github.com/mlevans/leaflet-hash.git#b039a3aa4e2492a5c7448075172ac26769e601d6", "leaflet-fullscreen": "0.0.0", "uglify-js": "~2.2.5", "mocha": "~1.9", "expect.js": "~0.2.0", "sinon": "~1.7.3", "mocha-phantomjs": "~1.1.1", "happen": "~0.1.2", "browserify": "~2.22.0" }, "optionalDependencies": {}, "engines": { "node": "*" }, "bugs": { "url": "https://github.com/mapbox/mapbox.js/issues" }, "_id": "[email protected]", "dist": { "shasum": "a6d144286157eecf7273b202782b31a695450f6a", "tarball": "http://registry.npmjs.org/mapbox.js/-/mapbox.js-1.3.1.tgz" }, "_from": "mapbox.js@>=1.3.1 <1.4.0", "_npmVersion": "1.2.32", "_npmUser": { "name": "tmcw", "email": "[email protected]" }, "maintainers": [ { "name": "tmcw", "email": "[email protected]" }, { "name": "tristen", "email": "[email protected]" }, { "name": "ansis", "email": "[email protected]" }, { "name": "yhahn", "email": "[email protected]" }, { "name": "willwhite", "email": "[email protected]" }, { "name": "jfirebaugh", "email": "[email protected]" }, { "name": "heyitsgarrett", "email": "[email protected]" } ], "directories": {}, "_shasum": "a6d144286157eecf7273b202782b31a695450f6a", "_resolved": "https://registry.npmjs.org/mapbox.js/-/mapbox.js-1.3.1.tgz" } },{}],39:[function(require,module,exports){ 'use strict'; module.exports = { HTTP_URLS: [ 'http://a.tiles.mapbox.com/v3/', 'http://b.tiles.mapbox.com/v3/', 'http://c.tiles.mapbox.com/v3/', 'http://d.tiles.mapbox.com/v3/'], FORCE_HTTPS: false, HTTPS_URLS: [ 'https://a.tiles.mapbox.com/v3/', 'https://b.tiles.mapbox.com/v3/', 'https://c.tiles.mapbox.com/v3/', 'https://d.tiles.mapbox.com/v3/'] }; },{}],40:[function(require,module,exports){ 'use strict'; var util = require('./util'), urlhelper = require('./url'), request = require('./request'); // Low-level geocoding interface - wraps specific API calls and their // return values. 
module.exports = function(_) { var geocoder = {}, url; geocoder.getURL = function(_) { return url; }; geocoder.setURL = function(_) { url = urlhelper.jsonify(_); return geocoder; }; geocoder.setID = function(_) { util.strict(_, 'string'); geocoder.setURL(urlhelper.base() + _ + '/geocode/{query}.json'); return geocoder; }; geocoder.setTileJSON = function(_) { util.strict(_, 'object'); geocoder.setURL(_.geocoder); return geocoder; }; geocoder.queryURL = function(_) { util.strict(_, 'string'); if (!geocoder.getURL()) throw new Error('Geocoding map ID not set'); return L.Util.template(geocoder.getURL(), { query: encodeURIComponent(_) }); }; geocoder.query = function(_, callback) { util.strict(_, 'string'); util.strict(callback, 'function'); request(geocoder.queryURL(_), function(err, json) { if (json && json.results && json.results.length) { var res = { results: json.results, latlng: [json.results[0][0].lat, json.results[0][0].lon] }; if (json.results[0][0].bounds !== undefined) { res.bounds = json.results[0][0].bounds; res.lbounds = util.lbounds(res.bounds); } callback(null, res); } else callback(err || true); }); return geocoder; }; // a reverse geocode: // // geocoder.reverseQuery([80, 20]) geocoder.reverseQuery = function(_, callback) { var q = ''; function norm(x) { if (x.lat !== undefined && x.lng !== undefined) return x.lng + ',' + x.lat; else if (x.lat !== undefined && x.lon !== undefined) return x.lon + ',' + x.lat; else return x[0] + ',' + x[1]; } if (_.length && _[0].length) { for (var i = 0, pts = []; i < _.length; i++) pts.push(norm(_[i])); q = pts.join(';'); } else q = norm(_); request(geocoder.queryURL(q), function(err, json) { callback(err, json); }); return geocoder; }; if (typeof _ === 'string') { if (_.indexOf('/') == -1) geocoder.setID(_); else geocoder.setURL(_); } else if (typeof _ === 'object') geocoder.setTileJSON(_); return geocoder; }; },{"./request":50,"./url":54,"./util":55}],41:[function(require,module,exports){ 'use strict'; var geocoder = require('./geocoder'); var GeocoderControl = L.Control.extend({ includes: L.Mixin.Events, options: { position: 'topleft' }, initialize: function(_) { this.geocoder = geocoder(_); }, setURL: function(_) { this.geocoder.setURL(_); return this; }, getURL: function() { return this.geocoder.getURL(); }, setID: function(_) { this.geocoder.setID(_); return this; }, setTileJSON: function(_) { this.geocoder.setTileJSON(_); return this; }, _toggle: function(e) { if (e) L.DomEvent.stop(e); if (L.DomUtil.hasClass(this._container, 'active')) { L.DomUtil.removeClass(this._container, 'active'); this._results.innerHTML = ''; this._input.blur(); } else { L.DomUtil.addClass(this._container, 'active'); this._input.focus(); this._input.select(); } }, _closeIfOpen: function(e) { if (L.DomUtil.hasClass(this._container, 'active')) { L.DomUtil.removeClass(this._container, 'active'); this._results.innerHTML = ''; this._input.blur(); } }, onAdd: function(map) { var container = L.DomUtil.create('div', 'leaflet-control-mapbox-geocoder leaflet-bar leaflet-control'), link = L.DomUtil.create('a', 'leaflet-control-mapbox-geocoder-toggle mapbox-icon mapbox-icon-geocoder', container), results = L.DomUtil.create('div', 'leaflet-control-mapbox-geocoder-results', container), wrap = L.DomUtil.create('div', 'leaflet-control-mapbox-geocoder-wrap', container), form = L.DomUtil.create('form', 'leaflet-control-mapbox-geocoder-form', wrap), input = L.DomUtil.create('input', '', form); link.href = '#'; link.innerHTML = '&nbsp;'; input.type = 'text'; 
input.setAttribute('placeholder', 'Search'); L.DomEvent.addListener(link, 'click', this._toggle, this); L.DomEvent.addListener(form, 'submit', this._geocode, this); L.DomEvent.disableClickPropagation(container); this._map = map; this._results = results; this._input = input; this._form = form; this._map.on('click', this._closeIfOpen, this); return container; }, _geocode: function(e) { L.DomEvent.preventDefault(e); L.DomUtil.addClass(this._container, 'searching'); var map = this._map; this.geocoder.query(this._input.value, L.bind(function(err, resp) { L.DomUtil.removeClass(this._container, 'searching'); if (err || !resp || !resp.results || !resp.results.length) { this.fire('error', {error: err}); } else { this._results.innerHTML = ''; if (resp.results.length === 1 && resp.lbounds) { this._map.fitBounds(resp.lbounds); this._closeIfOpen(); } else { for (var i = 0, l = Math.min(resp.results.length, 5); i < l; i++) { var name = []; for (var j = 0; j < resp.results[i].length; j++) { resp.results[i][j].name && name.push(resp.results[i][j].name); } if (!name.length) continue; var r = L.DomUtil.create('a', '', this._results); r.innerHTML = name.join(', '); r.href = '#'; (function(result) { L.DomEvent.addListener(r, 'click', function(e) { var _ = result[0].bounds; map.fitBounds(L.latLngBounds([[_[1], _[0]], [_[3], _[2]]])); L.DomEvent.stop(e); }); })(resp.results[i]); } if (resp.results.length > 5) { var outof = L.DomUtil.create('span', '', this._results); outof.innerHTML = 'Top 5 of ' + resp.results.length + ' results'; } } this.fire('found', resp); } }, this)); } }); module.exports = function(options) { return new GeocoderControl(options); }; },{"./geocoder":40}],42:[function(require,module,exports){ 'use strict'; function utfDecode(c) { if (c >= 93) c--; if (c >= 35) c--; return c - 32; } module.exports = function(data) { return function(x, y) { if (!data) return; var idx = utfDecode(data.grid[y].charCodeAt(x)), key = data.keys[idx]; return data.data[key]; }; }; },{}],43:[function(require,module,exports){ 'use strict'; var util = require('./util'), Mustache = require('mustache'); var GridControl = L.Control.extend({ options: { pinnable: true, follow: false, sanitizer: require('./sanitize'), touchTeaser: true, location: true }, _currentContent: '', // pinned means that this control is on a feature and the user has likely // clicked. 
pinned will not become false unless the user clicks off // of the feature onto another or clicks x _pinned: false, initialize: function(_, options) { L.Util.setOptions(this, options); util.strict_instance(_, L.Class, 'L.mapbox.gridLayer'); this._layer = _; }, setTemplate: function(template) { this.options.template = template; }, _template: function(format, data) { if (!data) return; var template = this.options.template || this._layer.getTileJSON().template; if (template) { var d = {}; d['__' + format + '__'] = true; return this.options.sanitizer( Mustache.to_html(template, L.extend(d, data))); } }, // change the content of the tooltip HTML if it has changed, otherwise // noop _show: function(content, o) { if (content === this._currentContent) return; this._currentContent = content; if (this.options.follow) { this._popup.setContent(content) .setLatLng(o.latLng); if (this._map._popup !== this._popup) this._popup.openOn(this._map); } else { this._container.style.display = 'block'; this._contentWrapper.innerHTML = content; } }, _hide: function() { this._pinned = false; this._currentContent = ''; this._map.closePopup(); this._container.style.display = 'none'; this._contentWrapper.innerHTML = ''; L.DomUtil.removeClass(this._container, 'closable'); }, _mouseover: function(o) { if (o.data) { L.DomUtil.addClass(this._map._container, 'map-clickable'); } else { L.DomUtil.removeClass(this._map._container, 'map-clickable'); } if (this._pinned) return; var content = this._template('teaser', o.data); if (content) { this._show(content, o); } else { this._hide(); } }, _mousemove: function(o) { if (this._pinned) return; if (!this.options.follow) return; this._popup.setLatLng(o.latLng); }, _navigateTo: function(url) { window.top.location.href = url; }, _click: function(o) { var location_formatted = this._template('location', o.data); if (this.options.location && location_formatted && location_formatted.search(/^https?:/) === 0) { return this._navigateTo(this._template('location', o.data)); } if (!this.options.pinnable) return; var content = this._template('full', o.data); if (!content && this.options.touchTeaser && L.Browser.touch) { content = this._template('teaser', o.data); } if (content) { L.DomUtil.addClass(this._container, 'closable'); this._pinned = true; this._show(content, o); } else if (this._pinned) { L.DomUtil.removeClass(this._container, 'closable'); this._pinned = false; this._hide(); } }, _onPopupClose: function() { this._currentContent = null; this._pinned = false; }, _createClosebutton: function(container, fn) { var link = L.DomUtil.create('a', 'close', container); link.innerHTML = 'close'; link.href = '#'; link.title = 'close'; L.DomEvent .on(link, 'click', L.DomEvent.stopPropagation) .on(link, 'mousedown', L.DomEvent.stopPropagation) .on(link, 'dblclick', L.DomEvent.stopPropagation) .on(link, 'click', L.DomEvent.preventDefault) .on(link, 'click', fn, this); return link; }, onAdd: function(map) { this._map = map; var className = 'leaflet-control-grid map-tooltip', container = L.DomUtil.create('div', className), contentWrapper = L.DomUtil.create('div', 'map-tooltip-content'); // hide the container element initially container.style.display = 'none'; this._createClosebutton(container, this._hide); container.appendChild(contentWrapper); this._contentWrapper = contentWrapper; this._popup = new L.Popup({ autoPan: false, closeOnClick: false }); map.on('popupclose', this._onPopupClose, this); L.DomEvent .disableClickPropagation(container) // allow people to scroll tooltips with mousewheel 
.addListener(container, 'mousewheel', L.DomEvent.stopPropagation); this._layer .on('mouseover', this._mouseover, this) .on('mousemove', this._mousemove, this) .on('click', this._click, this); return container; }, onRemove: function (map) { map.off('popupclose', this._onPopupClose, this); this._layer .off('mouseover', this._mouseover, this) .off('mousemove', this._mousemove, this) .off('click', this._click, this); } }); module.exports = function(_, options) { return new GridControl(_, options); }; },{"./sanitize":51,"./util":55,"mustache":37}],44:[function(require,module,exports){ 'use strict'; var util = require('./util'), url = require('./url'), request = require('./request'), grid = require('./grid'); // forked from danzel/L.UTFGrid var GridLayer = L.Class.extend({ includes: [L.Mixin.Events, require('./load_tilejson')], options: { template: function() { return ''; } }, _mouseOn: null, _tilejson: {}, _cache: {}, initialize: function(_, options) { L.Util.setOptions(this, options); this._loadTileJSON(_); }, _setTileJSON: function(json) { util.strict(json, 'object'); L.extend(this.options, { grids: json.grids, minZoom: json.minzoom, maxZoom: json.maxzoom, bounds: json.bounds && util.lbounds(json.bounds) }); this._tilejson = json; this._cache = {}; this._update(); return this; }, getTileJSON: function() { return this._tilejson; }, active: function() { return !!(this._map && this.options.grids && this.options.grids.length); }, addTo: function (map) { map.addLayer(this); return this; }, onAdd: function(map) { this._map = map; this._update(); this._map .on('click', this._click, this) .on('mousemove', this._move, this) .on('moveend', this._update, this); }, onRemove: function() { this._map .off('click', this._click, this) .off('mousemove', this._move, this) .off('moveend', this._update, this); }, getData: function(latlng, callback) { if (!this.active()) return; var map = this._map, point = map.project(latlng), tileSize = 256, resolution = 4, x = Math.floor(point.x / tileSize), y = Math.floor(point.y / tileSize), max = map.options.crs.scale(map.getZoom()) / tileSize; x = (x + max) % max; y = (y + max) % max; this._getTile(map.getZoom(), x, y, function(grid) { var gridX = Math.floor((point.x - (x * tileSize)) / resolution), gridY = Math.floor((point.y - (y * tileSize)) / resolution); callback(grid(gridX, gridY)); }); return this; }, _click: function(e) { this.getData(e.latlng, L.bind(function(data) { this.fire('click', { latLng: e.latlng, data: data }); }, this)); }, _move: function(e) { this.getData(e.latlng, L.bind(function(data) { if (data !== this._mouseOn) { if (this._mouseOn) { this.fire('mouseout', { latLng: e.latlng, data: this._mouseOn }); } this.fire('mouseover', { latLng: e.latlng, data: data }); this._mouseOn = data; } else { this.fire('mousemove', { latLng: e.latlng, data: data }); } }, this)); }, _getTileURL: function(tilePoint) { var urls = this.options.grids, index = (tilePoint.x + tilePoint.y) % urls.length, url = urls[index]; return L.Util.template(url, tilePoint); }, // Load up all required json grid files _update: function() { if (!this.active()) return; var bounds = this._map.getPixelBounds(), z = this._map.getZoom(), tileSize = 256; if (z > this.options.maxZoom || z < this.options.minZoom) return; var nwTilePoint = new L.Point( Math.floor(bounds.min.x / tileSize), Math.floor(bounds.min.y / tileSize)), seTilePoint = new L.Point( Math.floor(bounds.max.x / tileSize), Math.floor(bounds.max.y / tileSize)), max = this._map.options.crs.scale(z) / tileSize; for (var x = nwTilePoint.x; 
x <= seTilePoint.x; x++) { for (var y = nwTilePoint.y; y <= seTilePoint.y; y++) { // x wrapped var xw = (x + max) % max, yw = (y + max) % max; this._getTile(z, xw, yw); } } }, _getTile: function(z, x, y, callback) { var key = z + '_' + x + '_' + y, tilePoint = L.point(x, y); tilePoint.z = z; if (!this._tileShouldBeLoaded(tilePoint)) { return; } if (key in this._cache) { if (!callback) return; if (typeof this._cache[key] === 'function') { callback(this._cache[key]); // Already loaded } else { this._cache[key].push(callback); // Pending } return; } this._cache[key] = []; if (callback) { this._cache[key].push(callback); } request(this._getTileURL(tilePoint), L.bind(function(err, json) { var callbacks = this._cache[key]; this._cache[key] = grid(json); for (var i = 0; i < callbacks.length; ++i) { callbacks[i](this._cache[key]); } }, this)); }, _tileShouldBeLoaded: function(tilePoint) { if (tilePoint.z > this.options.maxZoom || tilePoint.z < this.options.minZoom) { return false; } if (this.options.bounds) { var tileSize = 256, nwPoint = tilePoint.multiplyBy(tileSize), sePoint = nwPoint.add(new L.Point(tileSize, tileSize)), nw = this._map.unproject(nwPoint), se = this._map.unproject(sePoint), bounds = new L.LatLngBounds([nw, se]); if (!this.options.bounds.intersects(bounds)) { return false; } } return true; } }); module.exports = function(_, options) { return new GridLayer(_, options); }; },{"./grid":42,"./load_tilejson":46,"./request":50,"./url":54,"./util":55}],45:[function(require,module,exports){ 'use strict'; var LegendControl = L.Control.extend({ options: { position: 'bottomright', sanitizer: require('./sanitize') }, initialize: function(options) { L.setOptions(this, options); this._legends = {}; }, onAdd: function(map) { this._container = L.DomUtil.create('div', 'map-legends wax-legends'); L.DomEvent.disableClickPropagation(this._container); this._update(); return this._container; }, addLegend: function(text) { if (!text) { return this; } if (!this._legends[text]) { this._legends[text] = 0; } this._legends[text]++; return this._update(); }, removeLegend: function(text) { if (!text) { return this; } if (this._legends[text]) this._legends[text]--; return this._update(); }, _update: function() { if (!this._map) { return this; } this._container.innerHTML = ''; var hide = 'none'; for (var i in this._legends) { if (this._legends.hasOwnProperty(i) && this._legends[i]) { var div = this._container.appendChild(document.createElement('div')); div.className = 'map-legend wax-legend'; div.innerHTML = this.options.sanitizer(i); hide = 'block'; } } // hide the control entirely unless there is at least one legend; // otherwise there will be a small grey blemish on the map. 
this._container.style.display = hide; return this; } }); module.exports = function(options) { return new LegendControl(options); }; },{"./sanitize":51}],46:[function(require,module,exports){ 'use strict'; var request = require('./request'), url = require('./url'), util = require('./util'); module.exports = { _loadTileJSON: function(_) { if (typeof _ === 'string') { if (_.indexOf('/') == -1) { _ = url.base() + _ + '.json'; } request(url.secureFlag(_), L.bind(function(err, json) { if (err) { util.log('could not load TileJSON at ' + _); this.fire('error', {error: err}); } else if (json) { this._setTileJSON(json); this.fire('ready'); } }, this)); } else if (_ && typeof _ === 'object') { this._setTileJSON(_); } } }; },{"./request":50,"./url":54,"./util":55}],47:[function(require,module,exports){ 'use strict'; var util = require('./util'), tileLayer = require('./tile_layer'), markerLayer = require('./marker_layer'), gridLayer = require('./grid_layer'), gridControl = require('./grid_control'), legendControl = require('./legend_control'); var Map = L.Map.extend({ includes: [require('./load_tilejson')], options: { tileLayer: {}, markerLayer: {}, gridLayer: {}, legendControl: {}, gridControl: {} }, _tilejson: {}, initialize: function(element, _, options) { L.Map.prototype.initialize.call(this, element, options); // disable the default 'Powered by Leaflet' text if (this.attributionControl) this.attributionControl.setPrefix(''); if (this.options.tileLayer) { this.tileLayer = tileLayer(undefined, this.options.tileLayer); this.addLayer(this.tileLayer); } if (this.options.markerLayer) { this.markerLayer = markerLayer(undefined, this.options.markerLayer); this.addLayer(this.markerLayer); } if (this.options.gridLayer) { this.gridLayer = gridLayer(undefined, this.options.gridLayer); this.addLayer(this.gridLayer); } if (this.options.gridLayer && this.options.gridControl) { this.gridControl = gridControl(this.gridLayer, this.options.gridControl); this.addControl(this.gridControl); } if (this.options.legendControl) { this.legendControl = legendControl(this.options.legendControl); this.addControl(this.legendControl); } this._loadTileJSON(_); }, // Update certain properties on 'ready' event addLayer: function(layer) { if ('on' in layer) { layer.on('ready', L.bind(function() { this._updateLayer(layer); }, this)); } return L.Map.prototype.addLayer.call(this, layer); }, // use a javascript object of tilejson data to configure this layer _setTileJSON: function(_) { this._tilejson = _; this._initialize(_); return this; }, getTileJSON: function() { return this._tilejson; }, _initialize: function(json) { if (this.tileLayer) { this.tileLayer._setTileJSON(json); this._updateLayer(this.tileLayer); } if (this.markerLayer && !this.markerLayer.getGeoJSON() && json.data && json.data[0]) { this.markerLayer.loadURL(json.data[0]); } if (this.gridLayer) { this.gridLayer._setTileJSON(json); this._updateLayer(this.gridLayer); } if (this.legendControl && json.legend) { this.legendControl.addLegend(json.legend); } if (!this._loaded) { var zoom = json.center[2], center = L.latLng(json.center[1], json.center[0]); this.setView(center, zoom); } }, _updateLayer: function(layer) { if (!layer.options) return; if (this.attributionControl && this._loaded) { this.attributionControl.addAttribution(layer.options.attribution); } if (!(L.stamp(layer) in this._zoomBoundLayers) && (layer.options.maxZoom || layer.options.minZoom)) { this._zoomBoundLayers[L.stamp(layer)] = layer; } this._updateZoomLevels(); } }); module.exports = function(element, _, 
options) { return new Map(element, _, options); }; },{"./grid_control":43,"./grid_layer":44,"./legend_control":45,"./load_tilejson":46,"./marker_layer":49,"./tile_layer":53,"./util":55}],48:[function(require,module,exports){ 'use strict'; var url = require('./url'), sanitize = require('./sanitize'); // mapbox-related markers functionality // provide an icon from mapbox's simple-style spec and hosted markers // service function icon(fp) { fp = fp || {}; var sizes = { small: [20, 50], medium: [30, 70], large: [35, 90] }, size = fp['marker-size'] || 'medium', symbol = (fp['marker-symbol']) ? '-' + fp['marker-symbol'] : '', color = (fp['marker-color'] || '7e7e7e').replace('#', ''); return L.icon({ iconUrl: url.base() + 'marker/' + 'pin-' + size.charAt(0) + symbol + '+' + color + // detect and use retina markers, which are x2 resolution ((L.Browser.retina) ? '@2x' : '') + '.png', iconSize: sizes[size], iconAnchor: [sizes[size][0] / 2, sizes[size][1] / 2], popupAnchor: [0, -sizes[size][1] / 2] }); } // a factory that provides markers for Leaflet from MapBox's // [simple-style specification](https://github.com/mapbox/simplestyle-spec) // and [Markers API](http://mapbox.com/developers/api/#markers). function style(f, latlon) { return L.marker(latlon, { icon: icon(f.properties), title: f.properties.title }); } function createPopup(f, sanitizer) { if (!f || !f.properties) return ''; var popup = ''; if (f.properties.title) { popup += '<div class="marker-title">' + f.properties.title + '</div>'; } if (f.properties.description) { popup += '<div class="marker-description">' + f.properties.description + '</div>'; } return (sanitizer || sanitize)(popup); } module.exports = { icon: icon, style: style, createPopup: createPopup }; },{"./sanitize":51,"./url":54}],49:[function(require,module,exports){ 'use strict'; var util = require('./util'); var urlhelper = require('./url'); var request = require('./request'); var marker = require('./marker'); // # markerLayer // // A layer of markers, loaded from MapBox or else. Adds the ability // to reset markers, filter them, and load them from a GeoJSON URL. var MarkerLayer = L.FeatureGroup.extend({ options: { filter: function() { return true; }, sanitizer: require('./sanitize') }, initialize: function(_, options) { L.setOptions(this, options); this._layers = {}; if (typeof _ === 'string') { util.idUrl(_, this); // javascript object of TileJSON data } else if (_ && typeof _ === 'object') { this.setGeoJSON(_); } }, setGeoJSON: function(_) { this._geojson = _; this.clearLayers(); this._initialize(_); }, getGeoJSON: function() { return this._geojson; }, loadURL: function(url) { url = urlhelper.jsonify(url); request(url, L.bind(function(err, json) { if (err) { util.log('could not load markers at ' + url); this.fire('error', {error: err}); } else if (json) { this.setGeoJSON(json); this.fire('ready'); } }, this)); return this; }, loadID: function(id) { return this.loadURL(urlhelper.base() + id + '/markers.geojson'); }, setFilter: function(_) { this.options.filter = _; if (this._geojson) { this.clearLayers(); this._initialize(this._geojson); } return this; }, getFilter: function() { return this.options.filter; }, _initialize: function(json) { var features = L.Util.isArray(json) ? 
json : json.features, i, len; if (features) { for (i = 0, len = features.length; i < len; i++) { // Only add this if geometry or geometries are set and not null if (features[i].geometries || features[i].geometry || features[i].features) { this._initialize(features[i]); } } } else if (this.options.filter(json)) { var layer = L.GeoJSON.geometryToLayer(json, marker.style), popupHtml = marker.createPopup(json, this.options.sanitizer); layer.feature = json; if (popupHtml) { layer.bindPopup(popupHtml, { closeButton: false }); } this.addLayer(layer); } } }); module.exports = function(_, options) { return new MarkerLayer(_, options); }; },{"./marker":48,"./request":50,"./sanitize":51,"./url":54,"./util":55}],50:[function(require,module,exports){ 'use strict'; var corslite = require('corslite'), JSON3 = require('json3'), strict = require('./util').strict; module.exports = function(url, callback) { strict(url, 'string'); strict(callback, 'function'); corslite(url, function(err, resp) { if (!err && resp) { // hardcoded grid response if (resp.responseText[0] == 'g') { resp = JSON3.parse(resp.responseText .substring(5, resp.responseText.length - 2)); } else { resp = JSON3.parse(resp.responseText); } } callback(err, resp); }); }; },{"./util":55,"corslite":34,"json3":35}],51:[function(require,module,exports){ 'use strict'; var html_sanitize = require('../ext/sanitizer/html-sanitizer-bundle.js'); // https://bugzilla.mozilla.org/show_bug.cgi?id=255107 function cleanUrl(url) { if (/^https?/.test(url.getScheme())) return url.toString(); if ('data' == url.getScheme() && /^image/.test(url.getPath())) { return url.toString(); } } function cleanId(id) { return id; } module.exports = function(_) { if (!_) return ''; return html_sanitize(_, cleanUrl, cleanId); }; },{"../ext/sanitizer/html-sanitizer-bundle.js":30}],52:[function(require,module,exports){ 'use strict'; var ShareControl = L.Control.extend({ includes: [require('./load_tilejson')], options: { position: 'topleft', url: '' }, initialize: function(_, options) { L.setOptions(this, options); this._loadTileJSON(_); }, _setTileJSON: function(json) { this._tilejson = json; }, onAdd: function(map) { this._map = map; var container = L.DomUtil.create('div', 'leaflet-control-mapbox-share leaflet-bar'); var link = L.DomUtil.create('a', 'mapbox-share mapbox-icon mapbox-icon-share', container); link.href = '#'; L.DomEvent.addListener(link, 'click', this._share, this); L.DomEvent.disableClickPropagation(container); // Close any open popups this._map.on('mousedown', this._clickOut, this); return container; }, _clickOut: function(e) { if (this._popup) { this._map.removeLayer(this._popup); this._popup = null; return; } }, _share: function(e) { L.DomEvent.stop(e); var tilejson = this._tilejson || this._map._tilejson || {}, twitter = 'http://twitter.com/intent/tweet?status=' + encodeURIComponent(tilejson.name + '\n' + (tilejson.webpage || window.location)), facebook = 'https://www.facebook.com/sharer.php?u=' + encodeURIComponent(this.options.url || tilejson.webpage || window.location) + '&t=' + encodeURIComponent(tilejson.name), share = "<a class='leaflet-popup-close-button' href='#close'>×</a>" + ("<h3>Share this map</h3>" + "<div class='mapbox-share-buttons'><a class='mapbox-share-facebook mapbox-icon mapbox-icon-facebook' target='_blank' href='{{facebook}}'>Facebook</a>" + "<a class='mapbox-share-twitter mapbox-icon mapbox-icon-twitter' target='_blank' href='{{twitter}}'>Twitter</a></div>") .replace('{{twitter}}', twitter) .replace('{{facebook}}', facebook) + ("<h3>Get 
the embed code</h3>" + "<small>Copy and paste this HTML into your website or blog.</small>") + "<textarea rows=4>{{value}}</textarea>" .replace('{{value}}', ("&lt;iframe width='500' height='300' frameBorder='0' src='{{embed}}'&gt;&lt;/iframe&gt;" .replace('{{embed}}', tilejson.embed || window.location))); this._popup = L.marker(this._map.getCenter(), { zIndexOffset: 10000, icon: L.divIcon({ className: 'mapbox-share-popup', iconSize: L.point(360, 240), iconAnchor: L.point(180, 120), html: share }) }) .on('mousedown', function(e) { L.DomEvent.stopPropagation(e.originalEvent); }) .on('click', clickPopup, this).addTo(this._map); function clickPopup(e) { if (e.originalEvent && e.originalEvent.target.nodeName === 'TEXTAREA') { var target = e.originalEvent.target; target.focus(); target.select(); } else if (e.originalEvent && e.originalEvent.target.getAttribute('href') === '#close') { this._clickOut(e); } L.DomEvent.stop(e.originalEvent); } } }); module.exports = function(_, options) { return new ShareControl(_, options); }; },{"./load_tilejson":46}],53:[function(require,module,exports){ 'use strict'; var util = require('./util'), url = require('./url'); var TileLayer = L.TileLayer.extend({ includes: [require('./load_tilejson')], options: { format: 'png' }, // http://mapbox.com/developers/api/#image_quality formats: [ 'png', // PNG 'png32', 'png64', 'png128', 'png256', // JPG 'jpg70', 'jpg80', 'jpg90'], initialize: function(_, options) { L.TileLayer.prototype.initialize.call(this, undefined, options); this._tilejson = {}; if (options && options.detectRetina && L.Browser.retina && options.retinaVersion) { _ = options.retinaVersion; } if (options && options.format) { util.strict_oneof(options.format, this.formats); } this._loadTileJSON(_); }, setFormat: function(_) { util.strict(_, 'string'); this.options.format = _; this.redraw(); return this; }, // disable the setUrl function, which is not available on mapbox tilelayers setUrl: null, _setTileJSON: function(json) { util.strict(json, 'object'); L.extend(this.options, { tiles: json.tiles, attribution: json.attribution, minZoom: json.minzoom, maxZoom: json.maxzoom, tms: json.scheme === 'tms', bounds: json.bounds && util.lbounds(json.bounds) }); this._tilejson = json; this.redraw(); return this; }, getTileJSON: function() { return this._tilejson; }, // this is an exception to mapbox.js naming rules because it's called // by `L.map` getTileUrl: function(tilePoint) { var tiles = this.options.tiles, index = Math.abs(tilePoint.x + tilePoint.y) % tiles.length, url = tiles[index]; var templated = L.Util.template(url, tilePoint); if (!templated) return templated; else return templated.replace('.png', '.' + this.options.format); }, // TileJSON.TileLayers are added to the map immediately, so that they get // the desired z-index, but do not update until the TileJSON has been loaded. _update: function() { if (this.options.tiles) { L.TileLayer.prototype._update.call(this); } } }); module.exports = function(_, options) { return new TileLayer(_, options); }; },{"./load_tilejson":46,"./url":54,"./util":55}],54:[function(require,module,exports){ 'use strict'; var config = require('./config'); // Return the base url of a specific version of MapBox's API. 
// // `hash`, if provided must be a number and is used to distribute requests // against multiple `CNAME`s in order to avoid connection limits in browsers module.exports = { isSSL: function() { return 'https:' === document.location.protocol || config.FORCE_HTTPS; }, base: function(hash) { // By default, use public HTTP urls // Support HTTPS if the user has specified HTTPS urls to use, and this // page is under HTTPS var urls = this.isSSL() ? config.HTTPS_URLS : config.HTTP_URLS; if (hash === undefined || typeof hash !== 'number') { return urls[0]; } else { return urls[hash % urls.length]; } }, // Requests that contain URLs need a secure flag appended // to their URLs so that the server knows to send SSL-ified // resource references. secureFlag: function(url) { if (!this.isSSL()) return url; else if (url.match(/(\?|&)secure/)) return url; else if (url.indexOf('?') !== -1) return url + '&secure'; else return url + '?secure'; }, // Convert a JSONP url to a JSON URL. (MapBox TileJSON sometimes hardcodes JSONP.) jsonify: function(url) { return url.replace(/\.(geo)?jsonp(?=$|\?)/, '.$1json'); } }; },{"./config":39}],55:[function(require,module,exports){ 'use strict'; module.exports = { idUrl: function(_, t) { if (_.indexOf('/') == -1) t.loadID(_); else t.loadURL(_); }, log: function(_) { if (console && typeof console.error === 'function') { console.error(_); } }, strict: function(_, type) { if (typeof _ !== type) { throw new Error('Invalid argument: ' + type + ' expected'); } }, strict_instance: function(_, klass, name) { if (!(_ instanceof klass)) { throw new Error('Invalid argument: ' + name + ' expected'); } }, strict_oneof: function(_, values) { if (values.indexOf(_) == -1) { throw new Error('Invalid argument: ' + _ + ' given, valid values are ' + values.join(', ')); } }, lbounds: function(_) { // leaflet-compatible bounds, since leaflet does not do geojson return new L.LatLngBounds([[_[1], _[0]], [_[3], _[2]]]); } }; },{}],56:[function(require,module,exports){ var ich = require('icanhaz'); module.exports.initiateTableFilter = function(opts) { $('.clear').on("click", function() { $(this.id + ".noMatches").css("visibility", "hidden"); $(this.id + opts.filterDiv).val(""); makeTable(opts); }); $(opts.filterDiv).keyup(function(e) { var text = $(e.target).val(); searchTable(opts, text); }); } module.exports.searchTable = searchTable function searchTable(opts, searchTerm) { var filteredList = []; var is_IE = !!document.documentMode; var e = document.getElementById("activityFilter"); var strUser = e.options[e.selectedIndex].value; var dropdown = strUser.toLowerCase(); if (is_IE == false) { term_array = searchTerm.split(" "); opts.data.forEach(function(object) { var stringObject = JSON.stringify(object).toLowerCase(); var does_match = true; for (var i=0; i<term_array.length; i++) { if ((stringObject.includes(term_array[i].toLowerCase())) == false) { does_match = false; }} console.log(dropdown); if (strUser != "View All Parks") { if (object[dropdown] == "no") { does_match = false; } } if (does_match) { filteredList.push(object); } }); } else { opts.data.forEach(function(object) { var stringObject = JSON.stringify(object).toLowerCase(); if (stringObject.match(searchTerm.toLowerCase())) { console.log(strUser); if (strUser != "View All Parks") { console.log(object[dropdown]); if (object[dropdown] == "yes") { filteredList.push(object); } } else { filteredList.push(object); } } }) } if (filteredList.length === 0) { $(".noMatches").css("visibility", "inherit"); makeTable(opts, filteredList); } else { 
$(".noMatches").css("visibility", "hidden"); makeTable(opts, filteredList); } } module.exports.sortThings = sortThings function sortThings(opts, sorter, sorted, tableDiv) { if (opts.tableDiv != tableDiv) return opts.data.sort(function(a,b){ if (a[sorter]<b[sorter]) return -1 if (a[sorter]>b[sorter]) return 1 return 0 }) if (sorted === "descending") opts.data.reverse() makeTable(opts) var header $(tableDiv + " .tHeader").each(function(i, el){ var contents = resolveDataTitle($(el).text()) if (contents === sorter) header = el }) $(header).attr("data-sorted", sorted) } module.exports.resolveDataTitle = resolveDataTitle function resolveDataTitle(string) { var adjusted = string.toLowerCase().replace(/\s/g, '').replace(/\W/g, '') return adjusted } module.exports.initiateTableSorter = initiateTableSorter function initiateTableSorter(options) { $(document).on("click", ".tHeader", sendToSort) function sendToSort(event) { var tableDiv = "#" + $(event.target).closest("div").attr("id") var sorted = $(event.target).attr("data-sorted") if (sorted) { if (sorted === "descending") sorted = "ascending" else sorted = "descending" } else { sorted = "ascending" } var sorter = resolveDataTitle(event.target.innerHTML) var sortInfo = {"sorter": sorter, "sorted": sorted, "tableDiv": tableDiv} sortThings(options, sorter, sorted, tableDiv) } } module.exports.makeTable = makeTable function makeTable(opts, filteredList) { initiateTableSorter(opts) if (filteredList) var data = filteredList else var data = opts.data var tableId = opts.tableDiv.slice(1) if (!opts.pagination) { table(data, opts) } else { var allRows = data.length var totalPages = Math.ceil(allRows / opts.pagination) var currentPage = 1 var currentStart = (currentPage * opts.pagination) - opts.pagination var currentEnd = currentPage * opts.pagination var currentRows = data.slice(currentStart, currentEnd) table(currentRows, opts) if (opts.data.length > opts.pagination) writePreNext(opts.tableDiv, currentPage, currentPage, totalPages, data, opts.pagination) } } module.exports.setPagClicks = setPagClicks function setPagClicks(data, tableId, currentPage, pagination, totalPages) { $(".pagination-pre-" + tableId).addClass("no-pag") $(document).on("click", (".pagination-next-" + tableId), function() { if ($(this).hasClass("no-pag")) return currentPage = currentPage + 1 var nextPage = currentPage + 1 currentStart = (currentPage * pagination) - pagination currentEnd = currentPage * pagination if (currentPage >= totalPages) { currentRows = data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) $(".pagination-next-" + tableId).addClass("no-pag") $(".pagination-next-" + tableId) } else { currentRows = data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) } }) $(document).on("click", (".pagination-pre-" + tableId), function() { if (currentPage > 1) $(this).removeClass("no-pag") if ($(this).hasClass("no-pag")) return // if ((currentPage) === 2) { // $(".pagination-pre-" + tableId).addClass("no-pag"); console.log("on page one!", currentPage) // } currentPage = currentPage - 1 var nextPage = currentPage + 1 currentStart = (currentPage * pagination) - pagination currentEnd = currentPage * pagination // currentRows = data.slice(currentStart, currentEnd) // table(currentRows, "#" + tableId) // setPreNext("#" + tableId, currentPage, currentPage, totalPages) if (currentPage === 1) { currentRows = 
data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) $(".pagination-pre-" + tableId).addClass("no-pag") } else { currentRows = data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) } }) } module.exports.setPreNext = setPreNext function setPreNext(targetDiv, currentPage, currentPage, totalPages, data, pagination) { var tableId = targetDiv.slice(1) $(targetDiv).append("<div id='Pagination' pageno='" + currentPage + "'" + "class='table-pagination'>Showing page " + currentPage + " of " + totalPages + " <a class='pagination-pre-" + tableId + "'>Previous</a>" + " <a class='pagination-next-" + tableId + "'>Next</a></p></div>" ) } module.exports.writePreNext = writePreNext function writePreNext(targetDiv, currentPage, currentPage, totalPages, data, pagination) { var tableId = targetDiv.slice(1) $(targetDiv).append("<div id='Pagination' pageno='" + currentPage + "'" + "class='table-pagination'>Showing page " + currentPage + " of " + totalPages + " <a class='pagination-pre-" + tableId + "'>Previous</a>" + " <a class='pagination-next-" + tableId + "'>Next</a></p></div>" ) setPagClicks(data, tableId, currentPage, pagination, totalPages) } module.exports.clearPreNext = clearPreNext function clearPreNext() { $(".table-pagination").attr("display", "none") } module.exports.table = table function table(data, opts) { if (opts.templateID) { var templateID = opts.templateID } else var templateID = opts.tableDiv.replace("#", "") var tableContents = ich[templateID]( { rows: data }) $(opts.tableDiv).html(tableContents) } },{"icanhaz":2}]},{},[1]) ;
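// A minimal sketch of how the sheetsee-tables options object used above is
// typically wired up (the element IDs and `rows` data source here are
// hypothetical placeholders):
//
//   var tableOptions = {
//     data: rows,               // array of row objects, e.g. from a spreadsheet feed
//     tableDiv: "#siteTable",   // container div; its id (or templateID) must match an ICanHaz template
//     filterDiv: "#siteFilter", // text input watched by initiateTableFilter
//     pagination: 10            // rows per page (optional)
//   };
//   Sheetsee.makeTable(tableOptions);
//   Sheetsee.initiateTableFilter(tableOptions);
//
//   // searchTable also expects a <select id="activityFilter"> element on the
//   // page, whose selected value names the column used for yes/no filtering.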
js/sheetsee.js
;(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ if (typeof window.Sheetsee === 'undefined') window.Sheetsee = {}; window.Sheetsee = require('sheetsee-core'); var extend = require('lodash.assign'); extend(window.Sheetsee, require('sheetsee-maps'), require('sheetsee-tables')); module.exports = Sheetsee; },{"lodash.assign":3,"sheetsee-core":28,"sheetsee-maps":29,"sheetsee-tables":56}],2:[function(require,module,exports){ /*! ICanHaz.js version 0.10.2 -- by @HenrikJoreteg More info at: http://icanhazjs.com */ (function () { /* mustache.js — Logic-less templates in JavaScript See http://mustache.github.com/ for more info. */ var Mustache = function () { var _toString = Object.prototype.toString; Array.isArray = Array.isArray || function (obj) { return _toString.call(obj) == "[object Array]"; } var _trim = String.prototype.trim, trim; if (_trim) { trim = function (text) { return text == null ? "" : _trim.call(text); } } else { var trimLeft, trimRight; // IE doesn't match non-breaking spaces with \s. if ((/\S/).test("\xA0")) { trimLeft = /^[\s\xA0]+/; trimRight = /[\s\xA0]+$/; } else { trimLeft = /^\s+/; trimRight = /\s+$/; } trim = function (text) { return text == null ? "" : text.toString().replace(trimLeft, "").replace(trimRight, ""); } } var escapeMap = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': '&quot;', "'": '&#39;' }; function escapeHTML(string) { return String(string).replace(/&(?!\w+;)|[<>"']/g, function (s) { return escapeMap[s] || s; }); } var regexCache = {}; var Renderer = function () {}; Renderer.prototype = { otag: "{{", ctag: "}}", pragmas: {}, buffer: [], pragmas_implemented: { "IMPLICIT-ITERATOR": true }, context: {}, render: function (template, context, partials, in_recursion) { // reset buffer & set context if (!in_recursion) { this.context = context; this.buffer = []; // TODO: make this non-lazy } // fail fast if (!this.includes("", template)) { if (in_recursion) { return template; } else { this.send(template); return; } } // get the pragmas together template = this.render_pragmas(template); // render the template var html = this.render_section(template, context, partials); // render_section did not find any sections, we still need to render the tags if (html === false) { html = this.render_tags(template, context, partials, in_recursion); } if (in_recursion) { return html; } else { this.sendLines(html); } }, /* Sends parsed lines */ send: function (line) { if (line !== "") { this.buffer.push(line); } }, sendLines: function (text) { if (text) { var lines = text.split("\n"); for (var i = 0; i < lines.length; i++) { this.send(lines[i]); } } }, /* Looks for %PRAGMAS */ render_pragmas: function (template) { // no pragmas if (!this.includes("%", template)) { return template; } var that = this; var regex = this.getCachedRegex("render_pragmas", function (otag, ctag) { return new RegExp(otag + "%([\\w-]+) ?([\\w]+=[\\w]+)?" 
+ ctag, "g"); }); return template.replace(regex, function (match, pragma, options) { if (!that.pragmas_implemented[pragma]) { throw({message: "This implementation of mustache doesn't understand the '" + pragma + "' pragma"}); } that.pragmas[pragma] = {}; if (options) { var opts = options.split("="); that.pragmas[pragma][opts[0]] = opts[1]; } return ""; // ignore unknown pragmas silently }); }, /* Tries to find a partial in the curent scope and render it */ render_partial: function (name, context, partials) { name = trim(name); if (!partials || partials[name] === undefined) { throw({message: "unknown_partial '" + name + "'"}); } if (!context || typeof context[name] != "object") { return this.render(partials[name], context, partials, true); } return this.render(partials[name], context[name], partials, true); }, /* Renders inverted (^) and normal (#) sections */ render_section: function (template, context, partials) { if (!this.includes("#", template) && !this.includes("^", template)) { // did not render anything, there were no sections return false; } var that = this; var regex = this.getCachedRegex("render_section", function (otag, ctag) { // This regex matches _the first_ section ({{#foo}}{{/foo}}), and captures the remainder return new RegExp( "^([\\s\\S]*?)" + // all the crap at the beginning that is not {{*}} ($1) otag + // {{ "(\\^|\\#)\\s*(.+)\\s*" + // #foo (# == $2, foo == $3) ctag + // }} "\n*([\\s\\S]*?)" + // between the tag ($2). leading newlines are dropped otag + // {{ "\\/\\s*\\3\\s*" + // /foo (backreference to the opening tag). ctag + // }} "\\s*([\\s\\S]*)$", // everything else in the string ($4). leading whitespace is dropped. "g"); }); // for each {{#foo}}{{/foo}} section do... return template.replace(regex, function (match, before, type, name, content, after) { // before contains only tags, no sections var renderedBefore = before ? that.render_tags(before, context, partials, true) : "", // after may contain both sections and tags, so use full rendering function renderedAfter = after ? that.render(after, context, partials, true) : "", // will be computed below renderedContent, value = that.find(name, context); if (type === "^") { // inverted section if (!value || Array.isArray(value) && value.length === 0) { // false or empty list, render it renderedContent = that.render(content, context, partials, true); } else { renderedContent = ""; } } else if (type === "#") { // normal section if (Array.isArray(value)) { // Enumerable, Let's loop! renderedContent = that.map(value, function (row) { return that.render(content, that.create_context(row), partials, true); }).join(""); } else if (that.is_object(value)) { // Object, Use it as subcontext! renderedContent = that.render(content, that.create_context(value), partials, true); } else if (typeof value == "function") { // higher order section renderedContent = value.call(context, content, function (text) { return that.render(text, context, partials, true); }); } else if (value) { // boolean section renderedContent = that.render(content, context, partials, true); } else { renderedContent = ""; } } return renderedBefore + renderedContent + renderedAfter; }); }, /* Replace {{foo}} and friends with values from our view */ render_tags: function (template, context, partials, in_recursion) { // tit for tat var that = this; var new_regex = function () { return that.getCachedRegex("render_tags", function (otag, ctag) { return new RegExp(otag + "(=|!|>|&|\\{|%)?([^#\\^]+?)\\1?" 
+ ctag + "+", "g"); }); }; var regex = new_regex(); var tag_replace_callback = function (match, operator, name) { switch(operator) { case "!": // ignore comments return ""; case "=": // set new delimiters, rebuild the replace regexp that.set_delimiters(name); regex = new_regex(); return ""; case ">": // render partial return that.render_partial(name, context, partials); case "{": // the triple mustache is unescaped case "&": // & operator is an alternative unescape method return that.find(name, context); default: // escape the value return escapeHTML(that.find(name, context)); } }; var lines = template.split("\n"); for(var i = 0; i < lines.length; i++) { lines[i] = lines[i].replace(regex, tag_replace_callback, this); if (!in_recursion) { this.send(lines[i]); } } if (in_recursion) { return lines.join("\n"); } }, set_delimiters: function (delimiters) { var dels = delimiters.split(" "); this.otag = this.escape_regex(dels[0]); this.ctag = this.escape_regex(dels[1]); }, escape_regex: function (text) { // thank you Simon Willison if (!arguments.callee.sRE) { var specials = [ '/', '.', '*', '+', '?', '|', '(', ')', '[', ']', '{', '}', '\\' ]; arguments.callee.sRE = new RegExp( '(\\' + specials.join('|\\') + ')', 'g' ); } return text.replace(arguments.callee.sRE, '\\$1'); }, /* find `name` in current `context`. That is find me a value from the view object */ find: function (name, context) { name = trim(name); // Checks whether a value is thruthy or false or 0 function is_kinda_truthy(bool) { return bool === false || bool === 0 || bool; } var value; // check for dot notation eg. foo.bar if (name.match(/([a-z_]+)\./ig)) { var childValue = this.walk_context(name, context); if (is_kinda_truthy(childValue)) { value = childValue; } } else { if (is_kinda_truthy(context[name])) { value = context[name]; } else if (is_kinda_truthy(this.context[name])) { value = this.context[name]; } } if (typeof value == "function") { return value.apply(context); } if (value !== undefined) { return value; } // silently ignore unkown variables return ""; }, walk_context: function (name, context) { var path = name.split('.'); // if the var doesn't exist in current context, check the top level context var value_context = (context[path[0]] != undefined) ? context : this.context; var value = value_context[path.shift()]; while (value != undefined && path.length > 0) { value_context = value; value = value[path.shift()]; } // if the value is a function, call it, binding the correct context if (typeof value == "function") { return value.apply(value_context); } return value; }, // Utility methods /* includes tag */ includes: function (needle, haystack) { return haystack.indexOf(this.otag + needle) != -1; }, // by @langalex, support for arrays of strings create_context: function (_context) { if (this.is_object(_context)) { return _context; } else { var iterator = "."; if (this.pragmas["IMPLICIT-ITERATOR"]) { iterator = this.pragmas["IMPLICIT-ITERATOR"].iterator; } var ctx = {}; ctx[iterator] = _context; return ctx; } }, is_object: function (a) { return a && typeof a == "object"; }, /* Why, why, why? Because IE. Cry, cry cry. 
*/ map: function (array, fn) { if (typeof array.map == "function") { return array.map(fn); } else { var r = []; var l = array.length; for(var i = 0; i < l; i++) { r.push(fn(array[i])); } return r; } }, getCachedRegex: function (name, generator) { var byOtag = regexCache[this.otag]; if (!byOtag) { byOtag = regexCache[this.otag] = {}; } var byCtag = byOtag[this.ctag]; if (!byCtag) { byCtag = byOtag[this.ctag] = {}; } var regex = byCtag[name]; if (!regex) { regex = byCtag[name] = generator(this.otag, this.ctag); } return regex; } }; return({ name: "mustache.js", version: "0.4.0", /* Turns a template and view into HTML */ to_html: function (template, view, partials, send_fun) { var renderer = new Renderer(); if (send_fun) { renderer.send = send_fun; } renderer.render(template, view || {}, partials); if (!send_fun) { return renderer.buffer.join("\n"); } } }); }(); /*! ICanHaz.js -- by @HenrikJoreteg */ /*global */ (function () { function trim(stuff) { if (''.trim) return stuff.trim(); else return stuff.replace(/^\s+/, '').replace(/\s+$/, ''); } // Establish the root object, `window` in the browser, or `global` on the server. var root = this; var ich = { VERSION: "0.10.2", templates: {}, // grab jquery or zepto if it's there $: (typeof window !== 'undefined') ? window.jQuery || window.Zepto || null : null, // public function for adding templates // can take a name and template string arguments // or can take an object with name/template pairs // We're enforcing uniqueness to avoid accidental template overwrites. // If you want a different template, it should have a different name. addTemplate: function (name, templateString) { if (typeof name === 'object') { for (var template in name) { this.addTemplate(template, name[template]); } return; } if (ich[name]) { console.error("Invalid name: " + name + "."); } else if (ich.templates[name]) { console.error("Template \"" + name + " \" exists"); } else { ich.templates[name] = templateString; ich[name] = function (data, raw) { data = data || {}; var result = Mustache.to_html(ich.templates[name], data, ich.templates); return (ich.$ && !raw) ? ich.$(trim(result)) : result; }; } }, // clears all retrieval functions and empties cache clearAll: function () { for (var key in ich.templates) { delete ich[key]; } ich.templates = {}; }, // clears/grabs refresh: function () { ich.clearAll(); ich.grabTemplates(); }, // grabs templates from the DOM and caches them. // Loop through and add templates. // Whitespace at beginning and end of all templates inside <script> tags will // be trimmed. If you want whitespace around a partial, add it in the parent, // not the partial. Or do it explicitly using <br/> or &nbsp; grabTemplates: function () { var i, l, scripts = document.getElementsByTagName('script'), script, trash = []; for (i = 0, l = scripts.length; i < l; i++) { script = scripts[i]; if (script && script.innerHTML && script.id && (script.type === "text/html" || script.type === "text/x-icanhaz")) { ich.addTemplate(script.id, trim(script.innerHTML)); trash.unshift(script); } } for (i = 0, l = trash.length; i < l; i++) { trash[i].parentNode.removeChild(trash[i]); } } }; // Export the ICanHaz object for **Node.js**, with // backwards-compatibility for the old `require()` API. If we're in // the browser, add `ich` as a global object via a string identifier, // for Closure Compiler "advanced" mode. 
if (typeof exports !== 'undefined') { if (typeof module !== 'undefined' && module.exports) { exports = module.exports = ich; } exports.ich = ich; } else { root['ich'] = ich; } if (typeof document !== 'undefined') { if (ich.$) { ich.$(function () { ich.grabTemplates(); }); } else { document.addEventListener('DOMContentLoaded', function () { ich.grabTemplates(); }, true); } } })(); })(); },{}],3:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var baseCreateCallback = require('lodash._basecreatecallback'), keys = require('lodash.keys'), objectTypes = require('lodash._objecttypes'); /** * Assigns own enumerable properties of source object(s) to the destination * object. Subsequent sources will overwrite property assignments of previous * sources. If a callback is provided it will be executed to produce the * assigned values. The callback is bound to `thisArg` and invoked with two * arguments; (objectValue, sourceValue). * * @static * @memberOf _ * @type Function * @alias extend * @category Objects * @param {Object} object The destination object. * @param {...Object} [source] The source objects. * @param {Function} [callback] The function to customize assigning values. * @param {*} [thisArg] The `this` binding of `callback`. * @returns {Object} Returns the destination object. * @example * * _.assign({ 'name': 'moe' }, { 'age': 40 }); * // => { 'name': 'moe', 'age': 40 } * * var defaults = _.partialRight(_.assign, function(a, b) { * return typeof a == 'undefined' ? b : a; * }); * * var food = { 'name': 'apple' }; * defaults(food, { 'name': 'banana', 'type': 'fruit' }); * // => { 'name': 'apple', 'type': 'fruit' } */ var assign = function(object, source, guard) { var index, iterable = object, result = iterable; if (!iterable) return result; var args = arguments, argsIndex = 0, argsLength = typeof guard == 'number' ? 2 : args.length; if (argsLength > 3 && typeof args[argsLength - 2] == 'function') { var callback = baseCreateCallback(args[--argsLength - 1], args[argsLength--], 2); } else if (argsLength > 2 && typeof args[argsLength - 1] == 'function') { callback = args[--argsLength]; } while (++argsIndex < argsLength) { iterable = args[argsIndex]; if (iterable && objectTypes[typeof iterable]) { var ownIndex = -1, ownProps = objectTypes[typeof iterable] && keys(iterable), length = ownProps ? ownProps.length : 0; while (++ownIndex < length) { index = ownProps[ownIndex]; result[index] = callback ? 
callback(result[index], iterable[index]) : iterable[index]; } } } return result }; module.exports = assign; },{"lodash._basecreatecallback":4,"lodash._objecttypes":23,"lodash.keys":24}],4:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var bind = require('lodash.bind'), identity = require('lodash.identity'), setBindData = require('lodash._setbinddata'), support = require('lodash.support'); /** Used to detected named functions */ var reFuncName = /^function[ \n\r\t]+\w/; /** Used to detect functions containing a `this` reference */ var reThis = /\bthis\b/; /** Native method shortcuts */ var fnToString = Function.prototype.toString; /** * The base implementation of `_.createCallback` without support for creating * "_.pluck" or "_.where" style callbacks. * * @private * @param {*} [func=identity] The value to convert to a callback. * @param {*} [thisArg] The `this` binding of the created callback. * @param {number} [argCount] The number of arguments the callback accepts. * @returns {Function} Returns a callback function. */ function baseCreateCallback(func, thisArg, argCount) { if (typeof func != 'function') { return identity; } // exit early if there is no `thisArg` if (typeof thisArg == 'undefined') { return func; } var bindData = func.__bindData__ || (support.funcNames && !func.name); if (typeof bindData == 'undefined') { var source = reThis && fnToString.call(func); if (!support.funcNames && source && !reFuncName.test(source)) { bindData = true; } if (support.funcNames || !bindData) { // checks if `func` references the `this` keyword and stores the result bindData = !support.funcDecomp || reThis.test(source); setBindData(func, bindData); } } // exit early if there are no `this` references or `func` is bound if (bindData !== true && (bindData && bindData[1] & 1)) { return func; } switch (argCount) { case 1: return function(value) { return func.call(thisArg, value); }; case 2: return function(a, b) { return func.call(thisArg, a, b); }; case 3: return function(value, index, collection) { return func.call(thisArg, value, index, collection); }; case 4: return function(accumulator, value, index, collection) { return func.call(thisArg, accumulator, value, index, collection); }; } return bind(func, thisArg); } module.exports = baseCreateCallback; },{"lodash._setbinddata":5,"lodash.bind":13,"lodash.identity":20,"lodash.support":21}],5:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var getObject = require('lodash._getobject'), noop = require('lodash._noop'), reNative = require('lodash._renative'), releaseObject = require('lodash._releaseobject'); /** Used for native method references */ var objectProto = Object.prototype; var defineProperty = (function() { try { var o = {}, func = reNative.test(func = Object.defineProperty) && func, 
result = func(o, o, o) && func; } catch(e) { } return result; }()); /** * Sets `this` binding data on a given function. * * @private * @param {Function} func The function to set data on. * @param {*} value The value to set. */ var setBindData = !defineProperty ? noop : function(func, value) { var descriptor = getObject(); descriptor.value = value; defineProperty(func, '__bindData__', descriptor); releaseObject(descriptor); }; module.exports = setBindData; },{"lodash._getobject":6,"lodash._noop":8,"lodash._releaseobject":9,"lodash._renative":12}],6:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var objectPool = require('lodash._objectpool'); /** * Gets an object from the object pool or creates a new one if the pool is empty. * * @private * @returns {Object} The object from the pool. */ function getObject() { return objectPool.pop() || { 'array': null, 'cache': null, 'configurable': false, 'criteria': null, 'enumerable': false, 'false': false, 'index': 0, 'leading': false, 'maxWait': 0, 'null': false, 'number': null, 'object': null, 'push': null, 'string': null, 'trailing': false, 'true': false, 'undefined': false, 'value': null, 'writable': false }; } module.exports = getObject; },{"lodash._objectpool":7}],7:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used to pool arrays and objects used internally */ var objectPool = []; module.exports = objectPool; },{}],8:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** * A no-operation function. * * @private */ function noop() { // no operation performed } module.exports = noop; },{}],9:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var maxPoolSize = require('lodash._maxpoolsize'), objectPool = require('lodash._objectpool'); /** * Releases the given object back to the object pool. * * @private * @param {Object} [object] The object to release. 
*/ function releaseObject(object) { var cache = object.cache; if (cache) { releaseObject(cache); } object.array = object.cache = object.criteria = object.object = object.number = object.string = object.value = null; if (objectPool.length < maxPoolSize) { objectPool.push(object); } } module.exports = releaseObject; },{"lodash._maxpoolsize":10,"lodash._objectpool":11}],10:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used as the max size of the `arrayPool` and `objectPool` */ var maxPoolSize = 40; module.exports = maxPoolSize; },{}],11:[function(require,module,exports){ module.exports=require(7) },{}],12:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used for native method references */ var objectProto = Object.prototype; /** Used to detect if a method is native */ var reNative = RegExp('^' + String(objectProto.valueOf) .replace(/[.*+?^${}()|[\]\\]/g, '\\$&') .replace(/valueOf|for [^\]]+/g, '.+?') + '$' ); module.exports = reNative; },{}],13:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var createBound = require('lodash._createbound'), reNative = require('lodash._renative'); /** * Used for `Array` method references. * * Normally `Array.prototype` would suffice, however, using an array literal * avoids issues in Narwhal. */ var arrayRef = []; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeSlice = arrayRef.slice; /** * Creates a function that, when called, invokes `func` with the `this` * binding of `thisArg` and prepends any additional `bind` arguments to those * provided to the bound function. * * @static * @memberOf _ * @category Functions * @param {Function} func The function to bind. * @param {*} [thisArg] The `this` binding of `func`. * @param {...*} [arg] Arguments to be partially applied. * @returns {Function} Returns the new bound function. * @example * * var func = function(greeting) { * return greeting + ' ' + this.name; * }; * * func = _.bind(func, { 'name': 'moe' }, 'hi'); * func(); * // => 'hi moe' */ function bind(func, thisArg) { return arguments.length > 2 ? 
createBound(func, 17, nativeSlice.call(arguments, 2), null, thisArg) : createBound(func, 1, null, null, thisArg); } module.exports = bind; },{"lodash._createbound":14,"lodash._renative":19}],14:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var createObject = require('lodash._createobject'), isFunction = require('lodash.isfunction'), isObject = require('lodash.isobject'), reNative = require('lodash._renative'), setBindData = require('lodash._setbinddata'), support = require('lodash.support'); /** * Used for `Array` method references. * * Normally `Array.prototype` would suffice, however, using an array literal * avoids issues in Narwhal. */ var arrayRef = []; /** Used for native method references */ var objectProto = Object.prototype; /** Native method shortcuts */ var push = arrayRef.push, toString = objectProto.toString, unshift = arrayRef.unshift; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeBind = reNative.test(nativeBind = toString.bind) && nativeBind, nativeSlice = arrayRef.slice; /** * Creates a function that, when called, either curries or invokes `func` * with an optional `this` binding and partially applied arguments. * * @private * @param {Function|string} func The function or method name to reference. * @param {number} bitmask The bitmask of method flags to compose. * The bitmask may be composed of the following flags: * 1 - `_.bind` * 2 - `_.bindKey` * 4 - `_.curry` * 8 - `_.curry` (bound) * 16 - `_.partial` * 32 - `_.partialRight` * @param {Array} [partialArgs] An array of arguments to prepend to those * provided to the new function. * @param {Array} [partialRightArgs] An array of arguments to append to those * provided to the new function. * @param {*} [thisArg] The `this` binding of `func`. * @param {number} [arity] The arity of `func`. * @returns {Function} Returns the new bound function. 
*/ function createBound(func, bitmask, partialArgs, partialRightArgs, thisArg, arity) { var isBind = bitmask & 1, isBindKey = bitmask & 2, isCurry = bitmask & 4, isCurryBound = bitmask & 8, isPartial = bitmask & 16, isPartialRight = bitmask & 32, key = func; if (!isBindKey && !isFunction(func)) { throw new TypeError; } if (isPartial && !partialArgs.length) { bitmask &= ~16; isPartial = partialArgs = false; } if (isPartialRight && !partialRightArgs.length) { bitmask &= ~32; isPartialRight = partialRightArgs = false; } var bindData = func && func.__bindData__; if (bindData) { if (isBind && !(bindData[1] & 1)) { bindData[4] = thisArg; } if (!isBind && bindData[1] & 1) { bitmask |= 8; } if (isCurry && !(bindData[1] & 4)) { bindData[5] = arity; } if (isPartial) { push.apply(bindData[2] || (bindData[2] = []), partialArgs); } if (isPartialRight) { push.apply(bindData[3] || (bindData[3] = []), partialRightArgs); } bindData[1] |= bitmask; return createBound.apply(null, bindData); } // use `Function#bind` if it exists and is fast // (in V8 `Function#bind` is slower except when partially applied) if (isBind && !(isBindKey || isCurry || isPartialRight) && (support.fastBind || (nativeBind && isPartial))) { if (isPartial) { var args = [thisArg]; push.apply(args, partialArgs); } var bound = isPartial ? nativeBind.apply(func, args) : nativeBind.call(func, thisArg); } else { bound = function() { // `Function#bind` spec // http://es5.github.io/#x15.3.4.5 var args = arguments, thisBinding = isBind ? thisArg : this; if (isCurry || isPartial || isPartialRight) { args = nativeSlice.call(args); if (isPartial) { unshift.apply(args, partialArgs); } if (isPartialRight) { push.apply(args, partialRightArgs); } if (isCurry && args.length < arity) { bitmask |= 16 & ~32; return createBound(func, (isCurryBound ? bitmask : bitmask & ~3), args, null, thisArg, arity); } } if (isBindKey) { func = thisBinding[key]; } if (this instanceof bound) { // ensure `new bound` is an instance of `func` thisBinding = createObject(func.prototype); // mimic the constructor's `return` behavior // http://es5.github.io/#x13.2.2 var result = func.apply(thisBinding, args); return isObject(result) ? result : thisBinding; } return func.apply(thisBinding, args); }; } setBindData(bound, nativeSlice.call(arguments)); return bound; } module.exports = createBound; },{"lodash._createobject":15,"lodash._renative":19,"lodash._setbinddata":5,"lodash.isfunction":17,"lodash.isobject":18,"lodash.support":21}],15:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var isObject = require('lodash.isobject'), noop = require('lodash._noop'), reNative = require('lodash._renative'); /** Used for native method references */ var objectProto = Object.prototype; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeCreate = reNative.test(nativeCreate = Object.create) && nativeCreate; /** * Creates a new object with the specified `prototype`. * * @private * @param {Object} prototype The prototype object. * @returns {Object} Returns the new object. */ function createObject(prototype) { return isObject(prototype) ? 
nativeCreate(prototype) : {}; } module.exports = createObject; },{"lodash._noop":16,"lodash._renative":19,"lodash.isobject":18}],16:[function(require,module,exports){ module.exports=require(8) },{}],17:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** * Checks if `value` is a function. * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true */ function isFunction(value) { return typeof value == 'function'; } module.exports = isFunction; },{}],18:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var objectTypes = require('lodash._objecttypes'); /** * Checks if `value` is the language type of Object. * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @category Objects * @param {*} value The value to check. * @returns {boolean} Returns `true` if the `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(1); * // => false */ function isObject(value) { // check if the value is the ECMAScript language type of Object // http://es5.github.io/#x8 // and avoid a V8 bug // http://code.google.com/p/v8/issues/detail?id=2291 return !!(value && objectTypes[typeof value]); } module.exports = isObject; },{"lodash._objecttypes":23}],19:[function(require,module,exports){ module.exports=require(12) },{}],20:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** * This method returns the first argument provided to it. * * @static * @memberOf _ * @category Utilities * @param {*} value Any value. * @returns {*} Returns `value`. * @example * * var moe = { 'name': 'moe' }; * moe === _.identity(moe); * // => true */ function identity(value) { return value; } module.exports = identity; },{}],21:[function(require,module,exports){ var global=typeof self !== "undefined" ? self : typeof window !== "undefined" ? 
window : {};/** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var reNative = require('lodash._renative'); /** Used to detect functions containing a `this` reference */ var reThis = /\bthis\b/; /** Used for native method references */ var objectProto = Object.prototype; /** Native method shortcuts */ var toString = objectProto.toString; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeBind = reNative.test(nativeBind = toString.bind) && nativeBind; /** Detect various environments */ var isIeOpera = reNative.test(global.attachEvent), isV8 = nativeBind && !/\n|true/.test(nativeBind + isIeOpera); /** * An object used to flag environments features. * * @static * @memberOf _ * @type Object */ var support = {}; /** * Detect if `Function#bind` exists and is inferred to be fast (all but V8). * * @memberOf _.support * @type boolean */ support.fastBind = nativeBind && !isV8; /** * Detect if functions can be decompiled by `Function#toString` * (all but PS3 and older Opera mobile browsers & avoided in Windows 8 apps). * * @memberOf _.support * @type boolean */ support.funcDecomp = !reNative.test(global.WinRTError) && reThis.test(function() { return this; }); /** * Detect if `Function#name` is supported (all but IE). * * @memberOf _.support * @type boolean */ support.funcNames = typeof Function.name == 'string'; module.exports = support; },{"lodash._renative":22}],22:[function(require,module,exports){ module.exports=require(12) },{}],23:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ /** Used to determine if values are of the language type Object */ var objectTypes = { 'boolean': false, 'function': true, 'object': true, 'number': false, 'string': false, 'undefined': false }; module.exports = objectTypes; },{}],24:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var isObject = require('lodash.isobject'), reNative = require('lodash._renative'), shimKeys = require('lodash._shimkeys'); /** Used for native method references */ var objectProto = Object.prototype; /* Native method shortcuts for methods with the same name as other `lodash` methods */ var nativeKeys = reNative.test(nativeKeys = Object.keys) && nativeKeys; /** * Creates an array composed of the own enumerable property names of an object. * * @static * @memberOf _ * @category Objects * @param {Object} object The object to inspect. * @returns {Array} Returns an array of property names. 
* @example * * _.keys({ 'one': 1, 'two': 2, 'three': 3 }); * // => ['one', 'two', 'three'] (property order is not guaranteed across environments) */ var keys = !nativeKeys ? shimKeys : function(object) { if (!isObject(object)) { return []; } return nativeKeys(object); }; module.exports = keys; },{"lodash._renative":25,"lodash._shimkeys":26,"lodash.isobject":27}],25:[function(require,module,exports){ module.exports=require(12) },{}],26:[function(require,module,exports){ /** * Lo-Dash 2.1.0 (Custom Build) <http://lodash.com/> * Build: `lodash modularize modern exports="npm" -o ./npm` * Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/> * Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE> * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors * Available under MIT license <http://lodash.com/license> */ var objectTypes = require('lodash._objecttypes'); /** Used for native method references */ var objectProto = Object.prototype; /** Native method shortcuts */ var hasOwnProperty = objectProto.hasOwnProperty; /** * A fallback implementation of `Object.keys` which produces an array of the * given object's own enumerable property names. * * @private * @type Function * @param {Object} object The object to inspect. * @returns {Array} Returns an array of property names. */ var shimKeys = function(object) { var index, iterable = object, result = []; if (!iterable) return result; if (!(objectTypes[typeof object])) return result; for (index in iterable) { if (hasOwnProperty.call(iterable, index)) { result.push(index); } } return result }; module.exports = shimKeys; },{"lodash._objecttypes":23}],27:[function(require,module,exports){ module.exports=require(18) },{"lodash._objecttypes":23}],28:[function(require,module,exports){ var ich = require('icanhaz') module.exports.ich = ich module.exports.getKeywordCount = function(data, keyword) { var group = [] data.forEach(function (d) { for(var key in d) { var value = d[key].toString().toLowerCase() if (value.match(keyword.toLowerCase())) group.push(d) } }) return group.length if (group = []) return "0" } module.exports.getKeyword = function(data, keyword) { var group = [] data.forEach(function (d) { for(var key in d) { var value = d[key].toString().toLowerCase() if (value.match(keyword.toLowerCase())) group.push(d) } }) return group if (group = []) return "no matches" } module.exports.getColumnTotal = function(data, column) { var total = [] data.forEach(function (d) { if (d[column] === "") return total.push(+d[column]) }) return total.reduce(function(a,b) { return a + b }) } module.exports.getColumnAverage = function(data, column) { var total = getColumnTotal(data, column) var average = total / data.length return average } module.exports.getMax = function(data, column) { var result = [] data.forEach(function (element){ if (result.length === 0) return result.push(element) else { if (element[column].valueOf() > result[0][column].valueOf()) { result.length = 0 return result.push(element) } if (element[column].valueOf() === result[0][column].valueOf()) { return result.push(element) } } }) return result } module.exports.getMin = function(data, column) { var result = [] data.forEach(function (element){ if (result.length === 0) return result.push(element) else { if (element[column].valueOf() < result[0][column].valueOf()) { result.length = 0 return result.push(element) } if (element[column].valueOf() === result[0][column].valueOf()) { return result.push(element) } } }) return result } // out of the data, 
filter something from a category module.exports.getMatches = function (data, filter, category) { var matches = [] data.forEach(function (element) { var projectType = element[category].toString().toLowerCase() if (projectType === filter.toLowerCase()) matches.push(element) }) return matches } module.exports.mostFrequent = function(data, category) { var count = {} for (var i = 0; i < data.length; i++) { if (!count[data[i][category]]) { count[data[i][category]] = 0 } count[data[i][category]]++ } var sortable = [] for (var category in count) { sortable.push([category, count[category]]) } sortable.sort(function(a, b) {return b[1] - a[1]}) return sortable // returns array of arrays, in order } // thank you! http://james.padolsey.com/javascript/deep-copying-of-objects-and-arrays/ module.exports.deepCopy = function(obj) { if (Object.prototype.toString.call(obj) === '[object Array]') { var out = [], i = 0, len = obj.length; for ( ; i < len; i++ ) { out[i] = arguments.callee(obj[i]); } return out; } if (typeof obj === 'object') { var out = {}, i; for ( i in obj ) { out[i] = arguments.callee(obj[i]); } return out; } return obj; } module.exports.getOccurance = function(data, category) { var occuranceCount = {} for (var i = 0; i < data.length; i++) { if (!occuranceCount[data[i][category]]) { occuranceCount[data[i][category]] = 0 } occuranceCount[data[i][category]]++ } return occuranceCount // returns object, keys alphabetical } module.exports.makeColorArrayOfObject = function(data, colors, category) { var category = category var keys = Object.keys(data) var counter = 1 var colorIndex return keys.map(function(key){ if (keys.length > colors.length || keys.length <= colors.length ) { colorIndex = counter % colors.length } var h = {units: data[key], hexcolor: colors[colorIndex]} h[category] = key counter++ colorIndex = counter return h }) } module.exports.makeArrayOfObject = function(data) { var keys = Object.keys(data) return keys.map(function(key){ // var h = {label: key, units: data[key], hexcolor: "#FDBDBD"} var h = {label: key, units: data[key]} return h }) } },{"icanhaz":2}],29:[function(require,module,exports){ var mapbox = require('mapbox.js') var ich = require('icanhaz') module.exports.buildOptionObject = buildOptionObject function buildOptionObject(optionsJSON, lineItem) { var newObj = {} optionsJSON.forEach(function(option) { newObj[option] = lineItem[option] }) return newObj } module.exports.makeupOptionObject = function(lineItem) { var options = [] for (var i in lineItem) { options.push(i); } return options } module.exports.createGeoJSON = function(data, optionsJSON) { var geoJSON = [] data.forEach(function(lineItem){ var hasGeo = confirmGeo(lineItem) if (hasGeo && !lineItem.lat && !lineItem.long) handleLatLong(lineItem) if (lineItem.linestring || lineItem.multipolygon) hasGeo = true if (!hasGeo) return if (!optionsJSON) { optionsJSON = makeupOptionObject(lineItem) var optionObj = buildOptionObject(optionsJSON, lineItem) } else { optionObj = buildOptionObject(optionsJSON, lineItem) } var type = determineType(lineItem) if (lineItem.polygon || lineItem.multipolygon || lineItem.linestring) { var shapeFeature = shapeJSON(lineItem, type, optionObj) geoJSON.push(shapeFeature) } else { var pointFeature = pointJSON(lineItem, type, optionObj) geoJSON.push(pointFeature) } }) return geoJSON } module.exports.confirmGeo = confirmGeo function confirmGeo(lineItem) { var hasGeo = false if (lineItem.lat && lineItem.long || lineItem.polygon) hasGeo = true if (lineItem.latitude && lineItem.longitude || 
lineItem.polygon) hasGeo = true if (lineItem.geolatitude && lineItem.geolongitude || lineItem.polygon) hasGeo = true return hasGeo } module.exports.handleLatLong = handleLatLong function handleLatLong(lineItem) { if (lineItem.latitude && lineItem.longitude || lineItem.polygon) { lineItem.lat = lineItem.latitude lineItem.long = lineItem.longitude delete lineItem.latitude delete lineItem.longitude return lineItem } if (lineItem.geolatitude && lineItem.geolongitude || lineItem.polygon) { lineItem.lat = lineItem.geolatitude lineItem.long = lineItem.geolongitude delete lineItem.geolatitude delete lineItem.geolongitude return lineItem } } module.exports.pointJSON = pointJSON function pointJSON(lineItem, type, optionObj) { var lowercaseType = type.toLowerCase() var pointFeature = { type: "Feature", "geometry": { "type": type, "coordinates": [+lineItem.long, +lineItem.lat] }, "properties": { "marker-size": "small", "marker-color": lineItem.hexcolor }, "opts": optionObj } return pointFeature } module.exports.shapeJSON = shapeJSON function shapeJSON(lineItem, type, optionObj) { var lowercaseType = type.toLowerCase() var coords if (type !== "LineString") { coords = JSON.parse( "[[" + lineItem[lowercaseType] + "]]" ) } else { coords = JSON.parse("[" + lineItem[lowercaseType] + "]") } var shapeFeature = { type: "Feature", "geometry": { "type": type, "coordinates": coords }, "properties": { "fillColor": lineItem.hexcolor, "color": lineItem.hexcolor }, "opts": optionObj } return shapeFeature } module.exports.determineType = determineType function determineType(lineItem) { var type = "" if (lineItem.lat && lineItem.long) type = "Point" if (lineItem.polygon) type = "Polygon" if (lineItem.multipolygon) type = "MultiPolygon" if (lineItem.linestring) type = "LineString" return type } module.exports.loadMap = function(mapDiv) { var map = L.mapbox.map(mapDiv) map.touchZoom.disable() map.doubleClickZoom.disable() map.scrollWheelZoom.disable() return map } module.exports.addTileLayer = function(map, tileLayer) { var layer = L.mapbox.tileLayer(tileLayer) layer.addTo(map) } module.exports.makePopupTemplate = makePopupTemplate function makePopupTemplate(geoJSON) { var allOptions = geoJSON[0].opts var keys = [] for (var i in allOptions) keys.push(i) var mustacheKeys = mustachify(keys) var template = {} template.name = "popup" + Math.random() template.template = templateString(mustacheKeys) return template } module.exports.templateString = templateString function templateString(mustacheKeys) { var template = "<ul>" var counter = mustacheKeys.length mustacheKeys.forEach(function(key) { counter-- if (counter === 0) template = template.concat(key, "</ul>") else template = template.concat(key) }) return template } module.exports.mustachify = mustachify function mustachify(array) { var newArray = [] array.forEach(function(item) { item = "<li><b>" + item + ":</b> {{" + item + "}}</li>" newArray.push(item) }) return newArray } module.exports.addMarkerLayer = function(geoJSON, map, template, clusterMarkers) { if (!template) { template = makePopupTemplate(geoJSON) ich.addTemplate(template.name, template.template) } else { var template = {"template": template} template.name = "popup" + Math.random() ich.addTemplate(template.name, template.template) } var features = { "type": "FeatureCollection", "features": geoJSON } var layer = L.geoJson(features, { pointToLayer: L.mapbox.marker.style, style: function(feature) { return feature.properties } }) var bounds = layer.getBounds() // check option and Leaflet extension var cluster = 
clusterMarkers && 'MarkerClusterGroup' in L if (cluster) { var clusterGroup = new L.MarkerClusterGroup() } map.fitBounds(bounds) layer.eachLayer(function(marker) { var popupContent = ich[template.name](marker.feature.opts) marker.bindPopup(popupContent.html(), {closeButton: false}) if (cluster) { clusterGroup.addLayer(marker) } }) if (cluster) { map.addLayer(clusterGroup) } else { layer.addTo(map) } return layer } },{"icanhaz":2,"mapbox.js":31}],30:[function(require,module,exports){ // Copyright (C) 2010 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview * Implements RFC 3986 for parsing/formatting URIs. * * @author [email protected] * \@provides URI * \@overrides window */ var URI = (function () { /** * creates a uri from the string form. The parser is relaxed, so special * characters that aren't escaped but don't cause ambiguities will not cause * parse failures. * * @return {URI|null} */ function parse(uriStr) { var m = ('' + uriStr).match(URI_RE_); if (!m) { return null; } return new URI( nullIfAbsent(m[1]), nullIfAbsent(m[2]), nullIfAbsent(m[3]), nullIfAbsent(m[4]), nullIfAbsent(m[5]), nullIfAbsent(m[6]), nullIfAbsent(m[7])); } /** * creates a uri from the given parts. * * @param scheme {string} an unencoded scheme such as "http" or null * @param credentials {string} unencoded user credentials or null * @param domain {string} an unencoded domain name or null * @param port {number} a port number in [1, 32768]. * -1 indicates no port, as does null. * @param path {string} an unencoded path * @param query {Array.<string>|string|null} a list of unencoded cgi * parameters where even values are keys and odds the corresponding values * or an unencoded query. * @param fragment {string} an unencoded fragment without the "#" or null. * @return {URI} */ function create(scheme, credentials, domain, port, path, query, fragment) { var uri = new URI( encodeIfExists2(scheme, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_), encodeIfExists2( credentials, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_), encodeIfExists(domain), port > 0 ? port.toString() : null, encodeIfExists2(path, URI_DISALLOWED_IN_PATH_), null, encodeIfExists(fragment)); if (query) { if ('string' === typeof query) { uri.setRawQuery(query.replace(/[^?&=0-9A-Za-z_\-~.%]/g, encodeOne)); } else { uri.setAllParameters(query); } } return uri; } function encodeIfExists(unescapedPart) { if ('string' == typeof unescapedPart) { return encodeURIComponent(unescapedPart); } return null; }; /** * if unescapedPart is non null, then escapes any characters in it that aren't * valid characters in a url and also escapes any special characters that * appear in extra. * * @param unescapedPart {string} * @param extra {RegExp} a character set of characters in [\01-\177]. * @return {string|null} null iff unescapedPart == null. 
*/ function encodeIfExists2(unescapedPart, extra) { if ('string' == typeof unescapedPart) { return encodeURI(unescapedPart).replace(extra, encodeOne); } return null; }; /** converts a character in [\01-\177] to its url encoded equivalent. */ function encodeOne(ch) { var n = ch.charCodeAt(0); return '%' + '0123456789ABCDEF'.charAt((n >> 4) & 0xf) + '0123456789ABCDEF'.charAt(n & 0xf); } /** * {@updoc * $ normPath('foo/./bar') * # 'foo/bar' * $ normPath('./foo') * # 'foo' * $ normPath('foo/.') * # 'foo' * $ normPath('foo//bar') * # 'foo/bar' * } */ function normPath(path) { return path.replace(/(^|\/)\.(?:\/|$)/g, '$1').replace(/\/{2,}/g, '/'); } var PARENT_DIRECTORY_HANDLER = new RegExp( '' // A path break + '(/|^)' // followed by a non .. path element // (cannot be . because normPath is used prior to this RegExp) + '(?:[^./][^/]*|\\.{2,}(?:[^./][^/]*)|\\.{3,}[^/]*)' // followed by .. followed by a path break. + '/\\.\\.(?:/|$)'); var PARENT_DIRECTORY_HANDLER_RE = new RegExp(PARENT_DIRECTORY_HANDLER); var EXTRA_PARENT_PATHS_RE = /^(?:\.\.\/)*(?:\.\.$)?/; /** * Normalizes its input path and collapses all . and .. sequences except for * .. sequences that would take it above the root of the current parent * directory. * {@updoc * $ collapse_dots('foo/../bar') * # 'bar' * $ collapse_dots('foo/./bar') * # 'foo/bar' * $ collapse_dots('foo/../bar/./../../baz') * # 'baz' * $ collapse_dots('../foo') * # '../foo' * $ collapse_dots('../foo').replace(EXTRA_PARENT_PATHS_RE, '') * # 'foo' * } */ function collapse_dots(path) { if (path === null) { return null; } var p = normPath(path); // Only /../ left to flatten var r = PARENT_DIRECTORY_HANDLER_RE; // We replace with $1 which matches a / before the .. because this // guarantees that: // (1) we have at most 1 / between the adjacent place, // (2) always have a slash if there is a preceding path section, and // (3) we never turn a relative path into an absolute path. for (var q; (q = p.replace(r, '$1')) != p; p = q) {}; return p; } /** * resolves a relative url string to a base uri. * @return {URI} */ function resolve(baseUri, relativeUri) { // there are several kinds of relative urls: // 1. //foo - replaces everything from the domain on. foo is a domain name // 2. foo - replaces the last part of the path, the whole query and fragment // 3. /foo - replaces the the path, the query and fragment // 4. ?foo - replace the query and fragment // 5. 
#foo - replace the fragment only var absoluteUri = baseUri.clone(); // we satisfy these conditions by looking for the first part of relativeUri // that is not blank and applying defaults to the rest var overridden = relativeUri.hasScheme(); if (overridden) { absoluteUri.setRawScheme(relativeUri.getRawScheme()); } else { overridden = relativeUri.hasCredentials(); } if (overridden) { absoluteUri.setRawCredentials(relativeUri.getRawCredentials()); } else { overridden = relativeUri.hasDomain(); } if (overridden) { absoluteUri.setRawDomain(relativeUri.getRawDomain()); } else { overridden = relativeUri.hasPort(); } var rawPath = relativeUri.getRawPath(); var simplifiedPath = collapse_dots(rawPath); if (overridden) { absoluteUri.setPort(relativeUri.getPort()); simplifiedPath = simplifiedPath && simplifiedPath.replace(EXTRA_PARENT_PATHS_RE, ''); } else { overridden = !!rawPath; if (overridden) { // resolve path properly if (simplifiedPath.charCodeAt(0) !== 0x2f /* / */) { // path is relative var absRawPath = collapse_dots(absoluteUri.getRawPath() || '') .replace(EXTRA_PARENT_PATHS_RE, ''); var slash = absRawPath.lastIndexOf('/') + 1; simplifiedPath = collapse_dots( (slash ? absRawPath.substring(0, slash) : '') + collapse_dots(rawPath)) .replace(EXTRA_PARENT_PATHS_RE, ''); } } else { simplifiedPath = simplifiedPath && simplifiedPath.replace(EXTRA_PARENT_PATHS_RE, ''); if (simplifiedPath !== rawPath) { absoluteUri.setRawPath(simplifiedPath); } } } if (overridden) { absoluteUri.setRawPath(simplifiedPath); } else { overridden = relativeUri.hasQuery(); } if (overridden) { absoluteUri.setRawQuery(relativeUri.getRawQuery()); } else { overridden = relativeUri.hasFragment(); } if (overridden) { absoluteUri.setRawFragment(relativeUri.getRawFragment()); } return absoluteUri; } /** * a mutable URI. * * This class contains setters and getters for the parts of the URI. * The <tt>getXYZ</tt>/<tt>setXYZ</tt> methods return the decoded part -- so * <code>uri.parse('/foo%20bar').getPath()</code> will return the decoded path, * <tt>/foo bar</tt>. * * <p>The raw versions of fields are available too. * <code>uri.parse('/foo%20bar').getRawPath()</code> will return the raw path, * <tt>/foo%20bar</tt>. Use the raw setters with care, since * <code>URI::toString</code> is not guaranteed to return a valid url if a * raw setter was used. * * <p>All setters return <tt>this</tt> and so may be chained, a la * <code>uri.parse('/foo').setFragment('part').toString()</code>. * * <p>You should not use this constructor directly -- please prefer the factory * functions {@link uri.parse}, {@link uri.create}, {@link uri.resolve} * instead.</p> * * <p>The parameters are all raw (assumed to be properly escaped) parts, and * any (but not all) may be null. Undefined is not allowed.</p> * * @constructor */ function URI( rawScheme, rawCredentials, rawDomain, port, rawPath, rawQuery, rawFragment) { this.scheme_ = rawScheme; this.credentials_ = rawCredentials; this.domain_ = rawDomain; this.port_ = port; this.path_ = rawPath; this.query_ = rawQuery; this.fragment_ = rawFragment; /** * @type {Array|null} */ this.paramCache_ = null; } /** returns the string form of the url. 
*/ URI.prototype.toString = function () { var out = []; if (null !== this.scheme_) { out.push(this.scheme_, ':'); } if (null !== this.domain_) { out.push('//'); if (null !== this.credentials_) { out.push(this.credentials_, '@'); } out.push(this.domain_); if (null !== this.port_) { out.push(':', this.port_.toString()); } } if (null !== this.path_) { out.push(this.path_); } if (null !== this.query_) { out.push('?', this.query_); } if (null !== this.fragment_) { out.push('#', this.fragment_); } return out.join(''); }; URI.prototype.clone = function () { return new URI(this.scheme_, this.credentials_, this.domain_, this.port_, this.path_, this.query_, this.fragment_); }; URI.prototype.getScheme = function () { // HTML5 spec does not require the scheme to be lowercased but // all common browsers except Safari lowercase the scheme. return this.scheme_ && decodeURIComponent(this.scheme_).toLowerCase(); }; URI.prototype.getRawScheme = function () { return this.scheme_; }; URI.prototype.setScheme = function (newScheme) { this.scheme_ = encodeIfExists2( newScheme, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_); return this; }; URI.prototype.setRawScheme = function (newScheme) { this.scheme_ = newScheme ? newScheme : null; return this; }; URI.prototype.hasScheme = function () { return null !== this.scheme_; }; URI.prototype.getCredentials = function () { return this.credentials_ && decodeURIComponent(this.credentials_); }; URI.prototype.getRawCredentials = function () { return this.credentials_; }; URI.prototype.setCredentials = function (newCredentials) { this.credentials_ = encodeIfExists2( newCredentials, URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_); return this; }; URI.prototype.setRawCredentials = function (newCredentials) { this.credentials_ = newCredentials ? newCredentials : null; return this; }; URI.prototype.hasCredentials = function () { return null !== this.credentials_; }; URI.prototype.getDomain = function () { return this.domain_ && decodeURIComponent(this.domain_); }; URI.prototype.getRawDomain = function () { return this.domain_; }; URI.prototype.setDomain = function (newDomain) { return this.setRawDomain(newDomain && encodeURIComponent(newDomain)); }; URI.prototype.setRawDomain = function (newDomain) { this.domain_ = newDomain ? newDomain : null; // Maintain the invariant that paths must start with a slash when the URI // is not path-relative. return this.setRawPath(this.path_); }; URI.prototype.hasDomain = function () { return null !== this.domain_; }; URI.prototype.getPort = function () { return this.port_ && decodeURIComponent(this.port_); }; URI.prototype.setPort = function (newPort) { if (newPort) { newPort = Number(newPort); if (newPort !== (newPort & 0xffff)) { throw new Error('Bad port number ' + newPort); } this.port_ = '' + newPort; } else { this.port_ = null; } return this; }; URI.prototype.hasPort = function () { return null !== this.port_; }; URI.prototype.getPath = function () { return this.path_ && decodeURIComponent(this.path_); }; URI.prototype.getRawPath = function () { return this.path_; }; URI.prototype.setPath = function (newPath) { return this.setRawPath(encodeIfExists2(newPath, URI_DISALLOWED_IN_PATH_)); }; URI.prototype.setRawPath = function (newPath) { if (newPath) { newPath = String(newPath); this.path_ = // Paths must start with '/' unless this is a path-relative URL. (!this.domain_ || /^\//.test(newPath)) ? 
newPath : '/' + newPath; } else { this.path_ = null; } return this; }; URI.prototype.hasPath = function () { return null !== this.path_; }; URI.prototype.getQuery = function () { // From http://www.w3.org/Addressing/URL/4_URI_Recommentations.html // Within the query string, the plus sign is reserved as shorthand notation // for a space. return this.query_ && decodeURIComponent(this.query_).replace(/\+/g, ' '); }; URI.prototype.getRawQuery = function () { return this.query_; }; URI.prototype.setQuery = function (newQuery) { this.paramCache_ = null; this.query_ = encodeIfExists(newQuery); return this; }; URI.prototype.setRawQuery = function (newQuery) { this.paramCache_ = null; this.query_ = newQuery ? newQuery : null; return this; }; URI.prototype.hasQuery = function () { return null !== this.query_; }; /** * sets the query given a list of strings of the form * [ key0, value0, key1, value1, ... ]. * * <p><code>uri.setAllParameters(['a', 'b', 'c', 'd']).getQuery()</code> * will yield <code>'a=b&c=d'</code>. */ URI.prototype.setAllParameters = function (params) { if (typeof params === 'object') { if (!(params instanceof Array) && (params instanceof Object || Object.prototype.toString.call(params) !== '[object Array]')) { var newParams = []; var i = -1; for (var k in params) { var v = params[k]; if ('string' === typeof v) { newParams[++i] = k; newParams[++i] = v; } } params = newParams; } } this.paramCache_ = null; var queryBuf = []; var separator = ''; for (var j = 0; j < params.length;) { var k = params[j++]; var v = params[j++]; queryBuf.push(separator, encodeURIComponent(k.toString())); separator = '&'; if (v) { queryBuf.push('=', encodeURIComponent(v.toString())); } } this.query_ = queryBuf.join(''); return this; }; URI.prototype.checkParameterCache_ = function () { if (!this.paramCache_) { var q = this.query_; if (!q) { this.paramCache_ = []; } else { var cgiParams = q.split(/[&\?]/); var out = []; var k = -1; for (var i = 0; i < cgiParams.length; ++i) { var m = cgiParams[i].match(/^([^=]*)(?:=(.*))?$/); // From http://www.w3.org/Addressing/URL/4_URI_Recommentations.html // Within the query string, the plus sign is reserved as shorthand // notation for a space. out[++k] = decodeURIComponent(m[1]).replace(/\+/g, ' '); out[++k] = decodeURIComponent(m[2] || '').replace(/\+/g, ' '); } this.paramCache_ = out; } } }; /** * sets the values of the named cgi parameters. * * <p>So, <code>uri.parse('foo?a=b&c=d&e=f').setParameterValues('c', ['new']) * </code> yields <tt>foo?a=b&c=new&e=f</tt>.</p> * * @param key {string} * @param values {Array.<string>} the new values. If values is a single string * then it will be treated as the sole value. 
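 *
 * <p>A minimal illustrative sketch (results assume the append behavior
 * implemented below, where surplus new values are added at the end):
 * {\@updoc
 *   $ uri.parse('foo?a=b&c=d').setParameterValues('c', ['x', 'y']).getQuery()
 *   # 'a=b&c=x&c=y'
 *   $ uri.parse('foo?a=b').setParameterValues('c', 'lone').getQuery()
 *   # 'a=b&c=lone'
 * }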
*/ URI.prototype.setParameterValues = function (key, values) { // be nice and avoid subtle bugs where [] operator on string performs charAt // on some browsers and crashes on IE if (typeof values === 'string') { values = [ values ]; } this.checkParameterCache_(); var newValueIndex = 0; var pc = this.paramCache_; var params = []; for (var i = 0, k = 0; i < pc.length; i += 2) { if (key === pc[i]) { if (newValueIndex < values.length) { params.push(key, values[newValueIndex++]); } } else { params.push(pc[i], pc[i + 1]); } } while (newValueIndex < values.length) { params.push(key, values[newValueIndex++]); } this.setAllParameters(params); return this; }; URI.prototype.removeParameter = function (key) { return this.setParameterValues(key, []); }; /** * returns the parameters specified in the query part of the uri as a list of * keys and values like [ key0, value0, key1, value1, ... ]. * * @return {Array.<string>} */ URI.prototype.getAllParameters = function () { this.checkParameterCache_(); return this.paramCache_.slice(0, this.paramCache_.length); }; /** * returns the value<b>s</b> for a given cgi parameter as a list of decoded * query parameter values. * @return {Array.<string>} */ URI.prototype.getParameterValues = function (paramNameUnescaped) { this.checkParameterCache_(); var values = []; for (var i = 0; i < this.paramCache_.length; i += 2) { if (paramNameUnescaped === this.paramCache_[i]) { values.push(this.paramCache_[i + 1]); } } return values; }; /** * returns a map of cgi parameter names to (non-empty) lists of values. * @return {Object.<string,Array.<string>>} */ URI.prototype.getParameterMap = function (paramNameUnescaped) { this.checkParameterCache_(); var paramMap = {}; for (var i = 0; i < this.paramCache_.length; i += 2) { var key = this.paramCache_[i++], value = this.paramCache_[i++]; if (!(key in paramMap)) { paramMap[key] = [value]; } else { paramMap[key].push(value); } } return paramMap; }; /** * returns the first value for a given cgi parameter or null if the given * parameter name does not appear in the query string. * If the given parameter name does appear, but has no '<tt>=</tt>' following * it, then the empty string will be returned. * @return {string|null} */ URI.prototype.getParameterValue = function (paramNameUnescaped) { this.checkParameterCache_(); for (var i = 0; i < this.paramCache_.length; i += 2) { if (paramNameUnescaped === this.paramCache_[i]) { return this.paramCache_[i + 1]; } } return null; }; URI.prototype.getFragment = function () { return this.fragment_ && decodeURIComponent(this.fragment_); }; URI.prototype.getRawFragment = function () { return this.fragment_; }; URI.prototype.setFragment = function (newFragment) { this.fragment_ = newFragment ? encodeURIComponent(newFragment) : null; return this; }; URI.prototype.setRawFragment = function (newFragment) { this.fragment_ = newFragment ? newFragment : null; return this; }; URI.prototype.hasFragment = function () { return null !== this.fragment_; }; function nullIfAbsent(matchPart) { return ('string' == typeof matchPart) && (matchPart.length > 0) ? matchPart : null; } /** * a regular expression for breaking a URI into its component parts. * * <p>http://www.gbiv.com/protocols/uri/rfc/rfc3986.html#RFC2234 says * As the "first-match-wins" algorithm is identical to the "greedy" * disambiguation method used by POSIX regular expressions, it is natural and * commonplace to use a regular expression for parsing the potential five * components of a URI reference. 
* * <p>The following line is the regular expression for breaking-down a * well-formed URI reference into its components. * * <pre> * ^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))? * 12 3 4 5 6 7 8 9 * </pre> * * <p>The numbers in the second line above are only to assist readability; they * indicate the reference points for each subexpression (i.e., each paired * parenthesis). We refer to the value matched for subexpression <n> as $<n>. * For example, matching the above expression to * <pre> * http://www.ics.uci.edu/pub/ietf/uri/#Related * </pre> * results in the following subexpression matches: * <pre> * $1 = http: * $2 = http * $3 = //www.ics.uci.edu * $4 = www.ics.uci.edu * $5 = /pub/ietf/uri/ * $6 = <undefined> * $7 = <undefined> * $8 = #Related * $9 = Related * </pre> * where <undefined> indicates that the component is not present, as is the * case for the query component in the above example. Therefore, we can * determine the value of the five components as * <pre> * scheme = $2 * authority = $4 * path = $5 * query = $7 * fragment = $9 * </pre> * * <p>msamuel: I have modified the regular expression slightly to expose the * credentials, domain, and port separately from the authority. * The modified version yields * <pre> * $1 = http scheme * $2 = <undefined> credentials -\ * $3 = www.ics.uci.edu domain | authority * $4 = <undefined> port -/ * $5 = /pub/ietf/uri/ path * $6 = <undefined> query without ? * $7 = Related fragment without # * </pre> */ var URI_RE_ = new RegExp( "^" + "(?:" + "([^:/?#]+)" + // scheme ":)?" + "(?://" + "(?:([^/?#]*)@)?" + // credentials "([^/?#:@]*)" + // domain "(?::([0-9]+))?" + // port ")?" + "([^?#]+)?" + // path "(?:\\?([^#]*))?" + // query "(?:#(.*))?" + // fragment "$" ); var URI_DISALLOWED_IN_SCHEME_OR_CREDENTIALS_ = /[#\/\?@]/g; var URI_DISALLOWED_IN_PATH_ = /[\#\?]/g; URI.parse = parse; URI.create = create; URI.resolve = resolve; URI.collapse_dots = collapse_dots; // Visible for testing. // lightweight string-based api for loadModuleMaker URI.utils = { mimeTypeOf: function (uri) { var uriObj = parse(uri); if (/\.html$/.test(uriObj.getPath())) { return 'text/html'; } else { return 'application/javascript'; } }, resolve: function (base, uri) { if (base) { return resolve(parse(base), parse(uri)).toString(); } else { return '' + uri; } } }; return URI; })(); // Copyright Google Inc. 
// Licensed under the Apache Licence Version 2.0 // Autogenerated at Mon Feb 25 13:05:42 EST 2013 // @overrides window // @provides html4 var html4 = {}; html4.atype = { 'NONE': 0, 'URI': 1, 'URI_FRAGMENT': 11, 'SCRIPT': 2, 'STYLE': 3, 'HTML': 12, 'ID': 4, 'IDREF': 5, 'IDREFS': 6, 'GLOBAL_NAME': 7, 'LOCAL_NAME': 8, 'CLASSES': 9, 'FRAME_TARGET': 10, 'MEDIA_QUERY': 13 }; html4[ 'atype' ] = html4.atype; html4.ATTRIBS = { '*::class': 9, '*::dir': 0, '*::draggable': 0, '*::hidden': 0, '*::id': 4, '*::inert': 0, '*::itemprop': 0, '*::itemref': 6, '*::itemscope': 0, '*::lang': 0, '*::onblur': 2, '*::onchange': 2, '*::onclick': 2, '*::ondblclick': 2, '*::onfocus': 2, '*::onkeydown': 2, '*::onkeypress': 2, '*::onkeyup': 2, '*::onload': 2, '*::onmousedown': 2, '*::onmousemove': 2, '*::onmouseout': 2, '*::onmouseover': 2, '*::onmouseup': 2, '*::onreset': 2, '*::onscroll': 2, '*::onselect': 2, '*::onsubmit': 2, '*::onunload': 2, '*::spellcheck': 0, '*::style': 3, '*::title': 0, '*::translate': 0, 'a::accesskey': 0, 'a::coords': 0, 'a::href': 1, 'a::hreflang': 0, 'a::name': 7, 'a::onblur': 2, 'a::onfocus': 2, 'a::shape': 0, 'a::tabindex': 0, 'a::target': 10, 'a::type': 0, 'area::accesskey': 0, 'area::alt': 0, 'area::coords': 0, 'area::href': 1, 'area::nohref': 0, 'area::onblur': 2, 'area::onfocus': 2, 'area::shape': 0, 'area::tabindex': 0, 'area::target': 10, 'audio::controls': 0, 'audio::loop': 0, 'audio::mediagroup': 5, 'audio::muted': 0, 'audio::preload': 0, 'bdo::dir': 0, 'blockquote::cite': 1, 'br::clear': 0, 'button::accesskey': 0, 'button::disabled': 0, 'button::name': 8, 'button::onblur': 2, 'button::onfocus': 2, 'button::tabindex': 0, 'button::type': 0, 'button::value': 0, 'canvas::height': 0, 'canvas::width': 0, 'caption::align': 0, 'col::align': 0, 'col::char': 0, 'col::charoff': 0, 'col::span': 0, 'col::valign': 0, 'col::width': 0, 'colgroup::align': 0, 'colgroup::char': 0, 'colgroup::charoff': 0, 'colgroup::span': 0, 'colgroup::valign': 0, 'colgroup::width': 0, 'command::checked': 0, 'command::command': 5, 'command::disabled': 0, 'command::icon': 1, 'command::label': 0, 'command::radiogroup': 0, 'command::type': 0, 'data::value': 0, 'del::cite': 1, 'del::datetime': 0, 'details::open': 0, 'dir::compact': 0, 'div::align': 0, 'dl::compact': 0, 'fieldset::disabled': 0, 'font::color': 0, 'font::face': 0, 'font::size': 0, 'form::accept': 0, 'form::action': 1, 'form::autocomplete': 0, 'form::enctype': 0, 'form::method': 0, 'form::name': 7, 'form::novalidate': 0, 'form::onreset': 2, 'form::onsubmit': 2, 'form::target': 10, 'h1::align': 0, 'h2::align': 0, 'h3::align': 0, 'h4::align': 0, 'h5::align': 0, 'h6::align': 0, 'hr::align': 0, 'hr::noshade': 0, 'hr::size': 0, 'hr::width': 0, 'iframe::align': 0, 'iframe::frameborder': 0, 'iframe::height': 0, 'iframe::marginheight': 0, 'iframe::marginwidth': 0, 'iframe::width': 0, 'img::align': 0, 'img::alt': 0, 'img::border': 0, 'img::height': 0, 'img::hspace': 0, 'img::ismap': 0, 'img::name': 7, 'img::src': 1, 'img::usemap': 11, 'img::vspace': 0, 'img::width': 0, 'input::accept': 0, 'input::accesskey': 0, 'input::align': 0, 'input::alt': 0, 'input::autocomplete': 0, 'input::checked': 0, 'input::disabled': 0, 'input::inputmode': 0, 'input::ismap': 0, 'input::list': 5, 'input::max': 0, 'input::maxlength': 0, 'input::min': 0, 'input::multiple': 0, 'input::name': 8, 'input::onblur': 2, 'input::onchange': 2, 'input::onfocus': 2, 'input::onselect': 2, 'input::placeholder': 0, 'input::readonly': 0, 'input::required': 0, 'input::size': 0, 'input::src': 1, 
'input::step': 0, 'input::tabindex': 0, 'input::type': 0, 'input::usemap': 11, 'input::value': 0, 'ins::cite': 1, 'ins::datetime': 0, 'label::accesskey': 0, 'label::for': 5, 'label::onblur': 2, 'label::onfocus': 2, 'legend::accesskey': 0, 'legend::align': 0, 'li::type': 0, 'li::value': 0, 'map::name': 7, 'menu::compact': 0, 'menu::label': 0, 'menu::type': 0, 'meter::high': 0, 'meter::low': 0, 'meter::max': 0, 'meter::min': 0, 'meter::value': 0, 'ol::compact': 0, 'ol::reversed': 0, 'ol::start': 0, 'ol::type': 0, 'optgroup::disabled': 0, 'optgroup::label': 0, 'option::disabled': 0, 'option::label': 0, 'option::selected': 0, 'option::value': 0, 'output::for': 6, 'output::name': 8, 'p::align': 0, 'pre::width': 0, 'progress::max': 0, 'progress::min': 0, 'progress::value': 0, 'q::cite': 1, 'select::autocomplete': 0, 'select::disabled': 0, 'select::multiple': 0, 'select::name': 8, 'select::onblur': 2, 'select::onchange': 2, 'select::onfocus': 2, 'select::required': 0, 'select::size': 0, 'select::tabindex': 0, 'source::type': 0, 'table::align': 0, 'table::bgcolor': 0, 'table::border': 0, 'table::cellpadding': 0, 'table::cellspacing': 0, 'table::frame': 0, 'table::rules': 0, 'table::summary': 0, 'table::width': 0, 'tbody::align': 0, 'tbody::char': 0, 'tbody::charoff': 0, 'tbody::valign': 0, 'td::abbr': 0, 'td::align': 0, 'td::axis': 0, 'td::bgcolor': 0, 'td::char': 0, 'td::charoff': 0, 'td::colspan': 0, 'td::headers': 6, 'td::height': 0, 'td::nowrap': 0, 'td::rowspan': 0, 'td::scope': 0, 'td::valign': 0, 'td::width': 0, 'textarea::accesskey': 0, 'textarea::autocomplete': 0, 'textarea::cols': 0, 'textarea::disabled': 0, 'textarea::inputmode': 0, 'textarea::name': 8, 'textarea::onblur': 2, 'textarea::onchange': 2, 'textarea::onfocus': 2, 'textarea::onselect': 2, 'textarea::placeholder': 0, 'textarea::readonly': 0, 'textarea::required': 0, 'textarea::rows': 0, 'textarea::tabindex': 0, 'textarea::wrap': 0, 'tfoot::align': 0, 'tfoot::char': 0, 'tfoot::charoff': 0, 'tfoot::valign': 0, 'th::abbr': 0, 'th::align': 0, 'th::axis': 0, 'th::bgcolor': 0, 'th::char': 0, 'th::charoff': 0, 'th::colspan': 0, 'th::headers': 6, 'th::height': 0, 'th::nowrap': 0, 'th::rowspan': 0, 'th::scope': 0, 'th::valign': 0, 'th::width': 0, 'thead::align': 0, 'thead::char': 0, 'thead::charoff': 0, 'thead::valign': 0, 'tr::align': 0, 'tr::bgcolor': 0, 'tr::char': 0, 'tr::charoff': 0, 'tr::valign': 0, 'track::default': 0, 'track::kind': 0, 'track::label': 0, 'track::srclang': 0, 'ul::compact': 0, 'ul::type': 0, 'video::controls': 0, 'video::height': 0, 'video::loop': 0, 'video::mediagroup': 5, 'video::muted': 0, 'video::poster': 1, 'video::preload': 0, 'video::width': 0 }; html4[ 'ATTRIBS' ] = html4.ATTRIBS; html4.eflags = { 'OPTIONAL_ENDTAG': 1, 'EMPTY': 2, 'CDATA': 4, 'RCDATA': 8, 'UNSAFE': 16, 'FOLDABLE': 32, 'SCRIPT': 64, 'STYLE': 128, 'VIRTUALIZED': 256 }; html4[ 'eflags' ] = html4.eflags; html4.ELEMENTS = { 'a': 0, 'abbr': 0, 'acronym': 0, 'address': 0, 'applet': 272, 'area': 2, 'article': 0, 'aside': 0, 'audio': 0, 'b': 0, 'base': 274, 'basefont': 274, 'bdi': 0, 'bdo': 0, 'big': 0, 'blockquote': 0, 'body': 305, 'br': 2, 'button': 0, 'canvas': 0, 'caption': 0, 'center': 0, 'cite': 0, 'code': 0, 'col': 2, 'colgroup': 1, 'command': 2, 'data': 0, 'datalist': 0, 'dd': 1, 'del': 0, 'details': 0, 'dfn': 0, 'dialog': 272, 'dir': 0, 'div': 0, 'dl': 0, 'dt': 1, 'em': 0, 'fieldset': 0, 'figcaption': 0, 'figure': 0, 'font': 0, 'footer': 0, 'form': 0, 'frame': 274, 'frameset': 272, 'h1': 0, 'h2': 0, 'h3': 0, 'h4': 0, 'h5': 0, 'h6': 0, 
'head': 305, 'header': 0, 'hgroup': 0, 'hr': 2, 'html': 305, 'i': 0, 'iframe': 4, 'img': 2, 'input': 2, 'ins': 0, 'isindex': 274, 'kbd': 0, 'keygen': 274, 'label': 0, 'legend': 0, 'li': 1, 'link': 274, 'map': 0, 'mark': 0, 'menu': 0, 'meta': 274, 'meter': 0, 'nav': 0, 'nobr': 0, 'noembed': 276, 'noframes': 276, 'noscript': 276, 'object': 272, 'ol': 0, 'optgroup': 0, 'option': 1, 'output': 0, 'p': 1, 'param': 274, 'pre': 0, 'progress': 0, 'q': 0, 's': 0, 'samp': 0, 'script': 84, 'section': 0, 'select': 0, 'small': 0, 'source': 2, 'span': 0, 'strike': 0, 'strong': 0, 'style': 148, 'sub': 0, 'summary': 0, 'sup': 0, 'table': 0, 'tbody': 1, 'td': 1, 'textarea': 8, 'tfoot': 1, 'th': 1, 'thead': 1, 'time': 0, 'title': 280, 'tr': 1, 'track': 2, 'tt': 0, 'u': 0, 'ul': 0, 'var': 0, 'video': 0, 'wbr': 2 }; html4[ 'ELEMENTS' ] = html4.ELEMENTS; html4.ELEMENT_DOM_INTERFACES = { 'a': 'HTMLAnchorElement', 'abbr': 'HTMLElement', 'acronym': 'HTMLElement', 'address': 'HTMLElement', 'applet': 'HTMLAppletElement', 'area': 'HTMLAreaElement', 'article': 'HTMLElement', 'aside': 'HTMLElement', 'audio': 'HTMLAudioElement', 'b': 'HTMLElement', 'base': 'HTMLBaseElement', 'basefont': 'HTMLBaseFontElement', 'bdi': 'HTMLElement', 'bdo': 'HTMLElement', 'big': 'HTMLElement', 'blockquote': 'HTMLQuoteElement', 'body': 'HTMLBodyElement', 'br': 'HTMLBRElement', 'button': 'HTMLButtonElement', 'canvas': 'HTMLCanvasElement', 'caption': 'HTMLTableCaptionElement', 'center': 'HTMLElement', 'cite': 'HTMLElement', 'code': 'HTMLElement', 'col': 'HTMLTableColElement', 'colgroup': 'HTMLTableColElement', 'command': 'HTMLCommandElement', 'data': 'HTMLElement', 'datalist': 'HTMLDataListElement', 'dd': 'HTMLElement', 'del': 'HTMLModElement', 'details': 'HTMLDetailsElement', 'dfn': 'HTMLElement', 'dialog': 'HTMLDialogElement', 'dir': 'HTMLDirectoryElement', 'div': 'HTMLDivElement', 'dl': 'HTMLDListElement', 'dt': 'HTMLElement', 'em': 'HTMLElement', 'fieldset': 'HTMLFieldSetElement', 'figcaption': 'HTMLElement', 'figure': 'HTMLElement', 'font': 'HTMLFontElement', 'footer': 'HTMLElement', 'form': 'HTMLFormElement', 'frame': 'HTMLFrameElement', 'frameset': 'HTMLFrameSetElement', 'h1': 'HTMLHeadingElement', 'h2': 'HTMLHeadingElement', 'h3': 'HTMLHeadingElement', 'h4': 'HTMLHeadingElement', 'h5': 'HTMLHeadingElement', 'h6': 'HTMLHeadingElement', 'head': 'HTMLHeadElement', 'header': 'HTMLElement', 'hgroup': 'HTMLElement', 'hr': 'HTMLHRElement', 'html': 'HTMLHtmlElement', 'i': 'HTMLElement', 'iframe': 'HTMLIFrameElement', 'img': 'HTMLImageElement', 'input': 'HTMLInputElement', 'ins': 'HTMLModElement', 'isindex': 'HTMLUnknownElement', 'kbd': 'HTMLElement', 'keygen': 'HTMLKeygenElement', 'label': 'HTMLLabelElement', 'legend': 'HTMLLegendElement', 'li': 'HTMLLIElement', 'link': 'HTMLLinkElement', 'map': 'HTMLMapElement', 'mark': 'HTMLElement', 'menu': 'HTMLMenuElement', 'meta': 'HTMLMetaElement', 'meter': 'HTMLMeterElement', 'nav': 'HTMLElement', 'nobr': 'HTMLElement', 'noembed': 'HTMLElement', 'noframes': 'HTMLElement', 'noscript': 'HTMLElement', 'object': 'HTMLObjectElement', 'ol': 'HTMLOListElement', 'optgroup': 'HTMLOptGroupElement', 'option': 'HTMLOptionElement', 'output': 'HTMLOutputElement', 'p': 'HTMLParagraphElement', 'param': 'HTMLParamElement', 'pre': 'HTMLPreElement', 'progress': 'HTMLProgressElement', 'q': 'HTMLQuoteElement', 's': 'HTMLElement', 'samp': 'HTMLElement', 'script': 'HTMLScriptElement', 'section': 'HTMLElement', 'select': 'HTMLSelectElement', 'small': 'HTMLElement', 'source': 'HTMLSourceElement', 'span': 'HTMLSpanElement', 
'strike': 'HTMLElement', 'strong': 'HTMLElement', 'style': 'HTMLStyleElement', 'sub': 'HTMLElement', 'summary': 'HTMLElement', 'sup': 'HTMLElement', 'table': 'HTMLTableElement', 'tbody': 'HTMLTableSectionElement', 'td': 'HTMLTableDataCellElement', 'textarea': 'HTMLTextAreaElement', 'tfoot': 'HTMLTableSectionElement', 'th': 'HTMLTableHeaderCellElement', 'thead': 'HTMLTableSectionElement', 'time': 'HTMLTimeElement', 'title': 'HTMLTitleElement', 'tr': 'HTMLTableRowElement', 'track': 'HTMLTrackElement', 'tt': 'HTMLElement', 'u': 'HTMLElement', 'ul': 'HTMLUListElement', 'var': 'HTMLElement', 'video': 'HTMLVideoElement', 'wbr': 'HTMLElement' }; html4[ 'ELEMENT_DOM_INTERFACES' ] = html4.ELEMENT_DOM_INTERFACES; html4.ueffects = { 'NOT_LOADED': 0, 'SAME_DOCUMENT': 1, 'NEW_DOCUMENT': 2 }; html4[ 'ueffects' ] = html4.ueffects; html4.URIEFFECTS = { 'a::href': 2, 'area::href': 2, 'blockquote::cite': 0, 'command::icon': 1, 'del::cite': 0, 'form::action': 2, 'img::src': 1, 'input::src': 1, 'ins::cite': 0, 'q::cite': 0, 'video::poster': 1 }; html4[ 'URIEFFECTS' ] = html4.URIEFFECTS; html4.ltypes = { 'UNSANDBOXED': 2, 'SANDBOXED': 1, 'DATA': 0 }; html4[ 'ltypes' ] = html4.ltypes; html4.LOADERTYPES = { 'a::href': 2, 'area::href': 2, 'blockquote::cite': 2, 'command::icon': 1, 'del::cite': 2, 'form::action': 2, 'img::src': 1, 'input::src': 1, 'ins::cite': 2, 'q::cite': 2, 'video::poster': 1 }; html4[ 'LOADERTYPES' ] = html4.LOADERTYPES; // Copyright (C) 2006 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview * An HTML sanitizer that can satisfy a variety of security policies. * * <p> * The HTML sanitizer is built around a SAX parser and HTML element and * attributes schemas. * * If the cssparser is loaded, inline styles are sanitized using the * css property and value schemas. Else they are remove during * sanitization. * * If it exists, uses parseCssDeclarations, sanitizeCssProperty, cssSchema * * @author [email protected] * @author [email protected] * \@requires html4, URI * \@overrides window * \@provides html, html_sanitize */ // The Turkish i seems to be a non-issue, but abort in case it is. if ('I'.toLowerCase() !== 'i') { throw 'I/i problem'; } /** * \@namespace */ var html = (function(html4) { // For closure compiler var parseCssDeclarations, sanitizeCssProperty, cssSchema; if ('undefined' !== typeof window) { parseCssDeclarations = window['parseCssDeclarations']; sanitizeCssProperty = window['sanitizeCssProperty']; cssSchema = window['cssSchema']; } // The keys of this object must be 'quoted' or JSCompiler will mangle them! // This is a partial list -- lookupEntity() uses the host browser's parser // (when available) to implement full entity lookup. // Note that entities are in general case-sensitive; the uppercase ones are // explicitly defined by HTML5 (presumably as compatibility). var ENTITIES = { 'lt': '<', 'LT': '<', 'gt': '>', 'GT': '>', 'amp': '&', 'AMP': '&', 'quot': '"', 'apos': '\'', 'nbsp': '\240' }; // Patterns for types of entity/character reference names. 
var decimalEscapeRe = /^#(\d+)$/; var hexEscapeRe = /^#x([0-9A-Fa-f]+)$/; // contains every entity per http://www.w3.org/TR/2011/WD-html5-20110113/named-character-references.html var safeEntityNameRe = /^[A-Za-z][A-za-z0-9]+$/; // Used as a hook to invoke the browser's entity parsing. <textarea> is used // because its content is parsed for entities but not tags. // TODO(kpreid): This retrieval is a kludge and leads to silent loss of // functionality if the document isn't available. var entityLookupElement = ('undefined' !== typeof window && window['document']) ? window['document'].createElement('textarea') : null; /** * Decodes an HTML entity. * * {\@updoc * $ lookupEntity('lt') * # '<' * $ lookupEntity('GT') * # '>' * $ lookupEntity('amp') * # '&' * $ lookupEntity('nbsp') * # '\xA0' * $ lookupEntity('apos') * # "'" * $ lookupEntity('quot') * # '"' * $ lookupEntity('#xa') * # '\n' * $ lookupEntity('#10') * # '\n' * $ lookupEntity('#x0a') * # '\n' * $ lookupEntity('#010') * # '\n' * $ lookupEntity('#x00A') * # '\n' * $ lookupEntity('Pi') // Known failure * # '\u03A0' * $ lookupEntity('pi') // Known failure * # '\u03C0' * } * * @param {string} name the content between the '&' and the ';'. * @return {string} a single unicode code-point as a string. */ function lookupEntity(name) { // TODO: entity lookup as specified by HTML5 actually depends on the // presence of the ";". if (ENTITIES.hasOwnProperty(name)) { return ENTITIES[name]; } var m = name.match(decimalEscapeRe); if (m) { return String.fromCharCode(parseInt(m[1], 10)); } else if (!!(m = name.match(hexEscapeRe))) { return String.fromCharCode(parseInt(m[1], 16)); } else if (entityLookupElement && safeEntityNameRe.test(name)) { entityLookupElement.innerHTML = '&' + name + ';'; var text = entityLookupElement.textContent; ENTITIES[name] = text; return text; } else { return '&' + name + ';'; } } function decodeOneEntity(_, name) { return lookupEntity(name); } var nulRe = /\0/g; function stripNULs(s) { return s.replace(nulRe, ''); } var ENTITY_RE_1 = /&(#[0-9]+|#[xX][0-9A-Fa-f]+|\w+);/g; var ENTITY_RE_2 = /^(#[0-9]+|#[xX][0-9A-Fa-f]+|\w+);/; /** * The plain text of a chunk of HTML CDATA which possibly containing. * * {\@updoc * $ unescapeEntities('') * # '' * $ unescapeEntities('hello World!') * # 'hello World!' * $ unescapeEntities('1 &lt; 2 &amp;&AMP; 4 &gt; 3&#10;') * # '1 < 2 && 4 > 3\n' * $ unescapeEntities('&lt;&lt <- unfinished entity&gt;') * # '<&lt <- unfinished entity>' * $ unescapeEntities('/foo?bar=baz&copy=true') // & often unescaped in URLS * # '/foo?bar=baz&copy=true' * $ unescapeEntities('pi=&pi;&#x3c0;, Pi=&Pi;\u03A0') // FIXME: known failure * # 'pi=\u03C0\u03c0, Pi=\u03A0\u03A0' * } * * @param {string} s a chunk of HTML CDATA. It must not start or end inside * an HTML entity. */ function unescapeEntities(s) { return s.replace(ENTITY_RE_1, decodeOneEntity); } var ampRe = /&/g; var looseAmpRe = /&([^a-z#]|#(?:[^0-9x]|x(?:[^0-9a-f]|$)|$)|$)/gi; var ltRe = /[<]/g; var gtRe = />/g; var quotRe = /\"/g; /** * Escapes HTML special characters in attribute values. * * {\@updoc * $ escapeAttrib('') * # '' * $ escapeAttrib('"<<&==&>>"') // Do not just escape the first occurrence. * # '&#34;&lt;&lt;&amp;&#61;&#61;&amp;&gt;&gt;&#34;' * $ escapeAttrib('Hello <World>!') * # 'Hello &lt;World&gt;!' * } */ function escapeAttrib(s) { return ('' + s).replace(ampRe, '&amp;').replace(ltRe, '&lt;') .replace(gtRe, '&gt;').replace(quotRe, '&#34;'); } /** * Escape entities in RCDATA that can be escaped without changing the meaning. 
* {\@updoc * $ normalizeRCData('1 < 2 &&amp; 3 > 4 &amp;& 5 &lt; 7&8') * # '1 &lt; 2 &amp;&amp; 3 &gt; 4 &amp;&amp; 5 &lt; 7&amp;8' * } */ function normalizeRCData(rcdata) { return rcdata .replace(looseAmpRe, '&amp;$1') .replace(ltRe, '&lt;') .replace(gtRe, '&gt;'); } // TODO(felix8a): validate sanitizer regexs against the HTML5 grammar at // http://www.whatwg.org/specs/web-apps/current-work/multipage/syntax.html // http://www.whatwg.org/specs/web-apps/current-work/multipage/parsing.html // http://www.whatwg.org/specs/web-apps/current-work/multipage/tokenization.html // http://www.whatwg.org/specs/web-apps/current-work/multipage/tree-construction.html // We initially split input so that potentially meaningful characters // like '<' and '>' are separate tokens, using a fast dumb process that // ignores quoting. Then we walk that token stream, and when we see a // '<' that's the start of a tag, we use ATTR_RE to extract tag // attributes from the next token. That token will never have a '>' // character. However, it might have an unbalanced quote character, and // when we see that, we combine additional tokens to balance the quote. var ATTR_RE = new RegExp( '^\\s*' + '([-.:\\w]+)' + // 1 = Attribute name '(?:' + ( '\\s*(=)\\s*' + // 2 = Is there a value? '(' + ( // 3 = Attribute value // TODO(felix8a): maybe use backref to match quotes '(\")[^\"]*(\"|$)' + // 4, 5 = Double-quoted string '|' + '(\')[^\']*(\'|$)' + // 6, 7 = Single-quoted string '|' + // Positive lookahead to prevent interpretation of // <foo a= b=c> as <foo a='b=c'> // TODO(felix8a): might be able to drop this case '(?=[a-z][-\\w]*\\s*=)' + '|' + // Unquoted value that isn't an attribute name // (since we didn't match the positive lookahead above) '[^\"\'\\s]*' ) + ')' ) + ')?', 'i'); // false on IE<=8, true on most other browsers var splitWillCapture = ('a,b'.split(/(,)/).length === 3); // bitmask for tags with special parsing, like <script> and <textarea> var EFLAGS_TEXT = html4.eflags['CDATA'] | html4.eflags['RCDATA']; /** * Given a SAX-like event handler, produce a function that feeds those * events and a parameter to the event handler. * * The event handler has the form:{@code * { * // Name is an upper-case HTML tag name. Attribs is an array of * // alternating upper-case attribute names, and attribute values. The * // attribs array is reused by the parser. Param is the value passed to * // the saxParser. * startTag: function (name, attribs, param) { ... }, * endTag: function (name, param) { ... }, * pcdata: function (text, param) { ... }, * rcdata: function (text, param) { ... }, * cdata: function (text, param) { ... }, * startDoc: function (param) { ... }, * endDoc: function (param) { ... } * }} * * @param {Object} handler a record containing event handlers. * @return {function(string, Object)} A function that takes a chunk of HTML * and a parameter. The parameter is passed on to the handler methods. 
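 *
 * <p>A minimal usage sketch (the handler below is a hypothetical example,
 * not part of the documented API; it simply accumulates text chunks into
 * the param array):
 * <pre>
 *   var collectText = makeSaxParser({
 *     pcdata: function (text, out) { out.push(text); },
 *     rcdata: function (text, out) { out.push(text); },
 *     cdata:  function (text, out) { out.push(text); }
 *   });
 *   var chunks = [];
 *   collectText('&lt;b&gt;hello&lt;\/b&gt; world', chunks);
 *   // chunks is now ['hello', ' world']
 * </pre>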
*/ function makeSaxParser(handler) { // Accept quoted or unquoted keys (Closure compat) var hcopy = { cdata: handler.cdata || handler['cdata'], comment: handler.comment || handler['comment'], endDoc: handler.endDoc || handler['endDoc'], endTag: handler.endTag || handler['endTag'], pcdata: handler.pcdata || handler['pcdata'], rcdata: handler.rcdata || handler['rcdata'], startDoc: handler.startDoc || handler['startDoc'], startTag: handler.startTag || handler['startTag'] }; return function(htmlText, param) { return parse(htmlText, hcopy, param); }; } // Parsing strategy is to split input into parts that might be lexically // meaningful (every ">" becomes a separate part), and then recombine // parts if we discover they're in a different context. // TODO(felix8a): Significant performance regressions from -legacy, // tested on // Chrome 18.0 // Firefox 11.0 // IE 6, 7, 8, 9 // Opera 11.61 // Safari 5.1.3 // Many of these are unusual patterns that are linearly slower and still // pretty fast (eg 1ms to 5ms), so not necessarily worth fixing. // TODO(felix8a): "<script> && && && ... <\/script>" is slower on all // browsers. The hotspot is htmlSplit. // TODO(felix8a): "<p title='>>>>...'><\/p>" is slower on all browsers. // This is partly htmlSplit, but the hotspot is parseTagAndAttrs. // TODO(felix8a): "<a><\/a><a><\/a>..." is slower on IE9. // "<a>1<\/a><a>1<\/a>..." is faster, "<a><\/a>2<a><\/a>2..." is faster. // TODO(felix8a): "<p<p<p..." is slower on IE[6-8] var continuationMarker = {}; function parse(htmlText, handler, param) { var m, p, tagName; var parts = htmlSplit(htmlText); var state = { noMoreGT: false, noMoreEndComments: false }; parseCPS(handler, parts, 0, state, param); } function continuationMaker(h, parts, initial, state, param) { return function () { parseCPS(h, parts, initial, state, param); }; } function parseCPS(h, parts, initial, state, param) { try { if (h.startDoc && initial == 0) { h.startDoc(param); } var m, p, tagName; for (var pos = initial, end = parts.length; pos < end;) { var current = parts[pos++]; var next = parts[pos]; switch (current) { case '&': if (ENTITY_RE_2.test(next)) { if (h.pcdata) { h.pcdata('&' + next, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } pos++; } else { if (h.pcdata) { h.pcdata("&amp;", param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<\/': if (m = /^([-\w:]+)[^\'\"]*/.exec(next)) { if (m[0].length === next.length && parts[pos + 1] === '>') { // fast case, no attribute parsing needed pos += 2; tagName = m[1].toLowerCase(); if (h.endTag) { h.endTag(tagName, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } else { // slow case, need to parse attributes // TODO(felix8a): do we really care about misparsing this? 
pos = parseEndTag( parts, pos, h, param, continuationMarker, state); } } else { if (h.pcdata) { h.pcdata('&lt;/', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<': if (m = /^([-\w:]+)\s*\/?/.exec(next)) { if (m[0].length === next.length && parts[pos + 1] === '>') { // fast case, no attribute parsing needed pos += 2; tagName = m[1].toLowerCase(); if (h.startTag) { h.startTag(tagName, [], param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } // tags like <script> and <textarea> have special parsing var eflags = html4.ELEMENTS[tagName]; if (eflags & EFLAGS_TEXT) { var tag = { name: tagName, next: pos, eflags: eflags }; pos = parseText( parts, tag, h, param, continuationMarker, state); } } else { // slow case, need to parse attributes pos = parseStartTag( parts, pos, h, param, continuationMarker, state); } } else { if (h.pcdata) { h.pcdata('&lt;', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<\!--': // The pathological case is n copies of '<\!--' without '-->', and // repeated failure to find '-->' is quadratic. We avoid that by // remembering when search for '-->' fails. if (!state.noMoreEndComments) { // A comment <\!--x--> is split into three tokens: // '<\!--', 'x--', '>' // We want to find the next '>' token that has a preceding '--'. // pos is at the 'x--'. for (p = pos + 1; p < end; p++) { if (parts[p] === '>' && /--$/.test(parts[p - 1])) { break; } } if (p < end) { if (h.comment) { var comment = parts.slice(pos, p).join(''); h.comment( comment.substr(0, comment.length - 2), param, continuationMarker, continuationMaker(h, parts, p + 1, state, param)); } pos = p + 1; } else { state.noMoreEndComments = true; } } if (state.noMoreEndComments) { if (h.pcdata) { h.pcdata('&lt;!--', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '<\!': if (!/^\w/.test(next)) { if (h.pcdata) { h.pcdata('&lt;!', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } else { // similar to noMoreEndComment logic if (!state.noMoreGT) { for (p = pos + 1; p < end; p++) { if (parts[p] === '>') { break; } } if (p < end) { pos = p + 1; } else { state.noMoreGT = true; } } if (state.noMoreGT) { if (h.pcdata) { h.pcdata('&lt;!', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } } break; case '<?': // similar to noMoreEndComment logic if (!state.noMoreGT) { for (p = pos + 1; p < end; p++) { if (parts[p] === '>') { break; } } if (p < end) { pos = p + 1; } else { state.noMoreGT = true; } } if (state.noMoreGT) { if (h.pcdata) { h.pcdata('&lt;?', param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } } break; case '>': if (h.pcdata) { h.pcdata("&gt;", param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } break; case '': break; default: if (h.pcdata) { h.pcdata(current, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } break; } } if (h.endDoc) { h.endDoc(param); } } catch (e) { if (e !== continuationMarker) { throw e; } } } // Split str into parts for the html parser. function htmlSplit(str) { // can't hoist this out of the function because of the re.exec loop. 
var re = /(<\/|<\!--|<[!?]|[&<>])/g; str += ''; if (splitWillCapture) { return str.split(re); } else { var parts = []; var lastPos = 0; var m; while ((m = re.exec(str)) !== null) { parts.push(str.substring(lastPos, m.index)); parts.push(m[0]); lastPos = m.index + m[0].length; } parts.push(str.substring(lastPos)); return parts; } } function parseEndTag(parts, pos, h, param, continuationMarker, state) { var tag = parseTagAndAttrs(parts, pos); // drop unclosed tags if (!tag) { return parts.length; } if (h.endTag) { h.endTag(tag.name, param, continuationMarker, continuationMaker(h, parts, pos, state, param)); } return tag.next; } function parseStartTag(parts, pos, h, param, continuationMarker, state) { var tag = parseTagAndAttrs(parts, pos); // drop unclosed tags if (!tag) { return parts.length; } if (h.startTag) { h.startTag(tag.name, tag.attrs, param, continuationMarker, continuationMaker(h, parts, tag.next, state, param)); } // tags like <script> and <textarea> have special parsing if (tag.eflags & EFLAGS_TEXT) { return parseText(parts, tag, h, param, continuationMarker, state); } else { return tag.next; } } var endTagRe = {}; // Tags like <script> and <textarea> are flagged as CDATA or RCDATA, // which means everything is text until we see the correct closing tag. function parseText(parts, tag, h, param, continuationMarker, state) { var end = parts.length; if (!endTagRe.hasOwnProperty(tag.name)) { endTagRe[tag.name] = new RegExp('^' + tag.name + '(?:[\\s\\/]|$)', 'i'); } var re = endTagRe[tag.name]; var first = tag.next; var p = tag.next + 1; for (; p < end; p++) { if (parts[p - 1] === '<\/' && re.test(parts[p])) { break; } } if (p < end) { p -= 1; } var buf = parts.slice(first, p).join(''); if (tag.eflags & html4.eflags['CDATA']) { if (h.cdata) { h.cdata(buf, param, continuationMarker, continuationMaker(h, parts, p, state, param)); } } else if (tag.eflags & html4.eflags['RCDATA']) { if (h.rcdata) { h.rcdata(normalizeRCData(buf), param, continuationMarker, continuationMaker(h, parts, p, state, param)); } } else { throw new Error('bug'); } return p; } // at this point, parts[pos-1] is either "<" or "<\/". function parseTagAndAttrs(parts, pos) { var m = /^([-\w:]+)/.exec(parts[pos]); var tag = {}; tag.name = m[1].toLowerCase(); tag.eflags = html4.ELEMENTS[tag.name]; var buf = parts[pos].substr(m[0].length); // Find the next '>'. We optimistically assume this '>' is not in a // quoted context, and further down we fix things up if it turns out to // be quoted. var p = pos + 1; var end = parts.length; for (; p < end; p++) { if (parts[p] === '>') { break; } buf += parts[p]; } if (end <= p) { return void 0; } var attrs = []; while (buf !== '') { m = ATTR_RE.exec(buf); if (!m) { // No attribute found: skip garbage buf = buf.replace(/^[\s\S][^a-z\s]*/, ''); } else if ((m[4] && !m[5]) || (m[6] && !m[7])) { // Unterminated quote: slurp to the next unquoted '>' var quote = m[4] || m[6]; var sawQuote = false; var abuf = [buf, parts[p++]]; for (; p < end; p++) { if (sawQuote) { if (parts[p] === '>') { break; } } else if (0 <= parts[p].indexOf(quote)) { sawQuote = true; } abuf.push(parts[p]); } // Slurp failed: lose the garbage if (end <= p) { break; } // Otherwise retry attribute parsing buf = abuf.join(''); continue; } else { // We have an attribute var aName = m[1].toLowerCase(); var aValue = m[2] ? 
decodeValue(m[3]) : ''; attrs.push(aName, aValue); buf = buf.substr(m[0].length); } } tag.attrs = attrs; tag.next = p + 1; return tag; } function decodeValue(v) { var q = v.charCodeAt(0); if (q === 0x22 || q === 0x27) { // " or ' v = v.substr(1, v.length - 2); } return unescapeEntities(stripNULs(v)); } /** * Returns a function that strips unsafe tags and attributes from html. * @param {function(string, Array.<string>): ?Array.<string>} tagPolicy * A function that takes (tagName, attribs[]), where tagName is a key in * html4.ELEMENTS and attribs is an array of alternating attribute names * and values. It should return a record (as follows), or null to delete * the element. It's okay for tagPolicy to modify the attribs array, * but the same array is reused, so it should not be held between calls. * Record keys: * attribs: (required) Sanitized attributes array. * tagName: Replacement tag name. * @return {function(string, Array)} A function that sanitizes a string of * HTML and appends result strings to the second argument, an array. */ function makeHtmlSanitizer(tagPolicy) { var stack; var ignoring; var emit = function (text, out) { if (!ignoring) { out.push(text); } }; return makeSaxParser({ 'startDoc': function(_) { stack = []; ignoring = false; }, 'startTag': function(tagNameOrig, attribs, out) { if (ignoring) { return; } if (!html4.ELEMENTS.hasOwnProperty(tagNameOrig)) { return; } var eflagsOrig = html4.ELEMENTS[tagNameOrig]; if (eflagsOrig & html4.eflags['FOLDABLE']) { return; } var decision = tagPolicy(tagNameOrig, attribs); if (!decision) { ignoring = !(eflagsOrig & html4.eflags['EMPTY']); return; } else if (typeof decision !== 'object') { throw new Error('tagPolicy did not return object (old API?)'); } if ('attribs' in decision) { attribs = decision['attribs']; } else { throw new Error('tagPolicy gave no attribs'); } var eflagsRep; var tagNameRep; if ('tagName' in decision) { tagNameRep = decision['tagName']; eflagsRep = html4.ELEMENTS[tagNameRep]; } else { tagNameRep = tagNameOrig; eflagsRep = eflagsOrig; } // TODO(mikesamuel): relying on tagPolicy not to insert unsafe // attribute names. // If this is an optional-end-tag element and either this element or its // previous like sibling was rewritten, then insert a close tag to // preserve structure. 
if (eflagsOrig & html4.eflags['OPTIONAL_ENDTAG']) { var onStack = stack[stack.length - 1]; if (onStack && onStack.orig === tagNameOrig && (onStack.rep !== tagNameRep || tagNameOrig !== tagNameRep)) { out.push('<\/', onStack.rep, '>'); } } if (!(eflagsOrig & html4.eflags['EMPTY'])) { stack.push({orig: tagNameOrig, rep: tagNameRep}); } out.push('<', tagNameRep); for (var i = 0, n = attribs.length; i < n; i += 2) { var attribName = attribs[i], value = attribs[i + 1]; if (value !== null && value !== void 0) { out.push(' ', attribName, '="', escapeAttrib(value), '"'); } } out.push('>'); if ((eflagsOrig & html4.eflags['EMPTY']) && !(eflagsRep & html4.eflags['EMPTY'])) { // replacement is non-empty, synthesize end tag out.push('<\/', tagNameRep, '>'); } }, 'endTag': function(tagName, out) { if (ignoring) { ignoring = false; return; } if (!html4.ELEMENTS.hasOwnProperty(tagName)) { return; } var eflags = html4.ELEMENTS[tagName]; if (!(eflags & (html4.eflags['EMPTY'] | html4.eflags['FOLDABLE']))) { var index; if (eflags & html4.eflags['OPTIONAL_ENDTAG']) { for (index = stack.length; --index >= 0;) { var stackElOrigTag = stack[index].orig; if (stackElOrigTag === tagName) { break; } if (!(html4.ELEMENTS[stackElOrigTag] & html4.eflags['OPTIONAL_ENDTAG'])) { // Don't pop non optional end tags looking for a match. return; } } } else { for (index = stack.length; --index >= 0;) { if (stack[index].orig === tagName) { break; } } } if (index < 0) { return; } // Not opened. for (var i = stack.length; --i > index;) { var stackElRepTag = stack[i].rep; if (!(html4.ELEMENTS[stackElRepTag] & html4.eflags['OPTIONAL_ENDTAG'])) { out.push('<\/', stackElRepTag, '>'); } } if (index < stack.length) { tagName = stack[index].rep; } stack.length = index; out.push('<\/', tagName, '>'); } }, 'pcdata': emit, 'rcdata': emit, 'cdata': emit, 'endDoc': function(out) { for (; stack.length; stack.length--) { out.push('<\/', stack[stack.length - 1].rep, '>'); } } }); } var ALLOWED_URI_SCHEMES = /^(?:https?|mailto|data)$/i; function safeUri(uri, effect, ltype, hints, naiveUriRewriter) { if (!naiveUriRewriter) { return null; } try { var parsed = URI.parse('' + uri); if (parsed) { if (!parsed.hasScheme() || ALLOWED_URI_SCHEMES.test(parsed.getScheme())) { var safe = naiveUriRewriter(parsed, effect, ltype, hints); return safe ? safe.toString() : null; } } } catch (e) { return null; } return null; } function log(logger, tagName, attribName, oldValue, newValue) { if (!attribName) { logger(tagName + " removed", { change: "removed", tagName: tagName }); } if (oldValue !== newValue) { var changed = "changed"; if (oldValue && !newValue) { changed = "removed"; } else if (!oldValue && newValue) { changed = "added"; } logger(tagName + "." + attribName + " " + changed, { change: changed, tagName: tagName, attribName: attribName, oldValue: oldValue, newValue: newValue }); } } function lookupAttribute(map, tagName, attribName) { var attribKey; attribKey = tagName + '::' + attribName; if (map.hasOwnProperty(attribKey)) { return map[attribKey]; } attribKey = '*::' + attribName; if (map.hasOwnProperty(attribKey)) { return map[attribKey]; } return void 0; } function getAttributeType(tagName, attribName) { return lookupAttribute(html4.ATTRIBS, tagName, attribName); } function getLoaderType(tagName, attribName) { return lookupAttribute(html4.LOADERTYPES, tagName, attribName); } function getUriEffect(tagName, attribName) { return lookupAttribute(html4.URIEFFECTS, tagName, attribName); } /** * Sanitizes attributes on an HTML tag. 
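 *
 * <p>Illustrative sketch (no URI rewriter is supplied here, so URI-typed
 * attributes such as href are dropped; the result assumes the attribute
 * type rules defined in html4.ATTRIBS):
 * {\@updoc
 *   $ sanitizeAttribs('a', ['href', 'javascript:doEvil()', 'title', 'hi'])
 *   # ['href', null, 'title', 'hi']
 * }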
* @param {string} tagName An HTML tag name in lowercase. * @param {Array.<?string>} attribs An array of alternating names and values. * @param {?function(?string): ?string} opt_naiveUriRewriter A transform to * apply to URI attributes; it can return a new string value, or null to * delete the attribute. If unspecified, URI attributes are deleted. * @param {function(?string): ?string} opt_nmTokenPolicy A transform to apply * to attributes containing HTML names, element IDs, and space-separated * lists of classes; it can return a new string value, or null to delete * the attribute. If unspecified, these attributes are kept unchanged. * @return {Array.<?string>} The sanitized attributes as a list of alternating * names and values, where a null value means to omit the attribute. */ function sanitizeAttribs(tagName, attribs, opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) { // TODO(felix8a): it's obnoxious that domado duplicates much of this // TODO(felix8a): maybe consistently enforce constraints like target= for (var i = 0; i < attribs.length; i += 2) { var attribName = attribs[i]; var value = attribs[i + 1]; var oldValue = value; var atype = null, attribKey; if ((attribKey = tagName + '::' + attribName, html4.ATTRIBS.hasOwnProperty(attribKey)) || (attribKey = '*::' + attribName, html4.ATTRIBS.hasOwnProperty(attribKey))) { atype = html4.ATTRIBS[attribKey]; } if (atype !== null) { switch (atype) { case html4.atype['NONE']: break; case html4.atype['SCRIPT']: value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['STYLE']: if ('undefined' === typeof parseCssDeclarations) { value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; } var sanitizedDeclarations = []; parseCssDeclarations( value, { declaration: function (property, tokens) { var normProp = property.toLowerCase(); var schema = cssSchema[normProp]; if (!schema) { return; } sanitizeCssProperty( normProp, schema, tokens, opt_naiveUriRewriter ? function (url) { return safeUri( url, html4.ueffects.SAME_DOCUMENT, html4.ltypes.SANDBOXED, { "TYPE": "CSS", "CSS_PROP": normProp }, opt_naiveUriRewriter); } : null); sanitizedDeclarations.push(property + ': ' + tokens.join(' ')); } }); value = sanitizedDeclarations.length > 0 ? sanitizedDeclarations.join(' ; ') : null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['ID']: case html4.atype['IDREF']: case html4.atype['IDREFS']: case html4.atype['GLOBAL_NAME']: case html4.atype['LOCAL_NAME']: case html4.atype['CLASSES']: value = opt_nmTokenPolicy ? opt_nmTokenPolicy(value) : value; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['URI']: value = safeUri(value, getUriEffect(tagName, attribName), getLoaderType(tagName, attribName), { "TYPE": "MARKUP", "XML_ATTR": attribName, "XML_TAG": tagName }, opt_naiveUriRewriter); if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; case html4.atype['URI_FRAGMENT']: if (value && '#' === value.charAt(0)) { value = value.substring(1); // remove the leading '#' value = opt_nmTokenPolicy ? 
opt_nmTokenPolicy(value) : value; if (value !== null && value !== void 0) { value = '#' + value; // restore the leading '#' } } else { value = null; } if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; default: value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } break; } } else { value = null; if (opt_logger) { log(opt_logger, tagName, attribName, oldValue, value); } } attribs[i + 1] = value; } return attribs; } /** * Creates a tag policy that omits all tags marked UNSAFE in html4-defs.js * and applies the default attribute sanitizer with the supplied policy for * URI attributes and NMTOKEN attributes. * @param {?function(?string): ?string} opt_naiveUriRewriter A transform to * apply to URI attributes. If not given, URI attributes are deleted. * @param {function(?string): ?string} opt_nmTokenPolicy A transform to apply * to attributes containing HTML names, element IDs, and space-separated * lists of classes. If not given, such attributes are left unchanged. * @return {function(string, Array.<?string>)} A tagPolicy suitable for * passing to html.sanitize. */ function makeTagPolicy( opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) { return function(tagName, attribs) { if (!(html4.ELEMENTS[tagName] & html4.eflags['UNSAFE'])) { return { 'attribs': sanitizeAttribs(tagName, attribs, opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) }; } else { if (opt_logger) { log(opt_logger, tagName, undefined, undefined, undefined); } } }; } /** * Sanitizes HTML tags and attributes according to a given policy. * @param {string} inputHtml The HTML to sanitize. * @param {function(string, Array.<?string>)} tagPolicy A function that * decides which tags to accept and sanitizes their attributes (see * makeHtmlSanitizer above for details). * @return {string} The sanitized HTML. */ function sanitizeWithPolicy(inputHtml, tagPolicy) { var outputArray = []; makeHtmlSanitizer(tagPolicy)(inputHtml, outputArray); return outputArray.join(''); } /** * Strips unsafe tags and attributes from HTML. * @param {string} inputHtml The HTML to sanitize. * @param {?function(?string): ?string} opt_naiveUriRewriter A transform to * apply to URI attributes. If not given, URI attributes are deleted. * @param {function(?string): ?string} opt_nmTokenPolicy A transform to apply * to attributes containing HTML names, element IDs, and space-separated * lists of classes. If not given, such attributes are left unchanged. */ function sanitize(inputHtml, opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger) { var tagPolicy = makeTagPolicy( opt_naiveUriRewriter, opt_nmTokenPolicy, opt_logger); return sanitizeWithPolicy(inputHtml, tagPolicy); } // Export both quoted and unquoted names for Closure linkage. 
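/*
 * A minimal usage sketch of the sanitizer assembled above. keepHttpLinks is
 * a hypothetical policy supplied by the caller, not part of this library;
 * the expected output assumes the default html4 attribute rules.
 *
 *   var keepHttpLinks = function (parsedUri) {
 *     // keep http/https targets unchanged, drop everything else
 *     return /^https?$/.test(parsedUri.getScheme() || '') ? parsedUri : null;
 *   };
 *   html.sanitize(
 *       '<p onclick="evil()">hi <a href="javascript:x">x<\/a>' +
 *       ' <a href="https://example.com/">ok<\/a><\/p>',
 *       keepHttpLinks);
 *   // => '<p>hi <a>x<\/a> <a href="https://example.com/">ok<\/a><\/p>'
 *   // onclick is a SCRIPT-typed attribute and is always removed; the
 *   // javascript: URI is rejected before keepHttpLinks is even consulted.
 */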
var html = {}; html.escapeAttrib = html['escapeAttrib'] = escapeAttrib; html.makeHtmlSanitizer = html['makeHtmlSanitizer'] = makeHtmlSanitizer; html.makeSaxParser = html['makeSaxParser'] = makeSaxParser; html.makeTagPolicy = html['makeTagPolicy'] = makeTagPolicy; html.normalizeRCData = html['normalizeRCData'] = normalizeRCData; html.sanitize = html['sanitize'] = sanitize; html.sanitizeAttribs = html['sanitizeAttribs'] = sanitizeAttribs; html.sanitizeWithPolicy = html['sanitizeWithPolicy'] = sanitizeWithPolicy; html.unescapeEntities = html['unescapeEntities'] = unescapeEntities; return html; })(html4); var html_sanitize = html['sanitize']; // Loosen restrictions of Caja's // html-sanitizer to allow for styling html4.ATTRIBS['*::style'] = 0; html4.ELEMENTS['style'] = 0; html4.ATTRIBS['a::target'] = 0; html4.ELEMENTS['video'] = 0; html4.ATTRIBS['video::src'] = 0; html4.ATTRIBS['video::poster'] = 0; html4.ATTRIBS['video::controls'] = 0; html4.ELEMENTS['audio'] = 0; html4.ATTRIBS['audio::src'] = 0; html4.ATTRIBS['video::autoplay'] = 0; html4.ATTRIBS['video::controls'] = 0; if (typeof module !== 'undefined') { module.exports = html_sanitize; } },{}],31:[function(require,module,exports){ require('./leaflet'); require('./mapbox'); },{"./leaflet":32,"./mapbox":33}],32:[function(require,module,exports){ window.L = require('leaflet/dist/leaflet-src'); },{"leaflet/dist/leaflet-src":36}],33:[function(require,module,exports){ // Hardcode image path, because Leaflet's autodetection // fails, because mapbox.js is not named leaflet.js window.L.Icon.Default.imagePath = '//api.tiles.mapbox.com/mapbox.js/' + 'v' + require('./package.json').version + '/images'; L.mapbox = module.exports = { VERSION: require('./package.json').version, geocoder: require('./src/geocoder'), marker: require('./src/marker'), tileLayer: require('./src/tile_layer'), shareControl: require('./src/share_control'), legendControl: require('./src/legend_control'), geocoderControl: require('./src/geocoder_control'), gridControl: require('./src/grid_control'), gridLayer: require('./src/grid_layer'), markerLayer: require('./src/marker_layer'), map: require('./src/map'), config: require('./src/config'), sanitize: require('./src/sanitize'), template: require('mustache').to_html }; },{"./package.json":38,"./src/config":39,"./src/geocoder":40,"./src/geocoder_control":41,"./src/grid_control":43,"./src/grid_layer":44,"./src/legend_control":45,"./src/map":47,"./src/marker":48,"./src/marker_layer":49,"./src/sanitize":51,"./src/share_control":52,"./src/tile_layer":53,"mustache":37}],34:[function(require,module,exports){ function xhr(url, callback, cors) { if (typeof window.XMLHttpRequest === 'undefined') { return callback(Error('Browser not supported')); } if (typeof cors === 'undefined') { var m = url.match(/^\s*https?:\/\/[^\/]*/); cors = m && (m[0] !== location.protocol + '//' + location.domain + (location.port ? ':' + location.port : '')); } var x; function isSuccessful(status) { return status >= 200 && status < 300 || status === 304; } if (cors && ( // IE7-9 Quirks & Compatibility typeof window.XDomainRequest === 'object' || // IE9 Standards mode typeof window.XDomainRequest === 'function' )) { // IE8-10 x = new window.XDomainRequest(); } else { x = new window.XMLHttpRequest(); } function loaded() { if ( // XDomainRequest x.status === undefined || // modern browsers isSuccessful(x.status)) callback.call(x, null, x); else callback.call(x, x, null); } // Both `onreadystatechange` and `onload` can fire. 
`onreadystatechange` // has [been supported for longer](http://stackoverflow.com/a/9181508/229001). if ('onload' in x) { x.onload = loaded; } else { x.onreadystatechange = function readystate() { if (x.readyState === 4) { loaded(); } }; } // Call the callback with the XMLHttpRequest object as an error and prevent // it from ever being called again by reassigning it to `noop` x.onerror = function error(evt) { callback.call(this, evt, null); callback = function() { }; }; // IE9 must have onprogress be set to a unique function. x.onprogress = function() { }; x.ontimeout = function(evt) { callback.call(this, evt, null); callback = function() { }; }; x.onabort = function(evt) { callback.call(this, evt, null); callback = function() { }; }; // GET is the only supported HTTP Verb by XDomainRequest and is the // only one supported here. x.open('GET', url, true); // Send the request. Sending data is not supported. x.send(null); return xhr; } if (typeof module !== 'undefined') module.exports = xhr; },{}],35:[function(require,module,exports){ /*! JSON v3.2.6 | http://bestiejs.github.io/json3 | Copyright 2012-2013, Kit Cambridge | http://kit.mit-license.org */ ;(function (window) { // Convenience aliases. var getClass = {}.toString, isProperty, forEach, undef; // Detect the `define` function exposed by asynchronous module loaders. The // strict `define` check is necessary for compatibility with `r.js`. var isLoader = typeof define === "function" && define.amd; // Detect native implementations. var nativeJSON = typeof JSON == "object" && JSON; // Set up the JSON 3 namespace, preferring the CommonJS `exports` object if // available. var JSON3 = typeof exports == "object" && exports && !exports.nodeType && exports; if (JSON3 && nativeJSON) { // Explicitly delegate to the native `stringify` and `parse` // implementations in CommonJS environments. JSON3.stringify = nativeJSON.stringify; JSON3.parse = nativeJSON.parse; } else { // Export for web browsers, JavaScript engines, and asynchronous module // loaders, using the global `JSON` object if available. JSON3 = window.JSON = nativeJSON || {}; } // Test the `Date#getUTC*` methods. Based on work by @Yaffle. var isExtended = new Date(-3509827334573292); try { // The `getUTCFullYear`, `Month`, and `Date` methods return nonsensical // results for certain dates in Opera >= 10.53. isExtended = isExtended.getUTCFullYear() == -109252 && isExtended.getUTCMonth() === 0 && isExtended.getUTCDate() === 1 && // Safari < 2.0.2 stores the internal millisecond time value correctly, // but clips the values returned by the date methods to the range of // signed 32-bit integers ([-2 ** 31, 2 ** 31 - 1]). isExtended.getUTCHours() == 10 && isExtended.getUTCMinutes() == 37 && isExtended.getUTCSeconds() == 6 && isExtended.getUTCMilliseconds() == 708; } catch (exception) {} // Internal: Determines whether the native `JSON.stringify` and `parse` // implementations are spec-compliant. Based on work by Ken Snyder. function has(name) { if (has[name] !== undef) { // Return cached feature test result. return has[name]; } var isSupported; if (name == "bug-string-char-index") { // IE <= 7 doesn't support accessing string characters using square // bracket notation. IE 8 only supports this for primitives. isSupported = "a"[0] != "a"; } else if (name == "json") { // Indicates whether both `JSON.stringify` and `JSON.parse` are // supported. 
isSupported = has("json-stringify") && has("json-parse"); } else { var value, serialized = '{"a":[1,true,false,null,"\\u0000\\b\\n\\f\\r\\t"]}'; // Test `JSON.stringify`. if (name == "json-stringify") { var stringify = JSON3.stringify, stringifySupported = typeof stringify == "function" && isExtended; if (stringifySupported) { // A test function object with a custom `toJSON` method. (value = function () { return 1; }).toJSON = value; try { stringifySupported = // Firefox 3.1b1 and b2 serialize string, number, and boolean // primitives as object literals. stringify(0) === "0" && // FF 3.1b1, b2, and JSON 2 serialize wrapped primitives as object // literals. stringify(new Number()) === "0" && stringify(new String()) == '""' && // FF 3.1b1, 2 throw an error if the value is `null`, `undefined`, or // does not define a canonical JSON representation (this applies to // objects with `toJSON` properties as well, *unless* they are nested // within an object or array). stringify(getClass) === undef && // IE 8 serializes `undefined` as `"undefined"`. Safari <= 5.1.7 and // FF 3.1b3 pass this test. stringify(undef) === undef && // Safari <= 5.1.7 and FF 3.1b3 throw `Error`s and `TypeError`s, // respectively, if the value is omitted entirely. stringify() === undef && // FF 3.1b1, 2 throw an error if the given value is not a number, // string, array, object, Boolean, or `null` literal. This applies to // objects with custom `toJSON` methods as well, unless they are nested // inside object or array literals. YUI 3.0.0b1 ignores custom `toJSON` // methods entirely. stringify(value) === "1" && stringify([value]) == "[1]" && // Prototype <= 1.6.1 serializes `[undefined]` as `"[]"` instead of // `"[null]"`. stringify([undef]) == "[null]" && // YUI 3.0.0b1 fails to serialize `null` literals. stringify(null) == "null" && // FF 3.1b1, 2 halts serialization if an array contains a function: // `[1, true, getClass, 1]` serializes as "[1,true,],". FF 3.1b3 // elides non-JSON values from objects and arrays, unless they // define custom `toJSON` methods. stringify([undef, getClass, null]) == "[null,null,null]" && // Simple serialization test. FF 3.1b1 uses Unicode escape sequences // where character escape codes are expected (e.g., `\b` => `\u0008`). stringify({ "a": [value, true, false, null, "\x00\b\n\f\r\t"] }) == serialized && // FF 3.1b1 and b2 ignore the `filter` and `width` arguments. stringify(null, value) === "1" && stringify([1, 2], null, 1) == "[\n 1,\n 2\n]" && // JSON 2, Prototype <= 1.7, and older WebKit builds incorrectly // serialize extended years. stringify(new Date(-8.64e15)) == '"-271821-04-20T00:00:00.000Z"' && // The milliseconds are optional in ES 5, but required in 5.1. stringify(new Date(8.64e15)) == '"+275760-09-13T00:00:00.000Z"' && // Firefox <= 11.0 incorrectly serializes years prior to 0 as negative // four-digit years instead of six-digit years. Credits: @Yaffle. stringify(new Date(-621987552e5)) == '"-000001-01-01T00:00:00.000Z"' && // Safari <= 5.1.5 and Opera >= 10.53 incorrectly serialize millisecond // values less than 1000. Credits: @Yaffle. stringify(new Date(-1)) == '"1969-12-31T23:59:59.999Z"'; } catch (exception) { stringifySupported = false; } } isSupported = stringifySupported; } // Test `JSON.parse`. if (name == "json-parse") { var parse = JSON3.parse; if (typeof parse == "function") { try { // FF 3.1b1, b2 will throw an exception if a bare literal is provided. // Conforming implementations should also coerce the initial argument to // a string prior to parsing. 
if (parse("0") === 0 && !parse(false)) { // Simple parsing test. value = parse(serialized); var parseSupported = value["a"].length == 5 && value["a"][0] === 1; if (parseSupported) { try { // Safari <= 5.1.2 and FF 3.1b1 allow unescaped tabs in strings. parseSupported = !parse('"\t"'); } catch (exception) {} if (parseSupported) { try { // FF 4.0 and 4.0.1 allow leading `+` signs and leading // decimal points. FF 4.0, 4.0.1, and IE 9-10 also allow // certain octal literals. parseSupported = parse("01") !== 1; } catch (exception) {} } if (parseSupported) { try { // FF 4.0, 4.0.1, and Rhino 1.7R3-R4 allow trailing decimal // points. These environments, along with FF 3.1b1 and 2, // also allow trailing commas in JSON objects and arrays. parseSupported = parse("1.") !== 1; } catch (exception) {} } } } } catch (exception) { parseSupported = false; } } isSupported = parseSupported; } } return has[name] = !!isSupported; } if (!has("json")) { // Common `[[Class]]` name aliases. var functionClass = "[object Function]"; var dateClass = "[object Date]"; var numberClass = "[object Number]"; var stringClass = "[object String]"; var arrayClass = "[object Array]"; var booleanClass = "[object Boolean]"; // Detect incomplete support for accessing string characters by index. var charIndexBuggy = has("bug-string-char-index"); // Define additional utility methods if the `Date` methods are buggy. if (!isExtended) { var floor = Math.floor; // A mapping between the months of the year and the number of days between // January 1st and the first of the respective month. var Months = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]; // Internal: Calculates the number of days between the Unix epoch and the // first day of the given month. var getDay = function (year, month) { return Months[month] + 365 * (year - 1970) + floor((year - 1969 + (month = +(month > 1))) / 4) - floor((year - 1901 + month) / 100) + floor((year - 1601 + month) / 400); }; } // Internal: Determines if a property is a direct property of the given // object. Delegates to the native `Object#hasOwnProperty` method. if (!(isProperty = {}.hasOwnProperty)) { isProperty = function (property) { var members = {}, constructor; if ((members.__proto__ = null, members.__proto__ = { // The *proto* property cannot be set multiple times in recent // versions of Firefox and SeaMonkey. "toString": 1 }, members).toString != getClass) { // Safari <= 2.0.3 doesn't implement `Object#hasOwnProperty`, but // supports the mutable *proto* property. isProperty = function (property) { // Capture and break the object's prototype chain (see section 8.6.2 // of the ES 5.1 spec). The parenthesized expression prevents an // unsafe transformation by the Closure Compiler. var original = this.__proto__, result = property in (this.__proto__ = null, this); // Restore the original prototype chain. this.__proto__ = original; return result; }; } else { // Capture a reference to the top-level `Object` constructor. constructor = members.constructor; // Use the `constructor` property to simulate `Object#hasOwnProperty` in // other environments. isProperty = function (property) { var parent = (this.constructor || constructor).prototype; return property in this && !(property in parent && this[property] === parent[property]); }; } members = null; return isProperty.call(this, property); }; } // Internal: A set of primitive types used by `isHostType`. 
var PrimitiveTypes = { 'boolean': 1, 'number': 1, 'string': 1, 'undefined': 1 }; // Internal: Determines if the given object `property` value is a // non-primitive. var isHostType = function (object, property) { var type = typeof object[property]; return type == 'object' ? !!object[property] : !PrimitiveTypes[type]; }; // Internal: Normalizes the `for...in` iteration algorithm across // environments. Each enumerated key is yielded to a `callback` function. forEach = function (object, callback) { var size = 0, Properties, members, property; // Tests for bugs in the current environment's `for...in` algorithm. The // `valueOf` property inherits the non-enumerable flag from // `Object.prototype` in older versions of IE, Netscape, and Mozilla. (Properties = function () { this.valueOf = 0; }).prototype.valueOf = 0; // Iterate over a new instance of the `Properties` class. members = new Properties(); for (property in members) { // Ignore all properties inherited from `Object.prototype`. if (isProperty.call(members, property)) { size++; } } Properties = members = null; // Normalize the iteration algorithm. if (!size) { // A list of non-enumerable properties inherited from `Object.prototype`. members = ["valueOf", "toString", "toLocaleString", "propertyIsEnumerable", "isPrototypeOf", "hasOwnProperty", "constructor"]; // IE <= 8, Mozilla 1.0, and Netscape 6.2 ignore shadowed non-enumerable // properties. forEach = function (object, callback) { var isFunction = getClass.call(object) == functionClass, property, length; var hasProperty = !isFunction && typeof object.constructor != 'function' && isHostType(object, 'hasOwnProperty') ? object.hasOwnProperty : isProperty; for (property in object) { // Gecko <= 1.0 enumerates the `prototype` property of functions under // certain conditions; IE does not. if (!(isFunction && property == "prototype") && hasProperty.call(object, property)) { callback(property); } } // Manually invoke the callback for each non-enumerable property. for (length = members.length; property = members[--length]; hasProperty.call(object, property) && callback(property)); }; } else if (size == 2) { // Safari <= 2.0.4 enumerates shadowed properties twice. forEach = function (object, callback) { // Create a set of iterated properties. var members = {}, isFunction = getClass.call(object) == functionClass, property; for (property in object) { // Store each property name to prevent double enumeration. The // `prototype` property of functions is not enumerated due to cross- // environment inconsistencies. if (!(isFunction && property == "prototype") && !isProperty.call(members, property) && (members[property] = 1) && isProperty.call(object, property)) { callback(property); } } }; } else { // No bugs detected; use the standard `for...in` algorithm. forEach = function (object, callback) { var isFunction = getClass.call(object) == functionClass, property, isConstructor; for (property in object) { if (!(isFunction && property == "prototype") && isProperty.call(object, property) && !(isConstructor = property === "constructor")) { callback(property); } } // Manually invoke the callback for the `constructor` property due to // cross-environment inconsistencies. if (isConstructor || isProperty.call(object, (property = "constructor"))) { callback(property); } }; } return forEach(object, callback); }; // Public: Serializes a JavaScript `value` as a JSON string. 
The optional // `filter` argument may specify either a function that alters how object and // array members are serialized, or an array of strings and numbers that // indicates which properties should be serialized. The optional `width` // argument may be either a string or number that specifies the indentation // level of the output. if (!has("json-stringify")) { // Internal: A map of control characters and their escaped equivalents. var Escapes = { 92: "\\\\", 34: '\\"', 8: "\\b", 12: "\\f", 10: "\\n", 13: "\\r", 9: "\\t" }; // Internal: Converts `value` into a zero-padded string such that its // length is at least equal to `width`. The `width` must be <= 6. var leadingZeroes = "000000"; var toPaddedString = function (width, value) { // The `|| 0` expression is necessary to work around a bug in // Opera <= 7.54u2 where `0 == -0`, but `String(-0) !== "0"`. return (leadingZeroes + (value || 0)).slice(-width); }; // Internal: Double-quotes a string `value`, replacing all ASCII control // characters (characters with code unit values between 0 and 31) with // their escaped equivalents. This is an implementation of the // `Quote(value)` operation defined in ES 5.1 section 15.12.3. var unicodePrefix = "\\u00"; var quote = function (value) { var result = '"', index = 0, length = value.length, isLarge = length > 10 && charIndexBuggy, symbols; if (isLarge) { symbols = value.split(""); } for (; index < length; index++) { var charCode = value.charCodeAt(index); // If the character is a control character, append its Unicode or // shorthand escape sequence; otherwise, append the character as-is. switch (charCode) { case 8: case 9: case 10: case 12: case 13: case 34: case 92: result += Escapes[charCode]; break; default: if (charCode < 32) { result += unicodePrefix + toPaddedString(2, charCode.toString(16)); break; } result += isLarge ? symbols[index] : charIndexBuggy ? value.charAt(index) : value[index]; } } return result + '"'; }; // Internal: Recursively serializes an object. Implements the // `Str(key, holder)`, `JO(value)`, and `JA(value)` operations. var serialize = function (property, object, callback, properties, whitespace, indentation, stack) { var value, className, year, month, date, time, hours, minutes, seconds, milliseconds, results, element, index, length, prefix, result; try { // Necessary for host object support. value = object[property]; } catch (exception) {} if (typeof value == "object" && value) { className = getClass.call(value); if (className == dateClass && !isProperty.call(value, "toJSON")) { if (value > -1 / 0 && value < 1 / 0) { // Dates are serialized according to the `Date#toJSON` method // specified in ES 5.1 section 15.9.5.44. See section 15.9.1.15 // for the ISO 8601 date time string format. if (getDay) { // Manually compute the year, month, date, hours, minutes, // seconds, and milliseconds if the `getUTC*` methods are // buggy. Adapted from @Yaffle's `date-shim` project. date = floor(value / 864e5); for (year = floor(date / 365.2425) + 1970 - 1; getDay(year + 1, 0) <= date; year++); for (month = floor((date - getDay(year, 0)) / 30.42); getDay(year, month + 1) <= date; month++); date = 1 + date - getDay(year, month); // The `time` value specifies the time within the day (see ES // 5.1 section 15.9.1.2). The formula `(A % B + B) % B` is used // to compute `A modulo B`, as the `%` operator does not // correspond to the `modulo` operation for negative numbers. 
time = (value % 864e5 + 864e5) % 864e5; // The hours, minutes, seconds, and milliseconds are obtained by // decomposing the time within the day. See section 15.9.1.10. hours = floor(time / 36e5) % 24; minutes = floor(time / 6e4) % 60; seconds = floor(time / 1e3) % 60; milliseconds = time % 1e3; } else { year = value.getUTCFullYear(); month = value.getUTCMonth(); date = value.getUTCDate(); hours = value.getUTCHours(); minutes = value.getUTCMinutes(); seconds = value.getUTCSeconds(); milliseconds = value.getUTCMilliseconds(); } // Serialize extended years correctly. value = (year <= 0 || year >= 1e4 ? (year < 0 ? "-" : "+") + toPaddedString(6, year < 0 ? -year : year) : toPaddedString(4, year)) + "-" + toPaddedString(2, month + 1) + "-" + toPaddedString(2, date) + // Months, dates, hours, minutes, and seconds should have two // digits; milliseconds should have three. "T" + toPaddedString(2, hours) + ":" + toPaddedString(2, minutes) + ":" + toPaddedString(2, seconds) + // Milliseconds are optional in ES 5.0, but required in 5.1. "." + toPaddedString(3, milliseconds) + "Z"; } else { value = null; } } else if (typeof value.toJSON == "function" && ((className != numberClass && className != stringClass && className != arrayClass) || isProperty.call(value, "toJSON"))) { // Prototype <= 1.6.1 adds non-standard `toJSON` methods to the // `Number`, `String`, `Date`, and `Array` prototypes. JSON 3 // ignores all `toJSON` methods on these objects unless they are // defined directly on an instance. value = value.toJSON(property); } } if (callback) { // If a replacement function was provided, call it to obtain the value // for serialization. value = callback.call(object, property, value); } if (value === null) { return "null"; } className = getClass.call(value); if (className == booleanClass) { // Booleans are represented literally. return "" + value; } else if (className == numberClass) { // JSON numbers must be finite. `Infinity` and `NaN` are serialized as // `"null"`. return value > -1 / 0 && value < 1 / 0 ? "" + value : "null"; } else if (className == stringClass) { // Strings are double-quoted and escaped. return quote("" + value); } // Recursively serialize objects and arrays. if (typeof value == "object") { // Check for cyclic structures. This is a linear search; performance // is inversely proportional to the number of unique nested objects. for (length = stack.length; length--;) { if (stack[length] === value) { // Cyclic structures cannot be serialized by `JSON.stringify`. throw TypeError(); } } // Add the object to the stack of traversed objects. stack.push(value); results = []; // Save the current indentation level and indent one additional level. prefix = indentation; indentation += whitespace; if (className == arrayClass) { // Recursively serialize array elements. for (index = 0, length = value.length; index < length; index++) { element = serialize(index, value, callback, properties, whitespace, indentation, stack); results.push(element === undef ? "null" : element); } result = results.length ? (whitespace ? "[\n" + indentation + results.join(",\n" + indentation) + "\n" + prefix + "]" : ("[" + results.join(",") + "]")) : "[]"; } else { // Recursively serialize object members. Members are selected from // either a user-specified list of property names, or the object // itself. 
forEach(properties || value, function (property) { var element = serialize(property, value, callback, properties, whitespace, indentation, stack); if (element !== undef) { // According to ES 5.1 section 15.12.3: "If `gap` {whitespace} // is not the empty string, let `member` {quote(property) + ":"} // be the concatenation of `member` and the `space` character." // The "`space` character" refers to the literal space // character, not the `space` {width} argument provided to // `JSON.stringify`. results.push(quote(property) + ":" + (whitespace ? " " : "") + element); } }); result = results.length ? (whitespace ? "{\n" + indentation + results.join(",\n" + indentation) + "\n" + prefix + "}" : ("{" + results.join(",") + "}")) : "{}"; } // Remove the object from the traversed object stack. stack.pop(); return result; } }; // Public: `JSON.stringify`. See ES 5.1 section 15.12.3. JSON3.stringify = function (source, filter, width) { var whitespace, callback, properties, className; if (typeof filter == "function" || typeof filter == "object" && filter) { if ((className = getClass.call(filter)) == functionClass) { callback = filter; } else if (className == arrayClass) { // Convert the property names array into a makeshift set. properties = {}; for (var index = 0, length = filter.length, value; index < length; value = filter[index++], ((className = getClass.call(value)), className == stringClass || className == numberClass) && (properties[value] = 1)); } } if (width) { if ((className = getClass.call(width)) == numberClass) { // Convert the `width` to an integer and create a string containing // `width` number of space characters. if ((width -= width % 1) > 0) { for (whitespace = "", width > 10 && (width = 10); whitespace.length < width; whitespace += " "); } } else if (className == stringClass) { whitespace = width.length <= 10 ? width : width.slice(0, 10); } } // Opera <= 7.54u2 discards the values associated with empty string keys // (`""`) only if they are used directly within an object member list // (e.g., `!("" in { "": 1})`). return serialize("", (value = {}, value[""] = source, value), callback, properties, whitespace, "", []); }; } // Public: Parses a JSON source string. if (!has("json-parse")) { var fromCharCode = String.fromCharCode; // Internal: A map of escaped control characters and their unescaped // equivalents. var Unescapes = { 92: "\\", 34: '"', 47: "/", 98: "\b", 116: "\t", 110: "\n", 102: "\f", 114: "\r" }; // Internal: Stores the parser state. var Index, Source; // Internal: Resets the parser state and throws a `SyntaxError`. var abort = function() { Index = Source = null; throw SyntaxError(); }; // Internal: Returns the next token, or `"$"` if the parser has reached // the end of the source string. A token may be a string, number, `null` // literal, or Boolean literal. var lex = function () { var source = Source, length = source.length, value, begin, position, isSigned, charCode; while (Index < length) { charCode = source.charCodeAt(Index); switch (charCode) { case 9: case 10: case 13: case 32: // Skip whitespace tokens, including tabs, carriage returns, line // feeds, and space characters. Index++; break; case 123: case 125: case 91: case 93: case 58: case 44: // Parse a punctuator token (`{`, `}`, `[`, `]`, `:`, or `,`) at // the current position. value = charIndexBuggy ? source.charAt(Index) : source[Index]; Index++; return value; case 34: // `"` delimits a JSON string; advance to the next character and // begin parsing the string. 
String tokens are prefixed with the // sentinel `@` character to distinguish them from punctuators and // end-of-string tokens. for (value = "@", Index++; Index < length;) { charCode = source.charCodeAt(Index); if (charCode < 32) { // Unescaped ASCII control characters (those with a code unit // less than the space character) are not permitted. abort(); } else if (charCode == 92) { // A reverse solidus (`\`) marks the beginning of an escaped // control character (including `"`, `\`, and `/`) or Unicode // escape sequence. charCode = source.charCodeAt(++Index); switch (charCode) { case 92: case 34: case 47: case 98: case 116: case 110: case 102: case 114: // Revive escaped control characters. value += Unescapes[charCode]; Index++; break; case 117: // `\u` marks the beginning of a Unicode escape sequence. // Advance to the first character and validate the // four-digit code point. begin = ++Index; for (position = Index + 4; Index < position; Index++) { charCode = source.charCodeAt(Index); // A valid sequence comprises four hexdigits (case- // insensitive) that form a single hexadecimal value. if (!(charCode >= 48 && charCode <= 57 || charCode >= 97 && charCode <= 102 || charCode >= 65 && charCode <= 70)) { // Invalid Unicode escape sequence. abort(); } } // Revive the escaped character. value += fromCharCode("0x" + source.slice(begin, Index)); break; default: // Invalid escape sequence. abort(); } } else { if (charCode == 34) { // An unescaped double-quote character marks the end of the // string. break; } charCode = source.charCodeAt(Index); begin = Index; // Optimize for the common case where a string is valid. while (charCode >= 32 && charCode != 92 && charCode != 34) { charCode = source.charCodeAt(++Index); } // Append the string as-is. value += source.slice(begin, Index); } } if (source.charCodeAt(Index) == 34) { // Advance to the next character and return the revived string. Index++; return value; } // Unterminated string. abort(); default: // Parse numbers and literals. begin = Index; // Advance past the negative sign, if one is specified. if (charCode == 45) { isSigned = true; charCode = source.charCodeAt(++Index); } // Parse an integer or floating-point value. if (charCode >= 48 && charCode <= 57) { // Leading zeroes are interpreted as octal literals. if (charCode == 48 && ((charCode = source.charCodeAt(Index + 1)), charCode >= 48 && charCode <= 57)) { // Illegal octal literal. abort(); } isSigned = false; // Parse the integer component. for (; Index < length && ((charCode = source.charCodeAt(Index)), charCode >= 48 && charCode <= 57); Index++); // Floats cannot contain a leading decimal point; however, this // case is already accounted for by the parser. if (source.charCodeAt(Index) == 46) { position = ++Index; // Parse the decimal component. for (; position < length && ((charCode = source.charCodeAt(position)), charCode >= 48 && charCode <= 57); position++); if (position == Index) { // Illegal trailing decimal. abort(); } Index = position; } // Parse exponents. The `e` denoting the exponent is // case-insensitive. charCode = source.charCodeAt(Index); if (charCode == 101 || charCode == 69) { charCode = source.charCodeAt(++Index); // Skip past the sign following the exponent, if one is // specified. if (charCode == 43 || charCode == 45) { Index++; } // Parse the exponential component. for (position = Index; position < length && ((charCode = source.charCodeAt(position)), charCode >= 48 && charCode <= 57); position++); if (position == Index) { // Illegal empty exponent. 
abort(); } Index = position; } // Coerce the parsed value to a JavaScript number. return +source.slice(begin, Index); } // A negative sign may only precede numbers. if (isSigned) { abort(); } // `true`, `false`, and `null` literals. if (source.slice(Index, Index + 4) == "true") { Index += 4; return true; } else if (source.slice(Index, Index + 5) == "false") { Index += 5; return false; } else if (source.slice(Index, Index + 4) == "null") { Index += 4; return null; } // Unrecognized token. abort(); } } // Return the sentinel `$` character if the parser has reached the end // of the source string. return "$"; }; // Internal: Parses a JSON `value` token. var get = function (value) { var results, hasMembers; if (value == "$") { // Unexpected end of input. abort(); } if (typeof value == "string") { if ((charIndexBuggy ? value.charAt(0) : value[0]) == "@") { // Remove the sentinel `@` character. return value.slice(1); } // Parse object and array literals. if (value == "[") { // Parses a JSON array, returning a new JavaScript array. results = []; for (;; hasMembers || (hasMembers = true)) { value = lex(); // A closing square bracket marks the end of the array literal. if (value == "]") { break; } // If the array literal contains elements, the current token // should be a comma separating the previous element from the // next. if (hasMembers) { if (value == ",") { value = lex(); if (value == "]") { // Unexpected trailing `,` in array literal. abort(); } } else { // A `,` must separate each array element. abort(); } } // Elisions and leading commas are not permitted. if (value == ",") { abort(); } results.push(get(value)); } return results; } else if (value == "{") { // Parses a JSON object, returning a new JavaScript object. results = {}; for (;; hasMembers || (hasMembers = true)) { value = lex(); // A closing curly brace marks the end of the object literal. if (value == "}") { break; } // If the object literal contains members, the current token // should be a comma separator. if (hasMembers) { if (value == ",") { value = lex(); if (value == "}") { // Unexpected trailing `,` in object literal. abort(); } } else { // A `,` must separate each object member. abort(); } } // Leading commas are not permitted, object property names must be // double-quoted strings, and a `:` must separate each property // name and value. if (value == "," || typeof value != "string" || (charIndexBuggy ? value.charAt(0) : value[0]) != "@" || lex() != ":") { abort(); } results[value.slice(1)] = get(lex()); } return results; } // Unexpected token encountered. abort(); } return value; }; // Internal: Updates a traversed object member. var update = function(source, property, callback) { var element = walk(source, property, callback); if (element === undef) { delete source[property]; } else { source[property] = element; } }; // Internal: Recursively traverses a parsed JSON object, invoking the // `callback` function for each value. This is an implementation of the // `Walk(holder, name)` operation defined in ES 5.1 section 15.12.2. var walk = function (source, property, callback) { var value = source[property], length; if (typeof value == "object" && value) { // `forEach` can't be used to traverse an array in Opera <= 8.54 // because its `Object#hasOwnProperty` implementation returns `false` // for array indices (e.g., `![1, 2, 3].hasOwnProperty("0")`). 
if (getClass.call(value) == arrayClass) { for (length = value.length; length--;) { update(value, length, callback); } } else { forEach(value, function (property) { update(value, property, callback); }); } } return callback.call(source, property, value); }; // Public: `JSON.parse`. See ES 5.1 section 15.12.2. JSON3.parse = function (source, callback) { var result, value; Index = 0; Source = "" + source; result = get(lex()); // If a JSON string contains multiple tokens, it is invalid. if (lex() != "$") { abort(); } // Reset the parser state. Index = Source = null; return callback && getClass.call(callback) == functionClass ? walk((value = {}, value[""] = result, value), "", callback) : result; }; } } // Export for asynchronous module loaders. if (isLoader) { define(function () { return JSON3; }); } }(this)); },{}],36:[function(require,module,exports){ /* Leaflet, a JavaScript library for mobile-friendly interactive maps. http://leafletjs.com (c) 2010-2013, Vladimir Agafonkin (c) 2010-2011, CloudMade */ (function (window, document, undefined) { var oldL = window.L, L = {}; L.version = '0.6.2'; // define Leaflet for Node module pattern loaders, including Browserify if (typeof module === 'object' && typeof module.exports === 'object') { module.exports = L; // define Leaflet as an AMD module } else if (typeof define === 'function' && define.amd) { define(L); } // define Leaflet as a global L variable, saving the original L to restore later if needed L.noConflict = function () { window.L = oldL; return this; }; window.L = L; /* * L.Util contains various utility functions used throughout Leaflet code. */ L.Util = { extend: function (dest) { // (Object[, Object, ...]) -> var sources = Array.prototype.slice.call(arguments, 1), i, j, len, src; for (j = 0, len = sources.length; j < len; j++) { src = sources[j] || {}; for (i in src) { if (src.hasOwnProperty(i)) { dest[i] = src[i]; } } } return dest; }, bind: function (fn, obj) { // (Function, Object) -> Function var args = arguments.length > 2 ? Array.prototype.slice.call(arguments, 2) : null; return function () { return fn.apply(obj, args || arguments); }; }, stamp: (function () { var lastId = 0, key = '_leaflet_id'; return function (obj) { obj[key] = obj[key] || ++lastId; return obj[key]; }; }()), invokeEach: function (obj, method, context) { var i, args; if (typeof obj === 'object') { args = Array.prototype.slice.call(arguments, 3); for (i in obj) { method.apply(context, [i, obj[i]].concat(args)); } return true; } return false; }, limitExecByInterval: function (fn, time, context) { var lock, execOnUnlock; return function wrapperFn() { var args = arguments; if (lock) { execOnUnlock = true; return; } lock = true; setTimeout(function () { lock = false; if (execOnUnlock) { wrapperFn.apply(context, args); execOnUnlock = false; } }, time); fn.apply(context, args); }; }, falseFn: function () { return false; }, formatNum: function (num, digits) { var pow = Math.pow(10, digits || 5); return Math.round(num * pow) / pow; }, trim: function (str) { return str.trim ? str.trim() : str.replace(/^\s+|\s+$/g, ''); }, splitWords: function (str) { return L.Util.trim(str).split(/\s+/); }, setOptions: function (obj, options) { obj.options = L.extend({}, obj.options, options); return obj.options; }, getParamString: function (obj, existingUrl, uppercase) { var params = []; for (var i in obj) { params.push(encodeURIComponent(uppercase ? i.toUpperCase() : i) + '=' + encodeURIComponent(obj[i])); } return ((!existingUrl || existingUrl.indexOf('?') === -1) ? '?' 
: '&') + params.join('&'); }, template: function (str, data) { return str.replace(/\{ *([\w_]+) *\}/g, function (str, key) { var value = data[key]; if (value === undefined) { throw new Error('No value provided for variable ' + str); } else if (typeof value === 'function') { value = value(data); } return value; }); }, isArray: function (obj) { return (Object.prototype.toString.call(obj) === '[object Array]'); }, emptyImageUrl: 'data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs=' }; (function () { // inspired by http://paulirish.com/2011/requestanimationframe-for-smart-animating/ function getPrefixed(name) { var i, fn, prefixes = ['webkit', 'moz', 'o', 'ms']; for (i = 0; i < prefixes.length && !fn; i++) { fn = window[prefixes[i] + name]; } return fn; } var lastTime = 0; function timeoutDefer(fn) { var time = +new Date(), timeToCall = Math.max(0, 16 - (time - lastTime)); lastTime = time + timeToCall; return window.setTimeout(fn, timeToCall); } var requestFn = window.requestAnimationFrame || getPrefixed('RequestAnimationFrame') || timeoutDefer; var cancelFn = window.cancelAnimationFrame || getPrefixed('CancelAnimationFrame') || getPrefixed('CancelRequestAnimationFrame') || function (id) { window.clearTimeout(id); }; L.Util.requestAnimFrame = function (fn, context, immediate, element) { fn = L.bind(fn, context); if (immediate && requestFn === timeoutDefer) { fn(); } else { return requestFn.call(window, fn, element); } }; L.Util.cancelAnimFrame = function (id) { if (id) { cancelFn.call(window, id); } }; }()); // shortcuts for most used utility functions L.extend = L.Util.extend; L.bind = L.Util.bind; L.stamp = L.Util.stamp; L.setOptions = L.Util.setOptions; /* * L.Class powers the OOP facilities of the library. * Thanks to John Resig and Dean Edwards for inspiration! 
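 *
 * Illustrative usage sketch (editor's addition, not part of upstream Leaflet;
 * the MyMarker name below is hypothetical):
 *
 *   var MyMarker = L.Class.extend({
 *       options: {color: 'red'},
 *       initialize: function (latlng, options) {
 *           this._latlng = latlng;
 *           L.setOptions(this, options); // merges with the defaults declared above
 *       }
 *   });
 *   var m = new MyMarker([51.5, -0.09], {color: 'blue'}); // m.options.color === 'blue'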
*/ L.Class = function () {}; L.Class.extend = function (props) { // extended class with the new prototype var NewClass = function () { // call the constructor if (this.initialize) { this.initialize.apply(this, arguments); } // call all constructor hooks if (this._initHooks) { this.callInitHooks(); } }; // instantiate class without calling constructor var F = function () {}; F.prototype = this.prototype; var proto = new F(); proto.constructor = NewClass; NewClass.prototype = proto; //inherit parent's statics for (var i in this) { if (this.hasOwnProperty(i) && i !== 'prototype') { NewClass[i] = this[i]; } } // mix static properties into the class if (props.statics) { L.extend(NewClass, props.statics); delete props.statics; } // mix includes into the prototype if (props.includes) { L.Util.extend.apply(null, [proto].concat(props.includes)); delete props.includes; } // merge options if (props.options && proto.options) { props.options = L.extend({}, proto.options, props.options); } // mix given properties into the prototype L.extend(proto, props); proto._initHooks = []; var parent = this; // jshint camelcase: false NewClass.__super__ = parent.prototype; // add method for calling all hooks proto.callInitHooks = function () { if (this._initHooksCalled) { return; } if (parent.prototype.callInitHooks) { parent.prototype.callInitHooks.call(this); } this._initHooksCalled = true; for (var i = 0, len = proto._initHooks.length; i < len; i++) { proto._initHooks[i].call(this); } }; return NewClass; }; // method for adding properties to prototype L.Class.include = function (props) { L.extend(this.prototype, props); }; // merge new default options to the Class L.Class.mergeOptions = function (options) { L.extend(this.prototype.options, options); }; // add a constructor hook L.Class.addInitHook = function (fn) { // (Function) || (String, args...) var args = Array.prototype.slice.call(arguments, 1); var init = typeof fn === 'function' ? fn : function () { this[fn].apply(this, args); }; this.prototype._initHooks = this.prototype._initHooks || []; this.prototype._initHooks.push(init); }; /* * L.Mixin.Events is used to add custom events functionality to Leaflet classes. 
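 *
 * Minimal sketch of the mixin in use (editor's illustration; the Thing class is
 * hypothetical, and the on/off/fire aliases are assigned just after the mixin below):
 *
 *   var Thing = L.Class.extend({includes: L.Mixin.Events});
 *   var t = new Thing();
 *   t.on('change', function (e) { console.log(e.type, e.target === t); });
 *   t.fire('change'); // logs "change true"
 *   t.off('change');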
*/ var eventsKey = '_leaflet_events'; L.Mixin = {}; L.Mixin.Events = { addEventListener: function (types, fn, context) { // (String, Function[, Object]) or (Object[, Object]) // types can be a map of types/handlers if (L.Util.invokeEach(types, this.addEventListener, this, fn, context)) { return this; } var events = this[eventsKey] = this[eventsKey] || {}, contextId = context && L.stamp(context), i, len, event, type, indexKey, indexLenKey, typeIndex; // types can be a string of space-separated words types = L.Util.splitWords(types); for (i = 0, len = types.length; i < len; i++) { event = { action: fn, context: context || this }; type = types[i]; if (context) { // store listeners of a particular context in a separate hash (if it has an id) // gives a major performance boost when removing thousands of map layers indexKey = type + '_idx'; indexLenKey = indexKey + '_len'; typeIndex = events[indexKey] = events[indexKey] || {}; if (!typeIndex[contextId]) { typeIndex[contextId] = []; // keep track of the number of keys in the index to quickly check if it's empty events[indexLenKey] = (events[indexLenKey] || 0) + 1; } typeIndex[contextId].push(event); } else { events[type] = events[type] || []; events[type].push(event); } } return this; }, hasEventListeners: function (type) { // (String) -> Boolean var events = this[eventsKey]; return !!events && ((type in events && events[type].length > 0) || (type + '_idx' in events && events[type + '_idx_len'] > 0)); }, removeEventListener: function (types, fn, context) { // ([String, Function, Object]) or (Object[, Object]) if (!this[eventsKey]) { return this; } if (!types) { return this.clearAllEventListeners(); } if (L.Util.invokeEach(types, this.removeEventListener, this, fn, context)) { return this; } var events = this[eventsKey], contextId = context && L.stamp(context), i, len, type, listeners, j, indexKey, indexLenKey, typeIndex, removed; types = L.Util.splitWords(types); for (i = 0, len = types.length; i < len; i++) { type = types[i]; indexKey = type + '_idx'; indexLenKey = indexKey + '_len'; typeIndex = events[indexKey]; if (!fn) { // clear all listeners for a type if function isn't specified delete events[type]; delete events[indexKey]; } else { listeners = context && typeIndex ? 
typeIndex[contextId] : events[type]; if (listeners) { for (j = listeners.length - 1; j >= 0; j--) { if ((listeners[j].action === fn) && (!context || (listeners[j].context === context))) { removed = listeners.splice(j, 1); // set the old action to a no-op, because it is possible // that the listener is being iterated over as part of a dispatch removed[0].action = L.Util.falseFn; } } if (context && typeIndex && (listeners.length === 0)) { delete typeIndex[contextId]; events[indexLenKey]--; } } } } return this; }, clearAllEventListeners: function () { delete this[eventsKey]; return this; }, fireEvent: function (type, data) { // (String[, Object]) if (!this.hasEventListeners(type)) { return this; } var event = L.Util.extend({}, data, { type: type, target: this }); var events = this[eventsKey], listeners, i, len, typeIndex, contextId; if (events[type]) { // make sure adding/removing listeners inside other listeners won't cause infinite loop listeners = events[type].slice(); for (i = 0, len = listeners.length; i < len; i++) { listeners[i].action.call(listeners[i].context || this, event); } } // fire event for the context-indexed listeners as well typeIndex = events[type + '_idx']; for (contextId in typeIndex) { listeners = typeIndex[contextId].slice(); if (listeners) { for (i = 0, len = listeners.length; i < len; i++) { listeners[i].action.call(listeners[i].context || this, event); } } } return this; }, addOneTimeEventListener: function (types, fn, context) { if (L.Util.invokeEach(types, this.addOneTimeEventListener, this, fn, context)) { return this; } var handler = L.bind(function () { this .removeEventListener(types, fn, context) .removeEventListener(types, handler, context); }, this); return this .addEventListener(types, fn, context) .addEventListener(types, handler, context); } }; L.Mixin.Events.on = L.Mixin.Events.addEventListener; L.Mixin.Events.off = L.Mixin.Events.removeEventListener; L.Mixin.Events.once = L.Mixin.Events.addOneTimeEventListener; L.Mixin.Events.fire = L.Mixin.Events.fireEvent; /* * L.Browser handles different browser and feature detections for internal Leaflet use. */ (function () { var ie = !!window.ActiveXObject, ie6 = ie && !window.XMLHttpRequest, ie7 = ie && !document.querySelector, ielt9 = ie && !document.addEventListener, // terrible browser detection to work around Safari / iOS / Android browser bugs ua = navigator.userAgent.toLowerCase(), webkit = ua.indexOf('webkit') !== -1, chrome = ua.indexOf('chrome') !== -1, phantomjs = ua.indexOf('phantom') !== -1, android = ua.indexOf('android') !== -1, android23 = ua.search('android [23]') !== -1, mobile = typeof orientation !== undefined + '', msTouch = window.navigator && window.navigator.msPointerEnabled && window.navigator.msMaxTouchPoints, retina = ('devicePixelRatio' in window && window.devicePixelRatio > 1) || ('matchMedia' in window && window.matchMedia('(min-resolution:144dpi)') && window.matchMedia('(min-resolution:144dpi)').matches), doc = document.documentElement, ie3d = ie && ('transition' in doc.style), webkit3d = ('WebKitCSSMatrix' in window) && ('m11' in new window.WebKitCSSMatrix()), gecko3d = 'MozPerspective' in doc.style, opera3d = 'OTransition' in doc.style, any3d = !window.L_DISABLE_3D && (ie3d || webkit3d || gecko3d || opera3d) && !phantomjs; // PhantomJS has 'ontouchstart' in document.documentElement, but doesn't actually support touch. 
// https://github.com/Leaflet/Leaflet/pull/1434#issuecomment-13843151 var touch = !window.L_NO_TOUCH && !phantomjs && (function () { var startName = 'ontouchstart'; // IE10+ (We simulate these into touch* events in L.DomEvent and L.DomEvent.MsTouch) or WebKit, etc. if (msTouch || (startName in doc)) { return true; } // Firefox/Gecko var div = document.createElement('div'), supported = false; if (!div.setAttribute) { return false; } div.setAttribute(startName, 'return;'); if (typeof div[startName] === 'function') { supported = true; } div.removeAttribute(startName); div = null; return supported; }()); L.Browser = { ie: ie, ie6: ie6, ie7: ie7, ielt9: ielt9, webkit: webkit, android: android, android23: android23, chrome: chrome, ie3d: ie3d, webkit3d: webkit3d, gecko3d: gecko3d, opera3d: opera3d, any3d: any3d, mobile: mobile, mobileWebkit: mobile && webkit, mobileWebkit3d: mobile && webkit3d, mobileOpera: mobile && window.opera, touch: touch, msTouch: msTouch, retina: retina }; }()); /* * L.Point represents a point with x and y coordinates. */ L.Point = function (/*Number*/ x, /*Number*/ y, /*Boolean*/ round) { this.x = (round ? Math.round(x) : x); this.y = (round ? Math.round(y) : y); }; L.Point.prototype = { clone: function () { return new L.Point(this.x, this.y); }, // non-destructive, returns a new point add: function (point) { return this.clone()._add(L.point(point)); }, // destructive, used directly for performance in situations where it's safe to modify existing point _add: function (point) { this.x += point.x; this.y += point.y; return this; }, subtract: function (point) { return this.clone()._subtract(L.point(point)); }, _subtract: function (point) { this.x -= point.x; this.y -= point.y; return this; }, divideBy: function (num) { return this.clone()._divideBy(num); }, _divideBy: function (num) { this.x /= num; this.y /= num; return this; }, multiplyBy: function (num) { return this.clone()._multiplyBy(num); }, _multiplyBy: function (num) { this.x *= num; this.y *= num; return this; }, round: function () { return this.clone()._round(); }, _round: function () { this.x = Math.round(this.x); this.y = Math.round(this.y); return this; }, floor: function () { return this.clone()._floor(); }, _floor: function () { this.x = Math.floor(this.x); this.y = Math.floor(this.y); return this; }, distanceTo: function (point) { point = L.point(point); var x = point.x - this.x, y = point.y - this.y; return Math.sqrt(x * x + y * y); }, equals: function (point) { point = L.point(point); return point.x === this.x && point.y === this.y; }, contains: function (point) { point = L.point(point); return Math.abs(point.x) <= Math.abs(this.x) && Math.abs(point.y) <= Math.abs(this.y); }, toString: function () { return 'Point(' + L.Util.formatNum(this.x) + ', ' + L.Util.formatNum(this.y) + ')'; } }; L.point = function (x, y, round) { if (x instanceof L.Point) { return x; } if (L.Util.isArray(x)) { return new L.Point(x[0], x[1]); } if (x === undefined || x === null) { return x; } return new L.Point(x, y, round); }; /* * L.Bounds represents a rectangular area on the screen in pixel coordinates. */ L.Bounds = function (a, b) { //(Point, Point) or Point[] if (!a) { return; } var points = b ? 
[a, b] : a; for (var i = 0, len = points.length; i < len; i++) { this.extend(points[i]); } }; L.Bounds.prototype = { // extend the bounds to contain the given point extend: function (point) { // (Point) point = L.point(point); if (!this.min && !this.max) { this.min = point.clone(); this.max = point.clone(); } else { this.min.x = Math.min(point.x, this.min.x); this.max.x = Math.max(point.x, this.max.x); this.min.y = Math.min(point.y, this.min.y); this.max.y = Math.max(point.y, this.max.y); } return this; }, getCenter: function (round) { // (Boolean) -> Point return new L.Point( (this.min.x + this.max.x) / 2, (this.min.y + this.max.y) / 2, round); }, getBottomLeft: function () { // -> Point return new L.Point(this.min.x, this.max.y); }, getTopRight: function () { // -> Point return new L.Point(this.max.x, this.min.y); }, getSize: function () { return this.max.subtract(this.min); }, contains: function (obj) { // (Bounds) or (Point) -> Boolean var min, max; if (typeof obj[0] === 'number' || obj instanceof L.Point) { obj = L.point(obj); } else { obj = L.bounds(obj); } if (obj instanceof L.Bounds) { min = obj.min; max = obj.max; } else { min = max = obj; } return (min.x >= this.min.x) && (max.x <= this.max.x) && (min.y >= this.min.y) && (max.y <= this.max.y); }, intersects: function (bounds) { // (Bounds) -> Boolean bounds = L.bounds(bounds); var min = this.min, max = this.max, min2 = bounds.min, max2 = bounds.max, xIntersects = (max2.x >= min.x) && (min2.x <= max.x), yIntersects = (max2.y >= min.y) && (min2.y <= max.y); return xIntersects && yIntersects; }, isValid: function () { return !!(this.min && this.max); } }; L.bounds = function (a, b) { // (Bounds) or (Point, Point) or (Point[]) if (!a || a instanceof L.Bounds) { return a; } return new L.Bounds(a, b); }; /* * L.Transformation is an utility class to perform simple point transformations through a 2d-matrix. */ L.Transformation = function (a, b, c, d) { this._a = a; this._b = b; this._c = c; this._d = d; }; L.Transformation.prototype = { transform: function (point, scale) { // (Point, Number) -> Point return this._transform(point.clone(), scale); }, // destructive transform (faster) _transform: function (point, scale) { scale = scale || 1; point.x = scale * (this._a * point.x + this._b); point.y = scale * (this._c * point.y + this._d); return point; }, untransform: function (point, scale) { scale = scale || 1; return new L.Point( (point.x / scale - this._b) / this._a, (point.y / scale - this._d) / this._c); } }; /* * L.DomUtil contains various utility functions for working with DOM. */ L.DomUtil = { get: function (id) { return (typeof id === 'string' ? document.getElementById(id) : id); }, getStyle: function (el, style) { var value = el.style[style]; if (!value && el.currentStyle) { value = el.currentStyle[style]; } if ((!value || value === 'auto') && document.defaultView) { var css = document.defaultView.getComputedStyle(el, null); value = css ? css[style] : null; } return value === 'auto' ? 
null : value; }, getViewportOffset: function (element) { var top = 0, left = 0, el = element, docBody = document.body, docEl = document.documentElement, pos, ie7 = L.Browser.ie7; do { top += el.offsetTop || 0; left += el.offsetLeft || 0; //add borders top += parseInt(L.DomUtil.getStyle(el, 'borderTopWidth'), 10) || 0; left += parseInt(L.DomUtil.getStyle(el, 'borderLeftWidth'), 10) || 0; pos = L.DomUtil.getStyle(el, 'position'); if (el.offsetParent === docBody && pos === 'absolute') { break; } if (pos === 'fixed') { top += docBody.scrollTop || docEl.scrollTop || 0; left += docBody.scrollLeft || docEl.scrollLeft || 0; break; } if (pos === 'relative' && !el.offsetLeft) { var width = L.DomUtil.getStyle(el, 'width'), maxWidth = L.DomUtil.getStyle(el, 'max-width'), r = el.getBoundingClientRect(); if (width !== 'none' || maxWidth !== 'none') { left += r.left + el.clientLeft; } //calculate full y offset since we're breaking out of the loop top += r.top + (docBody.scrollTop || docEl.scrollTop || 0); break; } el = el.offsetParent; } while (el); el = element; do { if (el === docBody) { break; } top -= el.scrollTop || 0; left -= el.scrollLeft || 0; // webkit (and ie <= 7) handles RTL scrollLeft different to everyone else // https://code.google.com/p/closure-library/source/browse/trunk/closure/goog/style/bidi.js if (!L.DomUtil.documentIsLtr() && (L.Browser.webkit || ie7)) { left += el.scrollWidth - el.clientWidth; // ie7 shows the scrollbar by default and provides clientWidth counting it, so we // need to add it back in if it is visible; scrollbar is on the left as we are RTL if (ie7 && L.DomUtil.getStyle(el, 'overflow-y') !== 'hidden' && L.DomUtil.getStyle(el, 'overflow') !== 'hidden') { left += 17; } } el = el.parentNode; } while (el); return new L.Point(left, top); }, documentIsLtr: function () { if (!L.DomUtil._docIsLtrCached) { L.DomUtil._docIsLtrCached = true; L.DomUtil._docIsLtr = L.DomUtil.getStyle(document.body, 'direction') === 'ltr'; } return L.DomUtil._docIsLtr; }, create: function (tagName, className, container) { var el = document.createElement(tagName); el.className = className; if (container) { container.appendChild(el); } return el; }, hasClass: function (el, name) { return (el.className.length > 0) && new RegExp('(^|\\s)' + name + '(\\s|$)').test(el.className); }, addClass: function (el, name) { if (!L.DomUtil.hasClass(el, name)) { el.className += (el.className ? ' ' : '') + name; } }, removeClass: function (el, name) { el.className = L.Util.trim((' ' + el.className + ' ').replace(' ' + name + ' ', ' ')); }, setOpacity: function (el, value) { if ('opacity' in el.style) { el.style.opacity = value; } else if ('filter' in el.style) { var filter = false, filterName = 'DXImageTransform.Microsoft.Alpha'; // filters collection throws an error if we try to retrieve a filter that doesn't exist try { filter = el.filters.item(filterName); } catch (e) { // don't set opacity to 1 if we haven't already set an opacity, // it isn't needed and breaks transparent pngs. 
if (value === 1) { return; } } value = Math.round(value * 100); if (filter) { filter.Enabled = (value !== 100); filter.Opacity = value; } else { el.style.filter += ' progid:' + filterName + '(opacity=' + value + ')'; } } }, testProp: function (props) { var style = document.documentElement.style; for (var i = 0; i < props.length; i++) { if (props[i] in style) { return props[i]; } } return false; }, getTranslateString: function (point) { // on WebKit browsers (Chrome/Safari/iOS Safari/Android) using translate3d instead of translate // makes animation smoother as it ensures HW accel is used. Firefox 13 doesn't care // (same speed either way), Opera 12 doesn't support translate3d var is3d = L.Browser.webkit3d, open = 'translate' + (is3d ? '3d' : '') + '(', close = (is3d ? ',0' : '') + ')'; return open + point.x + 'px,' + point.y + 'px' + close; }, getScaleString: function (scale, origin) { var preTranslateStr = L.DomUtil.getTranslateString(origin.add(origin.multiplyBy(-1 * scale))), scaleStr = ' scale(' + scale + ') '; return preTranslateStr + scaleStr; }, setPosition: function (el, point, disable3D) { // (HTMLElement, Point[, Boolean]) // jshint camelcase: false el._leaflet_pos = point; if (!disable3D && L.Browser.any3d) { el.style[L.DomUtil.TRANSFORM] = L.DomUtil.getTranslateString(point); // workaround for Android 2/3 stability (https://github.com/CloudMade/Leaflet/issues/69) if (L.Browser.mobileWebkit3d) { el.style.WebkitBackfaceVisibility = 'hidden'; } } else { el.style.left = point.x + 'px'; el.style.top = point.y + 'px'; } }, getPosition: function (el) { // this method is only used for elements previously positioned using setPosition, // so it's safe to cache the position for performance // jshint camelcase: false return el._leaflet_pos; } }; // prefix style property names L.DomUtil.TRANSFORM = L.DomUtil.testProp( ['transform', 'WebkitTransform', 'OTransform', 'MozTransform', 'msTransform']); // webkitTransition comes first because some browser versions that drop vendor prefix don't do // the same for the transitionend event, in particular the Android 4.1 stock browser L.DomUtil.TRANSITION = L.DomUtil.testProp( ['webkitTransition', 'transition', 'OTransition', 'MozTransition', 'msTransition']); L.DomUtil.TRANSITION_END = L.DomUtil.TRANSITION === 'webkitTransition' || L.DomUtil.TRANSITION === 'OTransition' ? 
L.DomUtil.TRANSITION + 'End' : 'transitionend'; (function () { var userSelectProperty = L.DomUtil.testProp( ['userSelect', 'WebkitUserSelect', 'OUserSelect', 'MozUserSelect', 'msUserSelect']); var userDragProperty = L.DomUtil.testProp( ['userDrag', 'WebkitUserDrag', 'OUserDrag', 'MozUserDrag', 'msUserDrag']); L.extend(L.DomUtil, { disableTextSelection: function () { if (userSelectProperty) { var style = document.documentElement.style; this._userSelect = style[userSelectProperty]; style[userSelectProperty] = 'none'; } else { L.DomEvent.on(window, 'selectstart', L.DomEvent.stop); } }, enableTextSelection: function () { if (userSelectProperty) { document.documentElement.style[userSelectProperty] = this._userSelect; delete this._userSelect; } else { L.DomEvent.off(window, 'selectstart', L.DomEvent.stop); } }, disableImageDrag: function () { if (userDragProperty) { var style = document.documentElement.style; this._userDrag = style[userDragProperty]; style[userDragProperty] = 'none'; } else { L.DomEvent.on(window, 'dragstart', L.DomEvent.stop); } }, enableImageDrag: function () { if (userDragProperty) { document.documentElement.style[userDragProperty] = this._userDrag; delete this._userDrag; } else { L.DomEvent.off(window, 'dragstart', L.DomEvent.stop); } } }); })(); /* * L.LatLng represents a geographical point with latitude and longitude coordinates. */ L.LatLng = function (rawLat, rawLng) { // (Number, Number) var lat = parseFloat(rawLat), lng = parseFloat(rawLng); if (isNaN(lat) || isNaN(lng)) { throw new Error('Invalid LatLng object: (' + rawLat + ', ' + rawLng + ')'); } this.lat = lat; this.lng = lng; }; L.extend(L.LatLng, { DEG_TO_RAD: Math.PI / 180, RAD_TO_DEG: 180 / Math.PI, MAX_MARGIN: 1.0E-9 // max margin of error for the "equals" check }); L.LatLng.prototype = { equals: function (obj) { // (LatLng) -> Boolean if (!obj) { return false; } obj = L.latLng(obj); var margin = Math.max( Math.abs(this.lat - obj.lat), Math.abs(this.lng - obj.lng)); return margin <= L.LatLng.MAX_MARGIN; }, toString: function (precision) { // (Number) -> String return 'LatLng(' + L.Util.formatNum(this.lat, precision) + ', ' + L.Util.formatNum(this.lng, precision) + ')'; }, // Haversine distance formula, see http://en.wikipedia.org/wiki/Haversine_formula // TODO move to projection code, LatLng shouldn't know about Earth distanceTo: function (other) { // (LatLng) -> Number other = L.latLng(other); var R = 6378137, // earth radius in meters d2r = L.LatLng.DEG_TO_RAD, dLat = (other.lat - this.lat) * d2r, dLon = (other.lng - this.lng) * d2r, lat1 = this.lat * d2r, lat2 = other.lat * d2r, sin1 = Math.sin(dLat / 2), sin2 = Math.sin(dLon / 2); var a = sin1 * sin1 + sin2 * sin2 * Math.cos(lat1) * Math.cos(lat2); return R * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); }, wrap: function (a, b) { // (Number, Number) -> LatLng var lng = this.lng; a = a || -180; b = b || 180; lng = (lng + b) % (b - a) + (lng < a || lng === b ? b : a); return new L.LatLng(this.lat, lng); } }; L.latLng = function (a, b) { // (LatLng) or ([Number, Number]) or (Number, Number) if (a instanceof L.LatLng) { return a; } if (L.Util.isArray(a)) { return new L.LatLng(a[0], a[1]); } if (a === undefined || a === null) { return a; } if (typeof a === 'object' && 'lat' in a) { return new L.LatLng(a.lat, 'lng' in a ? a.lng : a.lon); } return new L.LatLng(a, b); }; /* * L.LatLngBounds represents a rectangular area on the map in geographical coordinates. 
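 *
 * Illustrative sketch (editor's addition; the coordinates are arbitrary):
 *
 *   var b = L.latLngBounds([[40.712, -74.227], [40.774, -74.125]]);
 *   b.contains([40.75, -74.2]); // true
 *   b.pad(0.5);                 // new bounds, grown by half its size on each side
 *   b.toBBoxString();           // "-74.227,40.712,-74.125,40.774"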
*/ L.LatLngBounds = function (southWest, northEast) { // (LatLng, LatLng) or (LatLng[]) if (!southWest) { return; } var latlngs = northEast ? [southWest, northEast] : southWest; for (var i = 0, len = latlngs.length; i < len; i++) { this.extend(latlngs[i]); } }; L.LatLngBounds.prototype = { // extend the bounds to contain the given point or bounds extend: function (obj) { // (LatLng) or (LatLngBounds) if (!obj) { return this; } if (typeof obj[0] === 'number' || typeof obj[0] === 'string' || obj instanceof L.LatLng) { obj = L.latLng(obj); } else { obj = L.latLngBounds(obj); } if (obj instanceof L.LatLng) { if (!this._southWest && !this._northEast) { this._southWest = new L.LatLng(obj.lat, obj.lng); this._northEast = new L.LatLng(obj.lat, obj.lng); } else { this._southWest.lat = Math.min(obj.lat, this._southWest.lat); this._southWest.lng = Math.min(obj.lng, this._southWest.lng); this._northEast.lat = Math.max(obj.lat, this._northEast.lat); this._northEast.lng = Math.max(obj.lng, this._northEast.lng); } } else if (obj instanceof L.LatLngBounds) { this.extend(obj._southWest); this.extend(obj._northEast); } return this; }, // extend the bounds by a percentage pad: function (bufferRatio) { // (Number) -> LatLngBounds var sw = this._southWest, ne = this._northEast, heightBuffer = Math.abs(sw.lat - ne.lat) * bufferRatio, widthBuffer = Math.abs(sw.lng - ne.lng) * bufferRatio; return new L.LatLngBounds( new L.LatLng(sw.lat - heightBuffer, sw.lng - widthBuffer), new L.LatLng(ne.lat + heightBuffer, ne.lng + widthBuffer)); }, getCenter: function () { // -> LatLng return new L.LatLng( (this._southWest.lat + this._northEast.lat) / 2, (this._southWest.lng + this._northEast.lng) / 2); }, getSouthWest: function () { return this._southWest; }, getNorthEast: function () { return this._northEast; }, getNorthWest: function () { return new L.LatLng(this.getNorth(), this.getWest()); }, getSouthEast: function () { return new L.LatLng(this.getSouth(), this.getEast()); }, getWest: function () { return this._southWest.lng; }, getSouth: function () { return this._southWest.lat; }, getEast: function () { return this._northEast.lng; }, getNorth: function () { return this._northEast.lat; }, contains: function (obj) { // (LatLngBounds) or (LatLng) -> Boolean if (typeof obj[0] === 'number' || obj instanceof L.LatLng) { obj = L.latLng(obj); } else { obj = L.latLngBounds(obj); } var sw = this._southWest, ne = this._northEast, sw2, ne2; if (obj instanceof L.LatLngBounds) { sw2 = obj.getSouthWest(); ne2 = obj.getNorthEast(); } else { sw2 = ne2 = obj; } return (sw2.lat >= sw.lat) && (ne2.lat <= ne.lat) && (sw2.lng >= sw.lng) && (ne2.lng <= ne.lng); }, intersects: function (bounds) { // (LatLngBounds) bounds = L.latLngBounds(bounds); var sw = this._southWest, ne = this._northEast, sw2 = bounds.getSouthWest(), ne2 = bounds.getNorthEast(), latIntersects = (ne2.lat >= sw.lat) && (sw2.lat <= ne.lat), lngIntersects = (ne2.lng >= sw.lng) && (sw2.lng <= ne.lng); return latIntersects && lngIntersects; }, toBBoxString: function () { return [this.getWest(), this.getSouth(), this.getEast(), this.getNorth()].join(','); }, equals: function (bounds) { // (LatLngBounds) if (!bounds) { return false; } bounds = L.latLngBounds(bounds); return this._southWest.equals(bounds.getSouthWest()) && this._northEast.equals(bounds.getNorthEast()); }, isValid: function () { return !!(this._southWest && this._northEast); } }; //TODO International date line? 
L.latLngBounds = function (a, b) { // (LatLngBounds) or (LatLng, LatLng) if (!a || a instanceof L.LatLngBounds) { return a; } return new L.LatLngBounds(a, b); }; /* * L.Projection contains various geographical projections used by CRS classes. */ L.Projection = {}; /* * Spherical Mercator is the most popular map projection, used by EPSG:3857 CRS used by default. */ L.Projection.SphericalMercator = { MAX_LATITUDE: 85.0511287798, project: function (latlng) { // (LatLng) -> Point var d = L.LatLng.DEG_TO_RAD, max = this.MAX_LATITUDE, lat = Math.max(Math.min(max, latlng.lat), -max), x = latlng.lng * d, y = lat * d; y = Math.log(Math.tan((Math.PI / 4) + (y / 2))); return new L.Point(x, y); }, unproject: function (point) { // (Point, Boolean) -> LatLng var d = L.LatLng.RAD_TO_DEG, lng = point.x * d, lat = (2 * Math.atan(Math.exp(point.y)) - (Math.PI / 2)) * d; return new L.LatLng(lat, lng); } }; /* * Simple equirectangular (Plate Carree) projection, used by CRS like EPSG:4326 and Simple. */ L.Projection.LonLat = { project: function (latlng) { return new L.Point(latlng.lng, latlng.lat); }, unproject: function (point) { return new L.LatLng(point.y, point.x); } }; /* * L.CRS is a base object for all defined CRS (Coordinate Reference Systems) in Leaflet. */ L.CRS = { latLngToPoint: function (latlng, zoom) { // (LatLng, Number) -> Point var projectedPoint = this.projection.project(latlng), scale = this.scale(zoom); return this.transformation._transform(projectedPoint, scale); }, pointToLatLng: function (point, zoom) { // (Point, Number[, Boolean]) -> LatLng var scale = this.scale(zoom), untransformedPoint = this.transformation.untransform(point, scale); return this.projection.unproject(untransformedPoint); }, project: function (latlng) { return this.projection.project(latlng); }, scale: function (zoom) { return 256 * Math.pow(2, zoom); } }; /* * A simple CRS that can be used for flat non-Earth maps like panoramas or game maps. */ L.CRS.Simple = L.extend({}, L.CRS, { projection: L.Projection.LonLat, transformation: new L.Transformation(1, 0, -1, 0), scale: function (zoom) { return Math.pow(2, zoom); } }); /* * L.CRS.EPSG3857 (Spherical Mercator) is the most common CRS for web mapping * and is used by Leaflet by default. */ L.CRS.EPSG3857 = L.extend({}, L.CRS, { code: 'EPSG:3857', projection: L.Projection.SphericalMercator, transformation: new L.Transformation(0.5 / Math.PI, 0.5, -0.5 / Math.PI, 0.5), project: function (latlng) { // (LatLng) -> Point var projectedPoint = this.projection.project(latlng), earthRadius = 6378137; return projectedPoint.multiplyBy(earthRadius); } }); L.CRS.EPSG900913 = L.extend({}, L.CRS.EPSG3857, { code: 'EPSG:900913' }); /* * L.CRS.EPSG4326 is a CRS popular among advanced GIS specialists. */ L.CRS.EPSG4326 = L.extend({}, L.CRS, { code: 'EPSG:4326', projection: L.Projection.LonLat, transformation: new L.Transformation(1 / 360, 0.5, -1 / 360, 0.5) }); /* * L.Map is the central class of the API - it is used to create a map. 
*/ L.Map = L.Class.extend({ includes: L.Mixin.Events, options: { crs: L.CRS.EPSG3857, /* center: LatLng, zoom: Number, layers: Array, */ fadeAnimation: L.DomUtil.TRANSITION && !L.Browser.android23, trackResize: true, markerZoomAnimation: L.DomUtil.TRANSITION && L.Browser.any3d }, initialize: function (id, options) { // (HTMLElement or String, Object) options = L.setOptions(this, options); this._initContainer(id); this._initLayout(); this._initEvents(); if (options.maxBounds) { this.setMaxBounds(options.maxBounds); } if (options.center && options.zoom !== undefined) { this.setView(L.latLng(options.center), options.zoom, {reset: true}); } this._handlers = []; this._layers = {}; this._zoomBoundLayers = {}; this._tileLayersNum = 0; this.callInitHooks(); this._addLayers(options.layers); }, // public methods that modify map state // replaced by animation-powered implementation in Map.PanAnimation.js setView: function (center, zoom) { this._resetView(L.latLng(center), this._limitZoom(zoom)); return this; }, setZoom: function (zoom, options) { return this.setView(this.getCenter(), zoom, {zoom: options}); }, zoomIn: function (delta, options) { return this.setZoom(this._zoom + (delta || 1), options); }, zoomOut: function (delta, options) { return this.setZoom(this._zoom - (delta || 1), options); }, setZoomAround: function (latlng, zoom, options) { var scale = this.getZoomScale(zoom), viewHalf = this.getSize().divideBy(2), containerPoint = latlng instanceof L.Point ? latlng : this.latLngToContainerPoint(latlng), centerOffset = containerPoint.subtract(viewHalf).multiplyBy(1 - 1 / scale), newCenter = this.containerPointToLatLng(viewHalf.add(centerOffset)); return this.setView(newCenter, zoom, {zoom: options}); }, fitBounds: function (bounds, options) { options = options || {}; bounds = bounds.getBounds ? 
bounds.getBounds() : L.latLngBounds(bounds); var paddingTL = L.point(options.paddingTopLeft || options.padding || [0, 0]), paddingBR = L.point(options.paddingBottomRight || options.padding || [0, 0]), zoom = this.getBoundsZoom(bounds, false, paddingTL.add(paddingBR)), paddingOffset = paddingBR.subtract(paddingTL).divideBy(2), swPoint = this.project(bounds.getSouthWest(), zoom), nePoint = this.project(bounds.getNorthEast(), zoom), center = this.unproject(swPoint.add(nePoint).divideBy(2).add(paddingOffset), zoom); return this.setView(center, zoom, options); }, fitWorld: function (options) { return this.fitBounds([[-90, -180], [90, 180]], options); }, panTo: function (center, options) { // (LatLng) return this.setView(center, this._zoom, {pan: options}); }, panBy: function (offset) { // (Point) // replaced with animated panBy in Map.Animation.js this.fire('movestart'); this._rawPanBy(L.point(offset)); this.fire('move'); return this.fire('moveend'); }, setMaxBounds: function (bounds) { bounds = L.latLngBounds(bounds); this.options.maxBounds = bounds; if (!bounds) { this._boundsMinZoom = null; this.off('moveend', this._panInsideMaxBounds, this); return this; } var minZoom = this.getBoundsZoom(bounds, true); this._boundsMinZoom = minZoom; if (this._loaded) { if (this._zoom < minZoom) { this.setView(bounds.getCenter(), minZoom); } else { this.panInsideBounds(bounds); } } this.on('moveend', this._panInsideMaxBounds, this); return this; }, panInsideBounds: function (bounds) { bounds = L.latLngBounds(bounds); var viewBounds = this.getPixelBounds(), viewSw = viewBounds.getBottomLeft(), viewNe = viewBounds.getTopRight(), sw = this.project(bounds.getSouthWest()), ne = this.project(bounds.getNorthEast()), dx = 0, dy = 0; if (viewNe.y < ne.y) { // north dy = Math.ceil(ne.y - viewNe.y); } if (viewNe.x > ne.x) { // east dx = Math.floor(ne.x - viewNe.x); } if (viewSw.y > sw.y) { // south dy = Math.floor(sw.y - viewSw.y); } if (viewSw.x < sw.x) { // west dx = Math.ceil(sw.x - viewSw.x); } if (dx || dy) { return this.panBy([dx, dy]); } return this; }, addLayer: function (layer) { // TODO method is too big, refactor var id = L.stamp(layer); if (this._layers[id]) { return this; } this._layers[id] = layer; // TODO getMaxZoom, getMinZoom in ILayer (instead of options) if (layer.options && (!isNaN(layer.options.maxZoom) || !isNaN(layer.options.minZoom))) { this._zoomBoundLayers[id] = layer; this._updateZoomLevels(); } // TODO looks ugly, refactor!!! 
if (this.options.zoomAnimation && L.TileLayer && (layer instanceof L.TileLayer)) { this._tileLayersNum++; this._tileLayersToLoad++; layer.on('load', this._onTileLayerLoad, this); } if (this._loaded) { this._layerAdd(layer); } return this; }, removeLayer: function (layer) { var id = L.stamp(layer); if (!this._layers[id]) { return; } if (this._loaded) { layer.onRemove(this); this.fire('layerremove', {layer: layer}); } delete this._layers[id]; if (this._zoomBoundLayers[id]) { delete this._zoomBoundLayers[id]; this._updateZoomLevels(); } // TODO looks ugly, refactor if (this.options.zoomAnimation && L.TileLayer && (layer instanceof L.TileLayer)) { this._tileLayersNum--; this._tileLayersToLoad--; layer.off('load', this._onTileLayerLoad, this); } return this; }, hasLayer: function (layer) { if (!layer) { return false; } return (L.stamp(layer) in this._layers); }, eachLayer: function (method, context) { for (var i in this._layers) { method.call(context, this._layers[i]); } return this; }, invalidateSize: function (options) { options = L.extend({ animate: false, pan: true }, options === true ? {animate: true} : options); var oldSize = this.getSize(); this._sizeChanged = true; if (this.options.maxBounds) { this.setMaxBounds(this.options.maxBounds); } if (!this._loaded) { return this; } var newSize = this.getSize(), offset = oldSize.subtract(newSize).divideBy(2).round(); if (!offset.x && !offset.y) { return this; } if (options.animate && options.pan) { this.panBy(offset); } else { if (options.pan) { this._rawPanBy(offset); } this.fire('move'); // make sure moveend is not fired too often on resize clearTimeout(this._sizeTimer); this._sizeTimer = setTimeout(L.bind(this.fire, this, 'moveend'), 200); } return this.fire('resize', { oldSize: oldSize, newSize: newSize }); }, // TODO handler.addTo addHandler: function (name, HandlerClass) { if (!HandlerClass) { return; } var handler = this[name] = new HandlerClass(this); this._handlers.push(handler); if (this.options[name]) { handler.enable(); } return this; }, remove: function () { if (this._loaded) { this.fire('unload'); } this._initEvents('off'); delete this._container._leaflet; this._clearPanes(); if (this._clearControlPos) { this._clearControlPos(); } this._clearHandlers(); return this; }, // public methods for getting map state getCenter: function () { // (Boolean) -> LatLng this._checkIfLoaded(); if (!this._moved()) { return this._initialCenter; } return this.layerPointToLatLng(this._getCenterLayerPoint()); }, getZoom: function () { return this._zoom; }, getBounds: function () { var bounds = this.getPixelBounds(), sw = this.unproject(bounds.getBottomLeft()), ne = this.unproject(bounds.getTopRight()); return new L.LatLngBounds(sw, ne); }, getMinZoom: function () { var z1 = this.options.minZoom || 0, z2 = this._layersMinZoom || 0, z3 = this._boundsMinZoom || 0; return Math.max(z1, z2, z3); }, getMaxZoom: function () { var z1 = this.options.maxZoom === undefined ? Infinity : this.options.maxZoom, z2 = this._layersMaxZoom === undefined ? Infinity : this._layersMaxZoom; return Math.min(z1, z2); }, getBoundsZoom: function (bounds, inside, padding) { // (LatLngBounds[, Boolean, Point]) -> Number bounds = L.latLngBounds(bounds); var zoom = this.getMinZoom() - (inside ? 
1 : 0), maxZoom = this.getMaxZoom(), size = this.getSize(), nw = bounds.getNorthWest(), se = bounds.getSouthEast(), zoomNotFound = true, boundsSize; padding = L.point(padding || [0, 0]); do { zoom++; boundsSize = this.project(se, zoom).subtract(this.project(nw, zoom)).add(padding); zoomNotFound = !inside ? size.contains(boundsSize) : boundsSize.x < size.x || boundsSize.y < size.y; } while (zoomNotFound && zoom <= maxZoom); if (zoomNotFound && inside) { return null; } return inside ? zoom : zoom - 1; }, getSize: function () { if (!this._size || this._sizeChanged) { this._size = new L.Point( this._container.clientWidth, this._container.clientHeight); this._sizeChanged = false; } return this._size.clone(); }, getPixelBounds: function () { var topLeftPoint = this._getTopLeftPoint(); return new L.Bounds(topLeftPoint, topLeftPoint.add(this.getSize())); }, getPixelOrigin: function () { this._checkIfLoaded(); return this._initialTopLeftPoint; }, getPanes: function () { return this._panes; }, getContainer: function () { return this._container; }, // TODO replace with universal implementation after refactoring projections getZoomScale: function (toZoom) { var crs = this.options.crs; return crs.scale(toZoom) / crs.scale(this._zoom); }, getScaleZoom: function (scale) { return this._zoom + (Math.log(scale) / Math.LN2); }, // conversion methods project: function (latlng, zoom) { // (LatLng[, Number]) -> Point zoom = zoom === undefined ? this._zoom : zoom; return this.options.crs.latLngToPoint(L.latLng(latlng), zoom); }, unproject: function (point, zoom) { // (Point[, Number]) -> LatLng zoom = zoom === undefined ? this._zoom : zoom; return this.options.crs.pointToLatLng(L.point(point), zoom); }, layerPointToLatLng: function (point) { // (Point) var projectedPoint = L.point(point).add(this.getPixelOrigin()); return this.unproject(projectedPoint); }, latLngToLayerPoint: function (latlng) { // (LatLng) var projectedPoint = this.project(L.latLng(latlng))._round(); return projectedPoint._subtract(this.getPixelOrigin()); }, containerPointToLayerPoint: function (point) { // (Point) return L.point(point).subtract(this._getMapPanePos()); }, layerPointToContainerPoint: function (point) { // (Point) return L.point(point).add(this._getMapPanePos()); }, containerPointToLatLng: function (point) { var layerPoint = this.containerPointToLayerPoint(L.point(point)); return this.layerPointToLatLng(layerPoint); }, latLngToContainerPoint: function (latlng) { return this.layerPointToContainerPoint(this.latLngToLayerPoint(L.latLng(latlng))); }, mouseEventToContainerPoint: function (e) { // (MouseEvent) return L.DomEvent.getMousePosition(e, this._container); }, mouseEventToLayerPoint: function (e) { // (MouseEvent) return this.containerPointToLayerPoint(this.mouseEventToContainerPoint(e)); }, mouseEventToLatLng: function (e) { // (MouseEvent) return this.layerPointToLatLng(this.mouseEventToLayerPoint(e)); }, // map initialization methods _initContainer: function (id) { var container = this._container = L.DomUtil.get(id); if (!container) { throw new Error('Map container not found.'); } else if (container._leaflet) { throw new Error('Map container is already initialized.'); } container._leaflet = true; }, _initLayout: function () { var container = this._container; L.DomUtil.addClass(container, 'leaflet-container' + (L.Browser.touch ? ' leaflet-touch' : '') + (L.Browser.retina ? ' leaflet-retina' : '') + (this.options.fadeAnimation ? 
' leaflet-fade-anim' : '')); var position = L.DomUtil.getStyle(container, 'position'); if (position !== 'absolute' && position !== 'relative' && position !== 'fixed') { container.style.position = 'relative'; } this._initPanes(); if (this._initControlPos) { this._initControlPos(); } }, _initPanes: function () { var panes = this._panes = {}; this._mapPane = panes.mapPane = this._createPane('leaflet-map-pane', this._container); this._tilePane = panes.tilePane = this._createPane('leaflet-tile-pane', this._mapPane); panes.objectsPane = this._createPane('leaflet-objects-pane', this._mapPane); panes.shadowPane = this._createPane('leaflet-shadow-pane'); panes.overlayPane = this._createPane('leaflet-overlay-pane'); panes.markerPane = this._createPane('leaflet-marker-pane'); panes.popupPane = this._createPane('leaflet-popup-pane'); var zoomHide = ' leaflet-zoom-hide'; if (!this.options.markerZoomAnimation) { L.DomUtil.addClass(panes.markerPane, zoomHide); L.DomUtil.addClass(panes.shadowPane, zoomHide); L.DomUtil.addClass(panes.popupPane, zoomHide); } }, _createPane: function (className, container) { return L.DomUtil.create('div', className, container || this._panes.objectsPane); }, _clearPanes: function () { this._container.removeChild(this._mapPane); }, _addLayers: function (layers) { layers = layers ? (L.Util.isArray(layers) ? layers : [layers]) : []; for (var i = 0, len = layers.length; i < len; i++) { this.addLayer(layers[i]); } }, // private methods that modify map state _resetView: function (center, zoom, preserveMapOffset, afterZoomAnim) { var zoomChanged = (this._zoom !== zoom); if (!afterZoomAnim) { this.fire('movestart'); if (zoomChanged) { this.fire('zoomstart'); } } this._zoom = zoom; this._initialCenter = center; this._initialTopLeftPoint = this._getNewTopLeftPoint(center); if (!preserveMapOffset) { L.DomUtil.setPosition(this._mapPane, new L.Point(0, 0)); } else { this._initialTopLeftPoint._add(this._getMapPanePos()); } this._tileLayersToLoad = this._tileLayersNum; var loading = !this._loaded; this._loaded = true; if (loading) { this.fire('load'); this.eachLayer(this._layerAdd, this); } this.fire('viewreset', {hard: !preserveMapOffset}); this.fire('move'); if (zoomChanged || afterZoomAnim) { this.fire('zoomend'); } this.fire('moveend', {hard: !preserveMapOffset}); }, _rawPanBy: function (offset) { L.DomUtil.setPosition(this._mapPane, this._getMapPanePos().subtract(offset)); }, _getZoomSpan: function () { return this.getMaxZoom() - this.getMinZoom(); }, _updateZoomLevels: function () { var i, minZoom = Infinity, maxZoom = -Infinity, oldZoomSpan = this._getZoomSpan(); for (i in this._zoomBoundLayers) { var layer = this._zoomBoundLayers[i]; if (!isNaN(layer.options.minZoom)) { minZoom = Math.min(minZoom, layer.options.minZoom); } if (!isNaN(layer.options.maxZoom)) { maxZoom = Math.max(maxZoom, layer.options.maxZoom); } } if (i === undefined) { // we have no tilelayers this._layersMaxZoom = this._layersMinZoom = undefined; } else { this._layersMaxZoom = maxZoom; this._layersMinZoom = minZoom; } if (oldZoomSpan !== this._getZoomSpan()) { this.fire('zoomlevelschange'); } }, _panInsideMaxBounds: function () { this.panInsideBounds(this.options.maxBounds); }, _checkIfLoaded: function () { if (!this._loaded) { throw new Error('Set map center and zoom first.'); } }, // map events _initEvents: function (onOff) { if (!L.DomEvent) { return; } onOff = onOff || 'on'; L.DomEvent[onOff](this._container, 'click', this._onMouseClick, this); var events = ['dblclick', 'mousedown', 'mouseup', 'mouseenter', 
'mouseleave', 'mousemove', 'contextmenu'], i, len; for (i = 0, len = events.length; i < len; i++) { L.DomEvent[onOff](this._container, events[i], this._fireMouseEvent, this); } if (this.options.trackResize) { L.DomEvent[onOff](window, 'resize', this._onResize, this); } }, _onResize: function () { L.Util.cancelAnimFrame(this._resizeRequest); this._resizeRequest = L.Util.requestAnimFrame( this.invalidateSize, this, false, this._container); }, _onMouseClick: function (e) { // jshint camelcase: false if (!this._loaded || (!e._simulated && this.dragging && this.dragging.moved()) || e._leaflet_stop) { return; } this.fire('preclick'); this._fireMouseEvent(e); }, _fireMouseEvent: function (e) { // jshint camelcase: false if (!this._loaded || e._leaflet_stop) { return; } var type = e.type; type = (type === 'mouseenter' ? 'mouseover' : (type === 'mouseleave' ? 'mouseout' : type)); if (!this.hasEventListeners(type)) { return; } if (type === 'contextmenu') { L.DomEvent.preventDefault(e); } var containerPoint = this.mouseEventToContainerPoint(e), layerPoint = this.containerPointToLayerPoint(containerPoint), latlng = this.layerPointToLatLng(layerPoint); this.fire(type, { latlng: latlng, layerPoint: layerPoint, containerPoint: containerPoint, originalEvent: e }); }, _onTileLayerLoad: function () { this._tileLayersToLoad--; if (this._tileLayersNum && !this._tileLayersToLoad) { this.fire('tilelayersload'); } }, _clearHandlers: function () { for (var i = 0, len = this._handlers.length; i < len; i++) { this._handlers[i].disable(); } }, whenReady: function (callback, context) { if (this._loaded) { callback.call(context || this, this); } else { this.on('load', callback, context); } return this; }, _layerAdd: function (layer) { layer.onAdd(this); this.fire('layeradd', {layer: layer}); }, // private methods for getting map state _getMapPanePos: function () { return L.DomUtil.getPosition(this._mapPane); }, _moved: function () { var pos = this._getMapPanePos(); return pos && !pos.equals([0, 0]); }, _getTopLeftPoint: function () { return this.getPixelOrigin().subtract(this._getMapPanePos()); }, _getNewTopLeftPoint: function (center, zoom) { var viewHalf = this.getSize()._divideBy(2); // TODO round on display, not calculation to increase precision? return this.project(center, zoom)._subtract(viewHalf)._round(); }, _latLngToNewLayerPoint: function (latlng, newZoom, newCenter) { var topLeft = this._getNewTopLeftPoint(newCenter, newZoom).add(this._getMapPanePos()); return this.project(latlng, newZoom)._subtract(topLeft); }, // layer point of the current center _getCenterLayerPoint: function () { return this.containerPointToLayerPoint(this.getSize()._divideBy(2)); }, // offset of the specified place to the current center in pixels _getCenterOffset: function (latlng) { return this.latLngToLayerPoint(latlng).subtract(this._getCenterLayerPoint()); }, _limitZoom: function (zoom) { var min = this.getMinZoom(), max = this.getMaxZoom(); return Math.max(min, Math.min(max, zoom)); } }); L.map = function (id, options) { return new L.Map(id, options); }; /* * Mercator projection that takes into account that the Earth is not a perfect sphere. * Less popular than spherical mercator; used by projections like EPSG:3395. 
*/ L.Projection.Mercator = { MAX_LATITUDE: 85.0840591556, R_MINOR: 6356752.314245179, R_MAJOR: 6378137, project: function (latlng) { // (LatLng) -> Point var d = L.LatLng.DEG_TO_RAD, max = this.MAX_LATITUDE, lat = Math.max(Math.min(max, latlng.lat), -max), r = this.R_MAJOR, r2 = this.R_MINOR, x = latlng.lng * d * r, y = lat * d, tmp = r2 / r, eccent = Math.sqrt(1.0 - tmp * tmp), con = eccent * Math.sin(y); con = Math.pow((1 - con) / (1 + con), eccent * 0.5); var ts = Math.tan(0.5 * ((Math.PI * 0.5) - y)) / con; y = -r * Math.log(ts); return new L.Point(x, y); }, unproject: function (point) { // (Point, Boolean) -> LatLng var d = L.LatLng.RAD_TO_DEG, r = this.R_MAJOR, r2 = this.R_MINOR, lng = point.x * d / r, tmp = r2 / r, eccent = Math.sqrt(1 - (tmp * tmp)), ts = Math.exp(- point.y / r), phi = (Math.PI / 2) - 2 * Math.atan(ts), numIter = 15, tol = 1e-7, i = numIter, dphi = 0.1, con; while ((Math.abs(dphi) > tol) && (--i > 0)) { con = eccent * Math.sin(phi); dphi = (Math.PI / 2) - 2 * Math.atan(ts * Math.pow((1.0 - con) / (1.0 + con), 0.5 * eccent)) - phi; phi += dphi; } return new L.LatLng(phi * d, lng); } }; L.CRS.EPSG3395 = L.extend({}, L.CRS, { code: 'EPSG:3395', projection: L.Projection.Mercator, transformation: (function () { var m = L.Projection.Mercator, r = m.R_MAJOR, r2 = m.R_MINOR; return new L.Transformation(0.5 / (Math.PI * r), 0.5, -0.5 / (Math.PI * r2), 0.5); }()) }); /* * L.TileLayer is used for standard xyz-numbered tile layers. */ L.TileLayer = L.Class.extend({ includes: L.Mixin.Events, options: { minZoom: 0, maxZoom: 18, tileSize: 256, subdomains: 'abc', errorTileUrl: '', attribution: '', zoomOffset: 0, opacity: 1, /* (undefined works too) zIndex: null, tms: false, continuousWorld: false, noWrap: false, zoomReverse: false, detectRetina: false, reuseTiles: false, bounds: false, */ unloadInvisibleTiles: L.Browser.mobile, updateWhenIdle: L.Browser.mobile }, initialize: function (url, options) { options = L.setOptions(this, options); // detecting retina displays, adjusting tileSize and zoom levels if (options.detectRetina && L.Browser.retina && options.maxZoom > 0) { options.tileSize = Math.floor(options.tileSize / 2); options.zoomOffset++; if (options.minZoom > 0) { options.minZoom--; } this.options.maxZoom--; } if (options.bounds) { options.bounds = L.latLngBounds(options.bounds); } this._url = url; var subdomains = this.options.subdomains; if (typeof subdomains === 'string') { this.options.subdomains = subdomains.split(''); } }, onAdd: function (map) { this._map = map; this._animated = map._zoomAnimated; // create a container div for tiles this._initContainer(); // create an image to clone for tiles this._createTileProto(); // set up events map.on({ 'viewreset': this._reset, 'moveend': this._update }, this); if (this._animated) { map.on({ 'zoomanim': this._animateZoom, 'zoomend': this._endZoomAnim }, this); } if (!this.options.updateWhenIdle) { this._limitedUpdate = L.Util.limitExecByInterval(this._update, 150, this); map.on('move', this._limitedUpdate, this); } this._reset(); this._update(); }, addTo: function (map) { map.addLayer(this); return this; }, onRemove: function (map) { this._container.parentNode.removeChild(this._container); map.off({ 'viewreset': this._reset, 'moveend': this._update }, this); if (this._animated) { map.off({ 'zoomanim': this._animateZoom, 'zoomend': this._endZoomAnim }, this); } if (!this.options.updateWhenIdle) { map.off('move', this._limitedUpdate, this); } this._container = null; this._map = null; }, bringToFront: function () { var pane = 
this._map._panes.tilePane; if (this._container) { pane.appendChild(this._container); this._setAutoZIndex(pane, Math.max); } return this; }, bringToBack: function () { var pane = this._map._panes.tilePane; if (this._container) { pane.insertBefore(this._container, pane.firstChild); this._setAutoZIndex(pane, Math.min); } return this; }, getAttribution: function () { return this.options.attribution; }, getContainer: function () { return this._container; }, setOpacity: function (opacity) { this.options.opacity = opacity; if (this._map) { this._updateOpacity(); } return this; }, setZIndex: function (zIndex) { this.options.zIndex = zIndex; this._updateZIndex(); return this; }, setUrl: function (url, noRedraw) { this._url = url; if (!noRedraw) { this.redraw(); } return this; }, redraw: function () { if (this._map) { this._reset({hard: true}); this._update(); } return this; }, _updateZIndex: function () { if (this._container && this.options.zIndex !== undefined) { this._container.style.zIndex = this.options.zIndex; } }, _setAutoZIndex: function (pane, compare) { var layers = pane.children, edgeZIndex = -compare(Infinity, -Infinity), // -Infinity for max, Infinity for min zIndex, i, len; for (i = 0, len = layers.length; i < len; i++) { if (layers[i] !== this._container) { zIndex = parseInt(layers[i].style.zIndex, 10); if (!isNaN(zIndex)) { edgeZIndex = compare(edgeZIndex, zIndex); } } } this.options.zIndex = this._container.style.zIndex = (isFinite(edgeZIndex) ? edgeZIndex : 0) + compare(1, -1); }, _updateOpacity: function () { var i, tiles = this._tiles; if (L.Browser.ielt9) { for (i in tiles) { L.DomUtil.setOpacity(tiles[i], this.options.opacity); } } else { L.DomUtil.setOpacity(this._container, this.options.opacity); } }, _initContainer: function () { var tilePane = this._map._panes.tilePane; if (!this._container) { this._container = L.DomUtil.create('div', 'leaflet-layer'); this._updateZIndex(); if (this._animated) { var className = 'leaflet-tile-container leaflet-zoom-animated'; this._bgBuffer = L.DomUtil.create('div', className, this._container); this._tileContainer = L.DomUtil.create('div', className, this._container); } else { this._tileContainer = this._container; } tilePane.appendChild(this._container); if (this.options.opacity < 1) { this._updateOpacity(); } } }, _reset: function (e) { for (var key in this._tiles) { this.fire('tileunload', {tile: this._tiles[key]}); } this._tiles = {}; this._tilesToLoad = 0; if (this.options.reuseTiles) { this._unusedTiles = []; } this._tileContainer.innerHTML = ''; if (this._animated && e && e.hard) { this._clearBgBuffer(); } this._initContainer(); }, _update: function () { if (!this._map) { return; } var bounds = this._map.getPixelBounds(), zoom = this._map.getZoom(), tileSize = this.options.tileSize; if (zoom > this.options.maxZoom || zoom < this.options.minZoom) { return; } var tileBounds = L.bounds( bounds.min.divideBy(tileSize)._floor(), bounds.max.divideBy(tileSize)._floor()); this._addTilesFromCenterOut(tileBounds); if (this.options.unloadInvisibleTiles || this.options.reuseTiles) { this._removeOtherTiles(tileBounds); } }, _addTilesFromCenterOut: function (bounds) { var queue = [], center = bounds.getCenter(); var j, i, point; for (j = bounds.min.y; j <= bounds.max.y; j++) { for (i = bounds.min.x; i <= bounds.max.x; i++) { point = new L.Point(i, j); if (this._tileShouldBeLoaded(point)) { queue.push(point); } } } var tilesToLoad = queue.length; if (tilesToLoad === 0) { return; } // load tiles in order of their distance to center queue.sort(function 
(a, b) { return a.distanceTo(center) - b.distanceTo(center); }); var fragment = document.createDocumentFragment(); // if its the first batch of tiles to load if (!this._tilesToLoad) { this.fire('loading'); } this._tilesToLoad += tilesToLoad; for (i = 0; i < tilesToLoad; i++) { this._addTile(queue[i], fragment); } this._tileContainer.appendChild(fragment); }, _tileShouldBeLoaded: function (tilePoint) { if ((tilePoint.x + ':' + tilePoint.y) in this._tiles) { return false; // already loaded } var options = this.options; if (!options.continuousWorld) { var limit = this._getWrapTileNum(); // don't load if exceeds world bounds if ((options.noWrap && (tilePoint.x < 0 || tilePoint.x >= limit)) || tilePoint.y < 0 || tilePoint.y >= limit) { return false; } } if (options.bounds) { var tileSize = options.tileSize, nwPoint = tilePoint.multiplyBy(tileSize), sePoint = nwPoint.add([tileSize, tileSize]), nw = this._map.unproject(nwPoint), se = this._map.unproject(sePoint); // TODO temporary hack, will be removed after refactoring projections // https://github.com/Leaflet/Leaflet/issues/1618 if (!options.continuousWorld && !options.noWrap) { nw = nw.wrap(); se = se.wrap(); } if (!options.bounds.intersects([nw, se])) { return false; } } return true; }, _removeOtherTiles: function (bounds) { var kArr, x, y, key; for (key in this._tiles) { kArr = key.split(':'); x = parseInt(kArr[0], 10); y = parseInt(kArr[1], 10); // remove tile if it's out of bounds if (x < bounds.min.x || x > bounds.max.x || y < bounds.min.y || y > bounds.max.y) { this._removeTile(key); } } }, _removeTile: function (key) { var tile = this._tiles[key]; this.fire('tileunload', {tile: tile, url: tile.src}); if (this.options.reuseTiles) { L.DomUtil.removeClass(tile, 'leaflet-tile-loaded'); this._unusedTiles.push(tile); } else if (tile.parentNode === this._tileContainer) { this._tileContainer.removeChild(tile); } // for https://github.com/CloudMade/Leaflet/issues/137 if (!L.Browser.android) { tile.onload = null; tile.src = L.Util.emptyImageUrl; } delete this._tiles[key]; }, _addTile: function (tilePoint, container) { var tilePos = this._getTilePos(tilePoint); // get unused tile - or create a new tile var tile = this._getTile(); /* Chrome 20 layouts much faster with top/left (verify with timeline, frames) Android 4 browser has display issues with top/left and requires transform instead Android 2 browser requires top/left or tiles disappear on load or first drag (reappear after zoom) https://github.com/CloudMade/Leaflet/issues/866 (other browsers don't currently care) - see debug/hacks/jitter.html for an example */ L.DomUtil.setPosition(tile, tilePos, L.Browser.chrome || L.Browser.android23); this._tiles[tilePoint.x + ':' + tilePoint.y] = tile; this._loadTile(tile, tilePoint); if (tile.parentNode !== this._tileContainer) { container.appendChild(tile); } }, _getZoomForUrl: function () { var options = this.options, zoom = this._map.getZoom(); if (options.zoomReverse) { zoom = options.maxZoom - zoom; } return zoom + options.zoomOffset; }, _getTilePos: function (tilePoint) { var origin = this._map.getPixelOrigin(), tileSize = this.options.tileSize; return tilePoint.multiplyBy(tileSize).subtract(origin); }, // image-specific code (override to implement e.g. 
Canvas or SVG tile layer) getTileUrl: function (tilePoint) { return L.Util.template(this._url, L.extend({ s: this._getSubdomain(tilePoint), z: tilePoint.z, x: tilePoint.x, y: tilePoint.y }, this.options)); }, _getWrapTileNum: function () { // TODO refactor, limit is not valid for non-standard projections return Math.pow(2, this._getZoomForUrl()); }, _adjustTilePoint: function (tilePoint) { var limit = this._getWrapTileNum(); // wrap tile coordinates if (!this.options.continuousWorld && !this.options.noWrap) { tilePoint.x = ((tilePoint.x % limit) + limit) % limit; } if (this.options.tms) { tilePoint.y = limit - tilePoint.y - 1; } tilePoint.z = this._getZoomForUrl(); }, _getSubdomain: function (tilePoint) { var index = Math.abs(tilePoint.x + tilePoint.y) % this.options.subdomains.length; return this.options.subdomains[index]; }, _createTileProto: function () { var img = this._tileImg = L.DomUtil.create('img', 'leaflet-tile'); img.style.width = img.style.height = this.options.tileSize + 'px'; img.galleryimg = 'no'; }, _getTile: function () { if (this.options.reuseTiles && this._unusedTiles.length > 0) { var tile = this._unusedTiles.pop(); this._resetTile(tile); return tile; } return this._createTile(); }, // Override if data stored on a tile needs to be cleaned up before reuse _resetTile: function (/*tile*/) {}, _createTile: function () { var tile = this._tileImg.cloneNode(false); tile.onselectstart = tile.onmousemove = L.Util.falseFn; if (L.Browser.ielt9 && this.options.opacity !== undefined) { L.DomUtil.setOpacity(tile, this.options.opacity); } return tile; }, _loadTile: function (tile, tilePoint) { tile._layer = this; tile.onload = this._tileOnLoad; tile.onerror = this._tileOnError; this._adjustTilePoint(tilePoint); tile.src = this.getTileUrl(tilePoint); }, _tileLoaded: function () { this._tilesToLoad--; if (!this._tilesToLoad) { this.fire('load'); if (this._animated) { // clear scaled tiles after all new tiles are loaded (for performance) clearTimeout(this._clearBgBufferTimer); this._clearBgBufferTimer = setTimeout(L.bind(this._clearBgBuffer, this), 500); } } }, _tileOnLoad: function () { var layer = this._layer; //Only if we are loading an actual image if (this.src !== L.Util.emptyImageUrl) { L.DomUtil.addClass(this, 'leaflet-tile-loaded'); layer.fire('tileload', { tile: this, url: this.src }); } layer._tileLoaded(); }, _tileOnError: function () { var layer = this._layer; layer.fire('tileerror', { tile: this, url: this.src }); var newUrl = layer.options.errorTileUrl; if (newUrl) { this.src = newUrl; } layer._tileLoaded(); } }); L.tileLayer = function (url, options) { return new L.TileLayer(url, options); }; /* * L.TileLayer.WMS is used for putting WMS tile layers on the map. 
*/ L.TileLayer.WMS = L.TileLayer.extend({ defaultWmsParams: { service: 'WMS', request: 'GetMap', version: '1.1.1', layers: '', styles: '', format: 'image/jpeg', transparent: false }, initialize: function (url, options) { // (String, Object) this._url = url; var wmsParams = L.extend({}, this.defaultWmsParams), tileSize = options.tileSize || this.options.tileSize; if (options.detectRetina && L.Browser.retina) { wmsParams.width = wmsParams.height = tileSize * 2; } else { wmsParams.width = wmsParams.height = tileSize; } for (var i in options) { // all keys that are not TileLayer options go to WMS params if (!this.options.hasOwnProperty(i) && i !== 'crs') { wmsParams[i] = options[i]; } } this.wmsParams = wmsParams; L.setOptions(this, options); }, onAdd: function (map) { this._crs = this.options.crs || map.options.crs; var projectionKey = parseFloat(this.wmsParams.version) >= 1.3 ? 'crs' : 'srs'; this.wmsParams[projectionKey] = this._crs.code; L.TileLayer.prototype.onAdd.call(this, map); }, getTileUrl: function (tilePoint, zoom) { // (Point, Number) -> String var map = this._map, tileSize = this.options.tileSize, nwPoint = tilePoint.multiplyBy(tileSize), sePoint = nwPoint.add([tileSize, tileSize]), nw = this._crs.project(map.unproject(nwPoint, zoom)), se = this._crs.project(map.unproject(sePoint, zoom)), bbox = [nw.x, se.y, se.x, nw.y].join(','), url = L.Util.template(this._url, {s: this._getSubdomain(tilePoint)}); return url + L.Util.getParamString(this.wmsParams, url, true) + '&BBOX=' + bbox; }, setParams: function (params, noRedraw) { L.extend(this.wmsParams, params); if (!noRedraw) { this.redraw(); } return this; } }); L.tileLayer.wms = function (url, options) { return new L.TileLayer.WMS(url, options); }; /* * L.TileLayer.Canvas is a class that you can use as a base for creating * dynamically drawn Canvas-based tile layers. */ L.TileLayer.Canvas = L.TileLayer.extend({ options: { async: false }, initialize: function (options) { L.setOptions(this, options); }, redraw: function () { for (var i in this._tiles) { this._redrawTile(this._tiles[i]); } return this; }, _redrawTile: function (tile) { this.drawTile(tile, tile._tilePoint, this._map._zoom); }, _createTileProto: function () { var proto = this._canvasProto = L.DomUtil.create('canvas', 'leaflet-tile'); proto.width = proto.height = this.options.tileSize; }, _createTile: function () { var tile = this._canvasProto.cloneNode(false); tile.onselectstart = tile.onmousemove = L.Util.falseFn; return tile; }, _loadTile: function (tile, tilePoint) { tile._layer = this; tile._tilePoint = tilePoint; this._redrawTile(tile); if (!this.options.async) { this.tileDrawn(tile); } }, drawTile: function (/*tile, tilePoint*/) { // override with rendering code }, tileDrawn: function (tile) { this._tileOnLoad.call(tile); } }); L.tileLayer.canvas = function (options) { return new L.TileLayer.Canvas(options); }; /* * L.ImageOverlay is used to overlay images over the map (to specific geographical bounds). 
*/ L.ImageOverlay = L.Class.extend({ includes: L.Mixin.Events, options: { opacity: 1 }, initialize: function (url, bounds, options) { // (String, LatLngBounds, Object) this._url = url; this._bounds = L.latLngBounds(bounds); L.setOptions(this, options); }, onAdd: function (map) { this._map = map; if (!this._image) { this._initImage(); } map._panes.overlayPane.appendChild(this._image); map.on('viewreset', this._reset, this); if (map.options.zoomAnimation && L.Browser.any3d) { map.on('zoomanim', this._animateZoom, this); } this._reset(); }, onRemove: function (map) { map.getPanes().overlayPane.removeChild(this._image); map.off('viewreset', this._reset, this); if (map.options.zoomAnimation) { map.off('zoomanim', this._animateZoom, this); } }, addTo: function (map) { map.addLayer(this); return this; }, setOpacity: function (opacity) { this.options.opacity = opacity; this._updateOpacity(); return this; }, // TODO remove bringToFront/bringToBack duplication from TileLayer/Path bringToFront: function () { if (this._image) { this._map._panes.overlayPane.appendChild(this._image); } return this; }, bringToBack: function () { var pane = this._map._panes.overlayPane; if (this._image) { pane.insertBefore(this._image, pane.firstChild); } return this; }, _initImage: function () { this._image = L.DomUtil.create('img', 'leaflet-image-layer'); if (this._map.options.zoomAnimation && L.Browser.any3d) { L.DomUtil.addClass(this._image, 'leaflet-zoom-animated'); } else { L.DomUtil.addClass(this._image, 'leaflet-zoom-hide'); } this._updateOpacity(); //TODO createImage util method to remove duplication L.extend(this._image, { galleryimg: 'no', onselectstart: L.Util.falseFn, onmousemove: L.Util.falseFn, onload: L.bind(this._onImageLoad, this), src: this._url }); }, _animateZoom: function (e) { var map = this._map, image = this._image, scale = map.getZoomScale(e.zoom), nw = this._bounds.getNorthWest(), se = this._bounds.getSouthEast(), topLeft = map._latLngToNewLayerPoint(nw, e.zoom, e.center), size = map._latLngToNewLayerPoint(se, e.zoom, e.center)._subtract(topLeft), origin = topLeft._add(size._multiplyBy((1 / 2) * (1 - 1 / scale))); image.style[L.DomUtil.TRANSFORM] = L.DomUtil.getTranslateString(origin) + ' scale(' + scale + ') '; }, _reset: function () { var image = this._image, topLeft = this._map.latLngToLayerPoint(this._bounds.getNorthWest()), size = this._map.latLngToLayerPoint(this._bounds.getSouthEast())._subtract(topLeft); L.DomUtil.setPosition(image, topLeft); image.style.width = size.x + 'px'; image.style.height = size.y + 'px'; }, _onImageLoad: function () { this.fire('load'); }, _updateOpacity: function () { L.DomUtil.setOpacity(this._image, this.options.opacity); } }); L.imageOverlay = function (url, bounds, options) { return new L.ImageOverlay(url, bounds, options); }; /* * L.Icon is an image-based icon class that you can use with L.Marker for custom markers. 
*/ L.Icon = L.Class.extend({ options: { /* iconUrl: (String) (required) iconRetinaUrl: (String) (optional, used for retina devices if detected) iconSize: (Point) (can be set through CSS) iconAnchor: (Point) (centered by default, can be set in CSS with negative margins) popupAnchor: (Point) (if not specified, popup opens in the anchor point) shadowUrl: (String) (no shadow by default) shadowRetinaUrl: (String) (optional, used for retina devices if detected) shadowSize: (Point) shadowAnchor: (Point) */ className: '' }, initialize: function (options) { L.setOptions(this, options); }, createIcon: function (oldIcon) { return this._createIcon('icon', oldIcon); }, createShadow: function (oldIcon) { return this._createIcon('shadow', oldIcon); }, _createIcon: function (name, oldIcon) { var src = this._getIconUrl(name); if (!src) { if (name === 'icon') { throw new Error('iconUrl not set in Icon options (see the docs).'); } return null; } var img; if (!oldIcon || oldIcon.tagName !== 'IMG') { img = this._createImg(src); } else { img = this._createImg(src, oldIcon); } this._setIconStyles(img, name); return img; }, _setIconStyles: function (img, name) { var options = this.options, size = L.point(options[name + 'Size']), anchor; if (name === 'shadow') { anchor = L.point(options.shadowAnchor || options.iconAnchor); } else { anchor = L.point(options.iconAnchor); } if (!anchor && size) { anchor = size.divideBy(2, true); } img.className = 'leaflet-marker-' + name + ' ' + options.className; if (anchor) { img.style.marginLeft = (-anchor.x) + 'px'; img.style.marginTop = (-anchor.y) + 'px'; } if (size) { img.style.width = size.x + 'px'; img.style.height = size.y + 'px'; } }, _createImg: function (src, el) { if (!L.Browser.ie6) { if (!el) { el = document.createElement('img'); } el.src = src; } else { if (!el) { el = document.createElement('div'); } el.style.filter = 'progid:DXImageTransform.Microsoft.AlphaImageLoader(src="' + src + '")'; } return el; }, _getIconUrl: function (name) { if (L.Browser.retina && this.options[name + 'RetinaUrl']) { return this.options[name + 'RetinaUrl']; } return this.options[name + 'Url']; } }); L.icon = function (options) { return new L.Icon(options); }; /* * L.Icon.Default is the blue marker icon used by default in Leaflet. */ L.Icon.Default = L.Icon.extend({ options: { iconSize: [25, 41], iconAnchor: [12, 41], popupAnchor: [1, -34], shadowSize: [41, 41] }, _getIconUrl: function (name) { var key = name + 'Url'; if (this.options[key]) { return this.options[key]; } if (L.Browser.retina && name === 'icon') { name += '-2x'; } var path = L.Icon.Default.imagePath; if (!path) { throw new Error('Couldn\'t autodetect L.Icon.Default.imagePath, set it manually.'); } return path + '/marker-' + name + '.png'; } }); L.Icon.Default.imagePath = (function () { var scripts = document.getElementsByTagName('script'), leafletRe = /[\/^]leaflet[\-\._]?([\w\-\._]*)\.js\??/; var i, len, src, matches, path; for (i = 0, len = scripts.length; i < len; i++) { src = scripts[i].src; matches = src.match(leafletRe); if (matches) { path = src.split(leafletRe)[0]; return (path ? path + '/' : '') + 'images'; } } }()); /* * L.Marker is used to display clickable/draggable icons on the map. 
*/ L.Marker = L.Class.extend({ includes: L.Mixin.Events, options: { icon: new L.Icon.Default(), title: '', clickable: true, draggable: false, keyboard: true, zIndexOffset: 0, opacity: 1, riseOnHover: false, riseOffset: 250 }, initialize: function (latlng, options) { L.setOptions(this, options); this._latlng = L.latLng(latlng); }, onAdd: function (map) { this._map = map; map.on('viewreset', this.update, this); this._initIcon(); this.update(); if (map.options.zoomAnimation && map.options.markerZoomAnimation) { map.on('zoomanim', this._animateZoom, this); } }, addTo: function (map) { map.addLayer(this); return this; }, onRemove: function (map) { if (this.dragging) { this.dragging.disable(); } this._removeIcon(); this._removeShadow(); this.fire('remove'); map.off({ 'viewreset': this.update, 'zoomanim': this._animateZoom }, this); this._map = null; }, getLatLng: function () { return this._latlng; }, setLatLng: function (latlng) { this._latlng = L.latLng(latlng); this.update(); return this.fire('move', { latlng: this._latlng }); }, setZIndexOffset: function (offset) { this.options.zIndexOffset = offset; this.update(); return this; }, setIcon: function (icon) { this.options.icon = icon; if (this._map) { this._initIcon(); this.update(); } return this; }, update: function () { if (this._icon) { var pos = this._map.latLngToLayerPoint(this._latlng).round(); this._setPos(pos); } return this; }, _initIcon: function () { var options = this.options, map = this._map, animation = (map.options.zoomAnimation && map.options.markerZoomAnimation), classToAdd = animation ? 'leaflet-zoom-animated' : 'leaflet-zoom-hide'; var icon = options.icon.createIcon(this._icon), addIcon = false; // if we're not reusing the icon, remove the old one and init new one if (icon !== this._icon) { if (this._icon) { this._removeIcon(); } addIcon = true; if (options.title) { icon.title = options.title; } } L.DomUtil.addClass(icon, classToAdd); if (options.keyboard) { icon.tabIndex = '0'; } this._icon = icon; this._initInteraction(); if (options.riseOnHover) { L.DomEvent .on(icon, 'mouseover', this._bringToFront, this) .on(icon, 'mouseout', this._resetZIndex, this); } var newShadow = options.icon.createShadow(this._shadow), addShadow = false; if (newShadow !== this._shadow) { this._removeShadow(); addShadow = true; if (newShadow) { L.DomUtil.addClass(newShadow, classToAdd); } } this._shadow = newShadow; if (options.opacity < 1) { this._updateOpacity(); } var panes = this._map._panes; if (addIcon) { panes.markerPane.appendChild(this._icon); } if (newShadow && addShadow) { panes.shadowPane.appendChild(this._shadow); } }, _removeIcon: function () { if (this.options.riseOnHover) { L.DomEvent .off(this._icon, 'mouseover', this._bringToFront) .off(this._icon, 'mouseout', this._resetZIndex); } this._map._panes.markerPane.removeChild(this._icon); this._icon = null; }, _removeShadow: function () { if (this._shadow) { this._map._panes.shadowPane.removeChild(this._shadow); } this._shadow = null; }, _setPos: function (pos) { L.DomUtil.setPosition(this._icon, pos); if (this._shadow) { L.DomUtil.setPosition(this._shadow, pos); } this._zIndex = pos.y + this.options.zIndexOffset; this._resetZIndex(); }, _updateZIndex: function (offset) { this._icon.style.zIndex = this._zIndex + offset; }, _animateZoom: function (opt) { var pos = this._map._latLngToNewLayerPoint(this._latlng, opt.zoom, opt.center); this._setPos(pos); }, _initInteraction: function () { if (!this.options.clickable) { return; } // TODO refactor into something shared with Map/Path/etc. 
to DRY it up var icon = this._icon, events = ['dblclick', 'mousedown', 'mouseover', 'mouseout', 'contextmenu']; L.DomUtil.addClass(icon, 'leaflet-clickable'); L.DomEvent.on(icon, 'click', this._onMouseClick, this); L.DomEvent.on(icon, 'keypress', this._onKeyPress, this); for (var i = 0; i < events.length; i++) { L.DomEvent.on(icon, events[i], this._fireMouseEvent, this); } if (L.Handler.MarkerDrag) { this.dragging = new L.Handler.MarkerDrag(this); if (this.options.draggable) { this.dragging.enable(); } } }, _onMouseClick: function (e) { var wasDragged = this.dragging && this.dragging.moved(); if (this.hasEventListeners(e.type) || wasDragged) { L.DomEvent.stopPropagation(e); } if (wasDragged) { return; } if ((!this.dragging || !this.dragging._enabled) && this._map.dragging && this._map.dragging.moved()) { return; } this.fire(e.type, { originalEvent: e, latlng: this._latlng }); }, _onKeyPress: function (e) { if (e.keyCode === 13) { this.fire('click', { originalEvent: e, latlng: this._latlng }); } }, _fireMouseEvent: function (e) { this.fire(e.type, { originalEvent: e, latlng: this._latlng }); // TODO proper custom event propagation // this line will always be called if marker is in a FeatureGroup if (e.type === 'contextmenu' && this.hasEventListeners(e.type)) { L.DomEvent.preventDefault(e); } if (e.type !== 'mousedown') { L.DomEvent.stopPropagation(e); } else { L.DomEvent.preventDefault(e); } }, setOpacity: function (opacity) { this.options.opacity = opacity; if (this._map) { this._updateOpacity(); } }, _updateOpacity: function () { L.DomUtil.setOpacity(this._icon, this.options.opacity); if (this._shadow) { L.DomUtil.setOpacity(this._shadow, this.options.opacity); } }, _bringToFront: function () { this._updateZIndex(this.options.riseOffset); }, _resetZIndex: function () { this._updateZIndex(0); } }); L.marker = function (latlng, options) { return new L.Marker(latlng, options); }; /* * L.DivIcon is a lightweight HTML-based icon class (as opposed to the image-based L.Icon) * to use with L.Marker. */ L.DivIcon = L.Icon.extend({ options: { iconSize: [12, 12], // also can be set through CSS /* iconAnchor: (Point) popupAnchor: (Point) html: (String) bgPos: (Point) */ className: 'leaflet-div-icon', html: false }, createIcon: function (oldIcon) { var div = (oldIcon && oldIcon.tagName === 'DIV') ? oldIcon : document.createElement('div'), options = this.options; if (options.html !== false) { div.innerHTML = options.html; } else { div.innerHTML = ''; } if (options.bgPos) { div.style.backgroundPosition = (-options.bgPos.x) + 'px ' + (-options.bgPos.y) + 'px'; } this._setIconStyles(div, 'icon'); return div; }, createShadow: function () { return null; } }); L.divIcon = function (options) { return new L.DivIcon(options); }; /* * L.Popup is used for displaying popups on the map. 
*/ L.Map.mergeOptions({ closePopupOnClick: true }); L.Popup = L.Class.extend({ includes: L.Mixin.Events, options: { minWidth: 50, maxWidth: 300, maxHeight: null, autoPan: true, closeButton: true, offset: [0, 7], autoPanPadding: [5, 5], keepInView: false, className: '', zoomAnimation: true }, initialize: function (options, source) { L.setOptions(this, options); this._source = source; this._animated = L.Browser.any3d && this.options.zoomAnimation; this._isOpen = false; }, onAdd: function (map) { this._map = map; if (!this._container) { this._initLayout(); } this._updateContent(); var animFade = map.options.fadeAnimation; if (animFade) { L.DomUtil.setOpacity(this._container, 0); } map._panes.popupPane.appendChild(this._container); map.on(this._getEvents(), this); this._update(); if (animFade) { L.DomUtil.setOpacity(this._container, 1); } this.fire('open'); map.fire('popupopen', {popup: this}); if (this._source) { this._source.fire('popupopen', {popup: this}); } }, addTo: function (map) { map.addLayer(this); return this; }, openOn: function (map) { map.openPopup(this); return this; }, onRemove: function (map) { map._panes.popupPane.removeChild(this._container); L.Util.falseFn(this._container.offsetWidth); // force reflow map.off(this._getEvents(), this); if (map.options.fadeAnimation) { L.DomUtil.setOpacity(this._container, 0); } this._map = null; this.fire('close'); map.fire('popupclose', {popup: this}); if (this._source) { this._source.fire('popupclose', {popup: this}); } }, setLatLng: function (latlng) { this._latlng = L.latLng(latlng); this._update(); return this; }, setContent: function (content) { this._content = content; this._update(); return this; }, _getEvents: function () { var events = { viewreset: this._updatePosition }; if (this._animated) { events.zoomanim = this._zoomAnimation; } if ('closeOnClick' in this.options ? this.options.closeOnClick : this._map.options.closePopupOnClick) { events.preclick = this._close; } if (this.options.keepInView) { events.moveend = this._adjustPan; } return events; }, _close: function () { if (this._map) { this._map.closePopup(this); } }, _initLayout: function () { var prefix = 'leaflet-popup', containerClass = prefix + ' ' + this.options.className + ' leaflet-zoom-' + (this._animated ? 
'animated' : 'hide'), container = this._container = L.DomUtil.create('div', containerClass), closeButton; if (this.options.closeButton) { closeButton = this._closeButton = L.DomUtil.create('a', prefix + '-close-button', container); closeButton.href = '#close'; closeButton.innerHTML = '&#215;'; L.DomEvent.disableClickPropagation(closeButton); L.DomEvent.on(closeButton, 'click', this._onCloseButtonClick, this); } var wrapper = this._wrapper = L.DomUtil.create('div', prefix + '-content-wrapper', container); L.DomEvent.disableClickPropagation(wrapper); this._contentNode = L.DomUtil.create('div', prefix + '-content', wrapper); L.DomEvent.on(this._contentNode, 'mousewheel', L.DomEvent.stopPropagation); L.DomEvent.on(wrapper, 'contextmenu', L.DomEvent.stopPropagation); this._tipContainer = L.DomUtil.create('div', prefix + '-tip-container', container); this._tip = L.DomUtil.create('div', prefix + '-tip', this._tipContainer); }, _update: function () { if (!this._map) { return; } this._container.style.visibility = 'hidden'; this._updateContent(); this._updateLayout(); this._updatePosition(); this._container.style.visibility = ''; this._adjustPan(); }, _updateContent: function () { if (!this._content) { return; } if (typeof this._content === 'string') { this._contentNode.innerHTML = this._content; } else { while (this._contentNode.hasChildNodes()) { this._contentNode.removeChild(this._contentNode.firstChild); } this._contentNode.appendChild(this._content); } this.fire('contentupdate'); }, _updateLayout: function () { var container = this._contentNode, style = container.style; style.width = ''; style.whiteSpace = 'nowrap'; var width = container.offsetWidth; width = Math.min(width, this.options.maxWidth); width = Math.max(width, this.options.minWidth); style.width = (width + 1) + 'px'; style.whiteSpace = ''; style.height = ''; var height = container.offsetHeight, maxHeight = this.options.maxHeight, scrolledClass = 'leaflet-popup-scrolled'; if (maxHeight && height > maxHeight) { style.height = maxHeight + 'px'; L.DomUtil.addClass(container, scrolledClass); } else { L.DomUtil.removeClass(container, scrolledClass); } this._containerWidth = this._container.offsetWidth; }, _updatePosition: function () { if (!this._map) { return; } var pos = this._map.latLngToLayerPoint(this._latlng), animated = this._animated, offset = L.point(this.options.offset); if (animated) { L.DomUtil.setPosition(this._container, pos); } this._containerBottom = -offset.y - (animated ? 0 : pos.y); this._containerLeft = -Math.round(this._containerWidth / 2) + offset.x + (animated ? 
0 : pos.x); // bottom position the popup in case the height of the popup changes (images loading etc) this._container.style.bottom = this._containerBottom + 'px'; this._container.style.left = this._containerLeft + 'px'; }, _zoomAnimation: function (opt) { var pos = this._map._latLngToNewLayerPoint(this._latlng, opt.zoom, opt.center); L.DomUtil.setPosition(this._container, pos); }, _adjustPan: function () { if (!this.options.autoPan) { return; } var map = this._map, containerHeight = this._container.offsetHeight, containerWidth = this._containerWidth, layerPos = new L.Point(this._containerLeft, -containerHeight - this._containerBottom); if (this._animated) { layerPos._add(L.DomUtil.getPosition(this._container)); } var containerPos = map.layerPointToContainerPoint(layerPos), padding = L.point(this.options.autoPanPadding), size = map.getSize(), dx = 0, dy = 0; if (containerPos.x + containerWidth > size.x) { // right dx = containerPos.x + containerWidth - size.x + padding.x; } if (containerPos.x - dx < 0) { // left dx = containerPos.x - padding.x; } if (containerPos.y + containerHeight > size.y) { // bottom dy = containerPos.y + containerHeight - size.y + padding.y; } if (containerPos.y - dy < 0) { // top dy = containerPos.y - padding.y; } if (dx || dy) { map .fire('autopanstart') .panBy([dx, dy]); } }, _onCloseButtonClick: function (e) { this._close(); L.DomEvent.stop(e); } }); L.popup = function (options, source) { return new L.Popup(options, source); }; L.Map.include({ openPopup: function (popup, latlng, options) { // (Popup) or (String || HTMLElement, LatLng[, Object]) this.closePopup(); if (!(popup instanceof L.Popup)) { var content = popup; popup = new L.Popup(options) .setLatLng(latlng) .setContent(content); } popup._isOpen = true; this._popup = popup; return this.addLayer(popup); }, closePopup: function (popup) { if (!popup || popup === this._popup) { popup = this._popup; this._popup = null; } if (popup) { this.removeLayer(popup); popup._isOpen = false; } return this; } }); /* * Popup extension to L.Marker, adding popup-related methods. 
*/ L.Marker.include({ openPopup: function () { if (this._popup && this._map && !this._map.hasLayer(this._popup)) { this._popup.setLatLng(this._latlng); this._map.openPopup(this._popup); } return this; }, closePopup: function () { if (this._popup) { this._popup._close(); } return this; }, togglePopup: function () { if (this._popup) { if (this._popup._isOpen) { this.closePopup(); } else { this.openPopup(); } } return this; }, bindPopup: function (content, options) { var anchor = L.point(this.options.icon.options.popupAnchor || [0, 0]); anchor = anchor.add(L.Popup.prototype.options.offset); if (options && options.offset) { anchor = anchor.add(options.offset); } options = L.extend({offset: anchor}, options); if (!this._popup) { this .on('click', this.togglePopup, this) .on('remove', this.closePopup, this) .on('move', this._movePopup, this); } if (content instanceof L.Popup) { L.setOptions(content, options); this._popup = content; } else { this._popup = new L.Popup(options, this) .setContent(content); } return this; }, setPopupContent: function (content) { if (this._popup) { this._popup.setContent(content); } return this; }, unbindPopup: function () { if (this._popup) { this._popup = null; this .off('click', this.togglePopup) .off('remove', this.closePopup) .off('move', this._movePopup); } return this; }, _movePopup: function (e) { this._popup.setLatLng(e.latlng); } }); /* * L.LayerGroup is a class to combine several layers into one so that * you can manipulate the group (e.g. add/remove it) as one layer. */ L.LayerGroup = L.Class.extend({ initialize: function (layers) { this._layers = {}; var i, len; if (layers) { for (i = 0, len = layers.length; i < len; i++) { this.addLayer(layers[i]); } } }, addLayer: function (layer) { var id = this.getLayerId(layer); this._layers[id] = layer; if (this._map) { this._map.addLayer(layer); } return this; }, removeLayer: function (layer) { var id = layer in this._layers ? layer : this.getLayerId(layer); if (this._map && this._layers[id]) { this._map.removeLayer(this._layers[id]); } delete this._layers[id]; return this; }, hasLayer: function (layer) { if (!layer) { return false; } return (layer in this._layers || this.getLayerId(layer) in this._layers); }, clearLayers: function () { this.eachLayer(this.removeLayer, this); return this; }, invoke: function (methodName) { var args = Array.prototype.slice.call(arguments, 1), i, layer; for (i in this._layers) { layer = this._layers[i]; if (layer[methodName]) { layer[methodName].apply(layer, args); } } return this; }, onAdd: function (map) { this._map = map; this.eachLayer(map.addLayer, map); }, onRemove: function (map) { this.eachLayer(map.removeLayer, map); this._map = null; }, addTo: function (map) { map.addLayer(this); return this; }, eachLayer: function (method, context) { for (var i in this._layers) { method.call(context, this._layers[i]); } return this; }, getLayer: function (id) { return this._layers[id]; }, getLayers: function () { var layers = []; for (var i in this._layers) { layers.push(this._layers[i]); } return layers; }, setZIndex: function (zIndex) { return this.invoke('setZIndex', zIndex); }, getLayerId: function (layer) { return L.stamp(layer); } }); L.layerGroup = function (layers) { return new L.LayerGroup(layers); }; /* * L.FeatureGroup extends L.LayerGroup by introducing mouse events and additional methods * shared between a group of interactive layers (like vectors or markers). 
*/ L.FeatureGroup = L.LayerGroup.extend({ includes: L.Mixin.Events, statics: { EVENTS: 'click dblclick mouseover mouseout mousemove contextmenu popupopen popupclose' }, addLayer: function (layer) { if (this.hasLayer(layer)) { return this; } layer.on(L.FeatureGroup.EVENTS, this._propagateEvent, this); L.LayerGroup.prototype.addLayer.call(this, layer); if (this._popupContent && layer.bindPopup) { layer.bindPopup(this._popupContent, this._popupOptions); } return this.fire('layeradd', {layer: layer}); }, removeLayer: function (layer) { if (layer in this._layers) { layer = this._layers[layer]; } layer.off(L.FeatureGroup.EVENTS, this._propagateEvent, this); L.LayerGroup.prototype.removeLayer.call(this, layer); if (this._popupContent) { this.invoke('unbindPopup'); } return this.fire('layerremove', {layer: layer}); }, bindPopup: function (content, options) { this._popupContent = content; this._popupOptions = options; return this.invoke('bindPopup', content, options); }, setStyle: function (style) { return this.invoke('setStyle', style); }, bringToFront: function () { return this.invoke('bringToFront'); }, bringToBack: function () { return this.invoke('bringToBack'); }, getBounds: function () { var bounds = new L.LatLngBounds(); this.eachLayer(function (layer) { bounds.extend(layer instanceof L.Marker ? layer.getLatLng() : layer.getBounds()); }); return bounds; }, _propagateEvent: function (e) { if (!e.layer) { e.layer = e.target; } e.target = this; this.fire(e.type, e); } }); L.featureGroup = function (layers) { return new L.FeatureGroup(layers); }; /* * L.Path is a base class for rendering vector paths on a map. Inherited by Polyline, Circle, etc. */ L.Path = L.Class.extend({ includes: [L.Mixin.Events], statics: { // how much to extend the clip area around the map view // (relative to its size, e.g. 0.5 is half the screen in each direction) // set it so that SVG element doesn't exceed 1280px (vectors flicker on dragend if it is) CLIP_PADDING: L.Browser.mobile ? 
Math.max(0, Math.min(0.5, (1280 / Math.max(window.innerWidth, window.innerHeight) - 1) / 2)) : 0.5 }, options: { stroke: true, color: '#0033ff', dashArray: null, weight: 5, opacity: 0.5, fill: false, fillColor: null, //same as color by default fillOpacity: 0.2, clickable: true }, initialize: function (options) { L.setOptions(this, options); }, onAdd: function (map) { this._map = map; if (!this._container) { this._initElements(); this._initEvents(); } this.projectLatlngs(); this._updatePath(); if (this._container) { this._map._pathRoot.appendChild(this._container); } this.fire('add'); map.on({ 'viewreset': this.projectLatlngs, 'moveend': this._updatePath }, this); }, addTo: function (map) { map.addLayer(this); return this; }, onRemove: function (map) { map._pathRoot.removeChild(this._container); // Need to fire remove event before we set _map to null as the event hooks might need the object this.fire('remove'); this._map = null; if (L.Browser.vml) { this._container = null; this._stroke = null; this._fill = null; } map.off({ 'viewreset': this.projectLatlngs, 'moveend': this._updatePath }, this); }, projectLatlngs: function () { // do all projection stuff here }, setStyle: function (style) { L.setOptions(this, style); if (this._container) { this._updateStyle(); } return this; }, redraw: function () { if (this._map) { this.projectLatlngs(); this._updatePath(); } return this; } }); L.Map.include({ _updatePathViewport: function () { var p = L.Path.CLIP_PADDING, size = this.getSize(), panePos = L.DomUtil.getPosition(this._mapPane), min = panePos.multiplyBy(-1)._subtract(size.multiplyBy(p)._round()), max = min.add(size.multiplyBy(1 + p * 2)._round()); this._pathViewport = new L.Bounds(min, max); } }); /* * Extends L.Path with SVG-specific rendering code. */ L.Path.SVG_NS = 'http://www.w3.org/2000/svg'; L.Browser.svg = !!(document.createElementNS && document.createElementNS(L.Path.SVG_NS, 'svg').createSVGRect); L.Path = L.Path.extend({ statics: { SVG: L.Browser.svg }, bringToFront: function () { var root = this._map._pathRoot, path = this._container; if (path && root.lastChild !== path) { root.appendChild(path); } return this; }, bringToBack: function () { var root = this._map._pathRoot, path = this._container, first = root.firstChild; if (path && first !== path) { root.insertBefore(path, first); } return this; }, getPathString: function () { // form path string here }, _createElement: function (name) { return document.createElementNS(L.Path.SVG_NS, name); }, _initElements: function () { this._map._initPathRoot(); this._initPath(); this._initStyle(); }, _initPath: function () { this._container = this._createElement('g'); this._path = this._createElement('path'); this._container.appendChild(this._path); }, _initStyle: function () { if (this.options.stroke) { this._path.setAttribute('stroke-linejoin', 'round'); this._path.setAttribute('stroke-linecap', 'round'); } if (this.options.fill) { this._path.setAttribute('fill-rule', 'evenodd'); } if (this.options.pointerEvents) { this._path.setAttribute('pointer-events', this.options.pointerEvents); } if (!this.options.clickable && !this.options.pointerEvents) { this._path.setAttribute('pointer-events', 'none'); } this._updateStyle(); }, _updateStyle: function () { if (this.options.stroke) { this._path.setAttribute('stroke', this.options.color); this._path.setAttribute('stroke-opacity', this.options.opacity); this._path.setAttribute('stroke-width', this.options.weight); if (this.options.dashArray) { this._path.setAttribute('stroke-dasharray', 
this.options.dashArray); } else { this._path.removeAttribute('stroke-dasharray'); } } else { this._path.setAttribute('stroke', 'none'); } if (this.options.fill) { this._path.setAttribute('fill', this.options.fillColor || this.options.color); this._path.setAttribute('fill-opacity', this.options.fillOpacity); } else { this._path.setAttribute('fill', 'none'); } }, _updatePath: function () { var str = this.getPathString(); if (!str) { // fix webkit empty string parsing bug str = 'M0 0'; } this._path.setAttribute('d', str); }, // TODO remove duplication with L.Map _initEvents: function () { if (this.options.clickable) { if (L.Browser.svg || !L.Browser.vml) { this._path.setAttribute('class', 'leaflet-clickable'); } L.DomEvent.on(this._container, 'click', this._onMouseClick, this); var events = ['dblclick', 'mousedown', 'mouseover', 'mouseout', 'mousemove', 'contextmenu']; for (var i = 0; i < events.length; i++) { L.DomEvent.on(this._container, events[i], this._fireMouseEvent, this); } } }, _onMouseClick: function (e) { if (this._map.dragging && this._map.dragging.moved()) { return; } this._fireMouseEvent(e); }, _fireMouseEvent: function (e) { if (!this.hasEventListeners(e.type)) { return; } var map = this._map, containerPoint = map.mouseEventToContainerPoint(e), layerPoint = map.containerPointToLayerPoint(containerPoint), latlng = map.layerPointToLatLng(layerPoint); this.fire(e.type, { latlng: latlng, layerPoint: layerPoint, containerPoint: containerPoint, originalEvent: e }); if (e.type === 'contextmenu') { L.DomEvent.preventDefault(e); } if (e.type !== 'mousemove') { L.DomEvent.stopPropagation(e); } } }); L.Map.include({ _initPathRoot: function () { if (!this._pathRoot) { this._pathRoot = L.Path.prototype._createElement('svg'); this._panes.overlayPane.appendChild(this._pathRoot); if (this.options.zoomAnimation && L.Browser.any3d) { this._pathRoot.setAttribute('class', ' leaflet-zoom-animated'); this.on({ 'zoomanim': this._animatePathZoom, 'zoomend': this._endPathZoom }); } else { this._pathRoot.setAttribute('class', ' leaflet-zoom-hide'); } this.on('moveend', this._updateSvgViewport); this._updateSvgViewport(); } }, _animatePathZoom: function (e) { var scale = this.getZoomScale(e.zoom), offset = this._getCenterOffset(e.center)._multiplyBy(-scale)._add(this._pathViewport.min); this._pathRoot.style[L.DomUtil.TRANSFORM] = L.DomUtil.getTranslateString(offset) + ' scale(' + scale + ') '; this._pathZooming = true; }, _endPathZoom: function () { this._pathZooming = false; }, _updateSvgViewport: function () { if (this._pathZooming) { // Do not update SVGs while a zoom animation is going on otherwise the animation will break. // When the zoom animation ends we will be updated again anyway // This fixes the case where you do a momentum move and zoom while the move is still ongoing. return; } this._updatePathViewport(); var vp = this._pathViewport, min = vp.min, max = vp.max, width = max.x - min.x, height = max.y - min.y, root = this._pathRoot, pane = this._panes.overlayPane; // Hack to make flicker on drag end on mobile webkit less irritating if (L.Browser.mobileWebkit) { pane.removeChild(root); } L.DomUtil.setPosition(root, min); root.setAttribute('width', width); root.setAttribute('height', height); root.setAttribute('viewBox', [min.x, min.y, width, height].join(' ')); if (L.Browser.mobileWebkit) { pane.appendChild(root); } } }); /* * Popup extension to L.Path (polylines, polygons, circles), adding popup-related methods. 
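 *
 * Usage sketch (illustrative values; `map` is an assumed, already created L.Map):
 *   L.circle([51.5, -0.09], 500).addTo(map).bindPopup('I am a circle');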
*/ L.Path.include({ bindPopup: function (content, options) { if (content instanceof L.Popup) { this._popup = content; } else { if (!this._popup || options) { this._popup = new L.Popup(options, this); } this._popup.setContent(content); } if (!this._popupHandlersAdded) { this .on('click', this._openPopup, this) .on('remove', this.closePopup, this); this._popupHandlersAdded = true; } return this; }, unbindPopup: function () { if (this._popup) { this._popup = null; this .off('click', this._openPopup) .off('remove', this.closePopup); this._popupHandlersAdded = false; } return this; }, openPopup: function (latlng) { if (this._popup) { // open the popup from one of the path's points if not specified latlng = latlng || this._latlng || this._latlngs[Math.floor(this._latlngs.length / 2)]; this._openPopup({latlng: latlng}); } return this; }, closePopup: function () { if (this._popup) { this._popup._close(); } return this; }, _openPopup: function (e) { this._popup.setLatLng(e.latlng); this._map.openPopup(this._popup); } }); /* * Vector rendering for IE6-8 through VML. * Thanks to Dmitry Baranovsky and his Raphael library for inspiration! */ L.Browser.vml = !L.Browser.svg && (function () { try { var div = document.createElement('div'); div.innerHTML = '<v:shape adj="1"/>'; var shape = div.firstChild; shape.style.behavior = 'url(#default#VML)'; return shape && (typeof shape.adj === 'object'); } catch (e) { return false; } }()); L.Path = L.Browser.svg || !L.Browser.vml ? L.Path : L.Path.extend({ statics: { VML: true, CLIP_PADDING: 0.02 }, _createElement: (function () { try { document.namespaces.add('lvml', 'urn:schemas-microsoft-com:vml'); return function (name) { return document.createElement('<lvml:' + name + ' class="lvml">'); }; } catch (e) { return function (name) { return document.createElement( '<' + name + ' xmlns="urn:schemas-microsoft.com:vml" class="lvml">'); }; } }()), _initPath: function () { var container = this._container = this._createElement('shape'); L.DomUtil.addClass(container, 'leaflet-vml-shape'); if (this.options.clickable) { L.DomUtil.addClass(container, 'leaflet-clickable'); } container.coordsize = '1 1'; this._path = this._createElement('path'); container.appendChild(this._path); this._map._pathRoot.appendChild(container); }, _initStyle: function () { this._updateStyle(); }, _updateStyle: function () { var stroke = this._stroke, fill = this._fill, options = this.options, container = this._container; container.stroked = options.stroke; container.filled = options.fill; if (options.stroke) { if (!stroke) { stroke = this._stroke = this._createElement('stroke'); stroke.endcap = 'round'; container.appendChild(stroke); } stroke.weight = options.weight + 'px'; stroke.color = options.color; stroke.opacity = options.opacity; if (options.dashArray) { stroke.dashStyle = options.dashArray instanceof Array ? 
options.dashArray.join(' ') : options.dashArray.replace(/( *, *)/g, ' '); } else { stroke.dashStyle = ''; } } else if (stroke) { container.removeChild(stroke); this._stroke = null; } if (options.fill) { if (!fill) { fill = this._fill = this._createElement('fill'); container.appendChild(fill); } fill.color = options.fillColor || options.color; fill.opacity = options.fillOpacity; } else if (fill) { container.removeChild(fill); this._fill = null; } }, _updatePath: function () { var style = this._container.style; style.display = 'none'; this._path.v = this.getPathString() + ' '; // the space fixes IE empty path string bug style.display = ''; } }); L.Map.include(L.Browser.svg || !L.Browser.vml ? {} : { _initPathRoot: function () { if (this._pathRoot) { return; } var root = this._pathRoot = document.createElement('div'); root.className = 'leaflet-vml-container'; this._panes.overlayPane.appendChild(root); this.on('moveend', this._updatePathViewport); this._updatePathViewport(); } }); /* * Vector rendering for all browsers that support canvas. */ L.Browser.canvas = (function () { return !!document.createElement('canvas').getContext; }()); L.Path = (L.Path.SVG && !window.L_PREFER_CANVAS) || !L.Browser.canvas ? L.Path : L.Path.extend({ statics: { //CLIP_PADDING: 0.02, // not sure if there's a need to set it to a small value CANVAS: true, SVG: false }, redraw: function () { if (this._map) { this.projectLatlngs(); this._requestUpdate(); } return this; }, setStyle: function (style) { L.setOptions(this, style); if (this._map) { this._updateStyle(); this._requestUpdate(); } return this; }, onRemove: function (map) { map .off('viewreset', this.projectLatlngs, this) .off('moveend', this._updatePath, this); if (this.options.clickable) { this._map.off('click', this._onClick, this); this._map.off('mousemove', this._onMouseMove, this); } this._requestUpdate(); this._map = null; }, _requestUpdate: function () { if (this._map && !L.Path._updateRequest) { L.Path._updateRequest = L.Util.requestAnimFrame(this._fireMapMoveEnd, this._map); } }, _fireMapMoveEnd: function () { L.Path._updateRequest = null; this.fire('moveend'); }, _initElements: function () { this._map._initPathRoot(); this._ctx = this._map._canvasCtx; }, _updateStyle: function () { var options = this.options; if (options.stroke) { this._ctx.lineWidth = options.weight; this._ctx.strokeStyle = options.color; } if (options.fill) { this._ctx.fillStyle = options.fillColor || options.color; } }, _drawPath: function () { var i, j, len, len2, point, drawMethod; this._ctx.beginPath(); for (i = 0, len = this._parts.length; i < len; i++) { for (j = 0, len2 = this._parts[i].length; j < len2; j++) { point = this._parts[i][j]; drawMethod = (j === 0 ? 
'move' : 'line') + 'To'; this._ctx[drawMethod](point.x, point.y); } // TODO refactor ugly hack if (this instanceof L.Polygon) { this._ctx.closePath(); } } }, _checkIfEmpty: function () { return !this._parts.length; }, _updatePath: function () { if (this._checkIfEmpty()) { return; } var ctx = this._ctx, options = this.options; this._drawPath(); ctx.save(); this._updateStyle(); if (options.fill) { ctx.globalAlpha = options.fillOpacity; ctx.fill(); } if (options.stroke) { ctx.globalAlpha = options.opacity; ctx.stroke(); } ctx.restore(); // TODO optimization: 1 fill/stroke for all features with equal style instead of 1 for each feature }, _initEvents: function () { if (this.options.clickable) { // TODO dblclick this._map.on('mousemove', this._onMouseMove, this); this._map.on('click', this._onClick, this); } }, _onClick: function (e) { if (this._containsPoint(e.layerPoint)) { this.fire('click', e); } }, _onMouseMove: function (e) { if (!this._map || this._map._animatingZoom) { return; } // TODO don't do on each move if (this._containsPoint(e.layerPoint)) { this._ctx.canvas.style.cursor = 'pointer'; this._mouseInside = true; this.fire('mouseover', e); } else if (this._mouseInside) { this._ctx.canvas.style.cursor = ''; this._mouseInside = false; this.fire('mouseout', e); } } }); L.Map.include((L.Path.SVG && !window.L_PREFER_CANVAS) || !L.Browser.canvas ? {} : { _initPathRoot: function () { var root = this._pathRoot, ctx; if (!root) { root = this._pathRoot = document.createElement('canvas'); root.style.position = 'absolute'; ctx = this._canvasCtx = root.getContext('2d'); ctx.lineCap = 'round'; ctx.lineJoin = 'round'; this._panes.overlayPane.appendChild(root); if (this.options.zoomAnimation) { this._pathRoot.className = 'leaflet-zoom-animated'; this.on('zoomanim', this._animatePathZoom); this.on('zoomend', this._endPathZoom); } this.on('moveend', this._updateCanvasViewport); this._updateCanvasViewport(); } }, _updateCanvasViewport: function () { // don't redraw while zooming. See _updateSvgViewport for more details if (this._pathZooming) { return; } this._updatePathViewport(); var vp = this._pathViewport, min = vp.min, size = vp.max.subtract(min), root = this._pathRoot; //TODO check if this works properly on mobile webkit L.DomUtil.setPosition(root, min); root.width = size.x; root.height = size.y; root.getContext('2d').translate(-min.x, -min.y); } }); /* * L.LineUtil contains different utility functions for line segments * and polylines (clipping, simplification, distances, etc.) */ /*jshint bitwise:false */ // allow bitwise oprations for this file L.LineUtil = { // Simplify polyline with vertex reduction and Douglas-Peucker simplification. // Improves rendering performance dramatically by lessening the number of points to draw. 
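	// Usage sketch (illustrative values, not part of the original source):
	//   var pts = [L.point(0, 0), L.point(5, 1), L.point(10, 0)];
	//   var fewerPts = L.LineUtil.simplify(pts, 3); // tolerance is in layer pixels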
simplify: function (/*Point[]*/ points, /*Number*/ tolerance) { if (!tolerance || !points.length) { return points.slice(); } var sqTolerance = tolerance * tolerance; // stage 1: vertex reduction points = this._reducePoints(points, sqTolerance); // stage 2: Douglas-Peucker simplification points = this._simplifyDP(points, sqTolerance); return points; }, // distance from a point to a segment between two points pointToSegmentDistance: function (/*Point*/ p, /*Point*/ p1, /*Point*/ p2) { return Math.sqrt(this._sqClosestPointOnSegment(p, p1, p2, true)); }, closestPointOnSegment: function (/*Point*/ p, /*Point*/ p1, /*Point*/ p2) { return this._sqClosestPointOnSegment(p, p1, p2); }, // Douglas-Peucker simplification, see http://en.wikipedia.org/wiki/Douglas-Peucker_algorithm _simplifyDP: function (points, sqTolerance) { var len = points.length, ArrayConstructor = typeof Uint8Array !== undefined + '' ? Uint8Array : Array, markers = new ArrayConstructor(len); markers[0] = markers[len - 1] = 1; this._simplifyDPStep(points, markers, sqTolerance, 0, len - 1); var i, newPoints = []; for (i = 0; i < len; i++) { if (markers[i]) { newPoints.push(points[i]); } } return newPoints; }, _simplifyDPStep: function (points, markers, sqTolerance, first, last) { var maxSqDist = 0, index, i, sqDist; for (i = first + 1; i <= last - 1; i++) { sqDist = this._sqClosestPointOnSegment(points[i], points[first], points[last], true); if (sqDist > maxSqDist) { index = i; maxSqDist = sqDist; } } if (maxSqDist > sqTolerance) { markers[index] = 1; this._simplifyDPStep(points, markers, sqTolerance, first, index); this._simplifyDPStep(points, markers, sqTolerance, index, last); } }, // reduce points that are too close to each other to a single point _reducePoints: function (points, sqTolerance) { var reducedPoints = [points[0]]; for (var i = 1, prev = 0, len = points.length; i < len; i++) { if (this._sqDist(points[i], points[prev]) > sqTolerance) { reducedPoints.push(points[i]); prev = i; } } if (prev < len - 1) { reducedPoints.push(points[len - 1]); } return reducedPoints; }, // Cohen-Sutherland line clipping algorithm. // Used to avoid rendering parts of a polyline that are not currently visible. clipSegment: function (a, b, bounds, useLastCode) { var codeA = useLastCode ? 
this._lastCode : this._getBitCode(a, bounds), codeB = this._getBitCode(b, bounds), codeOut, p, newCode; // save 2nd code to avoid calculating it on the next segment this._lastCode = codeB; while (true) { // if a,b is inside the clip window (trivial accept) if (!(codeA | codeB)) { return [a, b]; // if a,b is outside the clip window (trivial reject) } else if (codeA & codeB) { return false; // other cases } else { codeOut = codeA || codeB; p = this._getEdgeIntersection(a, b, codeOut, bounds); newCode = this._getBitCode(p, bounds); if (codeOut === codeA) { a = p; codeA = newCode; } else { b = p; codeB = newCode; } } } }, _getEdgeIntersection: function (a, b, code, bounds) { var dx = b.x - a.x, dy = b.y - a.y, min = bounds.min, max = bounds.max; if (code & 8) { // top return new L.Point(a.x + dx * (max.y - a.y) / dy, max.y); } else if (code & 4) { // bottom return new L.Point(a.x + dx * (min.y - a.y) / dy, min.y); } else if (code & 2) { // right return new L.Point(max.x, a.y + dy * (max.x - a.x) / dx); } else if (code & 1) { // left return new L.Point(min.x, a.y + dy * (min.x - a.x) / dx); } }, _getBitCode: function (/*Point*/ p, bounds) { var code = 0; if (p.x < bounds.min.x) { // left code |= 1; } else if (p.x > bounds.max.x) { // right code |= 2; } if (p.y < bounds.min.y) { // bottom code |= 4; } else if (p.y > bounds.max.y) { // top code |= 8; } return code; }, // square distance (to avoid unnecessary Math.sqrt calls) _sqDist: function (p1, p2) { var dx = p2.x - p1.x, dy = p2.y - p1.y; return dx * dx + dy * dy; }, // return closest point on segment or distance to that point _sqClosestPointOnSegment: function (p, p1, p2, sqDist) { var x = p1.x, y = p1.y, dx = p2.x - x, dy = p2.y - y, dot = dx * dx + dy * dy, t; if (dot > 0) { t = ((p.x - x) * dx + (p.y - y) * dy) / dot; if (t > 1) { x = p2.x; y = p2.y; } else if (t > 0) { x += dx * t; y += dy * t; } } dx = p.x - x; dy = p.y - y; return sqDist ? dx * dx + dy * dy : new L.Point(x, y); } }; /* * L.Polyline is used to display polylines on a map. 
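 *
 * Usage sketch (illustrative coordinates; `map` is an assumed, already created L.Map):
 *   L.polyline([[51.5, -0.09], [51.51, -0.1]], {color: 'red'}).addTo(map);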
*/ L.Polyline = L.Path.extend({ initialize: function (latlngs, options) { L.Path.prototype.initialize.call(this, options); this._latlngs = this._convertLatLngs(latlngs); }, options: { // how much to simplify the polyline on each zoom level // more = better performance and smoother look, less = more accurate smoothFactor: 1.0, noClip: false }, projectLatlngs: function () { this._originalPoints = []; for (var i = 0, len = this._latlngs.length; i < len; i++) { this._originalPoints[i] = this._map.latLngToLayerPoint(this._latlngs[i]); } }, getPathString: function () { for (var i = 0, len = this._parts.length, str = ''; i < len; i++) { str += this._getPathPartStr(this._parts[i]); } return str; }, getLatLngs: function () { return this._latlngs; }, setLatLngs: function (latlngs) { this._latlngs = this._convertLatLngs(latlngs); return this.redraw(); }, addLatLng: function (latlng) { this._latlngs.push(L.latLng(latlng)); return this.redraw(); }, spliceLatLngs: function () { // (Number index, Number howMany) var removed = [].splice.apply(this._latlngs, arguments); this._convertLatLngs(this._latlngs, true); this.redraw(); return removed; }, closestLayerPoint: function (p) { var minDistance = Infinity, parts = this._parts, p1, p2, minPoint = null; for (var j = 0, jLen = parts.length; j < jLen; j++) { var points = parts[j]; for (var i = 1, len = points.length; i < len; i++) { p1 = points[i - 1]; p2 = points[i]; var sqDist = L.LineUtil._sqClosestPointOnSegment(p, p1, p2, true); if (sqDist < minDistance) { minDistance = sqDist; minPoint = L.LineUtil._sqClosestPointOnSegment(p, p1, p2); } } } if (minPoint) { minPoint.distance = Math.sqrt(minDistance); } return minPoint; }, getBounds: function () { return new L.LatLngBounds(this.getLatLngs()); }, _convertLatLngs: function (latlngs, overwrite) { var i, len, target = overwrite ? latlngs : []; for (i = 0, len = latlngs.length; i < len; i++) { if (L.Util.isArray(latlngs[i]) && typeof latlngs[i][0] !== 'number') { return; } target[i] = L.latLng(latlngs[i]); } return target; }, _initEvents: function () { L.Path.prototype._initEvents.call(this); }, _getPathPartStr: function (points) { var round = L.Path.VML; for (var j = 0, len2 = points.length, str = '', p; j < len2; j++) { p = points[j]; if (round) { p._round(); } str += (j ? 'L' : 'M') + p.x + ' ' + p.y; } return str; }, _clipPoints: function () { var points = this._originalPoints, len = points.length, i, k, segment; if (this.options.noClip) { this._parts = [points]; return; } this._parts = []; var parts = this._parts, vp = this._map._pathViewport, lu = L.LineUtil; for (i = 0, k = 0; i < len - 1; i++) { segment = lu.clipSegment(points[i], points[i + 1], vp, i); if (!segment) { continue; } parts[k] = parts[k] || []; parts[k].push(segment[0]); // if segment goes out of screen, or it's the last one, it's the end of the line part if ((segment[1] !== points[i + 1]) || (i === len - 2)) { parts[k].push(segment[1]); k++; } } }, // simplify each clipped part of the polyline _simplifyPoints: function () { var parts = this._parts, lu = L.LineUtil; for (var i = 0, len = parts.length; i < len; i++) { parts[i] = lu.simplify(parts[i], this.options.smoothFactor); } }, _updatePath: function () { if (!this._map) { return; } this._clipPoints(); this._simplifyPoints(); L.Path.prototype._updatePath.call(this); } }); L.polyline = function (latlngs, options) { return new L.Polyline(latlngs, options); }; /* * L.PolyUtil contains utility functions for polygons (clipping, etc.). 
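 *
 * Usage sketch (illustrative pixel values, not part of the original header):
 *   var clipped = L.PolyUtil.clipPolygon(
 *       [L.point(10, 10), L.point(300, 10), L.point(150, 300)],
 *       new L.Bounds([0, 0], [256, 256]));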
*/ /*jshint bitwise:false */ // allow bitwise operations here L.PolyUtil = {}; /* * Sutherland-Hodgeman polygon clipping algorithm. * Used to avoid rendering parts of a polygon that are not currently visible. */ L.PolyUtil.clipPolygon = function (points, bounds) { var clippedPoints, edges = [1, 4, 2, 8], i, j, k, a, b, len, edge, p, lu = L.LineUtil; for (i = 0, len = points.length; i < len; i++) { points[i]._code = lu._getBitCode(points[i], bounds); } // for each edge (left, bottom, right, top) for (k = 0; k < 4; k++) { edge = edges[k]; clippedPoints = []; for (i = 0, len = points.length, j = len - 1; i < len; j = i++) { a = points[i]; b = points[j]; // if a is inside the clip window if (!(a._code & edge)) { // if b is outside the clip window (a->b goes out of screen) if (b._code & edge) { p = lu._getEdgeIntersection(b, a, edge, bounds); p._code = lu._getBitCode(p, bounds); clippedPoints.push(p); } clippedPoints.push(a); // else if b is inside the clip window (a->b enters the screen) } else if (!(b._code & edge)) { p = lu._getEdgeIntersection(b, a, edge, bounds); p._code = lu._getBitCode(p, bounds); clippedPoints.push(p); } } points = clippedPoints; } return points; }; /* * L.Polygon is used to display polygons on a map. */ L.Polygon = L.Polyline.extend({ options: { fill: true }, initialize: function (latlngs, options) { var i, len, hole; L.Polyline.prototype.initialize.call(this, latlngs, options); if (latlngs && L.Util.isArray(latlngs[0]) && (typeof latlngs[0][0] !== 'number')) { this._latlngs = this._convertLatLngs(latlngs[0]); this._holes = latlngs.slice(1); for (i = 0, len = this._holes.length; i < len; i++) { hole = this._holes[i] = this._convertLatLngs(this._holes[i]); if (hole[0].equals(hole[hole.length - 1])) { hole.pop(); } } } // filter out last point if its equal to the first one latlngs = this._latlngs; if (latlngs.length >= 2 && latlngs[0].equals(latlngs[latlngs.length - 1])) { latlngs.pop(); } }, projectLatlngs: function () { L.Polyline.prototype.projectLatlngs.call(this); // project polygon holes points // TODO move this logic to Polyline to get rid of duplication this._holePoints = []; if (!this._holes) { return; } var i, j, len, len2; for (i = 0, len = this._holes.length; i < len; i++) { this._holePoints[i] = []; for (j = 0, len2 = this._holes[i].length; j < len2; j++) { this._holePoints[i][j] = this._map.latLngToLayerPoint(this._holes[i][j]); } } }, _clipPoints: function () { var points = this._originalPoints, newParts = []; this._parts = [points].concat(this._holePoints); if (this.options.noClip) { return; } for (var i = 0, len = this._parts.length; i < len; i++) { var clipped = L.PolyUtil.clipPolygon(this._parts[i], this._map._pathViewport); if (clipped.length) { newParts.push(clipped); } } this._parts = newParts; }, _getPathPartStr: function (points) { var str = L.Polyline.prototype._getPathPartStr.call(this, points); return str + (L.Browser.svg ? 'z' : 'x'); } }); L.polygon = function (latlngs, options) { return new L.Polygon(latlngs, options); }; /* * Contains L.MultiPolyline and L.MultiPolygon layers. 
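 *
 * Usage sketch (illustrative coordinates; `map` is an assumed, already created L.Map):
 *   L.multiPolyline([[[0, 0], [1, 1]], [[2, 2], [3, 3]]], {weight: 2}).addTo(map);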
*/ (function () { function createMulti(Klass) { return L.FeatureGroup.extend({ initialize: function (latlngs, options) { this._layers = {}; this._options = options; this.setLatLngs(latlngs); }, setLatLngs: function (latlngs) { var i = 0, len = latlngs.length; this.eachLayer(function (layer) { if (i < len) { layer.setLatLngs(latlngs[i++]); } else { this.removeLayer(layer); } }, this); while (i < len) { this.addLayer(new Klass(latlngs[i++], this._options)); } return this; } }); } L.MultiPolyline = createMulti(L.Polyline); L.MultiPolygon = createMulti(L.Polygon); L.multiPolyline = function (latlngs, options) { return new L.MultiPolyline(latlngs, options); }; L.multiPolygon = function (latlngs, options) { return new L.MultiPolygon(latlngs, options); }; }()); /* * L.Rectangle extends Polygon and creates a rectangle when passed a LatLngBounds object. */ L.Rectangle = L.Polygon.extend({ initialize: function (latLngBounds, options) { L.Polygon.prototype.initialize.call(this, this._boundsToLatLngs(latLngBounds), options); }, setBounds: function (latLngBounds) { this.setLatLngs(this._boundsToLatLngs(latLngBounds)); }, _boundsToLatLngs: function (latLngBounds) { latLngBounds = L.latLngBounds(latLngBounds); return [ latLngBounds.getSouthWest(), latLngBounds.getNorthWest(), latLngBounds.getNorthEast(), latLngBounds.getSouthEast() ]; } }); L.rectangle = function (latLngBounds, options) { return new L.Rectangle(latLngBounds, options); }; /* * L.Circle is a circle overlay (with a certain radius in meters). */ L.Circle = L.Path.extend({ initialize: function (latlng, radius, options) { L.Path.prototype.initialize.call(this, options); this._latlng = L.latLng(latlng); this._mRadius = radius; }, options: { fill: true }, setLatLng: function (latlng) { this._latlng = L.latLng(latlng); return this.redraw(); }, setRadius: function (radius) { this._mRadius = radius; return this.redraw(); }, projectLatlngs: function () { var lngRadius = this._getLngRadius(), latlng = this._latlng, pointLeft = this._map.latLngToLayerPoint([latlng.lat, latlng.lng - lngRadius]); this._point = this._map.latLngToLayerPoint(latlng); this._radius = Math.max(this._point.x - pointLeft.x, 1); }, getBounds: function () { var lngRadius = this._getLngRadius(), latRadius = (this._mRadius / 40075017) * 360, latlng = this._latlng; return new L.LatLngBounds( [latlng.lat - latRadius, latlng.lng - lngRadius], [latlng.lat + latRadius, latlng.lng + lngRadius]); }, getLatLng: function () { return this._latlng; }, getPathString: function () { var p = this._point, r = this._radius; if (this._checkIfEmpty()) { return ''; } if (L.Browser.svg) { return 'M' + p.x + ',' + (p.y - r) + 'A' + r + ',' + r + ',0,1,1,' + (p.x - 0.1) + ',' + (p.y - r) + ' z'; } else { p._round(); r = Math.round(r); return 'AL ' + p.x + ',' + p.y + ' ' + r + ',' + r + ' 0,' + (65535 * 360); } }, getRadius: function () { return this._mRadius; }, // TODO Earth hardcoded, move into projection code! _getLatRadius: function () { return (this._mRadius / 40075017) * 360; }, _getLngRadius: function () { return this._getLatRadius() / Math.cos(L.LatLng.DEG_TO_RAD * this._latlng.lat); }, _checkIfEmpty: function () { if (!this._map) { return false; } var vp = this._map._pathViewport, r = this._radius, p = this._point; return p.x - r > vp.max.x || p.y - r > vp.max.y || p.x + r < vp.min.x || p.y + r < vp.min.y; } }); L.circle = function (latlng, radius, options) { return new L.Circle(latlng, radius, options); }; /* * L.CircleMarker is a circle overlay with a permanent pixel radius. 
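 *
 * Usage sketch (illustrative values; `map` is an assumed, already created L.Map):
 *   L.circleMarker([51.5, -0.09], {radius: 8, color: '#f60'}).addTo(map);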
*/ L.CircleMarker = L.Circle.extend({ options: { radius: 10, weight: 2 }, initialize: function (latlng, options) { L.Circle.prototype.initialize.call(this, latlng, null, options); this._radius = this.options.radius; }, projectLatlngs: function () { this._point = this._map.latLngToLayerPoint(this._latlng); }, _updateStyle : function () { L.Circle.prototype._updateStyle.call(this); this.setRadius(this.options.radius); }, setRadius: function (radius) { this.options.radius = this._radius = radius; return this.redraw(); } }); L.circleMarker = function (latlng, options) { return new L.CircleMarker(latlng, options); }; /* * Extends L.Polyline to be able to manually detect clicks on Canvas-rendered polylines. */ L.Polyline.include(!L.Path.CANVAS ? {} : { _containsPoint: function (p, closed) { var i, j, k, len, len2, dist, part, w = this.options.weight / 2; if (L.Browser.touch) { w += 10; // polyline click tolerance on touch devices } for (i = 0, len = this._parts.length; i < len; i++) { part = this._parts[i]; for (j = 0, len2 = part.length, k = len2 - 1; j < len2; k = j++) { if (!closed && (j === 0)) { continue; } dist = L.LineUtil.pointToSegmentDistance(p, part[k], part[j]); if (dist <= w) { return true; } } } return false; } }); /* * Extends L.Polygon to be able to manually detect clicks on Canvas-rendered polygons. */ L.Polygon.include(!L.Path.CANVAS ? {} : { _containsPoint: function (p) { var inside = false, part, p1, p2, i, j, k, len, len2; // TODO optimization: check if within bounds first if (L.Polyline.prototype._containsPoint.call(this, p, true)) { // click on polygon border return true; } // ray casting algorithm for detecting if point is in polygon for (i = 0, len = this._parts.length; i < len; i++) { part = this._parts[i]; for (j = 0, len2 = part.length, k = len2 - 1; j < len2; k = j++) { p1 = part[j]; p2 = part[k]; if (((p1.y > p.y) !== (p2.y > p.y)) && (p.x < (p2.x - p1.x) * (p.y - p1.y) / (p2.y - p1.y) + p1.x)) { inside = !inside; } } } return inside; } }); /* * Extends L.Circle with Canvas-specific code. */ L.Circle.include(!L.Path.CANVAS ? {} : { _drawPath: function () { var p = this._point; this._ctx.beginPath(); this._ctx.arc(p.x, p.y, this._radius, 0, Math.PI * 2, false); }, _containsPoint: function (p) { var center = this._point, w2 = this.options.stroke ? this.options.weight / 2 : 0; return (p.distanceTo(center) <= this._radius + w2); } }); /* * CircleMarker canvas specific drawing parts. */ L.CircleMarker.include(!L.Path.CANVAS ? {} : { _updateStyle: function () { L.Path.prototype._updateStyle.call(this); } }); /* * L.GeoJSON turns any GeoJSON data into a Leaflet layer. */ L.GeoJSON = L.FeatureGroup.extend({ initialize: function (geojson, options) { L.setOptions(this, options); this._layers = {}; if (geojson) { this.addData(geojson); } }, addData: function (geojson) { var features = L.Util.isArray(geojson) ? 
geojson : geojson.features, i, len; if (features) { for (i = 0, len = features.length; i < len; i++) { // Only add this if geometry or geometries are set and not null if (features[i].geometries || features[i].geometry || features[i].features) { this.addData(features[i]); } } return this; } var options = this.options; if (options.filter && !options.filter(geojson)) { return; } var layer = L.GeoJSON.geometryToLayer(geojson, options.pointToLayer, options.coordsToLatLng); layer.feature = L.GeoJSON.asFeature(geojson); layer.defaultOptions = layer.options; this.resetStyle(layer); if (options.onEachFeature) { options.onEachFeature(geojson, layer); } return this.addLayer(layer); }, resetStyle: function (layer) { var style = this.options.style; if (style) { // reset any custom styles L.Util.extend(layer.options, layer.defaultOptions); this._setLayerStyle(layer, style); } }, setStyle: function (style) { this.eachLayer(function (layer) { this._setLayerStyle(layer, style); }, this); }, _setLayerStyle: function (layer, style) { if (typeof style === 'function') { style = style(layer.feature); } if (layer.setStyle) { layer.setStyle(style); } } }); L.extend(L.GeoJSON, { geometryToLayer: function (geojson, pointToLayer, coordsToLatLng) { var geometry = geojson.type === 'Feature' ? geojson.geometry : geojson, coords = geometry.coordinates, layers = [], latlng, latlngs, i, len, layer; coordsToLatLng = coordsToLatLng || this.coordsToLatLng; switch (geometry.type) { case 'Point': latlng = coordsToLatLng(coords); return pointToLayer ? pointToLayer(geojson, latlng) : new L.Marker(latlng); case 'MultiPoint': for (i = 0, len = coords.length; i < len; i++) { latlng = coordsToLatLng(coords[i]); layer = pointToLayer ? pointToLayer(geojson, latlng) : new L.Marker(latlng); layers.push(layer); } return new L.FeatureGroup(layers); case 'LineString': latlngs = this.coordsToLatLngs(coords, 0, coordsToLatLng); return new L.Polyline(latlngs); case 'Polygon': latlngs = this.coordsToLatLngs(coords, 1, coordsToLatLng); return new L.Polygon(latlngs); case 'MultiLineString': latlngs = this.coordsToLatLngs(coords, 1, coordsToLatLng); return new L.MultiPolyline(latlngs); case 'MultiPolygon': latlngs = this.coordsToLatLngs(coords, 2, coordsToLatLng); return new L.MultiPolygon(latlngs); case 'GeometryCollection': for (i = 0, len = geometry.geometries.length; i < len; i++) { layer = this.geometryToLayer({ geometry: geometry.geometries[i], type: 'Feature', properties: geojson.properties }, pointToLayer, coordsToLatLng); layers.push(layer); } return new L.FeatureGroup(layers); default: throw new Error('Invalid GeoJSON object.'); } }, coordsToLatLng: function (coords) { // (Array[, Boolean]) -> LatLng return new L.LatLng(coords[1], coords[0]); }, coordsToLatLngs: function (coords, levelsDeep, coordsToLatLng) { // (Array[, Number, Function]) -> Array var latlng, i, len, latlngs = []; for (i = 0, len = coords.length; i < len; i++) { latlng = levelsDeep ? this.coordsToLatLngs(coords[i], levelsDeep - 1, coordsToLatLng) : (coordsToLatLng || this.coordsToLatLng)(coords[i]); latlngs.push(latlng); } return latlngs; }, latLngToCoords: function (latLng) { return [latLng.lng, latLng.lat]; }, latLngsToCoords: function (latLngs) { var coords = []; for (var i = 0, len = latLngs.length; i < len; i++) { coords.push(L.GeoJSON.latLngToCoords(latLngs[i])); } return coords; }, getFeature: function (layer, newGeometry) { return layer.feature ? 
L.extend({}, layer.feature, {geometry: newGeometry}) : L.GeoJSON.asFeature(newGeometry); }, asFeature: function (geoJSON) { if (geoJSON.type === 'Feature') { return geoJSON; } return { type: 'Feature', properties: {}, geometry: geoJSON }; } }); var PointToGeoJSON = { toGeoJSON: function () { return L.GeoJSON.getFeature(this, { type: 'Point', coordinates: L.GeoJSON.latLngToCoords(this.getLatLng()) }); } }; L.Marker.include(PointToGeoJSON); L.Circle.include(PointToGeoJSON); L.CircleMarker.include(PointToGeoJSON); L.Polyline.include({ toGeoJSON: function () { return L.GeoJSON.getFeature(this, { type: 'LineString', coordinates: L.GeoJSON.latLngsToCoords(this.getLatLngs()) }); } }); L.Polygon.include({ toGeoJSON: function () { var coords = [L.GeoJSON.latLngsToCoords(this.getLatLngs())], i, len, hole; coords[0].push(coords[0][0]); if (this._holes) { for (i = 0, len = this._holes.length; i < len; i++) { hole = L.GeoJSON.latLngsToCoords(this._holes[i]); hole.push(hole[0]); coords.push(hole); } } return L.GeoJSON.getFeature(this, { type: 'Polygon', coordinates: coords }); } }); (function () { function includeMulti(Klass, type) { Klass.include({ toGeoJSON: function () { var coords = []; this.eachLayer(function (layer) { coords.push(layer.toGeoJSON().geometry.coordinates); }); return L.GeoJSON.getFeature(this, { type: type, coordinates: coords }); } }); } includeMulti(L.MultiPolyline, 'MultiLineString'); includeMulti(L.MultiPolygon, 'MultiPolygon'); }()); L.LayerGroup.include({ toGeoJSON: function () { var features = []; this.eachLayer(function (layer) { if (layer.toGeoJSON) { features.push(L.GeoJSON.asFeature(layer.toGeoJSON())); } }); return { type: 'FeatureCollection', features: features }; } }); L.geoJson = function (geojson, options) { return new L.GeoJSON(geojson, options); }; /* * L.DomEvent contains functions for working with DOM events. */ L.DomEvent = { /* inspired by John Resig, Dean Edwards and YUI addEvent implementations */ addListener: function (obj, type, fn, context) { // (HTMLElement, String, Function[, Object]) var id = L.stamp(fn), key = '_leaflet_' + type + id, handler, originalHandler, newType; if (obj[key]) { return this; } handler = function (e) { return fn.call(context || obj, e || L.DomEvent._getEvent()); }; if (L.Browser.msTouch && type.indexOf('touch') === 0) { return this.addMsTouchListener(obj, type, handler, id); } if (L.Browser.touch && (type === 'dblclick') && this.addDoubleTapListener) { this.addDoubleTapListener(obj, handler, id); } if ('addEventListener' in obj) { if (type === 'mousewheel') { obj.addEventListener('DOMMouseScroll', handler, false); obj.addEventListener(type, handler, false); } else if ((type === 'mouseenter') || (type === 'mouseleave')) { originalHandler = handler; newType = (type === 'mouseenter' ? 
'mouseover' : 'mouseout'); handler = function (e) { if (!L.DomEvent._checkMouse(obj, e)) { return; } return originalHandler(e); }; obj.addEventListener(newType, handler, false); } else if (type === 'click' && L.Browser.android) { originalHandler = handler; handler = function (e) { return L.DomEvent._filterClick(e, originalHandler); }; obj.addEventListener(type, handler, false); } else { obj.addEventListener(type, handler, false); } } else if ('attachEvent' in obj) { obj.attachEvent('on' + type, handler); } obj[key] = handler; return this; }, removeListener: function (obj, type, fn) { // (HTMLElement, String, Function) var id = L.stamp(fn), key = '_leaflet_' + type + id, handler = obj[key]; if (!handler) { return this; } if (L.Browser.msTouch && type.indexOf('touch') === 0) { this.removeMsTouchListener(obj, type, id); } else if (L.Browser.touch && (type === 'dblclick') && this.removeDoubleTapListener) { this.removeDoubleTapListener(obj, id); } else if ('removeEventListener' in obj) { if (type === 'mousewheel') { obj.removeEventListener('DOMMouseScroll', handler, false); obj.removeEventListener(type, handler, false); } else if ((type === 'mouseenter') || (type === 'mouseleave')) { obj.removeEventListener((type === 'mouseenter' ? 'mouseover' : 'mouseout'), handler, false); } else { obj.removeEventListener(type, handler, false); } } else if ('detachEvent' in obj) { obj.detachEvent('on' + type, handler); } obj[key] = null; return this; }, stopPropagation: function (e) { if (e.stopPropagation) { e.stopPropagation(); } else { e.cancelBubble = true; } return this; }, disableClickPropagation: function (el) { var stop = L.DomEvent.stopPropagation; for (var i = L.Draggable.START.length - 1; i >= 0; i--) { L.DomEvent.addListener(el, L.Draggable.START[i], stop); } return L.DomEvent .addListener(el, 'click', L.DomEvent._fakeStop) .addListener(el, 'dblclick', stop); }, preventDefault: function (e) { if (e.preventDefault) { e.preventDefault(); } else { e.returnValue = false; } return this; }, stop: function (e) { return L.DomEvent.preventDefault(e).stopPropagation(e); }, getMousePosition: function (e, container) { var body = document.body, docEl = document.documentElement, x = e.pageX ? e.pageX : e.clientX + body.scrollLeft + docEl.scrollLeft, y = e.pageY ? e.pageY : e.clientY + body.scrollTop + docEl.scrollTop, pos = new L.Point(x, y); return (container ? 
pos._subtract(L.DomUtil.getViewportOffset(container)) : pos); }, getWheelDelta: function (e) { var delta = 0; if (e.wheelDelta) { delta = e.wheelDelta / 120; } if (e.detail) { delta = -e.detail / 3; } return delta; }, _fakeStop: function stop(e) { // fakes stopPropagation by setting a special event flag checked in Map mouse events handler // jshint camelcase: false e._leaflet_stop = true; }, // check if element really left/entered the event target (for mouseenter/mouseleave) _checkMouse: function (el, e) { var related = e.relatedTarget; if (!related) { return true; } try { while (related && (related !== el)) { related = related.parentNode; } } catch (err) { return false; } return (related !== el); }, _getEvent: function () { // evil magic for IE /*jshint noarg:false */ var e = window.event; if (!e) { var caller = arguments.callee.caller; while (caller) { e = caller['arguments'][0]; if (e && window.Event === e.constructor) { break; } caller = caller.caller; } } return e; }, // this is a horrible workaround for a bug in Android where a single touch triggers two click events _filterClick: function (e, handler) { var timeStamp = (e.timeStamp || e.originalEvent.timeStamp), elapsed = L.DomEvent._lastClick && (timeStamp - L.DomEvent._lastClick); // are they closer together than 1000ms yet more than 100ms? // Android typically triggers them ~300ms apart while multiple listeners // on the same event should be triggered far faster; // or check if click is simulated on the element, and if it is, reject any non-simulated events if ((elapsed && elapsed > 100 && elapsed < 1000) || (e.target._simulatedClick && !e._simulated)) { L.DomEvent.stop(e); return; } L.DomEvent._lastClick = timeStamp; return handler(e); } }; L.DomEvent.on = L.DomEvent.addListener; L.DomEvent.off = L.DomEvent.removeListener; /* * L.Draggable allows you to add dragging capabilities to any element. Supports mobile devices too. */ L.Draggable = L.Class.extend({ includes: L.Mixin.Events, statics: { START: L.Browser.touch ? ['touchstart', 'mousedown'] : ['mousedown'], END: { mousedown: 'mouseup', touchstart: 'touchend', MSPointerDown: 'touchend' }, MOVE: { mousedown: 'mousemove', touchstart: 'touchmove', MSPointerDown: 'touchmove' } }, initialize: function (element, dragStartTarget) { this._element = element; this._dragStartTarget = dragStartTarget || element; }, enable: function () { if (this._enabled) { return; } for (var i = L.Draggable.START.length - 1; i >= 0; i--) { L.DomEvent.on(this._dragStartTarget, L.Draggable.START[i], this._onDown, this); } this._enabled = true; }, disable: function () { if (!this._enabled) { return; } for (var i = L.Draggable.START.length - 1; i >= 0; i--) { L.DomEvent.off(this._dragStartTarget, L.Draggable.START[i], this._onDown, this); } this._enabled = false; this._moved = false; }, _onDown: function (e) { if (e.shiftKey || ((e.which !== 1) && (e.button !== 1) && !e.touches)) { return; } L.DomEvent .stopPropagation(e); if (L.Draggable._disabled) { return; } L.DomUtil.disableImageDrag(); L.DomUtil.disableTextSelection(); var first = e.touches ? 
e.touches[0] : e, el = first.target; // if touching a link, highlight it if (L.Browser.touch && el.tagName.toLowerCase() === 'a') { L.DomUtil.addClass(el, 'leaflet-active'); } this._moved = false; if (this._moving) { return; } this._startPoint = new L.Point(first.clientX, first.clientY); this._startPos = this._newPos = L.DomUtil.getPosition(this._element); L.DomEvent .on(document, L.Draggable.MOVE[e.type], this._onMove, this) .on(document, L.Draggable.END[e.type], this._onUp, this); }, _onMove: function (e) { if (e.touches && e.touches.length > 1) { return; } var first = (e.touches && e.touches.length === 1 ? e.touches[0] : e), newPoint = new L.Point(first.clientX, first.clientY), offset = newPoint.subtract(this._startPoint); if (!offset.x && !offset.y) { return; } L.DomEvent.preventDefault(e); if (!this._moved) { this.fire('dragstart'); this._moved = true; this._startPos = L.DomUtil.getPosition(this._element).subtract(offset); if (!L.Browser.touch) { L.DomUtil.addClass(document.body, 'leaflet-dragging'); } } this._newPos = this._startPos.add(offset); this._moving = true; L.Util.cancelAnimFrame(this._animRequest); this._animRequest = L.Util.requestAnimFrame(this._updatePosition, this, true, this._dragStartTarget); }, _updatePosition: function () { this.fire('predrag'); L.DomUtil.setPosition(this._element, this._newPos); this.fire('drag'); }, _onUp: function () { if (!L.Browser.touch) { L.DomUtil.removeClass(document.body, 'leaflet-dragging'); } for (var i in L.Draggable.MOVE) { L.DomEvent .off(document, L.Draggable.MOVE[i], this._onMove) .off(document, L.Draggable.END[i], this._onUp); } L.DomUtil.enableImageDrag(); L.DomUtil.enableTextSelection(); if (this._moved) { // ensure drag is not fired after dragend L.Util.cancelAnimFrame(this._animRequest); this.fire('dragend'); } this._moving = false; } }); /* L.Handler is a base class for handler classes that are used internally to inject interaction features like dragging to classes like Map and Marker. */ L.Handler = L.Class.extend({ initialize: function (map) { this._map = map; }, enable: function () { if (this._enabled) { return; } this._enabled = true; this.addHooks(); }, disable: function () { if (!this._enabled) { return; } this._enabled = false; this.removeHooks(); }, enabled: function () { return !!this._enabled; } }); /* * L.Handler.MapDrag is used to make the map draggable (with panning inertia), enabled by default. */ L.Map.mergeOptions({ dragging: true, inertia: !L.Browser.android23, inertiaDeceleration: 3400, // px/s^2 inertiaMaxSpeed: Infinity, // px/s inertiaThreshold: L.Browser.touch ? 
32 : 18, // ms easeLinearity: 0.25, // TODO refactor, move to CRS worldCopyJump: false }); L.Map.Drag = L.Handler.extend({ addHooks: function () { if (!this._draggable) { var map = this._map; this._draggable = new L.Draggable(map._mapPane, map._container); this._draggable.on({ 'dragstart': this._onDragStart, 'drag': this._onDrag, 'dragend': this._onDragEnd }, this); if (map.options.worldCopyJump) { this._draggable.on('predrag', this._onPreDrag, this); map.on('viewreset', this._onViewReset, this); } } this._draggable.enable(); }, removeHooks: function () { this._draggable.disable(); }, moved: function () { return this._draggable && this._draggable._moved; }, _onDragStart: function () { var map = this._map; if (map._panAnim) { map._panAnim.stop(); } map .fire('movestart') .fire('dragstart'); if (map.options.inertia) { this._positions = []; this._times = []; } }, _onDrag: function () { if (this._map.options.inertia) { var time = this._lastTime = +new Date(), pos = this._lastPos = this._draggable._newPos; this._positions.push(pos); this._times.push(time); if (time - this._times[0] > 200) { this._positions.shift(); this._times.shift(); } } this._map .fire('move') .fire('drag'); }, _onViewReset: function () { // TODO fix hardcoded Earth values var pxCenter = this._map.getSize()._divideBy(2), pxWorldCenter = this._map.latLngToLayerPoint([0, 0]); this._initialWorldOffset = pxWorldCenter.subtract(pxCenter).x; this._worldWidth = this._map.project([0, 180]).x; }, _onPreDrag: function () { // TODO refactor to be able to adjust map pane position after zoom var worldWidth = this._worldWidth, halfWidth = Math.round(worldWidth / 2), dx = this._initialWorldOffset, x = this._draggable._newPos.x, newX1 = (x - halfWidth + dx) % worldWidth + halfWidth - dx, newX2 = (x + halfWidth + dx) % worldWidth - halfWidth - dx, newX = Math.abs(newX1 + dx) < Math.abs(newX2 + dx) ? newX1 : newX2; this._draggable._newPos.x = newX; }, _onDragEnd: function () { var map = this._map, options = map.options, delay = +new Date() - this._lastTime, noInertia = !options.inertia || delay > options.inertiaThreshold || !this._positions[0]; map.fire('dragend'); if (noInertia) { map.fire('moveend'); } else { var direction = this._lastPos.subtract(this._positions[0]), duration = (this._lastTime + delay - this._times[0]) / 1000, ease = options.easeLinearity, speedVector = direction.multiplyBy(ease / duration), speed = speedVector.distanceTo([0, 0]), limitedSpeed = Math.min(options.inertiaMaxSpeed, speed), limitedSpeedVector = speedVector.multiplyBy(limitedSpeed / speed), decelerationDuration = limitedSpeed / (options.inertiaDeceleration * ease), offset = limitedSpeedVector.multiplyBy(-decelerationDuration / 2).round(); if (!offset.x || !offset.y) { map.fire('moveend'); } else { L.Util.requestAnimFrame(function () { map.panBy(offset, { duration: decelerationDuration, easeLinearity: ease, noMoveStart: true }); }); } } } }); L.Map.addInitHook('addHandler', 'dragging', L.Map.Drag); /* * L.Handler.DoubleClickZoom is used to handle double-click zoom on the map, enabled by default. 
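 *
 * Usage note (not part of the original header): the handler is exposed on the map
 * as map.doubleClickZoom, so it can be toggled at runtime with
 * map.doubleClickZoom.disable(), or turned off up front via the
 * {doubleClickZoom: false} map option.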
*/ L.Map.mergeOptions({ doubleClickZoom: true }); L.Map.DoubleClickZoom = L.Handler.extend({ addHooks: function () { this._map.on('dblclick', this._onDoubleClick); }, removeHooks: function () { this._map.off('dblclick', this._onDoubleClick); }, _onDoubleClick: function (e) { this.setZoomAround(e.containerPoint, this._zoom + 1); } }); L.Map.addInitHook('addHandler', 'doubleClickZoom', L.Map.DoubleClickZoom); /* * L.Handler.ScrollWheelZoom is used by L.Map to enable mouse scroll wheel zoom on the map. */ L.Map.mergeOptions({ scrollWheelZoom: true }); L.Map.ScrollWheelZoom = L.Handler.extend({ addHooks: function () { L.DomEvent.on(this._map._container, 'mousewheel', this._onWheelScroll, this); L.DomEvent.on(this._map._container, 'MozMousePixelScroll', L.DomEvent.preventDefault); this._delta = 0; }, removeHooks: function () { L.DomEvent.off(this._map._container, 'mousewheel', this._onWheelScroll); L.DomEvent.off(this._map._container, 'MozMousePixelScroll', L.DomEvent.preventDefault); }, _onWheelScroll: function (e) { var delta = L.DomEvent.getWheelDelta(e); this._delta += delta; this._lastMousePos = this._map.mouseEventToContainerPoint(e); if (!this._startTime) { this._startTime = +new Date(); } var left = Math.max(40 - (+new Date() - this._startTime), 0); clearTimeout(this._timer); this._timer = setTimeout(L.bind(this._performZoom, this), left); L.DomEvent.preventDefault(e); L.DomEvent.stopPropagation(e); }, _performZoom: function () { var map = this._map, delta = this._delta, zoom = map.getZoom(); delta = delta > 0 ? Math.ceil(delta) : Math.floor(delta); delta = Math.max(Math.min(delta, 4), -4); delta = map._limitZoom(zoom + delta) - zoom; this._delta = 0; this._startTime = null; if (!delta) { return; } map.setZoomAround(this._lastMousePos, zoom + delta); } }); L.Map.addInitHook('addHandler', 'scrollWheelZoom', L.Map.ScrollWheelZoom); /* * Extends the event handling code with double tap support for mobile browsers. */ L.extend(L.DomEvent, { _touchstart: L.Browser.msTouch ? 'MSPointerDown' : 'touchstart', _touchend: L.Browser.msTouch ? 'MSPointerUp' : 'touchend', // inspired by Zepto touch code by Thomas Fuchs addDoubleTapListener: function (obj, handler, id) { var last, doubleTap = false, delay = 250, touch, pre = '_leaflet_', touchstart = this._touchstart, touchend = this._touchend, trackedTouches = []; function onTouchStart(e) { var count; if (L.Browser.msTouch) { trackedTouches.push(e.pointerId); count = trackedTouches.length; } else { count = e.touches.length; } if (count > 1) { return; } var now = Date.now(), delta = now - (last || now); touch = e.touches ? e.touches[0] : e; doubleTap = (delta > 0 && delta <= delay); last = now; } function onTouchEnd(e) { if (L.Browser.msTouch) { var idx = trackedTouches.indexOf(e.pointerId); if (idx === -1) { return; } trackedTouches.splice(idx, 1); } if (doubleTap) { if (L.Browser.msTouch) { // work around .type being readonly with MSPointer* events var newTouch = { }, prop; // jshint forin:false for (var i in touch) { prop = touch[i]; if (typeof prop === 'function') { newTouch[i] = prop.bind(touch); } else { newTouch[i] = prop; } } touch = newTouch; } touch.type = 'dblclick'; handler(touch); last = null; } } obj[pre + touchstart + id] = onTouchStart; obj[pre + touchend + id] = onTouchEnd; // on msTouch we need to listen on the document, otherwise a drag starting on the map and moving off screen // will not come through to us, so we will lose track of how many touches are ongoing var endElement = L.Browser.msTouch ? 
document.documentElement : obj; obj.addEventListener(touchstart, onTouchStart, false); endElement.addEventListener(touchend, onTouchEnd, false); if (L.Browser.msTouch) { endElement.addEventListener('MSPointerCancel', onTouchEnd, false); } return this; }, removeDoubleTapListener: function (obj, id) { var pre = '_leaflet_'; obj.removeEventListener(this._touchstart, obj[pre + this._touchstart + id], false); (L.Browser.msTouch ? document.documentElement : obj).removeEventListener( this._touchend, obj[pre + this._touchend + id], false); if (L.Browser.msTouch) { document.documentElement.removeEventListener('MSPointerCancel', obj[pre + this._touchend + id], false); } return this; } }); /* * Extends L.DomEvent to provide touch support for Internet Explorer and Windows-based devices. */ L.extend(L.DomEvent, { _msTouches: [], _msDocumentListener: false, // Provides a touch events wrapper for msPointer events. // Based on changes by veproza https://github.com/CloudMade/Leaflet/pull/1019 addMsTouchListener: function (obj, type, handler, id) { switch (type) { case 'touchstart': return this.addMsTouchListenerStart(obj, type, handler, id); case 'touchend': return this.addMsTouchListenerEnd(obj, type, handler, id); case 'touchmove': return this.addMsTouchListenerMove(obj, type, handler, id); default: throw 'Unknown touch event type'; } }, addMsTouchListenerStart: function (obj, type, handler, id) { var pre = '_leaflet_', touches = this._msTouches; var cb = function (e) { var alreadyInArray = false; for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { alreadyInArray = true; break; } } if (!alreadyInArray) { touches.push(e); } e.touches = touches.slice(); e.changedTouches = [e]; handler(e); }; obj[pre + 'touchstart' + id] = cb; obj.addEventListener('MSPointerDown', cb, false); // need to also listen for end events to keep the _msTouches list accurate // this needs to be on the body and never go away if (!this._msDocumentListener) { var internalCb = function (e) { for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { touches.splice(i, 1); break; } } }; //We listen on the documentElement as any drags that end by moving the touch off the screen get fired there document.documentElement.addEventListener('MSPointerUp', internalCb, false); document.documentElement.addEventListener('MSPointerCancel', internalCb, false); this._msDocumentListener = true; } return this; }, addMsTouchListenerMove: function (obj, type, handler, id) { var pre = '_leaflet_', touches = this._msTouches; function cb(e) { // don't fire touch moves when mouse isn't down if (e.pointerType === e.MSPOINTER_TYPE_MOUSE && e.buttons === 0) { return; } for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { touches[i] = e; break; } } e.touches = touches.slice(); e.changedTouches = [e]; handler(e); } obj[pre + 'touchmove' + id] = cb; obj.addEventListener('MSPointerMove', cb, false); return this; }, addMsTouchListenerEnd: function (obj, type, handler, id) { var pre = '_leaflet_', touches = this._msTouches; var cb = function (e) { for (var i = 0; i < touches.length; i++) { if (touches[i].pointerId === e.pointerId) { touches.splice(i, 1); break; } } e.touches = touches.slice(); e.changedTouches = [e]; handler(e); }; obj[pre + 'touchend' + id] = cb; obj.addEventListener('MSPointerUp', cb, false); obj.addEventListener('MSPointerCancel', cb, false); return this; }, removeMsTouchListener: function (obj, type, id) { var pre = '_leaflet_', cb = obj[pre + type + 
id]; switch (type) { case 'touchstart': obj.removeEventListener('MSPointerDown', cb, false); break; case 'touchmove': obj.removeEventListener('MSPointerMove', cb, false); break; case 'touchend': obj.removeEventListener('MSPointerUp', cb, false); obj.removeEventListener('MSPointerCancel', cb, false); break; } return this; } }); /* * L.Handler.TouchZoom is used by L.Map to add pinch zoom on supported mobile browsers. */ L.Map.mergeOptions({ touchZoom: L.Browser.touch && !L.Browser.android23 }); L.Map.TouchZoom = L.Handler.extend({ addHooks: function () { L.DomEvent.on(this._map._container, 'touchstart', this._onTouchStart, this); }, removeHooks: function () { L.DomEvent.off(this._map._container, 'touchstart', this._onTouchStart, this); }, _onTouchStart: function (e) { var map = this._map; if (!e.touches || e.touches.length !== 2 || map._animatingZoom || this._zooming) { return; } var p1 = map.mouseEventToLayerPoint(e.touches[0]), p2 = map.mouseEventToLayerPoint(e.touches[1]), viewCenter = map._getCenterLayerPoint(); this._startCenter = p1.add(p2)._divideBy(2); this._startDist = p1.distanceTo(p2); this._moved = false; this._zooming = true; this._centerOffset = viewCenter.subtract(this._startCenter); if (map._panAnim) { map._panAnim.stop(); } L.DomEvent .on(document, 'touchmove', this._onTouchMove, this) .on(document, 'touchend', this._onTouchEnd, this); L.DomEvent.preventDefault(e); }, _onTouchMove: function (e) { var map = this._map; if (!e.touches || e.touches.length !== 2 || !this._zooming) { return; } var p1 = map.mouseEventToLayerPoint(e.touches[0]), p2 = map.mouseEventToLayerPoint(e.touches[1]); this._scale = p1.distanceTo(p2) / this._startDist; this._delta = p1._add(p2)._divideBy(2)._subtract(this._startCenter); if (this._scale === 1) { return; } if (!this._moved) { L.DomUtil.addClass(map._mapPane, 'leaflet-touching'); map .fire('movestart') .fire('zoomstart'); this._moved = true; } L.Util.cancelAnimFrame(this._animRequest); this._animRequest = L.Util.requestAnimFrame( this._updateOnMove, this, true, this._map._container); L.DomEvent.preventDefault(e); }, _updateOnMove: function () { var map = this._map, origin = this._getScaleOrigin(), center = map.layerPointToLatLng(origin), zoom = map.getScaleZoom(this._scale); map._animateZoom(center, zoom, this._startCenter, this._scale, this._delta); }, _onTouchEnd: function () { if (!this._moved || !this._zooming) { this._zooming = false; return; } var map = this._map; this._zooming = false; L.DomUtil.removeClass(map._mapPane, 'leaflet-touching'); L.Util.cancelAnimFrame(this._animRequest); L.DomEvent .off(document, 'touchmove', this._onTouchMove) .off(document, 'touchend', this._onTouchEnd); var origin = this._getScaleOrigin(), center = map.layerPointToLatLng(origin), oldZoom = map.getZoom(), floatZoomDelta = map.getScaleZoom(this._scale) - oldZoom, roundZoomDelta = (floatZoomDelta > 0 ? Math.ceil(floatZoomDelta) : Math.floor(floatZoomDelta)), zoom = map._limitZoom(oldZoom + roundZoomDelta), scale = map.getZoomScale(zoom) / this._scale; map._animateZoom(center, zoom, origin, scale); }, _getScaleOrigin: function () { var centerOffset = this._centerOffset.subtract(this._delta).divideBy(this._scale); return this._startCenter.add(centerOffset); } }); L.Map.addInitHook('addHandler', 'touchZoom', L.Map.TouchZoom); /* * L.Map.Tap is used to enable mobile hacks like quick taps and long hold. 
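 *
 * A minimal usage sketch (assumes an existing map div with id 'map'):
 *
 *   var map = L.map('map', { tap: true, tapTolerance: 15 });
 *   // quick taps are translated into 'click' events; a ~1 second press fires 'contextmenu'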
*/ L.Map.mergeOptions({ tap: true, tapTolerance: 15 }); L.Map.Tap = L.Handler.extend({ addHooks: function () { L.DomEvent.on(this._map._container, 'touchstart', this._onDown, this); }, removeHooks: function () { L.DomEvent.off(this._map._container, 'touchstart', this._onDown, this); }, _onDown: function (e) { if (!e.touches) { return; } L.DomEvent.preventDefault(e); this._fireClick = true; // don't simulate click or track longpress if more than 1 touch if (e.touches.length > 1) { this._fireClick = false; clearTimeout(this._holdTimeout); return; } var first = e.touches[0], el = first.target; this._startPos = this._newPos = new L.Point(first.clientX, first.clientY); // if touching a link, highlight it if (el.tagName.toLowerCase() === 'a') { L.DomUtil.addClass(el, 'leaflet-active'); } // simulate long hold but setting a timeout this._holdTimeout = setTimeout(L.bind(function () { if (this._isTapValid()) { this._fireClick = false; this._onUp(); this._simulateEvent('contextmenu', first); } }, this), 1000); L.DomEvent .on(document, 'touchmove', this._onMove, this) .on(document, 'touchend', this._onUp, this); }, _onUp: function (e) { clearTimeout(this._holdTimeout); L.DomEvent .off(document, 'touchmove', this._onMove, this) .off(document, 'touchend', this._onUp, this); if (this._fireClick && e && e.changedTouches) { var first = e.changedTouches[0], el = first.target; if (el.tagName.toLowerCase() === 'a') { L.DomUtil.removeClass(el, 'leaflet-active'); } // simulate click if the touch didn't move too much if (this._isTapValid()) { this._simulateEvent('click', first); } } }, _isTapValid: function () { return this._newPos.distanceTo(this._startPos) <= this._map.options.tapTolerance; }, _onMove: function (e) { var first = e.touches[0]; this._newPos = new L.Point(first.clientX, first.clientY); }, _simulateEvent: function (type, e) { var simulatedEvent = document.createEvent('MouseEvents'); simulatedEvent._simulated = true; e.target._simulatedClick = true; simulatedEvent.initMouseEvent( type, true, true, window, 1, e.screenX, e.screenY, e.clientX, e.clientY, false, false, false, false, 0, null); e.target.dispatchEvent(simulatedEvent); } }); if (L.Browser.touch && !L.Browser.msTouch) { L.Map.addInitHook('addHandler', 'tap', L.Map.Tap); } /* * L.Handler.ShiftDragZoom is used to add shift-drag zoom interaction to the map * (zoom to a selected bounding box), enabled by default. 
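 *
 * A minimal usage sketch (assumes an existing map div with id 'map'):
 *
 *   var map = L.map('map', { boxZoom: true });
 *   map.on('boxzoomend', function (e) { console.log(e.boxZoomBounds); });
 *   // hold Shift and drag on the map to zoom to the drawn rectangle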
*/ L.Map.mergeOptions({ boxZoom: true }); L.Map.BoxZoom = L.Handler.extend({ initialize: function (map) { this._map = map; this._container = map._container; this._pane = map._panes.overlayPane; }, addHooks: function () { L.DomEvent.on(this._container, 'mousedown', this._onMouseDown, this); }, removeHooks: function () { L.DomEvent.off(this._container, 'mousedown', this._onMouseDown); }, _onMouseDown: function (e) { if (!e.shiftKey || ((e.which !== 1) && (e.button !== 1))) { return false; } L.DomUtil.disableTextSelection(); L.DomUtil.disableImageDrag(); this._startLayerPoint = this._map.mouseEventToLayerPoint(e); this._box = L.DomUtil.create('div', 'leaflet-zoom-box', this._pane); L.DomUtil.setPosition(this._box, this._startLayerPoint); //TODO refactor: move cursor to styles this._container.style.cursor = 'crosshair'; L.DomEvent .on(document, 'mousemove', this._onMouseMove, this) .on(document, 'mouseup', this._onMouseUp, this) .on(document, 'keydown', this._onKeyDown, this); this._map.fire('boxzoomstart'); }, _onMouseMove: function (e) { var startPoint = this._startLayerPoint, box = this._box, layerPoint = this._map.mouseEventToLayerPoint(e), offset = layerPoint.subtract(startPoint), newPos = new L.Point( Math.min(layerPoint.x, startPoint.x), Math.min(layerPoint.y, startPoint.y)); L.DomUtil.setPosition(box, newPos); // TODO refactor: remove hardcoded 4 pixels box.style.width = (Math.max(0, Math.abs(offset.x) - 4)) + 'px'; box.style.height = (Math.max(0, Math.abs(offset.y) - 4)) + 'px'; }, _finish: function () { this._pane.removeChild(this._box); this._container.style.cursor = ''; L.DomUtil.enableTextSelection(); L.DomUtil.enableImageDrag(); L.DomEvent .off(document, 'mousemove', this._onMouseMove) .off(document, 'mouseup', this._onMouseUp) .off(document, 'keydown', this._onKeyDown); }, _onMouseUp: function (e) { this._finish(); var map = this._map, layerPoint = map.mouseEventToLayerPoint(e); if (this._startLayerPoint.equals(layerPoint)) { return; } var bounds = new L.LatLngBounds( map.layerPointToLatLng(this._startLayerPoint), map.layerPointToLatLng(layerPoint)); map.fitBounds(bounds); map.fire('boxzoomend', { boxZoomBounds: bounds }); }, _onKeyDown: function (e) { if (e.keyCode === 27) { this._finish(); } } }); L.Map.addInitHook('addHandler', 'boxZoom', L.Map.BoxZoom); /* * L.Map.Keyboard is handling keyboard interaction with the map, enabled by default. 
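 *
 * A minimal usage sketch (assumes an existing map div with id 'map'):
 *
 *   var map = L.map('map', { keyboardPanOffset: 80, keyboardZoomOffset: 1 });
 *   map.keyboard.disable();   // turn off arrow-key panning and +/- zooming
 *   map.keyboard.enable();    // re-enable later if needed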
*/ L.Map.mergeOptions({ keyboard: true, keyboardPanOffset: 80, keyboardZoomOffset: 1 }); L.Map.Keyboard = L.Handler.extend({ keyCodes: { left: [37], right: [39], down: [40], up: [38], zoomIn: [187, 107, 61], zoomOut: [189, 109, 173] }, initialize: function (map) { this._map = map; this._setPanOffset(map.options.keyboardPanOffset); this._setZoomOffset(map.options.keyboardZoomOffset); }, addHooks: function () { var container = this._map._container; // make the container focusable by tabbing if (container.tabIndex === -1) { container.tabIndex = '0'; } L.DomEvent .on(container, 'focus', this._onFocus, this) .on(container, 'blur', this._onBlur, this) .on(container, 'mousedown', this._onMouseDown, this); this._map .on('focus', this._addHooks, this) .on('blur', this._removeHooks, this); }, removeHooks: function () { this._removeHooks(); var container = this._map._container; L.DomEvent .off(container, 'focus', this._onFocus, this) .off(container, 'blur', this._onBlur, this) .off(container, 'mousedown', this._onMouseDown, this); this._map .off('focus', this._addHooks, this) .off('blur', this._removeHooks, this); }, _onMouseDown: function () { if (this._focused) { return; } var body = document.body, docEl = document.documentElement, top = body.scrollTop || docEl.scrollTop, left = body.scrollTop || docEl.scrollLeft; this._map._container.focus(); window.scrollTo(left, top); }, _onFocus: function () { this._focused = true; this._map.fire('focus'); }, _onBlur: function () { this._focused = false; this._map.fire('blur'); }, _setPanOffset: function (pan) { var keys = this._panKeys = {}, codes = this.keyCodes, i, len; for (i = 0, len = codes.left.length; i < len; i++) { keys[codes.left[i]] = [-1 * pan, 0]; } for (i = 0, len = codes.right.length; i < len; i++) { keys[codes.right[i]] = [pan, 0]; } for (i = 0, len = codes.down.length; i < len; i++) { keys[codes.down[i]] = [0, pan]; } for (i = 0, len = codes.up.length; i < len; i++) { keys[codes.up[i]] = [0, -1 * pan]; } }, _setZoomOffset: function (zoom) { var keys = this._zoomKeys = {}, codes = this.keyCodes, i, len; for (i = 0, len = codes.zoomIn.length; i < len; i++) { keys[codes.zoomIn[i]] = zoom; } for (i = 0, len = codes.zoomOut.length; i < len; i++) { keys[codes.zoomOut[i]] = -zoom; } }, _addHooks: function () { L.DomEvent.on(document, 'keydown', this._onKeyDown, this); }, _removeHooks: function () { L.DomEvent.off(document, 'keydown', this._onKeyDown, this); }, _onKeyDown: function (e) { var key = e.keyCode, map = this._map; if (key in this._panKeys) { if (map._panAnim && map._panAnim._inProgress) { return; } map.panBy(this._panKeys[key]); if (map.options.maxBounds) { map.panInsideBounds(map.options.maxBounds); } } else if (key in this._zoomKeys) { map.setZoom(map.getZoom() + this._zoomKeys[key]); } else { return; } L.DomEvent.stop(e); } }); L.Map.addInitHook('addHandler', 'keyboard', L.Map.Keyboard); /* * L.Handler.MarkerDrag is used internally by L.Marker to make the markers draggable. 
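 *
 * A minimal usage sketch (assumes an existing L.Map instance named map):
 *
 *   var marker = L.marker([51.5, -0.09], { draggable: true }).addTo(map);
 *   marker.on('dragend', function () { console.log(marker.getLatLng()); });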
*/ L.Handler.MarkerDrag = L.Handler.extend({ initialize: function (marker) { this._marker = marker; }, addHooks: function () { var icon = this._marker._icon; if (!this._draggable) { this._draggable = new L.Draggable(icon, icon); } this._draggable .on('dragstart', this._onDragStart, this) .on('drag', this._onDrag, this) .on('dragend', this._onDragEnd, this); this._draggable.enable(); }, removeHooks: function () { this._draggable .off('dragstart', this._onDragStart, this) .off('drag', this._onDrag, this) .off('dragend', this._onDragEnd, this); this._draggable.disable(); }, moved: function () { return this._draggable && this._draggable._moved; }, _onDragStart: function () { this._marker .closePopup() .fire('movestart') .fire('dragstart'); }, _onDrag: function () { var marker = this._marker, shadow = marker._shadow, iconPos = L.DomUtil.getPosition(marker._icon), latlng = marker._map.layerPointToLatLng(iconPos); // update shadow position if (shadow) { L.DomUtil.setPosition(shadow, iconPos); } marker._latlng = latlng; marker .fire('move', {latlng: latlng}) .fire('drag'); }, _onDragEnd: function () { this._marker .fire('moveend') .fire('dragend'); } }); /* * L.Control is a base class for implementing map controls. Handles positioning. * All other controls extend from this class. */ L.Control = L.Class.extend({ options: { position: 'topright' }, initialize: function (options) { L.setOptions(this, options); }, getPosition: function () { return this.options.position; }, setPosition: function (position) { var map = this._map; if (map) { map.removeControl(this); } this.options.position = position; if (map) { map.addControl(this); } return this; }, getContainer: function () { return this._container; }, addTo: function (map) { this._map = map; var container = this._container = this.onAdd(map), pos = this.getPosition(), corner = map._controlCorners[pos]; L.DomUtil.addClass(container, 'leaflet-control'); if (pos.indexOf('bottom') !== -1) { corner.insertBefore(container, corner.firstChild); } else { corner.appendChild(container); } return this; }, removeFrom: function (map) { var pos = this.getPosition(), corner = map._controlCorners[pos]; corner.removeChild(this._container); this._map = null; if (this.onRemove) { this.onRemove(map); } return this; } }); L.control = function (options) { return new L.Control(options); }; // adds control-related methods to L.Map L.Map.include({ addControl: function (control) { control.addTo(this); return this; }, removeControl: function (control) { control.removeFrom(this); return this; }, _initControlPos: function () { var corners = this._controlCorners = {}, l = 'leaflet-', container = this._controlContainer = L.DomUtil.create('div', l + 'control-container', this._container); function createCorner(vSide, hSide) { var className = l + vSide + ' ' + l + hSide; corners[vSide + hSide] = L.DomUtil.create('div', className, container); } createCorner('top', 'left'); createCorner('top', 'right'); createCorner('bottom', 'left'); createCorner('bottom', 'right'); }, _clearControlPos: function () { this._container.removeChild(this._controlContainer); } }); /* * L.Control.Zoom is used for the default zoom buttons on the map. 
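 *
 * A minimal usage sketch (assumes a map created with { zoomControl: false }, named map):
 *
 *   L.control.zoom({ position: 'topleft' }).addTo(map);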
*/ L.Control.Zoom = L.Control.extend({ options: { position: 'topleft' }, onAdd: function (map) { var zoomName = 'leaflet-control-zoom', container = L.DomUtil.create('div', zoomName + ' leaflet-bar'); this._map = map; this._zoomInButton = this._createButton( '+', 'Zoom in', zoomName + '-in', container, this._zoomIn, this); this._zoomOutButton = this._createButton( '-', 'Zoom out', zoomName + '-out', container, this._zoomOut, this); map.on('zoomend zoomlevelschange', this._updateDisabled, this); return container; }, onRemove: function (map) { map.off('zoomend zoomlevelschange', this._updateDisabled, this); }, _zoomIn: function (e) { this._map.zoomIn(e.shiftKey ? 3 : 1); }, _zoomOut: function (e) { this._map.zoomOut(e.shiftKey ? 3 : 1); }, _createButton: function (html, title, className, container, fn, context) { var link = L.DomUtil.create('a', className, container); link.innerHTML = html; link.href = '#'; link.title = title; var stop = L.DomEvent.stopPropagation; L.DomEvent .on(link, 'click', stop) .on(link, 'mousedown', stop) .on(link, 'dblclick', stop) .on(link, 'click', L.DomEvent.preventDefault) .on(link, 'click', fn, context); return link; }, _updateDisabled: function () { var map = this._map, className = 'leaflet-disabled'; L.DomUtil.removeClass(this._zoomInButton, className); L.DomUtil.removeClass(this._zoomOutButton, className); if (map._zoom === map.getMinZoom()) { L.DomUtil.addClass(this._zoomOutButton, className); } if (map._zoom === map.getMaxZoom()) { L.DomUtil.addClass(this._zoomInButton, className); } } }); L.Map.mergeOptions({ zoomControl: true }); L.Map.addInitHook(function () { if (this.options.zoomControl) { this.zoomControl = new L.Control.Zoom(); this.addControl(this.zoomControl); } }); L.control.zoom = function (options) { return new L.Control.Zoom(options); }; /* * L.Control.Attribution is used for displaying attribution on the map (added by default). 
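 *
 * A minimal usage sketch (assumes a map created with { attributionControl: false }, named map):
 *
 *   L.control.attribution({ prefix: 'Leaflet' }).addTo(map)
 *       .addAttribution('Map data &copy; OpenStreetMap contributors');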
*/ L.Control.Attribution = L.Control.extend({ options: { position: 'bottomright', prefix: '<a href="http://leafletjs.com" title="A JS library for interactive maps">Leaflet</a>' }, initialize: function (options) { L.setOptions(this, options); this._attributions = {}; }, onAdd: function (map) { this._container = L.DomUtil.create('div', 'leaflet-control-attribution'); L.DomEvent.disableClickPropagation(this._container); map .on('layeradd', this._onLayerAdd, this) .on('layerremove', this._onLayerRemove, this); this._update(); return this._container; }, onRemove: function (map) { map .off('layeradd', this._onLayerAdd) .off('layerremove', this._onLayerRemove); }, setPrefix: function (prefix) { this.options.prefix = prefix; this._update(); return this; }, addAttribution: function (text) { if (!text) { return; } if (!this._attributions[text]) { this._attributions[text] = 0; } this._attributions[text]++; this._update(); return this; }, removeAttribution: function (text) { if (!text) { return; } if (this._attributions[text]) { this._attributions[text]--; this._update(); } return this; }, _update: function () { if (!this._map) { return; } var attribs = []; for (var i in this._attributions) { if (this._attributions[i]) { attribs.push(i); } } var prefixAndAttribs = []; if (this.options.prefix) { prefixAndAttribs.push(this.options.prefix); } if (attribs.length) { prefixAndAttribs.push(attribs.join(', ')); } this._container.innerHTML = prefixAndAttribs.join(' | '); }, _onLayerAdd: function (e) { if (e.layer.getAttribution) { this.addAttribution(e.layer.getAttribution()); } }, _onLayerRemove: function (e) { if (e.layer.getAttribution) { this.removeAttribution(e.layer.getAttribution()); } } }); L.Map.mergeOptions({ attributionControl: true }); L.Map.addInitHook(function () { if (this.options.attributionControl) { this.attributionControl = (new L.Control.Attribution()).addTo(this); } }); L.control.attribution = function (options) { return new L.Control.Attribution(options); }; /* * L.Control.Scale is used for displaying metric/imperial scale on the map. */ L.Control.Scale = L.Control.extend({ options: { position: 'bottomleft', maxWidth: 100, metric: true, imperial: true, updateWhenIdle: false }, onAdd: function (map) { this._map = map; var className = 'leaflet-control-scale', container = L.DomUtil.create('div', className), options = this.options; this._addScales(options, className, container); map.on(options.updateWhenIdle ? 'moveend' : 'move', this._update, this); map.whenReady(this._update, this); return container; }, onRemove: function (map) { map.off(this.options.updateWhenIdle ? 
'moveend' : 'move', this._update, this); }, _addScales: function (options, className, container) { if (options.metric) { this._mScale = L.DomUtil.create('div', className + '-line', container); } if (options.imperial) { this._iScale = L.DomUtil.create('div', className + '-line', container); } }, _update: function () { var bounds = this._map.getBounds(), centerLat = bounds.getCenter().lat, halfWorldMeters = 6378137 * Math.PI * Math.cos(centerLat * Math.PI / 180), dist = halfWorldMeters * (bounds.getNorthEast().lng - bounds.getSouthWest().lng) / 180, size = this._map.getSize(), options = this.options, maxMeters = 0; if (size.x > 0) { maxMeters = dist * (options.maxWidth / size.x); } this._updateScales(options, maxMeters); }, _updateScales: function (options, maxMeters) { if (options.metric && maxMeters) { this._updateMetric(maxMeters); } if (options.imperial && maxMeters) { this._updateImperial(maxMeters); } }, _updateMetric: function (maxMeters) { var meters = this._getRoundNum(maxMeters); this._mScale.style.width = this._getScaleWidth(meters / maxMeters) + 'px'; this._mScale.innerHTML = meters < 1000 ? meters + ' m' : (meters / 1000) + ' km'; }, _updateImperial: function (maxMeters) { var maxFeet = maxMeters * 3.2808399, scale = this._iScale, maxMiles, miles, feet; if (maxFeet > 5280) { maxMiles = maxFeet / 5280; miles = this._getRoundNum(maxMiles); scale.style.width = this._getScaleWidth(miles / maxMiles) + 'px'; scale.innerHTML = miles + ' mi'; } else { feet = this._getRoundNum(maxFeet); scale.style.width = this._getScaleWidth(feet / maxFeet) + 'px'; scale.innerHTML = feet + ' ft'; } }, _getScaleWidth: function (ratio) { return Math.round(this.options.maxWidth * ratio) - 10; }, _getRoundNum: function (num) { var pow10 = Math.pow(10, (Math.floor(num) + '').length - 1), d = num / pow10; d = d >= 10 ? 10 : d >= 5 ? 5 : d >= 3 ? 3 : d >= 2 ? 2 : 1; return pow10 * d; } }); L.control.scale = function (options) { return new L.Control.Scale(options); }; /* * L.Control.Layers is a control to allow users to switch between different layers on the map. 
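 *
 * A minimal usage sketch (layer variables streets, satellite and markers are assumed to exist):
 *
 *   var baseLayers = { 'Streets': streets, 'Satellite': satellite },
 *       overlays   = { 'Markers': markers };
 *   L.control.layers(baseLayers, overlays, { collapsed: true }).addTo(map);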
*/ L.Control.Layers = L.Control.extend({ options: { collapsed: true, position: 'topright', autoZIndex: true }, initialize: function (baseLayers, overlays, options) { L.setOptions(this, options); this._layers = {}; this._lastZIndex = 0; this._handlingClick = false; for (var i in baseLayers) { this._addLayer(baseLayers[i], i); } for (i in overlays) { this._addLayer(overlays[i], i, true); } }, onAdd: function (map) { this._initLayout(); this._update(); map .on('layeradd', this._onLayerChange, this) .on('layerremove', this._onLayerChange, this); return this._container; }, onRemove: function (map) { map .off('layeradd', this._onLayerChange) .off('layerremove', this._onLayerChange); }, addBaseLayer: function (layer, name) { this._addLayer(layer, name); this._update(); return this; }, addOverlay: function (layer, name) { this._addLayer(layer, name, true); this._update(); return this; }, removeLayer: function (layer) { var id = L.stamp(layer); delete this._layers[id]; this._update(); return this; }, _initLayout: function () { var className = 'leaflet-control-layers', container = this._container = L.DomUtil.create('div', className); //Makes this work on IE10 Touch devices by stopping it from firing a mouseout event when the touch is released container.setAttribute('aria-haspopup', true); if (!L.Browser.touch) { L.DomEvent.disableClickPropagation(container); L.DomEvent.on(container, 'mousewheel', L.DomEvent.stopPropagation); } else { L.DomEvent.on(container, 'click', L.DomEvent.stopPropagation); } var form = this._form = L.DomUtil.create('form', className + '-list'); if (this.options.collapsed) { if (!L.Browser.android) { L.DomEvent .on(container, 'mouseover', this._expand, this) .on(container, 'mouseout', this._collapse, this); } var link = this._layersLink = L.DomUtil.create('a', className + '-toggle', container); link.href = '#'; link.title = 'Layers'; if (L.Browser.touch) { L.DomEvent .on(link, 'click', L.DomEvent.stop) .on(link, 'click', this._expand, this); } else { L.DomEvent.on(link, 'focus', this._expand, this); } this._map.on('click', this._collapse, this); // TODO keyboard accessibility } else { this._expand(); } this._baseLayersList = L.DomUtil.create('div', className + '-base', form); this._separator = L.DomUtil.create('div', className + '-separator', form); this._overlaysList = L.DomUtil.create('div', className + '-overlays', form); container.appendChild(form); }, _addLayer: function (layer, name, overlay) { var id = L.stamp(layer); this._layers[id] = { layer: layer, name: name, overlay: overlay }; if (this.options.autoZIndex && layer.setZIndex) { this._lastZIndex++; layer.setZIndex(this._lastZIndex); } }, _update: function () { if (!this._container) { return; } this._baseLayersList.innerHTML = ''; this._overlaysList.innerHTML = ''; var baseLayersPresent = false, overlaysPresent = false, i, obj; for (i in this._layers) { obj = this._layers[i]; this._addItem(obj); overlaysPresent = overlaysPresent || obj.overlay; baseLayersPresent = baseLayersPresent || !obj.overlay; } this._separator.style.display = overlaysPresent && baseLayersPresent ? '' : 'none'; }, _onLayerChange: function (e) { var obj = this._layers[L.stamp(e.layer)]; if (!obj) { return; } if (!this._handlingClick) { this._update(); } var type = obj.overlay ? (e.type === 'layeradd' ? 'overlayadd' : 'overlayremove') : (e.type === 'layeradd' ? 
'baselayerchange' : null); if (type) { this._map.fire(type, obj); } }, // IE7 bugs out if you create a radio dynamically, so you have to do it this hacky way (see http://bit.ly/PqYLBe) _createRadioElement: function (name, checked) { var radioHtml = '<input type="radio" class="leaflet-control-layers-selector" name="' + name + '"'; if (checked) { radioHtml += ' checked="checked"'; } radioHtml += '/>'; var radioFragment = document.createElement('div'); radioFragment.innerHTML = radioHtml; return radioFragment.firstChild; }, _addItem: function (obj) { var label = document.createElement('label'), input, checked = this._map.hasLayer(obj.layer); if (obj.overlay) { input = document.createElement('input'); input.type = 'checkbox'; input.className = 'leaflet-control-layers-selector'; input.defaultChecked = checked; } else { input = this._createRadioElement('leaflet-base-layers', checked); } input.layerId = L.stamp(obj.layer); L.DomEvent.on(input, 'click', this._onInputClick, this); var name = document.createElement('span'); name.innerHTML = ' ' + obj.name; label.appendChild(input); label.appendChild(name); var container = obj.overlay ? this._overlaysList : this._baseLayersList; container.appendChild(label); return label; }, _onInputClick: function () { var i, input, obj, inputs = this._form.getElementsByTagName('input'), inputsLen = inputs.length; this._handlingClick = true; for (i = 0; i < inputsLen; i++) { input = inputs[i]; obj = this._layers[input.layerId]; if (input.checked && !this._map.hasLayer(obj.layer)) { this._map.addLayer(obj.layer); } else if (!input.checked && this._map.hasLayer(obj.layer)) { this._map.removeLayer(obj.layer); } } this._handlingClick = false; }, _expand: function () { L.DomUtil.addClass(this._container, 'leaflet-control-layers-expanded'); }, _collapse: function () { this._container.className = this._container.className.replace(' leaflet-control-layers-expanded', ''); } }); L.control.layers = function (baseLayers, overlays, options) { return new L.Control.Layers(baseLayers, overlays, options); }; /* * L.PosAnimation is used by Leaflet internally for pan animations. 
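 *
 * A minimal usage sketch (assumes an element el already positioned with L.DomUtil.setPosition):
 *
 *   var anim = new L.PosAnimation();
 *   anim.on('end', function () { console.log('pan finished'); });
 *   anim.run(el, new L.Point(300, 100), 0.5, 0.5);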
*/ L.PosAnimation = L.Class.extend({ includes: L.Mixin.Events, run: function (el, newPos, duration, easeLinearity) { // (HTMLElement, Point[, Number, Number]) this.stop(); this._el = el; this._inProgress = true; this._newPos = newPos; this.fire('start'); el.style[L.DomUtil.TRANSITION] = 'all ' + (duration || 0.25) + 's cubic-bezier(0,0,' + (easeLinearity || 0.5) + ',1)'; L.DomEvent.on(el, L.DomUtil.TRANSITION_END, this._onTransitionEnd, this); L.DomUtil.setPosition(el, newPos); // toggle reflow, Chrome flickers for some reason if you don't do this L.Util.falseFn(el.offsetWidth); // there's no native way to track value updates of transitioned properties, so we imitate this this._stepTimer = setInterval(L.bind(this._onStep, this), 50); }, stop: function () { if (!this._inProgress) { return; } // if we just removed the transition property, the element would jump to its final position, // so we need to make it stay at the current position L.DomUtil.setPosition(this._el, this._getPos()); this._onTransitionEnd(); L.Util.falseFn(this._el.offsetWidth); // force reflow in case we are about to start a new animation }, _onStep: function () { // jshint camelcase: false // make L.DomUtil.getPosition return intermediate position value during animation this._el._leaflet_pos = this._getPos(); this.fire('step'); }, // you can't easily get intermediate values of properties animated with CSS3 Transitions, // we need to parse computed style (in case of transform it returns matrix string) _transformRe: /([-+]?(?:\d*\.)?\d+)\D*, ([-+]?(?:\d*\.)?\d+)\D*\)/, _getPos: function () { var left, top, matches, el = this._el, style = window.getComputedStyle(el); if (L.Browser.any3d) { matches = style[L.DomUtil.TRANSFORM].match(this._transformRe); left = matches ? parseFloat(matches[1]) : 0; top = matches ? parseFloat(matches[2]) : 0; } else { left = parseFloat(style.left); top = parseFloat(style.top); } return new L.Point(left, top, true); }, _onTransitionEnd: function () { L.DomEvent.off(this._el, L.DomUtil.TRANSITION_END, this._onTransitionEnd, this); if (!this._inProgress) { return; } this._inProgress = false; this._el.style[L.DomUtil.TRANSITION] = ''; // jshint camelcase: false // make sure L.DomUtil.getPosition returns the final position value after animation this._el._leaflet_pos = this._newPos; clearInterval(this._stepTimer); this.fire('step').fire('end'); } }); /* * Extends L.Map to handle panning animations. */ L.Map.include({ setView: function (center, zoom, options) { zoom = this._limitZoom(zoom); center = L.latLng(center); options = options || {}; if (this._panAnim) { this._panAnim.stop(); } if (this._loaded && !options.reset && options !== true) { if (options.animate !== undefined) { options.zoom = L.extend({animate: options.animate}, options.zoom); options.pan = L.extend({animate: options.animate}, options.pan); } // try animating pan or zoom var animated = (this._zoom !== zoom) ? 
this._tryAnimatedZoom && this._tryAnimatedZoom(center, zoom, options.zoom) : this._tryAnimatedPan(center, options.pan); if (animated) { // prevent resize handler call, the view will refresh after animation anyway clearTimeout(this._sizeTimer); return this; } } // animation didn't start, just reset the map view this._resetView(center, zoom); return this; }, panBy: function (offset, options) { offset = L.point(offset).round(); options = options || {}; if (!offset.x && !offset.y) { return this; } if (!this._panAnim) { this._panAnim = new L.PosAnimation(); this._panAnim.on({ 'step': this._onPanTransitionStep, 'end': this._onPanTransitionEnd }, this); } // don't fire movestart if animating inertia if (!options.noMoveStart) { this.fire('movestart'); } // animate pan unless animate: false specified if (options.animate !== false) { L.DomUtil.addClass(this._mapPane, 'leaflet-pan-anim'); var newPos = this._getMapPanePos().subtract(offset); this._panAnim.run(this._mapPane, newPos, options.duration || 0.25, options.easeLinearity); } else { this._rawPanBy(offset); this.fire('move').fire('moveend'); } return this; }, _onPanTransitionStep: function () { this.fire('move'); }, _onPanTransitionEnd: function () { L.DomUtil.removeClass(this._mapPane, 'leaflet-pan-anim'); this.fire('moveend'); }, _tryAnimatedPan: function (center, options) { // difference between the new and current centers in pixels var offset = this._getCenterOffset(center)._floor(); // don't animate too far unless animate: true specified in options if ((options && options.animate) !== true && !this.getSize().contains(offset)) { return false; } this.panBy(offset, options); return true; } }); /* * L.PosAnimation fallback implementation that powers Leaflet pan animations * in browsers that don't support CSS3 Transitions. */ L.PosAnimation = L.DomUtil.TRANSITION ? L.PosAnimation : L.PosAnimation.extend({ run: function (el, newPos, duration, easeLinearity) { // (HTMLElement, Point[, Number, Number]) this.stop(); this._el = el; this._inProgress = true; this._duration = duration || 0.25; this._easeOutPower = 1 / Math.max(easeLinearity || 0.5, 0.2); this._startPos = L.DomUtil.getPosition(el); this._offset = newPos.subtract(this._startPos); this._startTime = +new Date(); this.fire('start'); this._animate(); }, stop: function () { if (!this._inProgress) { return; } this._step(); this._complete(); }, _animate: function () { // animation loop this._animId = L.Util.requestAnimFrame(this._animate, this); this._step(); }, _step: function () { var elapsed = (+new Date()) - this._startTime, duration = this._duration * 1000; if (elapsed < duration) { this._runFrame(this._easeOut(elapsed / duration)); } else { this._runFrame(1); this._complete(); } }, _runFrame: function (progress) { var pos = this._startPos.add(this._offset.multiplyBy(progress)); L.DomUtil.setPosition(this._el, pos); this.fire('step'); }, _complete: function () { L.Util.cancelAnimFrame(this._animId); this._inProgress = false; this.fire('end'); }, _easeOut: function (t) { return 1 - Math.pow(1 - t, this._easeOutPower); } }); /* * Extends L.Map to handle zoom animations. 
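 *
 * A minimal usage sketch (assumes an existing map div with id 'map'):
 *
 *   var map = L.map('map', { zoomAnimation: true, zoomAnimationThreshold: 4 });
 *   map.setView([51.5, -0.09], 15, { animate: true });   // animates when the zoom delta is small enough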
*/ L.Map.mergeOptions({ zoomAnimation: true, zoomAnimationThreshold: 4 }); if (L.DomUtil.TRANSITION) { L.Map.addInitHook(function () { // don't animate on browsers without hardware-accelerated transitions or old Android/Opera this._zoomAnimated = this.options.zoomAnimation && L.DomUtil.TRANSITION && L.Browser.any3d && !L.Browser.android23 && !L.Browser.mobileOpera; // zoom transitions run with the same duration for all layers, so if one of transitionend events // happens after starting zoom animation (propagating to the map pane), we know that it ended globally if (this._zoomAnimated) { L.DomEvent.on(this._mapPane, L.DomUtil.TRANSITION_END, this._catchTransitionEnd, this); } }); } L.Map.include(!L.DomUtil.TRANSITION ? {} : { _catchTransitionEnd: function () { if (this._animatingZoom) { this._onZoomTransitionEnd(); } }, _tryAnimatedZoom: function (center, zoom, options) { if (this._animatingZoom) { return true; } options = options || {}; // don't animate if disabled, not supported or zoom difference is too large if (!this._zoomAnimated || options.animate === false || Math.abs(zoom - this._zoom) > this.options.zoomAnimationThreshold) { return false; } // offset is the pixel coords of the zoom origin relative to the current center var scale = this.getZoomScale(zoom), offset = this._getCenterOffset(center)._divideBy(1 - 1 / scale), origin = this._getCenterLayerPoint()._add(offset); // don't animate if the zoom origin isn't within one screen from the current center, unless forced if (options.animate !== true && !this.getSize().contains(offset)) { return false; } this .fire('movestart') .fire('zoomstart'); this._animateZoom(center, zoom, origin, scale, null, true); return true; }, _animateZoom: function (center, zoom, origin, scale, delta, backwards) { this._animatingZoom = true; // put transform transition on all layers with leaflet-zoom-animated class L.DomUtil.addClass(this._mapPane, 'leaflet-zoom-anim'); // remember what center/zoom to set after animation this._animateToCenter = center; this._animateToZoom = zoom; // disable any dragging during animation if (L.Draggable) { L.Draggable._disabled = true; } this.fire('zoomanim', { center: center, zoom: zoom, origin: origin, scale: scale, delta: delta, backwards: backwards }); }, _onZoomTransitionEnd: function () { this._animatingZoom = false; L.DomUtil.removeClass(this._mapPane, 'leaflet-zoom-anim'); this._resetView(this._animateToCenter, this._animateToZoom, true, true); if (L.Draggable) { L.Draggable._disabled = false; } } }); /* Zoom animation logic for L.TileLayer. */ L.TileLayer.include({ _animateZoom: function (e) { if (!this._animating) { this._animating = true; this._prepareBgBuffer(); } var bg = this._bgBuffer, transform = L.DomUtil.TRANSFORM, initialTransform = e.delta ? L.DomUtil.getTranslateString(e.delta) : bg.style[transform], scaleStr = L.DomUtil.getScaleString(e.scale, e.origin); bg.style[transform] = e.backwards ? 
scaleStr + ' ' + initialTransform : initialTransform + ' ' + scaleStr; }, _endZoomAnim: function () { var front = this._tileContainer, bg = this._bgBuffer; front.style.visibility = ''; front.parentNode.appendChild(front); // Bring to fore // force reflow L.Util.falseFn(bg.offsetWidth); this._animating = false; }, _clearBgBuffer: function () { var map = this._map; if (map && !map._animatingZoom && !map.touchZoom._zooming) { this._bgBuffer.innerHTML = ''; this._bgBuffer.style[L.DomUtil.TRANSFORM] = ''; } }, _prepareBgBuffer: function () { var front = this._tileContainer, bg = this._bgBuffer; // if foreground layer doesn't have many tiles but bg layer does, // keep the existing bg layer and just zoom it some more var bgLoaded = this._getLoadedTilesPercentage(bg), frontLoaded = this._getLoadedTilesPercentage(front); if (bg && bgLoaded > 0.5 && frontLoaded < 0.5) { front.style.visibility = 'hidden'; this._stopLoadingImages(front); return; } // prepare the buffer to become the front tile pane bg.style.visibility = 'hidden'; bg.style[L.DomUtil.TRANSFORM] = ''; // switch out the current layer to be the new bg layer (and vice-versa) this._tileContainer = bg; bg = this._bgBuffer = front; this._stopLoadingImages(bg); //prevent bg buffer from clearing right after zoom clearTimeout(this._clearBgBufferTimer); }, _getLoadedTilesPercentage: function (container) { var tiles = container.getElementsByTagName('img'), i, len, count = 0; for (i = 0, len = tiles.length; i < len; i++) { if (tiles[i].complete) { count++; } } return count / len; }, // stops loading all tiles in the background layer _stopLoadingImages: function (container) { var tiles = Array.prototype.slice.call(container.getElementsByTagName('img')), i, len, tile; for (i = 0, len = tiles.length; i < len; i++) { tile = tiles[i]; if (!tile.complete) { tile.onload = L.Util.falseFn; tile.onerror = L.Util.falseFn; tile.src = L.Util.emptyImageUrl; tile.parentNode.removeChild(tile); } } } }); /* * Provides L.Map with convenient shortcuts for using browser geolocation features. */ L.Map.include({ _defaultLocateOptions: { watch: false, setView: false, maxZoom: Infinity, timeout: 10000, maximumAge: 0, enableHighAccuracy: false }, locate: function (/*Object*/ options) { options = this._locateOptions = L.extend(this._defaultLocateOptions, options); if (!navigator.geolocation) { this._handleGeolocationError({ code: 0, message: 'Geolocation not supported.' }); return this; } var onResponse = L.bind(this._handleGeolocationResponse, this), onError = L.bind(this._handleGeolocationError, this); if (options.watch) { this._locationWatchId = navigator.geolocation.watchPosition(onResponse, onError, options); } else { navigator.geolocation.getCurrentPosition(onResponse, onError, options); } return this; }, stopLocate: function () { if (navigator.geolocation) { navigator.geolocation.clearWatch(this._locationWatchId); } if (this._locateOptions) { this._locateOptions.setView = false; } return this; }, _handleGeolocationError: function (error) { var c = error.code, message = error.message || (c === 1 ? 'permission denied' : (c === 2 ? 'position unavailable' : 'timeout')); if (this._locateOptions.setView && !this._loaded) { this.fitWorld(); } this.fire('locationerror', { code: c, message: 'Geolocation error: ' + message + '.' 
}); }, _handleGeolocationResponse: function (pos) { var lat = pos.coords.latitude, lng = pos.coords.longitude, latlng = new L.LatLng(lat, lng), latAccuracy = 180 * pos.coords.accuracy / 40075017, lngAccuracy = latAccuracy / Math.cos(L.LatLng.DEG_TO_RAD * lat), bounds = L.latLngBounds( [lat - latAccuracy, lng - lngAccuracy], [lat + latAccuracy, lng + lngAccuracy]), options = this._locateOptions; if (options.setView) { var zoom = Math.min(this.getBoundsZoom(bounds), options.maxZoom); this.setView(latlng, zoom); } var data = { latlng: latlng, bounds: bounds, }; for (var i in pos.coords) { if (typeof pos.coords[i] === 'number') { data[i] = pos.coords[i]; } } this.fire('locationfound', data); } }); }(window, document)); },{}],37:[function(require,module,exports){ /*! * mustache.js - Logic-less {{mustache}} templates with JavaScript * http://github.com/janl/mustache.js */ /*global define: false*/ (function (root, factory) { if (typeof exports === "object" && exports) { factory(exports); // CommonJS } else { var mustache = {}; factory(mustache); if (typeof define === "function" && define.amd) { define(mustache); // AMD } else { root.Mustache = mustache; // <script> } } }(this, function (mustache) { var whiteRe = /\s*/; var spaceRe = /\s+/; var nonSpaceRe = /\S/; var eqRe = /\s*=/; var curlyRe = /\s*\}/; var tagRe = /#|\^|\/|>|\{|&|=|!/; // Workaround for https://issues.apache.org/jira/browse/COUCHDB-577 // See https://github.com/janl/mustache.js/issues/189 var RegExp_test = RegExp.prototype.test; function testRegExp(re, string) { return RegExp_test.call(re, string); } function isWhitespace(string) { return !testRegExp(nonSpaceRe, string); } var Object_toString = Object.prototype.toString; var isArray = Array.isArray || function (object) { return Object_toString.call(object) === '[object Array]'; }; function isFunction(object) { return typeof object === 'function'; } function escapeRegExp(string) { return string.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g, "\\$&"); } var entityMap = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': '&quot;', "'": '&#39;', "/": '&#x2F;' }; function escapeHtml(string) { return String(string).replace(/[&<>"'\/]/g, function (s) { return entityMap[s]; }); } function Scanner(string) { this.string = string; this.tail = string; this.pos = 0; } /** * Returns `true` if the tail is empty (end of string). */ Scanner.prototype.eos = function () { return this.tail === ""; }; /** * Tries to match the given regular expression at the current position. * Returns the matched text if it can match, the empty string otherwise. */ Scanner.prototype.scan = function (re) { var match = this.tail.match(re); if (match && match.index === 0) { var string = match[0]; this.tail = this.tail.substring(string.length); this.pos += string.length; return string; } return ""; }; /** * Skips all text until the given regular expression can be matched. Returns * the skipped string, which is the entire tail if no match can be made. */ Scanner.prototype.scanUntil = function (re) { var index = this.tail.search(re), match; switch (index) { case -1: match = this.tail; this.tail = ""; break; case 0: match = ""; break; default: match = this.tail.substring(0, index); this.tail = this.tail.substring(index); } this.pos += match.length; return match; }; function Context(view, parent) { this.view = view == null ? {} : view; this.parent = parent; this._cache = { '.': this.view }; } Context.make = function (view) { return (view instanceof Context) ? 
view : new Context(view); }; Context.prototype.push = function (view) { return new Context(view, this); }; Context.prototype.lookup = function (name) { var value; if (name in this._cache) { value = this._cache[name]; } else { var context = this; while (context) { if (name.indexOf('.') > 0) { value = context.view; var names = name.split('.'), i = 0; while (value != null && i < names.length) { value = value[names[i++]]; } } else { value = context.view[name]; } if (value != null) break; context = context.parent; } this._cache[name] = value; } if (isFunction(value)) { value = value.call(this.view); } return value; }; function Writer() { this.clearCache(); } Writer.prototype.clearCache = function () { this._cache = {}; this._partialCache = {}; }; Writer.prototype.compile = function (template, tags) { var fn = this._cache[template]; if (!fn) { var tokens = mustache.parse(template, tags); fn = this._cache[template] = this.compileTokens(tokens, template); } return fn; }; Writer.prototype.compilePartial = function (name, template, tags) { var fn = this.compile(template, tags); this._partialCache[name] = fn; return fn; }; Writer.prototype.getPartial = function (name) { if (!(name in this._partialCache) && this._loadPartial) { this.compilePartial(name, this._loadPartial(name)); } return this._partialCache[name]; }; Writer.prototype.compileTokens = function (tokens, template) { var self = this; return function (view, partials) { if (partials) { if (isFunction(partials)) { self._loadPartial = partials; } else { for (var name in partials) { self.compilePartial(name, partials[name]); } } } return renderTokens(tokens, self, Context.make(view), template); }; }; Writer.prototype.render = function (template, view, partials) { return this.compile(template)(view, partials); }; /** * Low-level function that renders the given `tokens` using the given `writer` * and `context`. The `template` string is only needed for templates that use * higher-order sections to extract the portion of the original template that * was contained in that section. */ function renderTokens(tokens, writer, context, template) { var buffer = ''; // This function is used to render an artbitrary template // in the current context by higher-order functions. function subRender(template) { return writer.render(template, context); } var token, tokenValue, value; for (var i = 0, len = tokens.length; i < len; ++i) { token = tokens[i]; tokenValue = token[1]; switch (token[0]) { case '#': value = context.lookup(tokenValue); if (typeof value === 'object' || typeof value === 'string') { if (isArray(value)) { for (var j = 0, jlen = value.length; j < jlen; ++j) { buffer += renderTokens(token[4], writer, context.push(value[j]), template); } } else if (value) { buffer += renderTokens(token[4], writer, context.push(value), template); } } else if (isFunction(value)) { var text = template == null ? null : template.slice(token[3], token[5]); value = value.call(context.view, text, subRender); if (value != null) buffer += value; } else if (value) { buffer += renderTokens(token[4], writer, context, template); } break; case '^': value = context.lookup(tokenValue); // Use JavaScript's definition of falsy. Include empty arrays. 
// See https://github.com/janl/mustache.js/issues/186 if (!value || (isArray(value) && value.length === 0)) { buffer += renderTokens(token[4], writer, context, template); } break; case '>': value = writer.getPartial(tokenValue); if (isFunction(value)) buffer += value(context); break; case '&': value = context.lookup(tokenValue); if (value != null) buffer += value; break; case 'name': value = context.lookup(tokenValue); if (value != null) buffer += mustache.escape(value); break; case 'text': buffer += tokenValue; break; } } return buffer; } /** * Forms the given array of `tokens` into a nested tree structure where * tokens that represent a section have two additional items: 1) an array of * all tokens that appear in that section and 2) the index in the original * template that represents the end of that section. */ function nestTokens(tokens) { var tree = []; var collector = tree; var sections = []; var token; for (var i = 0, len = tokens.length; i < len; ++i) { token = tokens[i]; switch (token[0]) { case '#': case '^': sections.push(token); collector.push(token); collector = token[4] = []; break; case '/': var section = sections.pop(); section[5] = token[2]; collector = sections.length > 0 ? sections[sections.length - 1][4] : tree; break; default: collector.push(token); } } return tree; } /** * Combines the values of consecutive text tokens in the given `tokens` array * to a single token. */ function squashTokens(tokens) { var squashedTokens = []; var token, lastToken; for (var i = 0, len = tokens.length; i < len; ++i) { token = tokens[i]; if (token) { if (token[0] === 'text' && lastToken && lastToken[0] === 'text') { lastToken[1] += token[1]; lastToken[3] = token[3]; } else { lastToken = token; squashedTokens.push(token); } } } return squashedTokens; } function escapeTags(tags) { return [ new RegExp(escapeRegExp(tags[0]) + "\\s*"), new RegExp("\\s*" + escapeRegExp(tags[1])) ]; } /** * Breaks up the given `template` string into a tree of token objects. If * `tags` is given here it must be an array with two string values: the * opening and closing tags used in the template (e.g. ["<%", "%>"]). Of * course, the default is to use mustaches (i.e. Mustache.tags). */ function parseTemplate(template, tags) { template = template || ''; tags = tags || mustache.tags; if (typeof tags === 'string') tags = tags.split(spaceRe); if (tags.length !== 2) throw new Error('Invalid tags: ' + tags.join(', ')); var tagRes = escapeTags(tags); var scanner = new Scanner(template); var sections = []; // Stack to hold section tokens var tokens = []; // Buffer to hold the tokens var spaces = []; // Indices of whitespace tokens on the current line var hasTag = false; // Is there a {{tag}} on the current line? var nonSpace = false; // Is there a non-space char on the current line? // Strips all whitespace tokens array for the current line // if there was a {{#tag}} on it and otherwise only space. function stripSpace() { if (hasTag && !nonSpace) { while (spaces.length) { delete tokens[spaces.pop()]; } } else { spaces = []; } hasTag = false; nonSpace = false; } var start, type, value, chr, token, openSection; while (!scanner.eos()) { start = scanner.pos; // Match any text between tags. value = scanner.scanUntil(tagRes[0]); if (value) { for (var i = 0, len = value.length; i < len; ++i) { chr = value.charAt(i); if (isWhitespace(chr)) { spaces.push(tokens.length); } else { nonSpace = true; } tokens.push(['text', chr, start, start + 1]); start += 1; // Check for whitespace on the current line. 
if (chr == '\n') stripSpace(); } } // Match the opening tag. if (!scanner.scan(tagRes[0])) break; hasTag = true; // Get the tag type. type = scanner.scan(tagRe) || 'name'; scanner.scan(whiteRe); // Get the tag value. if (type === '=') { value = scanner.scanUntil(eqRe); scanner.scan(eqRe); scanner.scanUntil(tagRes[1]); } else if (type === '{') { value = scanner.scanUntil(new RegExp('\\s*' + escapeRegExp('}' + tags[1]))); scanner.scan(curlyRe); scanner.scanUntil(tagRes[1]); type = '&'; } else { value = scanner.scanUntil(tagRes[1]); } // Match the closing tag. if (!scanner.scan(tagRes[1])) throw new Error('Unclosed tag at ' + scanner.pos); token = [type, value, start, scanner.pos]; tokens.push(token); if (type === '#' || type === '^') { sections.push(token); } else if (type === '/') { // Check section nesting. openSection = sections.pop(); if (!openSection) { throw new Error('Unopened section "' + value + '" at ' + start); } if (openSection[1] !== value) { throw new Error('Unclosed section "' + openSection[1] + '" at ' + start); } } else if (type === 'name' || type === '{' || type === '&') { nonSpace = true; } else if (type === '=') { // Set the tags for the next time around. tags = value.split(spaceRe); if (tags.length !== 2) { throw new Error('Invalid tags at ' + start + ': ' + tags.join(', ')); } tagRes = escapeTags(tags); } } // Make sure there are no open sections when we're done. openSection = sections.pop(); if (openSection) { throw new Error('Unclosed section "' + openSection[1] + '" at ' + scanner.pos); } return nestTokens(squashTokens(tokens)); } mustache.name = "mustache.js"; mustache.version = "0.7.3"; mustache.tags = ["{{", "}}"]; mustache.Scanner = Scanner; mustache.Context = Context; mustache.Writer = Writer; mustache.parse = parseTemplate; // Export the escaping function so that the user may override it. // See https://github.com/janl/mustache.js/issues/244 mustache.escape = escapeHtml; // All Mustache.* functions use this writer. var defaultWriter = new Writer(); /** * Clears all cached templates and partials in the default writer. */ mustache.clearCache = function () { return defaultWriter.clearCache(); }; /** * Compiles the given `template` to a reusable function using the default * writer. */ mustache.compile = function (template, tags) { return defaultWriter.compile(template, tags); }; /** * Compiles the partial with the given `name` and `template` to a reusable * function using the default writer. */ mustache.compilePartial = function (name, template, tags) { return defaultWriter.compilePartial(name, template, tags); }; /** * Compiles the given array of tokens (the output of a parse) to a reusable * function using the default writer. */ mustache.compileTokens = function (tokens, template) { return defaultWriter.compileTokens(tokens, template); }; /** * Renders the `template` with the given `view` and `partials` using the * default writer. */ mustache.render = function (template, view, partials) { return defaultWriter.render(template, view, partials); }; // This is here for backwards compatibility with 0.4.x. 
mustache.to_html = function (template, view, partials, send) { var result = mustache.render(template, view, partials); if (isFunction(send)) { send(result); } else { return result; } }; })); },{}],38:[function(require,module,exports){ module.exports={ "author": { "name": "MapBox" }, "name": "mapbox.js", "description": "mapbox javascript api", "version": "1.3.1", "homepage": "http://mapbox.com/", "repository": { "type": "git", "url": "git://github.com/mapbox/mapbox.js.git" }, "main": "index.js", "dependencies": { "leaflet": "0.6.2", "mustache": "~0.7.2", "corslite": "0.0.3", "json3": "~3.2.5" }, "scripts": { "test": "mocha-phantomjs test/index.html" }, "devDependencies": { "leaflet-hash": "git://github.com/mlevans/leaflet-hash.git#b039a3aa4e2492a5c7448075172ac26769e601d6", "leaflet-fullscreen": "0.0.0", "uglify-js": "~2.2.5", "mocha": "~1.9", "expect.js": "~0.2.0", "sinon": "~1.7.3", "mocha-phantomjs": "~1.1.1", "happen": "~0.1.2", "browserify": "~2.22.0" }, "optionalDependencies": {}, "engines": { "node": "*" }, "bugs": { "url": "https://github.com/mapbox/mapbox.js/issues" }, "_id": "[email protected]", "dist": { "shasum": "a6d144286157eecf7273b202782b31a695450f6a", "tarball": "http://registry.npmjs.org/mapbox.js/-/mapbox.js-1.3.1.tgz" }, "_from": "mapbox.js@>=1.3.1 <1.4.0", "_npmVersion": "1.2.32", "_npmUser": { "name": "tmcw", "email": "[email protected]" }, "maintainers": [ { "name": "tmcw", "email": "[email protected]" }, { "name": "tristen", "email": "[email protected]" }, { "name": "ansis", "email": "[email protected]" }, { "name": "yhahn", "email": "[email protected]" }, { "name": "willwhite", "email": "[email protected]" }, { "name": "jfirebaugh", "email": "[email protected]" }, { "name": "heyitsgarrett", "email": "[email protected]" } ], "directories": {}, "_shasum": "a6d144286157eecf7273b202782b31a695450f6a", "_resolved": "https://registry.npmjs.org/mapbox.js/-/mapbox.js-1.3.1.tgz" } },{}],39:[function(require,module,exports){ 'use strict'; module.exports = { HTTP_URLS: [ 'http://a.tiles.mapbox.com/v3/', 'http://b.tiles.mapbox.com/v3/', 'http://c.tiles.mapbox.com/v3/', 'http://d.tiles.mapbox.com/v3/'], FORCE_HTTPS: false, HTTPS_URLS: [ 'https://a.tiles.mapbox.com/v3/', 'https://b.tiles.mapbox.com/v3/', 'https://c.tiles.mapbox.com/v3/', 'https://d.tiles.mapbox.com/v3/'] }; },{}],40:[function(require,module,exports){ 'use strict'; var util = require('./util'), urlhelper = require('./url'), request = require('./request'); // Low-level geocoding interface - wraps specific API calls and their // return values. 
module.exports = function(_) { var geocoder = {}, url; geocoder.getURL = function(_) { return url; }; geocoder.setURL = function(_) { url = urlhelper.jsonify(_); return geocoder; }; geocoder.setID = function(_) { util.strict(_, 'string'); geocoder.setURL(urlhelper.base() + _ + '/geocode/{query}.json'); return geocoder; }; geocoder.setTileJSON = function(_) { util.strict(_, 'object'); geocoder.setURL(_.geocoder); return geocoder; }; geocoder.queryURL = function(_) { util.strict(_, 'string'); if (!geocoder.getURL()) throw new Error('Geocoding map ID not set'); return L.Util.template(geocoder.getURL(), { query: encodeURIComponent(_) }); }; geocoder.query = function(_, callback) { util.strict(_, 'string'); util.strict(callback, 'function'); request(geocoder.queryURL(_), function(err, json) { if (json && json.results && json.results.length) { var res = { results: json.results, latlng: [json.results[0][0].lat, json.results[0][0].lon] }; if (json.results[0][0].bounds !== undefined) { res.bounds = json.results[0][0].bounds; res.lbounds = util.lbounds(res.bounds); } callback(null, res); } else callback(err || true); }); return geocoder; }; // a reverse geocode: // // geocoder.reverseQuery([80, 20]) geocoder.reverseQuery = function(_, callback) { var q = ''; function norm(x) { if (x.lat !== undefined && x.lng !== undefined) return x.lng + ',' + x.lat; else if (x.lat !== undefined && x.lon !== undefined) return x.lon + ',' + x.lat; else return x[0] + ',' + x[1]; } if (_.length && _[0].length) { for (var i = 0, pts = []; i < _.length; i++) pts.push(norm(_[i])); q = pts.join(';'); } else q = norm(_); request(geocoder.queryURL(q), function(err, json) { callback(err, json); }); return geocoder; }; if (typeof _ === 'string') { if (_.indexOf('/') == -1) geocoder.setID(_); else geocoder.setURL(_); } else if (typeof _ === 'object') geocoder.setTileJSON(_); return geocoder; }; },{"./request":50,"./url":54,"./util":55}],41:[function(require,module,exports){ 'use strict'; var geocoder = require('./geocoder'); var GeocoderControl = L.Control.extend({ includes: L.Mixin.Events, options: { position: 'topleft' }, initialize: function(_) { this.geocoder = geocoder(_); }, setURL: function(_) { this.geocoder.setURL(_); return this; }, getURL: function() { return this.geocoder.getURL(); }, setID: function(_) { this.geocoder.setID(_); return this; }, setTileJSON: function(_) { this.geocoder.setTileJSON(_); return this; }, _toggle: function(e) { if (e) L.DomEvent.stop(e); if (L.DomUtil.hasClass(this._container, 'active')) { L.DomUtil.removeClass(this._container, 'active'); this._results.innerHTML = ''; this._input.blur(); } else { L.DomUtil.addClass(this._container, 'active'); this._input.focus(); this._input.select(); } }, _closeIfOpen: function(e) { if (L.DomUtil.hasClass(this._container, 'active')) { L.DomUtil.removeClass(this._container, 'active'); this._results.innerHTML = ''; this._input.blur(); } }, onAdd: function(map) { var container = L.DomUtil.create('div', 'leaflet-control-mapbox-geocoder leaflet-bar leaflet-control'), link = L.DomUtil.create('a', 'leaflet-control-mapbox-geocoder-toggle mapbox-icon mapbox-icon-geocoder', container), results = L.DomUtil.create('div', 'leaflet-control-mapbox-geocoder-results', container), wrap = L.DomUtil.create('div', 'leaflet-control-mapbox-geocoder-wrap', container), form = L.DomUtil.create('form', 'leaflet-control-mapbox-geocoder-form', wrap), input = L.DomUtil.create('input', '', form); link.href = '#'; link.innerHTML = '&nbsp;'; input.type = 'text'; 
input.setAttribute('placeholder', 'Search'); L.DomEvent.addListener(link, 'click', this._toggle, this); L.DomEvent.addListener(form, 'submit', this._geocode, this); L.DomEvent.disableClickPropagation(container); this._map = map; this._results = results; this._input = input; this._form = form; this._map.on('click', this._closeIfOpen, this); return container; }, _geocode: function(e) { L.DomEvent.preventDefault(e); L.DomUtil.addClass(this._container, 'searching'); var map = this._map; this.geocoder.query(this._input.value, L.bind(function(err, resp) { L.DomUtil.removeClass(this._container, 'searching'); if (err || !resp || !resp.results || !resp.results.length) { this.fire('error', {error: err}); } else { this._results.innerHTML = ''; if (resp.results.length === 1 && resp.lbounds) { this._map.fitBounds(resp.lbounds); this._closeIfOpen(); } else { for (var i = 0, l = Math.min(resp.results.length, 5); i < l; i++) { var name = []; for (var j = 0; j < resp.results[i].length; j++) { resp.results[i][j].name && name.push(resp.results[i][j].name); } if (!name.length) continue; var r = L.DomUtil.create('a', '', this._results); r.innerHTML = name.join(', '); r.href = '#'; (function(result) { L.DomEvent.addListener(r, 'click', function(e) { var _ = result[0].bounds; map.fitBounds(L.latLngBounds([[_[1], _[0]], [_[3], _[2]]])); L.DomEvent.stop(e); }); })(resp.results[i]); } if (resp.results.length > 5) { var outof = L.DomUtil.create('span', '', this._results); outof.innerHTML = 'Top 5 of ' + resp.results.length + ' results'; } } this.fire('found', resp); } }, this)); } }); module.exports = function(options) { return new GeocoderControl(options); }; },{"./geocoder":40}],42:[function(require,module,exports){ 'use strict'; function utfDecode(c) { if (c >= 93) c--; if (c >= 35) c--; return c - 32; } module.exports = function(data) { return function(x, y) { if (!data) return; var idx = utfDecode(data.grid[y].charCodeAt(x)), key = data.keys[idx]; return data.data[key]; }; }; },{}],43:[function(require,module,exports){ 'use strict'; var util = require('./util'), Mustache = require('mustache'); var GridControl = L.Control.extend({ options: { pinnable: true, follow: false, sanitizer: require('./sanitize'), touchTeaser: true, location: true }, _currentContent: '', // pinned means that this control is on a feature and the user has likely // clicked. 
pinned will not become false unless the user clicks off // of the feature onto another or clicks x _pinned: false, initialize: function(_, options) { L.Util.setOptions(this, options); util.strict_instance(_, L.Class, 'L.mapbox.gridLayer'); this._layer = _; }, setTemplate: function(template) { this.options.template = template; }, _template: function(format, data) { if (!data) return; var template = this.options.template || this._layer.getTileJSON().template; if (template) { var d = {}; d['__' + format + '__'] = true; return this.options.sanitizer( Mustache.to_html(template, L.extend(d, data))); } }, // change the content of the tooltip HTML if it has changed, otherwise // noop _show: function(content, o) { if (content === this._currentContent) return; this._currentContent = content; if (this.options.follow) { this._popup.setContent(content) .setLatLng(o.latLng); if (this._map._popup !== this._popup) this._popup.openOn(this._map); } else { this._container.style.display = 'block'; this._contentWrapper.innerHTML = content; } }, _hide: function() { this._pinned = false; this._currentContent = ''; this._map.closePopup(); this._container.style.display = 'none'; this._contentWrapper.innerHTML = ''; L.DomUtil.removeClass(this._container, 'closable'); }, _mouseover: function(o) { if (o.data) { L.DomUtil.addClass(this._map._container, 'map-clickable'); } else { L.DomUtil.removeClass(this._map._container, 'map-clickable'); } if (this._pinned) return; var content = this._template('teaser', o.data); if (content) { this._show(content, o); } else { this._hide(); } }, _mousemove: function(o) { if (this._pinned) return; if (!this.options.follow) return; this._popup.setLatLng(o.latLng); }, _navigateTo: function(url) { window.top.location.href = url; }, _click: function(o) { var location_formatted = this._template('location', o.data); if (this.options.location && location_formatted && location_formatted.search(/^https?:/) === 0) { return this._navigateTo(this._template('location', o.data)); } if (!this.options.pinnable) return; var content = this._template('full', o.data); if (!content && this.options.touchTeaser && L.Browser.touch) { content = this._template('teaser', o.data); } if (content) { L.DomUtil.addClass(this._container, 'closable'); this._pinned = true; this._show(content, o); } else if (this._pinned) { L.DomUtil.removeClass(this._container, 'closable'); this._pinned = false; this._hide(); } }, _onPopupClose: function() { this._currentContent = null; this._pinned = false; }, _createClosebutton: function(container, fn) { var link = L.DomUtil.create('a', 'close', container); link.innerHTML = 'close'; link.href = '#'; link.title = 'close'; L.DomEvent .on(link, 'click', L.DomEvent.stopPropagation) .on(link, 'mousedown', L.DomEvent.stopPropagation) .on(link, 'dblclick', L.DomEvent.stopPropagation) .on(link, 'click', L.DomEvent.preventDefault) .on(link, 'click', fn, this); return link; }, onAdd: function(map) { this._map = map; var className = 'leaflet-control-grid map-tooltip', container = L.DomUtil.create('div', className), contentWrapper = L.DomUtil.create('div', 'map-tooltip-content'); // hide the container element initially container.style.display = 'none'; this._createClosebutton(container, this._hide); container.appendChild(contentWrapper); this._contentWrapper = contentWrapper; this._popup = new L.Popup({ autoPan: false, closeOnClick: false }); map.on('popupclose', this._onPopupClose, this); L.DomEvent .disableClickPropagation(container) // allow people to scroll tooltips with mousewheel 
.addListener(container, 'mousewheel', L.DomEvent.stopPropagation); this._layer .on('mouseover', this._mouseover, this) .on('mousemove', this._mousemove, this) .on('click', this._click, this); return container; }, onRemove: function (map) { map.off('popupclose', this._onPopupClose, this); this._layer .off('mouseover', this._mouseover, this) .off('mousemove', this._mousemove, this) .off('click', this._click, this); } }); module.exports = function(_, options) { return new GridControl(_, options); }; },{"./sanitize":51,"./util":55,"mustache":37}],44:[function(require,module,exports){ 'use strict'; var util = require('./util'), url = require('./url'), request = require('./request'), grid = require('./grid'); // forked from danzel/L.UTFGrid var GridLayer = L.Class.extend({ includes: [L.Mixin.Events, require('./load_tilejson')], options: { template: function() { return ''; } }, _mouseOn: null, _tilejson: {}, _cache: {}, initialize: function(_, options) { L.Util.setOptions(this, options); this._loadTileJSON(_); }, _setTileJSON: function(json) { util.strict(json, 'object'); L.extend(this.options, { grids: json.grids, minZoom: json.minzoom, maxZoom: json.maxzoom, bounds: json.bounds && util.lbounds(json.bounds) }); this._tilejson = json; this._cache = {}; this._update(); return this; }, getTileJSON: function() { return this._tilejson; }, active: function() { return !!(this._map && this.options.grids && this.options.grids.length); }, addTo: function (map) { map.addLayer(this); return this; }, onAdd: function(map) { this._map = map; this._update(); this._map .on('click', this._click, this) .on('mousemove', this._move, this) .on('moveend', this._update, this); }, onRemove: function() { this._map .off('click', this._click, this) .off('mousemove', this._move, this) .off('moveend', this._update, this); }, getData: function(latlng, callback) { if (!this.active()) return; var map = this._map, point = map.project(latlng), tileSize = 256, resolution = 4, x = Math.floor(point.x / tileSize), y = Math.floor(point.y / tileSize), max = map.options.crs.scale(map.getZoom()) / tileSize; x = (x + max) % max; y = (y + max) % max; this._getTile(map.getZoom(), x, y, function(grid) { var gridX = Math.floor((point.x - (x * tileSize)) / resolution), gridY = Math.floor((point.y - (y * tileSize)) / resolution); callback(grid(gridX, gridY)); }); return this; }, _click: function(e) { this.getData(e.latlng, L.bind(function(data) { this.fire('click', { latLng: e.latlng, data: data }); }, this)); }, _move: function(e) { this.getData(e.latlng, L.bind(function(data) { if (data !== this._mouseOn) { if (this._mouseOn) { this.fire('mouseout', { latLng: e.latlng, data: this._mouseOn }); } this.fire('mouseover', { latLng: e.latlng, data: data }); this._mouseOn = data; } else { this.fire('mousemove', { latLng: e.latlng, data: data }); } }, this)); }, _getTileURL: function(tilePoint) { var urls = this.options.grids, index = (tilePoint.x + tilePoint.y) % urls.length, url = urls[index]; return L.Util.template(url, tilePoint); }, // Load up all required json grid files _update: function() { if (!this.active()) return; var bounds = this._map.getPixelBounds(), z = this._map.getZoom(), tileSize = 256; if (z > this.options.maxZoom || z < this.options.minZoom) return; var nwTilePoint = new L.Point( Math.floor(bounds.min.x / tileSize), Math.floor(bounds.min.y / tileSize)), seTilePoint = new L.Point( Math.floor(bounds.max.x / tileSize), Math.floor(bounds.max.y / tileSize)), max = this._map.options.crs.scale(z) / tileSize; for (var x = nwTilePoint.x; 
x <= seTilePoint.x; x++) { for (var y = nwTilePoint.y; y <= seTilePoint.y; y++) { // x wrapped var xw = (x + max) % max, yw = (y + max) % max; this._getTile(z, xw, yw); } } }, _getTile: function(z, x, y, callback) { var key = z + '_' + x + '_' + y, tilePoint = L.point(x, y); tilePoint.z = z; if (!this._tileShouldBeLoaded(tilePoint)) { return; } if (key in this._cache) { if (!callback) return; if (typeof this._cache[key] === 'function') { callback(this._cache[key]); // Already loaded } else { this._cache[key].push(callback); // Pending } return; } this._cache[key] = []; if (callback) { this._cache[key].push(callback); } request(this._getTileURL(tilePoint), L.bind(function(err, json) { var callbacks = this._cache[key]; this._cache[key] = grid(json); for (var i = 0; i < callbacks.length; ++i) { callbacks[i](this._cache[key]); } }, this)); }, _tileShouldBeLoaded: function(tilePoint) { if (tilePoint.z > this.options.maxZoom || tilePoint.z < this.options.minZoom) { return false; } if (this.options.bounds) { var tileSize = 256, nwPoint = tilePoint.multiplyBy(tileSize), sePoint = nwPoint.add(new L.Point(tileSize, tileSize)), nw = this._map.unproject(nwPoint), se = this._map.unproject(sePoint), bounds = new L.LatLngBounds([nw, se]); if (!this.options.bounds.intersects(bounds)) { return false; } } return true; } }); module.exports = function(_, options) { return new GridLayer(_, options); }; },{"./grid":42,"./load_tilejson":46,"./request":50,"./url":54,"./util":55}],45:[function(require,module,exports){ 'use strict'; var LegendControl = L.Control.extend({ options: { position: 'bottomright', sanitizer: require('./sanitize') }, initialize: function(options) { L.setOptions(this, options); this._legends = {}; }, onAdd: function(map) { this._container = L.DomUtil.create('div', 'map-legends wax-legends'); L.DomEvent.disableClickPropagation(this._container); this._update(); return this._container; }, addLegend: function(text) { if (!text) { return this; } if (!this._legends[text]) { this._legends[text] = 0; } this._legends[text]++; return this._update(); }, removeLegend: function(text) { if (!text) { return this; } if (this._legends[text]) this._legends[text]--; return this._update(); }, _update: function() { if (!this._map) { return this; } this._container.innerHTML = ''; var hide = 'none'; for (var i in this._legends) { if (this._legends.hasOwnProperty(i) && this._legends[i]) { var div = this._container.appendChild(document.createElement('div')); div.className = 'map-legend wax-legend'; div.innerHTML = this.options.sanitizer(i); hide = 'block'; } } // hide the control entirely unless there is at least one legend; // otherwise there will be a small grey blemish on the map. 
this._container.style.display = hide; return this; } }); module.exports = function(options) { return new LegendControl(options); }; },{"./sanitize":51}],46:[function(require,module,exports){ 'use strict'; var request = require('./request'), url = require('./url'), util = require('./util'); module.exports = { _loadTileJSON: function(_) { if (typeof _ === 'string') { if (_.indexOf('/') == -1) { _ = url.base() + _ + '.json'; } request(url.secureFlag(_), L.bind(function(err, json) { if (err) { util.log('could not load TileJSON at ' + _); this.fire('error', {error: err}); } else if (json) { this._setTileJSON(json); this.fire('ready'); } }, this)); } else if (_ && typeof _ === 'object') { this._setTileJSON(_); } } }; },{"./request":50,"./url":54,"./util":55}],47:[function(require,module,exports){ 'use strict'; var util = require('./util'), tileLayer = require('./tile_layer'), markerLayer = require('./marker_layer'), gridLayer = require('./grid_layer'), gridControl = require('./grid_control'), legendControl = require('./legend_control'); var Map = L.Map.extend({ includes: [require('./load_tilejson')], options: { tileLayer: {}, markerLayer: {}, gridLayer: {}, legendControl: {}, gridControl: {} }, _tilejson: {}, initialize: function(element, _, options) { L.Map.prototype.initialize.call(this, element, options); // disable the default 'Powered by Leaflet' text if (this.attributionControl) this.attributionControl.setPrefix(''); if (this.options.tileLayer) { this.tileLayer = tileLayer(undefined, this.options.tileLayer); this.addLayer(this.tileLayer); } if (this.options.markerLayer) { this.markerLayer = markerLayer(undefined, this.options.markerLayer); this.addLayer(this.markerLayer); } if (this.options.gridLayer) { this.gridLayer = gridLayer(undefined, this.options.gridLayer); this.addLayer(this.gridLayer); } if (this.options.gridLayer && this.options.gridControl) { this.gridControl = gridControl(this.gridLayer, this.options.gridControl); this.addControl(this.gridControl); } if (this.options.legendControl) { this.legendControl = legendControl(this.options.legendControl); this.addControl(this.legendControl); } this._loadTileJSON(_); }, // Update certain properties on 'ready' event addLayer: function(layer) { if ('on' in layer) { layer.on('ready', L.bind(function() { this._updateLayer(layer); }, this)); } return L.Map.prototype.addLayer.call(this, layer); }, // use a javascript object of tilejson data to configure this layer _setTileJSON: function(_) { this._tilejson = _; this._initialize(_); return this; }, getTileJSON: function() { return this._tilejson; }, _initialize: function(json) { if (this.tileLayer) { this.tileLayer._setTileJSON(json); this._updateLayer(this.tileLayer); } if (this.markerLayer && !this.markerLayer.getGeoJSON() && json.data && json.data[0]) { this.markerLayer.loadURL(json.data[0]); } if (this.gridLayer) { this.gridLayer._setTileJSON(json); this._updateLayer(this.gridLayer); } if (this.legendControl && json.legend) { this.legendControl.addLegend(json.legend); } if (!this._loaded) { var zoom = json.center[2], center = L.latLng(json.center[1], json.center[0]); this.setView(center, zoom); } }, _updateLayer: function(layer) { if (!layer.options) return; if (this.attributionControl && this._loaded) { this.attributionControl.addAttribution(layer.options.attribution); } if (!(L.stamp(layer) in this._zoomBoundLayers) && (layer.options.maxZoom || layer.options.minZoom)) { this._zoomBoundLayers[L.stamp(layer)] = layer; } this._updateZoomLevels(); } }); module.exports = function(element, _, 
options) { return new Map(element, _, options); }; },{"./grid_control":43,"./grid_layer":44,"./legend_control":45,"./load_tilejson":46,"./marker_layer":49,"./tile_layer":53,"./util":55}],48:[function(require,module,exports){ 'use strict'; var url = require('./url'), sanitize = require('./sanitize'); // mapbox-related markers functionality // provide an icon from mapbox's simple-style spec and hosted markers // service function icon(fp) { fp = fp || {}; var sizes = { small: [20, 50], medium: [30, 70], large: [35, 90] }, size = fp['marker-size'] || 'medium', symbol = (fp['marker-symbol']) ? '-' + fp['marker-symbol'] : '', color = (fp['marker-color'] || '7e7e7e').replace('#', ''); return L.icon({ iconUrl: url.base() + 'marker/' + 'pin-' + size.charAt(0) + symbol + '+' + color + // detect and use retina markers, which are x2 resolution ((L.Browser.retina) ? '@2x' : '') + '.png', iconSize: sizes[size], iconAnchor: [sizes[size][0] / 2, sizes[size][1] / 2], popupAnchor: [0, -sizes[size][1] / 2] }); } // a factory that provides markers for Leaflet from MapBox's // [simple-style specification](https://github.com/mapbox/simplestyle-spec) // and [Markers API](http://mapbox.com/developers/api/#markers). function style(f, latlon) { return L.marker(latlon, { icon: icon(f.properties), title: f.properties.title }); } function createPopup(f, sanitizer) { if (!f || !f.properties) return ''; var popup = ''; if (f.properties.title) { popup += '<div class="marker-title">' + f.properties.title + '</div>'; } if (f.properties.description) { popup += '<div class="marker-description">' + f.properties.description + '</div>'; } return (sanitizer || sanitize)(popup); } module.exports = { icon: icon, style: style, createPopup: createPopup }; },{"./sanitize":51,"./url":54}],49:[function(require,module,exports){ 'use strict'; var util = require('./util'); var urlhelper = require('./url'); var request = require('./request'); var marker = require('./marker'); // # markerLayer // // A layer of markers, loaded from MapBox or else. Adds the ability // to reset markers, filter them, and load them from a GeoJSON URL. var MarkerLayer = L.FeatureGroup.extend({ options: { filter: function() { return true; }, sanitizer: require('./sanitize') }, initialize: function(_, options) { L.setOptions(this, options); this._layers = {}; if (typeof _ === 'string') { util.idUrl(_, this); // javascript object of TileJSON data } else if (_ && typeof _ === 'object') { this.setGeoJSON(_); } }, setGeoJSON: function(_) { this._geojson = _; this.clearLayers(); this._initialize(_); }, getGeoJSON: function() { return this._geojson; }, loadURL: function(url) { url = urlhelper.jsonify(url); request(url, L.bind(function(err, json) { if (err) { util.log('could not load markers at ' + url); this.fire('error', {error: err}); } else if (json) { this.setGeoJSON(json); this.fire('ready'); } }, this)); return this; }, loadID: function(id) { return this.loadURL(urlhelper.base() + id + '/markers.geojson'); }, setFilter: function(_) { this.options.filter = _; if (this._geojson) { this.clearLayers(); this._initialize(this._geojson); } return this; }, getFilter: function() { return this.options.filter; }, _initialize: function(json) { var features = L.Util.isArray(json) ? 
json : json.features, i, len; if (features) { for (i = 0, len = features.length; i < len; i++) { // Only add this if geometry or geometries are set and not null if (features[i].geometries || features[i].geometry || features[i].features) { this._initialize(features[i]); } } } else if (this.options.filter(json)) { var layer = L.GeoJSON.geometryToLayer(json, marker.style), popupHtml = marker.createPopup(json, this.options.sanitizer); layer.feature = json; if (popupHtml) { layer.bindPopup(popupHtml, { closeButton: false }); } this.addLayer(layer); } } }); module.exports = function(_, options) { return new MarkerLayer(_, options); }; },{"./marker":48,"./request":50,"./sanitize":51,"./url":54,"./util":55}],50:[function(require,module,exports){ 'use strict'; var corslite = require('corslite'), JSON3 = require('json3'), strict = require('./util').strict; module.exports = function(url, callback) { strict(url, 'string'); strict(callback, 'function'); corslite(url, function(err, resp) { if (!err && resp) { // hardcoded grid response if (resp.responseText[0] == 'g') { resp = JSON3.parse(resp.responseText .substring(5, resp.responseText.length - 2)); } else { resp = JSON3.parse(resp.responseText); } } callback(err, resp); }); }; },{"./util":55,"corslite":34,"json3":35}],51:[function(require,module,exports){ 'use strict'; var html_sanitize = require('../ext/sanitizer/html-sanitizer-bundle.js'); // https://bugzilla.mozilla.org/show_bug.cgi?id=255107 function cleanUrl(url) { if (/^https?/.test(url.getScheme())) return url.toString(); if ('data' == url.getScheme() && /^image/.test(url.getPath())) { return url.toString(); } } function cleanId(id) { return id; } module.exports = function(_) { if (!_) return ''; return html_sanitize(_, cleanUrl, cleanId); }; },{"../ext/sanitizer/html-sanitizer-bundle.js":30}],52:[function(require,module,exports){ 'use strict'; var ShareControl = L.Control.extend({ includes: [require('./load_tilejson')], options: { position: 'topleft', url: '' }, initialize: function(_, options) { L.setOptions(this, options); this._loadTileJSON(_); }, _setTileJSON: function(json) { this._tilejson = json; }, onAdd: function(map) { this._map = map; var container = L.DomUtil.create('div', 'leaflet-control-mapbox-share leaflet-bar'); var link = L.DomUtil.create('a', 'mapbox-share mapbox-icon mapbox-icon-share', container); link.href = '#'; L.DomEvent.addListener(link, 'click', this._share, this); L.DomEvent.disableClickPropagation(container); // Close any open popups this._map.on('mousedown', this._clickOut, this); return container; }, _clickOut: function(e) { if (this._popup) { this._map.removeLayer(this._popup); this._popup = null; return; } }, _share: function(e) { L.DomEvent.stop(e); var tilejson = this._tilejson || this._map._tilejson || {}, twitter = 'http://twitter.com/intent/tweet?status=' + encodeURIComponent(tilejson.name + '\n' + (tilejson.webpage || window.location)), facebook = 'https://www.facebook.com/sharer.php?u=' + encodeURIComponent(this.options.url || tilejson.webpage || window.location) + '&t=' + encodeURIComponent(tilejson.name), share = "<a class='leaflet-popup-close-button' href='#close'>×</a>" + ("<h3>Share this map</h3>" + "<div class='mapbox-share-buttons'><a class='mapbox-share-facebook mapbox-icon mapbox-icon-facebook' target='_blank' href='{{facebook}}'>Facebook</a>" + "<a class='mapbox-share-twitter mapbox-icon mapbox-icon-twitter' target='_blank' href='{{twitter}}'>Twitter</a></div>") .replace('{{twitter}}', twitter) .replace('{{facebook}}', facebook) + ("<h3>Get 
the embed code</h3>" + "<small>Copy and paste this HTML into your website or blog.</small>") + "<textarea rows=4>{{value}}</textarea>" .replace('{{value}}', ("&lt;iframe width='500' height='300' frameBorder='0' src='{{embed}}'&gt;&lt;/iframe&gt;" .replace('{{embed}}', tilejson.embed || window.location))); this._popup = L.marker(this._map.getCenter(), { zIndexOffset: 10000, icon: L.divIcon({ className: 'mapbox-share-popup', iconSize: L.point(360, 240), iconAnchor: L.point(180, 120), html: share }) }) .on('mousedown', function(e) { L.DomEvent.stopPropagation(e.originalEvent); }) .on('click', clickPopup, this).addTo(this._map); function clickPopup(e) { if (e.originalEvent && e.originalEvent.target.nodeName === 'TEXTAREA') { var target = e.originalEvent.target; target.focus(); target.select(); } else if (e.originalEvent && e.originalEvent.target.getAttribute('href') === '#close') { this._clickOut(e); } L.DomEvent.stop(e.originalEvent); } } }); module.exports = function(_, options) { return new ShareControl(_, options); }; },{"./load_tilejson":46}],53:[function(require,module,exports){ 'use strict'; var util = require('./util'), url = require('./url'); var TileLayer = L.TileLayer.extend({ includes: [require('./load_tilejson')], options: { format: 'png' }, // http://mapbox.com/developers/api/#image_quality formats: [ 'png', // PNG 'png32', 'png64', 'png128', 'png256', // JPG 'jpg70', 'jpg80', 'jpg90'], initialize: function(_, options) { L.TileLayer.prototype.initialize.call(this, undefined, options); this._tilejson = {}; if (options && options.detectRetina && L.Browser.retina && options.retinaVersion) { _ = options.retinaVersion; } if (options && options.format) { util.strict_oneof(options.format, this.formats); } this._loadTileJSON(_); }, setFormat: function(_) { util.strict(_, 'string'); this.options.format = _; this.redraw(); return this; }, // disable the setUrl function, which is not available on mapbox tilelayers setUrl: null, _setTileJSON: function(json) { util.strict(json, 'object'); L.extend(this.options, { tiles: json.tiles, attribution: json.attribution, minZoom: json.minzoom, maxZoom: json.maxzoom, tms: json.scheme === 'tms', bounds: json.bounds && util.lbounds(json.bounds) }); this._tilejson = json; this.redraw(); return this; }, getTileJSON: function() { return this._tilejson; }, // this is an exception to mapbox.js naming rules because it's called // by `L.map` getTileUrl: function(tilePoint) { var tiles = this.options.tiles, index = Math.abs(tilePoint.x + tilePoint.y) % tiles.length, url = tiles[index]; var templated = L.Util.template(url, tilePoint); if (!templated) return templated; else return templated.replace('.png', '.' + this.options.format); }, // TileJSON.TileLayers are added to the map immediately, so that they get // the desired z-index, but do not update until the TileJSON has been loaded. _update: function() { if (this.options.tiles) { L.TileLayer.prototype._update.call(this); } } }); module.exports = function(_, options) { return new TileLayer(_, options); }; },{"./load_tilejson":46,"./url":54,"./util":55}],54:[function(require,module,exports){ 'use strict'; var config = require('./config'); // Return the base url of a specific version of MapBox's API. 
// // `hash`, if provided must be a number and is used to distribute requests // against multiple `CNAME`s in order to avoid connection limits in browsers module.exports = { isSSL: function() { return 'https:' === document.location.protocol || config.FORCE_HTTPS; }, base: function(hash) { // By default, use public HTTP urls // Support HTTPS if the user has specified HTTPS urls to use, and this // page is under HTTPS var urls = this.isSSL() ? config.HTTPS_URLS : config.HTTP_URLS; if (hash === undefined || typeof hash !== 'number') { return urls[0]; } else { return urls[hash % urls.length]; } }, // Requests that contain URLs need a secure flag appended // to their URLs so that the server knows to send SSL-ified // resource references. secureFlag: function(url) { if (!this.isSSL()) return url; else if (url.match(/(\?|&)secure/)) return url; else if (url.indexOf('?') !== -1) return url + '&secure'; else return url + '?secure'; }, // Convert a JSONP url to a JSON URL. (MapBox TileJSON sometimes hardcodes JSONP.) jsonify: function(url) { return url.replace(/\.(geo)?jsonp(?=$|\?)/, '.$1json'); } }; },{"./config":39}],55:[function(require,module,exports){ 'use strict'; module.exports = { idUrl: function(_, t) { if (_.indexOf('/') == -1) t.loadID(_); else t.loadURL(_); }, log: function(_) { if (console && typeof console.error === 'function') { console.error(_); } }, strict: function(_, type) { if (typeof _ !== type) { throw new Error('Invalid argument: ' + type + ' expected'); } }, strict_instance: function(_, klass, name) { if (!(_ instanceof klass)) { throw new Error('Invalid argument: ' + name + ' expected'); } }, strict_oneof: function(_, values) { if (values.indexOf(_) == -1) { throw new Error('Invalid argument: ' + _ + ' given, valid values are ' + values.join(', ')); } }, lbounds: function(_) { // leaflet-compatible bounds, since leaflet does not do geojson return new L.LatLngBounds([[_[1], _[0]], [_[3], _[2]]]); } }; },{}],56:[function(require,module,exports){ var ich = require('icanhaz'); module.exports.initiateTableFilter = function(opts) { $('.clear').on("click", function() { $(this.id + ".noMatches").css("visibility", "hidden"); $(this.id + opts.filterDiv).val(""); makeTable(opts); }); $(opts.filterDiv).keyup(function(e) { var text = $(e.target).val(); searchTable(opts, text); }); $('#activityFilter option').change(function() { var text = document.getElementById("tableFilter").value; console.log(text); searchTable(opts, text); $('.spotRow').first().click(); }); } module.exports.searchTable = searchTable function searchTable(opts, searchTerm) { var filteredList = []; var is_IE = !!document.documentMode; var e = document.getElementById("activityFilter"); var strUser = e.options[e.selectedIndex].value; var dropdown = strUser.toLowerCase(); if (is_IE == false) { term_array = searchTerm.split(" "); opts.data.forEach(function(object) { var stringObject = JSON.stringify(object).toLowerCase(); var does_match = true; for (var i=0; i<term_array.length; i++) { if ((stringObject.includes(term_array[i].toLowerCase())) == false) { does_match = false; }} console.log(dropdown); if (strUser != "View All Parks") { if (object[dropdown] == "no") { does_match = false; } } if (does_match) { filteredList.push(object); } }); } else { opts.data.forEach(function(object) { var stringObject = JSON.stringify(object).toLowerCase(); if (stringObject.match(searchTerm.toLowerCase())) { console.log(strUser); if (strUser != "View All Parks") { console.log(object[dropdown]); if (object[dropdown] == "yes") { 
filteredList.push(object); } } else { filteredList.push(object); } } }) } if (filteredList.length === 0) { $(".noMatches").css("visibility", "inherit"); makeTable(opts, filteredList); } else { $(".noMatches").css("visibility", "hidden"); makeTable(opts, filteredList); } } module.exports.sortThings = sortThings function sortThings(opts, sorter, sorted, tableDiv) { if (opts.tableDiv != tableDiv) return opts.data.sort(function(a,b){ if (a[sorter]<b[sorter]) return -1 if (a[sorter]>b[sorter]) return 1 return 0 }) if (sorted === "descending") opts.data.reverse() makeTable(opts) var header $(tableDiv + " .tHeader").each(function(i, el){ var contents = resolveDataTitle($(el).text()) if (contents === sorter) header = el }) $(header).attr("data-sorted", sorted) } module.exports.resolveDataTitle = resolveDataTitle function resolveDataTitle(string) { var adjusted = string.toLowerCase().replace(/\s/g, '').replace(/\W/g, '') return adjusted } module.exports.initiateTableSorter = initiateTableSorter function initiateTableSorter(options) { $(document).on("click", ".tHeader", sendToSort) function sendToSort(event) { var tableDiv = "#" + $(event.target).closest("div").attr("id") var sorted = $(event.target).attr("data-sorted") if (sorted) { if (sorted === "descending") sorted = "ascending" else sorted = "descending" } else { sorted = "ascending" } var sorter = resolveDataTitle(event.target.innerHTML) var sortInfo = {"sorter": sorter, "sorted": sorted, "tableDiv": tableDiv} sortThings(options, sorter, sorted, tableDiv) } } module.exports.makeTable = makeTable function makeTable(opts, filteredList) { initiateTableSorter(opts) if (filteredList) var data = filteredList else var data = opts.data var tableId = opts.tableDiv.slice(1) if (!opts.pagination) { table(data, opts) } else { var allRows = data.length var totalPages = Math.ceil(allRows / opts.pagination) var currentPage = 1 var currentStart = (currentPage * opts.pagination) - opts.pagination var currentEnd = currentPage * opts.pagination var currentRows = data.slice(currentStart, currentEnd) table(currentRows, opts) if (opts.data.length > opts.pagination) writePreNext(opts.tableDiv, currentPage, currentPage, totalPages, data, opts.pagination) } } module.exports.setPagClicks = setPagClicks function setPagClicks(data, tableId, currentPage, pagination, totalPages) { $(".pagination-pre-" + tableId).addClass("no-pag") $(document).on("click", (".pagination-next-" + tableId), function() { if ($(this).hasClass("no-pag")) return currentPage = currentPage + 1 var nextPage = currentPage + 1 currentStart = (currentPage * pagination) - pagination currentEnd = currentPage * pagination if (currentPage >= totalPages) { currentRows = data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) $(".pagination-next-" + tableId).addClass("no-pag") $(".pagination-next-" + tableId) } else { currentRows = data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) } }) $(document).on("click", (".pagination-pre-" + tableId), function() { if (currentPage > 1) $(this).removeClass("no-pag") if ($(this).hasClass("no-pag")) return // if ((currentPage) === 2) { // $(".pagination-pre-" + tableId).addClass("no-pag"); console.log("on page one!", currentPage) // } currentPage = currentPage - 1 var nextPage = currentPage + 1 currentStart = (currentPage * pagination) - pagination currentEnd = currentPage * pagination // currentRows = 
data.slice(currentStart, currentEnd) // table(currentRows, "#" + tableId) // setPreNext("#" + tableId, currentPage, currentPage, totalPages) if (currentPage === 1) { currentRows = data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) $(".pagination-pre-" + tableId).addClass("no-pag") } else { currentRows = data.slice(currentStart, currentEnd) table(currentRows, "#" + tableId) setPreNext("#" + tableId, currentPage, currentPage, totalPages) } }) } module.exports.setPreNext = setPreNext function setPreNext(targetDiv, currentPage, currentPage, totalPages, data, pagination) { var tableId = targetDiv.slice(1) $(targetDiv).append("<div id='Pagination' pageno='" + currentPage + "'" + "class='table-pagination'>Showing page " + currentPage + " of " + totalPages + " <a class='pagination-pre-" + tableId + "'>Previous</a>" + " <a class='pagination-next-" + tableId + "'>Next</a></p></div>" ) } module.exports.writePreNext = writePreNext function writePreNext(targetDiv, currentPage, currentPage, totalPages, data, pagination) { var tableId = targetDiv.slice(1) $(targetDiv).append("<div id='Pagination' pageno='" + currentPage + "'" + "class='table-pagination'>Showing page " + currentPage + " of " + totalPages + " <a class='pagination-pre-" + tableId + "'>Previous</a>" + " <a class='pagination-next-" + tableId + "'>Next</a></p></div>" ) setPagClicks(data, tableId, currentPage, pagination, totalPages) } module.exports.clearPreNext = clearPreNext function clearPreNext() { $(".table-pagination").attr("display", "none") } module.exports.table = table function table(data, opts) { if (opts.templateID) { var templateID = opts.templateID } else var templateID = opts.tableDiv.replace("#", "") var tableContents = ich[templateID]( { rows: data }) $(opts.tableDiv).html(tableContents) } },{"icanhaz":2}]},{},[1]) ;
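//
// A minimal usage sketch for the table helpers above, not part of the original file.
// It assumes the bundle is exposed globally (e.g. as `Sheetsee`, as in typical
// sheetsee.js builds); the element ids, template name and data array are illustrative.
//
//   var opts = {
//     data: rows,                 // array of row objects, e.g. parsed spreadsheet rows
//     tableDiv: "#siteTable",     // container whose id matches an ICanHaz template
//     filterDiv: "#tableFilter",  // text input wired up by initiateTableFilter
//     pagination: 10              // rows per page; omit to render all rows at once
//   };
//   Sheetsee.makeTable(opts);
//   Sheetsee.initiateTableFilter(opts);
//
// Note that searchTable above also reads a <select id="activityFilter"> from the page,
// so that element must exist for filtering to work.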
remove compatibility testing code
js/sheetsee.js
remove compatibility testing code
<ide><path>s/sheetsee.js
<ide> var text = $(e.target).val();
<ide> searchTable(opts, text);
<ide> });
<del> $('#activityFilter option').change(function() {
<del> var text = document.getElementById("tableFilter").value;
<del> console.log(text);
<del> searchTable(opts, text);
<del> $('.spotRow').first().click();
<del> });
<ide> }
<ide>
<ide> module.exports.searchTable = searchTable
JavaScript
mit
72b8419f70805332d7fad23cdbd613425b31034f
0
KingPixil/moon,KingPixil/moon,KingPixil/moon,kbrsh/moon,kbrsh/moon
var expect = chai.expect; Moon.config.silent = true; console.log("[Moon] Running Tests..."); console.log("[Moon] Version: " + Moon.version); var createTestElement = function(id, html) { var el = document.createElement("div"); el.innerHTML = html; el.id = id; document.getElementById("moon-els").appendChild(el); return el; } // var MoonPerformance = { // init: function() { // var MoonBuild = Moon.prototype.build; // var MoonInit = Moon.prototype.init; // var MoonRender = Moon.prototype.render; // var MoonMount = Moon.prototype.mount; // var MoonPatch = Moon.prototype.patch; // // var formatNum = function(num) { // if(num >= 0.5) { // return num.toFixed(2) + 'ms' // } else { // return num.toFixed(2)*1000 + "µs"; // } // } // // var name = function(instance) { // return instance.$parent ? instance.$name : "root"; // } // // Moon.prototype.init = function() { // var id = name(this) + "@init"; // performance.mark("start " + id); // MoonInit.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // // Moon.prototype.build = function() { // var id = name(this) + "@build"; // performance.mark("start " + id); // MoonBuild.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // // Moon.prototype.render = function() { // var id = name(this) + "@render"; // performance.mark("start " + id); // var r = MoonRender.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // return r; // } // // Moon.prototype.mount = function() { // var id = name(this) + "@mount"; // performance.mark("start " + id); // MoonMount.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // // Moon.prototype.patch = function() { // var id = name(this) + "@patch"; // performance.mark("start " + id); // MoonPatch.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // } // } // // Moon.use(MoonPerformance); describe('Instance', function() { describe('Initializing', function() { createTestElement("initialize", ""); it('with new', function() { expect(new Moon({el: "#initialize"}) instanceof Moon).to.equal(true); }); }); describe('Destroy', function() { createTestElement("destroy", '{{msg}}'); var destroyApp = new Moon({ el: "#destroy", data: { msg: "Hello Moon!" 
} }); it('when destroyed', function() { destroyApp.destroy(); destroyApp.set('msg', 'New Value!'); Moon.nextTick(function() { expect(document.getElementById("destroy").innerHTML).to.not.equal("New Value!"); }); }); }); }); describe("Compiler", function() { it("should not compile comments", function() { var el = createTestElement("compilerComment", '<!-- comment -->'); var compilerCommentApp = new Moon({ el: "#compilerComment" }); expect(el.innerHTML).to.equal(""); }); it("should compile self closing elements", function() { var el = createTestElement("compilerSelfClosing", '<self-closing/>'); var compilerCommentApp = new Moon({ el: "#compilerSelfClosing", template: "<div><self-closing/></div>" }); expect(compilerCommentApp.$dom.children[0].type).to.equal("self-closing"); }); }); describe('Data', function() { createTestElement("data", '{{msg}}'); var dataApp = new Moon({ el: "#data", data: { msg: "Hello Moon!" } }); it('when initializing', function() { expect(document.getElementById("data").innerHTML).to.equal("Hello Moon!"); }); it('when setting', function() { dataApp.set('msg', 'New Value'); Moon.nextTick(function() { expect(document.getElementById("data").innerHTML).to.equal("New Value"); }); }); it('when getting', function() { expect(dataApp.get('msg')).to.equal("New Value"); }); }); describe('Methods', function() { createTestElement("method", '{{count}}'); var methodApp = new Moon({ el: "#method", data: { count: 0 }, methods: { increment: function() { methodApp.set('count', methodApp.get('count') + 1); } } }); it('when calling a method', function() { methodApp.callMethod('increment'); expect(methodApp.get('count')).to.equal(1); }); it('should update DOM', function() { methodApp.callMethod('increment'); Moon.nextTick(function() { expect(document.getElementById("method").innerHTML).to.equal('2'); }); }); }); describe("Directive", function() { describe('Custom Directive', function() { createTestElement("customDirective", '<span m-square="2" id="custom-directive-span"></span>'); Moon.directive("square", function(el, val, vdom) { var num = parseInt(val); el.textContent = val*val; for(var i = 0; i < vdom.children.length; i++) { vdom.children[i].val = val*val; } }); var customDirectiveApp = new Moon({ el: "#customDirective" }); it('should execute', function() { Moon.nextTick(function() { expect(document.getElementById("custom-directive-span").innerHTML).to.equal("4"); }); }); }); describe('If Directive', function() { createTestElement("if", '<p m-if="{{condition}}" id="if-condition">Condition True</p>'); var ifApp = new Moon({ el: "#if", data: { condition: true } }); it('should exist when true', function() { expect(document.getElementById('if-condition').innerHTML).to.equal('Condition True'); }); it('should not exist when false', function() { ifApp.set('condition', false); Moon.nextTick(function() { expect(document.getElementById('if-condition')).to.be.null; }); }); it('should not be present at runtime', function() { expect(document.getElementById('if-condition').getAttribute("m-if")).to.be.null; }); }); describe('Show Directive', function() { createTestElement("show", '<p m-show="{{condition}}" id="show-condition">Condition True</p>'); var showApp = new Moon({ el: "#show", data: { condition: true } }); it('should display when true', function() { expect(document.getElementById('show-condition').style.display).to.equal('block'); }); it('should not display when false', function() { showApp.set('condition', false); Moon.nextTick(function() { 
expect(document.getElementById('show-condition').style.display).to.equal('none'); }); }); it('should not be present at runtime', function() { expect(document.getElementById('show-condition').getAttribute("m-show")).to.be.null; }); }); describe('Model Directive', function() { createTestElement("model", '<p id="model-msg">{{msg}}</p><input type="text" m-model="msg" id="model-msg-input"/>'); var modelApp = new Moon({ el: "#model", data: { msg: "Hello Moon!" } }); it('should update value when initialized', function() { expect(document.getElementById('model-msg').innerHTML).to.equal('Hello Moon!'); }); it('should not be present at runtime', function() { expect(document.getElementById('model-msg-input').getAttribute("m-model")).to.be.null; }); }); describe('On Directive', function() { createTestElement("on", '<p id="on-count">{{count}}</p><button m-on="click:increment" id="on-increment-button">Increment</button><a id="on-modifier-link" href="https://kabir.ml" m-on="click.prevent:modifier">Link</a><button id="on-keycode-link" m-on="click.m:keycode"></button>'); var evt, modifier_active, keycode; Moon.config.keyCodes({ m: 77 }); var onApp = new Moon({ el: "#on", data: { count: 0 }, methods: { increment: function(e) { onApp.set('count', onApp.get('count') + 1); evt = e; }, modifier: function(e) { modifier_active = true; }, keycode: function() { keycode = true; } } }); it('should call a method', function() { document.getElementById("on-increment-button").click(); expect(onApp.get('count')).to.equal(1); }); it('should update DOM', function() { document.getElementById("on-increment-button").click(); Moon.nextTick(function() { expect(document.getElementById("on-count").innerHTML).to.equal('2'); }); }); it('should pass an event object', function() { expect(evt.target.tagName).to.equal('BUTTON'); }); it('should use modifiers', function() { document.getElementById("on-modifier-link").click(); expect(modifier_active).to.be.true; }); it('should use custom keycodes', function() { var e = document.createEvent('HTMLEvents'); e.initEvent("click", false, true); e.keyCode = 77; document.getElementById("on-keycode-link").dispatchEvent(e); expect(keycode).to.be.true; }); it('should not be present at runtime', function() { expect(document.getElementById('on-increment-button').getAttribute("m-on")).to.be.null; }); }); describe('For Directive', function() { createTestElement("for", "<ul id='forList'><li m-for='item in items'>{{item}}</li></ul>"); var forApp = new Moon({ el: "#for", data: { items: [1, 2, 3, 4, 5] } }); it('should render a list', function() { expect(document.getElementById('forList').childNodes.length).to.equal(5); }); it('should update a list', function() { var items = forApp.get("items"); items.push(6); forApp.set("items", items); Moon.nextTick(function() { expect(document.getElementById('forList').childNodes.length).to.equal(6); }); }); it('should not be present at runtime', function() { expect(document.getElementById('forList').childNodes[0].getAttribute("m-for")).to.be.null; }); }); describe('Text Directive', function() { createTestElement("text", '<span m-text="{{msg}}" id="text-directive-span"></span>'); var textApp = new Moon({ el: "#text", data: { msg: "Hello Moon!" 
} }); it('should fill DOM with a value', function() { expect(document.getElementById("text-directive-span").innerHTML).to.equal("Hello Moon!"); }); it('should not be present at runtime', function() { expect(document.getElementById('text-directive-span').getAttribute("m-text")).to.be.null; }); }); describe('HTML Directive', function() { createTestElement("html", '<span m-html="{{msg}}" id="html-directive-span"></span>'); var htmlApp = new Moon({ el: "#html", data: { msg: "<strong>Hello Moon!</strong>" } }); it('should fill DOM with a value', function() { expect(document.getElementById("html-directive-span").innerHTML).to.equal("<strong>Hello Moon!</strong>"); }); it('should not be present at runtime', function() { expect(document.getElementById('html-directive-span').getAttribute("m-html")).to.be.null; }); }); describe('Once Directive', function() { createTestElement("once", '<span m-once id="once-directive-span">{{msg}}</span>'); var onceApp = new Moon({ el: "#once", data: { msg: "Hello Moon!" } }); it('should fill DOM with a value', function() { expect(document.getElementById("once-directive-span").innerHTML).to.equal("Hello Moon!"); }); it('should not update element once value is updated', function() { onceApp.set('msg', "Changed"); Moon.nextTick(function() { expect(document.getElementById("once-directive-span").innerHTML).to.equal("Hello Moon!"); }); }); it('should not be present at runtime', function() { expect(document.getElementById('once-directive-span').getAttribute("m-once")).to.be.null; }); }); describe('Pre Directive', function() { createTestElement("pre", '<span m-pre id="pre-directive-span">{{msg}}</span>'); var preApp = new Moon({ el: "#pre", data: { msg: "Hello Moon!" } }); it('should not fill DOM with a value', function() { expect(document.getElementById("pre-directive-span").innerHTML).to.equal("{{msg}}"); }); it('should not update element once value is updated', function() { preApp.set('msg', "Changed"); Moon.nextTick(function() { expect(document.getElementById("pre-directive-span").innerHTML).to.equal("{{msg}}"); }); }); it('should not be present at runtime', function() { expect(document.getElementById('pre-directive-span').getAttribute("m-pre")).to.be.null; }); }); describe('Mask Directive', function() { createTestElement("mask", '<span m-mask id="mask-directive-span">{{msg}}</span>'); var maskApp = new Moon({ el: "#mask" }); it('should not be present at runtime', function() { expect(document.getElementById('mask-directive-span').getAttribute("m-mask")).to.be.null; }); }); }); describe('Plugin', function() { createTestElement("plugin", '<span m-empty id="plugin-span">{{msg}}</span>'); var emptyPlugin = { init: function(Moon) { Moon.directive('empty', function(el, val, vdom) { el.innerHTML = ""; for(var i = 0; i < vdom.children.length; i++) { vdom.children[i].meta.shouldRender = false; } }); } } Moon.use(emptyPlugin); var pluginApp = new Moon({ el: "#plugin", data: { msg: "Hello Moon!" } }); it('should execute', function() { expect(document.getElementById("plugin-span").innerHTML).to.equal(""); }); }); describe('Template', function() { createTestElement("template", ''); var templateApp = new Moon({ el: "#template", template: "<div id='template'>{{msg}}</div>", data: { msg: "Hello Moon!" 
} }); it('should use provided template', function() { expect(document.getElementById("template").innerHTML).to.equal("Hello Moon!"); }); it('should update', function() { templateApp.set("msg", "Changed"); Moon.nextTick(function() { expect(document.getElementById("template").innerHTML).to.equal("Changed"); }); }); }); describe('Custom Render', function() { createTestElement("render", ''); var renderApp = new Moon({ el: "#render", render: function(h) { return h('div', {id: "render"}, null, this.get('msg')) }, data: { msg: "Hello Moon!" } }); it('should use provided render function', function() { expect(document.getElementById("render").innerHTML).to.equal("Hello Moon!"); }); it('should update', function() { renderApp.set("msg", "Changed"); Moon.nextTick(function() { expect(document.getElementById("render").innerHTML).to.equal("Changed"); }); }); }); describe('Functional Component', function() { createTestElement("functional", '<functional-component someprop="{{parentMsg}}"></functional-component><slot-functional-component>Default Slot Content<span slot="named">Named Slot Content</span></slot-functional-component>'); Moon.component('functional-component', { functional: true, props: ['someprop'], render: function(h, ctx) { return h("h1", {class: "functionalComponent"}, null, ctx.data.someprop); } }); Moon.component('slot-functional-component', { functional: true, render: function(h, ctx) { return h("div", {class: "functionalSlotComponent"}, null, h("h1", {}, null, ctx.slots.default), h("h1", {}, null, ctx.slots.named)); } }); var functionalApp = new Moon({ el: "#functional", data: { parentMsg: "Hello Moon!" } }); it('should render HTML', function() { expect(document.getElementsByClassName("functionalComponent")).to.not.be.null; }); it('should render with props', function() { expect(document.getElementsByClassName("functionalComponent")[0].innerHTML).to.equal("Hello Moon!"); }); it('should render when updated', function() { functionalApp.set('parentMsg', 'Changed'); Moon.nextTick(function() { expect(document.getElementsByClassName("functionalComponent")[0].innerHTML).to.equal("Changed"); }); }); describe("Slots", function() { it('should render the default slot', function() { Moon.nextTick(function() { expect(document.getElementsByClassName("functionalSlotComponent")[0].childNodes[0].innerHTML).to.equal("Default Slot Content"); }); }); it('should render a named slot', function() { Moon.nextTick(function() { expect(document.getElementsByClassName("functionalSlotComponent")[0].childNodes[1].innerHTML).to.equal("<span>Named Slot Content</span>"); }); }); }); }); describe('Component', function() { createTestElement("component", '<my-component componentprop="{{parentMsg}}"></my-component>'); var componentConstructor = Moon.component('my-component', { props: ['componentprop', 'otherprop'], template: "<div>{{componentprop}}</div>" }); it("should create a constructor", function() { expect(new componentConstructor()).to.be.an.instanceof(Moon); }); // var componentApp = new Moon({ // el: "#component", // data: { // parentMsg: "Hello Moon!" 
// } // }); // it('should render HTML', function() { // expect(document.getElementById("component")).to.not.be.null; // }); // it('should render with props', function() { // expect(document.getElementById("component").innerHTML).to.equal("<div>Hello Moon!</div>"); // }); // it('should render when updated', function() { // componentApp.set('parentMsg', 'Changed'); // Moon.nextTick(function() { // expect(document.getElementById("component").innerHTML).to.equal("<div>Changed</div>"); // }); // }); }); describe("Events", function() { var bus = new Moon(); var evt1 = false, evt1_2 = false, handler1, globalEvt = false; describe("Handler", function() { it("should create an event listener", function() { handler1 = function() { evt1 = true; } bus.on('evt1', handler1); expect(bus.$events.evt1[0]).to.be.a("function"); }); it("should create multiple event listeners", function() { bus.on('evt1', function() { evt1_2 = true; }); expect(bus.$events.evt1[1]).to.be.a("function"); }); it("should create a global event listener", function() { bus.on('*', function() { globalEvt = true; }); expect(bus.$events["*"][0]).to.be.a("function"); }); }); describe("Emit", function() { it("should invoke all handlers", function() { bus.emit('evt1'); expect(evt1).to.be.true; expect(evt1_2).to.be.true; }); it("should call the global handler", function() { expect(globalEvt).to.be.true; }); }); describe("Removing", function() { it("should remove a handler", function() { bus.off('evt1', handler1); expect(bus.$events.evt1.length).to.equal(1); }); it("should be able to remove all handlers", function() { bus.removeEvents(); var allEvents = bus.$events.evt1.concat(bus.$events["*"]) expect(allEvents.length).to.equal(0); }); }); }); describe("Utilities", function() { it("should extend an object", function() { expect(Moon.util.extend({a: true, b: true}, {a: true, b: false, c: true})).to.deep.equal({a: true, b: false, c: true}); }); });
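//
// A minimal sketch of the spec pattern used throughout this file, kept here for
// reference when adding further cases; the element id "example" and the data values
// are illustrative only, not part of the original suite.
//
//   describe('Example', function() {
//     createTestElement("example", '{{msg}}');
//     var exampleApp = new Moon({ el: "#example", data: { msg: "Hello Moon!" } });
//     it('renders interpolated data', function() {
//       expect(document.getElementById("example").innerHTML).to.equal("Hello Moon!");
//     });
//     it('updates after set', function() {
//       exampleApp.set('msg', 'Changed');
//       Moon.nextTick(function() {
//         expect(document.getElementById("example").innerHTML).to.equal("Changed");
//       });
//     });
//   });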
test/js/test.js
var expect = chai.expect; Moon.config.silent = true; console.log("[Moon] Running Tests..."); console.log("[Moon] Version: " + Moon.version); var createTestElement = function(id, html) { var el = document.createElement("div"); el.innerHTML = html; el.id = id; document.getElementById("moon-els").appendChild(el); return el; } // var MoonPerformance = { // init: function() { // var MoonBuild = Moon.prototype.build; // var MoonInit = Moon.prototype.init; // var MoonRender = Moon.prototype.render; // var MoonMount = Moon.prototype.mount; // var MoonPatch = Moon.prototype.patch; // // var formatNum = function(num) { // if(num >= 0.5) { // return num.toFixed(2) + 'ms' // } else { // return num.toFixed(2)*1000 + "µs"; // } // } // // var name = function(instance) { // return instance.$parent ? instance.$name : "root"; // } // // Moon.prototype.init = function() { // var id = name(this) + "@init"; // performance.mark("start " + id); // MoonInit.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // // Moon.prototype.build = function() { // var id = name(this) + "@build"; // performance.mark("start " + id); // MoonBuild.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // // Moon.prototype.render = function() { // var id = name(this) + "@render"; // performance.mark("start " + id); // var r = MoonRender.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // return r; // } // // Moon.prototype.mount = function() { // var id = name(this) + "@mount"; // performance.mark("start " + id); // MoonMount.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // // Moon.prototype.patch = function() { // var id = name(this) + "@patch"; // performance.mark("start " + id); // MoonPatch.apply(this, arguments); // performance.mark("end " + id); // performance.measure(id, "start " + id, "end " + id); // var entries = performance.getEntriesByName(id); // } // } // } // // Moon.use(MoonPerformance); describe('Instance', function() { describe('Initializing', function() { createTestElement("initialize", ""); it('with new', function() { expect(new Moon({el: "#initialize"}) instanceof Moon).to.equal(true); }); }); describe('Destroy', function() { createTestElement("destroy", '{{msg}}'); var destroyApp = new Moon({ el: "#destroy", data: { msg: "Hello Moon!" 
} }); it('when destroyed', function() { destroyApp.destroy(); destroyApp.set('msg', 'New Value!'); Moon.nextTick(function() { expect(document.getElementById("destroy").innerHTML).to.not.equal("New Value!"); }); }); }); }); describe("Compiler", function() { it("should not compile comments", function() { var el = createTestElement("compilerComment", '<!-- comment -->'); var compilerCommentApp = new Moon({ el: "#compilerComment" }); expect(el.innerHTML).to.equal(""); }); it("should compile self closing elements", function() { var el = createTestElement("compilerSelfClosing", '<self-closing/>'); var compilerCommentApp = new Moon({ el: "#compilerSelfClosing", template: "<div><self-closing/></div>" }); expect(compilerCommentApp.$dom.children[0].type).to.equal("self-closing"); }); }); describe('Data', function() { createTestElement("data", '{{msg}}'); var dataApp = new Moon({ el: "#data", data: { msg: "Hello Moon!" } }); it('when initializing', function() { expect(document.getElementById("data").innerHTML).to.equal("Hello Moon!"); }); it('when setting', function() { dataApp.set('msg', 'New Value'); Moon.nextTick(function() { expect(document.getElementById("data").innerHTML).to.equal("New Value"); }); }); it('when getting', function() { expect(dataApp.get('msg')).to.equal("New Value"); }); }); describe('Methods', function() { createTestElement("method", '{{count}}'); var methodApp = new Moon({ el: "#method", data: { count: 0 }, methods: { increment: function() { methodApp.set('count', methodApp.get('count') + 1); } } }); it('when calling a method', function() { methodApp.callMethod('increment'); expect(methodApp.get('count')).to.equal(1); }); it('should update DOM', function() { methodApp.callMethod('increment'); Moon.nextTick(function() { expect(document.getElementById("method").innerHTML).to.equal('2'); }); }); }); describe("Directive", function() { describe('Custom Directive', function() { createTestElement("customDirective", '<span m-square="2" id="custom-directive-span"></span>'); Moon.directive("square", function(el, val, vdom) { var num = parseInt(val); el.textContent = val*val; for(var i = 0; i < vdom.children.length; i++) { vdom.children[i].val = val*val; } }); var customDirectiveApp = new Moon({ el: "#customDirective" }); it('should execute', function() { Moon.nextTick(function() { expect(document.getElementById("custom-directive-span").innerHTML).to.equal("4"); }); }); }); describe('If Directive', function() { createTestElement("if", '<p m-if="{{condition}}" id="if-condition">Condition True</p>'); var ifApp = new Moon({ el: "#if", data: { condition: true } }); it('should exist when true', function() { expect(document.getElementById('if-condition').innerHTML).to.equal('Condition True'); }); it('should not exist when false', function() { ifApp.set('condition', false); Moon.nextTick(function() { expect(document.getElementById('if-condition')).to.be.null; }); }); it('should not be present at runtime', function() { expect(document.getElementById('if-condition').getAttribute("m-if")).to.be.null; }); }); describe('Show Directive', function() { createTestElement("show", '<p m-show="{{condition}}" id="show-condition">Condition True</p>'); var showApp = new Moon({ el: "#show", data: { condition: true } }); it('should display when true', function() { expect(document.getElementById('show-condition').style.display).to.equal('block'); }); it('should not display when false', function() { showApp.set('condition', false); Moon.nextTick(function() { 
expect(document.getElementById('show-condition').style.display).to.equal('none'); }); }); it('should not be present at runtime', function() { expect(document.getElementById('show-condition').getAttribute("m-show")).to.be.null; }); }); describe('Model Directive', function() { createTestElement("model", '<p id="model-msg">{{msg}}</p><input type="text" m-model="msg" id="model-msg-input"/>'); var modelApp = new Moon({ el: "#model", data: { msg: "Hello Moon!" } }); it('should update value when initialized', function() { expect(document.getElementById('model-msg').innerHTML).to.equal('Hello Moon!'); }); it('should not be present at runtime', function() { expect(document.getElementById('model-msg-input').getAttribute("m-model")).to.be.null; }); }); describe('On Directive', function() { createTestElement("on", '<p id="on-count">{{count}}</p><button m-on="click:increment" id="on-increment-button">Increment</button><a id="on-modifier-link" href="https://kabir.ml" m-on="click.prevent:modifier">Link</a><button id="on-keycode-link" m-on="click.m:keycode"></button>'); var evt, modifier_active, keycode; Moon.config.keyCodes({ m: 77 }); var onApp = new Moon({ el: "#on", data: { count: 0 }, methods: { increment: function(e) { onApp.set('count', onApp.get('count') + 1); evt = e; }, modifier: function(e) { modifier_active = true; }, keycode: function() { keycode = true; } } }); it('should call a method', function() { document.getElementById("on-increment-button").click(); expect(onApp.get('count')).to.equal(1); }); it('should update DOM', function() { document.getElementById("on-increment-button").click(); Moon.nextTick(function() { expect(document.getElementById("on-count").innerHTML).to.equal('2'); }); }); it('should pass an event object', function() { expect(evt.target.tagName).to.equal('BUTTON'); }); it('should use modifiers', function() { document.getElementById("on-modifier-link").click(); expect(modifier_active).to.be.true; }); it('should use custom keycodes', function() { var e = document.createEvent('HTMLEvents'); e.initEvent("click", false, true); e.keyCode = 77; document.getElementById("on-keycode-link").dispatchEvent(e); expect(keycode).to.be.true; }); it('should not be present at runtime', function() { expect(document.getElementById('on-increment-button').getAttribute("m-on")).to.be.null; }); }); describe('For Directive', function() { createTestElement("for", "<ul id='forList'><li m-for='item in items'>{{item}}</li></ul>"); var forApp = new Moon({ el: "#for", data: { items: [1, 2, 3, 4, 5] } }); it('should render a list', function() { expect(document.getElementById('forList').childNodes.length).to.equal(5); }); it('should update a list', function() { var items = forApp.get("items"); items.push(6); forApp.set("items", items); Moon.nextTick(function() { expect(document.getElementById('forList').childNodes.length).to.equal(6); }); }); it('should not be present at runtime', function() { expect(document.getElementById('forList').childNodes[0].getAttribute("m-for")).to.be.null; }); }); describe('Text Directive', function() { createTestElement("text", '<span m-text="{{msg}}" id="text-directive-span"></span>'); var textApp = new Moon({ el: "#text", data: { msg: "Hello Moon!" 
} }); it('should fill DOM with a value', function() { expect(document.getElementById("text-directive-span").innerHTML).to.equal("Hello Moon!"); }); it('should not be present at runtime', function() { expect(document.getElementById('text-directive-span').getAttribute("m-text")).to.be.null; }); }); describe('HTML Directive', function() { createTestElement("html", '<span m-html="{{msg}}" id="html-directive-span"></span>'); var htmlApp = new Moon({ el: "#html", data: { msg: "<strong>Hello Moon!</strong>" } }); it('should fill DOM with a value', function() { expect(document.getElementById("html-directive-span").innerHTML).to.equal("<strong>Hello Moon!</strong>"); }); it('should not be present at runtime', function() { expect(document.getElementById('html-directive-span').getAttribute("m-html")).to.be.null; }); }); describe('Once Directive', function() { createTestElement("once", '<span m-once id="once-directive-span">{{msg}}</span>'); var onceApp = new Moon({ el: "#once", data: { msg: "Hello Moon!" } }); it('should fill DOM with a value', function() { expect(document.getElementById("once-directive-span").innerHTML).to.equal("Hello Moon!"); }); it('should not update element once value is updated', function() { onceApp.set('msg', "Changed"); Moon.nextTick(function() { expect(document.getElementById("once-directive-span").innerHTML).to.equal("Hello Moon!"); }); }); it('should not be present at runtime', function() { expect(document.getElementById('once-directive-span').getAttribute("m-once")).to.be.null; }); }); describe('Pre Directive', function() { createTestElement("pre", '<span m-pre id="pre-directive-span">{{msg}}</span>'); var preApp = new Moon({ el: "#pre", data: { msg: "Hello Moon!" } }); it('should not fill DOM with a value', function() { expect(document.getElementById("pre-directive-span").innerHTML).to.equal("{{msg}}"); }); it('should not update element once value is updated', function() { preApp.set('msg', "Changed"); Moon.nextTick(function() { expect(document.getElementById("pre-directive-span").innerHTML).to.equal("{{msg}}"); }); }); it('should not be present at runtime', function() { expect(document.getElementById('pre-directive-span').getAttribute("m-pre")).to.be.null; }); }); describe('Mask Directive', function() { createTestElement("mask", '<span m-mask id="mask-directive-span">{{msg}}</span>'); var maskApp = new Moon({ el: "#mask" }); it('should not be present at runtime', function() { expect(document.getElementById('mask-directive-span').getAttribute("m-mask")).to.be.null; }); }); }); describe('Plugin', function() { createTestElement("plugin", '<span m-empty id="plugin-span">{{msg}}</span>'); var emptyPlugin = { init: function(Moon) { Moon.directive('empty', function(el, val, vdom) { el.innerHTML = ""; for(var i = 0; i < vdom.children.length; i++) { vdom.children[i].meta.shouldRender = false; } }); } } Moon.use(emptyPlugin); var pluginApp = new Moon({ el: "#plugin", data: { msg: "Hello Moon!" } }); it('should execute', function() { expect(document.getElementById("plugin-span").innerHTML).to.equal(""); }); }); describe('Template', function() { createTestElement("template", ''); var templateApp = new Moon({ el: "#template", template: "<div id='template'>{{msg}}</div>", data: { msg: "Hello Moon!" 
} }); it('should use provided template', function() { expect(document.getElementById("template").innerHTML).to.equal("Hello Moon!"); }); it('should update', function() { templateApp.set("msg", "Changed"); Moon.nextTick(function() { expect(document.getElementById("template").innerHTML).to.equal("Changed"); }); }); }); describe('Custom Render', function() { createTestElement("render", ''); var renderApp = new Moon({ el: "#render", render: function(h) { return h('div', {id: "render"}, null, this.get('msg')) }, data: { msg: "Hello Moon!" } }); it('should use provided render function', function() { expect(document.getElementById("render").innerHTML).to.equal("Hello Moon!"); }); it('should update', function() { renderApp.set("msg", "Changed"); Moon.nextTick(function() { expect(document.getElementById("render").innerHTML).to.equal("Changed"); }); }); }); describe('Functional Component', function() { createTestElement("functional", '<functional-component someprop="{{parentMsg}}"></functional-component><slot-functional-component>Default Slot Content<span slot="named">Named Slot Content</span></slot-functional-component>'); Moon.component('functional-component', { functional: true, props: ['someprop'], render: function(h, ctx) { return h("h1", {class: "functionalComponent"}, null, ctx.data.someprop); } }); Moon.component('slot-functional-component', { functional: true, render: function(h, ctx) { return h("div", {class: "functionalSlotComponent"}, null, h("h1", {}, null, ctx.slots.default), h("h1", {}, null, ctx.slots.named)); } }); var functionalApp = new Moon({ el: "#functional", data: { parentMsg: "Hello Moon!" } }); it('should render HTML', function() { expect(document.getElementsByClassName("functionalComponent")).to.not.be.null; }); it('should render with props', function() { expect(document.getElementsByClassName("functionalComponent")[0].innerHTML).to.equal("Hello Moon!"); }); it('should render when updated', function() { functionalApp.set('parentMsg', 'Changed'); Moon.nextTick(function() { expect(document.getElementsByClassName("functionalComponent")[0].innerHTML).to.equal("Changed"); }); }); describe("Slots", function() { it('should render the default slot', function() { Moon.nextTick(function() { expect(document.getElementsByClassName("functionalSlotComponent")[0].childNodes[0].innerHTML).to.equal("Default Slot Content"); }); }); it('should render a named slot', function() { Moon.nextTick(function() { expect(document.getElementsByClassName("functionalSlotComponent")[0].childNodes[1].innerHTML).to.equal("<span>Named Slot Content</span>"); }); }); }); }); describe('Component', function() { createTestElement("component", '<my-component componentprop="{{parentMsg}}"></my-component>'); var componentConstructor = Moon.component('my-component', { props: ['componentprop', 'otherprop'], template: "<div>{{componentprop}}</div>" }); it("should create a constructor", function() { expect(new componentConstructor()).to.be.an.instanceof(Moon); }); // var componentApp = new Moon({ // el: "#component", // data: { // parentMsg: "Hello Moon!" 
// } // }); // it('should render HTML', function() { // expect(document.getElementById("component")).to.not.be.null; // }); // it('should render with props', function() { // expect(document.getElementById("component").innerHTML).to.equal("<div>Hello Moon!</div>"); // }); // it('should render when updated', function() { // componentApp.set('parentMsg', 'Changed'); // Moon.nextTick(function() { // expect(document.getElementById("component").innerHTML).to.equal("<div>Changed</div>"); // }); // }); }); describe("Events", function() { var bus = new Moon(); var evt1 = false, evt1_2 = false, handler1, globalEvt = false; describe("Handler", function() { it("should create an event listener", function() { handler1 = function() { evt1 = true; } bus.on('evt1', handler1); expect(bus.$events.evt1[0]).to.be.a("function"); }); it("should create multiple event listeners", function() { bus.on('evt1', function() { evt1_2 = true; }); expect(bus.$events.evt1[1]).to.be.a("function"); }); it("should create a global event listener", function() { bus.on('*', function() { globalEvt = true; }); expect(bus.$events["*"][0]).to.be.a("function"); }); }); describe("Emit", function() { it("should invoke all handlers", function() { bus.emit('evt1'); expect(evt1).to.be.true; expect(evt1_2).to.be.true; }); it("should call the global handler", function() { expect(globalEvt).to.be.true; }); }); describe("Removing", function() { it("should remove a handler", function() { bus.off('evt1', handler1); expect(bus.$events.evt1.length).to.equal(1); }); it("should be able to remove all handlers", function() { bus.removeEvents(); var allEvents = bus.$events.evt1.concat(bus.$events["*"]) expect(allEvents.length).to.equal(0); }); }); });
add some utility tests
test/js/test.js
add some utility tests
<ide><path>test/js/test.js
<ide> });
<ide> });
<ide> });
<add>
<add>describe("Utilities", function() {
<add>  it("should extend an object", function() {
<add>    expect(Moon.util.extend({a: true, b: true}, {a: true, b: false, c: true})).to.deep.equal({a: true, b: false, c: true});
<add>  });
<add>});
Java
apache-2.0
b30414e1ec34f3c231419e88a7dfb80df30d4e1f
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.build; import com.intellij.build.events.*; import com.intellij.execution.filters.Filter; import com.intellij.execution.filters.HyperlinkInfo; import com.intellij.execution.filters.TextConsoleBuilderFactory; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.execution.ui.ExecutionConsole; import com.intellij.icons.AllIcons; import com.intellij.ide.util.PropertiesComponent; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.ui.ThreeComponentsSplitter; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.Navigatable; import com.intellij.ui.*; import com.intellij.ui.speedSearch.SpeedSearchUtil; import com.intellij.ui.tree.AsyncTreeModel; import com.intellij.ui.tree.StructureTreeModel; import com.intellij.ui.tree.TreeVisitor; import com.intellij.ui.tree.treeTable.TreeTableModelWithColumns; import com.intellij.ui.treeStructure.SimpleNode; import com.intellij.ui.treeStructure.SimpleTreeStructure; import com.intellij.ui.treeStructure.treetable.TreeColumnInfo; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import com.intellij.util.EditSourceOnDoubleClickHandler; import com.intellij.util.EditSourceOnEnterKeyHandler; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.DateFormatUtil; import com.intellij.util.ui.ColumnInfo; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import javax.swing.border.CompoundBorder; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableCellRenderer; import javax.swing.table.TableColumn; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreeCellRenderer; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.FocusEvent; import java.awt.event.FocusListener; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; import static com.intellij.build.BuildView.CONSOLE_VIEW_NAME; /** * @author Vladislav.Soroka */ public class BuildTreeConsoleView implements ConsoleView, DataProvider, BuildConsoleView { private static final Logger LOG = Logger.getInstance(BuildTreeConsoleView.class); @NonNls private static final String TREE = "tree"; @NonNls private static final String SPLITTER_PROPERTY = "SMTestRunner.Splitter.Proportion"; private final JPanel myPanel = new JPanel(); private final 
Map<Object, ExecutionNode> nodesMap = ContainerUtil.newConcurrentMap(); private final Project myProject; private final ConsoleViewHandler myConsoleViewHandler; @NotNull private final BuildViewSettingsProvider myViewSettingsProvider; private final TableColumn myTimeColumn; private final String myWorkingDir; private final AtomicBoolean myDisposed = new AtomicBoolean(); private final StructureTreeModel<SimpleTreeStructure> myTreeModel; private final TreeTableTree myTree; private final ExecutionNode myRootNode; private volatile int myTimeColumnWidth; public BuildTreeConsoleView(Project project, BuildDescriptor buildDescriptor, @Nullable ExecutionConsole executionConsole, @NotNull BuildViewSettingsProvider buildViewSettingsProvider) { myProject = project; myWorkingDir = FileUtil.toSystemIndependentName(buildDescriptor.getWorkingDir()); final ColumnInfo[] COLUMNS = { new TreeColumnInfo("name"), new ColumnInfo("time elapsed") { @Nullable @Override public Object valueOf(Object o) { if (o instanceof DefaultMutableTreeNode) { final Object userObject = ((DefaultMutableTreeNode)o).getUserObject(); if (userObject instanceof ExecutionNode) { String duration = ((ExecutionNode)userObject).getDuration(); updateTimeColumnWidth("___" + duration, false); return duration; } } return null; } } }; myViewSettingsProvider = buildViewSettingsProvider; myRootNode = new ExecutionNode(myProject, null); myRootNode.setAutoExpandNode(true); SimpleTreeStructure treeStructure = new SimpleTreeStructure.Impl(myRootNode); myTreeModel = new StructureTreeModel<>(treeStructure); final TreeTableModel model = new TreeTableModelWithColumns(new AsyncTreeModel(myTreeModel, this), COLUMNS); DefaultTableCellRenderer timeColumnCellRenderer = new DefaultTableCellRenderer() { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); setHorizontalAlignment(SwingConstants.RIGHT); Color fg = isSelected ? 
UIUtil.getTreeSelectionForeground(hasFocus) : SimpleTextAttributes.GRAY_ATTRIBUTES.getFgColor(); setForeground(fg); return this; } }; TreeTable treeTable = new TreeTable(model) { @Override public TableCellRenderer getCellRenderer(int row, int column) { if (column == 1) { return timeColumnCellRenderer; } return super.getCellRenderer(row, column); } }; EditSourceOnDoubleClickHandler.install(treeTable); EditSourceOnEnterKeyHandler.install(treeTable, null); myTree = treeTable.getTree(); treeTable.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(true)); } @Override public void focusLost(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(false)); } }); final TreeCellRenderer treeCellRenderer = myTree.getCellRenderer(); myTree.setCellRenderer((tree, value, selected, expanded, leaf, row, hasFocus) -> { final Component rendererComponent = treeCellRenderer.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus); if (rendererComponent instanceof SimpleColoredComponent) { Color bg = UIUtil.getTreeBackground(selected, true); Color fg = UIUtil.getTreeForeground(selected, true); if (selected) { for (SimpleColoredComponent.ColoredIterator it = ((SimpleColoredComponent)rendererComponent).iterator(); it.hasNext(); ) { it.next(); int offset = it.getOffset(); int endOffset = it.getEndOffset(); SimpleTextAttributes currentAttributes = it.getTextAttributes(); SimpleTextAttributes newAttributes = new SimpleTextAttributes(bg, fg, currentAttributes.getWaveColor(), currentAttributes.getStyle()); it.split(endOffset - offset, newAttributes); } } SpeedSearchUtil.applySpeedSearchHighlighting(treeTable, (SimpleColoredComponent)rendererComponent, true, selected); } return rendererComponent; }); new TreeTableSpeedSearch(treeTable).setComparator(new SpeedSearchComparator(false)); treeTable.setTableHeader(null); myTimeColumn = treeTable.getColumnModel().getColumn(1); myTimeColumn.setResizable(false); updateTimeColumnWidth(StringUtil.formatDurationApproximate(11111L), true); TreeUtil.installActions(myTree); JPanel myContentPanel = new JPanel(); myContentPanel.setLayout(new CardLayout()); myContentPanel.add(ScrollPaneFactory.createScrollPane(treeTable, SideBorder.LEFT), TREE); myPanel.setLayout(new BorderLayout()); ThreeComponentsSplitter myThreeComponentsSplitter = new ThreeComponentsSplitter() { @Override public void setFirstSize(int size) { super.setFirstSize(size); float proportion = size / (float)getWidth(); PropertiesComponent.getInstance().setValue(SPLITTER_PROPERTY, proportion, 0.3f); } @Override public void doLayout() { super.doLayout(); JComponent detailsComponent = myConsoleViewHandler.getComponent(); if (detailsComponent != null && detailsComponent.isVisible()) { updateSplitter(this); } } }; Disposer.register(this, myThreeComponentsSplitter); myThreeComponentsSplitter.setFirstComponent(myContentPanel); myConsoleViewHandler = new ConsoleViewHandler(myProject, myTree, myThreeComponentsSplitter, executionConsole, buildViewSettingsProvider); myThreeComponentsSplitter.setLastComponent(myConsoleViewHandler.getComponent()); myPanel.add(myThreeComponentsSplitter, BorderLayout.CENTER); } @Override public void clear() { getRootElement().removeChildren(); nodesMap.clear(); myConsoleViewHandler.clear(); myTreeModel.invalidate(); } private ExecutionNode getRootElement() { return myRootNode; } @Override public void print(@NotNull String text, @NotNull 
ConsoleViewContentType contentType) { } public void onEventInternal(@NotNull BuildEvent event) { ExecutionNode parentNode = event.getParentId() == null ? null : nodesMap.get(event.getParentId()); ExecutionNode currentNode = nodesMap.get(event.getId()); if (event instanceof StartEvent || event instanceof MessageEvent) { ExecutionNode rootElement = getRootElement(); if (currentNode == null) { if (event instanceof StartBuildEvent) { currentNode = rootElement; UIUtil.invokeLaterIfNeeded(() -> { final DefaultActionGroup rerunActionGroup = new DefaultActionGroup(); for (AnAction anAction : ((StartBuildEvent)event).getRestartActions()) { rerunActionGroup.add(anAction); } TreeTable treeTable = myTree.getTreeTable(); PopupHandler.installPopupHandler(treeTable, rerunActionGroup, "BuildView"); }); } else { if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; parentNode = createMessageParentNodes(messageEvent, parentNode); } currentNode = new ExecutionNode(myProject, parentNode); } currentNode.setAutoExpandNode(currentNode == rootElement || parentNode == rootElement); nodesMap.put(event.getId(), currentNode); } else { LOG.warn("start event id collision found:" + event.getId() + ", was also in node: " + currentNode.getTitle()); return; } if (parentNode != null) { parentNode.add(currentNode); } if (event instanceof StartBuildEvent) { String buildTitle = ((StartBuildEvent)event).getBuildTitle(); currentNode.setTitle(buildTitle); currentNode.setAutoExpandNode(true); scheduleUpdate(currentNode); } else if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; currentNode.setStartTime(messageEvent.getEventTime()); currentNode.setEndTime(messageEvent.getEventTime()); currentNode.setNavigatable(messageEvent.getNavigatable(myProject)); final MessageEventResult messageEventResult = messageEvent.getResult(); currentNode.setResult(messageEventResult); } } else { currentNode = nodesMap.get(event.getId()); if (currentNode == null && event instanceof ProgressBuildEvent) { currentNode = new ExecutionNode(myProject, parentNode); nodesMap.put(event.getId(), currentNode); if (parentNode != null) { parentNode.add(currentNode); } } } if (currentNode == null) { // TODO log error return; } currentNode.setName(event.getMessage()); currentNode.setHint(event.getHint()); if (currentNode.getStartTime() == 0) { currentNode.setStartTime(event.getEventTime()); } if (event instanceof FinishEvent) { currentNode.setEndTime(event.getEventTime()); currentNode.setResult(((FinishEvent)event).getResult()); final String text = "__" + currentNode.getDuration(); ApplicationManager.getApplication().invokeLater(() -> { int timeColumnWidth = new JLabel(text, SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth < timeColumnWidth) { myTimeColumnWidth = timeColumnWidth; } }); } if (event instanceof FinishBuildEvent) { String aHint = event.getHint(); String time = DateFormatUtil.formatDateTime(event.getEventTime()); aHint = aHint == null ? 
"at " + time : aHint + " at " + time; currentNode.setHint(aHint); updateTimeColumnWidth(myTimeColumnWidth); if (myViewSettingsProvider.isSideBySideView()) { currentNode.setResult(null); } if (myConsoleViewHandler.myExecutionNode == null) { ExecutionNode element = getRootElement(); ApplicationManager.getApplication().invokeLater(() -> myConsoleViewHandler.setNode(element)); } if (((FinishBuildEvent)event).getResult() instanceof FailureResult) { JTree tree = myTree; if (tree != null && !tree.isRootVisible()) { ExecutionNode rootElement = getRootElement(); ExecutionNode resultNode = new ExecutionNode(myProject, rootElement); resultNode.setName(StringUtil.toTitleCase(rootElement.getName())); resultNode.setHint(rootElement.getHint()); resultNode.setEndTime(rootElement.getEndTime()); resultNode.setStartTime(rootElement.getStartTime()); resultNode.setResult(rootElement.getResult()); resultNode.setTooltip(rootElement.getTooltip()); rootElement.add(resultNode); scheduleUpdate(resultNode); return; } } } scheduleUpdate(currentNode); } @Override public void scrollTo(int offset) { } @Override public void attachToProcess(ProcessHandler processHandler) { } @Override public boolean isOutputPaused() { return false; } @Override public void setOutputPaused(boolean value) { } @Override public boolean hasDeferredOutput() { return false; } @Override public void performWhenNoDeferredOutput(@NotNull Runnable runnable) { } @Override public void setHelpId(@NotNull String helpId) { } @Override public void addMessageFilter(@NotNull Filter filter) { } @Override public void printHyperlink(@NotNull String hyperlinkText, @Nullable HyperlinkInfo info) { } @Override public int getContentSize() { return 0; } @Override public boolean canPause() { return false; } @NotNull @Override public AnAction[] createConsoleActions() { return AnAction.EMPTY_ARRAY; } @Override public void allowHeavyFilters() { } @Override public JComponent getComponent() { return myPanel; } @Override public JComponent getPreferredFocusableComponent() { return myTree; } @Override public void dispose() { myDisposed.set(true); } public boolean isDisposed() { return myDisposed.get(); } @Override public void onEvent(@NotNull BuildEvent event) { myTreeModel.getInvoker().runOrInvokeLater(() -> onEventInternal(event)); } private static void updateSplitter(@NotNull ThreeComponentsSplitter myThreeComponentsSplitter) { int firstSize = myThreeComponentsSplitter.getFirstSize(); int splitterWidth = myThreeComponentsSplitter.getWidth(); if (firstSize == 0) { float proportion = PropertiesComponent.getInstance().getFloat(SPLITTER_PROPERTY, 0.3f); int width = Math.round(splitterWidth * proportion); if (width > 0) { myThreeComponentsSplitter.setFirstSize(width); } } } protected void expand(TreeTableTree tree) { TreeUtil.expand(tree, path -> { ExecutionNode node = TreeUtil.getLastUserObject(ExecutionNode.class, path); if (node != null && node.isAutoExpandNode() && node.getChildCount() > 0) { return TreeVisitor.Action.CONTINUE; } else { return TreeVisitor.Action.SKIP_CHILDREN; } }, path -> {}); } void scheduleUpdate(ExecutionNode executionNode) { SimpleNode node = executionNode.getParent() == null ? 
executionNode : executionNode.getParent(); myTreeModel.invalidate(node, true).onProcessed(p -> expand(myTree)); } private ExecutionNode createMessageParentNodes(MessageEvent messageEvent, ExecutionNode parentNode) { Object messageEventParentId = messageEvent.getParentId(); if (messageEventParentId == null) return null; String group = messageEvent.getGroup(); String groupNodeId = group.hashCode() + messageEventParentId.toString(); ExecutionNode messagesGroupNode = getOrCreateMessagesNode(messageEvent, groupNodeId, parentNode, null, group, true, null, null, nodesMap, myProject); EventResult groupNodeResult = messagesGroupNode.getResult(); final MessageEvent.Kind eventKind = messageEvent.getKind(); if (!(groupNodeResult instanceof MessageEventResult) || ((MessageEventResult)groupNodeResult).getKind().compareTo(eventKind) > 0) { messagesGroupNode.setResult((MessageEventResult)() -> eventKind); } if (messageEvent instanceof FileMessageEvent) { ExecutionNode fileParentNode = messagesGroupNode; FilePosition filePosition = ((FileMessageEvent)messageEvent).getFilePosition(); String filePath = FileUtil.toSystemIndependentName(filePosition.getFile().getPath()); String parentsPath = ""; String relativePath = FileUtil.getRelativePath(myWorkingDir, filePath, '/'); if (relativePath != null) { String nodeId = groupNodeId + myWorkingDir; ExecutionNode workingDirNode = getOrCreateMessagesNode(messageEvent, nodeId, messagesGroupNode, myWorkingDir, null, false, () -> AllIcons.Nodes.Module, null, nodesMap, myProject); parentsPath = myWorkingDir; fileParentNode = workingDirNode; } VirtualFile sourceRootForFile; VirtualFile ioFile = VfsUtil.findFileByIoFile(new File(filePath), false); if (ioFile != null && (sourceRootForFile = ProjectFileIndex.SERVICE.getInstance(myProject).getSourceRootForFile(ioFile)) != null) { relativePath = FileUtil.getRelativePath(parentsPath, sourceRootForFile.getPath(), '/'); if (relativePath != null) { parentsPath += ("/" + relativePath); String contentRootNodeId = groupNodeId + sourceRootForFile.getPath(); fileParentNode = getOrCreateMessagesNode(messageEvent, contentRootNodeId, fileParentNode, relativePath, null, false, () -> ProjectFileIndex.SERVICE.getInstance(myProject).isInTestSourceContent(ioFile) ? AllIcons.Modules.TestRoot : AllIcons.Modules.SourceRoot, null, nodesMap, myProject); } } String fileNodeId = groupNodeId + filePath; relativePath = StringUtil.isEmpty(parentsPath) ? 
filePath : FileUtil.getRelativePath(parentsPath, filePath, '/'); parentNode = getOrCreateMessagesNode(messageEvent, fileNodeId, fileParentNode, relativePath, null, false, () -> { VirtualFile file = VfsUtil.findFileByIoFile(filePosition.getFile(), false); if (file != null) { return file.getFileType().getIcon(); } return null; }, messageEvent.getNavigatable(myProject), nodesMap, myProject); } else { parentNode = messagesGroupNode; } if (eventKind == MessageEvent.Kind.ERROR || eventKind == MessageEvent.Kind.WARNING) { SimpleNode p = parentNode; do { ((ExecutionNode)p).reportChildMessageKind(eventKind); } while ((p = p.getParent()) instanceof ExecutionNode); } return parentNode; } public void hideRootNode() { UIUtil.invokeLaterIfNeeded(() -> { if (myTree != null) { myTree.setRootVisible(false); myTree.setShowsRootHandles(true); } }); } private void updateTimeColumnWidth(String text, boolean force) { int timeColumnWidth = new JLabel(text, SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth > timeColumnWidth) { timeColumnWidth = myTimeColumnWidth; } if (force || myTimeColumn.getMaxWidth() < timeColumnWidth || myTimeColumn.getWidth() < timeColumnWidth) { updateTimeColumnWidth(timeColumnWidth); } } private void updateTimeColumnWidth(int width) { myTimeColumn.setPreferredWidth(width); myTimeColumn.setMinWidth(width); myTimeColumn.setMaxWidth(width); } @Nullable @Override public Object getData(@NotNull String dataId) { if (PlatformDataKeys.HELP_ID.is(dataId)) return "reference.build.tool.window"; if (CommonDataKeys.PROJECT.is(dataId)) return myProject; if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) return extractNavigatables(); return null; } private Object extractNavigatables() { final List<Navigatable> navigatables = new ArrayList<>(); for (ExecutionNode each : getSelectedNodes()) { List<Navigatable> navigatable = each.getNavigatables(); navigatables.addAll(navigatable); } return navigatables.isEmpty() ? null : navigatables.toArray(new Navigatable[0]); } private ExecutionNode[] getSelectedNodes() { final ExecutionNode[] result = new ExecutionNode[0]; if (myTree != null) { final List<ExecutionNode> nodes = TreeUtil.collectSelectedObjects(myTree, path -> TreeUtil.getLastUserObject(ExecutionNode.class, path)); return nodes.toArray(result); } return result; } @TestOnly JTree getTree() { return myTree; } @NotNull private static ExecutionNode getOrCreateMessagesNode(MessageEvent messageEvent, String nodeId, ExecutionNode parentNode, String nodeName, String nodeTitle, boolean autoExpandNode, @Nullable Supplier<? 
extends Icon> iconProvider, @Nullable Navigatable navigatable, Map<Object, ExecutionNode> nodesMap, Project project) { ExecutionNode node = nodesMap.get(nodeId); if (node == null) { node = new ExecutionNode(project, parentNode); node.setName(nodeName); node.setTitle(nodeTitle); if (autoExpandNode) { node.setAutoExpandNode(true); } node.setStartTime(messageEvent.getEventTime()); node.setEndTime(messageEvent.getEventTime()); if (iconProvider != null) { node.setIconProvider(iconProvider); } if (navigatable != null) { node.setNavigatable(navigatable); } parentNode.add(node); nodesMap.put(nodeId, node); } return node; } private static class ConsoleViewHandler { private static final String TASK_OUTPUT_VIEW_NAME = "taskOutputView"; private final JPanel myPanel; private final CompositeView<ExecutionConsole> myView; @NotNull private final BuildViewSettingsProvider myViewSettingsProvider; @Nullable private ExecutionNode myExecutionNode; ConsoleViewHandler(Project project, TreeTableTree tree, ThreeComponentsSplitter threeComponentsSplitter, @Nullable ExecutionConsole executionConsole, @NotNull BuildViewSettingsProvider buildViewSettingsProvider) { myPanel = new JPanel(new BorderLayout()); ConsoleView myNodeConsole = TextConsoleBuilderFactory.getInstance().createBuilder(project).getConsole(); myViewSettingsProvider = buildViewSettingsProvider; myView = new CompositeView<>(null); if (executionConsole != null && buildViewSettingsProvider.isSideBySideView()) { myView.addView(executionConsole, CONSOLE_VIEW_NAME, true); } myView.addView(myNodeConsole, TASK_OUTPUT_VIEW_NAME, false); if (buildViewSettingsProvider.isSideBySideView()) { myView.enableView(CONSOLE_VIEW_NAME, false); myPanel.setVisible(true); } else { myPanel.setVisible(false); } JComponent consoleComponent = myNodeConsole.getComponent(); AnAction[] consoleActions = myNodeConsole.createConsoleActions(); consoleComponent.setFocusable(true); final Color editorBackground = EditorColorsManager.getInstance().getGlobalScheme().getDefaultBackground(); consoleComponent.setBorder(new CompoundBorder(IdeBorderFactory.createBorder(SideBorder.RIGHT), new SideBorder(editorBackground, SideBorder.LEFT))); myPanel.add(myView.getComponent(), BorderLayout.CENTER); final ActionToolbar toolbar = ActionManager.getInstance() .createActionToolbar("BuildResults", new DefaultActionGroup(consoleActions), false); myPanel.add(toolbar.getComponent(), BorderLayout.EAST); tree.addTreeSelectionListener(e -> { TreePath path = e.getPath(); if (path == null || !e.isAddedPath()) { return; } TreePath selectionPath = tree.getSelectionPath(); setNode(selectionPath != null ? (DefaultMutableTreeNode)selectionPath.getLastPathComponent() : null); }); Disposer.register(threeComponentsSplitter, myView); Disposer.register(threeComponentsSplitter, myNodeConsole); } private ConsoleView getTaskOutputView() { return (ConsoleView)myView.getView(TASK_OUTPUT_VIEW_NAME); } public boolean setNode(@NotNull ExecutionNode node) { EventResult eventResult = node.getResult(); boolean hasChanged = false; ConsoleView taskOutputView = getTaskOutputView(); if (eventResult instanceof FailureResult) { taskOutputView.clear(); List<? extends Failure> failures = ((FailureResult)eventResult).getFailures(); if (failures.isEmpty()) return false; for (Iterator<? 
extends Failure> iterator = failures.iterator(); iterator.hasNext(); ) { Failure failure = iterator.next(); String text = ObjectUtils.chooseNotNull(failure.getDescription(), failure.getMessage()); if (text == null && failure.getError() != null) { text = failure.getError().getMessage(); } if (text == null) continue; printDetails(failure, text); hasChanged = true; if (iterator.hasNext()) { taskOutputView.print("\n\n", ConsoleViewContentType.NORMAL_OUTPUT); } } } else if (eventResult instanceof MessageEventResult) { String details = ((MessageEventResult)eventResult).getDetails(); if (details == null) { return false; } if (details.isEmpty()) { return false; } taskOutputView.clear(); printDetails(null, details); hasChanged = true; } if (!hasChanged) return false; taskOutputView.scrollTo(0); myView.enableView(TASK_OUTPUT_VIEW_NAME, !myViewSettingsProvider.isSideBySideView()); myPanel.setVisible(true); return true; } private void printDetails(Failure failure, @Nullable String details) { BuildConsoleUtils.printDetails(getTaskOutputView(), failure, details); } public void setNode(@Nullable DefaultMutableTreeNode node) { if (node == null || node.getUserObject() == myExecutionNode) return; if (node.getUserObject() instanceof ExecutionNode) { myExecutionNode = (ExecutionNode)node.getUserObject(); if (setNode((ExecutionNode)node.getUserObject())) { return; } } myExecutionNode = null; if (myView.getView(CONSOLE_VIEW_NAME) != null && myViewSettingsProvider.isSideBySideView()) { myView.enableView(CONSOLE_VIEW_NAME, false); myPanel.setVisible(true); } else { myPanel.setVisible(false); } } public JComponent getComponent() { return myPanel; } public void clear() { myPanel.setVisible(false); getTaskOutputView().clear(); } } }
platform/lang-impl/src/com/intellij/build/BuildTreeConsoleView.java
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.build; import com.intellij.build.events.*; import com.intellij.execution.filters.Filter; import com.intellij.execution.filters.HyperlinkInfo; import com.intellij.execution.filters.TextConsoleBuilderFactory; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.execution.ui.ExecutionConsole; import com.intellij.icons.AllIcons; import com.intellij.ide.util.PropertiesComponent; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.ui.ThreeComponentsSplitter; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.Navigatable; import com.intellij.ui.*; import com.intellij.ui.speedSearch.SpeedSearchUtil; import com.intellij.ui.tree.AsyncTreeModel; import com.intellij.ui.tree.StructureTreeModel; import com.intellij.ui.tree.TreeVisitor; import com.intellij.ui.tree.treeTable.TreeTableModelWithColumns; import com.intellij.ui.treeStructure.SimpleNode; import com.intellij.ui.treeStructure.SimpleTreeStructure; import com.intellij.ui.treeStructure.treetable.TreeColumnInfo; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import com.intellij.util.EditSourceOnDoubleClickHandler; import com.intellij.util.EditSourceOnEnterKeyHandler; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.DateFormatUtil; import com.intellij.util.ui.ColumnInfo; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import javax.swing.border.CompoundBorder; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableCellRenderer; import javax.swing.table.TableColumn; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreeCellRenderer; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.FocusEvent; import java.awt.event.FocusListener; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; import static com.intellij.build.BuildView.CONSOLE_VIEW_NAME; /** * @author Vladislav.Soroka */ public class BuildTreeConsoleView implements ConsoleView, DataProvider, BuildConsoleView { private static final Logger LOG = Logger.getInstance(BuildTreeConsoleView.class); @NonNls private static final String TREE = "tree"; @NonNls private static final String SPLITTER_PROPERTY = "SMTestRunner.Splitter.Proportion"; private final JPanel myPanel = new JPanel(); private final 
Map<Object, ExecutionNode> nodesMap = ContainerUtil.newConcurrentMap(); private final Project myProject; private final ConsoleViewHandler myConsoleViewHandler; @NotNull private final BuildViewSettingsProvider myViewSettingsProvider; private final TableColumn myTimeColumn; private final String myWorkingDir; private final AtomicBoolean myDisposed = new AtomicBoolean(); private final StructureTreeModel<SimpleTreeStructure> myTreeModel; private final TreeTableTree myTree; private final ExecutionNode myRootNode; private volatile int myTimeColumnWidth; public BuildTreeConsoleView(Project project, BuildDescriptor buildDescriptor, @Nullable ExecutionConsole executionConsole, @NotNull BuildViewSettingsProvider buildViewSettingsProvider) { myProject = project; myWorkingDir = FileUtil.toSystemIndependentName(buildDescriptor.getWorkingDir()); final ColumnInfo[] COLUMNS = { new TreeColumnInfo("name"), new ColumnInfo("time elapsed") { @Nullable @Override public Object valueOf(Object o) { if (o instanceof DefaultMutableTreeNode) { final Object userObject = ((DefaultMutableTreeNode)o).getUserObject(); if (userObject instanceof ExecutionNode) { String duration = ((ExecutionNode)userObject).getDuration(); updateTimeColumnWidth("___" + duration, false); return duration; } } return null; } } }; myViewSettingsProvider = buildViewSettingsProvider; myRootNode = new ExecutionNode(myProject, null); myRootNode.setAutoExpandNode(true); SimpleTreeStructure treeStructure = new SimpleTreeStructure.Impl(myRootNode); myTreeModel = new StructureTreeModel<>(treeStructure); final TreeTableModel model = new TreeTableModelWithColumns(new AsyncTreeModel(myTreeModel, this), COLUMNS); DefaultTableCellRenderer timeColumnCellRenderer = new DefaultTableCellRenderer() { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); setHorizontalAlignment(SwingConstants.RIGHT); Color fg = isSelected ? 
UIUtil.getTreeSelectionForeground(hasFocus) : SimpleTextAttributes.GRAY_ATTRIBUTES.getFgColor(); setForeground(fg); return this; } }; TreeTable treeTable = new TreeTable(model) { @Override public TableCellRenderer getCellRenderer(int row, int column) { if (column == 1) { return timeColumnCellRenderer; } return super.getCellRenderer(row, column); } }; EditSourceOnDoubleClickHandler.install(treeTable); EditSourceOnEnterKeyHandler.install(treeTable, null); myTree = treeTable.getTree(); treeTable.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(true)); } @Override public void focusLost(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(false)); } }); final TreeCellRenderer treeCellRenderer = myTree.getCellRenderer(); myTree.setCellRenderer((tree, value, selected, expanded, leaf, row, hasFocus) -> { final Component rendererComponent = treeCellRenderer.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus); if (rendererComponent instanceof SimpleColoredComponent) { Color bg = UIUtil.getTreeBackground(selected, true); Color fg = UIUtil.getTreeForeground(selected, true); if (selected) { for (SimpleColoredComponent.ColoredIterator it = ((SimpleColoredComponent)rendererComponent).iterator(); it.hasNext(); ) { it.next(); int offset = it.getOffset(); int endOffset = it.getEndOffset(); SimpleTextAttributes currentAttributes = it.getTextAttributes(); SimpleTextAttributes newAttributes = new SimpleTextAttributes(bg, fg, currentAttributes.getWaveColor(), currentAttributes.getStyle()); it.split(endOffset - offset, newAttributes); } } SpeedSearchUtil.applySpeedSearchHighlighting(treeTable, (SimpleColoredComponent)rendererComponent, true, selected); } return rendererComponent; }); new TreeTableSpeedSearch(treeTable).setComparator(new SpeedSearchComparator(false)); treeTable.setTableHeader(null); myTimeColumn = treeTable.getColumnModel().getColumn(1); myTimeColumn.setResizable(false); updateTimeColumnWidth(StringUtil.formatDurationApproximate(11111L), true); TreeUtil.installActions(myTree); JPanel myContentPanel = new JPanel(); myContentPanel.setLayout(new CardLayout()); myContentPanel.add(ScrollPaneFactory.createScrollPane(treeTable, SideBorder.LEFT), TREE); myPanel.setLayout(new BorderLayout()); ThreeComponentsSplitter myThreeComponentsSplitter = new ThreeComponentsSplitter() { @Override public void setFirstSize(int size) { super.setFirstSize(size); float proportion = size / (float)getWidth(); PropertiesComponent.getInstance().setValue(SPLITTER_PROPERTY, proportion, 0.3f); } @Override public void doLayout() { super.doLayout(); JComponent detailsComponent = myConsoleViewHandler.getComponent(); if (detailsComponent != null && detailsComponent.isVisible()) { updateSplitter(this); } } }; Disposer.register(this, myThreeComponentsSplitter); myThreeComponentsSplitter.setFirstComponent(myContentPanel); myConsoleViewHandler = new ConsoleViewHandler(myProject, myTree, myThreeComponentsSplitter, executionConsole, buildViewSettingsProvider); myThreeComponentsSplitter.setLastComponent(myConsoleViewHandler.getComponent()); myPanel.add(myThreeComponentsSplitter, BorderLayout.CENTER); } @Override public void clear() { getRootElement().removeChildren(); nodesMap.clear(); myConsoleViewHandler.clear(); myTreeModel.invalidate(); } private ExecutionNode getRootElement() { return myRootNode; } @Override public void print(@NotNull String text, @NotNull 
ConsoleViewContentType contentType) { } public void onEventInternal(@NotNull BuildEvent event) { ExecutionNode parentNode = event.getParentId() == null ? null : nodesMap.get(event.getParentId()); ExecutionNode currentNode = nodesMap.get(event.getId()); if (event instanceof StartEvent || event instanceof MessageEvent) { ExecutionNode rootElement = getRootElement(); if (currentNode == null) { if (event instanceof StartBuildEvent) { currentNode = rootElement; UIUtil.invokeLaterIfNeeded(() -> { final DefaultActionGroup rerunActionGroup = new DefaultActionGroup(); for (AnAction anAction : ((StartBuildEvent)event).getRestartActions()) { rerunActionGroup.add(anAction); } TreeTable treeTable = myTree.getTreeTable(); PopupHandler.installPopupHandler(treeTable, rerunActionGroup, "BuildView"); }); } else { if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; parentNode = createMessageParentNodes(messageEvent, parentNode); } currentNode = new ExecutionNode(myProject, parentNode); } currentNode.setAutoExpandNode(currentNode == rootElement || parentNode == rootElement); nodesMap.put(event.getId(), currentNode); } else { LOG.warn("start event id collision found:" + event.getId() + ", was also in node: " + currentNode.getTitle()); return; } if (parentNode != null) { parentNode.add(currentNode); } if (event instanceof StartBuildEvent) { String buildTitle = ((StartBuildEvent)event).getBuildTitle(); currentNode.setTitle(buildTitle); currentNode.setAutoExpandNode(true); scheduleUpdate(currentNode); } else if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; currentNode.setStartTime(messageEvent.getEventTime()); currentNode.setEndTime(messageEvent.getEventTime()); currentNode.setNavigatable(messageEvent.getNavigatable(myProject)); final MessageEventResult messageEventResult = messageEvent.getResult(); currentNode.setResult(messageEventResult); } } else { currentNode = nodesMap.get(event.getId()); if (currentNode == null && event instanceof ProgressBuildEvent) { currentNode = new ExecutionNode(myProject, parentNode); nodesMap.put(event.getId(), currentNode); if (parentNode != null) { parentNode.add(currentNode); } } } if (currentNode == null) { // TODO log error return; } currentNode.setName(event.getMessage()); currentNode.setHint(event.getHint()); if (currentNode.getStartTime() == 0) { currentNode.setStartTime(event.getEventTime()); } if (event instanceof FinishEvent) { currentNode.setEndTime(event.getEventTime()); currentNode.setResult(((FinishEvent)event).getResult()); final String text = "__" + currentNode.getDuration(); ApplicationManager.getApplication().invokeLater(() -> { int timeColumnWidth = new JLabel(text, SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth < timeColumnWidth) { myTimeColumnWidth = timeColumnWidth; } }); } if (event instanceof FinishBuildEvent) { String aHint = event.getHint(); String time = DateFormatUtil.formatDateTime(event.getEventTime()); aHint = aHint == null ? 
"at " + time : aHint + " at " + time; currentNode.setHint(aHint); updateTimeColumnWidth(myTimeColumnWidth); if (myViewSettingsProvider.isSideBySideView()) { currentNode.setResult(null); } if (myConsoleViewHandler.myExecutionNode == null) { ExecutionNode element = getRootElement(); ApplicationManager.getApplication().invokeLater(() -> myConsoleViewHandler.setNode(element)); } if (((FinishBuildEvent)event).getResult() instanceof FailureResult) { JTree tree = myTree; if (tree != null && !tree.isRootVisible()) { ExecutionNode rootElement = getRootElement(); ExecutionNode resultNode = new ExecutionNode(myProject, rootElement); resultNode.setName(StringUtil.toTitleCase(rootElement.getName())); resultNode.setHint(rootElement.getHint()); resultNode.setEndTime(rootElement.getEndTime()); resultNode.setStartTime(rootElement.getStartTime()); resultNode.setResult(rootElement.getResult()); resultNode.setTooltip(rootElement.getTooltip()); rootElement.add(resultNode); scheduleUpdate(resultNode); return; } } } scheduleUpdate(currentNode); } @Override public void scrollTo(int offset) { } @Override public void attachToProcess(ProcessHandler processHandler) { } @Override public boolean isOutputPaused() { return false; } @Override public void setOutputPaused(boolean value) { } @Override public boolean hasDeferredOutput() { return false; } @Override public void performWhenNoDeferredOutput(@NotNull Runnable runnable) { } @Override public void setHelpId(@NotNull String helpId) { } @Override public void addMessageFilter(@NotNull Filter filter) { } @Override public void printHyperlink(@NotNull String hyperlinkText, @Nullable HyperlinkInfo info) { } @Override public int getContentSize() { return 0; } @Override public boolean canPause() { return false; } @NotNull @Override public AnAction[] createConsoleActions() { return AnAction.EMPTY_ARRAY; } @Override public void allowHeavyFilters() { } @Override public JComponent getComponent() { return myPanel; } @Override public JComponent getPreferredFocusableComponent() { return myTree; } @Override public void dispose() { myDisposed.set(true); } public boolean isDisposed() { return myDisposed.get(); } @Override public void onEvent(@NotNull BuildEvent event) { myTreeModel.getInvoker().runOrInvokeLater(() -> onEventInternal(event)); } private static void updateSplitter(@NotNull ThreeComponentsSplitter myThreeComponentsSplitter) { int firstSize = myThreeComponentsSplitter.getFirstSize(); //int lastSize = myThreeComponentsSplitter.getLastSize(); int splitterWidth = myThreeComponentsSplitter.getWidth(); if (firstSize == 0/* && lastSize == 0*/) { float proportion = PropertiesComponent.getInstance().getFloat(SPLITTER_PROPERTY, 0.3f); int width = Math.round(splitterWidth * proportion); if (width > 0) { myThreeComponentsSplitter.setFirstSize(width); } } } protected void expand(TreeTableTree tree) { TreeUtil.expand(tree, path -> { ExecutionNode node = TreeUtil.getLastUserObject(ExecutionNode.class, path); if (node != null && node.isAutoExpandNode() && node.getChildCount() > 0) { return TreeVisitor.Action.CONTINUE; } else { return TreeVisitor.Action.SKIP_CHILDREN; } }, path -> {}); } void scheduleUpdate(ExecutionNode executionNode) { SimpleNode node = executionNode.getParent() == null ? 
executionNode : executionNode.getParent(); myTreeModel.invalidate(node, true).onProcessed(p -> expand(myTree)); } private ExecutionNode createMessageParentNodes(MessageEvent messageEvent, ExecutionNode parentNode) { Object messageEventParentId = messageEvent.getParentId(); if (messageEventParentId == null) return null; String group = messageEvent.getGroup(); String groupNodeId = group.hashCode() + messageEventParentId.toString(); ExecutionNode messagesGroupNode = getOrCreateMessagesNode(messageEvent, groupNodeId, parentNode, null, group, true, null, null, nodesMap, myProject); EventResult groupNodeResult = messagesGroupNode.getResult(); final MessageEvent.Kind eventKind = messageEvent.getKind(); if (!(groupNodeResult instanceof MessageEventResult) || ((MessageEventResult)groupNodeResult).getKind().compareTo(eventKind) > 0) { messagesGroupNode.setResult((MessageEventResult)() -> eventKind); } if (messageEvent instanceof FileMessageEvent) { ExecutionNode fileParentNode = messagesGroupNode; FilePosition filePosition = ((FileMessageEvent)messageEvent).getFilePosition(); String filePath = FileUtil.toSystemIndependentName(filePosition.getFile().getPath()); String parentsPath = ""; String relativePath = FileUtil.getRelativePath(myWorkingDir, filePath, '/'); if (relativePath != null) { String nodeId = groupNodeId + myWorkingDir; ExecutionNode workingDirNode = getOrCreateMessagesNode(messageEvent, nodeId, messagesGroupNode, myWorkingDir, null, false, () -> AllIcons.Nodes.Module, null, nodesMap, myProject); parentsPath = myWorkingDir; fileParentNode = workingDirNode; } VirtualFile sourceRootForFile; VirtualFile ioFile = VfsUtil.findFileByIoFile(new File(filePath), false); if (ioFile != null && (sourceRootForFile = ProjectFileIndex.SERVICE.getInstance(myProject).getSourceRootForFile(ioFile)) != null) { relativePath = FileUtil.getRelativePath(parentsPath, sourceRootForFile.getPath(), '/'); if (relativePath != null) { parentsPath += ("/" + relativePath); String contentRootNodeId = groupNodeId + sourceRootForFile.getPath(); fileParentNode = getOrCreateMessagesNode(messageEvent, contentRootNodeId, fileParentNode, relativePath, null, false, () -> ProjectFileIndex.SERVICE.getInstance(myProject).isInTestSourceContent(ioFile) ? AllIcons.Modules.TestRoot : AllIcons.Modules.SourceRoot, null, nodesMap, myProject); } } String fileNodeId = groupNodeId + filePath; relativePath = StringUtil.isEmpty(parentsPath) ? 
filePath : FileUtil.getRelativePath(parentsPath, filePath, '/'); parentNode = getOrCreateMessagesNode(messageEvent, fileNodeId, fileParentNode, relativePath, null, false, () -> { VirtualFile file = VfsUtil.findFileByIoFile(filePosition.getFile(), false); if (file != null) { return file.getFileType().getIcon(); } return null; }, messageEvent.getNavigatable(myProject), nodesMap, myProject); } else { parentNode = messagesGroupNode; } if (eventKind == MessageEvent.Kind.ERROR || eventKind == MessageEvent.Kind.WARNING) { SimpleNode p = parentNode; do { ((ExecutionNode)p).reportChildMessageKind(eventKind); } while ((p = p.getParent()) instanceof ExecutionNode); } return parentNode; } public void hideRootNode() { UIUtil.invokeLaterIfNeeded(() -> { if (myTree != null) { myTree.setRootVisible(false); myTree.setShowsRootHandles(true); } }); } private void updateTimeColumnWidth(String text, boolean force) { int timeColumnWidth = new JLabel(text, SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth > timeColumnWidth) { timeColumnWidth = myTimeColumnWidth; } if (force || myTimeColumn.getMaxWidth() < timeColumnWidth || myTimeColumn.getWidth() < timeColumnWidth) { updateTimeColumnWidth(timeColumnWidth); } } private void updateTimeColumnWidth(int width) { myTimeColumn.setPreferredWidth(width); myTimeColumn.setMinWidth(width); myTimeColumn.setMaxWidth(width); } @Nullable @Override public Object getData(@NotNull String dataId) { if (PlatformDataKeys.HELP_ID.is(dataId)) return "reference.build.tool.window"; if (CommonDataKeys.PROJECT.is(dataId)) return myProject; if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) return extractNavigatables(); return null; } private Object extractNavigatables() { final List<Navigatable> navigatables = new ArrayList<>(); for (ExecutionNode each : getSelectedNodes()) { List<Navigatable> navigatable = each.getNavigatables(); navigatables.addAll(navigatable); } return navigatables.isEmpty() ? null : navigatables.toArray(new Navigatable[0]); } private ExecutionNode[] getSelectedNodes() { final ExecutionNode[] result = new ExecutionNode[0]; if (myTree != null) { final List<ExecutionNode> nodes = TreeUtil.collectSelectedObjects(myTree, path -> TreeUtil.getLastUserObject(ExecutionNode.class, path)); return nodes.toArray(result); } return result; } @TestOnly JTree getTree() { return myTree; } @NotNull private static ExecutionNode getOrCreateMessagesNode(MessageEvent messageEvent, String nodeId, ExecutionNode parentNode, String nodeName, String nodeTitle, boolean autoExpandNode, @Nullable Supplier<? 
extends Icon> iconProvider, @Nullable Navigatable navigatable, Map<Object, ExecutionNode> nodesMap, Project project) { ExecutionNode node = nodesMap.get(nodeId); if (node == null) { node = new ExecutionNode(project, parentNode); node.setName(nodeName); node.setTitle(nodeTitle); if (autoExpandNode) { node.setAutoExpandNode(true); } node.setStartTime(messageEvent.getEventTime()); node.setEndTime(messageEvent.getEventTime()); if (iconProvider != null) { node.setIconProvider(iconProvider); } if (navigatable != null) { node.setNavigatable(navigatable); } parentNode.add(node); nodesMap.put(nodeId, node); } return node; } private static class ConsoleViewHandler { private static final String TASK_OUTPUT_VIEW_NAME = "taskOutputView"; private final JPanel myPanel; private final CompositeView<ExecutionConsole> myView; @NotNull private final BuildViewSettingsProvider myViewSettingsProvider; @Nullable private ExecutionNode myExecutionNode; ConsoleViewHandler(Project project, TreeTableTree tree, ThreeComponentsSplitter threeComponentsSplitter, @Nullable ExecutionConsole executionConsole, @NotNull BuildViewSettingsProvider buildViewSettingsProvider) { myPanel = new JPanel(new BorderLayout()); ConsoleView myNodeConsole = TextConsoleBuilderFactory.getInstance().createBuilder(project).getConsole(); myViewSettingsProvider = buildViewSettingsProvider; myView = new CompositeView<>(null); if (executionConsole != null && buildViewSettingsProvider.isSideBySideView()) { myView.addView(executionConsole, CONSOLE_VIEW_NAME, true); } myView.addView(myNodeConsole, TASK_OUTPUT_VIEW_NAME, false); if (buildViewSettingsProvider.isSideBySideView()) { myView.enableView(CONSOLE_VIEW_NAME, false); myPanel.setVisible(true); } else { myPanel.setVisible(false); } JComponent consoleComponent = myNodeConsole.getComponent(); AnAction[] consoleActions = myNodeConsole.createConsoleActions(); consoleComponent.setFocusable(true); final Color editorBackground = EditorColorsManager.getInstance().getGlobalScheme().getDefaultBackground(); consoleComponent.setBorder(new CompoundBorder(IdeBorderFactory.createBorder(SideBorder.RIGHT), new SideBorder(editorBackground, SideBorder.LEFT))); myPanel.add(myView.getComponent(), BorderLayout.CENTER); final ActionToolbar toolbar = ActionManager.getInstance() .createActionToolbar("BuildResults", new DefaultActionGroup(consoleActions), false); myPanel.add(toolbar.getComponent(), BorderLayout.EAST); tree.addTreeSelectionListener(e -> { TreePath path = e.getPath(); if (path == null || !e.isAddedPath()) { return; } TreePath selectionPath = tree.getSelectionPath(); setNode(selectionPath != null ? (DefaultMutableTreeNode)selectionPath.getLastPathComponent() : null); }); Disposer.register(threeComponentsSplitter, myView); Disposer.register(threeComponentsSplitter, myNodeConsole); } private ConsoleView getTaskOutputView() { return (ConsoleView)myView.getView(TASK_OUTPUT_VIEW_NAME); } public boolean setNode(@NotNull ExecutionNode node) { EventResult eventResult = node.getResult(); boolean hasChanged = false; ConsoleView taskOutputView = getTaskOutputView(); if (eventResult instanceof FailureResult) { taskOutputView.clear(); List<? extends Failure> failures = ((FailureResult)eventResult).getFailures(); if (failures.isEmpty()) return false; for (Iterator<? 
extends Failure> iterator = failures.iterator(); iterator.hasNext(); ) { Failure failure = iterator.next(); String text = ObjectUtils.chooseNotNull(failure.getDescription(), failure.getMessage()); if (text == null && failure.getError() != null) { text = failure.getError().getMessage(); } if (text == null) continue; printDetails(failure, text); hasChanged = true; if (iterator.hasNext()) { taskOutputView.print("\n\n", ConsoleViewContentType.NORMAL_OUTPUT); } } } else if (eventResult instanceof MessageEventResult) { String details = ((MessageEventResult)eventResult).getDetails(); if (details == null) { return false; } if (details.isEmpty()) { return false; } taskOutputView.clear(); printDetails(null, details); hasChanged = true; } if (!hasChanged) return false; taskOutputView.scrollTo(0); myView.enableView(TASK_OUTPUT_VIEW_NAME, !myViewSettingsProvider.isSideBySideView()); myPanel.setVisible(true); return true; } private void printDetails(Failure failure, @Nullable String details) { BuildConsoleUtils.printDetails(getTaskOutputView(), failure, details); } public void setNode(@Nullable DefaultMutableTreeNode node) { if (node == null || node.getUserObject() == myExecutionNode) return; if (node.getUserObject() instanceof ExecutionNode) { myExecutionNode = (ExecutionNode)node.getUserObject(); if (setNode((ExecutionNode)node.getUserObject())) { return; } } myExecutionNode = null; if (myView.getView(CONSOLE_VIEW_NAME) != null && myViewSettingsProvider.isSideBySideView()) { myView.enableView(CONSOLE_VIEW_NAME, false); myPanel.setVisible(true); } else { myPanel.setVisible(false); } } public JComponent getComponent() { return myPanel; } public void clear() { myPanel.setVisible(false); getTaskOutputView().clear(); } } }
cleanup
platform/lang-impl/src/com/intellij/build/BuildTreeConsoleView.java
cleanup
<ide><path>platform/lang-impl/src/com/intellij/build/BuildTreeConsoleView.java <ide> <ide> private static void updateSplitter(@NotNull ThreeComponentsSplitter myThreeComponentsSplitter) { <ide> int firstSize = myThreeComponentsSplitter.getFirstSize(); <del> //int lastSize = myThreeComponentsSplitter.getLastSize(); <ide> int splitterWidth = myThreeComponentsSplitter.getWidth(); <del> if (firstSize == 0/* && lastSize == 0*/) { <add> if (firstSize == 0) { <ide> float proportion = PropertiesComponent.getInstance().getFloat(SPLITTER_PROPERTY, 0.3f); <ide> int width = Math.round(splitterWidth * proportion); <ide> if (width > 0) {
Java
apache-2.0
5ad3b4e9bcd7f8ee654bf2fa6f4e15d4702d93cb
0
52nlp/webanno,52nlp/webanno,debovis/webanno,debovis/webanno,debovis/webanno,52nlp/webanno
/******************************************************************************* * Copyright 2012 * Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology * Technische Universität Darmstadt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package de.tudarmstadt.ukp.clarin.webanno.webapp.page.project; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.FileEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.util.EntityUtils; import org.apache.uima.UIMAException; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.markup.html.form.Button; import org.apache.wicket.markup.html.link.DownloadLink; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.LoadableDetachableModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.ResourceModel; import org.apache.wicket.spring.injection.annot.SpringBean; import org.springframework.http.converter.json.MappingJacksonHttpMessageConverter; import org.springframework.security.core.context.SecurityContextHolder; import org.wicketstuff.progressbar.ProgressBar; import org.wicketstuff.progressbar.Progression; import org.wicketstuff.progressbar.ProgressionModel; import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService; import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService; import de.tudarmstadt.ukp.clarin.webanno.api.UserDao; import de.tudarmstadt.ukp.clarin.webanno.brat.project.ProjectUtil; import de.tudarmstadt.ukp.clarin.webanno.export.model.AnnotationDocument; import de.tudarmstadt.ukp.clarin.webanno.export.model.ProjectPermission; import de.tudarmstadt.ukp.clarin.webanno.export.model.SourceDocument; import de.tudarmstadt.ukp.clarin.webanno.export.model.TagSet; import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState; import de.tudarmstadt.ukp.clarin.webanno.model.Mode; import de.tudarmstadt.ukp.clarin.webanno.model.Project; import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocumentState; import de.tudarmstadt.ukp.clarin.webanno.model.Tag; import de.tudarmstadt.ukp.clarin.webanno.model.User; import de.tudarmstadt.ukp.clarin.webanno.tcf.TcfWriter; import de.tudarmstadt.ukp.clarin.webanno.webapp.dao.DaoUtils; import de.tudarmstadt.ukp.clarin.webanno.webapp.dialog.AJAXDownload; import eu.clarin.weblicht.wlfxb.io.WLFormatException; /** * A Panel used to add Project Guidelines in a selected {@link Project} * * @author Seid Muhie Yimam * */ @SuppressWarnings("deprecation") public class 
ProjectExportPanel extends Panel { private static final long serialVersionUID = 2116717853865353733L; private static final String META_INF = "/META-INF"; public static final String EXPORTED_PROJECT = "exportedproject"; private static final String SOURCE = "/source"; private static final String CURATION_AS_SERIALISED_CAS = "/curation_ser/"; private static final String CURATION = "/curation/"; private static final String LOG = "/log"; private static final String GUIDELINE = "/guideline"; private static final String ANNOTATION_AS_SERIALISED_CAS = "/annotation_ser/"; private static final String ANNOTATION = "/annotation/"; private static final String CURATION_USER = "CURATION_USER"; private static final String CORRECTION_USER = "CORRECTION_USER"; @SpringBean(name = "annotationService") private AnnotationService annotationService; @SpringBean(name = "documentRepository") private RepositoryService projectRepository; @SpringBean(name = "userRepository") private UserDao userRepository; private int progress = 0; private ProgressBar Progress; private String username; private String fileName; String downloadedFile; String projectName; public ProjectExportPanel(String id, final Model<Project> aProjectModel) { super(id); username = SecurityContextHolder.getContext().getAuthentication().getName(); add(new Button("send", new ResourceModel("label")) { private static final long serialVersionUID = 1L; @Override public boolean isVisible() { return projectRepository.isRemoteProject(aProjectModel.getObject()); } @Override public void onSubmit() { @SuppressWarnings({ "resource" }) HttpClient httpclient = new DefaultHttpClient(); try { HttpPost httppost = new HttpPost( "http://aspra11.informatik.uni-leipzig.de:8080/" + "TEI-Integration/collection/addAnnotations?user=pws&pass=showcase"); File exportTempDir = File.createTempFile("webanno", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); File metaInfDir = new File(exportTempDir + META_INF); FileUtils.forceMkdir(metaInfDir); boolean curationDocumentExist = isCurationDocumentExists(aProjectModel .getObject()); if (!curationDocumentExist) { error("No curation document created yet for this document"); } else { // copy curated documents into the export folder exportCuratedDocuments(aProjectModel.getObject(), exportTempDir); // copy META_INF contents from the project directory to the export folder FileUtils.copyDirectory(new File(projectRepository.getDir(), "/project/" + aProjectModel.getObject().getId() + META_INF), metaInfDir); DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getAbsolutePath() + ".zip")); FileEntity reqEntity = new FileEntity(new File(exportTempDir .getAbsolutePath() + ".zip"), "application/octet-stream"); httppost.setEntity(reqEntity); HttpResponse response = httpclient.execute(httppost); HttpEntity resEntity = response.getEntity(); info(response.getStatusLine().toString()); EntityUtils.consume(resEntity); } } catch (ClientProtocolException e) { error(ExceptionUtils.getRootCause(e)); } catch (IOException e) { error(e.getMessage()); } catch (UIMAException e) { error(ExceptionUtils.getRootCause(e)); } catch (ClassNotFoundException e) { error(ExceptionUtils.getRootCause(e)); } catch (WLFormatException e) { error(ExceptionUtils.getRootCause(e)); } catch (Exception e) { error(ExceptionUtils.getRootCause(e)); } finally { try { httpclient.getConnectionManager().shutdown(); } catch (Exception e) { error(ExceptionUtils.getRootCause(e)); } } } }).setOutputMarkupId(true); add(new DownloadLink("export", new LoadableDetachableModel<File>() { 
private static final long serialVersionUID = 840863954694163375L; @Override protected File load() { File exportFile = null; try { File exportTempDir = File.createTempFile("webanno", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); boolean curationDocumentExist = isCurationDocumentExists(aProjectModel .getObject()); if (!curationDocumentExist) { error("No curation document created yet for this document"); } else { exportCuratedDocuments(aProjectModel.getObject(), exportTempDir); DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getAbsolutePath() + ".zip")); exportFile = new File(exportTempDir.getAbsolutePath() + ".zip"); } } catch (IOException e) { error(e.getMessage()); } catch (Exception e) { error(e.getMessage()); } return exportFile; } }) { private static final long serialVersionUID = 5630612543039605914L; @Override public boolean isVisible() { return isCurationDocumentExists(aProjectModel.getObject()); } }).setOutputMarkupId(true); final AJAXDownload exportProject = new AJAXDownload(); Progress = new ProgressBar("progress", new ProgressionModel() { private static final long serialVersionUID = 1971929040248482474L; @Override protected Progression getProgression() { return new Progression(progress); } }) { private static final long serialVersionUID = -6599620911784164177L; @Override protected void onFinished(AjaxRequestTarget target) { if (!fileName.equals(downloadedFile)) { exportProject.initiate(target, fileName); downloadedFile = fileName; } } }; Progress.add(exportProject); add(Progress); add(new AjaxLink<Void>("exportProject") { private static final long serialVersionUID = -5758406309688341664L; @Override public void onClick(final AjaxRequestTarget target) { Progress.start(target); new Thread() { @Override public void run() { File file = null; try { Thread.sleep(200); file = generateZipFile(aProjectModel, target); fileName = file.getAbsolutePath(); projectName = aProjectModel.getObject().getName(); } catch (UIMAException e) { error(ExceptionUtils.getRootCause(e)); } catch (ClassNotFoundException e) { error(e.getMessage()); } catch (IOException e) { error(e.getMessage()); } catch (WLFormatException e) { error(e.getMessage()); } catch (ZippingException e) { error(e.getMessage()); } catch (InterruptedException e) { } } }.start(); } }); } public File generateZipFile(final Model<Project> aProjectModel, AjaxRequestTarget target) throws IOException, UIMAException, ClassNotFoundException, WLFormatException, ZippingException, InterruptedException { File exportTempDir = null; // all metadata and project settings data from the database as JSON file File projectSettings = null; projectSettings = File.createTempFile(EXPORTED_PROJECT, ".json"); // Directory to store source documents and annotation documents exportTempDir = File.createTempFile("webanno-project", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); if (aProjectModel.getObject().getId() == 0) { error("Project not yet created. 
Please save project details first!"); } else { exportProjectSettings(aProjectModel.getObject(), projectSettings, exportTempDir); exportSourceDocuments(aProjectModel.getObject(), exportTempDir); progress = 20; exportAnnotationDocuments(aProjectModel.getObject(), exportTempDir); progress = progress + 1; exportProjectLog(aProjectModel.getObject(), exportTempDir); progress = progress + 1; exportGuideLine(aProjectModel.getObject(), exportTempDir); progress = progress + 1; exportProjectMetaInf(aProjectModel.getObject(), exportTempDir); progress = 90; exportCuratedDocuments(aProjectModel.getObject(), exportTempDir); try { DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getAbsolutePath() + ".zip")); } catch (Exception e) { throw new ZippingException("Unable to Zipp the file"); } progress = 100; } return new File(exportTempDir.getAbsolutePath() + ".zip"); } /** * Copy source documents from the file system of this project to the export folder */ private void exportSourceDocuments(Project aProject, File aCopyDir) throws IOException { File sourceDocumentDir = new File(aCopyDir + SOURCE); FileUtils.forceMkdir(sourceDocumentDir); // Get all the source documents from the project List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository .listSourceDocuments(aProject); for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { FileUtils.copyFileToDirectory( projectRepository.exportSourceDocument(sourceDocument), sourceDocumentDir); } } /** * Copy, if exists, curation documents to a folder that will be exported as Zip file * * @param aProject * The {@link Project} * @param aCurationDocumentExist * Check if Curation document exists * @param aCopyDir * The folder where curated documents are copied to be exported as Zip File */ private void exportCuratedDocuments(Project aProject, File aCopyDir) throws FileNotFoundException, UIMAException, IOException, WLFormatException, ClassNotFoundException { // Get all the source documents from the project List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository .listSourceDocuments(aProject); for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { File curationCasDir = new File(aCopyDir + CURATION_AS_SERIALISED_CAS + sourceDocument.getName()); FileUtils.forceMkdir(curationCasDir); File curationDir = new File(aCopyDir + CURATION + sourceDocument.getName()); FileUtils.forceMkdir(curationDir); // If the curation document is exist (either finished or in progress if (sourceDocument.getState().equals(SourceDocumentState.CURATION_FINISHED) || sourceDocument.getState().equals(SourceDocumentState.CURATION_IN_PROGRESS)) { File CurationFileAsSerialisedCas = projectRepository.exportserializedCas( sourceDocument, CURATION_USER); File curationFile = null; if (CurationFileAsSerialisedCas.exists()) { curationFile = projectRepository.exportAnnotationDocument(sourceDocument, username, TcfWriter.class, sourceDocument.getName(), Mode.CURATION); } // in Case they didn't exist if (CurationFileAsSerialisedCas.exists()) { FileUtils.copyFileToDirectory(curationFile, curationDir); FileUtils.copyFileToDirectory(CurationFileAsSerialisedCas, curationCasDir); } } // If this project is a correction project, add the auto-annotated CAS to same folder as // CURATION if (aProject.getMode().equals(Mode.CORRECTION)) { File CorrectionFileAsSerialisedCas = projectRepository.exportserializedCas( sourceDocument, CORRECTION_USER); File correctionFile = null; if 
(CorrectionFileAsSerialisedCas.exists()) { correctionFile = projectRepository.exportAnnotationDocument(sourceDocument, username, TcfWriter.class, sourceDocument.getName(), Mode.CORRECTION); } // in Case they didn't exist if (CorrectionFileAsSerialisedCas.exists()) { FileUtils.copyFileToDirectory(correctionFile, curationDir); FileUtils.copyFileToDirectory(CorrectionFileAsSerialisedCas, curationCasDir); } } } } /** * Copy Project logs from the file system of this project to the export folder */ private void exportProjectLog(Project aProject, File aCopyDir) throws IOException { File logDir = new File(aCopyDir + LOG); FileUtils.forceMkdir(logDir); if (projectRepository.exportProjectLog(aProject).exists()) { FileUtils.copyFileToDirectory(projectRepository.exportProjectLog(aProject), logDir); } } /** * Copy Project guidelines from the file system of this project to the export folder */ private void exportGuideLine(Project aProject, File aCopyDir) throws IOException { File guidelineDir = new File(aCopyDir + GUIDELINE); FileUtils.forceMkdir(guidelineDir); File annotationGuidlines = projectRepository.exportGuidelines(aProject); if (annotationGuidlines.exists()) { for (File annotationGuideline : annotationGuidlines.listFiles()) { FileUtils.copyFileToDirectory(annotationGuideline, guidelineDir); } } } /** * Copy Project guidelines from the file system of this project to the export folder */ private void exportProjectMetaInf(Project aProject, File aCopyDir) throws IOException { File metaInfDir = new File(aCopyDir + META_INF); FileUtils.forceMkdir(metaInfDir); File metaInf = projectRepository.exportProjectMetaInf(aProject); if (metaInf.exists()) { FileUtils.copyDirectory(metaInf, metaInfDir); } } /** * Copy annotation document as Serialized CAS from the file system of this project to the export * folder * * @throws ClassNotFoundException * @throws WLFormatException * @throws UIMAException */ private void exportAnnotationDocuments(Project aProject, File aCopyDir) throws IOException, UIMAException, WLFormatException, ClassNotFoundException { List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository .listSourceDocuments(aProject); for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { for (de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument annotationDocument : projectRepository .listAnnotationDocuments(sourceDocument)) { // copy annotation document only for ACTIVE users and the state of the annotation // document // is not NEW/IGNOR if (userRepository.get(annotationDocument.getUser()) != null && !annotationDocument.getState().equals(AnnotationDocumentState.NEW) && !annotationDocument.getState().equals(AnnotationDocumentState.IGNORE)) { File annotationDocumentAsSerialisedCasDir = new File(aCopyDir.getAbsolutePath() + ANNOTATION_AS_SERIALISED_CAS + sourceDocument.getName()); File annotationDocumentDir = new File(aCopyDir.getAbsolutePath() + ANNOTATION + sourceDocument.getName()); FileUtils.forceMkdir(annotationDocumentAsSerialisedCasDir); FileUtils.forceMkdir(annotationDocumentDir); File annotationFileAsSerialisedCas = projectRepository.exportserializedCas( sourceDocument, annotationDocument.getUser()); File annotationFile = null; if (annotationFileAsSerialisedCas.exists()) { Class<?> writer = projectRepository.getWritableFormats().get( sourceDocument.getFormat()); annotationFile = projectRepository.exportAnnotationDocument(sourceDocument, annotationDocument.getUser(), writer, sourceDocument.getName(), Mode.ANNOTATION); } if 
(annotationFileAsSerialisedCas.exists()) { FileUtils.copyFileToDirectory(annotationFileAsSerialisedCas, annotationDocumentAsSerialisedCasDir); FileUtils.copyFileToDirectory(annotationFile, annotationDocumentDir); } } } progress = progress+1; } } private boolean isCurationDocumentExists(Project aProject) { boolean curationDocumentExist = false; List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository .listSourceDocuments(aProject); for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { // If the curation document is exist (either finished or in progress if (sourceDocument.getState().equals(SourceDocumentState.CURATION_FINISHED) || sourceDocument.getState().equals(SourceDocumentState.CURATION_IN_PROGRESS)) { curationDocumentExist = true; break; } } return curationDocumentExist; } private void exportProjectSettings(Project aProject, File aProjectSettings, File aExportTempDir) { de.tudarmstadt.ukp.clarin.webanno.export.model.Project project = new de.tudarmstadt.ukp.clarin.webanno.export.model.Project(); project.setDescription(aProject.getDescription()); project.setName(aProject.getName()); project.setReverse(aProject.isReverseDependencyDirection()); project.setMode(aProject.getMode()); List<TagSet> tagsets = new ArrayList<TagSet>(); // add TagSets to the project for (de.tudarmstadt.ukp.clarin.webanno.model.TagSet tagSet : annotationService .listTagSets(aProject)) { TagSet exportedTagSetContent = new TagSet(); exportedTagSetContent.setDescription(tagSet.getDescription()); exportedTagSetContent.setLanguage(tagSet.getLanguage()); exportedTagSetContent.setName(tagSet.getName()); exportedTagSetContent.setType(tagSet.getType().getType()); exportedTagSetContent.setTypeName(tagSet.getType().getName()); exportedTagSetContent.setTypeDescription(tagSet.getType().getDescription()); List<de.tudarmstadt.ukp.clarin.webanno.export.model.Tag> exportedTags = new ArrayList<de.tudarmstadt.ukp.clarin.webanno.export.model.Tag>(); for (Tag tag : annotationService.listTags(tagSet)) { de.tudarmstadt.ukp.clarin.webanno.export.model.Tag exportedTag = new de.tudarmstadt.ukp.clarin.webanno.export.model.Tag(); exportedTag.setDescription(tag.getDescription()); exportedTag.setName(tag.getName()); exportedTags.add(exportedTag); } exportedTagSetContent.setTags(exportedTags); tagsets.add(exportedTagSetContent); } project.setTagSets(tagsets); List<SourceDocument> sourceDocuments = new ArrayList<SourceDocument>(); List<AnnotationDocument> annotationDocuments = new ArrayList<AnnotationDocument>(); // add source documents to a project for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : projectRepository .listSourceDocuments(aProject)) { SourceDocument sourceDocumentToExport = new SourceDocument(); sourceDocumentToExport.setFormat(sourceDocument.getFormat()); sourceDocumentToExport.setName(sourceDocument.getName()); sourceDocumentToExport.setState(sourceDocument.getState()); // add annotation document to Project for (de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument annotationDocument : projectRepository .listAnnotationDocuments(sourceDocument)) { AnnotationDocument annotationDocumentToExport = new AnnotationDocument(); annotationDocumentToExport.setName(annotationDocument.getName()); annotationDocumentToExport.setState(annotationDocument.getState()); annotationDocumentToExport.setUser(annotationDocument.getUser()); annotationDocuments.add(annotationDocumentToExport); } sourceDocuments.add(sourceDocumentToExport); } 
project.setSourceDocuments(sourceDocuments); project.setAnnotationDocuments(annotationDocuments); List<ProjectPermission> projectPermissions = new ArrayList<ProjectPermission>(); // add project permissions to the project for (User user : projectRepository.listProjectUsersWithPermissions(aProject)) { for (de.tudarmstadt.ukp.clarin.webanno.model.ProjectPermission permission : projectRepository .listProjectPermisionLevel(user, aProject)) { ProjectPermission permissionToExport = new ProjectPermission(); permissionToExport.setLevel(permission.getLevel()); permissionToExport.setUser(user.getUsername()); projectPermissions.add(permissionToExport); } } project.setProjectPermissions(projectPermissions); MappingJacksonHttpMessageConverter jsonConverter = new MappingJacksonHttpMessageConverter(); ProjectUtil.setJsonConverter(jsonConverter); try { ProjectUtil.generateJson(project, aProjectSettings); FileUtils.copyFileToDirectory(aProjectSettings, aExportTempDir); } catch (IOException e) { error("File Path not found or No permision to save the file!"); } } }
de.tudarmstadt.ukp.clarin.webanno.webapp/src/main/java/de/tudarmstadt/ukp/clarin/webanno/webapp/page/project/ProjectExportPanel.java
/******************************************************************************* * Copyright 2012 * Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology * Technische Universität Darmstadt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package de.tudarmstadt.ukp.clarin.webanno.webapp.page.project; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.FileEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.util.EntityUtils; import org.apache.uima.UIMAException; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.AjaxLink; import org.apache.wicket.markup.html.form.Button; import org.apache.wicket.markup.html.link.DownloadLink; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.LoadableDetachableModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.ResourceModel; import org.apache.wicket.spring.injection.annot.SpringBean; import org.springframework.http.converter.json.MappingJacksonHttpMessageConverter; import org.springframework.security.core.context.SecurityContextHolder; import org.wicketstuff.progressbar.ProgressBar; import org.wicketstuff.progressbar.Progression; import org.wicketstuff.progressbar.ProgressionModel; import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService; import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService; import de.tudarmstadt.ukp.clarin.webanno.api.UserDao; import de.tudarmstadt.ukp.clarin.webanno.brat.project.ProjectUtil; import de.tudarmstadt.ukp.clarin.webanno.export.model.AnnotationDocument; import de.tudarmstadt.ukp.clarin.webanno.export.model.ProjectPermission; import de.tudarmstadt.ukp.clarin.webanno.export.model.SourceDocument; import de.tudarmstadt.ukp.clarin.webanno.export.model.TagSet; import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState; import de.tudarmstadt.ukp.clarin.webanno.model.Mode; import de.tudarmstadt.ukp.clarin.webanno.model.Project; import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocumentState; import de.tudarmstadt.ukp.clarin.webanno.model.Tag; import de.tudarmstadt.ukp.clarin.webanno.model.User; import de.tudarmstadt.ukp.clarin.webanno.tcf.TcfWriter; import de.tudarmstadt.ukp.clarin.webanno.webapp.dao.DaoUtils; import de.tudarmstadt.ukp.clarin.webanno.webapp.dialog.AJAXDownload; import eu.clarin.weblicht.wlfxb.io.WLFormatException; /** * A Panel used to add Project Guidelines in a selected {@link Project} * * @author Seid Muhie Yimam * */ @SuppressWarnings("deprecation") public class 
ProjectExportPanel extends Panel { private static final long serialVersionUID = 2116717853865353733L; private static final String META_INF = "/META-INF"; public static final String EXPORTED_PROJECT = "exportedproject"; private static final String CURATION_AS_SERIALISED_CAS = "/curation_ser/"; private static final String CURATION = "/curation/"; private static final String LOG = "/log"; private static final String GUIDELINE = "/guideline"; private static final String ANNOTATION_AS_SERIALISED_CAS = "/annotation_ser/"; private static final String ANNOTATION = "/annotation/"; private static final String CURATION_USER = "CURATION_USER"; private static final String CORRECTION_USER = "CORRECTION_USER"; @SpringBean(name = "annotationService") private AnnotationService annotationService; @SpringBean(name = "documentRepository") private RepositoryService projectRepository; @SpringBean(name = "userRepository") private UserDao userRepository; private int progress = 0; private ProgressBar Progress; private String username; private String fileName; String downloadedFile; String projectName; public ProjectExportPanel(String id, final Model<Project> aProjectModel) { super(id); username = SecurityContextHolder.getContext().getAuthentication().getName(); add(new Button("send", new ResourceModel("label")) { private static final long serialVersionUID = 1L; @Override public boolean isVisible() { return projectRepository.isRemoteProject(aProjectModel.getObject()); } @Override public void onSubmit() { @SuppressWarnings({ "resource" }) HttpClient httpclient = new DefaultHttpClient(); try { HttpPost httppost = new HttpPost( "http://aspra11.informatik.uni-leipzig.de:8080/" + "TEI-Integration/collection/addAnnotations?user=pws&pass=showcase"); File exportTempDir = File.createTempFile("webanno", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); File metaInfDir = new File(exportTempDir + META_INF); FileUtils.forceMkdir(metaInfDir); boolean curationDocumentExist = isCurationDocumentExists(aProjectModel .getObject()); if (!curationDocumentExist) { error("No curation document created yet for this document"); } else { // copy curated documents into the export folder exportCuratedDocuments(aProjectModel.getObject(), exportTempDir); // copy META_INF contents from the project directory to the export folder FileUtils.copyDirectory(new File(projectRepository.getDir(), "/project/" + aProjectModel.getObject().getId() + META_INF), metaInfDir); DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getAbsolutePath() + ".zip")); FileEntity reqEntity = new FileEntity(new File(exportTempDir .getAbsolutePath() + ".zip"), "application/octet-stream"); httppost.setEntity(reqEntity); HttpResponse response = httpclient.execute(httppost); HttpEntity resEntity = response.getEntity(); info(response.getStatusLine().toString()); EntityUtils.consume(resEntity); } } catch (ClientProtocolException e) { error(ExceptionUtils.getRootCause(e)); } catch (IOException e) { error(e.getMessage()); } catch (UIMAException e) { error(ExceptionUtils.getRootCause(e)); } catch (ClassNotFoundException e) { error(ExceptionUtils.getRootCause(e)); } catch (WLFormatException e) { error(ExceptionUtils.getRootCause(e)); } catch (Exception e) { error(ExceptionUtils.getRootCause(e)); } finally { try { httpclient.getConnectionManager().shutdown(); } catch (Exception e) { error(ExceptionUtils.getRootCause(e)); } } } }).setOutputMarkupId(true); add(new DownloadLink("export", new LoadableDetachableModel<File>() { private static final long serialVersionUID = 
840863954694163375L; @Override protected File load() { File exportFile = null; try { File exportTempDir = File.createTempFile("webanno", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); boolean curationDocumentExist = isCurationDocumentExists(aProjectModel .getObject()); if (!curationDocumentExist) { error("No curation document created yet for this document"); } else { exportCuratedDocuments(aProjectModel.getObject(), exportTempDir); DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getAbsolutePath() + ".zip")); exportFile = new File(exportTempDir.getAbsolutePath() + ".zip"); } } catch (IOException e) { error(e.getMessage()); } catch (Exception e) { error(e.getMessage()); } return exportFile; } }) { private static final long serialVersionUID = 5630612543039605914L; @Override public boolean isVisible() { return isCurationDocumentExists(aProjectModel.getObject()); } }).setOutputMarkupId(true); final AJAXDownload exportProject = new AJAXDownload(); Progress = new ProgressBar("progress", new ProgressionModel() { private static final long serialVersionUID = 1971929040248482474L; @Override protected Progression getProgression() { return new Progression(progress); } }) { private static final long serialVersionUID = -6599620911784164177L; @Override protected void onFinished(AjaxRequestTarget target) { if (!fileName.equals(downloadedFile)) { exportProject.initiate(target, fileName); downloadedFile = fileName; } } }; Progress.add(exportProject); add(Progress); add(new AjaxLink<Void>("exportProject") { private static final long serialVersionUID = -5758406309688341664L; @Override public void onClick(final AjaxRequestTarget target) { Progress.start(target); new Thread() { @Override public void run() { File file = null; try { // file = file = generateZipFile(aProjectModel, target); fileName = file.getAbsolutePath(); projectName = aProjectModel.getObject().getName(); } catch (UIMAException e) { error(ExceptionUtils.getRootCause(e)); } catch (ClassNotFoundException e) { error(e.getMessage()); } catch (IOException e) { error(e.getMessage()); } catch (WLFormatException e) { error(e.getMessage()); } catch (ZippingException e) { error(e.getMessage()); } catch (InterruptedException e) { } } }.start(); } }); } public File generateZipFile(final Model<Project> aProjectModel, AjaxRequestTarget target) throws IOException, UIMAException, ClassNotFoundException, WLFormatException, ZippingException, InterruptedException { File exportTempDir = null; // all metadata and project settings data from the database as JSON file File projectSettings = null; projectSettings = File.createTempFile(EXPORTED_PROJECT, ".json"); // Directory to store source documents and annotation documents exportTempDir = File.createTempFile("webanno-project", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); if (aProjectModel.getObject().getId() == 0) { error("Project not yet created. 
Please save project details first!"); } else { Thread.sleep(100); exportProjectSettings(aProjectModel.getObject(), projectSettings, exportTempDir); Thread.sleep(100); progress = 20; exportAnnotationDocuments(aProjectModel.getObject(), exportTempDir); progress = progress + 1; exportProjectLog(aProjectModel.getObject(), exportTempDir); Thread.sleep(100); progress = progress + 1; exportGuideLine(aProjectModel.getObject(), exportTempDir); Thread.sleep(100); progress = progress + 1; exportProjectMetaInf(aProjectModel.getObject(), exportTempDir); Thread.sleep(400); progress = 90; exportCuratedDocuments(aProjectModel.getObject(), exportTempDir); try { DaoUtils.zipFolder(exportTempDir, new File(exportTempDir.getAbsolutePath() + ".zip")); } catch (Exception e) { throw new ZippingException("Unable to Zipp the file"); } Thread.sleep(100); progress = 100; } return new File(exportTempDir.getAbsolutePath() + ".zip"); } /** * Copy, if exists, curation documents to a folder that will be exported as Zip file * * @param aProject * The {@link Project} * @param aCurationDocumentExist * Check if Curation document exists * @param aCopyDir * The folder where curated documents are copied to be exported as Zip File */ private void exportCuratedDocuments(Project aProject, File aCopyDir) throws FileNotFoundException, UIMAException, IOException, WLFormatException, ClassNotFoundException { // Get all the source documents from the project List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository .listSourceDocuments(aProject); for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { File curationCasDir = new File(aCopyDir + CURATION_AS_SERIALISED_CAS + sourceDocument.getName()); FileUtils.forceMkdir(curationCasDir); File curationDir = new File(aCopyDir + CURATION + sourceDocument.getName()); FileUtils.forceMkdir(curationDir); // If the curation document is exist (either finished or in progress if (sourceDocument.getState().equals(SourceDocumentState.CURATION_FINISHED) || sourceDocument.getState().equals(SourceDocumentState.CURATION_IN_PROGRESS)) { File CurationFileAsSerialisedCas = projectRepository.exportserializedCas( sourceDocument, CURATION_USER); File curationFile = null; if (CurationFileAsSerialisedCas.exists()) { curationFile = projectRepository.exportAnnotationDocument(sourceDocument, username, TcfWriter.class, sourceDocument.getName(), Mode.CURATION); } // in Case they didn't exist if (CurationFileAsSerialisedCas.exists()) { FileUtils.copyFileToDirectory(curationFile, curationDir); FileUtils.copyFileToDirectory(CurationFileAsSerialisedCas, curationCasDir); } } // If this project is a correction project, add the auto-annotated CAS to same folder as // CURATION if (aProject.getMode().equals(Mode.CORRECTION)) { File CorrectionFileAsSerialisedCas = projectRepository.exportserializedCas( sourceDocument, CORRECTION_USER); File correctionFile = null; if (CorrectionFileAsSerialisedCas.exists()) { correctionFile = projectRepository.exportAnnotationDocument(sourceDocument, username, TcfWriter.class, sourceDocument.getName(), Mode.CORRECTION); } // in Case they didn't exist if (CorrectionFileAsSerialisedCas.exists()) { FileUtils.copyFileToDirectory(correctionFile, curationDir); FileUtils.copyFileToDirectory(CorrectionFileAsSerialisedCas, curationCasDir); } } } } /** * Copy Project logs from the file system of this project to the export folder */ private void exportProjectLog(Project aProject, File aCopyDir) throws IOException { File logDir = new 
File(aCopyDir + LOG); FileUtils.forceMkdir(logDir); if (projectRepository.exportProjectLog(aProject).exists()) { FileUtils.copyFileToDirectory(projectRepository.exportProjectLog(aProject), logDir); } } /** * Copy Project guidelines from the file system of this project to the export folder */ private void exportGuideLine(Project aProject, File aCopyDir) throws IOException { File guidelineDir = new File(aCopyDir + GUIDELINE); FileUtils.forceMkdir(guidelineDir); File annotationGuidlines = projectRepository.exportGuidelines(aProject); if (annotationGuidlines.exists()) { for (File annotationGuideline : annotationGuidlines.listFiles()) { FileUtils.copyFileToDirectory(annotationGuideline, guidelineDir); } } } /** * Copy Project guidelines from the file system of this project to the export folder */ private void exportProjectMetaInf(Project aProject, File aCopyDir) throws IOException { File metaInfDir = new File(aCopyDir + META_INF); FileUtils.forceMkdir(metaInfDir); File metaInf = projectRepository.exportProjectMetaInf(aProject); if (metaInf.exists()) { FileUtils.copyDirectory(metaInf, metaInfDir); } } /** * Copy annotation document as Serialized CAS from the file system of this project to the export * folder * * @throws ClassNotFoundException * @throws WLFormatException * @throws UIMAException */ private void exportAnnotationDocuments(Project aProject, File aCopyDir) throws IOException, UIMAException, WLFormatException, ClassNotFoundException { List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository .listSourceDocuments(aProject); for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { for (de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument annotationDocument : projectRepository .listAnnotationDocuments(sourceDocument)) { // copy annotation document only for ACTIVE users and the state of the annotation // document // is not NEW/IGNOR if (userRepository.get(annotationDocument.getUser()) != null && !annotationDocument.getState().equals(AnnotationDocumentState.NEW) && !annotationDocument.getState().equals(AnnotationDocumentState.IGNORE)) { File annotationDocumentAsSerialisedCasDir = new File(aCopyDir.getAbsolutePath() + ANNOTATION_AS_SERIALISED_CAS + sourceDocument.getName()); File annotationDocumentDir = new File(aCopyDir.getAbsolutePath() + ANNOTATION + sourceDocument.getName()); FileUtils.forceMkdir(annotationDocumentAsSerialisedCasDir); FileUtils.forceMkdir(annotationDocumentDir); File annotationFileAsSerialisedCas = projectRepository.exportserializedCas( sourceDocument, annotationDocument.getUser()); File annotationFile = null; if (annotationFileAsSerialisedCas.exists()) { Class<?> writer = projectRepository.getWritableFormats().get( sourceDocument.getFormat()); annotationFile = projectRepository.exportAnnotationDocument(sourceDocument, annotationDocument.getUser(), writer, sourceDocument.getName(), Mode.ANNOTATION); } if (annotationFileAsSerialisedCas.exists()) { FileUtils.copyFileToDirectory(annotationFileAsSerialisedCas, annotationDocumentAsSerialisedCasDir); FileUtils.copyFileToDirectory(annotationFile, annotationDocumentDir); } } } progress = progress+1; } } private boolean isCurationDocumentExists(Project aProject) { boolean curationDocumentExist = false; List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository .listSourceDocuments(aProject); for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { // If the curation document is exist (either 
finished or in progress if (sourceDocument.getState().equals(SourceDocumentState.CURATION_FINISHED) || sourceDocument.getState().equals(SourceDocumentState.CURATION_IN_PROGRESS)) { curationDocumentExist = true; break; } } return curationDocumentExist; } private void exportProjectSettings(Project aProject, File aProjectSettings, File aExportTempDir) { de.tudarmstadt.ukp.clarin.webanno.export.model.Project project = new de.tudarmstadt.ukp.clarin.webanno.export.model.Project(); project.setDescription(aProject.getDescription()); project.setName(aProject.getName()); project.setReverse(aProject.isReverseDependencyDirection()); project.setMode(aProject.getMode()); List<TagSet> tagsets = new ArrayList<TagSet>(); // add TagSets to the project for (de.tudarmstadt.ukp.clarin.webanno.model.TagSet tagSet : annotationService .listTagSets(aProject)) { TagSet exportedTagSetContent = new TagSet(); exportedTagSetContent.setDescription(tagSet.getDescription()); exportedTagSetContent.setLanguage(tagSet.getLanguage()); exportedTagSetContent.setName(tagSet.getName()); exportedTagSetContent.setType(tagSet.getType().getType()); exportedTagSetContent.setTypeName(tagSet.getType().getName()); exportedTagSetContent.setTypeDescription(tagSet.getType().getDescription()); List<de.tudarmstadt.ukp.clarin.webanno.export.model.Tag> exportedTags = new ArrayList<de.tudarmstadt.ukp.clarin.webanno.export.model.Tag>(); for (Tag tag : annotationService.listTags(tagSet)) { de.tudarmstadt.ukp.clarin.webanno.export.model.Tag exportedTag = new de.tudarmstadt.ukp.clarin.webanno.export.model.Tag(); exportedTag.setDescription(tag.getDescription()); exportedTag.setName(tag.getName()); exportedTags.add(exportedTag); } exportedTagSetContent.setTags(exportedTags); tagsets.add(exportedTagSetContent); } project.setTagSets(tagsets); List<SourceDocument> sourceDocuments = new ArrayList<SourceDocument>(); List<AnnotationDocument> annotationDocuments = new ArrayList<AnnotationDocument>(); // add source documents to a project for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : projectRepository .listSourceDocuments(aProject)) { SourceDocument sourceDocumentToExport = new SourceDocument(); sourceDocumentToExport.setFormat(sourceDocument.getFormat()); sourceDocumentToExport.setName(sourceDocument.getName()); sourceDocumentToExport.setState(sourceDocument.getState()); // add annotation document to Project for (de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument annotationDocument : projectRepository .listAnnotationDocuments(sourceDocument)) { AnnotationDocument annotationDocumentToExport = new AnnotationDocument(); annotationDocumentToExport.setName(annotationDocument.getName()); annotationDocumentToExport.setState(annotationDocument.getState()); annotationDocumentToExport.setUser(annotationDocument.getUser()); annotationDocuments.add(annotationDocumentToExport); } sourceDocuments.add(sourceDocumentToExport); } project.setSourceDocuments(sourceDocuments); project.setAnnotationDocuments(annotationDocuments); List<ProjectPermission> projectPermissions = new ArrayList<ProjectPermission>(); // add project permissions to the project for (User user : projectRepository.listProjectUsersWithPermissions(aProject)) { for (de.tudarmstadt.ukp.clarin.webanno.model.ProjectPermission permission : projectRepository .listProjectPermisionLevel(user, aProject)) { ProjectPermission permissionToExport = new ProjectPermission(); permissionToExport.setLevel(permission.getLevel()); permissionToExport.setUser(user.getUsername()); 
projectPermissions.add(permissionToExport); } } project.setProjectPermissions(projectPermissions); MappingJacksonHttpMessageConverter jsonConverter = new MappingJacksonHttpMessageConverter(); ProjectUtil.setJsonConverter(jsonConverter); try { ProjectUtil.generateJson(project, aProjectSettings); FileUtils.copyFileToDirectory(aProjectSettings, aExportTempDir); } catch (IOException e) { error("File Path not found or No permision to save the file!"); } } }
#670 - Refactor codes for 1.0.0 release - the code for adding source document to the export zip file was not missed while refactoring
de.tudarmstadt.ukp.clarin.webanno.webapp/src/main/java/de/tudarmstadt/ukp/clarin/webanno/webapp/page/project/ProjectExportPanel.java
#670 - Refactor codes for 1.0.0 release - the code for adding source document to the export zip file was not missed while refactoring
<ide><path>de.tudarmstadt.ukp.clarin.webanno.webapp/src/main/java/de/tudarmstadt/ukp/clarin/webanno/webapp/page/project/ProjectExportPanel.java <ide> <ide> private static final String META_INF = "/META-INF"; <ide> public static final String EXPORTED_PROJECT = "exportedproject"; <add> private static final String SOURCE = "/source"; <ide> private static final String CURATION_AS_SERIALISED_CAS = "/curation_ser/"; <ide> private static final String CURATION = "/curation/"; <ide> private static final String LOG = "/log"; <ide> public void run() <ide> { <ide> File file = null; <del> try { // file = <add> try { <add> Thread.sleep(200); <ide> file = generateZipFile(aProjectModel, target); <ide> fileName = file.getAbsolutePath(); <ide> projectName = aProjectModel.getObject().getName(); <ide> } <ide> else { <ide> <del> Thread.sleep(100); <add> <ide> exportProjectSettings(aProjectModel.getObject(), projectSettings, exportTempDir); <del> Thread.sleep(100); <add> exportSourceDocuments(aProjectModel.getObject(), exportTempDir); <ide> progress = 20; <ide> exportAnnotationDocuments(aProjectModel.getObject(), exportTempDir); <ide> progress = progress + 1; <ide> exportProjectLog(aProjectModel.getObject(), exportTempDir); <del> Thread.sleep(100); <ide> progress = progress + 1; <ide> exportGuideLine(aProjectModel.getObject(), exportTempDir); <del> Thread.sleep(100); <ide> progress = progress + 1; <ide> exportProjectMetaInf(aProjectModel.getObject(), exportTempDir); <del> Thread.sleep(400); <ide> progress = 90; <ide> exportCuratedDocuments(aProjectModel.getObject(), exportTempDir); <ide> try { <ide> catch (Exception e) { <ide> throw new ZippingException("Unable to Zipp the file"); <ide> } <del> Thread.sleep(100); <ide> progress = 100; <ide> } <ide> return new File(exportTempDir.getAbsolutePath() + ".zip"); <ide> } <ide> <add> /** <add> * Copy source documents from the file system of this project to the export folder <add> */ <add> private void exportSourceDocuments(Project aProject, File aCopyDir) <add> throws IOException <add> { <add> File sourceDocumentDir = new File(aCopyDir + SOURCE); <add> FileUtils.forceMkdir(sourceDocumentDir); <add> // Get all the source documents from the project <add> List<de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument> documents = projectRepository <add> .listSourceDocuments(aProject); <add> <add> for (de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument sourceDocument : documents) { <add> FileUtils.copyFileToDirectory( <add> projectRepository.exportSourceDocument(sourceDocument), <add> sourceDocumentDir); <add> } <add> } <ide> /** <ide> * Copy, if exists, curation documents to a folder that will be exported as Zip file <ide> *
JavaScript
mit
3213dc2b772f14802a126c30af5a6a9b7f14a11e
0
solidusjs/solidus
var DEFAULT_ENCODING = 'UTF8'; var fs = require('fs'); var path = require('path'); var vm = require('vm'); var _ = require('underscore'); var async = require('async'); var request = require('request'); var Page = function( page_path, options ){ var page = this; options = options || {}; this.options = options; var server = this.options.server; var router = server.router; this.path = page_path; this.relative_path = path.relative( server.paths.views, page_path ); this.createRoute = function(){ page.is_index = /index\.hbs$/i.test( this.relative_path ); var route = this.relative_path.replace( /\.[a-z0-9]+$/i, '' ).replace( /\\/g, '/' ); var route = '/'+ route; route = route.replace( '/index', '' ); // replace indexes with base routes route = route.replace( /{([a-z_-]*)}/ig, ':$1' ); // replace dynamic bits if( route === '' ) route = '/'; page.route = route; // only overwrite existing routes if we're an index page var existing_route = _( router.routes.get ).find( function( route_data ){ return route_data.path === route; }); if( existing_route ){ console.log('[SOLIDUS]'.red.bold +' Warning. You have a conflicting route at "'+ existing_route.path +'"' ); if( !page.is_index ) return route; // return out if this isn't an index router.routes.get = _( router.routes.get ).without( existing_route ); // ensure the old route is removed if this is an index } router.get( route +'.json', function( req, res ){ page.render( req, res, { json: true }); }); router.get( route, function( req, res ){ page.render( req, res ); }); return route; }; this.parsePage = function( callback ){ fs.readFile( this.path, DEFAULT_ENCODING, function( err, data ){ var params = {}; var params_exec = /^{{!\s([\S\s]+?)\s}}/.exec( data ); try { params = ( params_exec )? JSON.parse( params_exec[1] ): {}; } catch( err ){ if( server.options.log_level >= 1 ) console.log('[SOLIDUS]'.red.bold +' Error preprocessing "'+ page.path +'"', err ); } finally { page.params = params; _( page ).extend({ title: params.title, description: params.description, permanent_name: params.permanent_name }); if( callback ) callback( params ); } }); }; this.fetchResources = function( resources, params, callback ){ var resources_data = {}; if( resources ){ var resources_array = _( resources ).pairs(); async.each( resources_array, function( resource, cb ){ var resource_url = resource[1]; for( var param in params ){ var value = params[param]; resource_url = resource_url.replace( '{'+ param +'}', value ); } request.get({ url: resource_url, json: true }, function( err, response, data ){ if( err ) return cb( err ); resources_data[resource[0]] = data; cb(); }); }, function( err ){ if( callback ) callback( resources_data ); }); } else { if( callback ) callback( resources_data ); } }; this.preprocessContext = function( context, preprocessor, callback ){ if( preprocessor ){ var preprocessor_path = path.join( server.paths.preprocessors, preprocessor ); fs.readFile( preprocessor_path, DEFAULT_ENCODING, function( err, data ){ try { vm.runInNewContext( data, { data: context }, preprocessor_path ); } catch( err ){ if( server.options.log_level >= 1 ) console.error( '[SOLIDUS]'.red.bold +' Error in preprocessor: '+ preprocessor_path, err ); } finally { if( callback ) callback( context ); } }); } else { callback( context ); } }; this.render = function( req, res, options ){ options = options || {}; var context = { page: { path: this.path, title: this.title, description: this.description, permanent_name: this.permanent_name }, parameters: {}, query: {}, resources: {}, assets: { 
scripts: '<script src="/compiled/scripts.js"></script>', styles: '<link rel="stylesheet" href="/compiled/styles.css" />' } }; // req.params is actually an array with crap stuck to it // so we have to parse that stuff out into a real object var parameters = {}; for( var key in req.params ) parameters[key] = req.params[key]; context.parameters = parameters; this.fetchResources( page.params.resources, req.params, function( resources ){ context.resources = resources; page.preprocessContext( context, page.params.preprocessor, function( context ){ if( options.json ) return res.json( context ); res.expose( context, 'solidus.context', 'context' ); var dir_to = path.dirname( page.relative_path.replace( /\.hbs$/i, '' ) ); if( Page.layouts[dir_to] ) context.layout = path.join( dir_to, 'layout' ); res.render( page.path, context ); }); }); }; this.destroy = function(){ router.routes.get = _( router.routes.get ).reject( function( current_route ){ return current_route.path === page.route; }); }; this.createRoute(); this.parsePage(); }; Page.layouts = {}; module.exports = Page;
lib/page.js
var DEFAULT_ENCODING = 'UTF8'; var fs = require('fs'); var path = require('path'); var vm = require('vm'); var _ = require('underscore'); var async = require('async'); var request = require('request'); var Page = function( page_path, options ){ var page = this; options = options || {}; this.options = options; var server = this.options.server; var router = server.router; this.path = page_path; this.relative_path = path.relative( server.paths.views, page_path ); this.createRoute = function(){ page.is_index = /index\.hbs$/i.test( this.relative_path ); var route = this.relative_path.replace( /\.[a-z0-9]+$/i, '' ).replace( /\\/g, '/' ); var route = '/'+ route; route = route.replace( '/index', '' ); // replace indexes with base routes route = route.replace( /{([a-z_-]*)}/ig, ':$1' ); // replace dynamic bits if( route === '' ) route = '/'; page.route = route; // only overwrite existing routes if we're an index page var existing_route = _( router.routes.get ).find( function( route_data ){ return route_data.path === route; }); if( existing_route ){ console.log('[SOLIDUS]'.red.bold +' Warning. You have a conflicting route at "'+ existing_route.path +'"' ); if( !page.is_index ) return route; } router.routes.get = _( router.routes.get ).reject( function( current_route ){ return current_route.path === route; }); router.get( route +'.json', function( req, res ){ page.render( req, res, { json: true }); }); router.get( route, function( req, res ){ page.render( req, res ); }); return route; }; this.parsePage = function( callback ){ fs.readFile( this.path, DEFAULT_ENCODING, function( err, data ){ var params = {}; var params_exec = /^{{!\s([\S\s]+?)\s}}/.exec( data ); try { params = ( params_exec )? JSON.parse( params_exec[1] ): {}; } catch( err ){ if( server.options.log_level >= 1 ) console.log('[SOLIDUS]'.red.bold +' Error preprocessing "'+ page.path +'"', err ); } finally { page.params = params; _( page ).extend({ title: params.title, description: params.description, permanent_name: params.permanent_name }); if( callback ) callback( params ); } }); }; this.fetchResources = function( resources, params, callback ){ var resources_data = {}; if( resources ){ var resources_array = _( resources ).pairs(); async.each( resources_array, function( resource, cb ){ var resource_url = resource[1]; for( var param in params ){ var value = params[param]; resource_url = resource_url.replace( '{'+ param +'}', value ); } request.get({ url: resource_url, json: true }, function( err, response, data ){ if( err ) return cb( err ); resources_data[resource[0]] = data; cb(); }); }, function( err ){ if( callback ) callback( resources_data ); }); } else { if( callback ) callback( resources_data ); } }; this.preprocessContext = function( context, preprocessor, callback ){ if( preprocessor ){ var preprocessor_path = path.join( server.paths.preprocessors, preprocessor ); fs.readFile( preprocessor_path, DEFAULT_ENCODING, function( err, data ){ try { vm.runInNewContext( data, { data: context }, preprocessor_path ); } catch( err ){ if( server.options.log_level >= 1 ) console.error( '[SOLIDUS]'.red.bold +' Error in preprocessor: '+ preprocessor_path, err ); } finally { if( callback ) callback( context ); } }); } else { callback( context ); } }; this.render = function( req, res, options ){ options = options || {}; var context = { page: { path: this.path, title: this.title, description: this.description, permanent_name: this.permanent_name }, parameters: {}, query: {}, resources: {}, assets: { scripts: '<script 
src="/compiled/scripts.js"></script>', styles: '<link rel="stylesheet" href="/compiled/styles.css" />' } }; // req.params is actually an array with crap stuck to it // so we have to parse that stuff out into a real object var parameters = {}; for( var key in req.params ) parameters[key] = req.params[key]; context.parameters = parameters; this.fetchResources( page.params.resources, req.params, function( resources ){ context.resources = resources; page.preprocessContext( context, page.params.preprocessor, function( context ){ if( options.json ) return res.json( context ); res.expose( context, 'solidus.context', 'context' ); var dir_to = path.dirname( page.relative_path.replace( /\.hbs$/i, '' ) ); if( Page.layouts[dir_to] ) context.layout = path.join( dir_to, 'layout' ); res.render( page.path, context ); }); }); }; this.destroy = function(){ router.routes.get = _( router.routes.get ).reject( function( current_route ){ return current_route.path === page.route; }); }; this.createRoute(); this.parsePage(); }; Page.layouts = {}; module.exports = Page;
clean up existing route logic
lib/page.js
clean up existing route logic
<ide><path>lib/page.js <ide> }); <ide> if( existing_route ){ <ide> console.log('[SOLIDUS]'.red.bold +' Warning. You have a conflicting route at "'+ existing_route.path +'"' ); <del> if( !page.is_index ) return route; <add> if( !page.is_index ) return route; // return out if this isn't an index <add> router.routes.get = _( router.routes.get ).without( existing_route ); // ensure the old route is removed if this is an index <ide> } <del> <del> router.routes.get = _( router.routes.get ).reject( function( current_route ){ <del> return current_route.path === route; <del> }); <ide> <ide> router.get( route +'.json', function( req, res ){ <ide> page.render( req, res, {
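The diff above changes how conflicting routes are handled: instead of unconditionally rejecting any previously registered route, the page now backs off when it is not an index and only removes the existing route when it is an index. A minimal, hypothetical sketch of that rule (not part of the commit; it assumes an Express 3-style `router.routes.get` array and underscore, as in the surrounding code):

var _ = require('underscore');

// Register `route`, respecting the index-page precedence rule from the diff above.
function registerRoute( router, route, is_index, handler ){
  var existing_route = _( router.routes.get ).find( function( r ){ return r.path === route; });
  if( existing_route ){
    console.log( 'Warning. You have a conflicting route at "'+ existing_route.path +'"' );
    if( !is_index ) return route; // non-index pages yield to the route that is already registered
    router.routes.get = _( router.routes.get ).without( existing_route ); // index pages replace it
  }
  router.get( route, handler );
  return route;
}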
Java
apache-2.0
1ddddd932bcc6266e0009af869b1a3a76da702c7
0
jOOQ/jOOL
/** * Copyright (c) 2014-2016, Data Geekery GmbH, [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jooq.lambda; import static java.util.Comparator.comparing; import static java.util.Spliterator.ORDERED; import static java.util.Spliterators.spliteratorUnknownSize; import static org.jooq.lambda.SeqUtils.sneakyThrow; import static org.jooq.lambda.tuple.Tuple.tuple; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.io.Reader; import java.io.UncheckedIOException; import java.time.Duration; import java.time.Instant; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Queue; import java.util.Random; import java.util.Set; import java.util.Spliterator; import java.util.concurrent.ConcurrentHashMap; import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.ToDoubleFunction; import java.util.function.ToIntFunction; import java.util.function.ToLongFunction; import java.util.function.UnaryOperator; import java.util.stream.Collector; import java.util.stream.Collectors; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; import java.util.stream.StreamSupport; import javax.annotation.Generated; import org.jooq.lambda.exception.TooManyElementsException; import org.jooq.lambda.function.Function10; import org.jooq.lambda.function.Function11; import org.jooq.lambda.function.Function12; import org.jooq.lambda.function.Function13; import org.jooq.lambda.function.Function14; import org.jooq.lambda.function.Function15; import org.jooq.lambda.function.Function16; import org.jooq.lambda.function.Function3; import org.jooq.lambda.function.Function4; import org.jooq.lambda.function.Function5; import org.jooq.lambda.function.Function6; import org.jooq.lambda.function.Function7; import org.jooq.lambda.function.Function8; import org.jooq.lambda.function.Function9; import org.jooq.lambda.tuple.Tuple; import org.jooq.lambda.tuple.Tuple1; import org.jooq.lambda.tuple.Tuple10; import org.jooq.lambda.tuple.Tuple11; import org.jooq.lambda.tuple.Tuple12; import org.jooq.lambda.tuple.Tuple13; import org.jooq.lambda.tuple.Tuple14; import org.jooq.lambda.tuple.Tuple15; import org.jooq.lambda.tuple.Tuple16; import org.jooq.lambda.tuple.Tuple2; import org.jooq.lambda.tuple.Tuple3; import org.jooq.lambda.tuple.Tuple4; import org.jooq.lambda.tuple.Tuple5; import org.jooq.lambda.tuple.Tuple6; import org.jooq.lambda.tuple.Tuple7; import 
org.jooq.lambda.tuple.Tuple8; import org.jooq.lambda.tuple.Tuple9; /** * A sequential, ordered {@link Stream} that adds all sorts of useful methods that work only because * it is sequential and ordered. * * @author Lukas Eder * @author Roman Tkalenko */ public interface Seq<T> extends Stream<T>, Iterable<T>, Collectable<T> { /** * The underlying {@link Stream} implementation. */ Stream<T> stream(); /** * Transform this stream into a new type. * <p> * If certain operations are re-applied frequently to streams, this * transform operation is very useful for such operations to be applied in a * fluent style: * <p> * <code><pre> * Function&ltSeq&lt;Integer>, Seq&lt;String>> toString = s -> s.map(Objects::toString); * Seq&lt;String> strings = * Seq.of(1, 2, 3) * .transform(toString); * </pre></code> */ default <U> U transform(Function<? super Seq<T>, ? extends U> transformer) { return transformer.apply(this); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ default <U> Seq<Tuple2<T, U>> crossJoin(Stream<? extends U> other) { return Seq.crossJoin(this, other); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ default <U> Seq<Tuple2<T, U>> crossJoin(Iterable<? extends U> other) { return Seq.crossJoin(this, other); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ default <U> Seq<Tuple2<T, U>> crossJoin(Seq<? extends U> other) { return Seq.crossJoin(this, other); } /** * Inner join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2)) * Seq.of(1, 2, 3).innerJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> innerJoin(Stream<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return innerJoin(seq(other), predicate); } /** * Inner join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2)) * Seq.of(1, 2, 3).innerJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> innerJoin(Iterable<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return innerJoin(seq(other), predicate); } /** * Inner join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2)) * Seq.of(1, 2, 3).innerJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> innerJoin(Seq<? extends U> other, BiPredicate<? super T, ? super U> predicate) { // This algorithm isn't lazy and has substantial complexity for large argument streams! List<? extends U> list = other.toList(); return flatMap(t -> seq(list) .filter(u -> predicate.test(t, u)) .map(u -> tuple(t, u))) .onClose(other::close); } /** * Left outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(3, null)) * Seq.of(1, 2, 3).leftOuterJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> leftOuterJoin(Stream<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return leftOuterJoin(seq(other), predicate); } /** * Left outer join 2 streams into one. 
* <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(3, null)) * Seq.of(1, 2, 3).leftOuterJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> leftOuterJoin(Iterable<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return leftOuterJoin(seq(other), predicate); } /** * Left outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(3, null)) * Seq.of(1, 2, 3).leftOuterJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> leftOuterJoin(Seq<? extends U> other, BiPredicate<? super T, ? super U> predicate) { // This algorithm isn't lazy and has substantial complexity for large argument streams! List<? extends U> list = other.toList(); return flatMap(t -> seq(list) .filter(u -> predicate.test(t, u)) .onEmpty(null) .map(u -> tuple(t, u))) .onClose(other::close); } /** * Right outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(null, 3)) * Seq.of(1, 2).rightOuterJoin(Seq.of(1, 2, 3), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> rightOuterJoin(Stream<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return rightOuterJoin(seq(other), predicate); } /** * Right outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(null, 3)) * Seq.of(1, 2).rightOuterJoin(Seq.of(1, 2, 3), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> rightOuterJoin(Iterable<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return rightOuterJoin(seq(other), predicate); } /** * Right outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(null, 3)) * Seq.of(1, 2).rightOuterJoin(Seq.of(1, 2, 3), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> rightOuterJoin(Seq<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return other .leftOuterJoin(this, (u, t) -> predicate.test(t, u)) .map(t -> tuple(t.v2, t.v1)) .onClose(other::close); } /** * Produce this stream, or an alternative stream from the * <code>value</code>, in case this stream is empty. */ default Seq<T> onEmpty(T value) { return onEmptyGet(() -> value); } /** * Produce this stream, or an alternative stream from the * <code>supplier</code>, in case this stream is empty. */ default Seq<T> onEmptyGet(Supplier<? extends T> supplier) { boolean[] first = { true }; return SeqUtils.transform(this, (delegate, action) -> { if (first[0]) { first[0] = false; if (!delegate.tryAdvance(action)) action.accept(supplier.get()); return true; } else { return delegate.tryAdvance(action); } }); } /** * Produce this stream, or an alternative stream from the * <code>supplier</code>, in case this stream is empty. */ default <X extends Throwable> Seq<T> onEmptyThrow(Supplier<? extends X> supplier) { boolean[] first = { true }; return SeqUtils.transform(this, (delegate, action) -> { if (first[0]) { first[0] = false; if (!delegate.tryAdvance(action)) sneakyThrow(supplier.get()); return true; } else { return delegate.tryAdvance(action); } }); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> concat(Stream<? extends T> other) { return concat(seq(other)); } /** * Concatenate two streams. 
* <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> concat(Iterable<? extends T> other) { return concat(seq(other)); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> concat(Seq<? extends T> other) { return Seq.concat(new Seq[]{this, other}); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4) * Seq.of(1, 2, 3).concat(4) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> concat(T other) { return concat(Seq.of(other)); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(4, 5, 6) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> concat(T... other) { return concat(Seq.of(other)); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> append(Stream<? extends T> other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> append(Iterable<? extends T> other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> append(Seq<? extends T> other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4) * Seq.of(1, 2, 3).append(4) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> append(T other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(4, 5, 6) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> append(T... other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> prepend(Stream<? extends T> other) { return seq(other).concat(this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> prepend(Iterable<? extends T> other) { return seq(other).concat(this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> prepend(Seq<? extends T> other) { return concat(other, this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4) * Seq.of(2, 3, 4).prepend(1) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> prepend(T other) { return Seq.of(other).concat(this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> prepend(T... other) { return Seq.of(other).concat(this); } /** * Check whether this stream contains a given value. 
* <p> * <code><pre> * // true * Seq.of(1, 2, 3).contains(2) * </pre><code> */ default boolean contains(T other) { return anyMatch(Predicate.isEqual(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(T... other) { return containsAll(of(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(Stream<? extends T> other) { return containsAll(seq(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(Iterable<? extends T> other) { return containsAll(seq(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? true : filter(t -> set.remove(t)).anyMatch(t -> set.isEmpty()); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(T... other) { return containsAny(of(other)); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(Stream<? extends T> other) { return containsAny(seq(other)); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(Iterable<? extends T> other) { return containsAny(seq(other)); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? false : anyMatch(set::contains); } /** * Get a single element from the stream at a given index. */ default Optional<T> get(long index) { if (index < 0L) return Optional.empty(); else if (index == 0L) return findFirst(); else return skip(index).findFirst(); } /** * Get the single element from the stream, or throw an exception if the * stream holds more than one element. */ default Optional<T> findSingle() throws TooManyElementsException { Iterator<T> it = iterator(); if (!it.hasNext()) return Optional.empty(); T result = it.next(); if (!it.hasNext()) return Optional.of(result); throw new TooManyElementsException("Stream contained more than one element."); } /** * Get a single element from the stream given a predicate. */ default Optional<T> findFirst(Predicate<? super T> predicate) { return filter(predicate).findFirst(); } /** * Return a new stream where the first occurrence of the argument is removed. * <p> * <code><pre> * // 1, 3, 2, 4 * Seq.of(1, 2, 3, 2, 4).remove(2) * </pre><code> */ default Seq<T> remove(T other) { boolean[] removed = new boolean[1]; return filter(t -> removed[0] || !(removed[0] = Objects.equals(t, other))); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(T... 
other) { return removeAll(of(other)); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(Stream<? extends T> other) { return removeAll(seq(other)); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(Iterable<? extends T> other) { return removeAll(seq(other)); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? this : filter(t -> !set.contains(t)).onClose(other::close); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(T... other) { return retainAll(of(other)); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(Stream<? extends T> other) { return retainAll(seq(other)); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(Iterable<? extends T> other) { return retainAll(seq(other)); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? empty() : filter(t -> set.contains(t)).onClose(other::close); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> * * @see #cycle(Stream) */ default Seq<T> cycle() { return cycle(this); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream, long) */ default Seq<T> cycle(long times) { return cycle(this, times); } /** * Get a stream of distinct keys. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 1, 2, -2, 3).distinct(Math::abs) * </pre></code> */ default <U> Seq<T> distinct(Function<? super T, ? extends U> keyExtractor) { final Map<U, String> seen = new ConcurrentHashMap<>(); return filter(t -> seen.put(keyExtractor.apply(t), "") == null); } /** * Zip two streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> * * @see #zip(Stream, Stream) */ default <U> Seq<Tuple2<T, U>> zip(Stream<? extends U> other) { return zip(seq(other)); } /** * Zip two streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> * * @see #zip(Stream, Stream) */ default <U> Seq<Tuple2<T, U>> zip(Iterable<? extends U> other) { return zip(seq(other)); } /** * Zip two streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> * * @see #zip(Stream, Stream) */ default <U> Seq<Tuple2<T, U>> zip(Seq<? extends U> other) { return zip(this, other); } /** * Zip two streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> * * @see #zip(Seq, BiFunction) */ default <U, R> Seq<R> zip(Stream<? extends U> other, BiFunction<? super T, ? super U, ? extends R> zipper) { return zip(seq(other), zipper); } /** * Zip two streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> * * @see #zip(Seq, BiFunction) */ default <U, R> Seq<R> zip(Iterable<? extends U> other, BiFunction<? super T, ? super U, ? extends R> zipper) { return zip(seq(other), zipper); } /** * Zip two streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> * * @see #zip(Seq, BiFunction) */ default <U, R> Seq<R> zip(Seq<? extends U> other, BiFunction<? super T, ? super U, ? extends R> zipper) { return zip(this, other, zipper); } /** * Zip a Stream with a corresponding Stream of indexes. * <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> * * @see #zipWithIndex(Stream) */ default Seq<Tuple2<T, Long>> zipWithIndex() { return zipWithIndex(this); } /** * Fold a Stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ default <U> U foldLeft(U seed, BiFunction<? super U, ? super T, ? extends U> function) { return foldLeft(this, seed, function); } /** * Fold a Stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ default <U> U foldRight(U seed, BiFunction<? super T, ? super U, ? extends U> function) { return foldRight(this, seed, function); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ default <U> Seq<U> scanLeft(U seed, BiFunction<? super U, ? super T, ? extends U> function) { return scanLeft(this, seed, function); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ default <U> Seq<U> scanRight(U seed, BiFunction<? super T, ? super U, ? extends U> function) { return scanRight(this, seed, function); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ default Seq<T> reverse() { return reverse(this); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ default Seq<T> shuffle() { return shuffle(this); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ default Seq<T> shuffle(Random random) { return shuffle(this, random); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code>. 
* <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhile(i -> i &lt; 3) * </pre></code> * * @see #skipWhile(Stream, Predicate) */ default Seq<T> skipWhile(Predicate<? super T> predicate) { return skipWhile(this, predicate); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to false. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhileClosed(i -> i &lt; 3) * </pre></code> * * @see #skipWhileClosed(Stream, Predicate) */ default Seq<T> skipWhileClosed(Predicate<? super T> predicate) { return skipWhileClosed(this, predicate); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntil(i -> i == 3) * </pre></code> * * @see #skipUntil(Stream, Predicate) */ default Seq<T> skipUntil(Predicate<? super T> predicate) { return skipUntil(this, predicate); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntilClosed(i -> i == 3) * </pre></code> * * @see #skipUntilClosed(Stream, Predicate) */ default Seq<T> skipUntilClosed(Predicate<? super T> predicate) { return skipUntilClosed(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitWhile(i -> i &lt; 3) * </pre></code> * * @see #limitWhile(Stream, Predicate) */ default Seq<T> limitWhile(Predicate<? super T> predicate) { return limitWhile(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitWhileClosed(i -> i &lt; 3) * </pre></code> * * @see #limitWhileClosed(Stream, Predicate) */ default Seq<T> limitWhileClosed(Predicate<? super T> predicate) { return limitWhileClosed(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitUntil(i -> i == 3) * </pre></code> * * @see #limitUntil(Stream, Predicate) */ default Seq<T> limitUntil(Predicate<? super T> predicate) { return limitUntil(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitUntilClosed(i -> i == 3) * </pre></code> * * @see #limitUntilClosed(Stream, Predicate) */ default Seq<T> limitUntilClosed(Predicate<? super T> predicate) { return limitUntilClosed(this, predicate); } /** * Returns a stream with a given value interspersed between any two values of this stream. * <p> * <code><pre> * // (1, 0, 2, 0, 3, 0, 4) * Seq.of(1, 2, 3, 4).intersperse(0) * </pre></code> * * @see #intersperse(Stream, Object) */ default Seq<T> intersperse(T value) { return intersperse(this, value); } /** * Duplicate a Streams into two equivalent Streams. 
* <p> * <code><pre> * // tuple((1, 2, 3), (1, 2, 3)) * Seq.of(1, 2, 3).duplicate() * </pre></code> * * @see #duplicate(Stream) */ default Tuple2<Seq<T>, Seq<T>> duplicate() { return duplicate(this); } /** * Classify this stream's elements according to a given classifier function. * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Function)}) * @see #groupBy(Function) * @see #partition(Predicate) */ default <K> Seq<Tuple2<K, Seq<T>>> grouped(Function<? super T, ? extends K> classifier) { return grouped(this, classifier); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}) * * @see #groupBy(Function, Collector) */ default <K, A, D> Seq<Tuple2<K, D>> grouped(Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(this, classifier, downstream); } /** * Partition a stream into two given a predicate. * <p> * <code><pre> * // tuple((1, 3, 5), (2, 4, 6)) * Seq.of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0) * </pre></code> * * @see #partition(Stream, Predicate) */ default Tuple2<Seq<T>, Seq<T>> partition(Predicate<? super T> predicate) { return partition(this, predicate); } /** * Split a stream at a given position. * <p> * <code><pre> * // tuple((1, 2, 3), (4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitAt(3) * </pre></code> * * @see #splitAt(Stream, long) */ default Tuple2<Seq<T>, Seq<T>> splitAt(long position) { return splitAt(this, position); } /** * Split a stream at the head. * <p> * <code><pre> * // tuple(1, (2, 3, 4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitHead(3) * </pre></code> * * @see #splitAt(Stream, long) */ default Tuple2<Optional<T>, Seq<T>> splitAtHead() { return splitAtHead(this); } /** * Returns a limited interval from a given Stream. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5, 6).slice(3, 5) * </pre></code> * * @see #slice(Stream, long, long) */ default Seq<T> slice(long from, long to) { return slice(this, from, to); } /** * Check if the sequence has any elements */ default boolean isEmpty() { return !findAny().isPresent(); } /** * Check if the sequence has no elements */ default boolean isNotEmpty() { return !isEmpty(); } /** * Sort by the results of function. */ default <U extends Comparable<? super U>> Seq<T> sorted(Function<? super T, ? extends U> function) { return sorted(comparing(function)); } /** * Sort by the results of function. */ default <U> Seq<T> sorted(Function<? super T, ? extends U> function, Comparator<? super U> comparator) { return sorted(comparing(function, comparator)); } // Methods taken from LINQ // ----------------------- /** * Keep only those elements in a stream that are of a given type. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, "a", 2, "b", 3).ofType(Integer.class) * </pre></code> * * @see #ofType(Stream, Class) */ default <U> Seq<U> ofType(Class<? 
extends U> type) { return ofType(this, type); } /** * Cast all elements in a stream to a given type, possibly throwing a {@link ClassCastException}. * <p> * <code><pre> * // ClassCastException * Seq.of(1, "a", 2, "b", 3).cast(Integer.class) * </pre></code> * * @see #cast(Stream, Class) */ default <U> Seq<U> cast(Class<? extends U> type) { return cast(this, type); } /** * Map this stream to a stream containing a sliding window over the previous stream. * <p> * <code><pre> * // ((1, 2, 3), (2, 3, 4), (3, 4, 5)) * .of(1, 2, 3, 4, 5).sliding(3); * </pre></code> * <p> * This is equivalent as using the more verbose window function version: * <code><pre> * int n = 3; * Seq.of(1, 2, 3, 4, 5) * .window(0, n - 1) * .filter(w -> w.count() == n) * .map(w -> w.toList()); * </pre></code> */ default Seq<Seq<T>> sliding(long size) { if (size <= 0) throw new IllegalArgumentException("Size must be >= 1"); return window(0, size - 1).filter(w -> w.count() == size).map(w -> w.window()); } /** * Map this stream to a windowed stream using the default partition and order. * <p> * <code><pre> * // (0, 1, 2, 3, 4) * Seq.of(1, 2, 4, 2, 3).window().map(Window::rowNumber) * </pre></code> */ default Seq<Window<T>> window() { return window(Window.of()).map(t -> t.v1); } /** * Map this stream to a windowed stream using the default partition and order with frame. * <p> * <code><pre> * // (2, 4, 4, 4, 3) * Seq.of(1, 2, 4, 2, 3).window(-1, 1).map(Window::max) * </pre></code> */ default Seq<Window<T>> window(long lower, long upper) { return window(Window.of(lower, upper)).map(t -> t.v1); } /** * Map this stream to a windowed stream using the default partition and a specific order. * <p> * <code><pre> * // (0, 1, 4, 2, 3) * Seq.of(1, 2, 4, 2, 3).window(naturalOrder()).map(Window::rowNumber) * </pre></code> */ default Seq<Window<T>> window(Comparator<? super T> orderBy) { return window(Window.of(orderBy)).map(t -> t.v1); } /** * Map this stream to a windowed stream using the default partition and a specific order with frame. * <p> * <code><pre> * // (1, 1, 3, 2, 2) * Seq.of(1, 2, 4, 2, 3).window(naturalOrder(), -1, 1).map(Window::min) * </pre></code> */ default Seq<Window<T>> window(Comparator<? super T> orderBy, long lower, long upper) { return window(Window.of(orderBy, lower, upper)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and the default order. * <p> * <code><pre> * // (1, 2, 2, 2, 1) * Seq.of(1, 2, 4, 2, 3).window(i -> i % 2).map(Window::min) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy) { return window(Window.of(partitionBy)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and the default order. * <p> * <code><pre> * // (3, 4, 4, 2, 3) * Seq.of(1, 4, 2, 2, 3).window(i -> i % 2, -1, 1).map(Window::max) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy, long lower, long upper) { return window(Window.of(partitionBy, lower, upper)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and order. * <p> * <code><pre> * // (1, 2, 4, 4, 3) * Seq.of(1, 2, 4, 2, 3).window(i -> i % 2, naturalOrder()).map(Window::max) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy, Comparator<? super T> orderBy) { return window(Window.of(partitionBy, orderBy)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and order with frame. 
* <p> * <code><pre> * // (3, 2, 4, 4, 3) * Seq.of(1, 2, 4, 2, 3).window(i -> i % 2, naturalOrder(), -1, 1).map(Window::max) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy, Comparator<? super T> orderBy, long lower, long upper) { return window(Window.of(partitionBy, orderBy, lower, upper)).map(t -> t.v1); } // [jooq-tools] START [windows] /** * Map this stream to a windowed stream with 1 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple1<Window<T>>> window( WindowSpecification<T> specification1 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 2 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple2<Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 3 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple3<Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 4 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple4<Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 5 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple5<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 6 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple6<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 7 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple7<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7) )) .onClose(this::close); 
} /** * Map this stream to a windowed stream with 8 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple8<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 9 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple9<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 10 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple10<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 11 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple11<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 12 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple12<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 13 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple13<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, partitions13.get(specification13.partition().apply(t.v1)), specification13) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 14 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple14<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13, WindowSpecification<T> specification14 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); Map<?, Partition<T>> partitions14 = SeqUtils.partitions(specification14, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, partitions13.get(specification13.partition().apply(t.v1)), specification13), (Window<T>) new WindowImpl<>(t, partitions14.get(specification14.partition().apply(t.v1)), specification14) )) .onClose(this::close); } /** * Map 
this stream to a windowed stream with 15 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple15<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13, WindowSpecification<T> specification14, WindowSpecification<T> specification15 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); Map<?, Partition<T>> partitions14 = SeqUtils.partitions(specification14, buffer); Map<?, Partition<T>> partitions15 = SeqUtils.partitions(specification15, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, 
partitions13.get(specification13.partition().apply(t.v1)), specification13), (Window<T>) new WindowImpl<>(t, partitions14.get(specification14.partition().apply(t.v1)), specification14), (Window<T>) new WindowImpl<>(t, partitions15.get(specification15.partition().apply(t.v1)), specification15) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 16 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple16<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13, WindowSpecification<T> specification14, WindowSpecification<T> specification15, WindowSpecification<T> specification16 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); Map<?, Partition<T>> partitions14 = SeqUtils.partitions(specification14, buffer); Map<?, Partition<T>> partitions15 = SeqUtils.partitions(specification15, buffer); Map<?, Partition<T>> partitions16 = SeqUtils.partitions(specification16, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new 
WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, partitions13.get(specification13.partition().apply(t.v1)), specification13), (Window<T>) new WindowImpl<>(t, partitions14.get(specification14.partition().apply(t.v1)), specification14), (Window<T>) new WindowImpl<>(t, partitions15.get(specification15.partition().apply(t.v1)), specification15), (Window<T>) new WindowImpl<>(t, partitions16.get(specification16.partition().apply(t.v1)), specification16) )) .onClose(this::close); } // [jooq-tools] END [windows] // Shortcuts to Collectors // ----------------------- /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function)} collector. */ default <K> Map<K, List<T>> groupBy(Function<? super T, ? extends K> classifier) { return collect(Collectors.groupingBy(classifier)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Collector)} collector. */ default <K, A, D> Map<K, D> groupBy(Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return collect(Collectors.groupingBy(classifier, downstream)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Supplier, Collector)} collector. */ default <K, D, A, M extends Map<K, D>> M groupBy(Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, Collector<? super T, A, D> downstream) { return collect(Collectors.groupingBy(classifier, mapFactory, downstream)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining()} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated default String join() { return map(Objects::toString).collect(Collectors.joining()); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence)} * collector. * * @deprecated - Use {@link #toString(CharSequence)} instead. This method * will be removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated default String join(CharSequence delimiter) { return map(Objects::toString).collect(Collectors.joining(delimiter)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence, CharSequence, CharSequence)} * collector. * * @deprecated - Use * {@link #toString(CharSequence, CharSequence, CharSequence)} instead. This * method will be removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated default String join(CharSequence delimiter, CharSequence prefix, CharSequence suffix) { return map(Objects::toString).collect(Collectors.joining(delimiter, prefix, suffix)); } /** * @see Stream#of(Object) */ static <T> Seq<T> of(T value) { return seq(Stream.of(value)); } /** * @see Stream#of(Object[]) */ @SafeVarargs static <T> Seq<T> of(T... values) { return seq(Stream.of(values)); } /** * The range between two values. 
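 * <p> * For example (the upper bound being exclusive): * <code><pre> * // (1, 2, 3) * Seq.range((byte) 1, (byte) 4); * </pre></code>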
* * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Byte> range(byte from, byte to) { return range(from, to, (byte) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Byte> range(byte from, byte to, int step) { return to <= from ? empty() : iterate(from, t -> Byte.valueOf((byte) (t + step))).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Short> range(short from, short to) { return range(from, to, (short) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Short> range(short from, short to, int step) { return to <= from ? empty() : iterate(from, t -> Short.valueOf((short) (t + step))).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Character> range(char from, char to) { return range(from, to, (short) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Character> range(char from, char to, int step) { return to <= from ? empty() : iterate(from, t -> Character.valueOf((char) (t + step))).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Integer> range(int from, int to) { return range(from, to, 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Integer> range(int from, int to, int step) { return to <= from ? empty() : iterate(from, t -> Integer.valueOf(t + step)).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Long> range(long from, long to) { return range(from, to, 1L); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Long> range(long from, long to, long step) { return to <= from ? empty() : iterate(from, t -> Long.valueOf(t + step)).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Instant> range(Instant from, Instant to) { return range(from, to, Duration.ofSeconds(1)); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Instant> range(Instant from, Instant to, Duration step) { return to.compareTo(from) <= 0 ? empty() : iterate(from, t -> t.plus(step)).limitWhile(t -> t.compareTo(to) < 0); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Byte> rangeClosed(byte from, byte to) { return rangeClosed(from, to, (byte) 1); } /** * The range between two values. 
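 * <p> * For example (the upper bound being inclusive, with a step of 2): * <code><pre> * // (1, 3, 5) * Seq.rangeClosed((byte) 1, (byte) 5, 2); * </pre></code>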
 * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Byte> rangeClosed(byte from, byte to, int step) { return to < from ? empty() : iterate(from, t -> Byte.valueOf((byte) (t + step))).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Short> rangeClosed(short from, short to) { return rangeClosed(from, to, (short) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Short> rangeClosed(short from, short to, int step) { return to < from ? empty() : iterate(from, t -> Short.valueOf((short) (t + step))).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Character> rangeClosed(char from, char to) { return rangeClosed(from, to, 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Character> rangeClosed(char from, char to, int step) { return to < from ? empty() : iterate(from, t -> Character.valueOf((char) (t + step))).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Integer> rangeClosed(int from, int to) { return rangeClosed(from, to, 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Integer> rangeClosed(int from, int to, int step) { return to < from ? empty() : iterate(from, t -> Integer.valueOf(t + step)).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Long> rangeClosed(long from, long to) { return rangeClosed(from, to, 1L); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Long> rangeClosed(long from, long to, long step) { return to < from ? empty() : iterate(from, t -> Long.valueOf(t + step)).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Instant> rangeClosed(Instant from, Instant to) { return rangeClosed(from, to, Duration.ofSeconds(1)); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Instant> rangeClosed(Instant from, Instant to, Duration step) { return to.compareTo(from) < 0 ?
empty() : iterate(from, t -> t.plus(step)).limitWhile(t -> t.compareTo(to) <= 0); } /** * @see Stream#empty() */ static <T> Seq<T> empty() { return seq(Stream.empty()); } /** * @see Stream#iterate(Object, UnaryOperator) */ static <T> Seq<T> iterate(final T seed, final UnaryOperator<T> f) { return seq(Stream.iterate(seed, f)); } /** * @see Stream#generate(Supplier) */ static Seq<Void> generate() { return generate(() -> null); } /** * @see Stream#generate(Supplier) */ static <T> Seq<T> generate(T value) { return generate(() -> value); } /** * @see Stream#generate(Supplier) */ static <T> Seq<T> generate(Supplier<? extends T> s) { return seq(Stream.generate(s)); } /** * Wrap a <code>Stream</code> into a <code>Seq</code>. */ @SuppressWarnings("unchecked") static <T> Seq<T> seq(Stream<? extends T> stream) { if (stream instanceof Seq) return (Seq<T>) stream; return new SeqImpl<>(stream); } /** * Wrap a <code>Stream</code> into a <code>Seq</code>. */ @SuppressWarnings("unchecked") static <T> Seq<T> seq(Seq<? extends T> stream) { return (Seq<T>) stream; } /** * Wrap an <code>IntStream</code> into a <code>Seq</code>. */ static Seq<Integer> seq(IntStream stream) { return new SeqImpl<>(stream.boxed()); } /** * Wrap a <code>LongStream</code> into a <code>Seq</code>. */ static Seq<Long> seq(LongStream stream) { return new SeqImpl<>(stream.boxed()); } /** * Wrap a <code>DoubleStream</code> into a <code>Seq</code>. */ static Seq<Double> seq(DoubleStream stream) { return new SeqImpl<>(stream.boxed()); } /** * Wrap an <code>Iterable</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Iterable<? extends T> iterable) { return seq(iterable.iterator()); } /** * Wrap an <code>Iterator</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Iterator<? extends T> iterator) { return seq(spliteratorUnknownSize(iterator, ORDERED)); } /** * Wrap a <code>Spliterator</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Spliterator<? extends T> spliterator) { return seq(StreamSupport.stream(spliterator, false)); } /** * Wrap a <code>Map</code> into a <code>Seq</code>. */ static <K, V> Seq<Tuple2<K, V>> seq(Map<? extends K, ? extends V> map) { return seq(map.entrySet()).map(e -> tuple(e.getKey(), e.getValue())); } /** * Wrap an <code>Optional</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Optional<? extends T> optional) { return optional.map(Seq::of).orElseGet(Seq::empty); } /** * Wrap an <code>InputStream</code> into a <code>Seq</code>. * <p> * Client code must close the <code>InputStream</code>. All * {@link IOException}s thrown by the <code>InputStream</code> are wrapped * by {@link UncheckedIOException}s. */ static Seq<Byte> seq(InputStream is) { FunctionalSpliterator<Byte> spliterator = consumer -> { try { int value = is.read(); if (value != -1) consumer.accept((byte) value); return value != -1; } catch (IOException e) { throw new UncheckedIOException(e); } }; return seq(spliterator).onClose(Unchecked.runnable(is::close)); } /** * Wrap a <code>Reader</code> into a <code>Seq</code>. * <p> * Client code must close the <code>Reader</code>. All * {@link IOException}s thrown by the <code>Reader</code> are wrapped * by {@link UncheckedIOException}s.
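 * <p> * For example, streaming the characters of a {@link java.io.StringReader}: * <code><pre> * // ('a', 'b', 'c') * Seq.seq(new StringReader("abc")); * </pre></code>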
*/ static Seq<Character> seq(Reader reader) { FunctionalSpliterator<Character> spliterator = consumer -> { try { int value = reader.read(); if (value != -1) consumer.accept((char) value); return value != -1; } catch (IOException e) { throw new UncheckedIOException(e); } }; return seq(spliterator).onClose(Unchecked.runnable(reader::close)); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> */ static <T> Seq<T> cycle(Stream<? extends T> stream) { return cycle(seq(stream)); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> */ static <T> Seq<T> cycle(Iterable<? extends T> iterable) { return cycle(seq(iterable)); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> */ static <T> Seq<T> cycle(Seq<? extends T> stream) { return cycle(stream, -1); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream) */ static <T> Seq<T> cycle(Stream<? extends T> stream, long times) { return cycle(seq(stream), times); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream) */ static <T> Seq<T> cycle(Iterable<? extends T> iterable, long times) { return cycle(seq(iterable), times); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream) */ @SuppressWarnings("unchecked") static <T> Seq<T> cycle(Seq<? extends T> stream, long times) { if (times == 0) return empty(); if (times == 1) return (Seq<T>) stream; List<T> list = new ArrayList<>(); Spliterator<T>[] sp = new Spliterator[1]; long[] remaining = new long[] { times }; return SeqUtils.transform(stream, (delegate, action) -> { if (sp[0] == null) { if (delegate.tryAdvance(t -> { list.add(t); action.accept(t); })) return true; else sp[0] = list.spliterator(); } if (!sp[0].tryAdvance(action)) { if (times != -1 && (remaining[0] = remaining[0] - 1) == 1) return false; sp[0] = list.spliterator(); if (!sp[0].tryAdvance(action)) return false; } return true; }); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(Stream<Tuple2<T1, T2>> stream) { return unzip(seq(stream)); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Stream<Tuple2<T1, T2>> stream, Function<T1, U1> leftUnzipper, Function<T2, U2> rightUnzipper) { return unzip(seq(stream), leftUnzipper, rightUnzipper); } /** * Unzip one Stream into two. 
* <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Stream<Tuple2<T1, T2>> stream, Function<Tuple2<T1, T2>, Tuple2<U1, U2>> unzipper) { return unzip(seq(stream), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Stream<Tuple2<T1, T2>> stream, BiFunction<T1, T2, Tuple2<U1, U2>> unzipper) { return unzip(seq(stream), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(Iterable<Tuple2<T1, T2>> iterable) { return unzip(seq(iterable)); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Iterable<Tuple2<T1, T2>> iterable, Function<T1, U1> leftUnzipper, Function<T2, U2> rightUnzipper) { return unzip(seq(iterable), leftUnzipper, rightUnzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Iterable<Tuple2<T1, T2>> iterable, Function<Tuple2<T1, T2>, Tuple2<U1, U2>> unzipper) { return unzip(seq(iterable), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Iterable<Tuple2<T1, T2>> iterable, BiFunction<T1, T2, Tuple2<U1, U2>> unzipper) { return unzip(seq(iterable), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(Seq<Tuple2<T1, T2>> stream) { return unzip(stream, t -> t); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Seq<Tuple2<T1, T2>> stream, Function<T1, U1> leftUnzipper, Function<T2, U2> rightUnzipper) { return unzip(stream, t -> tuple(leftUnzipper.apply(t.v1), rightUnzipper.apply(t.v2))); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Seq<Tuple2<T1, T2>> stream, Function<Tuple2<T1, T2>, Tuple2<U1, U2>> unzipper) { return unzip(stream, (t1, t2) -> unzipper.apply(tuple(t1, t2))); } /** * Unzip one Stream into two. 
* <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Seq<Tuple2<T1, T2>> stream, BiFunction<T1, T2, Tuple2<U1, U2>> unzipper) { return stream .map(t -> unzipper.apply(t.v1, t.v2)) .duplicate() .map1(s -> s.map(u -> u.v1)) .map2(s -> s.map(u -> u.v2)); } // [jooq-tools] START [zip-static] /** * Zip 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2) { return zip(seq(s1), seq(s2)); } /** * Zip 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3) { return zip(seq(s1), seq(s2), seq(s3)); } /** * Zip 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4) { return zip(seq(s1), seq(s2), seq(s3), seq(s4)); } /** * Zip 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5)); } /** * Zip 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6)); } /** * Zip 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7)); } /** * Zip 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? 
extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8)); } /** * Zip 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9)); } /** * Zip 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10)); } /** * Zip 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11)); } /** * Zip 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12)); } /** * Zip 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? 
extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13)); } /** * Zip 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14)); } /** * Zip 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15)); } /** * Zip 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Stream<? extends T16> s16) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), seq(s16)); } /** * Zip 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2) { return zip(seq(i1), seq(i2)); } /** * Zip 3 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3) { return zip(seq(i1), seq(i2), seq(i3)); } /** * Zip 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4) { return zip(seq(i1), seq(i2), seq(i3), seq(i4)); } /** * Zip 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5)); } /** * Zip 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6)); } /** * Zip 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7)); } /** * Zip 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8)); } /** * Zip 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? 
extends T9> i9) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9)); } /** * Zip 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10)); } /** * Zip 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11)); } /** * Zip 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12)); } /** * Zip 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13)); } /** * Zip 14 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14)); } /** * Zip 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15)); } /** * Zip 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Iterable<? extends T16> i16) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), seq(i16)); } /** * Zip 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2) { return zip(s1, s2, (t1, t2) -> tuple(t1, t2)) .onClose(SeqUtils.closeAll(s1, s2)); } /** * Zip 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? 
extends T3> s3) { return zip(s1, s2, s3, (t1, t2, t3) -> tuple(t1, t2, t3)) .onClose(SeqUtils.closeAll(s1, s2, s3)); } /** * Zip 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4) { return zip(s1, s2, s3, s4, (t1, t2, t3, t4) -> tuple(t1, t2, t3, t4)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4)); } /** * Zip 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5) { return zip(s1, s2, s3, s4, s5, (t1, t2, t3, t4, t5) -> tuple(t1, t2, t3, t4, t5)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5)); } /** * Zip 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6) { return zip(s1, s2, s3, s4, s5, s6, (t1, t2, t3, t4, t5, t6) -> tuple(t1, t2, t3, t4, t5, t6)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6)); } /** * Zip 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7) { return zip(s1, s2, s3, s4, s5, s6, s7, (t1, t2, t3, t4, t5, t6, t7) -> tuple(t1, t2, t3, t4, t5, t6, t7)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7)); } /** * Zip 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, (t1, t2, t3, t4, t5, t6, t7, t8) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8)); } /** * Zip 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? 
extends T9> s9) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, (t1, t2, t3, t4, t5, t6, t7, t8, t9) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9)); } /** * Zip 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10)); } /** * Zip 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11)); } /** * Zip 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12)); } /** * Zip 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? 
extends T13> s13) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13)); } /** * Zip 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14)); } /** * Zip 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15)); } /** * Zip 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Seq<? extends T16> s16) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16)); } /** * Zip 2 streams into one using a {@link BiFunction} to produce resulting values. 
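* <p>
* (Added illustration, not part of the generated jOOQ-tools docs: zipping stops at the
* shortest input, because the underlying iterator only advances while every source still
* has a next element, so unmatched trailing elements are dropped.)
* <code><pre>
* // ("1:a", "2:b") - the trailing 3 has no partner and is dropped
* Seq.zip(Stream.of(1, 2, 3), Stream.of("a", "b"), (i, s) -> i + ":" + s)
* </pre></code>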
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, BiFunction<? super T1, ? super T2, ? extends R> zipper) { return zip(seq(s1), seq(s2), zipper); } /** * Zip 3 streams into one using a {@link Function3} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), zipper); } /** * Zip 4 streams into one using a {@link Function4} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), zipper); } /** * Zip 5 streams into one using a {@link Function5} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), zipper); } /** * Zip 6 streams into one using a {@link Function6} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), zipper); } /** * Zip 7 streams into one using a {@link Function7} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), zipper); } /** * Zip 8 streams into one using a {@link Function8} to produce resulting values. 
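* <p>
* (Added illustration, not part of the generated jOOQ-tools docs: the generated example
* below reuses the two-stream form; an actual call at this arity passes one stream per
* type parameter plus the zipper, for instance:)
* <code><pre>
* // ("1abcdefg")
* Seq.zip(Stream.of(1), Stream.of("a"), Stream.of("b"), Stream.of("c"),
*         Stream.of("d"), Stream.of("e"), Stream.of("f"), Stream.of("g"),
*         (t1, t2, t3, t4, t5, t6, t7, t8) -> "" + t1 + t2 + t3 + t4 + t5 + t6 + t7 + t8)
* </pre></code>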
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), zipper); } /** * Zip 9 streams into one using a {@link Function9} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), zipper); } /** * Zip 10 streams into one using a {@link Function10} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Function10<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), zipper); } /** * Zip 11 streams into one using a {@link Function11} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Function11<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), zipper); } /** * Zip 12 streams into one using a {@link Function12} to produce resulting values. 
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Function12<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), zipper); } /** * Zip 13 streams into one using a {@link Function13} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Function13<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), zipper); } /** * Zip 14 streams into one using a {@link Function14} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Function14<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), zipper); } /** * Zip 15 streams into one using a {@link Function15} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? 
extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Function15<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), zipper); } /** * Zip 16 streams into one using a {@link Function16} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Stream<? extends T16> s16, Function16<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? super T16, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), seq(s16), zipper); } /** * Zip 2 streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, BiFunction<? super T1, ? super T2, ? extends R> zipper) { return zip(seq(i1), seq(i2), zipper); } /** * Zip 3 streams into one using a {@link Function3} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), zipper); } /** * Zip 4 streams into one using a {@link Function4} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), zipper); } /** * Zip 5 streams into one using a {@link Function5} to produce resulting values. 
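* <p>
* (Added illustration, not part of the generated jOOQ-tools docs: the {@link Iterable}
* overloads in this block wrap each argument via seq(...) and delegate to the matching
* {@link Seq} overload, so plain collections can be zipped directly; shown here with the
* two-argument variant.)
* <code><pre>
* // ("1a", "2b")
* Seq.zip(Arrays.asList(1, 2), Arrays.asList("a", "b"), (i, s) -> i + s)
* </pre></code>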
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), zipper); } /** * Zip 6 streams into one using a {@link Function6} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), zipper); } /** * Zip 7 streams into one using a {@link Function7} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), zipper); } /** * Zip 8 streams into one using a {@link Function8} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), zipper); } /** * Zip 9 streams into one using a {@link Function9} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? 
extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), zipper); } /** * Zip 10 streams into one using a {@link Function10} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Function10<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), zipper); } /** * Zip 11 streams into one using a {@link Function11} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Function11<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), zipper); } /** * Zip 12 streams into one using a {@link Function12} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Function12<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), zipper); } /** * Zip 13 streams into one using a {@link Function13} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? 
extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Function13<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), zipper); } /** * Zip 14 streams into one using a {@link Function14} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Function14<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), zipper); } /** * Zip 15 streams into one using a {@link Function15} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Function15<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), zipper); } /** * Zip 16 streams into one using a {@link Function16} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Iterable<? extends T16> i16, Function16<? 
super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? super T16, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), seq(i16), zipper); } /** * Zip 2 streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, BiFunction<? super T1, ? super T2, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next()); } } return seq(new Zip()); } /** * Zip 3 streams into one using a {@link Function3} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next()); } } return seq(new Zip()); } /** * Zip 4 streams into one using a {@link Function4} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next()); } } return seq(new Zip()); } /** * Zip 5 streams into one using a {@link Function5} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? 
extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next()); } } return seq(new Zip()); } /** * Zip 6 streams into one using a {@link Function6} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next()); } } return seq(new Zip()); } /** * Zip 7 streams into one using a {@link Function7} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next()); } } return seq(new Zip()); } /** * Zip 8 streams into one using a {@link Function8} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? 
extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next()); } } return seq(new Zip()); } /** * Zip 9 streams into one using a {@link Function9} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next()); } } return seq(new Zip()); } /** * Zip 10 streams into one using a {@link Function10} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Function10<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? 
extends T10> it10 = s10.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next()); } } return seq(new Zip()); } /** * Zip 11 streams into one using a {@link Function11} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Function11<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next()); } } return seq(new Zip()); } /** * Zip 12 streams into one using a {@link Function12} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Function12<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? 
extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next()); } } return seq(new Zip()); } /** * Zip 13 streams into one using a {@link Function13} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Function13<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next()); } } return seq(new Zip()); } /** * Zip 14 streams into one using a {@link Function14} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Function14<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? 
super T14, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); final Iterator<? extends T14> it14 = s14.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext() && it14.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next(), it14.next()); } } return seq(new Zip()); } /** * Zip 15 streams into one using a {@link Function15} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Function15<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); final Iterator<? extends T14> it14 = s14.iterator(); final Iterator<? 
extends T15> it15 = s15.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext() && it14.hasNext() && it15.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next(), it14.next(), it15.next()); } } return seq(new Zip()); } /** * Zip 16 streams into one using a {@link Function16} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Seq<? extends T16> s16, Function16<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? super T16, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); final Iterator<? extends T14> it14 = s14.iterator(); final Iterator<? extends T15> it15 = s15.iterator(); final Iterator<? extends T16> it16 = s16.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext() && it14.hasNext() && it15.hasNext() && it16.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next(), it14.next(), it15.next(), it16.next()); } } return seq(new Zip()); } // [jooq-tools] END [zip-static] /** * Zip a Stream with a corresponding Stream of indexes. * <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> */ static <T> Seq<Tuple2<T, Long>> zipWithIndex(Stream<? extends T> stream) { return zipWithIndex(seq(stream)); } /** * Zip a Stream with a corresponding Stream of indexes. 
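* <p>
* (Added illustration, not part of the generated docs: the index is zero-based and is
* exposed as a boxed {@link Long} in the second tuple position.)
* <code><pre>
* // (tuple("x", 0L), tuple("y", 1L))
* Seq.zipWithIndex(Arrays.asList("x", "y"))
* </pre></code>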
* <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> */ static <T> Seq<Tuple2<T, Long>> zipWithIndex(Iterable<? extends T> iterable) { return zipWithIndex(seq(iterable)); } /** * Zip a Stream with a corresponding Stream of indexes. * <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> */ static <T> Seq<Tuple2<T, Long>> zipWithIndex(Seq<? extends T> stream) { long[] index = { -1L }; return SeqUtils.transform(stream, (delegate, action) -> delegate.tryAdvance(t -> action.accept(tuple(t, index[0] = index[0] + 1)) ) ); } /** * Fold a stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> U foldLeft(Stream<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return foldLeft(seq(stream), seed, function); } /** * Fold a stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> U foldLeft(Iterable<? extends T> iterable, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return foldLeft(seq(iterable), seed, function); } /** * Fold a stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> U foldLeft(Seq<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? extends U> function) { final Iterator<? extends T> it = stream.iterator(); U result = seed; while (it.hasNext()) result = function.apply(result, it.next()); return result; } /** * Fold a stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> U foldRight(Stream<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return foldRight(seq(stream), seed, function); } /** * Fold a stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> U foldRight(Iterable<? extends T> iterable, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return foldRight(seq(iterable), seed, function); } /** * Fold a stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> U foldRight(Seq<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return stream.reverse().foldLeft(seed, (u, t) -> function.apply(t, u)); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanLeft(Stream<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return scanLeft(seq(stream), seed, function); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanLeft(Iterable<? extends T> iterable, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return scanLeft(seq(iterable), seed, function); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanLeft(Seq<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? 
extends U> function) { @SuppressWarnings("unchecked") U[] value = (U[]) new Object[] { seed }; return Seq.of(seed).concat(SeqUtils.transform(stream, (delegate, action) -> delegate.tryAdvance(t -> action.accept(value[0] = function.apply(value[0], t)) ) )); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanRight(Stream<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return scanRight(seq(stream), seed, function); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanRight(Iterable<? extends T> iterable, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return scanRight(seq(iterable), seed, function); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanRight(Seq<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return stream.reverse().scanLeft(seed, (u, t) -> function.apply(t, u)); } /** * Unfold a function into a stream. * <p> * <code><pre> * // (1, 2, 3, 4, 5) * Seq.unfold(1, i -> i &lt;= 6 ? Optional.of(tuple(i, i + 1)) : Optional.empty()) * </pre></code> */ static <T, U> Seq<T> unfold(U seed, Function<? super U, Optional<Tuple2<T, U>>> unfolder) { Tuple2<? extends T, ? extends U>[] unfolded = new Tuple2[] { tuple((T) null, seed) }; return seq((FunctionalSpliterator<T>) action -> { Optional<? extends Tuple2<? extends T, ? extends U>> result = unfolder.apply(unfolded[0].v2); if (result.isPresent()) action.accept((unfolded[0] = result.get()).v1); return result.isPresent(); }); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ static <T> Seq<T> reverse(Stream<? extends T> stream) { return reverse(seq(stream)); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ static <T> Seq<T> reverse(Iterable<? extends T> iterable) { return reverse(seq(iterable)); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ static <T> Seq<T> reverse(Seq<? extends T> stream) { List<T> list = toList(stream); Collections.reverse(list); return seq(list).onClose(stream::close); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ static <T> Seq<T> shuffle(Stream<? extends T> stream) { return shuffle(seq(stream)); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ static <T> Seq<T> shuffle(Iterable<? extends T> iterable) { return shuffle(seq(iterable)); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ static <T> Seq<T> shuffle(Seq<? extends T> stream) { return shuffle(stream, null); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ static <T> Seq<T> shuffle(Stream<? extends T> stream, Random random) { return shuffle(seq(stream), random); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. 
(2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ static <T> Seq<T> shuffle(Iterable<? extends T> iterable, Random random) { return shuffle(seq(iterable), random); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ static <T> Seq<T> shuffle(Seq<? extends T> stream, Random random) { Spliterator[] shuffled = { null }; return SeqUtils.transform(stream, (delegate, action) -> { if (shuffled[0] == null) { List<T> list = seq(delegate).toList(); if (random == null) Collections.shuffle(list); else Collections.shuffle(list, random); shuffled[0] = list.spliterator(); } return shuffled[0].tryAdvance(action); }).onClose(stream::close); } // [jooq-tools] START [crossjoin-static] /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2) { return crossJoin(seq(s1), seq(s2)); } /** * Cross join 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3) { return crossJoin(seq(s1), seq(s2), seq(s3)); } /** * Cross join 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4)); } /** * Cross join 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5)); } /** * Cross join 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6)); } /** * Cross join 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? 
extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7)); } /** * Cross join 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8)); } /** * Cross join 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9)); } /** * Cross join 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10)); } /** * Cross join 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11)); } /** * Cross join 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? 
extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12)); } /** * Cross join 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13)); } /** * Cross join 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14)); } /** * Cross join 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15)); } /** * Cross join 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? 
extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Stream<? extends T16> s16) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), seq(s16)); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2) { return crossJoin(seq(i1), seq(i2)); } /** * Cross join 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3) { return crossJoin(seq(i1), seq(i2), seq(i3)); } /** * Cross join 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4)); } /** * Cross join 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5)); } /** * Cross join 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6)); } /** * Cross join 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7)); } /** * Cross join 8 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8)); } /** * Cross join 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9)); } /** * Cross join 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10)); } /** * Cross join 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11)); } /** * Cross join 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? 
extends T12> i12) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12)); } /** * Cross join 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13)); } /** * Cross join 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14)); } /** * Cross join 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15)); } /** * Cross join 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? 
extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Iterable<? extends T16> i16) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), seq(i16)); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2) { List<? extends T2> list = s2.toList(); return seq(s1).flatMap(v1 -> seq(list).map(v2 -> tuple(v1, v2))) .onClose(SeqUtils.closeAll(s1, s2)); } /** * Cross join 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3) { List<Tuple2<T2, T3>> list = crossJoin(s2, s3).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2))) .onClose(SeqUtils.closeAll(s2, s3)); } /** * Cross join 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4) { List<Tuple3<T2, T3, T4>> list = crossJoin(s2, s3, s4).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3))) .onClose(SeqUtils.closeAll(s2, s3, s4)); } /** * Cross join 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5) { List<Tuple4<T2, T3, T4, T5>> list = crossJoin(s2, s3, s4, s5).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5)); } /** * Cross join 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6) { List<Tuple5<T2, T3, T4, T5, T6>> list = crossJoin(s2, s3, s4, s5, s6).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6)); } /** * Cross join 7 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7) { List<Tuple6<T2, T3, T4, T5, T6, T7>> list = crossJoin(s2, s3, s4, s5, s6, s7).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7)); } /** * Cross join 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8) { List<Tuple7<T2, T3, T4, T5, T6, T7, T8>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8)); } /** * Cross join 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9) { List<Tuple8<T2, T3, T4, T5, T6, T7, T8, T9>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9)); } /** * Cross join 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10) { List<Tuple9<T2, T3, T4, T5, T6, T7, T8, T9, T10>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10)); } /** * Cross join 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? 
extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11) { List<Tuple10<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11)); } /** * Cross join 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12) { List<Tuple11<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12)); } /** * Cross join 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13) { List<Tuple12<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13)); } /** * Cross join 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? 
extends T14> s14) { List<Tuple13<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12, t.v13))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14)); } /** * Cross join 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15) { List<Tuple14<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12, t.v13, t.v14))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15)); } /** * Cross join 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Seq<? extends T16> s16) { List<Tuple15<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12, t.v13, t.v14, t.v15))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16)); } // [jooq-tools] END [crossjoin-static] /** * Concatenate a number of streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> */ @SafeVarargs static <T> Seq<T> concat(Stream<? extends T>... streams) { return concat(SeqUtils.seqs(streams)); } /** * Concatenate a number of streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> */ @SafeVarargs static <T> Seq<T> concat(Iterable<? extends T>... iterables) { return concat(SeqUtils.seqs(iterables)); } /** * Concatenate a number of streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> */ @SafeVarargs static <T> Seq<T> concat(Seq<? extends T>... 
streams) { if (streams == null || streams.length == 0) return Seq.empty(); if (streams.length == 1) return seq(streams[0]); Stream<? extends T> result = streams[0]; for (int i = 1; i < streams.length; i++) result = Stream.concat(result, streams[i]); return seq(result); } /** * Duplicate a Streams into two equivalent Streams. * <p> * <code><pre> * // tuple((1, 2, 3), (1, 2, 3)) * Seq.of(1, 2, 3).duplicate() * </pre></code> */ static <T> Tuple2<Seq<T>, Seq<T>> duplicate(Stream<? extends T> stream) { final Iterator<? extends T> it = stream.iterator(); final LinkedList<T> gap = new LinkedList<>(); @SuppressWarnings("unchecked") final Iterator<T>[] ahead = new Iterator[] { null }; class Duplicate implements Iterator<T> { @Override public boolean hasNext() { if (ahead[0] == null || ahead[0] == this) return it.hasNext(); return !gap.isEmpty(); } @Override public T next() { if (ahead[0] == null) ahead[0] = this; if (ahead[0] == this) { T value = it.next(); gap.offer(value); return value; } else { T value = gap.poll(); if (gap.isEmpty()) ahead[0] = null; return value; } } } return tuple(seq(new Duplicate()), seq(new Duplicate())); } /** * Consume a stream and concatenate all elements. */ static String toString(Stream<?> stream) { return toString(stream, ""); } /** * Consume a stream and concatenate all elements using a separator. */ static String toString(Stream<?> stream, CharSequence delimiter) { return stream.map(Objects::toString).collect(Collectors.joining(delimiter)); } /** * Collect a Stream into a List. */ static <T, C extends Collection<T>> C toCollection(Stream<? extends T> stream, Supplier<? extends C> collectionFactory) { return stream.collect(Collectors.toCollection(collectionFactory)); } /** * Collect a Stream into a List. */ static <T> List<T> toList(Stream<? extends T> stream) { return stream.collect(Collectors.toList()); } /** * Collect a Stream into a Set. */ static <T> Set<T> toSet(Stream<? extends T> stream) { return stream.collect(Collectors.toSet()); } /** * Collect a Stream of {@link Tuple2} into a Map. */ static <T, K, V> Map<K, V> toMap(Stream<Tuple2<K, V>> stream) { return stream.collect(Collectors.toMap(Tuple2::v1, Tuple2::v2)); } /** * Collect a Stream into a Map. */ static <T, K, V> Map<K, V> toMap(Stream<? extends T> stream, Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends V> valueMapper) { return stream.collect(Collectors.toMap(keyMapper, valueMapper)); } /** * Returns a limited interval from a given Stream. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5, 6).slice(3, 5) * </pre></code> */ static <T> Seq<T> slice(Stream<? extends T> stream, long from, long to) { long f = Math.max(from, 0); long t = Math.max(to - f, 0); return seq(stream.skip(f).limit(t)); } /** * Returns a stream with n elements skipped. * <p> * <code><pre> * // (4, 5, 6) * Seq.of(1, 2, 3, 4, 5, 6).skip(3) * </pre></code> */ static <T> Seq<T> skip(Stream<? extends T> stream, long elements) { return seq(stream.skip(elements)); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code>. * <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhile(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> skipWhile(Stream<? extends T> stream, Predicate<? super T> predicate) { return skipUntil(stream, predicate.negate()); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to <code>false</code>. 
* <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhileClosed(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> skipWhileClosed(Stream<? extends T> stream, Predicate<? super T> predicate) { return skipUntilClosed(stream, predicate.negate()); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntil(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> skipUntil(Stream<? extends T> stream, Predicate<? super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> !test[0] ? delegate.tryAdvance(t -> { if (test[0] = predicate.test(t)) action.accept(t); }) : delegate.tryAdvance(action) ); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntilClosed(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> skipUntilClosed(Stream<? extends T> stream, Predicate<? super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> !test[0] ? delegate.tryAdvance(t -> test[0] = predicate.test(t)) : delegate.tryAdvance(action) ); } /** * Returns a stream limited to n elements. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5, 6).limit(3) * </pre></code> */ static <T> Seq<T> limit(Stream<? extends T> stream, long elements) { return seq(stream.limit(elements)); } /** * Alias for limit * * @see Seq#limit(long) */ default Seq<T> take(long maxSize) { return limit(maxSize); } /** * Alias for skip * * @see Seq#skip(long) */ default Seq<T> drop(long n) { return skip(n); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitWhile(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> limitWhile(Stream<? extends T> stream, Predicate<? super T> predicate) { return limitUntil(stream, predicate.negate()); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitWhileClosed(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> limitWhileClosed(Stream<? extends T> stream, Predicate<? super T> predicate) { return limitUntilClosed(stream, predicate.negate()); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitUntil(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> limitUntil(Stream<? extends T> stream, Predicate<? super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> delegate.tryAdvance(t -> { if (!(test[0] = predicate.test(t))) action.accept(t); }) && !test[0] ); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitUntilClosed(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> limitUntilClosed(Stream<? extends T> stream, Predicate<? 
super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> !test[0] && delegate.tryAdvance(t -> { test[0] = predicate.test(t); action.accept(t); }) ); } /** * Returns a stream with a given value interspersed between any two values of this stream. * <p> * <code><pre> * // (1, 0, 2, 0, 3, 0, 4) * Seq.of(1, 2, 3, 4).intersperse(0) * </pre></code> */ static <T> Seq<T> intersperse(Stream<? extends T> stream, T value) { return seq(stream.flatMap(t -> Stream.of(value, t)).skip(1)); } /** * Classify this stream's elements according to a given classifier function * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 ) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Stream, Function)}) * @see #groupBy(Function) * @see #partition(Predicate) */ public static <K, T> Seq<Tuple2<K, Seq<T>>> grouped(Stream<? extends T> stream, Function<? super T, ? extends K> classifier) { return grouped(seq(stream), classifier); } /** * Classify this stream's elements according to a given classifier function * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 ) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Stream, Function)}) * @see #groupBy(Function) * @see #partition(Predicate) */ public static <K, T> Seq<Tuple2<K, Seq<T>>> grouped(Iterable<? extends T> iterable, Function<? super T, ? extends K> classifier) { return grouped(seq(iterable), classifier); } /** * Classify this stream's elements according to a given classifier function * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 ) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Stream, Function)}) * @see #groupBy(Function) * @see #partition(Predicate) */ public static <K, T> Seq<Tuple2<K, Seq<T>>> grouped(Seq<? extends T> seq, Function<? super T, ? extends K> classifier) { final Iterator<? 
extends T> it = seq.iterator(); class ClassifyingIterator implements Iterator<Tuple2<K, Seq<T>>> { final Map<K, Queue<T>> buffers = new LinkedHashMap<>(); final Queue<K> keys = new LinkedList<>(); class Classification implements Iterator<T> { final K key; Queue<T> buffer; Classification(K key) { this.key = key; } void fetchClassification() { if (buffer == null) buffer = buffers.get(key); while (buffer.isEmpty() && it.hasNext()) fetchNextNewKey(); } @Override public boolean hasNext() { fetchClassification(); return !buffer.isEmpty(); } @Override public T next() { return buffer.poll(); } } void fetchClassifying() { while (it.hasNext() && fetchNextNewKey()); } boolean fetchNextNewKey() { T next = it.next(); K nextK = classifier.apply(next); Queue<T> buffer = buffers.get(nextK); try { if (buffer == null) { buffer = new ArrayDeque<>(); buffers.put(nextK, buffer); keys.add(nextK); return true; } } finally { buffer.offer(next); } return false; } @Override public boolean hasNext() { fetchClassifying(); return !keys.isEmpty(); } @Override public Tuple2<K, Seq<T>> next() { K nextK = keys.poll(); return tuple(nextK, seq(new Classification(nextK))); } } return seq(new ClassifyingIterator()).onClose(seq::close); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}) * * @see #groupBy(Function, Collector) */ public static <K, T, A, D> Seq<Tuple2<K, D>> grouped(Stream<? extends T> stream, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(seq(stream), classifier, downstream); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}) * * @see #groupBy(Function, Collector) */ public static <K, T, A, D> Seq<Tuple2<K, D>> grouped(Iterable<? extends T> iterable, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(seq(iterable), classifier, downstream); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}) * * @see #groupBy(Function, Collector) */ public static <K, T, A, D> Seq<Tuple2<K, D>> grouped(Seq<? extends T> seq, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(seq, classifier).map(t -> tuple(t.v1, t.v2.collect(downstream))); } /** * Partition a stream into two given a predicate. 
* <p> * <code><pre> * // tuple((1, 3, 5), (2, 4, 6)) * Seq.of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0) * </pre></code> */ static <T> Tuple2<Seq<T>, Seq<T>> partition(Stream<? extends T> stream, Predicate<? super T> predicate) { final Iterator<? extends T> it = stream.iterator(); final LinkedList<T> buffer1 = new LinkedList<>(); final LinkedList<T> buffer2 = new LinkedList<>(); class Partition implements Iterator<T> { final boolean b; Partition(boolean b) { this.b = b; } void fetch() { while (buffer(b).isEmpty() && it.hasNext()) { T next = it.next(); buffer(predicate.test(next)).offer(next); } } LinkedList<T> buffer(boolean test) { return test ? buffer1 : buffer2; } @Override public boolean hasNext() { fetch(); return !buffer(b).isEmpty(); } @Override public T next() { return buffer(b).poll(); } } return tuple(seq(new Partition(true)), seq(new Partition(false))); } /** * Split a stream at a given position. * <p> * <code><pre> * // tuple((1, 2, 3), (4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitAt(3) * </pre></code> */ static <T> Tuple2<Seq<T>, Seq<T>> splitAt(Stream<? extends T> stream, long position) { return seq(stream) .zipWithIndex() .partition(t -> t.v2 < position) // Explicit type parameters to work around this Eclipse compiler bug: // https://bugs.eclipse.org/bugs/show_bug.cgi?id=455945 .map((v1, v2) -> Tuple.<Seq<T>, Seq<T>>tuple( v1.map(t -> t.v1), v2.map(t -> t.v1) )); } /** * Split a stream at the head. * <p> * <code><pre> * // tuple(1, (2, 3, 4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitHead(3) * </pre></code> */ static <T> Tuple2<Optional<T>, Seq<T>> splitAtHead(Stream<T> stream) { Iterator<T> it = stream.iterator(); return tuple(it.hasNext() ? Optional.of(it.next()) : Optional.empty(), seq(it)); } // Methods taken from LINQ // ----------------------- /** * Keep only those elements in a stream that are of a given type. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, "a", 2, "b", 3).ofType(Integer.class) * </pre></code> */ @SuppressWarnings("unchecked") static <T, U> Seq<U> ofType(Stream<? extends T> stream, Class<? extends U> type) { return seq(stream).filter(type::isInstance).map(t -> (U) t); } /** * Cast all elements in a stream to a given type, possibly throwing a {@link ClassCastException}. * <p> * <code><pre> * // ClassCastException * Seq.of(1, "a", 2, "b", 3).cast(Integer.class) * </pre></code> */ static <T, U> Seq<U> cast(Stream<? extends T> stream, Class<? extends U> type) { return seq(stream).map(type::cast); } // Shortcuts to Collectors // ----------------------- /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function)} collector. */ static <T, K> Map<K, List<T>> groupBy(Stream<? extends T> stream, Function<? super T, ? extends K> classifier) { return seq(stream).groupBy(classifier); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Collector)} collector. */ static <T, K, A, D> Map<K, D> groupBy(Stream<? extends T> stream, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return seq(stream).groupBy(classifier, downstream); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Supplier, Collector)} collector. */ static <T, K, D, A, M extends Map<K, D>> M groupBy(Stream<? extends T> stream, Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, Collector<? 
super T, A, D> downstream) { return seq(stream).groupBy(classifier, mapFactory, downstream); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining()} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated static String join(Stream<?> stream) { return seq(stream).join(); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence)} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated static String join(Stream<?> stream, CharSequence delimiter) { return seq(stream).join(delimiter); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence, CharSequence, CharSequence)} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated static String join(Stream<?> stream, CharSequence delimiter, CharSequence prefix, CharSequence suffix) { return seq(stream).join(delimiter, prefix, suffix); } // Covariant overriding of Stream return types // ------------------------------------------- @Override Seq<T> filter(Predicate<? super T> predicate); @Override <R> Seq<R> map(Function<? super T, ? extends R> mapper); @Override IntStream mapToInt(ToIntFunction<? super T> mapper); @Override LongStream mapToLong(ToLongFunction<? super T> mapper); @Override DoubleStream mapToDouble(ToDoubleFunction<? super T> mapper); @Override <R> Seq<R> flatMap(Function<? super T, ? extends Stream<? extends R>> mapper); @Override IntStream flatMapToInt(Function<? super T, ? extends IntStream> mapper); @Override LongStream flatMapToLong(Function<? super T, ? extends LongStream> mapper); @Override DoubleStream flatMapToDouble(Function<? super T, ? extends DoubleStream> mapper); @Override Seq<T> distinct(); @Override Seq<T> sorted(); @Override Seq<T> sorted(Comparator<? super T> comparator); @Override Seq<T> peek(Consumer<? super T> action); @Override Seq<T> limit(long maxSize); @Override Seq<T> skip(long n); @Override Seq<T> onClose(Runnable closeHandler); @Override void close(); @Override long count(); // These methods have no effect // ---------------------------- /** * Returns this stream. All Seq streams are sequential, hence the name. * * @return this stream unmodified */ @Override default Seq<T> sequential() { return this; } /** * Seq streams are always sequential and, as such, doesn't support * parallelization. * * @return this sequential stream unmodified * @see <a href="https://github.com/jOOQ/jOOL/issues/130">jOOL Issue #130</a> */ @Override default Seq<T> parallel() { return this; } /** * Returns this stream. All Seq streams are ordered so this method has * no effect. * * @return this stream unmodified */ @Override default Seq<T> unordered() { return this; } @Override default Spliterator<T> spliterator() { return Iterable.super.spliterator(); } @Override default void forEach(Consumer<? super T> action) { Iterable.super.forEach(action); } // Debugging tools // --------------- /** * Generate a nicely formatted representation of this stream. * <p> * Clients should not rely on the concrete formatting of this method, which * is intended for debugging convenience only. 
*/ String format(); /** * Print contents of this stream to {@link System#out}. */ default void printOut() { print(System.out); } /** * Print contents of this stream to {@link System#err}. */ default void printErr() { print(System.err); } /** * Print contents of this stream to the argument writer. */ default void print(PrintWriter writer) { forEach(writer::println); } /** * Print contents of this stream to the argument stream. */ default void print(PrintStream stream) { forEach(stream::println); } }
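// A minimal usage sketch (illustrative only) combining the partitioning and
// filtering helpers documented above; it uses only methods declared in this
// interface, and the commented results follow from the documented examples.
//
//     // ((1, 3, 5), (2, 4, 6))
//     Tuple2<Seq<Integer>, Seq<Integer>> parts = Seq.of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0);
//
//     // 1, 2, 3
//     Seq.of(1, "a", 2, "b", 3).ofType(Integer.class).printOut();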
src/main/java/org/jooq/lambda/Seq.java
/** * Copyright (c) 2014-2016, Data Geekery GmbH, [email protected] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jooq.lambda; import static java.util.Comparator.comparing; import static java.util.Spliterator.ORDERED; import static java.util.Spliterators.spliteratorUnknownSize; import static org.jooq.lambda.SeqUtils.sneakyThrow; import static org.jooq.lambda.tuple.Tuple.tuple; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.io.Reader; import java.io.UncheckedIOException; import java.time.Duration; import java.time.Instant; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Queue; import java.util.Random; import java.util.Set; import java.util.Spliterator; import java.util.concurrent.ConcurrentHashMap; import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.ToDoubleFunction; import java.util.function.ToIntFunction; import java.util.function.ToLongFunction; import java.util.function.UnaryOperator; import java.util.stream.Collector; import java.util.stream.Collectors; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; import java.util.stream.StreamSupport; import javax.annotation.Generated; import org.jooq.lambda.exception.TooManyElementsException; import org.jooq.lambda.function.Function10; import org.jooq.lambda.function.Function11; import org.jooq.lambda.function.Function12; import org.jooq.lambda.function.Function13; import org.jooq.lambda.function.Function14; import org.jooq.lambda.function.Function15; import org.jooq.lambda.function.Function16; import org.jooq.lambda.function.Function3; import org.jooq.lambda.function.Function4; import org.jooq.lambda.function.Function5; import org.jooq.lambda.function.Function6; import org.jooq.lambda.function.Function7; import org.jooq.lambda.function.Function8; import org.jooq.lambda.function.Function9; import org.jooq.lambda.tuple.Tuple; import org.jooq.lambda.tuple.Tuple1; import org.jooq.lambda.tuple.Tuple10; import org.jooq.lambda.tuple.Tuple11; import org.jooq.lambda.tuple.Tuple12; import org.jooq.lambda.tuple.Tuple13; import org.jooq.lambda.tuple.Tuple14; import org.jooq.lambda.tuple.Tuple15; import org.jooq.lambda.tuple.Tuple16; import org.jooq.lambda.tuple.Tuple2; import org.jooq.lambda.tuple.Tuple3; import org.jooq.lambda.tuple.Tuple4; import org.jooq.lambda.tuple.Tuple5; import org.jooq.lambda.tuple.Tuple6; import org.jooq.lambda.tuple.Tuple7; import 
org.jooq.lambda.tuple.Tuple8; import org.jooq.lambda.tuple.Tuple9; /** * A sequential, ordered {@link Stream} that adds all sorts of useful methods that work only because * it is sequential and ordered. * * @author Lukas Eder * @author Roman Tkalenko */ public interface Seq<T> extends Stream<T>, Iterable<T>, Collectable<T> { /** * The underlying {@link Stream} implementation. */ Stream<T> stream(); /** * Transform this stream into a new type. * <p> * If certain operations are re-applied frequently to streams, this * transform operation is very useful for such operations to be applied in a * fluent style: * <p> * <code><pre> * Function&ltSeq&lt;Integer>, Seq&lt;String>> toString = s -> s.map(Objects::toString); * Seq&lt;String> strings = * Seq.of(1, 2, 3) * .transform(toString); * </pre></code> */ default <U> U transform(Function<? super Seq<T>, ? extends U> transformer) { return transformer.apply(this); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ default <U> Seq<Tuple2<T, U>> crossJoin(Stream<? extends U> other) { return Seq.crossJoin(this, other); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ default <U> Seq<Tuple2<T, U>> crossJoin(Iterable<? extends U> other) { return Seq.crossJoin(this, other); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ default <U> Seq<Tuple2<T, U>> crossJoin(Seq<? extends U> other) { return Seq.crossJoin(this, other); } /** * Inner join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2)) * Seq.of(1, 2, 3).innerJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> innerJoin(Stream<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return innerJoin(seq(other), predicate); } /** * Inner join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2)) * Seq.of(1, 2, 3).innerJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> innerJoin(Iterable<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return innerJoin(seq(other), predicate); } /** * Inner join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2)) * Seq.of(1, 2, 3).innerJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> innerJoin(Seq<? extends U> other, BiPredicate<? super T, ? super U> predicate) { // This algorithm isn't lazy and has substantial complexity for large argument streams! List<? extends U> list = other.toList(); return flatMap(t -> seq(list) .filter(u -> predicate.test(t, u)) .map(u -> tuple(t, u))) .onClose(other::close); } /** * Left outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(3, null)) * Seq.of(1, 2, 3).leftOuterJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> leftOuterJoin(Stream<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return leftOuterJoin(seq(other), predicate); } /** * Left outer join 2 streams into one. 
* <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(3, null)) * Seq.of(1, 2, 3).leftOuterJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> leftOuterJoin(Iterable<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return leftOuterJoin(seq(other), predicate); } /** * Left outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(3, null)) * Seq.of(1, 2, 3).leftOuterJoin(Seq.of(1, 2), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> leftOuterJoin(Seq<? extends U> other, BiPredicate<? super T, ? super U> predicate) { // This algorithm isn't lazy and has substantial complexity for large argument streams! List<? extends U> list = other.toList(); return flatMap(t -> seq(list) .filter(u -> predicate.test(t, u)) .onEmpty(null) .map(u -> tuple(t, u))) .onClose(other::close); } /** * Right outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(null, 3)) * Seq.of(1, 2).rightOuterJoin(Seq.of(1, 2, 3), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> rightOuterJoin(Stream<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return rightOuterJoin(seq(other), predicate); } /** * Right outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(null, 3)) * Seq.of(1, 2).rightOuterJoin(Seq.of(1, 2, 3), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> rightOuterJoin(Iterable<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return rightOuterJoin(seq(other), predicate); } /** * Right outer join 2 streams into one. * <p> * <code><pre> * // (tuple(1, 1), tuple(2, 2), tuple(null, 3)) * Seq.of(1, 2).rightOuterJoin(Seq.of(1, 2, 3), t -> Objects.equals(t.v1, t.v2)) * </pre></code> */ default <U> Seq<Tuple2<T, U>> rightOuterJoin(Seq<? extends U> other, BiPredicate<? super T, ? super U> predicate) { return other .leftOuterJoin(this, (u, t) -> predicate.test(t, u)) .map(t -> tuple(t.v2, t.v1)) .onClose(other::close); } /** * Produce this stream, or an alternative stream from the * <code>value</code>, in case this stream is empty. */ default Seq<T> onEmpty(T value) { return onEmptyGet(() -> value); } /** * Produce this stream, or an alternative stream from the * <code>supplier</code>, in case this stream is empty. */ default Seq<T> onEmptyGet(Supplier<? extends T> supplier) { boolean[] first = { true }; return SeqUtils.transform(this, (delegate, action) -> { if (first[0]) { first[0] = false; if (!delegate.tryAdvance(action)) action.accept(supplier.get()); return true; } else { return delegate.tryAdvance(action); } }); } /** * Produce this stream, or an alternative stream from the * <code>supplier</code>, in case this stream is empty. */ default <X extends Throwable> Seq<T> onEmptyThrow(Supplier<? extends X> supplier) { boolean[] first = { true }; return SeqUtils.transform(this, (delegate, action) -> { if (first[0]) { first[0] = false; if (!delegate.tryAdvance(action)) sneakyThrow(supplier.get()); return true; } else { return delegate.tryAdvance(action); } }); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> concat(Stream<? extends T> other) { return concat(seq(other)); } /** * Concatenate two streams. 
* <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> concat(Iterable<? extends T> other) { return concat(seq(other)); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> concat(Seq<? extends T> other) { return Seq.concat(new Seq[]{this, other}); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4) * Seq.of(1, 2, 3).concat(4) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> concat(T other) { return concat(Seq.of(other)); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(4, 5, 6) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> concat(T... other) { return concat(Seq.of(other)); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> append(Stream<? extends T> other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> append(Iterable<? extends T> other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(Seq.of(4, 5, 6)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> append(Seq<? extends T> other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4) * Seq.of(1, 2, 3).append(4) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> append(T other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).append(4, 5, 6) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> append(T... other) { return concat(other); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> prepend(Stream<? extends T> other) { return seq(other).concat(this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> prepend(Iterable<? extends T> other) { return seq(other).concat(this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> prepend(Seq<? extends T> other) { return concat(other, this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4) * Seq.of(2, 3, 4).prepend(1) * </pre></code> * * @see #concat(Stream[]) */ default Seq<T> prepend(T other) { return Seq.of(other).concat(this); } /** * Concatenate two streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(4, 5, 6).prepend(Seq.of(1, 2, 3)) * </pre></code> * * @see #concat(Stream[]) */ @SuppressWarnings({ "unchecked" }) default Seq<T> prepend(T... other) { return Seq.of(other).concat(this); } /** * Check whether this stream contains a given value. 
* <p> * <code><pre> * // true * Seq.of(1, 2, 3).contains(2) * </pre><code> */ default boolean contains(T other) { return anyMatch(Predicate.isEqual(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(T... other) { return containsAll(of(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(Stream<? extends T> other) { return containsAll(seq(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(Iterable<? extends T> other) { return containsAll(seq(other)); } /** * Check whether this stream contains all given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAll(2, 3) * </pre><code> */ default boolean containsAll(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? true : filter(t -> set.remove(t)).anyMatch(t -> set.isEmpty()); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(T... other) { return containsAny(of(other)); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(Stream<? extends T> other) { return containsAny(seq(other)); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(Iterable<? extends T> other) { return containsAny(seq(other)); } /** * Check whether this stream contains any of the given values. * <p> * <code><pre> * // true * Seq.of(1, 2, 3).containsAny(2, 4) * </pre><code> */ default boolean containsAny(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? false : anyMatch(set::contains); } /** * Get a single element from the stream at a given index. */ default Optional<T> get(long index) { if (index < 0L) return Optional.empty(); else if (index == 0L) return findFirst(); else return skip(index).findFirst(); } /** * Get the single element from the stream, or throw an exception if the * stream holds more than one element. */ default Optional<T> findSingle() throws TooManyElementsException { Iterator<T> it = iterator(); if (!it.hasNext()) return Optional.empty(); T result = it.next(); if (!it.hasNext()) return Optional.of(result); throw new TooManyElementsException("Stream contained more than one element."); } /** * Get a single element from the stream given a predicate. */ default Optional<T> findFirst(Predicate<? super T> predicate) { return filter(predicate).findFirst(); } /** * Return a new stream where the first occurrence of the argument is removed. * <p> * <code><pre> * // 1, 3, 2, 4 * Seq.of(1, 2, 3, 2, 4).remove(2) * </pre><code> */ default Seq<T> remove(T other) { boolean[] removed = new boolean[1]; return filter(t -> removed[0] || !(removed[0] = Objects.equals(t, other))); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(T... 
other) { return removeAll(of(other)); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(Stream<? extends T> other) { return removeAll(seq(other)); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(Iterable<? extends T> other) { return removeAll(seq(other)); } /** * Return a new stream where all occurrences of the arguments are removed. * <p> * <code><pre> * // 1, 4 * Seq.of(1, 2, 3, 2, 4).removeAll(2, 3) * </pre><code> */ default Seq<T> removeAll(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? this : filter(t -> !set.contains(t)).onClose(other::close); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(T... other) { return retainAll(of(other)); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(Stream<? extends T> other) { return retainAll(seq(other)); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(Iterable<? extends T> other) { return retainAll(seq(other)); } /** * Return a new stream where only occurrences of the arguments are retained. * <p> * <code><pre> * // 2, 3, 2 * Seq.of(1, 2, 3, 2, 4).retainAll(2, 3) * </pre><code> */ default Seq<T> retainAll(Seq<? extends T> other) { Set<? extends T> set = other.toSet(HashSet::new); return set.isEmpty() ? empty() : filter(t -> set.contains(t)).onClose(other::close); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> * * @see #cycle(Stream) */ default Seq<T> cycle() { return cycle(this); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream, long) */ default Seq<T> cycle(long times) { return cycle(this, times); } /** * Get a stream of distinct keys. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 1, 2, -2, 3).distinct(Math::abs) * </pre></code> */ default <U> Seq<T> distinct(Function<? super T, ? extends U> keyExtractor) { final Map<U, String> seen = new ConcurrentHashMap<>(); return filter(t -> seen.put(keyExtractor.apply(t), "") == null); } /** * Zip two streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> * * @see #zip(Stream, Stream) */ default <U> Seq<Tuple2<T, U>> zip(Stream<? extends U> other) { return zip(seq(other)); } /** * Zip two streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> * * @see #zip(Stream, Stream) */ default <U> Seq<Tuple2<T, U>> zip(Iterable<? extends U> other) { return zip(seq(other)); } /** * Zip two streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> * * @see #zip(Stream, Stream) */ default <U> Seq<Tuple2<T, U>> zip(Seq<? extends U> other) { return zip(this, other); } /** * Zip two streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> * * @see #zip(Seq, BiFunction) */ default <U, R> Seq<R> zip(Stream<? extends U> other, BiFunction<? super T, ? super U, ? extends R> zipper) { return zip(seq(other), zipper); } /** * Zip two streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> * * @see #zip(Seq, BiFunction) */ default <U, R> Seq<R> zip(Iterable<? extends U> other, BiFunction<? super T, ? super U, ? extends R> zipper) { return zip(seq(other), zipper); } /** * Zip two streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> * * @see #zip(Seq, BiFunction) */ default <U, R> Seq<R> zip(Seq<? extends U> other, BiFunction<? super T, ? super U, ? extends R> zipper) { return zip(this, other, zipper); } /** * Zip a Stream with a corresponding Stream of indexes. * <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> * * @see #zipWithIndex(Stream) */ default Seq<Tuple2<T, Long>> zipWithIndex() { return zipWithIndex(this); } /** * Fold a Stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ default <U> U foldLeft(U seed, BiFunction<? super U, ? super T, ? extends U> function) { return foldLeft(this, seed, function); } /** * Fold a Stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ default <U> U foldRight(U seed, BiFunction<? super T, ? super U, ? extends U> function) { return foldRight(this, seed, function); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ default <U> Seq<U> scanLeft(U seed, BiFunction<? super U, ? super T, ? extends U> function) { return scanLeft(this, seed, function); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ default <U> Seq<U> scanRight(U seed, BiFunction<? super T, ? super U, ? extends U> function) { return scanRight(this, seed, function); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ default Seq<T> reverse() { return reverse(this); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ default Seq<T> shuffle() { return shuffle(this); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ default Seq<T> shuffle(Random random) { return shuffle(this, random); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code>. 
* <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhile(i -> i &lt; 3) * </pre></code> * * @see #skipWhile(Stream, Predicate) */ default Seq<T> skipWhile(Predicate<? super T> predicate) { return skipWhile(this, predicate); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to false. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhileClosed(i -> i &lt; 3) * </pre></code> * * @see #skipWhileClosed(Stream, Predicate) */ default Seq<T> skipWhileClosed(Predicate<? super T> predicate) { return skipWhileClosed(this, predicate); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntil(i -> i == 3) * </pre></code> * * @see #skipUntil(Stream, Predicate) */ default Seq<T> skipUntil(Predicate<? super T> predicate) { return skipUntil(this, predicate); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntilClosed(i -> i == 3) * </pre></code> * * @see #skipUntilClosed(Stream, Predicate) */ default Seq<T> skipUntilClosed(Predicate<? super T> predicate) { return skipUntilClosed(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitWhile(i -> i &lt; 3) * </pre></code> * * @see #limitWhile(Stream, Predicate) */ default Seq<T> limitWhile(Predicate<? super T> predicate) { return limitWhile(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitWhileClosed(i -> i &lt; 3) * </pre></code> * * @see #limitWhileClosed(Stream, Predicate) */ default Seq<T> limitWhileClosed(Predicate<? super T> predicate) { return limitWhileClosed(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitUntil(i -> i == 3) * </pre></code> * * @see #limitUntil(Stream, Predicate) */ default Seq<T> limitUntil(Predicate<? super T> predicate) { return limitUntil(this, predicate); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitUntilClosed(i -> i == 3) * </pre></code> * * @see #limitUntilClosed(Stream, Predicate) */ default Seq<T> limitUntilClosed(Predicate<? super T> predicate) { return limitUntilClosed(this, predicate); } /** * Returns a stream with a given value interspersed between any two values of this stream. * <p> * <code><pre> * // (1, 0, 2, 0, 3, 0, 4) * Seq.of(1, 2, 3, 4).intersperse(0) * </pre></code> * * @see #intersperse(Stream, Object) */ default Seq<T> intersperse(T value) { return intersperse(this, value); } /** * Duplicate a Streams into two equivalent Streams. 
* <p> * <code><pre> * // tuple((1, 2, 3), (1, 2, 3)) * Seq.of(1, 2, 3).duplicate() * </pre></code> * * @see #duplicate(Stream) */ default Tuple2<Seq<T>, Seq<T>> duplicate() { return duplicate(this); } /** * Classify this stream's elements according to a given classifier function. * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Function)}. * @see #groupBy(Function) * @see #partition(Predicate) */ default <K> Seq<Tuple2<K, Seq<T>>> grouped(Function<? super T, ? extends K> classifier) { return grouped(this, classifier); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}. * * @see #groupBy(Function, Collector) */ default <K, A, D> Seq<Tuple2<K, D>> grouped(Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(this, classifier, downstream); } /** * Partition a stream into two given a predicate. * <p> * <code><pre> * // tuple((1, 3, 5), (2, 4, 6)) * Seq.of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0) * </pre></code> * * @see #partition(Stream, Predicate) */ default Tuple2<Seq<T>, Seq<T>> partition(Predicate<? super T> predicate) { return partition(this, predicate); } /** * Split a stream at a given position. * <p> * <code><pre> * // tuple((1, 2, 3), (4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitAt(3) * </pre></code> * * @see #splitAt(Stream, long) */ default Tuple2<Seq<T>, Seq<T>> splitAt(long position) { return splitAt(this, position); } /** * Split a stream at the head. * <p> * <code><pre> * // tuple(1, (2, 3, 4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitAtHead() * </pre></code> * * @see #splitAtHead(Stream) */ default Tuple2<Optional<T>, Seq<T>> splitAtHead() { return splitAtHead(this); } /** * Returns a limited interval from a given Stream. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5, 6).slice(3, 5) * </pre></code> * * @see #slice(Stream, long, long) */ default Seq<T> slice(long from, long to) { return slice(this, from, to); } /** * Check if the sequence has no elements */ default boolean isEmpty() { return !findAny().isPresent(); } /** * Check if the sequence has any elements */ default boolean isNotEmpty() { return !isEmpty(); } /** * Sort by the results of the given function. */ default <U extends Comparable<? super U>> Seq<T> sorted(Function<? super T, ? extends U> function) { return sorted(comparing(function)); } /** * Sort by the results of the given function. */ default <U> Seq<T> sorted(Function<? super T, ? extends U> function, Comparator<? super U> comparator) { return sorted(comparing(function, comparator)); } // Methods taken from LINQ // ----------------------- /** * Keep only those elements in a stream that are of a given type. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, "a", 2, "b", 3).ofType(Integer.class) * </pre></code> * * @see #ofType(Stream, Class) */ default <U> Seq<U> ofType(Class<?
extends U> type) { return ofType(this, type); } /** * Cast all elements in a stream to a given type, possibly throwing a {@link ClassCastException}. * <p> * <code><pre> * // ClassCastException * Seq.of(1, "a", 2, "b", 3).cast(Integer.class) * </pre></code> * * @see #cast(Stream, Class) */ default <U> Seq<U> cast(Class<? extends U> type) { return cast(this, type); } /** * Map this stream to a stream containing a sliding window over the previous stream. * <p> * <code><pre> * // ((1, 2, 3), (2, 3, 4), (3, 4, 5)) * Seq.of(1, 2, 3, 4, 5).sliding(3); * </pre></code> * <p> * This is equivalent to using the more verbose window function version: * <code><pre> * int n = 3; * Seq.of(1, 2, 3, 4, 5) * .window(0, n - 1) * .filter(w -> w.count() == n) * .map(w -> w.toList()); * </pre></code> */ default Seq<Seq<T>> sliding(long size) { if (size <= 0) throw new IllegalArgumentException("Size must be >= 1"); return window(0, size - 1).filter(w -> w.count() == size).map(w -> w.window()); } /** * Map this stream to a windowed stream using the default partition and order. * <p> * <code><pre> * // (0, 1, 2, 3, 4) * Seq.of(1, 2, 4, 2, 3).window().map(Window::rowNumber) * </pre></code> */ default Seq<Window<T>> window() { return window(Window.of()).map(t -> t.v1); } /** * Map this stream to a windowed stream using the default partition and order with frame. * <p> * <code><pre> * // (2, 4, 4, 4, 3) * Seq.of(1, 2, 4, 2, 3).window(-1, 1).map(Window::max) * </pre></code> */ default Seq<Window<T>> window(long lower, long upper) { return window(Window.of(lower, upper)).map(t -> t.v1); } /** * Map this stream to a windowed stream using the default partition and a specific order. * <p> * <code><pre> * // (0, 1, 4, 2, 3) * Seq.of(1, 2, 4, 2, 3).window(naturalOrder()).map(Window::rowNumber) * </pre></code> */ default Seq<Window<T>> window(Comparator<? super T> orderBy) { return window(Window.of(orderBy)).map(t -> t.v1); } /** * Map this stream to a windowed stream using the default partition and a specific order with frame. * <p> * <code><pre> * // (1, 1, 3, 2, 2) * Seq.of(1, 2, 4, 2, 3).window(naturalOrder(), -1, 1).map(Window::min) * </pre></code> */ default Seq<Window<T>> window(Comparator<? super T> orderBy, long lower, long upper) { return window(Window.of(orderBy, lower, upper)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and the default order. * <p> * <code><pre> * // (1, 2, 2, 2, 1) * Seq.of(1, 2, 4, 2, 3).window(i -> i % 2).map(Window::min) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy) { return window(Window.of(partitionBy)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and the default order with frame. * <p> * <code><pre> * // (3, 4, 4, 2, 3) * Seq.of(1, 4, 2, 2, 3).window(i -> i % 2, -1, 1).map(Window::max) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy, long lower, long upper) { return window(Window.of(partitionBy, lower, upper)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and order. * <p> * <code><pre> * // (1, 2, 4, 4, 3) * Seq.of(1, 2, 4, 2, 3).window(i -> i % 2, naturalOrder()).map(Window::max) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy, Comparator<? super T> orderBy) { return window(Window.of(partitionBy, orderBy)).map(t -> t.v1); } /** * Map this stream to a windowed stream using a specific partition and order with frame.
* <p> * <code><pre> * // (3, 2, 4, 4, 3) * Seq.of(1, 2, 4, 2, 3).window(i -> i % 2, naturalOrder(), -1, 1).map(Window::max) * </pre></code> */ default <U> Seq<Window<T>> window(Function<? super T, ? extends U> partitionBy, Comparator<? super T> orderBy, long lower, long upper) { return window(Window.of(partitionBy, orderBy, lower, upper)).map(t -> t.v1); } // [jooq-tools] START [windows] /** * Map this stream to a windowed stream with 1 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple1<Window<T>>> window( WindowSpecification<T> specification1 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 2 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple2<Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 3 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple3<Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 4 distinct windows. 
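 * <p> * An illustrative sketch (assuming only the {@code Window.of(...)} factories and {@link Window} methods shown in the examples above): each element is mapped to one window per specification, so four specifications yield a {@link Tuple4} of windows. * <p> * <code><pre> * // per element: (row number over the whole stream, max over the frame (-1, 1), * //               row number under natural ordering, min over the ordered frame (-1, 1)) * Seq.of(1, 2, 4, 2, 3) * .window(Window.of(), Window.of(-1, 1), Window.of(naturalOrder()), Window.of(naturalOrder(), -1, 1)) * .map(t -> tuple(t.v1.rowNumber(), t.v2.max(), t.v3.rowNumber(), t.v4.min())); * </pre></code>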
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple4<Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 5 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple5<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 6 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple6<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 7 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple7<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7) )) .onClose(this::close); 
} /** * Map this stream to a windowed stream with 8 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple8<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 9 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple9<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 10 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple10<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 11 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple11<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 12 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple12<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 13 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple13<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, partitions13.get(specification13.partition().apply(t.v1)), specification13) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 14 distinct windows. 
*/ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple14<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13, WindowSpecification<T> specification14 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); Map<?, Partition<T>> partitions14 = SeqUtils.partitions(specification14, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, partitions13.get(specification13.partition().apply(t.v1)), specification13), (Window<T>) new WindowImpl<>(t, partitions14.get(specification14.partition().apply(t.v1)), specification14) )) .onClose(this::close); } /** * Map 
this stream to a windowed stream with 15 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple15<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13, WindowSpecification<T> specification14, WindowSpecification<T> specification15 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); Map<?, Partition<T>> partitions14 = SeqUtils.partitions(specification14, buffer); Map<?, Partition<T>> partitions15 = SeqUtils.partitions(specification15, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, 
partitions13.get(specification13.partition().apply(t.v1)), specification13), (Window<T>) new WindowImpl<>(t, partitions14.get(specification14.partition().apply(t.v1)), specification14), (Window<T>) new WindowImpl<>(t, partitions15.get(specification15.partition().apply(t.v1)), specification15) )) .onClose(this::close); } /** * Map this stream to a windowed stream with 16 distinct windows. */ @Generated("This method was generated using jOOQ-tools") default Seq<Tuple16<Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>, Window<T>>> window( WindowSpecification<T> specification1, WindowSpecification<T> specification2, WindowSpecification<T> specification3, WindowSpecification<T> specification4, WindowSpecification<T> specification5, WindowSpecification<T> specification6, WindowSpecification<T> specification7, WindowSpecification<T> specification8, WindowSpecification<T> specification9, WindowSpecification<T> specification10, WindowSpecification<T> specification11, WindowSpecification<T> specification12, WindowSpecification<T> specification13, WindowSpecification<T> specification14, WindowSpecification<T> specification15, WindowSpecification<T> specification16 ) { List<Tuple2<T, Long>> buffer = zipWithIndex().toList(); Map<?, Partition<T>> partitions1 = SeqUtils.partitions(specification1, buffer); Map<?, Partition<T>> partitions2 = SeqUtils.partitions(specification2, buffer); Map<?, Partition<T>> partitions3 = SeqUtils.partitions(specification3, buffer); Map<?, Partition<T>> partitions4 = SeqUtils.partitions(specification4, buffer); Map<?, Partition<T>> partitions5 = SeqUtils.partitions(specification5, buffer); Map<?, Partition<T>> partitions6 = SeqUtils.partitions(specification6, buffer); Map<?, Partition<T>> partitions7 = SeqUtils.partitions(specification7, buffer); Map<?, Partition<T>> partitions8 = SeqUtils.partitions(specification8, buffer); Map<?, Partition<T>> partitions9 = SeqUtils.partitions(specification9, buffer); Map<?, Partition<T>> partitions10 = SeqUtils.partitions(specification10, buffer); Map<?, Partition<T>> partitions11 = SeqUtils.partitions(specification11, buffer); Map<?, Partition<T>> partitions12 = SeqUtils.partitions(specification12, buffer); Map<?, Partition<T>> partitions13 = SeqUtils.partitions(specification13, buffer); Map<?, Partition<T>> partitions14 = SeqUtils.partitions(specification14, buffer); Map<?, Partition<T>> partitions15 = SeqUtils.partitions(specification15, buffer); Map<?, Partition<T>> partitions16 = SeqUtils.partitions(specification16, buffer); return seq(buffer) .map(t -> tuple( (Window<T>) new WindowImpl<>(t, partitions1.get(specification1.partition().apply(t.v1)), specification1), (Window<T>) new WindowImpl<>(t, partitions2.get(specification2.partition().apply(t.v1)), specification2), (Window<T>) new WindowImpl<>(t, partitions3.get(specification3.partition().apply(t.v1)), specification3), (Window<T>) new WindowImpl<>(t, partitions4.get(specification4.partition().apply(t.v1)), specification4), (Window<T>) new WindowImpl<>(t, partitions5.get(specification5.partition().apply(t.v1)), specification5), (Window<T>) new WindowImpl<>(t, partitions6.get(specification6.partition().apply(t.v1)), specification6), (Window<T>) new WindowImpl<>(t, partitions7.get(specification7.partition().apply(t.v1)), specification7), (Window<T>) new WindowImpl<>(t, partitions8.get(specification8.partition().apply(t.v1)), specification8), (Window<T>) new 
WindowImpl<>(t, partitions9.get(specification9.partition().apply(t.v1)), specification9), (Window<T>) new WindowImpl<>(t, partitions10.get(specification10.partition().apply(t.v1)), specification10), (Window<T>) new WindowImpl<>(t, partitions11.get(specification11.partition().apply(t.v1)), specification11), (Window<T>) new WindowImpl<>(t, partitions12.get(specification12.partition().apply(t.v1)), specification12), (Window<T>) new WindowImpl<>(t, partitions13.get(specification13.partition().apply(t.v1)), specification13), (Window<T>) new WindowImpl<>(t, partitions14.get(specification14.partition().apply(t.v1)), specification14), (Window<T>) new WindowImpl<>(t, partitions15.get(specification15.partition().apply(t.v1)), specification15), (Window<T>) new WindowImpl<>(t, partitions16.get(specification16.partition().apply(t.v1)), specification16) )) .onClose(this::close); } // [jooq-tools] END [windows] // Shortcuts to Collectors // ----------------------- /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function)} collector. */ default <K> Map<K, List<T>> groupBy(Function<? super T, ? extends K> classifier) { return collect(Collectors.groupingBy(classifier)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Collector)} collector. */ default <K, A, D> Map<K, D> groupBy(Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return collect(Collectors.groupingBy(classifier, downstream)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Supplier, Collector)} collector. */ default <K, D, A, M extends Map<K, D>> M groupBy(Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, Collector<? super T, A, D> downstream) { return collect(Collectors.groupingBy(classifier, mapFactory, downstream)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining()} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated default String join() { return map(Objects::toString).collect(Collectors.joining()); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence)} * collector. * * @deprecated - Use {@link #toString(CharSequence)} instead. This method * will be removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated default String join(CharSequence delimiter) { return map(Objects::toString).collect(Collectors.joining(delimiter)); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence, CharSequence, CharSequence)} * collector. * * @deprecated - Use * {@link #toString(CharSequence, CharSequence, CharSequence)} instead. This * method will be removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated default String join(CharSequence delimiter, CharSequence prefix, CharSequence suffix) { return map(Objects::toString).collect(Collectors.joining(delimiter, prefix, suffix)); } /** * @see Stream#of(Object) */ static <T> Seq<T> of(T value) { return seq(Stream.of(value)); } /** * @see Stream#of(Object[]) */ @SafeVarargs static <T> Seq<T> of(T... values) { return seq(Stream.of(values)); } /** * The range between two values. 
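 * <p>
 * An illustrative example (the values follow from the half-open bounds below):
 * <code><pre>
 * // ((byte) 1, (byte) 2, (byte) 3, (byte) 4)
 * Seq.range((byte) 1, (byte) 5);
 * </pre></code>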
* * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Byte> range(byte from, byte to) { return range(from, to, (byte) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Byte> range(byte from, byte to, int step) { return to <= from ? empty() : iterate(from, t -> Byte.valueOf((byte) (t + step))).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Short> range(short from, short to) { return range(from, to, (short) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Short> range(short from, short to, int step) { return to <= from ? empty() : iterate(from, t -> Short.valueOf((short) (t + step))).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Character> range(char from, char to) { return range(from, to, (short) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Character> range(char from, char to, int step) { return to <= from ? empty() : iterate(from, t -> Character.valueOf((char) (t + step))).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Integer> range(int from, int to) { return range(from, to, 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Integer> range(int from, int to, int step) { return to <= from ? empty() : iterate(from, t -> Integer.valueOf(t + step)).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Long> range(long from, long to) { return range(from, to, 1L); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Long> range(long from, long to, long step) { return to <= from ? empty() : iterate(from, t -> Long.valueOf(t + step)).limitWhile(t -> t < to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) */ static Seq<Instant> range(Instant from, Instant to) { return range(from, to, Duration.ofSeconds(1)); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (exclusive) * @param step The increase between two values */ static Seq<Instant> range(Instant from, Instant to, Duration step) { return to.compareTo(from) <= 0 ? empty() : iterate(from, t -> t.plus(step)).limitWhile(t -> t.compareTo(to) < 0); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Byte> rangeClosed(byte from, byte to) { return rangeClosed(from, to, (byte) 1); } /** * The range between two values. 
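 * <p>
 * An illustrative example (both bounds are inclusive and the step is applied between values):
 * <code><pre>
 * // ((byte) 1, (byte) 3, (byte) 5)
 * Seq.rangeClosed((byte) 1, (byte) 5, 2);
 * </pre></code>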
* * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Byte> rangeClosed(byte from, byte to, int step) { return to < from ? empty() : iterate(from, t -> Byte.valueOf((byte) (t + step))).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Short> rangeClosed(short from, short to) { return rangeClosed(from, to, (short) 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Short> rangeClosed(short from, short to, int step) { return to < from ? empty() : iterate(from, t -> Short.valueOf((short) (t + step))).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Character> rangeClosed(char from, char to) { return rangeClosed(from, to, 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Character> rangeClosed(char from, char to, int step) { return to < from ? empty() : iterate(from, t -> Character.valueOf((char) (t + step))).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Integer> rangeClosed(int from, int to) { return rangeClosed(from, to, 1); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Integer> rangeClosed(int from, int to, int step) { return to < from ? empty() : iterate(from, t -> Integer.valueOf(t + step)).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Long> rangeClosed(long from, long to) { return rangeClosed(from, to, 1L); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Long> rangeClosed(long from, long to, long step) { return to < from ? empty() : iterate(from, t -> Long.valueOf(t + step)).limitWhile(t -> t <= to); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) */ static Seq<Instant> rangeClosed(Instant from, Instant to) { return rangeClosed(from, to, Duration.ofSeconds(1)); } /** * The range between two values. * * @param from The lower bound (inclusive) * @param to The upper bound (inclusive) * @param step The increase between two values */ static Seq<Instant> rangeClosed(Instant from, Instant to, Duration step) { return to.compareTo(from) < 0 ?
empty() : iterate(from, t -> t.plus(step)).limitWhile(t -> t.compareTo(to) <= 0); } /** * @see Stream#empty() */ static <T> Seq<T> empty() { return seq(Stream.empty()); } /** * @see Stream#iterate(Object, UnaryOperator) */ static <T> Seq<T> iterate(final T seed, final UnaryOperator<T> f) { return seq(Stream.iterate(seed, f)); } /** * @see Stream#generate(Supplier) */ static Seq<Void> generate() { return generate(() -> null); } /** * @see Stream#generate(Supplier) */ static <T> Seq<T> generate(T value) { return generate(() -> value); } /** * @see Stream#generate(Supplier) */ static <T> Seq<T> generate(Supplier<? extends T> s) { return seq(Stream.generate(s)); } /** * Wrap a <code>Stream</code> into a <code>Seq</code>. */ @SuppressWarnings("unchecked") static <T> Seq<T> seq(Stream<? extends T> stream) { if (stream instanceof Seq) return (Seq<T>) stream; return new SeqImpl<>(stream); } /** * Wrap a <code>Stream</code> into a <code>Seq</code>. */ @SuppressWarnings("unchecked") static <T> Seq<T> seq(Seq<? extends T> stream) { return (Seq<T>) stream; } /** * Wrap an <code>IntStream</code> into a <code>Seq</code>. */ static Seq<Integer> seq(IntStream stream) { return new SeqImpl<>(stream.boxed()); } /** * Wrap a <code>LongStream</code> into a <code>Seq</code>. */ static Seq<Long> seq(LongStream stream) { return new SeqImpl<>(stream.boxed()); } /** * Wrap a <code>DoubleStream</code> into a <code>Seq</code>. */ static Seq<Double> seq(DoubleStream stream) { return new SeqImpl<>(stream.boxed()); } /** * Wrap an <code>Iterable</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Iterable<? extends T> iterable) { return seq(iterable.iterator()); } /** * Wrap an <code>Iterator</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Iterator<? extends T> iterator) { return seq(spliteratorUnknownSize(iterator, ORDERED)); } /** * Wrap a <code>Spliterator</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Spliterator<? extends T> spliterator) { return seq(StreamSupport.stream(spliterator, false)); } /** * Wrap a <code>Map</code> into a <code>Seq</code>. */ static <K, V> Seq<Tuple2<K, V>> seq(Map<? extends K, ? extends V> map) { return seq(map.entrySet()).map(e -> tuple(e.getKey(), e.getValue())); } /** * Wrap an <code>Optional</code> into a <code>Seq</code>. */ static <T> Seq<T> seq(Optional<? extends T> optional) { return optional.map(Seq::of).orElseGet(Seq::empty); } /** * Wrap an <code>InputStream</code> into a <code>Seq</code>. * <p> * Client code must close the <code>InputStream</code>. All * {@link IOException}s thrown by the <code>InputStream</code> are wrapped * by {@link UncheckedIOException}s. */ static Seq<Byte> seq(InputStream is) { FunctionalSpliterator<Byte> spliterator = consumer -> { try { int value = is.read(); if (value != -1) consumer.accept((byte) value); return value != -1; } catch (IOException e) { throw new UncheckedIOException(e); } }; return seq(spliterator).onClose(Unchecked.runnable(is::close)); } /** * Wrap a <code>Reader</code> into a <code>Seq</code>. * <p> * Client code must close the <code>Reader</code>. All * {@link IOException}s thrown by the <code>Reader</code> are wrapped * by {@link UncheckedIOException}s.
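 * <p>
 * An illustrative example, assuming a {@link java.io.StringReader} as the source:
 * <code><pre>
 * // (a, b, c)
 * Seq.seq(new StringReader("abc"));
 * </pre></code>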
*/ static Seq<Character> seq(Reader reader) { FunctionalSpliterator<Character> spliterator = consumer -> { try { int value = reader.read(); if (value != -1) consumer.accept((char) value); return value != -1; } catch (IOException e) { throw new UncheckedIOException(e); } }; return seq(spliterator).onClose(Unchecked.runnable(reader::close)); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> */ static <T> Seq<T> cycle(Stream<? extends T> stream) { return cycle(seq(stream)); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> */ static <T> Seq<T> cycle(Iterable<? extends T> iterable) { return cycle(seq(iterable)); } /** * Repeat a stream infinitely. * <p> * <code><pre> * // (1, 2, 3, 1, 2, 3, ...) * Seq.of(1, 2, 3).cycle(); * </pre></code> */ static <T> Seq<T> cycle(Seq<? extends T> stream) { return cycle(stream, -1); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream) */ static <T> Seq<T> cycle(Stream<? extends T> stream, long times) { return cycle(seq(stream), times); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream) */ static <T> Seq<T> cycle(Iterable<? extends T> iterable, long times) { return cycle(seq(iterable), times); } /** * Repeat a stream a certain amount of times. * <p> * <code><pre> * // () * Seq.of(1, 2, 3).cycle(0); * * // (1, 2, 3) * Seq.of(1, 2, 3).cycle(1); * * // (1, 2, 3, 1, 2, 3, 1, 2, 3) * Seq.of(1, 2, 3).cycle(3); * </pre></code> * * @see #cycle(Stream) */ @SuppressWarnings("unchecked") static <T> Seq<T> cycle(Seq<? extends T> stream, long times) { if (times == 0) return empty(); if (times == 1) return (Seq<T>) stream; List<T> list = new ArrayList<>(); Spliterator<T>[] sp = new Spliterator[1]; long[] remaining = new long[] { times }; return SeqUtils.transform(stream, (delegate, action) -> { if (sp[0] == null) { if (delegate.tryAdvance(t -> { list.add(t); action.accept(t); })) return true; else sp[0] = list.spliterator(); } if (!sp[0].tryAdvance(action)) { if (times != -1 && (remaining[0] = remaining[0] - 1) == 1) return false; sp[0] = list.spliterator(); if (!sp[0].tryAdvance(action)) return false; } return true; }); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(Stream<Tuple2<T1, T2>> stream) { return unzip(seq(stream)); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Stream<Tuple2<T1, T2>> stream, Function<T1, U1> leftUnzipper, Function<T2, U2> rightUnzipper) { return unzip(seq(stream), leftUnzipper, rightUnzipper); } /** * Unzip one Stream into two. 
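 * <p>
 * The {@code unzipper} turns each source tuple into the tuple that is split up; an
 * illustrative example (see also the generic example below):
 * <code><pre>
 * // tuple((2, 3), (A, B))
 * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b")), t -> tuple(t.v1 + 1, t.v2.toUpperCase()));
 * </pre></code>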
* <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Stream<Tuple2<T1, T2>> stream, Function<Tuple2<T1, T2>, Tuple2<U1, U2>> unzipper) { return unzip(seq(stream), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Stream<Tuple2<T1, T2>> stream, BiFunction<T1, T2, Tuple2<U1, U2>> unzipper) { return unzip(seq(stream), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(Iterable<Tuple2<T1, T2>> iterable) { return unzip(seq(iterable)); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Iterable<Tuple2<T1, T2>> iterable, Function<T1, U1> leftUnzipper, Function<T2, U2> rightUnzipper) { return unzip(seq(iterable), leftUnzipper, rightUnzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Iterable<Tuple2<T1, T2>> iterable, Function<Tuple2<T1, T2>, Tuple2<U1, U2>> unzipper) { return unzip(seq(iterable), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Iterable<Tuple2<T1, T2>> iterable, BiFunction<T1, T2, Tuple2<U1, U2>> unzipper) { return unzip(seq(iterable), unzipper); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2> Tuple2<Seq<T1>, Seq<T2>> unzip(Seq<Tuple2<T1, T2>> stream) { return unzip(stream, t -> t); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Seq<Tuple2<T1, T2>> stream, Function<T1, U1> leftUnzipper, Function<T2, U2> rightUnzipper) { return unzip(stream, t -> tuple(leftUnzipper.apply(t.v1), rightUnzipper.apply(t.v2))); } /** * Unzip one Stream into two. * <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Seq<Tuple2<T1, T2>> stream, Function<Tuple2<T1, T2>, Tuple2<U1, U2>> unzipper) { return unzip(stream, (t1, t2) -> unzipper.apply(tuple(t1, t2))); } /** * Unzip one Stream into two. 
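 * <p>
 * Here the {@code unzipper} receives the two tuple components directly; an
 * illustrative example (see also the generic example below):
 * <code><pre>
 * // tuple((2, 3), (A, B))
 * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b")), (i, s) -> tuple(i + 1, s.toUpperCase()));
 * </pre></code>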
* <p> * <code><pre> * // tuple((1, 2, 3), (a, b, c)) * Seq.unzip(Seq.of(tuple(1, "a"), tuple(2, "b"), tuple(3, "c"))); * </pre></code> */ static <T1, T2, U1, U2> Tuple2<Seq<U1>, Seq<U2>> unzip(Seq<Tuple2<T1, T2>> stream, BiFunction<T1, T2, Tuple2<U1, U2>> unzipper) { return stream .map(t -> unzipper.apply(t.v1, t.v2)) .duplicate() .map1(s -> s.map(u -> u.v1)) .map2(s -> s.map(u -> u.v2)); } // [jooq-tools] START [zip-static] /** * Zip 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2) { return zip(seq(s1), seq(s2)); } /** * Zip 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3) { return zip(seq(s1), seq(s2), seq(s3)); } /** * Zip 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4) { return zip(seq(s1), seq(s2), seq(s3), seq(s4)); } /** * Zip 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5)); } /** * Zip 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6)); } /** * Zip 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7)); } /** * Zip 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? 
extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8)); } /** * Zip 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9)); } /** * Zip 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10)); } /** * Zip 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11)); } /** * Zip 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12)); } /** * Zip 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? 
extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13)); } /** * Zip 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14)); } /** * Zip 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15)); } /** * Zip 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Stream<? extends T16> s16) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), seq(s16)); } /** * Zip 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2) { return zip(seq(i1), seq(i2)); } /** * Zip 3 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3) { return zip(seq(i1), seq(i2), seq(i3)); } /** * Zip 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4) { return zip(seq(i1), seq(i2), seq(i3), seq(i4)); } /** * Zip 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5)); } /** * Zip 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6)); } /** * Zip 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7)); } /** * Zip 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8)); } /** * Zip 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? 
extends T9> i9) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9)); } /** * Zip 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10)); } /** * Zip 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11)); } /** * Zip 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12)); } /** * Zip 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13)); } /** * Zip 14 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14)); } /** * Zip 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15)); } /** * Zip 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Iterable<? extends T16> i16) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), seq(i16)); } /** * Zip 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2) { return zip(s1, s2, (t1, t2) -> tuple(t1, t2)) .onClose(SeqUtils.closeAll(s1, s2)); } /** * Zip 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? 
extends T3> s3) { return zip(s1, s2, s3, (t1, t2, t3) -> tuple(t1, t2, t3)) .onClose(SeqUtils.closeAll(s1, s2, s3)); } /** * Zip 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4) { return zip(s1, s2, s3, s4, (t1, t2, t3, t4) -> tuple(t1, t2, t3, t4)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4)); } /** * Zip 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5) { return zip(s1, s2, s3, s4, s5, (t1, t2, t3, t4, t5) -> tuple(t1, t2, t3, t4, t5)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5)); } /** * Zip 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6) { return zip(s1, s2, s3, s4, s5, s6, (t1, t2, t3, t4, t5, t6) -> tuple(t1, t2, t3, t4, t5, t6)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6)); } /** * Zip 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7) { return zip(s1, s2, s3, s4, s5, s6, s7, (t1, t2, t3, t4, t5, t6, t7) -> tuple(t1, t2, t3, t4, t5, t6, t7)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7)); } /** * Zip 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, (t1, t2, t3, t4, t5, t6, t7, t8) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8)); } /** * Zip 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? 
extends T9> s9) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, (t1, t2, t3, t4, t5, t6, t7, t8, t9) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9)); } /** * Zip 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10)); } /** * Zip 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11)); } /** * Zip 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12)); } /** * Zip 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? 
extends T13> s13) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13)); } /** * Zip 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14)); } /** * Zip 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15)); } /** * Zip 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(2, "b"), tuple(3, "c")) * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Seq<? extends T16> s16) { return zip(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16, (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16) -> tuple(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16)) .onClose(SeqUtils.closeAll(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16)); } /** * Zip 2 streams into one using a {@link BiFunction} to produce resulting values. 
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, BiFunction<? super T1, ? super T2, ? extends R> zipper) { return zip(seq(s1), seq(s2), zipper); } /** * Zip 3 streams into one using a {@link Function3} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), zipper); } /** * Zip 4 streams into one using a {@link Function4} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), zipper); } /** * Zip 5 streams into one using a {@link Function5} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), zipper); } /** * Zip 6 streams into one using a {@link Function6} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), zipper); } /** * Zip 7 streams into one using a {@link Function7} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), zipper); } /** * Zip 8 streams into one using a {@link Function8} to produce resulting values. 
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), zipper); } /** * Zip 9 streams into one using a {@link Function9} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), zipper); } /** * Zip 10 streams into one using a {@link Function10} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Function10<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), zipper); } /** * Zip 11 streams into one using a {@link Function11} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Function11<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), zipper); } /** * Zip 12 streams into one using a {@link Function12} to produce resulting values. 
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Function12<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), zipper); } /** * Zip 13 streams into one using a {@link Function13} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Function13<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), zipper); } /** * Zip 14 streams into one using a {@link Function14} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Function14<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), zipper); } /** * Zip 15 streams into one using a {@link Function15} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? 
extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Function15<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), zipper); } /** * Zip 16 streams into one using a {@link Function16} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> Seq<R> zip(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Stream<? extends T16> s16, Function16<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? super T16, ? extends R> zipper) { return zip(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), seq(s16), zipper); } /** * Zip 2 streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, BiFunction<? super T1, ? super T2, ? extends R> zipper) { return zip(seq(i1), seq(i2), zipper); } /** * Zip 3 streams into one using a {@link Function3} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), zipper); } /** * Zip 4 streams into one using a {@link Function4} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), zipper); } /** * Zip 5 streams into one using a {@link Function5} to produce resulting values. 
* <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), zipper); } /** * Zip 6 streams into one using a {@link Function6} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), zipper); } /** * Zip 7 streams into one using a {@link Function7} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), zipper); } /** * Zip 8 streams into one using a {@link Function8} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), zipper); } /** * Zip 9 streams into one using a {@link Function9} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? 
extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), zipper); } /** * Zip 10 streams into one using a {@link Function10} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Function10<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), zipper); } /** * Zip 11 streams into one using a {@link Function11} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Function11<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), zipper); } /** * Zip 12 streams into one using a {@link Function12} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Function12<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), zipper); } /** * Zip 13 streams into one using a {@link Function13} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? 
extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Function13<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), zipper); } /** * Zip 14 streams into one using a {@link Function14} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Function14<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), zipper); } /** * Zip 15 streams into one using a {@link Function15} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Function15<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), zipper); } /** * Zip 16 streams into one using a {@link Function16} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> Seq<R> zip(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Iterable<? extends T16> i16, Function16<? 
super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? super T16, ? extends R> zipper) { return zip(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), seq(i16), zipper); } /** * Zip 2 streams into one using a {@link BiFunction} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, BiFunction<? super T1, ? super T2, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next()); } } return seq(new Zip()); } /** * Zip 3 streams into one using a {@link Function3} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next()); } } return seq(new Zip()); } /** * Zip 4 streams into one using a {@link Function4} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next()); } } return seq(new Zip()); } /** * Zip 5 streams into one using a {@link Function5} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? 
extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next()); } } return seq(new Zip()); } /** * Zip 6 streams into one using a {@link Function6} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next()); } } return seq(new Zip()); } /** * Zip 7 streams into one using a {@link Function7} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next()); } } return seq(new Zip()); } /** * Zip 8 streams into one using a {@link Function8} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? 
extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next()); } } return seq(new Zip()); } /** * Zip 9 streams into one using a {@link Function9} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next()); } } return seq(new Zip()); } /** * Zip 10 streams into one using a {@link Function10} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Function10<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? 
extends T10> it10 = s10.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next()); } } return seq(new Zip()); } /** * Zip 11 streams into one using a {@link Function11} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Function11<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next()); } } return seq(new Zip()); } /** * Zip 12 streams into one using a {@link Function12} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Function12<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? 
extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next()); } } return seq(new Zip()); } /** * Zip 13 streams into one using a {@link Function13} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Function13<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next()); } } return seq(new Zip()); } /** * Zip 14 streams into one using a {@link Function14} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Function14<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? 
super T14, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); final Iterator<? extends T14> it14 = s14.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext() && it14.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next(), it14.next()); } } return seq(new Zip()); } /** * Zip 15 streams into one using a {@link Function15} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Function15<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); final Iterator<? extends T14> it14 = s14.iterator(); final Iterator<? 
extends T15> it15 = s15.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext() && it14.hasNext() && it15.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next(), it14.next(), it15.next()); } } return seq(new Zip()); } /** * Zip 16 streams into one using a {@link Function16} to produce resulting values. * <p> * <code><pre> * // ("1:a", "2:b", "3:c") * Seq.of(1, 2, 3).zip(Seq.of("a", "b", "c"), (i, s) -> i + ":" + s) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R> Seq<R> zip(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Seq<? extends T16> s16, Function16<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? super T10, ? super T11, ? super T12, ? super T13, ? super T14, ? super T15, ? super T16, ? extends R> zipper) { final Iterator<? extends T1> it1 = s1.iterator(); final Iterator<? extends T2> it2 = s2.iterator(); final Iterator<? extends T3> it3 = s3.iterator(); final Iterator<? extends T4> it4 = s4.iterator(); final Iterator<? extends T5> it5 = s5.iterator(); final Iterator<? extends T6> it6 = s6.iterator(); final Iterator<? extends T7> it7 = s7.iterator(); final Iterator<? extends T8> it8 = s8.iterator(); final Iterator<? extends T9> it9 = s9.iterator(); final Iterator<? extends T10> it10 = s10.iterator(); final Iterator<? extends T11> it11 = s11.iterator(); final Iterator<? extends T12> it12 = s12.iterator(); final Iterator<? extends T13> it13 = s13.iterator(); final Iterator<? extends T14> it14 = s14.iterator(); final Iterator<? extends T15> it15 = s15.iterator(); final Iterator<? extends T16> it16 = s16.iterator(); class Zip implements Iterator<R> { @Override public boolean hasNext() { return it1.hasNext() && it2.hasNext() && it3.hasNext() && it4.hasNext() && it5.hasNext() && it6.hasNext() && it7.hasNext() && it8.hasNext() && it9.hasNext() && it10.hasNext() && it11.hasNext() && it12.hasNext() && it13.hasNext() && it14.hasNext() && it15.hasNext() && it16.hasNext(); } @Override public R next() { return zipper.apply(it1.next(), it2.next(), it3.next(), it4.next(), it5.next(), it6.next(), it7.next(), it8.next(), it9.next(), it10.next(), it11.next(), it12.next(), it13.next(), it14.next(), it15.next(), it16.next()); } } return seq(new Zip()); } // [jooq-tools] END [zip-static] /** * Zip a Stream with a corresponding Stream of indexes. * <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> */ static <T> Seq<Tuple2<T, Long>> zipWithIndex(Stream<? extends T> stream) { return zipWithIndex(seq(stream)); } /** * Zip a Stream with a corresponding Stream of indexes. 
* <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> */ static <T> Seq<Tuple2<T, Long>> zipWithIndex(Iterable<? extends T> iterable) { return zipWithIndex(seq(iterable)); } /** * Zip a Stream with a corresponding Stream of indexes. * <p> * <code><pre> * // (tuple("a", 0), tuple("b", 1), tuple("c", 2)) * Seq.of("a", "b", "c").zipWithIndex() * </pre></code> */ static <T> Seq<Tuple2<T, Long>> zipWithIndex(Seq<? extends T> stream) { long[] index = { -1L }; return SeqUtils.transform(stream, (delegate, action) -> delegate.tryAdvance(t -> action.accept(tuple(t, index[0] = index[0] + 1)) ) ); } /** * Fold a stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> U foldLeft(Stream<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return foldLeft(seq(stream), seed, function); } /** * Fold a stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> U foldLeft(Iterable<? extends T> iterable, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return foldLeft(seq(iterable), seed, function); } /** * Fold a stream to the left. * <p> * <code><pre> * // "abc" * Seq.of("a", "b", "c").foldLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> U foldLeft(Seq<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? extends U> function) { final Iterator<? extends T> it = stream.iterator(); U result = seed; while (it.hasNext()) result = function.apply(result, it.next()); return result; } /** * Fold a stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> U foldRight(Stream<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return foldRight(seq(stream), seed, function); } /** * Fold a stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> U foldRight(Iterable<? extends T> iterable, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return foldRight(seq(iterable), seed, function); } /** * Fold a stream to the right. * <p> * <code><pre> * // "cba" * Seq.of("a", "b", "c").foldRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> U foldRight(Seq<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return stream.reverse().foldLeft(seed, (u, t) -> function.apply(t, u)); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanLeft(Stream<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return scanLeft(seq(stream), seed, function); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanLeft(Iterable<? extends T> iterable, U seed, BiFunction<? super U, ? super T, ? extends U> function) { return scanLeft(seq(iterable), seed, function); } /** * Scan a stream to the left. * <p> * <code><pre> * // ("", "a", "ab", "abc") * Seq.of("a", "b", "c").scanLeft("", (u, t) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanLeft(Seq<? extends T> stream, U seed, BiFunction<? super U, ? super T, ? 
extends U> function) { @SuppressWarnings("unchecked") U[] value = (U[]) new Object[] { seed }; return Seq.of(seed).concat(SeqUtils.transform(stream, (delegate, action) -> delegate.tryAdvance(t -> action.accept(value[0] = function.apply(value[0], t)) ) )); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanRight(Stream<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return scanRight(seq(stream), seed, function); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanRight(Iterable<? extends T> iterable, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return scanRight(seq(iterable), seed, function); } /** * Scan a stream to the right. * <p> * <code><pre> * // ("", "c", "cb", "cba") * Seq.of("a", "b", "c").scanRight("", (t, u) -> u + t) * </pre></code> */ static <T, U> Seq<U> scanRight(Seq<? extends T> stream, U seed, BiFunction<? super T, ? super U, ? extends U> function) { return stream.reverse().scanLeft(seed, (u, t) -> function.apply(t, u)); } /** * Unfold a function into a stream. * <p> * <code><pre> * // (1, 2, 3, 4, 5) * Seq.unfold(1, i -> i &lt;= 6 ? Optional.of(tuple(i, i + 1)) : Optional.empty()) * </pre></code> */ static <T, U> Seq<T> unfold(U seed, Function<? super U, Optional<Tuple2<T, U>>> unfolder) { Tuple2<? extends T, ? extends U>[] unfolded = new Tuple2[] { tuple((T) null, seed) }; return seq((FunctionalSpliterator<T>) action -> { Optional<? extends Tuple2<? extends T, ? extends U>> result = unfolder.apply(unfolded[0].v2); if (result.isPresent()) action.accept((unfolded[0] = result.get()).v1); return result.isPresent(); }); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ static <T> Seq<T> reverse(Stream<? extends T> stream) { return reverse(seq(stream)); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ static <T> Seq<T> reverse(Iterable<? extends T> iterable) { return reverse(seq(iterable)); } /** * Reverse a stream. * <p> * <code><pre> * // (3, 2, 1) * Seq.of(1, 2, 3).reverse() * </pre></code> */ static <T> Seq<T> reverse(Seq<? extends T> stream) { List<T> list = toList(stream); Collections.reverse(list); return seq(list).onClose(stream::close); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ static <T> Seq<T> shuffle(Stream<? extends T> stream) { return shuffle(seq(stream)); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ static <T> Seq<T> shuffle(Iterable<? extends T> iterable) { return shuffle(seq(iterable)); } /** * Shuffle a stream * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle() * </pre></code> */ static <T> Seq<T> shuffle(Seq<? extends T> stream) { Spliterator[] shuffled = { null }; return SeqUtils.transform(stream, (delegate, action) -> { if (shuffled[0] == null) { List<T> list = seq(delegate).toList(); Collections.shuffle(list); shuffled[0] = list.spliterator(); } return shuffled[0].tryAdvance(action); }).onClose(stream::close); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. 
(2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ static <T> Seq<T> shuffle(Stream<? extends T> stream, Random random) { return shuffle(seq(stream), random); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ static <T> Seq<T> shuffle(Iterable<? extends T> iterable, Random random) { return shuffle(seq(iterable), random); } /** * Shuffle a stream using specified source of randomness * <p> * <code><pre> * // e.g. (2, 3, 1) * Seq.of(1, 2, 3).shuffle(new Random()) * </pre></code> */ static <T> Seq<T> shuffle(Seq<? extends T> stream, Random random) { List<T> list = toList(stream); Collections.shuffle(list, random); return seq(list); } // [jooq-tools] START [crossjoin-static] /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2) { return crossJoin(seq(s1), seq(s2)); } /** * Cross join 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3) { return crossJoin(seq(s1), seq(s2), seq(s3)); } /** * Cross join 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4)); } /** * Cross join 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5)); } /** * Cross join 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6)); } /** * Cross join 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? 
extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7)); } /** * Cross join 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8)); } /** * Cross join 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9)); } /** * Cross join 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10)); } /** * Cross join 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11)); } /** * Cross join 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? 
extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12)); } /** * Cross join 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13)); } /** * Cross join 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14)); } /** * Cross join 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15)); } /** * Cross join 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> crossJoin(Stream<? extends T1> s1, Stream<? extends T2> s2, Stream<? extends T3> s3, Stream<? extends T4> s4, Stream<? extends T5> s5, Stream<? extends T6> s6, Stream<? extends T7> s7, Stream<? 
extends T8> s8, Stream<? extends T9> s9, Stream<? extends T10> s10, Stream<? extends T11> s11, Stream<? extends T12> s12, Stream<? extends T13> s13, Stream<? extends T14> s14, Stream<? extends T15> s15, Stream<? extends T16> s16) { return crossJoin(seq(s1), seq(s2), seq(s3), seq(s4), seq(s5), seq(s6), seq(s7), seq(s8), seq(s9), seq(s10), seq(s11), seq(s12), seq(s13), seq(s14), seq(s15), seq(s16)); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2) { return crossJoin(seq(i1), seq(i2)); } /** * Cross join 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3) { return crossJoin(seq(i1), seq(i2), seq(i3)); } /** * Cross join 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4)); } /** * Cross join 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5)); } /** * Cross join 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6)); } /** * Cross join 7 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7)); } /** * Cross join 8 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8)); } /** * Cross join 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9)); } /** * Cross join 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10)); } /** * Cross join 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11)); } /** * Cross join 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? 
extends T12> i12) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12)); } /** * Cross join 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13)); } /** * Cross join 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14)); } /** * Cross join 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15)); } /** * Cross join 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> crossJoin(Iterable<? extends T1> i1, Iterable<? extends T2> i2, Iterable<? extends T3> i3, Iterable<? extends T4> i4, Iterable<? extends T5> i5, Iterable<? extends T6> i6, Iterable<? 
extends T7> i7, Iterable<? extends T8> i8, Iterable<? extends T9> i9, Iterable<? extends T10> i10, Iterable<? extends T11> i11, Iterable<? extends T12> i12, Iterable<? extends T13> i13, Iterable<? extends T14> i14, Iterable<? extends T15> i15, Iterable<? extends T16> i16) { return crossJoin(seq(i1), seq(i2), seq(i3), seq(i4), seq(i5), seq(i6), seq(i7), seq(i8), seq(i9), seq(i10), seq(i11), seq(i12), seq(i13), seq(i14), seq(i15), seq(i16)); } /** * Cross join 2 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2> Seq<Tuple2<T1, T2>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2) { List<? extends T2> list = s2.toList(); return seq(s1).flatMap(v1 -> seq(list).map(v2 -> tuple(v1, v2))) .onClose(SeqUtils.closeAll(s1, s2)); } /** * Cross join 3 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3> Seq<Tuple3<T1, T2, T3>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3) { List<Tuple2<T2, T3>> list = crossJoin(s2, s3).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2))) .onClose(SeqUtils.closeAll(s2, s3)); } /** * Cross join 4 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4> Seq<Tuple4<T1, T2, T3, T4>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4) { List<Tuple3<T2, T3, T4>> list = crossJoin(s2, s3, s4).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3))) .onClose(SeqUtils.closeAll(s2, s3, s4)); } /** * Cross join 5 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5> Seq<Tuple5<T1, T2, T3, T4, T5>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5) { List<Tuple4<T2, T3, T4, T5>> list = crossJoin(s2, s3, s4, s5).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5)); } /** * Cross join 6 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6> Seq<Tuple6<T1, T2, T3, T4, T5, T6>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6) { List<Tuple5<T2, T3, T4, T5, T6>> list = crossJoin(s2, s3, s4, s5, s6).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6)); } /** * Cross join 7 streams into one. 
* <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7> Seq<Tuple7<T1, T2, T3, T4, T5, T6, T7>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7) { List<Tuple6<T2, T3, T4, T5, T6, T7>> list = crossJoin(s2, s3, s4, s5, s6, s7).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7)); } /** * Cross join 8 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8> Seq<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8) { List<Tuple7<T2, T3, T4, T5, T6, T7, T8>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8)); } /** * Cross join 9 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9> Seq<Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9) { List<Tuple8<T2, T3, T4, T5, T6, T7, T8, T9>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9)); } /** * Cross join 10 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> Seq<Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10) { List<Tuple9<T2, T3, T4, T5, T6, T7, T8, T9, T10>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10)); } /** * Cross join 11 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11> Seq<Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? 
extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11) { List<Tuple10<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11)); } /** * Cross join 12 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12> Seq<Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12) { List<Tuple11<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12)); } /** * Cross join 13 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13> Seq<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13) { List<Tuple12<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13)); } /** * Cross join 14 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14> Seq<Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? 
extends T14> s14) { List<Tuple13<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12, t.v13))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14)); } /** * Cross join 15 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15> Seq<Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15) { List<Tuple14<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12, t.v13, t.v14))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15)); } /** * Cross join 16 streams into one. * <p> * <code><pre> * // (tuple(1, "a"), tuple(1, "b"), tuple(2, "a"), tuple(2, "b")) * Seq.of(1, 2).crossJoin(Seq.of("a", "b")) * </pre></code> */ @Generated("This method was generated using jOOQ-tools") static <T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16> Seq<Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> crossJoin(Seq<? extends T1> s1, Seq<? extends T2> s2, Seq<? extends T3> s3, Seq<? extends T4> s4, Seq<? extends T5> s5, Seq<? extends T6> s6, Seq<? extends T7> s7, Seq<? extends T8> s8, Seq<? extends T9> s9, Seq<? extends T10> s10, Seq<? extends T11> s11, Seq<? extends T12> s12, Seq<? extends T13> s13, Seq<? extends T14> s14, Seq<? extends T15> s15, Seq<? extends T16> s16) { List<Tuple15<T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>> list = crossJoin(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16).toList(); return s1.flatMap(v1 -> seq(list).map(t -> tuple(v1, t.v1, t.v2, t.v3, t.v4, t.v5, t.v6, t.v7, t.v8, t.v9, t.v10, t.v11, t.v12, t.v13, t.v14, t.v15))) .onClose(SeqUtils.closeAll(s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16)); } // [jooq-tools] END [crossjoin-static] /** * Concatenate a number of streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> */ @SafeVarargs static <T> Seq<T> concat(Stream<? extends T>... streams) { return concat(SeqUtils.seqs(streams)); } /** * Concatenate a number of streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> */ @SafeVarargs static <T> Seq<T> concat(Iterable<? extends T>... iterables) { return concat(SeqUtils.seqs(iterables)); } /** * Concatenate a number of streams. * <p> * <code><pre> * // (1, 2, 3, 4, 5, 6) * Seq.of(1, 2, 3).concat(Seq.of(4, 5, 6)) * </pre></code> */ @SafeVarargs static <T> Seq<T> concat(Seq<? extends T>... 
streams) { if (streams == null || streams.length == 0) return Seq.empty(); if (streams.length == 1) return seq(streams[0]); Stream<? extends T> result = streams[0]; for (int i = 1; i < streams.length; i++) result = Stream.concat(result, streams[i]); return seq(result); } /** * Duplicate a Streams into two equivalent Streams. * <p> * <code><pre> * // tuple((1, 2, 3), (1, 2, 3)) * Seq.of(1, 2, 3).duplicate() * </pre></code> */ static <T> Tuple2<Seq<T>, Seq<T>> duplicate(Stream<? extends T> stream) { final Iterator<? extends T> it = stream.iterator(); final LinkedList<T> gap = new LinkedList<>(); @SuppressWarnings("unchecked") final Iterator<T>[] ahead = new Iterator[] { null }; class Duplicate implements Iterator<T> { @Override public boolean hasNext() { if (ahead[0] == null || ahead[0] == this) return it.hasNext(); return !gap.isEmpty(); } @Override public T next() { if (ahead[0] == null) ahead[0] = this; if (ahead[0] == this) { T value = it.next(); gap.offer(value); return value; } else { T value = gap.poll(); if (gap.isEmpty()) ahead[0] = null; return value; } } } return tuple(seq(new Duplicate()), seq(new Duplicate())); } /** * Consume a stream and concatenate all elements. */ static String toString(Stream<?> stream) { return toString(stream, ""); } /** * Consume a stream and concatenate all elements using a separator. */ static String toString(Stream<?> stream, CharSequence delimiter) { return stream.map(Objects::toString).collect(Collectors.joining(delimiter)); } /** * Collect a Stream into a List. */ static <T, C extends Collection<T>> C toCollection(Stream<? extends T> stream, Supplier<? extends C> collectionFactory) { return stream.collect(Collectors.toCollection(collectionFactory)); } /** * Collect a Stream into a List. */ static <T> List<T> toList(Stream<? extends T> stream) { return stream.collect(Collectors.toList()); } /** * Collect a Stream into a Set. */ static <T> Set<T> toSet(Stream<? extends T> stream) { return stream.collect(Collectors.toSet()); } /** * Collect a Stream of {@link Tuple2} into a Map. */ static <T, K, V> Map<K, V> toMap(Stream<Tuple2<K, V>> stream) { return stream.collect(Collectors.toMap(Tuple2::v1, Tuple2::v2)); } /** * Collect a Stream into a Map. */ static <T, K, V> Map<K, V> toMap(Stream<? extends T> stream, Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends V> valueMapper) { return stream.collect(Collectors.toMap(keyMapper, valueMapper)); } /** * Returns a limited interval from a given Stream. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5, 6).slice(3, 5) * </pre></code> */ static <T> Seq<T> slice(Stream<? extends T> stream, long from, long to) { long f = Math.max(from, 0); long t = Math.max(to - f, 0); return seq(stream.skip(f).limit(t)); } /** * Returns a stream with n elements skipped. * <p> * <code><pre> * // (4, 5, 6) * Seq.of(1, 2, 3, 4, 5, 6).skip(3) * </pre></code> */ static <T> Seq<T> skip(Stream<? extends T> stream, long elements) { return seq(stream.skip(elements)); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code>. * <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhile(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> skipWhile(Stream<? extends T> stream, Predicate<? super T> predicate) { return skipUntil(stream, predicate.negate()); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to <code>false</code>. 
* <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipWhileClosed(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> skipWhileClosed(Stream<? extends T> stream, Predicate<? super T> predicate) { return skipUntilClosed(stream, predicate.negate()); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (3, 4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntil(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> skipUntil(Stream<? extends T> stream, Predicate<? super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> !test[0] ? delegate.tryAdvance(t -> { if (test[0] = predicate.test(t)) action.accept(t); }) : delegate.tryAdvance(action) ); } /** * Returns a stream with all elements skipped for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (4, 5) * Seq.of(1, 2, 3, 4, 5).skipUntilClosed(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> skipUntilClosed(Stream<? extends T> stream, Predicate<? super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> !test[0] ? delegate.tryAdvance(t -> test[0] = predicate.test(t)) : delegate.tryAdvance(action) ); } /** * Returns a stream limited to n elements. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5, 6).limit(3) * </pre></code> */ static <T> Seq<T> limit(Stream<? extends T> stream, long elements) { return seq(stream.limit(elements)); } /** * Alias for limit * * @see Seq#limit(long) */ default Seq<T> take(long maxSize) { return limit(maxSize); } /** * Alias for skip * * @see Seq#skip(long) */ default Seq<T> drop(long n) { return skip(n); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitWhile(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> limitWhile(Stream<? extends T> stream, Predicate<? super T> predicate) { return limitUntil(stream, predicate.negate()); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>true</code> * plus the first element for which it evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitWhileClosed(i -> i &lt; 3) * </pre></code> */ static <T> Seq<T> limitWhileClosed(Stream<? extends T> stream, Predicate<? super T> predicate) { return limitUntilClosed(stream, predicate.negate()); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code>. * <p> * <code><pre> * // (1, 2) * Seq.of(1, 2, 3, 4, 5).limitUntil(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> limitUntil(Stream<? extends T> stream, Predicate<? super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> delegate.tryAdvance(t -> { if (!(test[0] = predicate.test(t))) action.accept(t); }) && !test[0] ); } /** * Returns a stream limited to all elements for which a predicate evaluates to <code>false</code> * plus the first element for which it evaluates to <code>true</code>. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, 2, 3, 4, 5).limitUntilClosed(i -> i == 3) * </pre></code> */ @SuppressWarnings("unchecked") static <T> Seq<T> limitUntilClosed(Stream<? extends T> stream, Predicate<? 
super T> predicate) { boolean[] test = { false }; return SeqUtils.transform(stream, (delegate, action) -> !test[0] && delegate.tryAdvance(t -> { test[0] = predicate.test(t); action.accept(t); }) ); } /** * Returns a stream with a given value interspersed between any two values of this stream. * <p> * <code><pre> * // (1, 0, 2, 0, 3, 0, 4) * Seq.of(1, 2, 3, 4).intersperse(0) * </pre></code> */ static <T> Seq<T> intersperse(Stream<? extends T> stream, T value) { return seq(stream.flatMap(t -> Stream.of(value, t)).skip(1)); } /** * Classify this stream's elements according to a given classifier function * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 ) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Stream, Function)}) * @see #groupBy(Function) * @see #partition(Predicate) */ public static <K, T> Seq<Tuple2<K, Seq<T>>> grouped(Stream<? extends T> stream, Function<? super T, ? extends K> classifier) { return grouped(seq(stream), classifier); } /** * Classify this stream's elements according to a given classifier function * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 ) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Stream, Function)}) * @see #groupBy(Function) * @see #partition(Predicate) */ public static <K, T> Seq<Tuple2<K, Seq<T>>> grouped(Iterable<? extends T> iterable, Function<? super T, ? extends K> classifier) { return grouped(seq(iterable), classifier); } /** * Classify this stream's elements according to a given classifier function * <p> * <code><pre> * // Seq(tuple(1, Seq(1, 3, 5)), tuple(0, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 ) * // Seq(tuple(true, Seq(1, 3, 5)), tuple(false, Seq(2, 4, 6))) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0) * </pre></code> * * This is a non-terminal analog of {@link #groupBy(Stream, Function)}) * @see #groupBy(Function) * @see #partition(Predicate) */ public static <K, T> Seq<Tuple2<K, Seq<T>>> grouped(Seq<? extends T> seq, Function<? super T, ? extends K> classifier) { final Iterator<? 
extends T> it = seq.iterator(); class ClassifyingIterator implements Iterator<Tuple2<K, Seq<T>>> { final Map<K, Queue<T>> buffers = new LinkedHashMap<>(); final Queue<K> keys = new LinkedList<>(); class Classification implements Iterator<T> { final K key; Queue<T> buffer; Classification(K key) { this.key = key; } void fetchClassification() { if (buffer == null) buffer = buffers.get(key); while (buffer.isEmpty() && it.hasNext()) fetchNextNewKey(); } @Override public boolean hasNext() { fetchClassification(); return !buffer.isEmpty(); } @Override public T next() { return buffer.poll(); } } void fetchClassifying() { while (it.hasNext() && fetchNextNewKey()); } boolean fetchNextNewKey() { T next = it.next(); K nextK = classifier.apply(next); Queue<T> buffer = buffers.get(nextK); try { if (buffer == null) { buffer = new ArrayDeque<>(); buffers.put(nextK, buffer); keys.add(nextK); return true; } } finally { buffer.offer(next); } return false; } @Override public boolean hasNext() { fetchClassifying(); return !keys.isEmpty(); } @Override public Tuple2<K, Seq<T>> next() { K nextK = keys.poll(); return tuple(nextK, seq(new Classification(nextK))); } } return seq(new ClassifyingIterator()).onClose(seq::close); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}) * * @see #groupBy(Function, Collector) */ public static <K, T, A, D> Seq<Tuple2<K, D>> grouped(Stream<? extends T> stream, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(seq(stream), classifier, downstream); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}) * * @see #groupBy(Function, Collector) */ public static <K, T, A, D> Seq<Tuple2<K, D>> grouped(Iterable<? extends T> iterable, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(seq(iterable), classifier, downstream); } /** * Classify this stream's elements according to a given classifier function * and collect each class's elements using a collector. * <p> * <code><pre> * // Seq(tuple(1, 9), tuple(0, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2, Collectors.summingInt(i -> i)) * // Seq(tuple(true, 9), tuple(false, 12)) * Seq.of(1, 2, 3, 4, 5, 6).grouped(i -> i % 2 != 0, Collectors.summingInt(i -> i)) * </pre></code> This is a non-terminal analog of * {@link #groupBy(Function, Collector)}) * * @see #groupBy(Function, Collector) */ public static <K, T, A, D> Seq<Tuple2<K, D>> grouped(Seq<? extends T> seq, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return grouped(seq, classifier).map(t -> tuple(t.v1, t.v2.collect(downstream))); } /** * Partition a stream into two given a predicate. 
* <p> * <code><pre> * // tuple((1, 3, 5), (2, 4, 6)) * Seq.of(1, 2, 3, 4, 5, 6).partition(i -> i % 2 != 0) * </pre></code> */ static <T> Tuple2<Seq<T>, Seq<T>> partition(Stream<? extends T> stream, Predicate<? super T> predicate) { final Iterator<? extends T> it = stream.iterator(); final LinkedList<T> buffer1 = new LinkedList<>(); final LinkedList<T> buffer2 = new LinkedList<>(); class Partition implements Iterator<T> { final boolean b; Partition(boolean b) { this.b = b; } void fetch() { while (buffer(b).isEmpty() && it.hasNext()) { T next = it.next(); buffer(predicate.test(next)).offer(next); } } LinkedList<T> buffer(boolean test) { return test ? buffer1 : buffer2; } @Override public boolean hasNext() { fetch(); return !buffer(b).isEmpty(); } @Override public T next() { return buffer(b).poll(); } } return tuple(seq(new Partition(true)), seq(new Partition(false))); } /** * Split a stream at a given position. * <p> * <code><pre> * // tuple((1, 2, 3), (4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitAt(3) * </pre></code> */ static <T> Tuple2<Seq<T>, Seq<T>> splitAt(Stream<? extends T> stream, long position) { return seq(stream) .zipWithIndex() .partition(t -> t.v2 < position) // Explicit type parameters to work around this Eclipse compiler bug: // https://bugs.eclipse.org/bugs/show_bug.cgi?id=455945 .map((v1, v2) -> Tuple.<Seq<T>, Seq<T>>tuple( v1.map(t -> t.v1), v2.map(t -> t.v1) )); } /** * Split a stream at the head. * <p> * <code><pre> * // tuple(1, (2, 3, 4, 5, 6)) * Seq.of(1, 2, 3, 4, 5, 6).splitHead(3) * </pre></code> */ static <T> Tuple2<Optional<T>, Seq<T>> splitAtHead(Stream<T> stream) { Iterator<T> it = stream.iterator(); return tuple(it.hasNext() ? Optional.of(it.next()) : Optional.empty(), seq(it)); } // Methods taken from LINQ // ----------------------- /** * Keep only those elements in a stream that are of a given type. * <p> * <code><pre> * // (1, 2, 3) * Seq.of(1, "a", 2, "b", 3).ofType(Integer.class) * </pre></code> */ @SuppressWarnings("unchecked") static <T, U> Seq<U> ofType(Stream<? extends T> stream, Class<? extends U> type) { return seq(stream).filter(type::isInstance).map(t -> (U) t); } /** * Cast all elements in a stream to a given type, possibly throwing a {@link ClassCastException}. * <p> * <code><pre> * // ClassCastException * Seq.of(1, "a", 2, "b", 3).cast(Integer.class) * </pre></code> */ static <T, U> Seq<U> cast(Stream<? extends T> stream, Class<? extends U> type) { return seq(stream).map(type::cast); } // Shortcuts to Collectors // ----------------------- /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function)} collector. */ static <T, K> Map<K, List<T>> groupBy(Stream<? extends T> stream, Function<? super T, ? extends K> classifier) { return seq(stream).groupBy(classifier); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Collector)} collector. */ static <T, K, A, D> Map<K, D> groupBy(Stream<? extends T> stream, Function<? super T, ? extends K> classifier, Collector<? super T, A, D> downstream) { return seq(stream).groupBy(classifier, downstream); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#groupingBy(Function, Supplier, Collector)} collector. */ static <T, K, D, A, M extends Map<K, D>> M groupBy(Stream<? extends T> stream, Function<? super T, ? extends K> classifier, Supplier<M> mapFactory, Collector<? 
super T, A, D> downstream) { return seq(stream).groupBy(classifier, mapFactory, downstream); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining()} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated static String join(Stream<?> stream) { return seq(stream).join(); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence)} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated static String join(Stream<?> stream, CharSequence delimiter) { return seq(stream).join(delimiter); } /** * Shortcut for calling {@link Stream#collect(Collector)} with a * {@link Collectors#joining(CharSequence, CharSequence, CharSequence)} * collector. * * @deprecated - Use {@link #toString()} instead. This method will be * removed in the future as it causes confusion with * {@link #innerJoin(Seq, BiPredicate)}. */ @Deprecated static String join(Stream<?> stream, CharSequence delimiter, CharSequence prefix, CharSequence suffix) { return seq(stream).join(delimiter, prefix, suffix); } // Covariant overriding of Stream return types // ------------------------------------------- @Override Seq<T> filter(Predicate<? super T> predicate); @Override <R> Seq<R> map(Function<? super T, ? extends R> mapper); @Override IntStream mapToInt(ToIntFunction<? super T> mapper); @Override LongStream mapToLong(ToLongFunction<? super T> mapper); @Override DoubleStream mapToDouble(ToDoubleFunction<? super T> mapper); @Override <R> Seq<R> flatMap(Function<? super T, ? extends Stream<? extends R>> mapper); @Override IntStream flatMapToInt(Function<? super T, ? extends IntStream> mapper); @Override LongStream flatMapToLong(Function<? super T, ? extends LongStream> mapper); @Override DoubleStream flatMapToDouble(Function<? super T, ? extends DoubleStream> mapper); @Override Seq<T> distinct(); @Override Seq<T> sorted(); @Override Seq<T> sorted(Comparator<? super T> comparator); @Override Seq<T> peek(Consumer<? super T> action); @Override Seq<T> limit(long maxSize); @Override Seq<T> skip(long n); @Override Seq<T> onClose(Runnable closeHandler); @Override void close(); @Override long count(); // These methods have no effect // ---------------------------- /** * Returns this stream. All Seq streams are sequential, hence the name. * * @return this stream unmodified */ @Override default Seq<T> sequential() { return this; } /** * Seq streams are always sequential and, as such, doesn't support * parallelization. * * @return this sequential stream unmodified * @see <a href="https://github.com/jOOQ/jOOL/issues/130">jOOL Issue #130</a> */ @Override default Seq<T> parallel() { return this; } /** * Returns this stream. All Seq streams are ordered so this method has * no effect. * * @return this stream unmodified */ @Override default Seq<T> unordered() { return this; } @Override default Spliterator<T> spliterator() { return Iterable.super.spliterator(); } @Override default void forEach(Consumer<? super T> action) { Iterable.super.forEach(action); } // Debugging tools // --------------- /** * Generate a nicely formatted representation of this stream. * <p> * Clients should not rely on the concrete formatting of this method, which * is intended for debugging convenience only. 
*/ String format(); /** * Print contents of this stream to {@link System#out}. */ default void printOut() { print(System.out); } /** * Print contents of this stream to {@link System#err}. */ default void printErr() { print(System.err); } /** * Print contents of this stream to the argument writer. */ default void print(PrintWriter writer) { forEach(writer::println); } /** * Print contents of this stream to the argument stream. */ default void print(PrintStream stream) { forEach(stream::println); } }
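The tail of the interface above declares the debugging helpers format(), printOut(), printErr() and print(...). As a quick, minimal illustration of how they are meant to be used (the class name SeqDebugDemo is invented for this sketch; it relies only on the members declared above):

import org.jooq.lambda.Seq;

public class SeqDebugDemo {
    public static void main(String[] args) {
        // printOut() writes one element per line to System.out.
        Seq.of("a", "b", "c").printOut();

        // format() returns a human-readable rendering of the contents;
        // per the javadoc, its exact formatting is not part of the contract.
        System.out.println(Seq.of(1, 2, 3).format());
    }
}

Note that each Seq, like any Stream, is consumed by the call that prints it, which is why the sketch builds a fresh Seq for each helper.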
[#199] Add lazy implementation for Seq.shuffle()
src/main/java/org/jooq/lambda/Seq.java
[#199] Add lazy implementation for Seq.shuffle()
<ide><path>rc/main/java/org/jooq/lambda/Seq.java <ide> * </pre></code> <ide> */ <ide> static <T> Seq<T> shuffle(Seq<? extends T> stream) { <add> return shuffle(stream, null); <add> } <add> <add> /** <add> * Shuffle a stream using specified source of randomness <add> * <p> <add> * <code><pre> <add> * // e.g. (2, 3, 1) <add> * Seq.of(1, 2, 3).shuffle(new Random()) <add> * </pre></code> <add> */ <add> static <T> Seq<T> shuffle(Stream<? extends T> stream, Random random) { <add> return shuffle(seq(stream), random); <add> } <add> <add> /** <add> * Shuffle a stream using specified source of randomness <add> * <p> <add> * <code><pre> <add> * // e.g. (2, 3, 1) <add> * Seq.of(1, 2, 3).shuffle(new Random()) <add> * </pre></code> <add> */ <add> static <T> Seq<T> shuffle(Iterable<? extends T> iterable, Random random) { <add> return shuffle(seq(iterable), random); <add> } <add> <add> /** <add> * Shuffle a stream using specified source of randomness <add> * <p> <add> * <code><pre> <add> * // e.g. (2, 3, 1) <add> * Seq.of(1, 2, 3).shuffle(new Random()) <add> * </pre></code> <add> */ <add> static <T> Seq<T> shuffle(Seq<? extends T> stream, Random random) { <ide> Spliterator[] shuffled = { null }; <ide> <ide> return SeqUtils.transform(stream, (delegate, action) -> { <ide> if (shuffled[0] == null) { <ide> List<T> list = seq(delegate).toList(); <del> Collections.shuffle(list); <add> <add> if (random == null) <add> Collections.shuffle(list); <add> else <add> Collections.shuffle(list, random); <add> <ide> shuffled[0] = list.spliterator(); <ide> } <ide> <ide> return shuffled[0].tryAdvance(action); <ide> <ide> }).onClose(stream::close); <del> } <del> <del> /** <del> * Shuffle a stream using specified source of randomness <del> * <p> <del> * <code><pre> <del> * // e.g. (2, 3, 1) <del> * Seq.of(1, 2, 3).shuffle(new Random()) <del> * </pre></code> <del> */ <del> static <T> Seq<T> shuffle(Stream<? extends T> stream, Random random) { <del> return shuffle(seq(stream), random); <del> } <del> <del> /** <del> * Shuffle a stream using specified source of randomness <del> * <p> <del> * <code><pre> <del> * // e.g. (2, 3, 1) <del> * Seq.of(1, 2, 3).shuffle(new Random()) <del> * </pre></code> <del> */ <del> static <T> Seq<T> shuffle(Iterable<? extends T> iterable, Random random) { <del> return shuffle(seq(iterable), random); <del> } <del> <del> /** <del> * Shuffle a stream using specified source of randomness <del> * <p> <del> * <code><pre> <del> * // e.g. (2, 3, 1) <del> * Seq.of(1, 2, 3).shuffle(new Random()) <del> * </pre></code> <del> */ <del> static <T> Seq<T> shuffle(Seq<? extends T> stream, Random random) { <del> List<T> list = toList(stream); <del> Collections.shuffle(list, random); <del> return seq(list); <ide> } <ide> <ide> // [jooq-tools] START [crossjoin-static]
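The diff above makes the Random-accepting shuffle overloads lazy: the backing list is only collected and shuffled on the first pull from the resulting Seq, and the Random-less overload now delegates with a null Random, falling back to Collections.shuffle(list)'s default source of randomness. A minimal usage sketch of the instance-style call shown in the javadoc examples:

import java.util.Random;
import org.jooq.lambda.Seq;

public class LazyShuffleDemo {
    public static void main(String[] args) {
        // A fixed seed makes the shuffled order reproducible, e.g. (2, 3, 1).
        Seq<Integer> shuffled = Seq.of(1, 2, 3).shuffle(new Random(42L));

        // Nothing has been materialized yet; the source is collected and
        // shuffled only when a terminal operation pulls from the Seq.
        System.out.println(shuffled.toList());
    }
}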
Java
apache-2.0
56dcefe3f8fe341fa131897ebd53508c29fe069f
0
DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,nssales/OG-Platform,jeorme/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,codeaudit/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.conversion; import java.util.List; import javax.time.calendar.ZonedDateTime; import org.apache.commons.lang.Validate; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.analytics.financial.instrument.InstrumentDefinition; import com.opengamma.analytics.financial.instrument.bond.BondFixedSecurityDefinition; import com.opengamma.analytics.financial.instrument.future.BondFutureDefinition; import com.opengamma.core.security.SecuritySource; import com.opengamma.financial.security.bond.BondSecurity; import com.opengamma.financial.security.future.BondFutureDeliverable; import com.opengamma.financial.security.future.BondFutureSecurity; /** * FIXME CASE - BondFutureDefinition needs a reference price. Without a trade, where will it come from? */ public class BondFutureSecurityConverter extends AbstractFutureSecurityVisitor<InstrumentDefinition<?>> { private final SecuritySource _securitySource; private final BondSecurityConverter _bondConverter; public BondFutureSecurityConverter(final SecuritySource securitySource, final BondSecurityConverter bondConverter) { Validate.notNull(securitySource, "security source"); Validate.notNull(bondConverter, "bond converter"); _securitySource = securitySource; _bondConverter = bondConverter; } @Override public InstrumentDefinition<?> visitBondFutureSecurity(final BondFutureSecurity bondFuture) { Validate.notNull(bondFuture); final ZonedDateTime tradingLastDate = bondFuture.getExpiry().getExpiry(); final ZonedDateTime noticeFirstDate = bondFuture.getFirstDeliveryDate(); final ZonedDateTime noticeLastDate = bondFuture.getLastDeliveryDate(); final double notional = bondFuture.getUnitAmount(); final List<BondFutureDeliverable> basket = bondFuture.getBasket(); final int n = basket.size(); final BondFixedSecurityDefinition[] deliverables = new BondFixedSecurityDefinition[n]; final double[] conversionFactor = new double[n]; for (int i = 0; i < n; i++) { final BondFutureDeliverable deliverable = basket.get(i); final BondSecurity bondSecurity = (BondSecurity) _securitySource.getSecurity(deliverable.getIdentifiers()); if (bondSecurity == null) { throw new OpenGammaRuntimeException("No security found with identifiers " + deliverable.getIdentifiers()); } deliverables[i] = (BondFixedSecurityDefinition) bondSecurity.accept(_bondConverter); //TODO check type conversionFactor[i] = deliverable.getConversionFactor(); } return new BondFutureDefinition(tradingLastDate, noticeFirstDate, noticeLastDate, notional, deliverables, conversionFactor); } }
projects/OG-Financial/src/com/opengamma/financial/analytics/conversion/BondFutureSecurityConverter.java
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.conversion; import java.util.List; import javax.time.calendar.ZonedDateTime; import org.apache.commons.lang.Validate; import com.opengamma.analytics.financial.instrument.InstrumentDefinition; import com.opengamma.analytics.financial.instrument.bond.BondFixedSecurityDefinition; import com.opengamma.analytics.financial.instrument.future.BondFutureDefinition; import com.opengamma.core.security.SecuritySource; import com.opengamma.financial.security.bond.BondSecurity; import com.opengamma.financial.security.future.BondFutureDeliverable; import com.opengamma.financial.security.future.BondFutureSecurity; /** * FIXME CASE - BondFutureDefinition needs a reference price. Without a trade, where will it come from? */ public class BondFutureSecurityConverter extends AbstractFutureSecurityVisitor<InstrumentDefinition<?>> { private final SecuritySource _securitySource; private final BondSecurityConverter _bondConverter; public BondFutureSecurityConverter(final SecuritySource securitySource, final BondSecurityConverter bondConverter) { Validate.notNull(securitySource, "security source"); Validate.notNull(bondConverter, "bond converter"); _securitySource = securitySource; _bondConverter = bondConverter; } @Override public InstrumentDefinition<?> visitBondFutureSecurity(final BondFutureSecurity bondFuture) { Validate.notNull(bondFuture); final ZonedDateTime tradingLastDate = bondFuture.getExpiry().getExpiry(); final ZonedDateTime noticeFirstDate = bondFuture.getFirstDeliveryDate(); final ZonedDateTime noticeLastDate = bondFuture.getLastDeliveryDate(); final double notional = bondFuture.getUnitAmount(); final List<BondFutureDeliverable> basket = bondFuture.getBasket(); final int n = basket.size(); final BondFixedSecurityDefinition[] deliverables = new BondFixedSecurityDefinition[n]; final double[] conversionFactor = new double[n]; for (int i = 0; i < n; i++) { final BondFutureDeliverable deliverable = basket.get(i); final BondSecurity bondSecurity = (BondSecurity) _securitySource.getSecurity(deliverable.getIdentifiers()); deliverables[i] = (BondFixedSecurityDefinition) bondSecurity.accept(_bondConverter); //TODO check type conversionFactor[i] = deliverable.getConversionFactor(); } return new BondFutureDefinition(tradingLastDate, noticeFirstDate, noticeLastDate, notional, deliverables, conversionFactor); } }
Adding exception
projects/OG-Financial/src/com/opengamma/financial/analytics/conversion/BondFutureSecurityConverter.java
Adding exception
<ide><path>rojects/OG-Financial/src/com/opengamma/financial/analytics/conversion/BondFutureSecurityConverter.java <ide> <ide> import org.apache.commons.lang.Validate; <ide> <add>import com.opengamma.OpenGammaRuntimeException; <ide> import com.opengamma.analytics.financial.instrument.InstrumentDefinition; <ide> import com.opengamma.analytics.financial.instrument.bond.BondFixedSecurityDefinition; <ide> import com.opengamma.analytics.financial.instrument.future.BondFutureDefinition; <ide> for (int i = 0; i < n; i++) { <ide> final BondFutureDeliverable deliverable = basket.get(i); <ide> final BondSecurity bondSecurity = (BondSecurity) _securitySource.getSecurity(deliverable.getIdentifiers()); <add> if (bondSecurity == null) { <add> throw new OpenGammaRuntimeException("No security found with identifiers " + deliverable.getIdentifiers()); <add> } <ide> deliverables[i] = (BondFixedSecurityDefinition) bondSecurity.accept(_bondConverter); //TODO check type <ide> conversionFactor[i] = deliverable.getConversionFactor(); <ide> }
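The change above is a fail-fast guard: when the SecuritySource cannot resolve a deliverable's identifiers, the converter now throws an OpenGammaRuntimeException that names those identifiers, instead of letting the later bondSecurity.accept(_bondConverter) call fail with an uninformative NullPointerException. The self-contained sketch below shows the same pattern in isolation; the map-backed source, String payload and plain RuntimeException are stand-ins, since the OpenGamma types are not reproduced here.

import java.util.Map;

public class FailFastLookupDemo {
    // Hypothetical stand-in for SecuritySource.getSecurity(identifiers).
    static String requireSecurity(Map<String, String> source, String identifiers) {
        String security = source.get(identifiers);
        if (security == null) {
            // Mirrors the added guard: report the offending identifiers here,
            // rather than failing later with a NullPointerException.
            throw new RuntimeException("No security found with identifiers " + identifiers);
        }
        return security;
    }

    public static void main(String[] args) {
        Map<String, String> source = Map.of("BUNDLE-1", "Bond A");
        System.out.println(requireSecurity(source, "BUNDLE-1")); // Bond A
        requireSecurity(source, "BUNDLE-2");                     // throws, naming BUNDLE-2
    }
}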
Java
mit
0876d84fdb78afa08047396f50374519c978de92
0
FAU-Inf2/kwikshop-android,FAU-Inf2/kwikshop-android
package de.cs.fau.mad.quickshop_android; import android.os.Bundle; import android.support.v4.app.FragmentManager; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBar; import android.support.v7.app.ActionBarActivity; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import cs.fau.mad.quickshop_android.R; public class MainActivity extends ActionBarActivity implements NavigationDrawerFragment.NavigationDrawerCallbacks { /** * Fragment managing the behaviors, interactions and presentation of the navigation drawer. */ private NavigationDrawerFragment mNavigationDrawerFragment; /** * Used to store the last screen title. For use in {@link #restoreActionBar()}. */ private CharSequence mTitle; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mNavigationDrawerFragment = (NavigationDrawerFragment) getSupportFragmentManager().findFragmentById(R.id.navigation_drawer); mTitle = getTitle(); FragmentManager fragmentManager = getSupportFragmentManager(); fragmentManager.beginTransaction().replace(R.id.container, ListFragment.newInstance(0)).commit(); // Set up the drawer. mNavigationDrawerFragment.setUp(R.id.navigation_drawer, (DrawerLayout) findViewById(R.id.drawer_layout)); } @Override public void onNavigationDrawerItemSelected(int position) { // update the main content by replacing fragments Log.d("Position selected: ", "" + position); FragmentManager fragmentManager = getSupportFragmentManager(); switch (position){ case 0: fragmentManager.beginTransaction().replace(R.id.container, ListFragment.newInstance(position)).commit(); break; case 1: fragmentManager.beginTransaction().replace(R.id.container, AddListFragment.newInstance(position)).commit(); break; default: fragmentManager.beginTransaction().replace(R.id.container, ListFragment.newInstance(position)).commit(); } } public void onSectionAttached(int number) { // changes the title in the actionbar by clicking on a section switch (number) { case 0: mTitle = getString(R.string.title_list_overview); break; case 1: mTitle = getString(R.string.title_add_list); break; } } public void restoreActionBar() { ActionBar actionBar = getSupportActionBar(); actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); actionBar.setDisplayShowTitleEnabled(true); actionBar.setTitle(mTitle); } @Override public boolean onCreateOptionsMenu(Menu menu) { if (!mNavigationDrawerFragment.isDrawerOpen()) { // Only show items in the action bar relevant to this screen // if the drawer is not showing. Otherwise, let the drawer // decide what to show in the action bar. getMenuInflater().inflate(R.menu.main, menu); restoreActionBar(); return true; } return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); FragmentManager fragmentManager = getSupportFragmentManager(); switch(id){ case R.id.action_add_list: setTitle(R.string.title_add_list); // did not work? IDK why? fragmentManager.beginTransaction().replace(R.id.container, AddListFragment.newInstance(1)).commit(); break; } return super.onOptionsItemSelected(item); } }
app/src/main/java/de/cs/fau/mad/quickshop_android/MainActivity.java
package de.cs.fau.mad.quickshop_android; import android.app.Activity; import android.support.v7.app.ActionBarActivity; import android.support.v7.app.ActionBar; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.os.Bundle; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.support.v4.widget.DrawerLayout; import cs.fau.mad.quickshop_android.R; public class MainActivity extends ActionBarActivity implements NavigationDrawerFragment.NavigationDrawerCallbacks { /** * Fragment managing the behaviors, interactions and presentation of the navigation drawer. */ private NavigationDrawerFragment mNavigationDrawerFragment; /** * Used to store the last screen title. For use in {@link #restoreActionBar()}. */ private CharSequence mTitle; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mNavigationDrawerFragment = (NavigationDrawerFragment) getSupportFragmentManager().findFragmentById(R.id.navigation_drawer); mTitle = getTitle(); // Set up the drawer. mNavigationDrawerFragment.setUp(R.id.navigation_drawer, (DrawerLayout) findViewById(R.id.drawer_layout)); } @Override public void onNavigationDrawerItemSelected(int position) { // update the main content by replacing fragments FragmentManager fragmentManager = getSupportFragmentManager(); fragmentManager.beginTransaction() .replace(R.id.container, ListFragment.newInstance(position + 1)) .commit(); } public void onSectionAttached(int number) { switch (number) { case 1: mTitle = getString(R.string.title_section1); break; case 2: mTitle = getString(R.string.title_section2); break; case 3: mTitle = getString(R.string.title_section3); break; } } public void restoreActionBar() { ActionBar actionBar = getSupportActionBar(); actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); actionBar.setDisplayShowTitleEnabled(true); actionBar.setTitle(mTitle); } @Override public boolean onCreateOptionsMenu(Menu menu) { if (!mNavigationDrawerFragment.isDrawerOpen()) { // Only show items in the action bar relevant to this screen // if the drawer is not showing. Otherwise, let the drawer // decide what to show in the action bar. getMenuInflater().inflate(R.menu.main, menu); restoreActionBar(); return true; } return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } }
add 'new list Fragment'
app/src/main/java/de/cs/fau/mad/quickshop_android/MainActivity.java
add 'new list Fragment'
<ide><path>pp/src/main/java/de/cs/fau/mad/quickshop_android/MainActivity.java <ide> package de.cs.fau.mad.quickshop_android; <ide> <del>import android.app.Activity; <add>import android.os.Bundle; <add>import android.support.v4.app.FragmentManager; <add>import android.support.v4.widget.DrawerLayout; <add>import android.support.v7.app.ActionBar; <ide> import android.support.v7.app.ActionBarActivity; <del>import android.support.v7.app.ActionBar; <del>import android.support.v4.app.Fragment; <del>import android.support.v4.app.FragmentManager; <del>import android.os.Bundle; <del>import android.view.LayoutInflater; <add>import android.util.Log; <ide> import android.view.Menu; <ide> import android.view.MenuItem; <del>import android.view.View; <del>import android.view.ViewGroup; <del>import android.support.v4.widget.DrawerLayout; <ide> <ide> import cs.fau.mad.quickshop_android.R; <ide> <ide> */ <ide> private CharSequence mTitle; <ide> <add> <ide> @Override <ide> protected void onCreate(Bundle savedInstanceState) { <ide> super.onCreate(savedInstanceState); <ide> getSupportFragmentManager().findFragmentById(R.id.navigation_drawer); <ide> mTitle = getTitle(); <ide> <add> FragmentManager fragmentManager = getSupportFragmentManager(); <add> fragmentManager.beginTransaction().replace(R.id.container, ListFragment.newInstance(0)).commit(); <add> <ide> // Set up the drawer. <ide> mNavigationDrawerFragment.setUp(R.id.navigation_drawer, (DrawerLayout) findViewById(R.id.drawer_layout)); <ide> } <ide> @Override <ide> public void onNavigationDrawerItemSelected(int position) { <ide> // update the main content by replacing fragments <add> <add> Log.d("Position selected: ", "" + position); <add> <ide> FragmentManager fragmentManager = getSupportFragmentManager(); <del> fragmentManager.beginTransaction() <del> .replace(R.id.container, ListFragment.newInstance(position + 1)) <del> .commit(); <add> switch (position){ <add> case 0: <add> fragmentManager.beginTransaction().replace(R.id.container, ListFragment.newInstance(position)).commit(); <add> break; <add> case 1: <add> fragmentManager.beginTransaction().replace(R.id.container, AddListFragment.newInstance(position)).commit(); <add> break; <add> default: <add> fragmentManager.beginTransaction().replace(R.id.container, ListFragment.newInstance(position)).commit(); <add> } <add> <ide> } <ide> <ide> public void onSectionAttached(int number) { <add> // changes the title in the actionbar by clicking on a section <ide> switch (number) { <add> case 0: <add> mTitle = getString(R.string.title_list_overview); <add> break; <ide> case 1: <del> mTitle = getString(R.string.title_section1); <add> mTitle = getString(R.string.title_add_list); <ide> break; <del> case 2: <del> mTitle = getString(R.string.title_section2); <del> break; <del> case 3: <del> mTitle = getString(R.string.title_section3); <del> break; <add> <ide> } <ide> } <ide> <ide> // automatically handle clicks on the Home/Up button, so long <ide> // as you specify a parent activity in AndroidManifest.xml. <ide> int id = item.getItemId(); <add> FragmentManager fragmentManager = getSupportFragmentManager(); <ide> <del> //noinspection SimplifiableIfStatement <del> if (id == R.id.action_settings) { <del> return true; <add> switch(id){ <add> case R.id.action_add_list: <add> setTitle(R.string.title_add_list); // did not work? IDK why? <add> fragmentManager.beginTransaction().replace(R.id.container, AddListFragment.newInstance(1)).commit(); <add> <add> break; <ide> } <ide> <ide> return super.onOptionsItemSelected(item);
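The diff replaces the single replace-fragment call with a switch that maps drawer position 0 to the list overview, position 1 to the add-list screen, and anything else back to the list overview. The dependency-free sketch below illustrates that dispatch shape only; the Screen interface and titles are invented stand-ins for the Android fragments, which cannot run outside an Android project.

import java.util.Map;
import java.util.function.Supplier;

public class DrawerDispatchDemo {
    interface Screen { String title(); }

    static Screen listOverview() { return () -> "List overview"; }
    static Screen addList()      { return () -> "Add list"; }

    // Position-to-screen mapping, equivalent to the switch in the diff.
    static final Map<Integer, Supplier<Screen>> SCREENS = Map.of(
            0, DrawerDispatchDemo::listOverview,
            1, DrawerDispatchDemo::addList);

    static Screen screenFor(int position) {
        // Unknown positions fall back to the list overview, like the default branch.
        return SCREENS.getOrDefault(position, DrawerDispatchDemo::listOverview).get();
    }

    public static void main(String[] args) {
        System.out.println(screenFor(1).title()); // Add list
        System.out.println(screenFor(7).title()); // List overview
    }
}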
Java
bsd-3-clause
51925f28b2c48dceb88a7874b7a66bb11d42bbe6
0
UCDenver-ccp/datasource,UCDenver-ccp/datasource,bill-baumgartner/datasource,bill-baumgartner/datasource
/* * Copyright (C) 2009 Center for Computational Pharmacology, University of Colorado School of Medicine * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * */ package edu.ucdenver.ccp.datasource.fileparsers.irefweb; /* * #%L * Colorado Computational Pharmacology's common module * %% * Copyright (C) 2012 - 2015 Regents of the University of Colorado * %% * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
* #L% */ import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Logger; import edu.ucdenver.ccp.common.download.FtpDownload; import edu.ucdenver.ccp.common.file.CharacterEncoding; import edu.ucdenver.ccp.common.file.reader.Line; import edu.ucdenver.ccp.common.file.reader.StreamLineReader; import edu.ucdenver.ccp.common.ftp.FTPUtil.FileType; import edu.ucdenver.ccp.common.string.RegExPatterns; import edu.ucdenver.ccp.common.string.StringConstants; import edu.ucdenver.ccp.common.string.StringUtil; import edu.ucdenver.ccp.datasource.fileparsers.download.FtpHost; import edu.ucdenver.ccp.datasource.fileparsers.obo.MiOntologyIdTermPair; import edu.ucdenver.ccp.datasource.fileparsers.obo.NcbiTaxonomyIdTermPair; import edu.ucdenver.ccp.datasource.fileparsers.taxonaware.TaxonAwareSingleLineFileRecordReader; import edu.ucdenver.ccp.datasource.identifiers.DataSourceIdentifier; import edu.ucdenver.ccp.datasource.identifiers.NucleotideAccessionResolver; import edu.ucdenver.ccp.datasource.identifiers.ProteinAccessionResolver; import edu.ucdenver.ccp.datasource.identifiers.bind.BindInteractionID; import edu.ucdenver.ccp.datasource.identifiers.dip.DipInteractionID; import edu.ucdenver.ccp.datasource.identifiers.dip.DipInteractorID; import edu.ucdenver.ccp.datasource.identifiers.ebi.intact.IntActID; import edu.ucdenver.ccp.datasource.identifiers.ebi.interpro.PirID; import edu.ucdenver.ccp.datasource.identifiers.ebi.interpro.TigrFamsID; import edu.ucdenver.ccp.datasource.identifiers.ebi.ipi.IpiID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtEntryName; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtIsoformID; import edu.ucdenver.ccp.datasource.identifiers.ensembl.EnsemblGeneID; import edu.ucdenver.ccp.datasource.identifiers.flybase.FlyBaseID; import edu.ucdenver.ccp.datasource.identifiers.hgnc.HgncGeneSymbolID; import edu.ucdenver.ccp.datasource.identifiers.hprd.HprdID; import edu.ucdenver.ccp.datasource.identifiers.irefweb.CrigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.CrogId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IcrigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IcrogId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IrigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IrogId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.RigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.RogId; import edu.ucdenver.ccp.datasource.identifiers.kegg.KeggGeneID; import edu.ucdenver.ccp.datasource.identifiers.mint.MintID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.gene.EntrezGeneID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.gene.GiNumberID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.refseq.RefSeqID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.taxonomy.NcbiTaxonomyID; import edu.ucdenver.ccp.datasource.identifiers.other.AfcsId; import edu.ucdenver.ccp.datasource.identifiers.other.BindTranslationId; import edu.ucdenver.ccp.datasource.identifiers.other.BioGridID; import edu.ucdenver.ccp.datasource.identifiers.other.CamjeDbId; import edu.ucdenver.ccp.datasource.identifiers.other.CorumId; import edu.ucdenver.ccp.datasource.identifiers.other.CygdId; import edu.ucdenver.ccp.datasource.identifiers.other.ImexId; import edu.ucdenver.ccp.datasource.identifiers.other.InnateDbId; import 
edu.ucdenver.ccp.datasource.identifiers.other.MatrixDbId; import edu.ucdenver.ccp.datasource.identifiers.other.MpactId; import edu.ucdenver.ccp.datasource.identifiers.other.MpiDbId; import edu.ucdenver.ccp.datasource.identifiers.other.OphidId; import edu.ucdenver.ccp.datasource.identifiers.other.PrfId; import edu.ucdenver.ccp.datasource.identifiers.other.UniParcID; import edu.ucdenver.ccp.datasource.identifiers.pdb.PdbID; import edu.ucdenver.ccp.datasource.identifiers.sgd.SgdID; import edu.ucdenver.ccp.identifier.publication.PubMedID; /** * This class is used to parse DIPYYYMMDD files which can be downloaded from the DIP website * ftp://ftp.no.embnet.org/irefindex/data/archive/release_4.0/psimi_tab/All.mitab.06042009.txt.zip * * @author Bill Baumgartner * @see IRefWebMitab4_0FileData for file format and version specifications */ public class IRefWebPsiMitab2_6FileParser extends TaxonAwareSingleLineFileRecordReader<IRefWebPsiMitab2_6FileData> { private static final Logger logger = Logger.getLogger(IRefWebPsiMitab2_6FileParser.class); private static final String HEADER = "#uidA\tuidB\taltA\taltB\taliasA\taliasB\tmethod\tauthor\tpmids\ttaxa\ttaxb\tinteractionType\tsourcedb\tinteractionIdentifier\tconfidence\texpansion\tbiological_role_A\tbiological_role_B\texperimental_role_A\texperimental_role_B\tinteractor_type_A\tinteractor_type_B\txrefs_A\txrefs_B\txrefs_Interaction\tAnnotations_A\tAnnotations_B\tAnnotations_Interaction\tHost_organism_taxid\tparameters_Interaction\tCreation_date\tUpdate_date\tChecksum_A\tChecksum_B\tChecksum_Interaction\tNegative\tOriginalReferenceA\tOriginalReferenceB\tFinalReferenceA\tFinalReferenceB\tMappingScoreA\tMappingScoreB\tirogida\tirogidb\tirigid\tcrogida\tcrogidb\tcrigid\ticrogida\ticrogidb\ticrigid\timex_id\tedgetype\tnumParticipants"; // public static final String FTP_FILE_NAME = "All.mitab.03022013.txt.zip"; public static final String FTP_FILE_NAME = "All.mitab.07042015.txt.zip"; public static final CharacterEncoding ENCODING = CharacterEncoding.US_ASCII; public static final String FTP_USER_NAME = "ftp"; @FtpDownload(server = FtpHost.IREFWEB_HOST, path = "irefindex/data/archive/release_10.0/psi_mitab/MITAB2.6/", filename = FTP_FILE_NAME, filetype = FileType.BINARY, username = FTP_USER_NAME, decompress = true, targetFileName="All.mitab.04072015.txt") private File allMitabTxtFile; public IRefWebPsiMitab2_6FileParser(File file, CharacterEncoding encoding) throws IOException, IllegalArgumentException { super(file, encoding, null); } public IRefWebPsiMitab2_6FileParser(File workDirectory, boolean clean) throws IOException { super(workDirectory, ENCODING, null, null, null, clean, null); } public IRefWebPsiMitab2_6FileParser(File file, CharacterEncoding encoding, Set<NcbiTaxonomyID> taxonIds) throws IOException, IllegalArgumentException { super(file, encoding, taxonIds); } public IRefWebPsiMitab2_6FileParser(File workDirectory, boolean clean, Set<NcbiTaxonomyID> taxonIds) throws IOException { super(workDirectory, ENCODING, null, null, null, clean, taxonIds); } @Override protected NcbiTaxonomyID getLineTaxon(Line line) { IRefWebPsiMitab2_6FileData record = parseRecordFromLine(line); // should probably return both here IRefWebInteractor interactorA = record.getInteractorA(); if (interactorA.getNcbiTaxonomyId() != null) { return interactorA.getNcbiTaxonomyId().getTaxonomyId(); } IRefWebInteractor interactorB = record.getInteractorB(); if (interactorB.getNcbiTaxonomyId() != null) { return interactorB.getNcbiTaxonomyId().getTaxonomyId(); } return null; } @Override 
protected StreamLineReader initializeLineReaderFromDownload(CharacterEncoding encoding, String skipLinePrefix) throws IOException { return new StreamLineReader(allMitabTxtFile, encoding, skipLinePrefix); } @Override protected String getFileHeader() throws IOException { return readLine().getText(); } @Override protected String getExpectedFileHeader() throws IOException { return HEADER; } /** * Extracts information from a line from a file and returns a IRefWebPsiMitab2_5FileData object. * * @param miOntologyTermResolver * @param line * @return */ @Override public IRefWebPsiMitab2_6FileData parseRecordFromLine(Line line) { String[] toks = line.getText().split("\\t", -1); if (toks.length == 54) { IRefWebInteractor interactorA = getInteractor(toks[0], toks[2], toks[4], toks[9], toks[16], toks[18], toks[20], toks[22], toks[25], toks[32], toks[36], toks[38], toks[40], toks[42], toks[45], toks[48]); IRefWebInteractor interactorB = getInteractor(toks[1], toks[3], toks[5], toks[10], toks[17], toks[19], toks[21], toks[23], toks[26], toks[33], toks[37], toks[39], toks[41], toks[43], toks[46], toks[49]); IRefWebInteraction interaction = getInteraction(toks[6], toks[7], toks[8], toks[11], toks[13], toks[14], toks[15], toks[24], toks[27], toks[28], toks[29], toks[34], toks[35], toks[44], toks[47], toks[50], toks[51], toks[52], toks[53]); IRefWebInteractionSourceDatabase sourceDb = MiOntologyIdTermPair.parseString( IRefWebInteractionSourceDatabase.class, toks[12]); String creationDate = toks[30]; String updateDate = toks[31]; return new IRefWebPsiMitab2_6FileData(sourceDb, creationDate, updateDate, interactorA, interactorB, interaction, line.getByteOffset(), line.getLineNumber()); } String errorMessage = "Unexpected number of tokens (" + toks.length + " != 54) on line: " + line; throw new IllegalArgumentException("IRefWeb file format appears to have changed: " + errorMessage); } /** * @return */ private IRefWebInteraction getInteraction(String detectionMethodStr, String authorStr, String pmidsStr, String interactionTypeStr, String interactionIdStr, String confidenceStr, String expansionStr, String interactionXrefsStr, String interactionAnnotationsStr, String hostOrgTaxonomyIdStr, String interactionParametersStr, String interactionChecksumStr, String negativeStr, String irigidStr, String crigidStr, String icrigidStr, String imexIdStr, String edgeTypeStr, String numParticipantsStr) { if (!interactionXrefsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the xrefs_interaction column. " + "This column has always been empty. Code changes likely required."); } if (!interactionAnnotationsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the annotations_interaction column. " + "This column has always been empty. Code changes likely required."); } if (!interactionParametersStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the parameters_interaction column. " + "This column has always been empty. Code changes likely required."); } IRefWebInteractionDetectionMethod detectionMethod = null; if (!detectionMethodStr.trim().equals(StringConstants.HYPHEN_MINUS)) { detectionMethod = MiOntologyIdTermPair.parseString(IRefWebInteractionDetectionMethod.class, detectionMethodStr); } String author = (authorStr.trim().equals(StringConstants.HYPHEN_MINUS)) ? 
null : authorStr; Set<PubMedID> pmids = parsePmidsStr(pmidsStr); IRefWebInteractionType interactionType = null; if (!interactionTypeStr.trim().equals(StringConstants.HYPHEN_MINUS)) { interactionType = MiOntologyIdTermPair.parseString(IRefWebInteractionType.class, interactionTypeStr); } Set<DataSourceIdentifier<?>> interactionDbIds = resolveInteractionDbIds(interactionIdStr); Set<String> confidence = parseConfidenceStr(confidenceStr); String expansion = expansionStr; String xrefsInteraction = null; String annotationsInteraction = null; IRefWebHostOrganism hostOrgTaxonomyId = null; if (!hostOrgTaxonomyIdStr.trim().equals(StringConstants.HYPHEN_MINUS)) { hostOrgTaxonomyId = NcbiTaxonomyIdTermPair.parseString(IRefWebHostOrganism.class, hostOrgTaxonomyIdStr); } String parametersInteraction = null; RigId checksumInteraction = new RigId(StringUtil.removePrefix(interactionChecksumStr, "rigid:")); boolean negative = Boolean.parseBoolean(negativeStr); IrigId irigid = new IrigId(irigidStr); CrigId crigid = new CrigId(crigidStr); IcrigId icrigid = new IcrigId(icrigidStr); ImexId imexId = (imexIdStr.trim().equals(StringConstants.HYPHEN_MINUS)) ? null : new ImexId(imexIdStr); String edgeType = edgeTypeStr; int numParticipants = Integer.parseInt(numParticipantsStr); return new IRefWebInteraction(detectionMethod, author, pmids, interactionType, interactionDbIds, confidence, expansion, xrefsInteraction, annotationsInteraction, hostOrgTaxonomyId, parametersInteraction, checksumInteraction, negative, irigid, crigid, icrigid, imexId, edgeType, numParticipants); } /** * @param interactionIdStr * @return */ private Set<DataSourceIdentifier<?>> resolveInteractionDbIds(String interactionIdStr) { Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); for (String id : interactionIdStr.split(RegExPatterns.PIPE)) { if (id.startsWith("edgetype:") || id.endsWith(":-")) { // do nothing - this is a redundant storage of edge type or a null identifier } else if (id.startsWith("BIND_Translation:")) { ids.add(new BindTranslationId(StringUtil.removePrefix(id, "BIND_Translation:"))); } else if (id.startsWith("irigid:")) { ids.add(new IrigId(StringUtil.removePrefix(id, "irigid:"))); } else if (id.startsWith("rigid:")) { ids.add(new RigId(StringUtil.removePrefix(id, "rigid:"))); } else if (id.startsWith("grid:")) { ids.add(new BioGridID(StringUtil.removePrefix(id, "grid:"))); } else if (id.startsWith("bind:")) { ids.add(new BindInteractionID(StringUtil.removePrefix(id, "bind:"))); } else if (id.startsWith("MPACT:")) { ids.add(new MpactId(StringUtil.removePrefix(id, "MPACT:"))); } else if (id.startsWith("mint:")) { ids.add(new MintID(StringUtil.removePrefix(id, "mint:"))); } else if (id.startsWith("intact:")) { ids.add(new IntActID(StringUtil.removePrefix(id, "intact:"))); } else if (id.startsWith("dip:")) { ids.add(new DipInteractionID(StringUtil.removePrefix(id, "dip:"))); } else if (id.startsWith("ophid:")) { ids.add(new OphidId(StringUtil.removePrefix(id, "ophid:"))); } else if (id.startsWith("InnateDB:")) { String idbId = StringUtil.removePrefix(id, "InnateDB:"); if (idbId.startsWith("IDB-")) { idbId = StringUtil.removePrefix(idbId, "IDB-"); } ids.add(new InnateDbId(idbId)); } else if (id.startsWith("innatedb:")) { String idbId = StringUtil.removePrefix(id, "innatedb:"); if (idbId.startsWith("IDB-")) { idbId = StringUtil.removePrefix(idbId, "IDB-"); } ids.add(new InnateDbId(idbId)); } else if (id.startsWith("CORUM:")) { ids.add(new CorumId(StringUtil.removePrefix(id, "CORUM:"))); } else if 
(id.startsWith("mpilit:")) { ids.add(new MpiDbId(StringUtil.removePrefix(id, "mpilit:"))); } else if (id.startsWith("mpiimex:")) { ids.add(new MpiDbId(StringUtil.removePrefix(id, "mpiimex:"))); } else if (id.startsWith("MatrixDB:")) { ids.add(new MatrixDbId(StringUtil.removePrefix(id, "MatrixDB:"))); } else if (id.startsWith("biogrid:")) { ids.add(new BioGridID(StringUtil.removePrefix(id, "biogrid:"))); } else if (id.startsWith("pubmed:")) { ids.add(new PubMedID(StringUtil.removePrefix(id, "pubmed:"))); } else if (id.startsWith("HPRD")) { try { ids.add(new HprdID(StringUtil.removePrefix(id, "HPRD:"))); } catch (IllegalArgumentException e) { logger.warn(e.getMessage()); } } else { // throw new IllegalArgumentException("Unknown id prefix: " + id); logger.warn("Unknown id prefix: " + id); // return null; } } return ids; } private Set<DataSourceIdentifier<?>> resolveInteractorIds(String interactorIdStr) { Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); for (String id : interactorIdStr.split(RegExPatterns.PIPE)) { ids.add(resolveInteractorId(id)); } return ids; } /** * @param ids * @param id */ private DataSourceIdentifier<?> resolveInteractorId(String idStr) { if (idStr.trim().equals(StringConstants.HYPHEN_MINUS)) { return null; } if (idStr.startsWith("xx:")) { return null; } if (idStr.startsWith("other:")) { return null; } if (idStr.equals("null")) { return null; } try { if (idStr.startsWith("uniprotkb:")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprotkb:")); } else if (idStr.startsWith("uniprot:")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprot:")); } else if (idStr.startsWith("Swiss-Prot:")) { return getUniprotId(StringUtil.removePrefix(idStr, "Swiss-Prot:")); } else if (idStr.startsWith("uniprot/swiss-prot:")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprot/swiss-prot:")); } else if (idStr.startsWith("UniProtKB/TrEMBL:")) { return getUniprotId(StringUtil.removePrefix(idStr, "UniProtKB/TrEMBL:")); } else if (idStr.startsWith("SP:")) { return getUniprotId(StringUtil.removePrefix(idStr, "SP:")); } else if (idStr.startsWith("uniprot knowledge base")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprot knowledge base:")); } else if (idStr.startsWith("TREMBL")) { return getUniprotId(StringUtil.removePrefix(idStr, "TREMBL:")); } else if (idStr.startsWith("entrezgene/locuslink:")) { return new EntrezGeneID(StringUtil.removePrefix(idStr, "entrezgene/locuslink:")); } else if (idStr.startsWith("entrez gene/locuslink:")) { return new EntrezGeneID(StringUtil.removePrefix(idStr, "entrez gene/locuslink:")); } else if (idStr.startsWith("HPRD:")) { return new HprdID(StringUtil.removePrefix(idStr, "HPRD:")); } else if (idStr.startsWith("CORUM:")) { return new CorumId(StringUtil.removePrefix(idStr, "CORUM:")); } else if (idStr.startsWith("crogid:")) { return new CrogId(StringUtil.removePrefix(idStr, "crogid:")); } else if (idStr.startsWith("icrogid:")) { return new IcrogId(StringUtil.removePrefix(idStr, "icrogid:")); } else if (idStr.startsWith("refseq:")) { return getRefseqAccession(StringUtil.removePrefix(idStr, "refseq:").toUpperCase()); } else if (idStr.startsWith("RefSeq:")) { return getRefseqAccession(StringUtil.removePrefix(idStr, "RefSeq:").toUpperCase()); } else if (idStr.startsWith("rogid:")) { return new RogId(StringUtil.removePrefix(idStr, "rogid:")); } else if (idStr.startsWith("irogid:")) { return new IrogId(StringUtil.removePrefix(idStr, "irogid:")); } else if (idStr.startsWith("PDB:")) { return new 
PdbID(StringUtil.removePrefix(idStr, "PDB:")); } else if (idStr.startsWith("complex:")) { return new RogId(StringUtil.removePrefix(idStr, "complex:")); } else if (idStr.startsWith("cygd:")) { return new CygdId(StringUtil.removePrefix(idStr, "cygd:")); } else if (idStr.startsWith("prf:")) { return new PrfId(StringUtil.removePrefix(idStr, "prf:")); } else if (idStr.startsWith("mpilit:")) { return new MpiDbId(StringUtil.removePrefix(idStr, "mpilit:")); } else if (idStr.startsWith("mpiimex:")) { return new MpiDbId(StringUtil.removePrefix(idStr, "mpiimex:")); } else if (idStr.startsWith("pir:")) { return new PirID(StringUtil.removePrefix(idStr, "pir:")); } else if (idStr.startsWith("PIR:")) { return new PirID(StringUtil.removePrefix(idStr, "PIR:")); } else if (idStr.startsWith("mint:")) { return new MintID(StringUtil.removePrefix(idStr, "mint:")); } else if (idStr.startsWith("dip:")) { return new DipInteractorID(StringUtil.removePrefix(idStr, "dip:")); } else if (idStr.startsWith("camjedb:")) { return new CamjeDbId(StringUtil.removePrefix(idStr, "camjedb:")); } else if (idStr.startsWith("rcsb pdb:")) { return new PdbID(StringUtil.removePrefix(idStr, "rcsb pdb:")); } else if (idStr.startsWith("gi:")) { return new GiNumberID(StringUtil.removePrefix(idStr, "gi:")); } else if (idStr.startsWith("genbank_protein_gi:")) { return new GiNumberID(StringUtil.removePrefix(idStr, "genbank_protein_gi:")); } else if (idStr.startsWith("intact:")) { return new IntActID(StringUtil.removePrefix(idStr, "intact:")); } else if (idStr.startsWith("ipi:")) { return new IpiID(StringUtil.removePrefix(idStr, "ipi:")); } else if (idStr.startsWith("Ensembl:")) { return new EnsemblGeneID(StringUtil.removePrefix(idStr, "Ensembl:")); } else if (idStr.startsWith("MatrixDB:")) { return new MatrixDbId(StringUtil.removePrefix(idStr, "MatrixDB:")); } else if (idStr.startsWith("SGD:")) { return new SgdID(StringUtil.removePrefix(idStr, "SGD:")); } else if (idStr.startsWith("TIGR:")) { return new TigrFamsID(StringUtil.removePrefix(idStr, "TIGR:")); } else if (idStr.startsWith("afcs:")) { return new AfcsId(StringUtil.removePrefix(idStr, "afcs:")); } else if (idStr.startsWith("pubmed:")) { return new PubMedID(StringUtil.removePrefix(idStr, "pubmed:")); } else if (idStr.startsWith("uniparc:")) { return new UniParcID(StringUtil.removePrefix(idStr, "uniparc:")); } else if (idStr.startsWith("FlyBase:")) { return new FlyBaseID(StringUtil.removePrefix(idStr, "FlyBase:")); } else if (idStr.startsWith("KEGG:")) { return new KeggGeneID(StringUtil.removePrefix(idStr, "KEGG:")); } else if (idStr.startsWith("InnateDB:")) { return new InnateDbId(StringUtil.removePrefix(idStr, "InnateDB:")); } else if (idStr.startsWith("emb:")) { return ProteinAccessionResolver.resolveProteinAccession(StringUtil.removePrefix(idStr, "emb:")); } else if (idStr.startsWith("dbj:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "dbj:")); } else if (idStr.startsWith("ddbj/embl/genbank:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "ddbj/embl/genbank:")); } else if (idStr.startsWith("GenBank:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "GenBank:")); } else if (idStr.startsWith("genbank indentifier:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "genbank indentifier:")); } else if (idStr.startsWith("GB:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "GB:")); } else if (idStr.startsWith("gb:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "gb:")); } else if 
(idStr.startsWith("tpg:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "tpg:")); } else if (idStr.startsWith("pdb:")) { return new PdbID(StringUtil.removePrefix(idStr, "pdb:")); } else if (idStr.startsWith("flybase:")) { return new FlyBaseID(StringUtil.removePrefix(idStr, "flybase:")); } else if (idStr.startsWith("sgd:")) { return new FlyBaseID(StringUtil.removePrefix(idStr, "sgd:")); } else if (idStr.startsWith("entrezgene:")) { return new EntrezGeneID(StringUtil.removePrefix(idStr, "entrezgene:")); } } catch (IllegalArgumentException e) { logger.warn("Invalid identifier due to " + e.getMessage()); logger.warn("Trying identifier as GenBank ID..."); return getGenbankAccession(idStr); } // throw new IllegalArgumentException("Unknown id prefix: " + idStr); logger.warn("Unknown id prefix: " + idStr); return null; } /** * @param removePrefix * @return */ private DataSourceIdentifier<?> getUniprotId(String idStr) { try { if (idStr.contains(StringConstants.HYPHEN_MINUS)) { return new UniProtIsoformID(idStr); } else if (idStr.contains(StringConstants.UNDERSCORE)) { return new UniProtEntryName(idStr); } return new UniProtID(idStr); } catch (IllegalArgumentException e) { logger.warn("Detected invalid UniProt accession: " + idStr); return null; } } private DataSourceIdentifier<?> getRefseqAccession(String acc) { try { return new RefSeqID(acc); } catch (IllegalArgumentException e) { return getGenbankAccession(acc); } } /** * @param removePrefix * @return */ private DataSourceIdentifier<?> getGenbankAccession(String acc) { try { return NucleotideAccessionResolver.resolveNucleotideAccession(acc); } catch (IllegalArgumentException e) { try { return ProteinAccessionResolver.resolveProteinAccession(acc); } catch (IllegalArgumentException e2) { logger.warn("Detected invalid GenBank accession: " + acc); return null; } } } /** * @param pmidsStr * @return */ private Set<PubMedID> parsePmidsStr(String pmidsStr) { if (pmidsStr.trim().equals(StringConstants.HYPHEN_MINUS) || pmidsStr.trim().equals("pubmed:0")) { return null; } String[] toks = pmidsStr.split(RegExPatterns.PIPE); Set<PubMedID> pmids = new HashSet<PubMedID>(); for (String tok : toks) { try { pmids.add(new PubMedID(StringUtil.removePrefix(tok, "pubmed:"))); } catch (IllegalArgumentException e) { logger.warn("Detected invalid pubmed id: " + e.getMessage()); } } return pmids; } /** * @param confidenceStr * @return */ private Set<String> parseConfidenceStr(String confidenceStr) { if (confidenceStr.trim().equals(StringConstants.HYPHEN_MINUS)) { return null; } String[] toks = confidenceStr.split(RegExPatterns.PIPE); Set<String> confidences = new HashSet<String>(); for (String tok : toks) { confidences.add(tok); } return confidences; } /** * @return */ private IRefWebInteractor getInteractor(String uniqueIdStr, String altIdStr, String aliasStr, String taxIdStr, String biologicalRoleStr, String experimentalRoleStr, String interactorTypeStr, String dbXrefsStr, String annotationsStr, String checksumStr, String originalReferenceStr, String finalReferenceStr, String mappingScoreStr, String irogidStr, String crogidStr, String icrogidStr) { if (!dbXrefsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the xrefs_A or xrefs_B column. " + "This column has always been empty. Code changes likely required."); } if (!annotationsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the Annotations_A or Annotations_B column. 
" + "This column has always been empty. Code changes likely required."); } DataSourceIdentifier<?> uniqueId = resolveInteractorId(uniqueIdStr); Set<DataSourceIdentifier<?>> alternateIds = resolveInteractorIds(altIdStr); Set<DataSourceIdentifier<?>> aliasIds = resolveAliasIds(aliasStr); Set<String> aliasSymbols = resolveAliasSymbols(aliasStr); IRefWebInteractorOrganism ncbiTaxonomyId = null; if (!taxIdStr.trim().equals(StringConstants.HYPHEN_MINUS)) { ncbiTaxonomyId = NcbiTaxonomyIdTermPair.parseString(IRefWebInteractorOrganism.class, taxIdStr); } Set<DataSourceIdentifier<?>> dbXReferenceIds = null; IRefWebInteractorBiologicalRole biologicalRole = null; if (!biologicalRoleStr.trim().equals(StringConstants.HYPHEN_MINUS)) { biologicalRole = MiOntologyIdTermPair.parseString(IRefWebInteractorBiologicalRole.class, biologicalRoleStr); } IRefWebInteractorExperimentalRole experimentalRole = null; if (!experimentalRoleStr.trim().equals(StringConstants.HYPHEN_MINUS)) { experimentalRole = MiOntologyIdTermPair.parseString(IRefWebInteractorExperimentalRole.class, experimentalRoleStr); } IRefWebInteractorType interactorType = null; if (!interactorTypeStr.trim().equals(StringConstants.HYPHEN_MINUS)) { interactorType = MiOntologyIdTermPair.parseString(IRefWebInteractorType.class, interactorTypeStr); } String annotations = null; RogId checksum = new RogId(StringUtil.removePrefix(checksumStr, "rogid:")); DataSourceIdentifier<?> originalReference = resolveInteractorId(originalReferenceStr); DataSourceIdentifier<?> finalReference = resolveInteractorId(finalReferenceStr); String mappingScore = mappingScoreStr; IrogId irogid = new IrogId(irogidStr); CrogId crogid = new CrogId(crogidStr); IcrogId icrogid = new IcrogId(icrogidStr); return new IRefWebInteractor(uniqueId, alternateIds, aliasSymbols, aliasIds, ncbiTaxonomyId, dbXReferenceIds, biologicalRole, experimentalRole, interactorType, annotations, checksum, originalReference, finalReference, mappingScore, irogid, crogid, icrogid); } /** * @param aliasStr * @return */ private Set<String> resolveAliasSymbols(String aliasStr) { Set<String> aliases = new HashSet<String>(); for (String alias : aliasStr.split(RegExPatterns.PIPE)) { String aliasSymbol = resolveAliasSymbol(alias); if (aliasSymbol != null) { aliases.add(aliasSymbol); } } return aliases; } /** * @param alias * @return */ private String resolveAliasSymbol(String aliasStr) { if (aliasStr.startsWith("entrezgene/locuslink:")) { return new String(StringUtil.removePrefix(aliasStr, "entrezgene/locuslink:")); } return null; } /** * @param aliasStr * @return */ private Set<DataSourceIdentifier<?>> resolveAliasIds(String aliasStr) { Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); for (String alias : aliasStr.split(RegExPatterns.PIPE)) { if (!alias.equals(StringConstants.HYPHEN_MINUS)) { DataSourceIdentifier<?> id = resolveAliasId(alias); if (id != null) { ids.add(id); } } } return ids; } /** * @param id * @return */ private DataSourceIdentifier<?> resolveAliasId(String aliasStr) { if (aliasStr.startsWith("uniprotkb:")) { return new UniProtEntryName(StringUtil.removePrefix(aliasStr, "uniprotkb:")); } else if (aliasStr.startsWith("entrezgene/locuslink:")) { // ignore, it is a gene symbol and is handled by resolveAliasSymbols() return null; } else if (aliasStr.startsWith("crogid:")) { return new CrogId(StringUtil.removePrefix(aliasStr, "crogid:")); } else if (aliasStr.startsWith("icrogid:")) { return new IcrogId(StringUtil.removePrefix(aliasStr, "icrogid:")); } else if 
(aliasStr.startsWith("rogid:")) { return new RogId(StringUtil.removePrefix(aliasStr, "rogid:")); } else if (aliasStr.startsWith("refseq:")) { return getRefseqAccession(StringUtil.removePrefix(aliasStr, "refseq:")); } else if (aliasStr.startsWith("hgnc:")) { return new HgncGeneSymbolID(StringUtil.removePrefix(aliasStr, "hgnc:")); } throw new IllegalArgumentException("Unknown id prefix: " + aliasStr); } public static void main(String[] args) { BasicConfigurator.configure(); File irefwebFile = new File("/tmp/irefweb.sample"); try { IRefWebPsiMitab2_6FileParser parser = new IRefWebPsiMitab2_6FileParser(irefwebFile, CharacterEncoding.US_ASCII); while (parser.hasNext()) { parser.next(); } } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
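Most of the parser above is a long prefix dispatch: each identifier arrives as "prefix:value" (for example "uniprotkb:P12345" or "refseq:NP_001234", with "-" marking an absent PSI-MITAB field) and is stripped of its prefix and wrapped in a typed identifier, with unknown prefixes logged and dropped. The stand-alone sketch below shows that core pattern; the TypedId record is hypothetical and none of the CCP identifier classes are used.

public class PrefixResolverDemo {
    record TypedId(String namespace, String value) {}

    // Simplified analogue of the parser's resolveInteractorId(): split on the
    // first ':' and treat "-" as a missing value, as PSI-MITAB columns do.
    static TypedId resolve(String idStr) {
        if (idStr == null || idStr.trim().equals("-")) {
            return null;
        }
        int colon = idStr.indexOf(':');
        if (colon < 0) {
            return null; // unknown shape; the real parser warns and returns null
        }
        return new TypedId(idStr.substring(0, colon), idStr.substring(colon + 1));
    }

    public static void main(String[] args) {
        System.out.println(resolve("uniprotkb:P12345")); // TypedId[namespace=uniprotkb, value=P12345]
        System.out.println(resolve("-"));                // null
    }
}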
datasource-fileparsers/src/main/java/edu/ucdenver/ccp/datasource/fileparsers/irefweb/IRefWebPsiMitab2_6FileParser.java
/* * Copyright (C) 2009 Center for Computational Pharmacology, University of Colorado School of Medicine * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * */ package edu.ucdenver.ccp.datasource.fileparsers.irefweb; /* * #%L * Colorado Computational Pharmacology's common module * %% * Copyright (C) 2012 - 2015 Regents of the University of Colorado * %% * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
* #L% */ import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Set; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Logger; import edu.ucdenver.ccp.common.download.FtpDownload; import edu.ucdenver.ccp.common.file.CharacterEncoding; import edu.ucdenver.ccp.common.file.reader.Line; import edu.ucdenver.ccp.common.file.reader.StreamLineReader; import edu.ucdenver.ccp.common.ftp.FTPUtil.FileType; import edu.ucdenver.ccp.common.string.RegExPatterns; import edu.ucdenver.ccp.common.string.StringConstants; import edu.ucdenver.ccp.common.string.StringUtil; import edu.ucdenver.ccp.datasource.fileparsers.download.FtpHost; import edu.ucdenver.ccp.datasource.fileparsers.obo.MiOntologyIdTermPair; import edu.ucdenver.ccp.datasource.fileparsers.obo.NcbiTaxonomyIdTermPair; import edu.ucdenver.ccp.datasource.fileparsers.taxonaware.TaxonAwareSingleLineFileRecordReader; import edu.ucdenver.ccp.datasource.identifiers.DataSourceIdentifier; import edu.ucdenver.ccp.datasource.identifiers.NucleotideAccessionResolver; import edu.ucdenver.ccp.datasource.identifiers.ProteinAccessionResolver; import edu.ucdenver.ccp.datasource.identifiers.bind.BindInteractionID; import edu.ucdenver.ccp.datasource.identifiers.dip.DipInteractionID; import edu.ucdenver.ccp.datasource.identifiers.dip.DipInteractorID; import edu.ucdenver.ccp.datasource.identifiers.ebi.intact.IntActID; import edu.ucdenver.ccp.datasource.identifiers.ebi.interpro.PirID; import edu.ucdenver.ccp.datasource.identifiers.ebi.interpro.TigrFamsID; import edu.ucdenver.ccp.datasource.identifiers.ebi.ipi.IpiID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtEntryName; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtID; import edu.ucdenver.ccp.datasource.identifiers.ebi.uniprot.UniProtIsoformID; import edu.ucdenver.ccp.datasource.identifiers.ensembl.EnsemblGeneID; import edu.ucdenver.ccp.datasource.identifiers.flybase.FlyBaseID; import edu.ucdenver.ccp.datasource.identifiers.hgnc.HgncGeneSymbolID; import edu.ucdenver.ccp.datasource.identifiers.hprd.HprdID; import edu.ucdenver.ccp.datasource.identifiers.irefweb.CrigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.CrogId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IcrigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IcrogId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IrigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.IrogId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.RigId; import edu.ucdenver.ccp.datasource.identifiers.irefweb.RogId; import edu.ucdenver.ccp.datasource.identifiers.kegg.KeggGeneID; import edu.ucdenver.ccp.datasource.identifiers.mint.MintID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.gene.EntrezGeneID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.gene.GiNumberID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.refseq.RefSeqID; import edu.ucdenver.ccp.datasource.identifiers.ncbi.taxonomy.NcbiTaxonomyID; import edu.ucdenver.ccp.datasource.identifiers.other.AfcsId; import edu.ucdenver.ccp.datasource.identifiers.other.BindTranslationId; import edu.ucdenver.ccp.datasource.identifiers.other.BioGridID; import edu.ucdenver.ccp.datasource.identifiers.other.CamjeDbId; import edu.ucdenver.ccp.datasource.identifiers.other.CorumId; import edu.ucdenver.ccp.datasource.identifiers.other.CygdId; import edu.ucdenver.ccp.datasource.identifiers.other.ImexId; import edu.ucdenver.ccp.datasource.identifiers.other.InnateDbId; import 
edu.ucdenver.ccp.datasource.identifiers.other.MatrixDbId; import edu.ucdenver.ccp.datasource.identifiers.other.MpactId; import edu.ucdenver.ccp.datasource.identifiers.other.MpiDbId; import edu.ucdenver.ccp.datasource.identifiers.other.OphidId; import edu.ucdenver.ccp.datasource.identifiers.other.PrfId; import edu.ucdenver.ccp.datasource.identifiers.other.UniParcID; import edu.ucdenver.ccp.datasource.identifiers.pdb.PdbID; import edu.ucdenver.ccp.datasource.identifiers.sgd.SgdID; import edu.ucdenver.ccp.identifier.publication.PubMedID; /** * This class is used to parse iRefWeb PSI-MITAB 2.6 files, which can be downloaded from the iRefIndex FTP archive, e.g. * ftp://ftp.no.embnet.org/irefindex/data/archive/release_4.0/psimi_tab/All.mitab.06042009.txt.zip * (see the @FtpDownload annotation below for the release and file used by this parser) * * @author Bill Baumgartner * @see IRefWebPsiMitab2_6FileData for file format and version specifications */ public class IRefWebPsiMitab2_6FileParser extends TaxonAwareSingleLineFileRecordReader<IRefWebPsiMitab2_6FileData> { private static final Logger logger = Logger.getLogger(IRefWebPsiMitab2_6FileParser.class); private static final String HEADER = "#uidA\tuidB\taltA\taltB\taliasA\taliasB\tmethod\tauthor\tpmids\ttaxa\ttaxb\tinteractionType\tsourcedb\tinteractionIdentifier\tconfidence\texpansion\tbiological_role_A\tbiological_role_B\texperimental_role_A\texperimental_role_B\tinteractor_type_A\tinteractor_type_B\txrefs_A\txrefs_B\txrefs_Interaction\tAnnotations_A\tAnnotations_B\tAnnotations_Interaction\tHost_organism_taxid\tparameters_Interaction\tCreation_date\tUpdate_date\tChecksum_A\tChecksum_B\tChecksum_Interaction\tNegative\tOriginalReferenceA\tOriginalReferenceB\tFinalReferenceA\tFinalReferenceB\tMappingScoreA\tMappingScoreB\tirogida\tirogidb\tirigid\tcrogida\tcrogidb\tcrigid\ticrogida\ticrogidb\ticrigid\timex_id\tedgetype\tnumParticipants"; public static final String FTP_FILE_NAME = "All.mitab.03022013.txt.zip"; public static final CharacterEncoding ENCODING = CharacterEncoding.US_ASCII; public static final String FTP_USER_NAME = "ftp"; @FtpDownload(server = FtpHost.IREFWEB_HOST, path = "irefindex/data/archive/release_10.0/psi_mitab/MITAB2.6/", filename = FTP_FILE_NAME, filetype = FileType.BINARY, username = FTP_USER_NAME, decompress = true, targetFileName="All.mitab.08122013.txt") private File allMitabTxtFile; public IRefWebPsiMitab2_6FileParser(File file, CharacterEncoding encoding) throws IOException, IllegalArgumentException { super(file, encoding, null); } public IRefWebPsiMitab2_6FileParser(File workDirectory, boolean clean) throws IOException { super(workDirectory, ENCODING, null, null, null, clean, null); } public IRefWebPsiMitab2_6FileParser(File file, CharacterEncoding encoding, Set<NcbiTaxonomyID> taxonIds) throws IOException, IllegalArgumentException { super(file, encoding, taxonIds); } public IRefWebPsiMitab2_6FileParser(File workDirectory, boolean clean, Set<NcbiTaxonomyID> taxonIds) throws IOException { super(workDirectory, ENCODING, null, null, null, clean, taxonIds); } @Override protected NcbiTaxonomyID getLineTaxon(Line line) { IRefWebPsiMitab2_6FileData record = parseRecordFromLine(line); // should probably return both here IRefWebInteractor interactorA = record.getInteractorA(); if (interactorA.getNcbiTaxonomyId() != null) { return interactorA.getNcbiTaxonomyId().getTaxonomyId(); } IRefWebInteractor interactorB = record.getInteractorB(); if (interactorB.getNcbiTaxonomyId() != null) { return interactorB.getNcbiTaxonomyId().getTaxonomyId(); } return null; } @Override protected StreamLineReader initializeLineReaderFromDownload(CharacterEncoding
encoding, String skipLinePrefix) throws IOException { return new StreamLineReader(allMitabTxtFile, encoding, skipLinePrefix); } @Override protected String getFileHeader() throws IOException { return readLine().getText(); } @Override protected String getExpectedFileHeader() throws IOException { return HEADER; } /** * Extracts information from a line from a file and returns an IRefWebPsiMitab2_6FileData object. * * @param line * @return */ @Override public IRefWebPsiMitab2_6FileData parseRecordFromLine(Line line) { String[] toks = line.getText().split("\\t", -1); if (toks.length == 54) { IRefWebInteractor interactorA = getInteractor(toks[0], toks[2], toks[4], toks[9], toks[16], toks[18], toks[20], toks[22], toks[25], toks[32], toks[36], toks[38], toks[40], toks[42], toks[45], toks[48]); IRefWebInteractor interactorB = getInteractor(toks[1], toks[3], toks[5], toks[10], toks[17], toks[19], toks[21], toks[23], toks[26], toks[33], toks[37], toks[39], toks[41], toks[43], toks[46], toks[49]); IRefWebInteraction interaction = getInteraction(toks[6], toks[7], toks[8], toks[11], toks[13], toks[14], toks[15], toks[24], toks[27], toks[28], toks[29], toks[34], toks[35], toks[44], toks[47], toks[50], toks[51], toks[52], toks[53]); IRefWebInteractionSourceDatabase sourceDb = MiOntologyIdTermPair.parseString( IRefWebInteractionSourceDatabase.class, toks[12]); String creationDate = toks[30]; String updateDate = toks[31]; return new IRefWebPsiMitab2_6FileData(sourceDb, creationDate, updateDate, interactorA, interactorB, interaction, line.getByteOffset(), line.getLineNumber()); } String errorMessage = "Unexpected number of tokens (" + toks.length + " != 54) on line: " + line; throw new IllegalArgumentException("IRefWeb file format appears to have changed: " + errorMessage); } /** * @return */ private IRefWebInteraction getInteraction(String detectionMethodStr, String authorStr, String pmidsStr, String interactionTypeStr, String interactionIdStr, String confidenceStr, String expansionStr, String interactionXrefsStr, String interactionAnnotationsStr, String hostOrgTaxonomyIdStr, String interactionParametersStr, String interactionChecksumStr, String negativeStr, String irigidStr, String crigidStr, String icrigidStr, String imexIdStr, String edgeTypeStr, String numParticipantsStr) { if (!interactionXrefsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the xrefs_interaction column. " + "This column has always been empty. Code changes likely required."); } if (!interactionAnnotationsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the annotations_interaction column. " + "This column has always been empty. Code changes likely required."); } if (!interactionParametersStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the parameters_interaction column. " + "This column has always been empty. Code changes likely required."); } IRefWebInteractionDetectionMethod detectionMethod = null; if (!detectionMethodStr.trim().equals(StringConstants.HYPHEN_MINUS)) { detectionMethod = MiOntologyIdTermPair.parseString(IRefWebInteractionDetectionMethod.class, detectionMethodStr); } String author = (authorStr.trim().equals(StringConstants.HYPHEN_MINUS)) ?
null : authorStr; Set<PubMedID> pmids = parsePmidsStr(pmidsStr); IRefWebInteractionType interactionType = null; if (!interactionTypeStr.trim().equals(StringConstants.HYPHEN_MINUS)) { interactionType = MiOntologyIdTermPair.parseString(IRefWebInteractionType.class, interactionTypeStr); } Set<DataSourceIdentifier<?>> interactionDbIds = resolveInteractionDbIds(interactionIdStr); Set<String> confidence = parseConfidenceStr(confidenceStr); String expansion = expansionStr; String xrefsInteraction = null; String annotationsInteraction = null; IRefWebHostOrganism hostOrgTaxonomyId = null; if (!hostOrgTaxonomyIdStr.trim().equals(StringConstants.HYPHEN_MINUS)) { hostOrgTaxonomyId = NcbiTaxonomyIdTermPair.parseString(IRefWebHostOrganism.class, hostOrgTaxonomyIdStr); } String parametersInteraction = null; RigId checksumInteraction = new RigId(StringUtil.removePrefix(interactionChecksumStr, "rigid:")); boolean negative = Boolean.parseBoolean(negativeStr); IrigId irigid = new IrigId(irigidStr); CrigId crigid = new CrigId(crigidStr); IcrigId icrigid = new IcrigId(icrigidStr); ImexId imexId = (imexIdStr.trim().equals(StringConstants.HYPHEN_MINUS)) ? null : new ImexId(imexIdStr); String edgeType = edgeTypeStr; int numParticipants = Integer.parseInt(numParticipantsStr); return new IRefWebInteraction(detectionMethod, author, pmids, interactionType, interactionDbIds, confidence, expansion, xrefsInteraction, annotationsInteraction, hostOrgTaxonomyId, parametersInteraction, checksumInteraction, negative, irigid, crigid, icrigid, imexId, edgeType, numParticipants); } /** * @param interactionIdStr * @return */ private Set<DataSourceIdentifier<?>> resolveInteractionDbIds(String interactionIdStr) { Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); for (String id : interactionIdStr.split(RegExPatterns.PIPE)) { if (id.startsWith("edgetype:") || id.endsWith(":-")) { // do nothing - this is a redundant storage of edge type or a null identifier } else if (id.startsWith("BIND_Translation:")) { ids.add(new BindTranslationId(StringUtil.removePrefix(id, "BIND_Translation:"))); } else if (id.startsWith("irigid:")) { ids.add(new IrigId(StringUtil.removePrefix(id, "irigid:"))); } else if (id.startsWith("rigid:")) { ids.add(new RigId(StringUtil.removePrefix(id, "rigid:"))); } else if (id.startsWith("grid:")) { ids.add(new BioGridID(StringUtil.removePrefix(id, "grid:"))); } else if (id.startsWith("bind:")) { ids.add(new BindInteractionID(StringUtil.removePrefix(id, "bind:"))); } else if (id.startsWith("MPACT:")) { ids.add(new MpactId(StringUtil.removePrefix(id, "MPACT:"))); } else if (id.startsWith("mint:")) { ids.add(new MintID(StringUtil.removePrefix(id, "mint:"))); } else if (id.startsWith("intact:")) { ids.add(new IntActID(StringUtil.removePrefix(id, "intact:"))); } else if (id.startsWith("dip:")) { ids.add(new DipInteractionID(StringUtil.removePrefix(id, "dip:"))); } else if (id.startsWith("ophid:")) { ids.add(new OphidId(StringUtil.removePrefix(id, "ophid:"))); } else if (id.startsWith("InnateDB:")) { String idbId = StringUtil.removePrefix(id, "InnateDB:"); if (idbId.startsWith("IDB-")) { idbId = StringUtil.removePrefix(idbId, "IDB-"); } ids.add(new InnateDbId(idbId)); } else if (id.startsWith("innatedb:")) { String idbId = StringUtil.removePrefix(id, "innatedb:"); if (idbId.startsWith("IDB-")) { idbId = StringUtil.removePrefix(idbId, "IDB-"); } ids.add(new InnateDbId(idbId)); } else if (id.startsWith("CORUM:")) { ids.add(new CorumId(StringUtil.removePrefix(id, "CORUM:"))); } else if 
(id.startsWith("mpilit:")) { ids.add(new MpiDbId(StringUtil.removePrefix(id, "mpilit:"))); } else if (id.startsWith("mpiimex:")) { ids.add(new MpiDbId(StringUtil.removePrefix(id, "mpiimex:"))); } else if (id.startsWith("MatrixDB:")) { ids.add(new MatrixDbId(StringUtil.removePrefix(id, "MatrixDB:"))); } else if (id.startsWith("biogrid:")) { ids.add(new BioGridID(StringUtil.removePrefix(id, "biogrid:"))); } else if (id.startsWith("pubmed:")) { ids.add(new PubMedID(StringUtil.removePrefix(id, "pubmed:"))); } else if (id.startsWith("HPRD")) { try { ids.add(new HprdID(StringUtil.removePrefix(id, "HPRD:"))); } catch (IllegalArgumentException e) { logger.warn(e.getMessage()); } } else { // throw new IllegalArgumentException("Unknown id prefix: " + id); logger.warn("Unknown id prefix: " + id); // return null; } } return ids; } private Set<DataSourceIdentifier<?>> resolveInteractorIds(String interactorIdStr) { Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); for (String id : interactorIdStr.split(RegExPatterns.PIPE)) { ids.add(resolveInteractorId(id)); } return ids; } /** * @param ids * @param id */ private DataSourceIdentifier<?> resolveInteractorId(String idStr) { if (idStr.trim().equals(StringConstants.HYPHEN_MINUS)) { return null; } if (idStr.startsWith("xx:")) { return null; } if (idStr.startsWith("other:")) { return null; } if (idStr.equals("null")) { return null; } try { if (idStr.startsWith("uniprotkb:")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprotkb:")); } else if (idStr.startsWith("uniprot:")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprot:")); } else if (idStr.startsWith("Swiss-Prot:")) { return getUniprotId(StringUtil.removePrefix(idStr, "Swiss-Prot:")); } else if (idStr.startsWith("uniprot/swiss-prot:")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprot/swiss-prot:")); } else if (idStr.startsWith("UniProtKB/TrEMBL:")) { return getUniprotId(StringUtil.removePrefix(idStr, "UniProtKB/TrEMBL:")); } else if (idStr.startsWith("SP:")) { return getUniprotId(StringUtil.removePrefix(idStr, "SP:")); } else if (idStr.startsWith("uniprot knowledge base")) { return getUniprotId(StringUtil.removePrefix(idStr, "uniprot knowledge base:")); } else if (idStr.startsWith("TREMBL")) { return getUniprotId(StringUtil.removePrefix(idStr, "TREMBL:")); } else if (idStr.startsWith("entrezgene/locuslink:")) { return new EntrezGeneID(StringUtil.removePrefix(idStr, "entrezgene/locuslink:")); } else if (idStr.startsWith("entrez gene/locuslink:")) { return new EntrezGeneID(StringUtil.removePrefix(idStr, "entrez gene/locuslink:")); } else if (idStr.startsWith("HPRD:")) { return new HprdID(StringUtil.removePrefix(idStr, "HPRD:")); } else if (idStr.startsWith("CORUM:")) { return new CorumId(StringUtil.removePrefix(idStr, "CORUM:")); } else if (idStr.startsWith("crogid:")) { return new CrogId(StringUtil.removePrefix(idStr, "crogid:")); } else if (idStr.startsWith("icrogid:")) { return new IcrogId(StringUtil.removePrefix(idStr, "icrogid:")); } else if (idStr.startsWith("refseq:")) { return getRefseqAccession(StringUtil.removePrefix(idStr, "refseq:").toUpperCase()); } else if (idStr.startsWith("RefSeq:")) { return getRefseqAccession(StringUtil.removePrefix(idStr, "RefSeq:").toUpperCase()); } else if (idStr.startsWith("rogid:")) { return new RogId(StringUtil.removePrefix(idStr, "rogid:")); } else if (idStr.startsWith("irogid:")) { return new IrogId(StringUtil.removePrefix(idStr, "irogid:")); } else if (idStr.startsWith("PDB:")) { return new 
PdbID(StringUtil.removePrefix(idStr, "PDB:")); } else if (idStr.startsWith("complex:")) { return new RogId(StringUtil.removePrefix(idStr, "complex:")); } else if (idStr.startsWith("cygd:")) { return new CygdId(StringUtil.removePrefix(idStr, "cygd:")); } else if (idStr.startsWith("prf:")) { return new PrfId(StringUtil.removePrefix(idStr, "prf:")); } else if (idStr.startsWith("mpilit:")) { return new MpiDbId(StringUtil.removePrefix(idStr, "mpilit:")); } else if (idStr.startsWith("mpiimex:")) { return new MpiDbId(StringUtil.removePrefix(idStr, "mpiimex:")); } else if (idStr.startsWith("pir:")) { return new PirID(StringUtil.removePrefix(idStr, "pir:")); } else if (idStr.startsWith("PIR:")) { return new PirID(StringUtil.removePrefix(idStr, "PIR:")); } else if (idStr.startsWith("mint:")) { return new MintID(StringUtil.removePrefix(idStr, "mint:")); } else if (idStr.startsWith("dip:")) { return new DipInteractorID(StringUtil.removePrefix(idStr, "dip:")); } else if (idStr.startsWith("camjedb:")) { return new CamjeDbId(StringUtil.removePrefix(idStr, "camjedb:")); } else if (idStr.startsWith("rcsb pdb:")) { return new PdbID(StringUtil.removePrefix(idStr, "rcsb pdb:")); } else if (idStr.startsWith("gi:")) { return new GiNumberID(StringUtil.removePrefix(idStr, "gi:")); } else if (idStr.startsWith("genbank_protein_gi:")) { return new GiNumberID(StringUtil.removePrefix(idStr, "genbank_protein_gi:")); } else if (idStr.startsWith("intact:")) { return new IntActID(StringUtil.removePrefix(idStr, "intact:")); } else if (idStr.startsWith("ipi:")) { return new IpiID(StringUtil.removePrefix(idStr, "ipi:")); } else if (idStr.startsWith("Ensembl:")) { return new EnsemblGeneID(StringUtil.removePrefix(idStr, "Ensembl:")); } else if (idStr.startsWith("MatrixDB:")) { return new MatrixDbId(StringUtil.removePrefix(idStr, "MatrixDB:")); } else if (idStr.startsWith("SGD:")) { return new SgdID(StringUtil.removePrefix(idStr, "SGD:")); } else if (idStr.startsWith("TIGR:")) { return new TigrFamsID(StringUtil.removePrefix(idStr, "TIGR:")); } else if (idStr.startsWith("afcs:")) { return new AfcsId(StringUtil.removePrefix(idStr, "afcs:")); } else if (idStr.startsWith("pubmed:")) { return new PubMedID(StringUtil.removePrefix(idStr, "pubmed:")); } else if (idStr.startsWith("uniparc:")) { return new UniParcID(StringUtil.removePrefix(idStr, "uniparc:")); } else if (idStr.startsWith("FlyBase:")) { return new FlyBaseID(StringUtil.removePrefix(idStr, "FlyBase:")); } else if (idStr.startsWith("KEGG:")) { return new KeggGeneID(StringUtil.removePrefix(idStr, "KEGG:")); } else if (idStr.startsWith("InnateDB:")) { return new InnateDbId(StringUtil.removePrefix(idStr, "InnateDB:")); } else if (idStr.startsWith("emb:")) { return ProteinAccessionResolver.resolveProteinAccession(StringUtil.removePrefix(idStr, "emb:")); } else if (idStr.startsWith("dbj:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "dbj:")); } else if (idStr.startsWith("ddbj/embl/genbank:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "ddbj/embl/genbank:")); } else if (idStr.startsWith("GenBank:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "GenBank:")); } else if (idStr.startsWith("genbank indentifier:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "genbank indentifier:")); } else if (idStr.startsWith("GB:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "GB:")); } else if (idStr.startsWith("gb:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "gb:")); } else if 
(idStr.startsWith("tpg:")) { return getGenbankAccession(StringUtil.removePrefix(idStr, "tpg:")); } else if (idStr.startsWith("pdb:")) { return new PdbID(StringUtil.removePrefix(idStr, "pdb:")); } else if (idStr.startsWith("flybase:")) { return new FlyBaseID(StringUtil.removePrefix(idStr, "flybase:")); } else if (idStr.startsWith("sgd:")) { return new FlyBaseID(StringUtil.removePrefix(idStr, "sgd:")); } else if (idStr.startsWith("entrezgene:")) { return new EntrezGeneID(StringUtil.removePrefix(idStr, "entrezgene:")); } } catch (IllegalArgumentException e) { logger.warn("Invalid identifier due to " + e.getMessage()); logger.warn("Trying identifier as GenBank ID..."); return getGenbankAccession(idStr); } // throw new IllegalArgumentException("Unknown id prefix: " + idStr); logger.warn("Unknown id prefix: " + idStr); return null; } /** * @param removePrefix * @return */ private DataSourceIdentifier<?> getUniprotId(String idStr) { try { if (idStr.contains(StringConstants.HYPHEN_MINUS)) { return new UniProtIsoformID(idStr); } else if (idStr.contains(StringConstants.UNDERSCORE)) { return new UniProtEntryName(idStr); } return new UniProtID(idStr); } catch (IllegalArgumentException e) { logger.warn("Detected invalid UniProt accession: " + idStr); return null; } } private DataSourceIdentifier<?> getRefseqAccession(String acc) { try { return new RefSeqID(acc); } catch (IllegalArgumentException e) { return getGenbankAccession(acc); } } /** * @param removePrefix * @return */ private DataSourceIdentifier<?> getGenbankAccession(String acc) { try { return NucleotideAccessionResolver.resolveNucleotideAccession(acc); } catch (IllegalArgumentException e) { try { return ProteinAccessionResolver.resolveProteinAccession(acc); } catch (IllegalArgumentException e2) { logger.warn("Detected invalid GenBank accession: " + acc); return null; } } } /** * @param pmidsStr * @return */ private Set<PubMedID> parsePmidsStr(String pmidsStr) { if (pmidsStr.trim().equals(StringConstants.HYPHEN_MINUS) || pmidsStr.trim().equals("pubmed:0")) { return null; } String[] toks = pmidsStr.split(RegExPatterns.PIPE); Set<PubMedID> pmids = new HashSet<PubMedID>(); for (String tok : toks) { try { pmids.add(new PubMedID(StringUtil.removePrefix(tok, "pubmed:"))); } catch (IllegalArgumentException e) { logger.warn("Detected invalid pubmed id: " + e.getMessage()); } } return pmids; } /** * @param confidenceStr * @return */ private Set<String> parseConfidenceStr(String confidenceStr) { if (confidenceStr.trim().equals(StringConstants.HYPHEN_MINUS)) { return null; } String[] toks = confidenceStr.split(RegExPatterns.PIPE); Set<String> confidences = new HashSet<String>(); for (String tok : toks) { confidences.add(tok); } return confidences; } /** * @return */ private IRefWebInteractor getInteractor(String uniqueIdStr, String altIdStr, String aliasStr, String taxIdStr, String biologicalRoleStr, String experimentalRoleStr, String interactorTypeStr, String dbXrefsStr, String annotationsStr, String checksumStr, String originalReferenceStr, String finalReferenceStr, String mappingScoreStr, String irogidStr, String crogidStr, String icrogidStr) { if (!dbXrefsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the xrefs_A or xrefs_B column. " + "This column has always been empty. Code changes likely required."); } if (!annotationsStr.trim().equals(StringConstants.HYPHEN_MINUS)) { throw new IllegalArgumentException("Observed a value in the Annotations_A or Annotations_B column. 
" + "This column has always been empty. Code changes likely required."); } DataSourceIdentifier<?> uniqueId = resolveInteractorId(uniqueIdStr); Set<DataSourceIdentifier<?>> alternateIds = resolveInteractorIds(altIdStr); Set<DataSourceIdentifier<?>> aliasIds = resolveAliasIds(aliasStr); Set<String> aliasSymbols = resolveAliasSymbols(aliasStr); IRefWebInteractorOrganism ncbiTaxonomyId = null; if (!taxIdStr.trim().equals(StringConstants.HYPHEN_MINUS)) { ncbiTaxonomyId = NcbiTaxonomyIdTermPair.parseString(IRefWebInteractorOrganism.class, taxIdStr); } Set<DataSourceIdentifier<?>> dbXReferenceIds = null; IRefWebInteractorBiologicalRole biologicalRole = null; if (!biologicalRoleStr.trim().equals(StringConstants.HYPHEN_MINUS)) { biologicalRole = MiOntologyIdTermPair.parseString(IRefWebInteractorBiologicalRole.class, biologicalRoleStr); } IRefWebInteractorExperimentalRole experimentalRole = null; if (!experimentalRoleStr.trim().equals(StringConstants.HYPHEN_MINUS)) { experimentalRole = MiOntologyIdTermPair.parseString(IRefWebInteractorExperimentalRole.class, experimentalRoleStr); } IRefWebInteractorType interactorType = null; if (!interactorTypeStr.trim().equals(StringConstants.HYPHEN_MINUS)) { interactorType = MiOntologyIdTermPair.parseString(IRefWebInteractorType.class, interactorTypeStr); } String annotations = null; RogId checksum = new RogId(StringUtil.removePrefix(checksumStr, "rogid:")); DataSourceIdentifier<?> originalReference = resolveInteractorId(originalReferenceStr); DataSourceIdentifier<?> finalReference = resolveInteractorId(finalReferenceStr); String mappingScore = mappingScoreStr; IrogId irogid = new IrogId(irogidStr); CrogId crogid = new CrogId(crogidStr); IcrogId icrogid = new IcrogId(icrogidStr); return new IRefWebInteractor(uniqueId, alternateIds, aliasSymbols, aliasIds, ncbiTaxonomyId, dbXReferenceIds, biologicalRole, experimentalRole, interactorType, annotations, checksum, originalReference, finalReference, mappingScore, irogid, crogid, icrogid); } /** * @param aliasStr * @return */ private Set<String> resolveAliasSymbols(String aliasStr) { Set<String> aliases = new HashSet<String>(); for (String alias : aliasStr.split(RegExPatterns.PIPE)) { String aliasSymbol = resolveAliasSymbol(alias); if (aliasSymbol != null) { aliases.add(aliasSymbol); } } return aliases; } /** * @param alias * @return */ private String resolveAliasSymbol(String aliasStr) { if (aliasStr.startsWith("entrezgene/locuslink:")) { return new String(StringUtil.removePrefix(aliasStr, "entrezgene/locuslink:")); } return null; } /** * @param aliasStr * @return */ private Set<DataSourceIdentifier<?>> resolveAliasIds(String aliasStr) { Set<DataSourceIdentifier<?>> ids = new HashSet<DataSourceIdentifier<?>>(); for (String alias : aliasStr.split(RegExPatterns.PIPE)) { if (!alias.equals(StringConstants.HYPHEN_MINUS)) { DataSourceIdentifier<?> id = resolveAliasId(alias); if (id != null) { ids.add(id); } } } return ids; } /** * @param id * @return */ private DataSourceIdentifier<?> resolveAliasId(String aliasStr) { if (aliasStr.startsWith("uniprotkb:")) { return new UniProtEntryName(StringUtil.removePrefix(aliasStr, "uniprotkb:")); } else if (aliasStr.startsWith("entrezgene/locuslink:")) { // ignore, it is a gene symbol and is handled by resolveAliasSymbols() return null; } else if (aliasStr.startsWith("crogid:")) { return new CrogId(StringUtil.removePrefix(aliasStr, "crogid:")); } else if (aliasStr.startsWith("icrogid:")) { return new IcrogId(StringUtil.removePrefix(aliasStr, "icrogid:")); } else if 
(aliasStr.startsWith("rogid:")) { return new RogId(StringUtil.removePrefix(aliasStr, "rogid:")); } else if (aliasStr.startsWith("refseq:")) { return getRefseqAccession(StringUtil.removePrefix(aliasStr, "refseq:")); } else if (aliasStr.startsWith("hgnc:")) { return new HgncGeneSymbolID(StringUtil.removePrefix(aliasStr, "hgnc:")); } throw new IllegalArgumentException("Unknown id prefix: " + aliasStr); } public static void main(String[] args) { BasicConfigurator.configure(); File irefwebFile = new File("/tmp/irefweb.sample"); try { IRefWebPsiMitab2_6FileParser parser = new IRefWebPsiMitab2_6FileParser(irefwebFile, CharacterEncoding.US_ASCII); while (parser.hasNext()) { parser.next(); } } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
Updated to download/use the 07042015 file
datasource-fileparsers/src/main/java/edu/ucdenver/ccp/datasource/fileparsers/irefweb/IRefWebPsiMitab2_6FileParser.java
Updated to download/use the 07042015 file
<ide><path>atasource-fileparsers/src/main/java/edu/ucdenver/ccp/datasource/fileparsers/irefweb/IRefWebPsiMitab2_6FileParser.java <ide> <ide> private static final String HEADER = "#uidA\tuidB\taltA\taltB\taliasA\taliasB\tmethod\tauthor\tpmids\ttaxa\ttaxb\tinteractionType\tsourcedb\tinteractionIdentifier\tconfidence\texpansion\tbiological_role_A\tbiological_role_B\texperimental_role_A\texperimental_role_B\tinteractor_type_A\tinteractor_type_B\txrefs_A\txrefs_B\txrefs_Interaction\tAnnotations_A\tAnnotations_B\tAnnotations_Interaction\tHost_organism_taxid\tparameters_Interaction\tCreation_date\tUpdate_date\tChecksum_A\tChecksum_B\tChecksum_Interaction\tNegative\tOriginalReferenceA\tOriginalReferenceB\tFinalReferenceA\tFinalReferenceB\tMappingScoreA\tMappingScoreB\tirogida\tirogidb\tirigid\tcrogida\tcrogidb\tcrigid\ticrogida\ticrogidb\ticrigid\timex_id\tedgetype\tnumParticipants"; <ide> <del> public static final String FTP_FILE_NAME = "All.mitab.03022013.txt.zip"; <add>// public static final String FTP_FILE_NAME = "All.mitab.03022013.txt.zip"; <add> public static final String FTP_FILE_NAME = "All.mitab.07042015.txt.zip"; <ide> public static final CharacterEncoding ENCODING = CharacterEncoding.US_ASCII; <ide> public static final String FTP_USER_NAME = "ftp"; <ide> <del> @FtpDownload(server = FtpHost.IREFWEB_HOST, path = "irefindex/data/archive/release_10.0/psi_mitab/MITAB2.6/", filename = FTP_FILE_NAME, filetype = FileType.BINARY, username = FTP_USER_NAME, decompress = true, targetFileName="All.mitab.08122013.txt") <add> @FtpDownload(server = FtpHost.IREFWEB_HOST, path = "irefindex/data/archive/release_10.0/psi_mitab/MITAB2.6/", filename = FTP_FILE_NAME, filetype = FileType.BINARY, username = FTP_USER_NAME, decompress = true, targetFileName="All.mitab.04072015.txt") <ide> private File allMitabTxtFile; <ide> <ide> public IRefWebPsiMitab2_6FileParser(File file, CharacterEncoding encoding) throws IOException,