| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 10–749k | stringlengths 5–108 | stringlengths 7–333 | stringclasses 1 value | stringclasses 15 values | int64 10–749k |
package com.omnicrola.panoptes.ui.autocomplete;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import com.omnicrola.panoptes.control.DataController;
import com.omnicrola.panoptes.control.IControlObserver;
import com.omnicrola.panoptes.control.TimeblockSet;
import com.omnicrola.panoptes.data.IReadTimeblock;
import com.omnicrola.panoptes.data.TimeData;
import com.omnicrola.testing.util.EnhancedTestCase;
public class CardNumberProviderTest extends EnhancedTestCase {
private DataController mockController;
private String expectedNumber1;
private String expectedNumber2;
private String expectedNumber3;
@Test
public void testImplementsInterfaces() throws Exception {
assertImplementsInterface(IOptionProvider.class, CardNumberProvider.class);
assertImplementsInterface(IControlObserver.class, CardNumberProvider.class);
}
@Test
public void testConstructorParams() throws Exception {
DataController mockController = useMock(DataController.class);
startReplay();
CardNumberProvider cardNumberProvider = new CardNumberProvider(mockController);
assertConstructionParamSame("dataController", mockController, cardNumberProvider);
}
@Test
public void testDataChanged_UpdatesOptionList() throws Exception {
setupMockTimeblockSet();
startReplay();
CardNumberProvider cardNumberProvider = new CardNumberProvider(this.mockController);
assertEquals(0, cardNumberProvider.getOptionsList().size());
cardNumberProvider.dataChanged();
List<Object> optionsList = cardNumberProvider.getOptionsList();
assertEquals(3, optionsList.size());
assertTrue(optionsList.contains(this.expectedNumber1));
assertTrue(optionsList.contains(this.expectedNumber2));
assertTrue(optionsList.contains(this.expectedNumber3));
}
@Test
public void testTimeblockSetChanged_UpdatesOptionList() throws Exception {
TimeblockSet mockTimeblockSet = useMock(TimeblockSet.class);
setupMockTimeblockSet();
startReplay();
CardNumberProvider cardNumberProvider = new CardNumberProvider(this.mockController);
assertEquals(0, cardNumberProvider.getOptionsList().size());
cardNumberProvider.timeblockSetChanged(mockTimeblockSet);
List<Object> optionsList = cardNumberProvider.getOptionsList();
assertEquals(3, optionsList.size());
assertTrue(optionsList.contains(this.expectedNumber1));
assertTrue(optionsList.contains(this.expectedNumber2));
assertTrue(optionsList.contains(this.expectedNumber3));
}
public void setupMockTimeblockSet() {
TimeblockSet mockTimeblockSet = useMock(TimeblockSet.class);
this.expectedNumber1 = "cardnumber";
this.expectedNumber2 = "a different number";
this.expectedNumber3 = "duplicate";
IReadTimeblock mockTimeblock1 = createMockBlockWithCardNumber(this.expectedNumber1);
IReadTimeblock mockTimeblock2 = createMockBlockWithCardNumber(this.expectedNumber2);
IReadTimeblock mockTimeblock3 = createMockBlockWithCardNumber(this.expectedNumber3);
IReadTimeblock mockTimeblock4 = createMockBlockWithCardNumber(this.expectedNumber3);
List<IReadTimeblock> timblocks = Arrays.asList(mockTimeblock1, mockTimeblock2, mockTimeblock3, mockTimeblock4);
expect(mockTimeblockSet.getBlockSet()).andReturn(timblocks);
this.mockController = useMock(DataController.class);
expect(this.mockController.getAllTimeblocks()).andReturn(mockTimeblockSet);
}
private IReadTimeblock createMockBlockWithCardNumber(String expectedNumber) {
IReadTimeblock mockTimeblock = useMock(IReadTimeblock.class);
TimeData mockData = useMock(TimeData.class);
expect(mockTimeblock.getTimeData()).andReturn(mockData);
expect(mockData.getCard()).andReturn(expectedNumber);
return mockTimeblock;
}
}
| Omnicrola/Panoptes | Panoptes.Test/src/com/omnicrola/panoptes/ui/autocomplete/CardNumberProviderTest.java | Java | gpl-2.0 | 3,813 |
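The class under test is not included in this row. Based on the expectations above, a minimal CardNumberProvider that would satisfy this test could look roughly like the sketch below; the exact contents of IOptionProvider and IControlObserver, and the return type of TimeblockSet.getBlockSet(), are assumptions inferred from the test.

package com.omnicrola.panoptes.ui.autocomplete;

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

import com.omnicrola.panoptes.control.DataController;
import com.omnicrola.panoptes.control.IControlObserver;
import com.omnicrola.panoptes.control.TimeblockSet;
import com.omnicrola.panoptes.data.IReadTimeblock;

public class CardNumberProvider implements IOptionProvider, IControlObserver {

    private final DataController dataController;
    private final Set<Object> options = new LinkedHashSet<>();

    public CardNumberProvider(DataController dataController) {
        this.dataController = dataController;
    }

    public List<Object> getOptionsList() {
        return new ArrayList<>(this.options);
    }

    public void dataChanged() {
        rebuildOptions();
    }

    public void timeblockSetChanged(TimeblockSet timeblockSet) {
        rebuildOptions();
    }

    // Collect the distinct card numbers of all known timeblocks (duplicates collapse in the set).
    private void rebuildOptions() {
        this.options.clear();
        for (IReadTimeblock block : this.dataController.getAllTimeblocks().getBlockSet()) {
            this.options.add(block.getTimeData().getCard());
        }
    }
}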
package com.rideon.web.security;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.DefaultHttpClient;
import org.springframework.web.client.RestTemplate;
public class ClientAuthenticator {
public ClientAuthenticator() {
super();
}
// API
public static void setAuthentication(final RestTemplate restTemplate, final String username, final String password) {
basicAuth(restTemplate, username, password);
}
private static void basicAuth(final RestTemplate restTemplate, final String username, final String password) {
final HttpComponentsClientHttpRequestFactoryBasicAuth requestFactory =
((HttpComponentsClientHttpRequestFactoryBasicAuth) restTemplate.getRequestFactory());
DefaultHttpClient httpClient = (DefaultHttpClient) requestFactory.getHttpClient();
CredentialsProvider prov = httpClient.getCredentialsProvider();
prov.setCredentials(requestFactory.getAuthScope(), new UsernamePasswordCredentials(username, password));
}
}
| vilasmaciel/rideon | rideon-client/src/main/java/com/rideon/web/security/ClientAuthenticator.java | Java | gpl-2.0 | 1,128 |
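This file depends on HttpComponentsClientHttpRequestFactoryBasicAuth, which is not part of this row. One plausible shape for that companion class, sketched only from the two calls made above (getHttpClient() is inherited, getAuthScope() is custom), is shown below; the constructor and field are assumptions.

package com.rideon.web.security;

import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;

public class HttpComponentsClientHttpRequestFactoryBasicAuth extends HttpComponentsClientHttpRequestFactory {

    private final HttpHost host;

    public HttpComponentsClientHttpRequestFactoryBasicAuth(HttpHost host) {
        this.host = host;
    }

    // Scope used by ClientAuthenticator when registering the username/password credentials.
    public AuthScope getAuthScope() {
        return new AuthScope(host.getHostName(), host.getPort());
    }
}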
/*
* Copyright 2006-2020 The MZmine Development Team
*
* This file is part of MZmine.
*
* MZmine is free software; you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* MZmine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
* Public License for more details.
*
* You should have received a copy of the GNU General Public License along with MZmine; if not,
* write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package io.github.mzmine.parameters.parametertypes.ranges;
import java.text.NumberFormat;
import java.util.Collection;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import com.google.common.collect.Range;
import io.github.mzmine.parameters.UserParameter;
public class DoubleRangeParameter implements UserParameter<Range<Double>, DoubleRangeComponent> {
private final String name, description;
protected final boolean valueRequired;
private final boolean nonEmptyRequired;
private NumberFormat format;
private Range<Double> value;
private Range<Double> maxAllowedRange;
public DoubleRangeParameter(String name, String description, NumberFormat format) {
this(name, description, format, true, false, null);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
Range<Double> defaultValue) {
this(name, description, format, true, false, defaultValue);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
boolean valueRequired, Range<Double> defaultValue) {
this(name, description, format, valueRequired, false, defaultValue);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
boolean valueRequired, boolean nonEmptyRequired, Range<Double> defaultValue) {
this(name, description, format, valueRequired, nonEmptyRequired, defaultValue, null);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
boolean valueRequired, boolean nonEmptyRequired, Range<Double> defaultValue, Range<Double> maxAllowedRange) {
this.name = name;
this.description = description;
this.format = format;
this.valueRequired = valueRequired;
this.nonEmptyRequired = nonEmptyRequired;
this.value = defaultValue;
this.maxAllowedRange = maxAllowedRange;
}
/**
* @see io.github.mzmine.data.Parameter#getName()
*/
@Override
public String getName() {
return name;
}
/**
* @see io.github.mzmine.data.Parameter#getDescription()
*/
@Override
public String getDescription() {
return description;
}
public boolean isValueRequired() {
return valueRequired;
}
@Override
public DoubleRangeComponent createEditingComponent() {
return new DoubleRangeComponent(format);
}
public Range<Double> getValue() {
return value;
}
@Override
public void setValue(Range<Double> value) {
this.value = value;
}
@Override
public DoubleRangeParameter cloneParameter() {
DoubleRangeParameter copy = new DoubleRangeParameter(name, description, format);
copy.setValue(this.getValue());
return copy;
}
@Override
public void setValueFromComponent(DoubleRangeComponent component) {
value = component.getValue();
}
@Override
public void setValueToComponent(DoubleRangeComponent component, Range<Double> newValue) {
component.setValue(newValue);
}
@Override
public void loadValueFromXML(Element xmlElement) {
NodeList minNodes = xmlElement.getElementsByTagName("min");
if (minNodes.getLength() != 1)
return;
NodeList maxNodes = xmlElement.getElementsByTagName("max");
if (maxNodes.getLength() != 1)
return;
String minText = minNodes.item(0).getTextContent();
String maxText = maxNodes.item(0).getTextContent();
double min = Double.valueOf(minText);
double max = Double.valueOf(maxText);
value = Range.closed(min, max);
}
@Override
public void saveValueToXML(Element xmlElement) {
if (value == null)
return;
Document parentDocument = xmlElement.getOwnerDocument();
Element newElement = parentDocument.createElement("min");
newElement.setTextContent(String.valueOf(value.lowerEndpoint()));
xmlElement.appendChild(newElement);
newElement = parentDocument.createElement("max");
newElement.setTextContent(String.valueOf(value.upperEndpoint()));
xmlElement.appendChild(newElement);
}
@Override
public boolean checkValue(Collection<String> errorMessages) {
if (valueRequired && (value == null)) {
errorMessages.add(name + " is not set properly");
return false;
}
if (value != null) {
if (!nonEmptyRequired && value.lowerEndpoint() > value.upperEndpoint()) {
errorMessages.add(name + " range maximum must be higher than minimum, or equal");
return false;
}
if (nonEmptyRequired && value.lowerEndpoint() >= value.upperEndpoint()) {
errorMessages.add(name + " range maximum must be higher than minimum");
return false;
}
}
if (value != null && maxAllowedRange != null) {
if (maxAllowedRange.intersection(value) != value) {
errorMessages.add(name + " must be within " + maxAllowedRange.toString());
return false;
}
}
return true;
}
}
| tomas-pluskal/mzmine3 | src/main/java/io/github/mzmine/parameters/parametertypes/ranges/DoubleRangeParameter.java | Java | gpl-2.0 | 5,669 |
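A brief usage sketch of this parameter type; the parameter name, ranges, and example class are illustrative values, not taken from MZmine code.

import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;

import com.google.common.collect.Range;

import io.github.mzmine.parameters.parametertypes.ranges.DoubleRangeParameter;

public class DoubleRangeParameterExample {
    public static void main(String[] args) {
        NumberFormat format = NumberFormat.getNumberInstance();
        // Required, possibly-empty range with default [100, 500] and a hard limit of [0, 1000].
        DoubleRangeParameter mzRange = new DoubleRangeParameter(
                "m/z range", "Range of m/z values to process", format,
                true, false, Range.closed(100.0, 500.0), Range.closed(0.0, 1000.0));

        List<String> errors = new ArrayList<>();
        mzRange.setValue(Range.closed(-50.0, 200.0)); // lower bound is below the allowed minimum
        System.out.println(mzRange.checkValue(errors)); // false
        System.out.println(errors); // explains that the value must be within the allowed range
    }
}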
/*
* RapidMiner
*
* Copyright (C) 2001-2008 by Rapid-I and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapid-i.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.gui.tools;
import java.awt.Component;
import java.awt.Dialog;
import java.awt.Frame;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
/**
* Some utils for the creation of a modal progress monitor dialog.
*
* @author Santhosh Kumar, Ingo Mierswa
* @version $Id: ProgressUtils.java,v 1.3 2008/05/09 19:22:59 ingomierswa Exp $
*/
public class ProgressUtils {
static class MonitorListener implements ChangeListener, ActionListener {
private ProgressMonitor monitor;
private Window owner;
private Timer timer;
private boolean modal;
public MonitorListener(Window owner, ProgressMonitor monitor, boolean modal) {
this.owner = owner;
this.monitor = monitor;
this.modal = modal;
}
public void stateChanged(ChangeEvent ce) {
ProgressMonitor monitor = (ProgressMonitor) ce.getSource();
if (monitor.getCurrent() != monitor.getTotal()) {
if (timer == null) {
timer = new Timer(monitor.getWaitingTime(), this);
timer.setRepeats(false);
timer.start();
}
} else {
if (timer != null && timer.isRunning())
timer.stop();
monitor.removeChangeListener(this);
}
}
public void actionPerformed(ActionEvent e) {
monitor.removeChangeListener(this);
ProgressDialog dlg = owner instanceof Frame ? new ProgressDialog((Frame) owner, monitor, modal) : new ProgressDialog((Dialog) owner, monitor, modal);
dlg.pack();
dlg.setLocationRelativeTo(null);
dlg.setVisible(true);
}
}
/** Create a new (modal) progress monitor dialog. Please note that the value for total (the maximum
* number of possible steps) must be greater than 0 even for indeterminate progress. The value
* of waitingTimeBeforeDialogAppears is used before the dialog is actually created and shown. */
public static ProgressMonitor createModalProgressMonitor(Component owner, int total, boolean indeterminate, int waitingTimeBeforeDialogAppears, boolean modal) {
ProgressMonitor monitor = new ProgressMonitor(total, indeterminate, waitingTimeBeforeDialogAppears);
Window window = owner instanceof Window ? (Window) owner : SwingUtilities.getWindowAncestor(owner);
monitor.addChangeListener(new MonitorListener(window, monitor, modal));
return monitor;
}
}
| ntj/ComplexRapidMiner | src/com/rapidminer/gui/tools/ProgressUtils.java | Java | gpl-2.0 | 3,354 |
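A hedged usage sketch. The ProgressMonitor type referenced here is RapidMiner's own class, whose mutator methods are not shown in this row, so the progress-update call below is an assumption and is marked as such in the comments; the parent component and work method are placeholders.

import java.awt.Component;

import com.rapidminer.gui.tools.ProgressMonitor;
import com.rapidminer.gui.tools.ProgressUtils;

public class ProgressUtilsExample {
    // parent is any visible Swing component; the dialog only appears after 500 ms if work is still running.
    static void runWithProgress(Component parent) {
        ProgressMonitor monitor = ProgressUtils.createModalProgressMonitor(
                parent, /* total */ 100, /* indeterminate */ false,
                /* waitingTimeBeforeDialogAppears */ 500, /* modal */ true);
        for (int step = 1; step <= 100; step++) {
            // ... do one chunk of the real work here ...
            // Assumption: RapidMiner's ProgressMonitor exposes a setter such as setCurrent(message, current);
            // that class is not shown in this row, so treat this call as illustrative only.
            monitor.setCurrent("step " + step, step);
        }
    }
}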
package org.mo.jfa.face.database.connector;
import org.mo.web.core.container.AContainer;
import org.mo.web.core.webform.IFormPage;
import org.mo.web.protocol.context.IWebContext;
public interface IConnectorAction
{
String catalog(IWebContext context,
@AContainer(name = IFormPage.Page) FConnectorPage page);
String list(IWebContext context,
@AContainer(name = IFormPage.Page) FConnectorPage page);
String sort(IWebContext context,
@AContainer(name = IFormPage.Page) FConnectorPage page);
String insert(IWebContext context,
@AContainer(name = IFormPage.Page) FConnectorPage page);
String update(IWebContext context,
@AContainer(name = IFormPage.Page) FConnectorPage page);
String delete(IWebContext context,
@AContainer(name = IFormPage.Page) FConnectorPage page);
}
| favedit/MoPlatform | mo-4-web/src/web-face/org/mo/jfa/face/database/connector/IConnectorAction.java | Java | gpl-2.0 | 890 |
/*
* *************************************************************************************
* Copyright (C) 2008 EsperTech, Inc. All rights reserved. *
* http://esper.codehaus.org *
* http://www.espertech.com *
* ---------------------------------------------------------------------------------- *
* The software in this package is published under the terms of the GPL license *
* a copy of which has been included with this distribution in the license.txt file. *
* *************************************************************************************
*/
package com.espertech.esper.epl.join.rep;
import com.espertech.esper.client.EventBean;
import com.espertech.esper.support.epl.join.SupportJoinResultNodeFactory;
import com.espertech.esper.support.event.SupportEventBeanFactory;
import java.util.*;
import junit.framework.TestCase;
public class TestRepositoryImpl extends TestCase
{
private EventBean s0Event;
private RepositoryImpl repository;
public void setUp()
{
s0Event = SupportEventBeanFactory.createObject(new Object());
repository = new RepositoryImpl(0, s0Event, 6);
}
public void testGetCursors()
{
// get cursor for root stream lookup
Iterator<Cursor> it = repository.getCursors(0);
assertTrue(it.hasNext());
Cursor cursor = it.next();
assertSame(s0Event, cursor.getTheEvent());
assertSame(0, cursor.getStream());
assertFalse(it.hasNext());
tryIteratorEmpty(it);
// try invalid get cursor for no results
try
{
repository.getCursors(2);
fail();
}
catch (NullPointerException ex)
{
// expected
}
}
public void testAddResult()
{
Set<EventBean> results = SupportJoinResultNodeFactory.makeEventSet(2);
repository.addResult(repository.getCursors(0).next(), results, 1);
assertEquals(1, repository.getNodesPerStream()[1].size());
try
{
repository.addResult(repository.getCursors(0).next(), new HashSet<EventBean>(), 1);
fail();
}
catch (IllegalArgumentException ex)
{
// expected
}
try
{
repository.addResult(repository.getCursors(0).next(), null, 1);
fail();
}
catch (NullPointerException ex)
{
// expected
}
}
public void testFlow()
{
// Lookup from s0
Cursor cursors[] = read(repository.getCursors(0));
assertEquals(1, cursors.length);
Set<EventBean> resultsS1 = SupportJoinResultNodeFactory.makeEventSet(2);
repository.addResult(cursors[0], resultsS1, 1);
// Lookup from s1
cursors = read(repository.getCursors(1));
assertEquals(2, cursors.length);
Set<EventBean> resultsS2[] = SupportJoinResultNodeFactory.makeEventSets(new int[] {2, 3});
repository.addResult(cursors[0], resultsS2[0], 2);
repository.addResult(cursors[1], resultsS2[1], 2);
// Lookup from s2
cursors = read(repository.getCursors(2));
assertEquals(5, cursors.length); // 2 + 3 for s2
Set<EventBean> resultsS3[] = SupportJoinResultNodeFactory.makeEventSets(new int[] {2, 1, 3, 5, 1});
repository.addResult(cursors[0], resultsS3[0], 3);
repository.addResult(cursors[1], resultsS3[1], 3);
repository.addResult(cursors[2], resultsS3[2], 3);
repository.addResult(cursors[3], resultsS3[3], 3);
repository.addResult(cursors[4], resultsS3[4], 3);
// Lookup from s3
cursors = read(repository.getCursors(3));
assertEquals(12, cursors.length);
}
private void tryIteratorEmpty(Iterator it)
{
try
{
it.next();
fail();
}
catch (NoSuchElementException ex)
{
// expected
}
}
private Cursor[] read(Iterator<Cursor> iterator)
{
List<Cursor> cursors = new ArrayList<Cursor>();
while (iterator.hasNext())
{
Cursor cursor = iterator.next();
cursors.add(cursor);
}
return cursors.toArray(new Cursor[0]);
}
}
| sungsoo/esper | esper/src/test/java/com/espertech/esper/epl/join/rep/TestRepositoryImpl.java | Java | gpl-2.0 | 4,571 |
package doodle;
import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Comparator;
import java.util.Date;
/**
* A poll is made up of two or more time slots, which are voted on by poll
* invitees. A time slot is defined by a start datetime and optionally an end
* datetime.
*
* @author Jonas Michel
*
*/
public class TimeSlot implements Serializable {
private static final long serialVersionUID = -8690469227753138784L;
/** The time slot's start time. */
private Date start = null;
/** The time slot's end time (optional). */
private Date end = null;
public TimeSlot(Date start, Date end) {
this.start = start;
this.end = end;
}
public TimeSlot(Date start) {
this.start = start;
}
public TimeSlot(Date day, String timeStr) throws NumberFormatException {
if (timeStr.contains("-"))
initDoubleTime(day, timeStr);
else
initSingleTime(day, timeStr);
}
public Date getStart() {
return start;
}
public Date getEnd() {
return end;
}
private void initSingleTime(Date day, String timeStr)
throws NumberFormatException {
start = parseTimeString(day, timeStr);
}
private void initDoubleTime(Date day, String timeStr)
throws NumberFormatException {
String[] timeStrArr = timeStr.split("-");
start = parseTimeString(day, timeStrArr[0]);
end = parseTimeString(day, timeStrArr[1]);
}
private Date parseTimeString(Date day, String timeStr)
throws NumberFormatException {
int hour = 0, minute = 0;
if (timeStr.contains(":")) {
hour = Integer.parseInt(timeStr.split(":")[0]);
minute = Integer.parseInt(timeStr.split(":")[1]);
} else {
hour = Integer.parseInt(timeStr);
}
Calendar cal = Calendar.getInstance();
cal.setTime(day);
cal.add(Calendar.HOUR_OF_DAY, hour);
cal.add(Calendar.MINUTE, minute);
return cal.getTime();
}
public String toDayString() {
SimpleDateFormat day = new SimpleDateFormat("MM/dd/yyyy");
return day.format(start);
}
public String toTimeString() {
SimpleDateFormat time = new SimpleDateFormat("HH:mm");
StringBuilder sb = new StringBuilder();
sb.append(time.format(start));
if (end == null)
return sb.toString();
sb.append("-" + time.format(end));
return sb.toString();
}
@Override
public String toString() {
return toDayString() + " " + toTimeString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((end == null) ? 0 : end.hashCode());
result = prime * result + ((start == null) ? 0 : start.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (!(obj instanceof TimeSlot))
return false;
TimeSlot other = (TimeSlot) obj;
if (end == null) {
if (other.end != null)
return false;
} else if (!end.equals(other.end))
return false;
if (start == null) {
if (other.start != null)
return false;
} else if (!start.equals(other.start))
return false;
return true;
}
public static class TimeSlotComparator implements Comparator<TimeSlot> {
@Override
public int compare(TimeSlot ts1, TimeSlot ts2) {
if (ts1.getStart().before(ts2.getStart()))
return -1;
else if (ts1.getStart().after(ts2.getStart()))
return 1;
else
return 0;
}
}
}
| jonasrmichel/jms-doodle-poll | joram-jms/samples/src/joram/doodle/TimeSlot.java | Java | gpl-2.0 | 3,355 |
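A short usage sketch of the parsing and sorting behaviour described in the class comment; the dates and times below are made-up example values.

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.List;

import doodle.TimeSlot;

public class TimeSlotExample {
    public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        cal.set(2013, Calendar.MARCH, 14, 0, 0, 0); // midnight of the poll day
        cal.set(Calendar.MILLISECOND, 0);
        Date day = cal.getTime();

        TimeSlot evening = new TimeSlot(day, "18:30-20:00"); // start and end time
        TimeSlot morning = new TimeSlot(day, "9");           // start time only, 09:00

        List<TimeSlot> slots = new ArrayList<>();
        slots.add(evening);
        slots.add(morning);
        Collections.sort(slots, new TimeSlot.TimeSlotComparator()); // earliest start first

        System.out.println(slots.get(0)); // e.g. "03/14/2013 09:00"
        System.out.println(slots.get(1)); // e.g. "03/14/2013 18:30-20:00"
    }
}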
package tim.prune.threedee;
import java.awt.event.InputEvent;
/*
* Copyright (c) 2007 Sun Microsystems, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistribution of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
* NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
* USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
* ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
* CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
* REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
* INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed, licensed or
* intended for use in the design, construction, operation or
* maintenance of any nuclear facility.
*
* Copyright (c) 2021 ActivityWorkshop simplifications and renamings,
* and restriction to upright orientations.
*/
import java.awt.event.MouseEvent;
import java.awt.AWTEvent;
import javax.media.j3d.Transform3D;
import javax.media.j3d.Canvas3D;
import javax.vecmath.Vector3d;
import javax.vecmath.Point3d;
import javax.vecmath.Matrix3d;
import com.sun.j3d.utils.behaviors.vp.ViewPlatformAWTBehavior;
import com.sun.j3d.utils.universe.ViewingPlatform;
/**
* Moves the View around a point of interest when the mouse is dragged with
* a mouse button pressed. Includes rotation, zoom, and translation
* actions. Zooming can also be performed with the mouse wheel.
* <p>
* The rotate action rotates the ViewPlatform around the point of interest
* when the mouse is moved with the main mouse button pressed. The
* rotation is in the direction of the mouse movement, with a default
* rotation of 0.01 radians for each pixel of mouse movement.
* <p>
* The zoom action moves the ViewPlatform closer to or further from the
* point of interest when the mouse is moved with the middle mouse button
* pressed (or Alt-main mouse button on systems without a middle mouse button).
* The default zoom action is to translate the ViewPlatform 0.01 units for each
* pixel of mouse movement. Moving the mouse up moves the ViewPlatform closer,
* moving the mouse down moves the ViewPlatform further away.
* <p>
* The translate action translates the ViewPlatform when the mouse is moved
* with the right mouse button pressed. The translation is in the direction
* of the mouse movement, with a default translation of 0.01 units for each
* pixel of mouse movement.
* <p>
* The actions can be reversed using the <code>REVERSE_</code><i>ACTION</i>
* constructor flags. The default action moves the ViewPlatform around the
* objects in the scene. The <code>REVERSE_</code><i>ACTION</i> flags can
* make the objects in the scene appear to be moving in the direction
* of the mouse movement.
*/
public class UprightOrbiter extends ViewPlatformAWTBehavior
{
private Transform3D _longitudeTransform = new Transform3D();
private Transform3D _latitudeTransform = new Transform3D();
private Transform3D _rotateTransform = new Transform3D();
// needed for integrateTransforms but don't want to new every time
private Transform3D _temp1 = new Transform3D();
private Transform3D _temp2 = new Transform3D();
private Transform3D _translation = new Transform3D();
private Vector3d _transVector = new Vector3d();
private Vector3d _distanceVector = new Vector3d();
private Vector3d _centerVector = new Vector3d();
private Vector3d _invertCenterVector = new Vector3d();
private double _deltaYaw = 0.0, _deltaPitch = 0.0;
private double _startDistanceFromCenter = 20.0;
private double _distanceFromCenter = 20.0;
private Point3d _rotationCenter = new Point3d();
private Matrix3d _rotMatrix = new Matrix3d();
private int _mouseX = 0, _mouseY = 0;
private double _xtrans = 0.0, _ytrans = 0.0, _ztrans = 0.0;
private static final double MIN_RADIUS = 0.0;
// the factor to be applied to wheel zooming so that it does not
// look much different with mouse movement zooming.
private static final float wheelZoomFactor = 50.0f;
private static final double NOMINAL_ZOOM_FACTOR = .01;
private static final double NOMINAL_ROT_FACTOR = .008;
private static final double NOMINAL_TRANS_FACTOR = .003;
private double _pitchAngle = 0.0;
/**
* Creates a new UprightOrbiter behaviour
* @param inCanvas The Canvas3D to add the behaviour to
* @param inInitialPitch pitch angle in degrees
*/
public UprightOrbiter(Canvas3D inCanvas, double inInitialPitch)
{
super(inCanvas, MOUSE_LISTENER | MOUSE_MOTION_LISTENER | MOUSE_WHEEL_LISTENER );
_pitchAngle = Math.toRadians(inInitialPitch);
}
protected synchronized void processAWTEvents( final AWTEvent[] events )
{
motion = false;
for (AWTEvent event : events) {
if (event instanceof MouseEvent) {
processMouseEvent((MouseEvent) event);
}
}
}
protected void processMouseEvent( final MouseEvent evt )
{
if (evt.getID() == MouseEvent.MOUSE_PRESSED) {
_mouseX = evt.getX();
_mouseY = evt.getY();
motion = true;
}
else if (evt.getID() == MouseEvent.MOUSE_DRAGGED)
{
int xchange = evt.getX() - _mouseX;
int ychange = evt.getY() - _mouseY;
// rotate
if (isRotateEvent(evt))
{
_deltaYaw -= xchange * NOMINAL_ROT_FACTOR;
_deltaPitch -= ychange * NOMINAL_ROT_FACTOR;
}
// translate
else if (isTranslateEvent(evt))
{
_xtrans -= xchange * NOMINAL_TRANS_FACTOR;
_ytrans += ychange * NOMINAL_TRANS_FACTOR;
}
// zoom
else if (isZoomEvent(evt)) {
doZoomOperations( ychange );
}
_mouseX = evt.getX();
_mouseY = evt.getY();
motion = true;
}
else if (evt.getID() == MouseEvent.MOUSE_WHEEL )
{
if (isZoomEvent(evt))
{
// if zooming is done through mouse wheel, the number of wheel increments
// is multiplied by the wheelZoomFactor, to make zoom speed look natural
if ( evt instanceof java.awt.event.MouseWheelEvent)
{
int zoom = ((int)(((java.awt.event.MouseWheelEvent)evt).getWheelRotation()
* wheelZoomFactor));
doZoomOperations( zoom );
motion = true;
}
}
}
}
/*
* zoom but stop at MIN_RADIUS
*/
private void doZoomOperations( int ychange )
{
if ((_distanceFromCenter - ychange * NOMINAL_ZOOM_FACTOR) > MIN_RADIUS) {
_distanceFromCenter -= ychange * NOMINAL_ZOOM_FACTOR;
}
else {
_distanceFromCenter = MIN_RADIUS;
}
}
/**
* Sets the ViewingPlatform for this behaviour. This method is
* called by the ViewingPlatform.
* If a subclass overrides this method, it must call
* super.setViewingPlatform(vp).
* NOTE: Applications should <i>not</i> call this method.
*/
@Override
public void setViewingPlatform(ViewingPlatform vp)
{
super.setViewingPlatform( vp );
if (vp != null) {
resetView();
integrateTransforms();
}
}
/**
* Reset the orientation and distance of this behaviour to the current
* values in the ViewPlatform Transform Group
*/
private void resetView()
{
Vector3d centerToView = new Vector3d();
targetTG.getTransform( targetTransform );
targetTransform.get( _rotMatrix, _transVector );
centerToView.sub( _transVector, _rotationCenter );
_distanceFromCenter = centerToView.length();
_startDistanceFromCenter = _distanceFromCenter;
targetTransform.get( _rotMatrix );
_rotateTransform.set( _rotMatrix );
// compute the initial x/y/z offset
_temp1.set(centerToView);
_rotateTransform.invert();
_rotateTransform.mul(_temp1);
_rotateTransform.get(centerToView);
_xtrans = centerToView.x;
_ytrans = centerToView.y;
_ztrans = centerToView.z;
// reset rotMatrix
_rotateTransform.set( _rotMatrix );
}
protected synchronized void integrateTransforms()
{
// Check if the transform has been changed by another behaviour
Transform3D currentXfm = new Transform3D();
targetTG.getTransform(currentXfm);
if (! targetTransform.equals(currentXfm))
resetView();
// Three-step rotation process, firstly undo the pitch and apply the delta yaw
_latitudeTransform.rotX(_pitchAngle);
_rotateTransform.mul(_rotateTransform, _latitudeTransform);
_longitudeTransform.rotY( _deltaYaw );
_rotateTransform.mul(_rotateTransform, _longitudeTransform);
// Now update pitch angle according to delta and apply
_pitchAngle = Math.min(Math.max(0.0, _pitchAngle - _deltaPitch), Math.PI/2.0);
_latitudeTransform.rotX(-_pitchAngle);
_rotateTransform.mul(_rotateTransform, _latitudeTransform);
_distanceVector.z = _distanceFromCenter - _startDistanceFromCenter;
_temp1.set(_distanceVector);
_temp1.mul(_rotateTransform, _temp1);
// want to look at rotationCenter
_transVector.x = _rotationCenter.x + _xtrans;
_transVector.y = _rotationCenter.y + _ytrans;
_transVector.z = _rotationCenter.z + _ztrans;
_translation.set(_transVector);
targetTransform.mul(_temp1, _translation);
// handle rotationCenter
_temp1.set(_centerVector);
_temp1.mul(targetTransform);
_invertCenterVector.x = -_centerVector.x;
_invertCenterVector.y = -_centerVector.y;
_invertCenterVector.z = -_centerVector.z;
_temp2.set(_invertCenterVector);
targetTransform.mul(_temp1, _temp2);
Vector3d finalTranslation = new Vector3d();
targetTransform.get(finalTranslation);
targetTG.setTransform(targetTransform);
// reset yaw and pitch deltas
_deltaYaw = 0.0;
_deltaPitch = 0.0;
}
private boolean isRotateEvent(MouseEvent evt)
{
final boolean isRightDrag = (evt.getModifiersEx() & InputEvent.BUTTON3_DOWN_MASK) > 0;
return !evt.isAltDown() && !isRightDrag;
}
private boolean isZoomEvent(MouseEvent evt)
{
if (evt instanceof java.awt.event.MouseWheelEvent) {
return true;
}
return evt.isAltDown() && !evt.isMetaDown();
}
private boolean isTranslateEvent(MouseEvent evt)
{
final boolean isRightDrag = (evt.getModifiersEx() & InputEvent.BUTTON3_DOWN_MASK) > 0;
return !evt.isAltDown() && isRightDrag;
}
}
| activityworkshop/GpsPrune | src/tim/prune/threedee/UprightOrbiter.java | Java | gpl-2.0 | 10,939 |
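A wiring sketch, assuming a standard Java 3D SimpleUniverse setup; the scheduling bounds and the 30-degree initial pitch are arbitrary example values.

import javax.media.j3d.BoundingSphere;
import javax.media.j3d.Canvas3D;
import javax.vecmath.Point3d;

import com.sun.j3d.utils.universe.SimpleUniverse;

import tim.prune.threedee.UprightOrbiter;

public class OrbiterSetupExample {
    static void attachOrbiter(Canvas3D canvas, SimpleUniverse universe) {
        // Start with a 30-degree downward pitch; the behaviour keeps the view upright while orbiting.
        UprightOrbiter orbiter = new UprightOrbiter(canvas, 30.0);
        // Behaviours only run inside their scheduling bounds.
        orbiter.setSchedulingBounds(new BoundingSphere(new Point3d(0.0, 0.0, 0.0), 1000.0));
        universe.getViewingPlatform().setViewPlatformBehavior(orbiter);
    }
}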
/**
* Sample code for encrypting and decrypting messages sent by the Official Account Platform to an official account.
*
* @copyright Copyright (c) 1998-2014 Tencent Inc.
*/
// ------------------------------------------------------------------------
package com.weixin.sdk.encrypt;
import java.security.MessageDigest;
import java.util.Arrays;
/**
* SHA1 class
*
* Interface for computing message signatures for the Official Account Platform.
*/
class SHA1 {
/**
* Generates a secure signature using the SHA-1 algorithm
* @param token verification token
* @param timestamp timestamp
* @param nonce random string
* @param encrypt ciphertext
* @return secure signature
* @throws AesException
*/
public static String getSHA1(String token, String timestamp, String nonce, String encrypt) throws AesException
{
try {
String[] array = new String[] { token, timestamp, nonce, encrypt };
StringBuffer sb = new StringBuffer();
// Sort the strings
Arrays.sort(array);
for (int i = 0; i < 4; i++) {
sb.append(array[i]);
}
String str = sb.toString();
// Generate the SHA-1 digest
MessageDigest md = MessageDigest.getInstance("SHA-1");
md.update(str.getBytes());
byte[] digest = md.digest();
StringBuffer hexstr = new StringBuffer();
String shaHex = "";
for (int i = 0; i < digest.length; i++) {
shaHex = Integer.toHexString(digest[i] & 0xFF);
if (shaHex.length() < 2) {
hexstr.append(0);
}
hexstr.append(shaHex);
}
return hexstr.toString();
} catch (Exception e) {
e.printStackTrace();
throw new AesException(AesException.ComputeSignatureError);
}
}
}
| teabo/wholly-framework | wholly-demo/wholly_weixin/src/main/java/com/weixin/sdk/encrypt/SHA1.java | Java | gpl-2.0 | 1,592 |
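Because the class is package-private, it is called from companion classes in the same package. A hedged sketch of the verification step is below; the AesException constructor and its ValidateSignatureError constant are assumed to match the official WeChat sample, since that class is not shown in this row.

package com.weixin.sdk.encrypt;

public class SignatureCheckExample {
    // token comes from the official-account configuration; the other values arrive with the callback request.
    static void verify(String token, String timeStamp, String nonce,
                       String encryptedMsg, String msgSignature) throws AesException {
        String expected = SHA1.getSHA1(token, timeStamp, nonce, encryptedMsg);
        if (!expected.equals(msgSignature)) {
            // Assumption: the companion AesException class (not shown here) defines ValidateSignatureError,
            // as in the official sample code.
            throw new AesException(AesException.ValidateSignatureError);
        }
    }
}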
package com.rockey.emonitor.jms.controller;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.AbstractController;
import com.rockey.emonitor.jms.component.AppList;
import com.rockey.emonitor.jms.component.EmonitorContext;
import com.rockey.emonitor.jms.component.FilterList;
import com.rockey.emonitor.jms.model.LogMessage;
import com.rockey.emonitor.jms.service.MessageService;
import com.rockey.emonitor.jms.util.Base64;
import com.rockey.emonitor.jms.util.Util;
import com.rockey.emonitor.model.AppFilter;
public class MessageController extends AbstractController{
private static final Log log = LogFactory.getLog(MessageController.class);
@Autowired
private MessageService messageService;
@Autowired
private EmonitorContext runtimeContext;
@Autowired
private AppList appListComponent;
@Autowired
private FilterList filterListComponent;
private String key;
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
@Override
protected ModelAndView handleRequestInternal(HttpServletRequest request,
HttpServletResponse response) throws Exception {
request.setCharacterEncoding("utf-8");
log.info("requestURL =[ " + request.getRequestURI() + "?" + request.getQueryString() + " ]");
if (!runtimeContext.isReadyProcess()) {
log.error("EmonitorContext not init complete ! please wait...");
return null;
}
try {
List<String> appList = appListComponent.getAppList();
Map<String, List<AppFilter>> filterMap = filterListComponent.getFilterMap();
Map<String,String> params = new HashMap<String,String>();
// Log the request parameter list
@SuppressWarnings("unchecked")
Enumeration<String> names = request.getParameterNames();
if(names.hasMoreElements())
{
while (names.hasMoreElements()) {
String paramName = (String) names.nextElement();
String paramValue = request.getParameter(paramName);
// Convert all parameter names to upper case
params.put(paramName.toUpperCase(), paramValue);
log.info("Request Parameter:" + paramName + "=" + paramValue);
}
}
// Extract the message
String message = params.get("MESSAGE");
if (message!= null && !message.isEmpty()) {
message = new String(Base64.decode(message.getBytes("UTF-8")),"UTF-8");
}
log.info("client IP :" + request.getRemoteAddr() + ", message = " + message);
LogMessage logMessage = Util.createMessageFromXml(message);
// Signature check
String sign = Util.ComputeHash(logMessage, this.key);
if (logMessage.getSign().equals(sign)) {
if (!appList.isEmpty() && appList.contains(logMessage.getApplicationID())) { // check the application is registered
if (!filterMap.isEmpty() && filterMap.containsKey(logMessage.getApplicationID())) { // apply the application's filters
List<AppFilter> fiterList = filterMap.get(logMessage.getApplicationID());
for (AppFilter filter : fiterList) {
if (logMessage.getTitle().contains(filter.getContent())) {
log.info("告警标题包含过滤信息[" + filter.getContent() + "],信息将会被过滤。");
return null;
}
if (logMessage.getBody().contains(filter.getContent())) {
log.info("告警内容包含过滤信息[" + filter.getContent() + "],信息将会被过滤。");
return null;
}
}
}
messageService.sendAlertMessage(logMessage);
} else {
log.error("invalid applicationId (" + logMessage.getApplicationID() + ") ....");
}
}
} catch (Exception e) {
log.error("MessageController err", e);
}
return null;
}
}
| RockeyHoo/icheck | emonitor/src/main/java/com/rockey/emonitor/jms/controller/MessageController.java | Java | gpl-2.0 | 4,087 |
/*
* Copyright (c) 2006 - 2012 LinogistiX GmbH
*
* www.linogistix.com
*
* Project myWMS-LOS
*/
package de.linogistix.los.reference.customization.common;
import javax.ejb.Stateless;
import de.linogistix.los.customization.LOSEventConsumerBean;
import de.linogistix.los.util.event.LOSEventConsumer;
/**
* @author krane
*
*/
@Stateless
public class Ref_EventConsumerBean extends LOSEventConsumerBean implements LOSEventConsumer {
}
| Jacksson/mywmsnb | mywms.as/project-ejb/src/java/de/linogistix/los/reference/customization/common/Ref_EventConsumerBean.java | Java | gpl-2.0 | 446 |
package xyz.zyzhu.model;
import org.junit.*;
import org.junit.Assert.*;
public class TestHelloWorld
{
@Test
public void testSayHello()
{
Assert.assertEquals("hello world",new HelloWorld().sayHello());
}
} | zyz963272311/testGitHub | testMaven/src/test/java/xyz/zyzhu/model/TestHelloWorld.java | Java | gpl-2.0 | 212 |
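The class under test is not part of this row; the assertion only passes if HelloWorld looks essentially like the trivial reconstruction below.

package xyz.zyzhu.model;

public class HelloWorld {
    public String sayHello() {
        return "hello world";
    }
}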
/*
* PS3 Media Server, for streaming any medias to your PS3.
* Copyright (C) 2012 Ph.Waeber
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 2
* of the License only.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package net.pms.medialibrary.commons.dataobjects;
import net.pms.medialibrary.commons.enumarations.ThumbnailPrioType;
public class DOThumbnailPriority {
private long id;
private ThumbnailPrioType thumbnailPriorityType;
private String picturePath;
private int seekPosition;
private int priorityIndex;
public DOThumbnailPriority(){
this(-1, ThumbnailPrioType.THUMBNAIL, "", 0);
}
public DOThumbnailPriority(long id, ThumbnailPrioType thumbnailPriorityType, String picturePath, int priorityIndex){
this(id, thumbnailPriorityType, -1, picturePath, priorityIndex);
}
public DOThumbnailPriority(long id, ThumbnailPrioType thumbnailPriorityType, int seekPosition, int priorityIndex){
this(id, thumbnailPriorityType, seekPosition, "", priorityIndex);
}
public DOThumbnailPriority(long id, ThumbnailPrioType thumbnailPriorityType, int seekPosition, String picturePath, int priorityIndex){
setId(id);
setThumbnailPriorityType(thumbnailPriorityType);
setSeekPosition(seekPosition);
setPicturePath(picturePath);
setPriorityIndex(priorityIndex);
}
public void setThumbnailPriorityType(ThumbnailPrioType thumbnailPriorityType) {
this.thumbnailPriorityType = thumbnailPriorityType;
}
public ThumbnailPrioType getThumbnailPriorityType() {
return thumbnailPriorityType;
}
public void setPicturePath(String picturePath) {
this.picturePath = picturePath;
}
public String getPicturePath() {
return picturePath;
}
public void setSeekPosition(int seekPosition) {
this.seekPosition = seekPosition;
}
public int getSeekPosition() {
return seekPosition;
}
public void setPriorityIndex(int priorityIndex) {
this.priorityIndex = priorityIndex;
}
public int getPriorityIndex() {
return priorityIndex;
}
public void setId(long id) {
this.id = id;
}
public long getId() {
return id;
}
@Override
public boolean equals(Object obj){
if(!(obj instanceof DOThumbnailPriority)){
return false;
}
DOThumbnailPriority compObj = (DOThumbnailPriority) obj;
if(getId() == compObj.getId()
&& getThumbnailPriorityType() == compObj.getThumbnailPriorityType()
&& getPicturePath().equals(compObj.getPicturePath())
&& getSeekPosition() == compObj.getSeekPosition()
&& getPriorityIndex() == compObj.getPriorityIndex()){
return true;
}
return false;
}
@Override
public int hashCode(){
int hashCode = 24 + String.valueOf(getId()).hashCode();
hashCode *= 24 + getPicturePath().hashCode();
hashCode *= 24 + getSeekPosition();
hashCode *= 24 + getPriorityIndex();
return hashCode;
}
@Override
public DOThumbnailPriority clone(){
return new DOThumbnailPriority(getId(), getThumbnailPriorityType(), getSeekPosition(), getPicturePath(), getPriorityIndex());
}
@Override
public String toString(){
return String.format("id=%s, prioIndex=%s, type=%s, seekPos=%s, picPath=%s", getId(), getPriorityIndex(), getThumbnailPriorityType(), getSeekPosition(), getPicturePath());
}
}
| taconaut/pms-mlx | core/src/main/java/net/pms/medialibrary/commons/dataobjects/DOThumbnailPriority.java | Java | gpl-2.0 | 3,813 |
package scatterbox.utils;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.HashMap;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
public class ImportVanKasteren {
String dataFileName = "/Users/knoxs/Documents/datasets/kasterenDataset/kasterenSenseData.txt";
String annotationFileName = "/Users/knoxs/Documents/datasets/kasterenDataset/kasterenActData.txt";
File dataFile = new File(dataFileName);
File annotationFile = new File(annotationFileName);
BufferedReader dataFileReader;
BufferedReader annotationFileReader;
Connection conn = null;
String insertDataCommand = "insert into events (start_time, end_time, id, java_type) values (\"START\", \"END\", \"OBJECT\", \"scatterbox.event.KasterenEvent\")";
String insertAnnotationCommand = "insert into annotations (start_time, end_time, annotation) values (\"START\", \"END\", \"ANNOTATION\")";
HashMap<Integer, String> objects = new HashMap<Integer, String>();
HashMap<Integer, String> annotations = new HashMap<Integer, String>();
//String[] annotations = {"leavehouse", "usetoilet", "takeshower", "gotobed", "preparebreakfast", "preparedinner", "getdrink"};
/**
* Format of the sql timestamp. Allows easy conversion to date format
*/
final DateTimeFormatter dateTimeFormatter = DateTimeFormat
.forPattern("dd-MMM-yyyy HH:mm:ss");
public static void main(String[] args) throws FileNotFoundException {
ImportVanKasteren ivk = new ImportVanKasteren();
ivk.connectToDatabase();
ivk.dataFileReader = new BufferedReader(new InputStreamReader(
new DataInputStream(new FileInputStream(ivk.dataFileName))));
ivk.annotationFileReader = new BufferedReader(new InputStreamReader(
new DataInputStream(new FileInputStream(ivk.annotationFileName))));
ivk.setUpAnnotations();
ivk.setUpObjects();
ivk.getData();
ivk.getAnnotations();
}
private void getData() {
String line;
try {
while ((line = dataFileReader.readLine()) != null) {
String[] readingArray = line.split("\t");
DateTime startTime = dateTimeFormatter
.parseDateTime(readingArray[0]);
Timestamp startTimestamp = new Timestamp(startTime.getMillis());
DateTime endTime = dateTimeFormatter.parseDateTime(readingArray[1]);
Timestamp endTimestamp = new Timestamp(endTime.getMillis());
int id = Integer.parseInt(readingArray[2]);
// Kasteren sensor ids start at 1; the HashMap maps each id directly to its object name.
String object = objects.get(id);
insertStatement(insertDataCommand.replace("START",
startTimestamp.toString()).replace("END",
endTimestamp.toString()).replace("OBJECT", object));
}
} catch (Exception ioe) {
ioe.printStackTrace();
}
}
private void getAnnotations() {
String line;
try {
while ((line = annotationFileReader.readLine()) != null) {
String[] readingArray = line.split("\t");
DateTime startTime = dateTimeFormatter
.parseDateTime(readingArray[0]);
Timestamp startTimestamp = new Timestamp(startTime.getMillis());
DateTime endTime = dateTimeFormatter.parseDateTime(readingArray[1]);
Timestamp endTimestamp = new Timestamp(endTime.getMillis());
int id = Integer.parseInt(readingArray[2]);
// Kasteren activity ids start at 1; the HashMap maps each id directly to its annotation label.
String annotation = annotations.get(id);
insertStatement(insertAnnotationCommand.replace("START",
startTimestamp.toString()).replace("END",
endTimestamp.toString()).replace("ANNOTATION", annotation));
}
} catch (Exception ioe) {
ioe.printStackTrace();
}
}
public boolean insertStatement(String an_sql_statement) {
System.out.println(an_sql_statement);
boolean success = false;
//System.out.println(an_sql_statement);
Statement statement;
try {
statement = conn.createStatement();
if (conn != null) {
success = statement.execute(an_sql_statement);
statement.close();
} else {
System.err.println("No database connection!!!");
}
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return success;
}
public boolean connectToDatabase() {
boolean connected = false;
String userName = "root";
String password = "";
String url = "jdbc:mysql://localhost:3306/tvk";
try {
Class.forName("com.mysql.jdbc.Driver").newInstance();
conn = DriverManager.getConnection(url, userName, password);
if (conn != null) {
connected = true;
}
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
}
return connected;
}
private void setUpObjects() {
objects.put(1, "microwave");
objects.put(5, "halltoiletdoor");
objects.put(6, "hallbathroomdoor");
objects.put(7, "cupscupboard");
objects.put(8, "fridge");
objects.put(9, "platescupboard");
objects.put(12, "frontdoor");
objects.put(13, "dishwasher");
objects.put(14, "toiletflush");
objects.put(17, "freezer");
objects.put(18, "panscupboard");
objects.put(20, "washingmachine");
objects.put(23, "groceriescupboard");
objects.put(24, "hallbedroomdoor");
}
private void setUpAnnotations() {
annotations.put(1, "leavehouse");
annotations.put(4, "usetoilet");
annotations.put(5, "takeshower");
annotations.put(10, "gotobed");
annotations.put(13, "preparebreakfast");
annotations.put(15, "preparedinner");
annotations.put(17, "getdrink");
}
}
| knoxsp/Concept | src/scatterbox/utils/ImportVanKasteren.java | Java | gpl-2.0 | 6,596 |
/*
* Rasengan - Manga and Comic Downloader
* Copyright (C) 2013 Sauriel
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.sauriel.rasengan.ui;
import java.awt.BorderLayout;
import javax.swing.JDialog;
import javax.swing.JProgressBar;
import javax.swing.JLabel;
import java.util.Observable;
import java.util.Observer;
public class DownloadDialog extends JDialog implements Observer {
private static final long serialVersionUID = 4251447351437107605L;
JProgressBar progressBar;
public DownloadDialog() {
// Configure Dialog
setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE);
setAlwaysOnTop(true);
setModal(true);
setModalityType(ModalityType.MODELESS);
setResizable(false);
setTitle("Downloading: " + RasenganMainFrame.comic.getName());
setBounds(100, 100, 300, 60);
setLayout(new BorderLayout(0, 0));
// Set Content
JLabel labelDownload = new JLabel("Downloading: " + RasenganMainFrame.comic.getName());
add(labelDownload, BorderLayout.NORTH);
progressBar = new JProgressBar();
add(progressBar, BorderLayout.CENTER);
}
@Override
public void update(Observable comicService, Object imagesCount) {
int[] newImagesCount= (int[]) imagesCount;
progressBar.setMaximum(newImagesCount[1]);
progressBar.setValue(newImagesCount[0]);
if (newImagesCount[0] == newImagesCount[1]) {
dispose();
}
}
}
| Sauriel/rasengan | src/de/sauriel/rasengan/ui/DownloadDialog.java | Java | gpl-2.0 | 2,082 |
package org.openzal.zal.ldap;
import javax.annotation.Nonnull;
public class LdapServerType
{
@Nonnull
private final com.zimbra.cs.ldap.LdapServerType mLdapServerType;
public final static LdapServerType MASTER = new LdapServerType(com.zimbra.cs.ldap.LdapServerType.MASTER);
public final static LdapServerType REPLICA = new LdapServerType(com.zimbra.cs.ldap.LdapServerType.REPLICA);
public LdapServerType(@Nonnull Object ldapServerType)
{
mLdapServerType = (com.zimbra.cs.ldap.LdapServerType)ldapServerType;
}
protected <T> T toZimbra(Class<T> cls)
{
return cls.cast(mLdapServerType);
}
public boolean isMaster() {
return mLdapServerType.isMaster();
}
}
| ZeXtras/OpenZAL | src/java/org/openzal/zal/ldap/LdapServerType.java | Java | gpl-2.0 | 694 |
package org.hectordam.proyectohector;
import java.util.ArrayList;
import org.hectordam.proyectohector.R;
import org.hectordam.proyectohector.base.Bar;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesNotAvailableException;
import com.google.android.gms.common.GooglePlayServicesClient.ConnectionCallbacks;
import com.google.android.gms.common.GooglePlayServicesClient.OnConnectionFailedListener;
import com.google.android.gms.location.LocationClient;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.MapsInitializer;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import android.location.Location;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.view.Menu;
public class Mapa extends FragmentActivity implements LocationListener,ConnectionCallbacks, OnConnectionFailedListener{
private GoogleMap mapa;
private LocationClient locationClient;
private CameraUpdate camara;
private static final LocationRequest LOC_REQUEST = LocationRequest.create()
.setInterval(5000)
.setFastestInterval(16)
.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY);
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.mapa);
try {
MapsInitializer.initialize(this);
} catch (GooglePlayServicesNotAvailableException e) {
e.printStackTrace();
}
mapa = ((SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map)).getMap();
ArrayList<Bar> bares = getIntent().getParcelableArrayListExtra("bares");
if (bares != null) {
//marcarBares(bares);
}
mapa.setMyLocationEnabled(true);
configuraLocalizador();
camara = CameraUpdateFactory.newLatLng(new LatLng(41.6561, -0.8773));
mapa.moveCamera(camara);
mapa.animateCamera(CameraUpdateFactory.zoomTo(11.0f), 2000, null);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.mapa, menu);
return true;
}
/**
* Adds map markers for all the bars
* @param bares
*/
private void marcarBares(ArrayList<Bar> bares) {
if (bares.size() > 0) {
for (Bar bar : bares) {
mapa.addMarker(new MarkerOptions()
.position(new LatLng(bar.getLatitud(), bar.getLongitud()))
.title(bar.getNombre()));
}
}
}
/**
* The Activity is being shown
*/
@Override
protected void onStart() {
super.onStart();
locationClient.connect();
}
@Override
protected void onStop() {
super.onStop();
locationClient.disconnect();
}
private void configuraLocalizador() {
if (locationClient == null) {
locationClient = new LocationClient(this, this, this);
}
}
@Override
public void onConnected(Bundle arg0) {
locationClient.requestLocationUpdates(LOC_REQUEST, this);
}
@Override
public void onConnectionFailed(ConnectionResult arg0) {
}
@Override
public void onDisconnected() {
}
@Override
public void onLocationChanged(Location arg0) {
}
}
| gilmh/PracticaAndroid | AgenBar/src/org/hectordam/proyectohector/Mapa.java | Java | gpl-2.0 | 3,630 |
package com.orange.documentare.core.comp.clustering.tasksservice;
/*
* Copyright (c) 2016 Orange
*
* Authors: Christophe Maldivi & Joel Gardes
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published by
* the Free Software Foundation.
*/
import com.orange.documentare.core.comp.clustering.graph.ClusteringParameters;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.fest.assertions.Assertions;
import org.junit.After;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
@Slf4j
public class ClusteringTasksServiceTest {
private static final int NB_TASKS = 4 * 10;
private static final String CLUSTERING_TASK_FILE_PREFIX = "clustering_tasks_";
private static final String STRIPPED_CLUSTERING_JSON = "stripped_clustering.json";
private static final String NOT_STRIPPED_CLUSTERING_JSON = "not_stripped_clustering.json";
private final ClusteringTasksService tasksHandler = ClusteringTasksService.instance();
private final ClusteringParameters parameters = ClusteringParameters.builder().acut().qcut().build();
@After
public void cleanup() {
FileUtils.deleteQuietly(new File(STRIPPED_CLUSTERING_JSON));
FileUtils.deleteQuietly(new File(NOT_STRIPPED_CLUSTERING_JSON));
}
@Test
public void runSeveralDistinctTasks() throws IOException, InterruptedException {
// given
String refJson1 = FileUtils.readFileToString(new File(getClass().getResource("/clusteringtasks/latin1_clustering.ref.json").getFile()));
String refJson2 = FileUtils.readFileToString(new File(getClass().getResource("/clusteringtasks/latin2_clustering.ref.json").getFile()));
String refJson3 = FileUtils.readFileToString(new File(getClass().getResource("/clusteringtasks/latin3_clustering.ref.json").getFile()));
String refJson4 = FileUtils.readFileToString(new File(getClass().getResource("/clusteringtasks/latin4_clustering.ref.json").getFile()));
File segFile1 = new File(getClass().getResource("/clusteringtasks/latin1_segmentation.json").getFile());
File segFile2 = new File(getClass().getResource("/clusteringtasks/latin2_segmentation.json").getFile());
File segFile3 = new File(getClass().getResource("/clusteringtasks/latin3_segmentation.json").getFile());
File segFile4 = new File(getClass().getResource("/clusteringtasks/latin4_segmentation.json").getFile());
String[] outputFilenames = new String[NB_TASKS];
ClusteringTask[] clusteringTasks = new ClusteringTask[NB_TASKS];
for (int i = 0; i < NB_TASKS; i++) {
outputFilenames[i] = CLUSTERING_TASK_FILE_PREFIX + i + ".json";
}
for (int i = 0; i < NB_TASKS/4; i++) {
clusteringTasks[i * 4] = ClusteringTask.builder()
.inputFilename(segFile1.getAbsolutePath())
.outputFilename(outputFilenames[i * 4])
.clusteringParameters(parameters)
.build();
clusteringTasks[i * 4 + 1] = ClusteringTask.builder()
.inputFilename(segFile2.getAbsolutePath())
.outputFilename(outputFilenames[i * 4 + 1])
.clusteringParameters(parameters)
.build();
clusteringTasks[i * 4 + 2] = ClusteringTask.builder()
.inputFilename(segFile3.getAbsolutePath())
.outputFilename(outputFilenames[i * 4 + 2])
.clusteringParameters(parameters)
.build();
clusteringTasks[i * 4 + 3] = ClusteringTask.builder()
.inputFilename(segFile4.getAbsolutePath())
.outputFilename(outputFilenames[i * 4 + 3])
.clusteringParameters(parameters)
.build();
}
// when
for (int i = 0; i < NB_TASKS; i++) {
tasksHandler.addNewTask(clusteringTasks[i]);
Thread.sleep(200);
log.info(tasksHandler.tasksDescription().toString());
}
tasksHandler.waitForAllTasksDone();
String[] outputJsons = new String[NB_TASKS];
for (int i = 0; i < NB_TASKS; i++) {
outputJsons[i] = FileUtils.readFileToString(new File(outputFilenames[i]));
}
// then
for (int i = 0; i < NB_TASKS/4; i++) {
Assertions.assertThat(outputJsons[i * 4]).isEqualTo(refJson1);
Assertions.assertThat(outputJsons[i * 4 + 1]).isEqualTo(refJson2);
Assertions.assertThat(outputJsons[i * 4 + 2]).isEqualTo(refJson3);
Assertions.assertThat(outputJsons[i * 4 + 3]).isEqualTo(refJson4);
}
Arrays.stream(outputFilenames)
.forEach(f -> FileUtils.deleteQuietly(new File(f)));
}
@Test
public void saveStrippedOutput() throws IOException, InterruptedException {
// given
File ref = new File(getClass().getResource("/clusteringtasks/stripped_clustering.ref.json").getFile());
String jsonRef = FileUtils.readFileToString(ref);
String strippedFilename = STRIPPED_CLUSTERING_JSON;
File strippedFile = new File(strippedFilename);
strippedFile.delete();
File segFile = new File(getClass().getResource("/clusteringtasks/latin1_segmentation.json").getFile());
String outputFilename = NOT_STRIPPED_CLUSTERING_JSON;
ClusteringTask task = ClusteringTask.builder()
.inputFilename(segFile.getAbsolutePath())
.outputFilename(outputFilename)
.clusteringParameters(parameters)
.strippedOutputFilename(strippedFilename)
.build();
// when
tasksHandler.addNewTask(task);
tasksHandler.waitForAllTasksDone();
// then
String strippedJson = FileUtils.readFileToString(strippedFile);
Assertions.assertThat(strippedFile).exists();
Assertions.assertThat(strippedJson).isEqualTo(jsonRef);
}
}
| Orange-OpenSource/documentare-simdoc | simdoc/core/java/Comp/src/test/java/com/orange/documentare/core/comp/clustering/tasksservice/ClusteringTasksServiceTest.java | Java | gpl-2.0 | 5,621 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package database.parse.util;
import almonds.Parse;
import almonds.ParseObject;
import database.parse.tables.ParsePhenomena;
import database.parse.tables.ParseSensor;
import java.net.URI;
/**
*
* @author jried31
*/
public class DBGlobals {
static public String TABLE_PHENOMENA="tester";
static public String TABLE_SENSOR="Sensor";
public static String URL_GOOGLE_SEARCH="http://suggestqueries.google.com/complete/search?client=firefox&hl=en&q=WORD";
//http://clients1.google.com/complete/search?noLabels=t&client=web&q=WORD";
public static void InitializeParse(){
//App Ids for Connecting to the Parse DB
Parse.initialize("jEciFYpTp2b1XxHuIkmAs3yaP70INpkBDg9WdTl9", //Application ID
"aPEXcVv80kHwfVJK1WEKWckePkWxYNEXBovIR6d5"); //Rest API Key
}
}
| jried31/SSKB | sensite/src/main/java/database/parse/util/DBGlobals.java | Java | gpl-2.0 | 1,007 |
/* This file was generated by SableCC (http://www.sablecc.org/). */
package se.sics.kola.node;
import se.sics.kola.analysis.*;
@SuppressWarnings("nls")
public final class AClassFieldAccess extends PFieldAccess
{
private PClassName _className_;
private TIdentifier _identifier_;
public AClassFieldAccess()
{
// Constructor
}
public AClassFieldAccess(
@SuppressWarnings("hiding") PClassName _className_,
@SuppressWarnings("hiding") TIdentifier _identifier_)
{
// Constructor
setClassName(_className_);
setIdentifier(_identifier_);
}
@Override
public Object clone()
{
return new AClassFieldAccess(
cloneNode(this._className_),
cloneNode(this._identifier_));
}
@Override
public void apply(Switch sw)
{
((Analysis) sw).caseAClassFieldAccess(this);
}
public PClassName getClassName()
{
return this._className_;
}
public void setClassName(PClassName node)
{
if(this._className_ != null)
{
this._className_.parent(null);
}
if(node != null)
{
if(node.parent() != null)
{
node.parent().removeChild(node);
}
node.parent(this);
}
this._className_ = node;
}
public TIdentifier getIdentifier()
{
return this._identifier_;
}
public void setIdentifier(TIdentifier node)
{
if(this._identifier_ != null)
{
this._identifier_.parent(null);
}
if(node != null)
{
if(node.parent() != null)
{
node.parent().removeChild(node);
}
node.parent(this);
}
this._identifier_ = node;
}
@Override
public String toString()
{
return ""
+ toString(this._className_)
+ toString(this._identifier_);
}
@Override
void removeChild(@SuppressWarnings("unused") Node child)
{
// Remove child
if(this._className_ == child)
{
this._className_ = null;
return;
}
if(this._identifier_ == child)
{
this._identifier_ = null;
return;
}
throw new RuntimeException("Not a child.");
}
@Override
void replaceChild(@SuppressWarnings("unused") Node oldChild, @SuppressWarnings("unused") Node newChild)
{
// Replace child
if(this._className_ == oldChild)
{
setClassName((PClassName) newChild);
return;
}
if(this._identifier_ == oldChild)
{
setIdentifier((TIdentifier) newChild);
return;
}
throw new RuntimeException("Not a child.");
}
}
| kompics/kola | src/main/java/se/sics/kola/node/AClassFieldAccess.java | Java | gpl-2.0 | 2,887 |
package com.pingdynasty.blipbox;
import java.util.Map;
import java.util.HashMap;
import javax.sound.midi.*;
import com.pingdynasty.midi.ScaleMapper;
import org.apache.log4j.Logger;
public class MidiOutputEventHandler extends MultiModeKeyPressManager {
private static final Logger log = Logger.getLogger(MidiOutputEventHandler.class);
private static final int OCTAVE_SHIFT = 6;
private MidiPlayer midiPlayer;
private int lastNote = 0;
public class MidiConfigurationMode extends ConfigurationMode {
private ScaleMapper mapper;
private int basenote = 40;
public MidiConfigurationMode(String name, String follow){
super(name, follow);
mapper = new ScaleMapper();
mapper.setScale("Mixolydian Mode");
// setScale("Chromatic Scale");
// setScale("C Major");
// setScale("Dorian Mode");
}
public ScaleMapper getScaleMapper(){
return mapper;
}
public int getBaseNote(){
return basenote;
}
public void setBaseNote(int basenote){
this.basenote = basenote;
}
}
public ConfigurationMode createConfigurationMode(String mode, String follow){
return new MidiConfigurationMode(mode, follow);
}
public ScaleMapper getScaleMapper(){
MidiConfigurationMode mode = (MidiConfigurationMode)getCurrentConfigurationMode();
return mode.getScaleMapper();
}
public ScaleMapper getScaleMapper(String mode){
MidiConfigurationMode config = (MidiConfigurationMode)getConfigurationMode(mode);
return config.getScaleMapper();
}
public int getBaseNote(){
MidiConfigurationMode mode = (MidiConfigurationMode)getCurrentConfigurationMode();
return mode.getBaseNote();
}
public void setBaseNote(int basenote){
MidiConfigurationMode mode = (MidiConfigurationMode)getCurrentConfigurationMode();
mode.setBaseNote(basenote);
}
public void setBaseNote(String modename, int basenote){
MidiConfigurationMode mode = (MidiConfigurationMode)getConfigurationMode(modename);
mode.setBaseNote(basenote);
}
public void holdOff(){
super.holdOff();
sendMidiNoteOff(lastNote);
}
public void init(){
super.init();
setSensorEventHandler("Cross", SensorType.BUTTON2_SENSOR, new OctaveShiftUpEventHandler());
setSensorEventHandler("Cross", SensorType.BUTTON3_SENSOR, new OctaveShiftDownEventHandler());
setSensorEventHandler("Criss", SensorType.BUTTON2_SENSOR, new OctaveShiftUpEventHandler());
setSensorEventHandler("Criss", SensorType.BUTTON3_SENSOR, new OctaveShiftDownEventHandler());
}
public class OctaveShiftUpEventHandler implements SensorEventHandler {
public void sensorChange(SensorDefinition sensor){
if(sensor.value != 0){
int basenote = getBaseNote();
log.debug("octave up");
basenote += OCTAVE_SHIFT;
if(basenote + OCTAVE_SHIFT <= 127)
setBaseNote(basenote);
log.debug("new basenote "+basenote);
}
}
}
public class OctaveShiftDownEventHandler implements SensorEventHandler {
public void sensorChange(SensorDefinition sensor){
if(sensor.value != 0){
int basenote = getBaseNote();
log.debug("octave down");
basenote -= OCTAVE_SHIFT;
if(basenote > 0)
setBaseNote(basenote);
log.debug("new basenote "+basenote);
}
}
}
public class BaseNoteChangeEventHandler implements SensorEventHandler {
private int min, max;
public BaseNoteChangeEventHandler(){
this(0, 127);
}
public BaseNoteChangeEventHandler(int min, int max){
this.min = min;
this.max = max;
}
public void sensorChange(SensorDefinition sensor){
int basenote = sensor.scale(min, max);
log.debug("basenote: "+basenote);
setBaseNote(basenote);
}
}
public class ScaleChangeEventHandler implements SensorEventHandler {
public void sensorChange(SensorDefinition sensor){
ScaleMapper mapper = getScaleMapper();
int val = sensor.scale(mapper.getScaleNames().length);
if(val < mapper.getScaleNames().length){
mapper.setScale(val);
log.debug("set scale "+mapper.getScaleNames()[val]);
}
}
}
public class ControlChangeEventHandler implements SensorEventHandler {
private int from;
private int to;
private int cc;
public ControlChangeEventHandler(int cc){
this(cc, 0, 127);
}
public ControlChangeEventHandler(int cc, int from, int to){
this.from = from;
this.to = to;
this.cc = cc;
}
public void sensorChange(SensorDefinition sensor){
int val = sensor.scale(from, to);
sendMidiCC(cc, val);
}
}
public class NonRegisteredParameterEventHandler implements SensorEventHandler {
private int from;
private int to;
private int cc;
public NonRegisteredParameterEventHandler(int cc, int from, int to){
this.from = from;
this.to = to;
this.cc = cc;
}
public void sensorChange(SensorDefinition sensor){
int val = sensor.scale(from, to);
sendMidiNRPN(cc, val);
}
}
public class PitchBendEventHandler implements SensorEventHandler {
private int from;
private int to;
public PitchBendEventHandler(){
this(-8191, 8192);
}
public PitchBendEventHandler(int from, int to){
this.from = from;
this.to = to;
}
public void sensorChange(SensorDefinition sensor){
int val = sensor.scale(from, to);
sendMidiPitchBend(val);
}
}
public class NotePlayer implements KeyEventHandler {
private int lastnote;
public void sensorChange(SensorDefinition sensor){}
protected int getVelocity(int row){
// int velocity = ((row+1)*127/8);
// int velocity = (row*127/8)+1;
int velocity = ((row+1)*127/9);
return velocity;
}
public void keyDown(int col, int row){
lastNote = getScaleMapper().getNote(col+getBaseNote());
sendMidiNoteOn(lastNote, getVelocity(row));
}
public void keyUp(int col, int row){
sendMidiNoteOff(lastNote);
}
public void keyChange(int oldCol, int oldRow, int newCol, int newRow){
int newNote = getScaleMapper().getNote(newCol+getBaseNote());
if(newNote != lastNote){
sendMidiNoteOff(lastNote);
sendMidiNoteOn(newNote, getVelocity(newRow));
}
lastNote = newNote;
}
}
public MidiOutputEventHandler(BlipBox sender){
super(sender);
}
public void configureControlChange(String mode, SensorType type, int channel, int cc, int min, int max){
log.debug("Setting "+mode+":"+type+" to CC "+cc+" ("+min+"-"+max+")");
setSensorEventHandler(mode, type, new ControlChangeEventHandler(cc, min, max));
}
public void configureNRPN(String mode, SensorType type, int channel, int cc, int min, int max){
log.debug("Setting "+mode+":"+type+" to NRPN "+cc+" ("+min+"-"+max+")");
setSensorEventHandler(mode, type, new NonRegisteredParameterEventHandler(cc, min, max));
}
public void configurePitchBend(String mode, SensorType type, int channel, int min, int max){
setSensorEventHandler(mode, type, new PitchBendEventHandler(min, max));
}
public void configureBaseNoteChange(String mode, SensorType type, int min, int max){
log.debug("Setting "+mode+":"+type+" to control base note");
setSensorEventHandler(mode, type, new BaseNoteChangeEventHandler(min, max));
}
public void configureScaleChange(String mode, SensorType type){
log.debug("Setting "+mode+":"+type+" to control scale changes");
setSensorEventHandler(mode, type, new ScaleChangeEventHandler());
}
public void configureNotePlayer(String mode, boolean notes, boolean pb, boolean at){
log.debug("Setting "+mode+" mode to play notes ("+notes+") pitch bend ("+pb+") aftertouch ("+at+")");
if(notes){
setKeyEventHandler(mode, new NotePlayer());
}else{
setKeyEventHandler(mode, null);
}
// todo: honour pb and at
}
// public String[] getScaleNames(){
// return mapper.getScaleNames();
// }
// public void setScale(int index){
// mapper.setScale(index);
// }
public String getCurrentScale(){
ScaleMapper mapper = getScaleMapper();
return mapper.getScaleNames()[mapper.getScaleIndex()];
}
public void setMidiPlayer(MidiPlayer midiPlayer){
this.midiPlayer = midiPlayer;
}
public void sendMidiNoteOn(int note, int velocity){
log.debug("note on:\t "+note+"\t "+velocity);
if(note > 127 || note < 0){
log.error("MIDI note on "+note+"/"+velocity+" value out of range");
return;
}
if(velocity > 127 || velocity < 0){
log.error("MIDI note on "+note+"/"+velocity+" value out of range");
velocity = velocity < 0 ? 0 : 127;
}
try {
if(midiPlayer != null)
midiPlayer.noteOn(note, velocity);
}catch(Exception exc){
log.error(exc, exc);
}
}
public void sendMidiNoteOff(int note){
// note = mapper.getNote(note);
log.debug("note off:\t "+note);
if(note > 127 || note < 0){
log.error("MIDI note off "+note+" value out of range");
return;
}
try {
if(midiPlayer != null)
midiPlayer.noteOff(note);
}catch(Exception exc){
log.error(exc, exc);
}
}
public void sendMidiNRPN(int parameter, int value){
// log.debug("nrpn ("+parameter+") :\t "+value);
sendMidiCC(99, 3);
sendMidiCC(98, parameter & 0x7f); // NRPN LSB
sendMidiCC(6, value);
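        // The commented-out sequence below is the standard 14-bit NRPN form
        // (CC 99 = parameter MSB, CC 98 = parameter LSB, CC 6/38 = data entry MSB/LSB);
        // the active code above sends a shortened 7-bit variant.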
// sendMidiCC(99, parameter >> 7); // NRPN MSB
// sendMidiCC(98, parameter & 0x7f); // NRPN LSB
// sendMidiCC(6, value >> 7); // Data Entry MSB
// if((value & 0x7f) != 0)
// sendMidiCC(38, value & 0x7f); // Data Entry LSB
}
public void sendMidiCC(int cc, int value){
// log.debug("midi cc:\t "+cc+"\t "+value);
if(value > 127 || value < 0){
log.error("MIDI CC "+cc+" value out of range: "+value);
return;
}
try {
if(midiPlayer != null)
midiPlayer.controlChange(cc, value);
}catch(Exception exc){
log.error(exc, exc);
}
}
public void sendMidiPitchBend(int degree){
// send midi pitch bend in the range -8192 to 8191 inclusive
if(degree < -8192 || degree > 8191){
log.error("MIDI pitch bend value out of range: "+degree);
return;
}
// setPitchBend() expects a value in the range 0 to 16383
degree += 8192;
try {
if(midiPlayer != null)
midiPlayer.pitchBend(degree);
}catch(Exception exc){
log.error(exc, exc);
}
}
public void setChannel(int channel){
midiPlayer.setChannel(channel);
}
public class SensitiveNotePlayer implements KeyEventHandler {
private int lastnote;
private int velocity;
// todo : velocity could be set by row rather than sensor position
public void sensorChange(SensorDefinition sensor){
velocity = sensor.scale(127);
}
public void keyDown(int col, int row){
lastNote = getScaleMapper().getNote(col+getBaseNote());
sendMidiNoteOn(lastNote, velocity);
}
public void keyUp(int col, int row){
sendMidiNoteOff(lastNote);
}
public void keyChange(int oldCol, int oldRow, int newCol, int newRow){
int newNote = getScaleMapper().getNote(newCol+getBaseNote());
if(newNote != lastNote){
sendMidiNoteOff(lastNote);
sendMidiNoteOn(newNote, velocity);
// }else{
// // todo: aftertouch, bend
}
lastNote = newNote;
}
}
}
| pingdynasty/BlipBox | src/com/pingdynasty/blipbox/MidiOutputEventHandler.java | Java | gpl-2.0 | 12,949 |
/*
* Copyright 2004-2007 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
/*
* @test
* @bug 4853450
* @summary EnumType tests
* @library ../../lib
* @compile -source 1.5 EnumTyp.java
* @run main EnumTyp
*/
import java.util.*;
import com.sun.mirror.declaration.*;
import com.sun.mirror.type.*;
import com.sun.mirror.util.*;
public class EnumTyp extends Tester {
public static void main(String[] args) {
(new EnumTyp()).run();
}
// Declarations used by tests
enum Suit {
CIVIL,
CRIMINAL
}
private Suit s;
private EnumType e; // an enum type
protected void init() {
e = (EnumType) getField("s").getType();
}
// TypeMirror methods
@Test(result="enum")
Collection<String> accept() {
final Collection<String> res = new ArrayList<String>();
e.accept(new SimpleTypeVisitor() {
public void visitTypeMirror(TypeMirror t) {
res.add("type");
}
public void visitReferenceType(ReferenceType t) {
res.add("ref type");
}
public void visitClassType(ClassType t) {
res.add("class");
}
public void visitEnumType(EnumType t) {
res.add("enum");
}
public void visitInterfaceType(InterfaceType t) {
res.add("interface");
}
});
return res;
}
// EnumType method
@Test(result="EnumTyp.Suit")
EnumDeclaration getDeclaration() {
return e.getDeclaration();
}
}
| unktomi/form-follows-function | mjavac/langtools/test/tools/apt/mirror/type/EnumTyp.java | Java | gpl-2.0 | 2,590 |
package org.zanata.dao;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.AutoCreate;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.zanata.model.HAccount;
import org.zanata.model.HAccountRole;
import org.zanata.model.HProject;
@Name("accountRoleDAO")
@AutoCreate
@Scope(ScopeType.STATELESS)
public class AccountRoleDAO extends AbstractDAOImpl<HAccountRole, Integer> {
public AccountRoleDAO() {
super(HAccountRole.class);
}
public AccountRoleDAO(Session session) {
super(HAccountRole.class, session);
}
public boolean roleExists(String role) {
return findByName(role) != null;
}
public HAccountRole findByName(String roleName) {
Criteria cr = getSession().createCriteria(HAccountRole.class);
cr.add(Restrictions.eq("name", roleName));
cr.setCacheable(true).setComment("AccountRoleDAO.findByName");
return (HAccountRole) cr.uniqueResult();
}
public HAccountRole create(String roleName, HAccountRole.RoleType type,
String... includesRoles) {
HAccountRole role = new HAccountRole();
role.setName(roleName);
role.setRoleType(type);
for (String includeRole : includesRoles) {
Set<HAccountRole> groups = role.getGroups();
if (groups == null) {
groups = new HashSet<HAccountRole>();
role.setGroups(groups);
}
groups.add(findByName(includeRole));
}
makePersistent(role);
return role;
}
public HAccountRole updateIncludeRoles(String roleName,
String... includesRoles) {
HAccountRole role = findByName(roleName);
for (String includeRole : includesRoles) {
Set<HAccountRole> groups = role.getGroups();
if (groups == null) {
groups = new HashSet<HAccountRole>();
role.setGroups(groups);
}
groups.add(findByName(includeRole));
}
makePersistent(role);
return role;
}
public List<HAccount> listMembers(String roleName) {
HAccountRole role = findByName(roleName);
return listMembers(role);
}
@SuppressWarnings("unchecked")
public List<HAccount> listMembers(HAccountRole role) {
Query query =
getSession()
.createQuery(
"from HAccount account where :role member of account.roles");
query.setParameter("role", role);
query.setComment("AccountRoleDAO.listMembers");
return query.list();
}
public Collection<HAccountRole> getByProject(HProject project) {
return getSession()
.createQuery(
"select p.allowedRoles from HProject p where p = :project")
.setParameter("project", project)
.setComment("AccountRoleDAO.getByProject").list();
}
}
| itsazzad/zanata-server | zanata-war/src/main/java/org/zanata/dao/AccountRoleDAO.java | Java | gpl-2.0 | 3,235 |
package imageresizerforandroid;
import java.awt.image.BufferedImage;
import java.io.File;
import javax.swing.ImageIcon;
/**
*
* @author fonter
*/
public class ImageContainer {
private final BufferedImage image;
private ImageIcon cache;
private final File file;
public ImageContainer (BufferedImage image, File file) {
this.image = image;
this.file = file;
}
public String getNormalizeName () {
String name = file.getName();
int pos = name.lastIndexOf(".");
if (pos > 0) {
return name.substring(0, pos);
}
return name;
}
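    // e.g. "photo.png" -> "photo", "archive.tar.gz" -> "archive.tar"; names without
    // a '.' past the first character are returned unchanged.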
public ImageIcon getCache () {
return cache;
}
public File getFile () {
return file;
}
public BufferedImage getImage () {
return image;
}
public void setCache (ImageIcon cache) {
this.cache = cache;
}
public boolean isCacheCreate () {
return cache != null;
}
}
| pinkiesky/ImageResizerForAndroid | ImageResizerForAndroid/src/imageresizerforandroid/ImageContainer.java | Java | gpl-2.0 | 995 |
package fortesting;
import java.io.IOException;
public class RunTransformTim {
/**
 * Loads data into the appropriate tables (assumes the schema has already been created)
*
 * This will import the tables from my CSVs, which I have on Dropbox. Let me
 * (Tim) know if you need the CSVs. No guarantee that it works on CSVs
 * generated differently.
*
* @param args
* @throws IOException
*/
public static void main(String[] args) throws IOException {
// String host = "127.0.0.1";
String host = "52.32.209.104";
// String keyspace = "new";
String keyspace = "main";
String year = "2012";
		// If an error occurs during upload of the first CSV, use
		// GetLastRow.getLastEntry() to find the last npi value that was added
		// and use that as start. This will not work for the other CSVs unless
		// the last npi added is greater than the largest npi in all previously
		// loaded CSVs.
String start = "";
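		// Hypothetical resume sketch (GetLastRow's exact signature is assumed, not verified):
		// String start = GetLastRow.getLastEntry(); // npi of the last row that made it in
		// then rerun only the CSV that failed, passing that value as start.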
TransformTim t = new TransformTim();
t.injest(host, keyspace, "CSV/MedicareA2012.csv", year, start);
t.injest(host, keyspace, "CSV/MedicareB2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareC2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareD2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareEG2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareHJ2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareKL2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareMN2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareOQ2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareR2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareS2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareTX2012.csv", year, "");
t.injest(host, keyspace, "CSV/MedicareYZ2012.csv", year, "");
}
}
| ZheyuJin/CS8674.FALL2015.NEUSeattle | cassandra/Tim Cassandra Testing and Table Creation/RunTransformTim.java | Java | gpl-2.0 | 1,948 |
package edu.cmu.cs.cimds.geogame.client.ui;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import edu.cmu.cs.cimds.geogame.client.model.dto.ItemDTO;
public class InventoryGrid extends Grid {
// public static final String MONEY_ICON_FILENAME = "coinbag.jpg";
public static final String BLANK_ICON_FILENAME = "blank.png";
private int numCells;
private VerticalPanel[] blanks;
private List<ItemDTO> inventory = new ArrayList<ItemDTO>();
private ItemImageCreator imageCreator;
private String imageWidth = null;
public InventoryGrid(int numRows, int numColumns) {
super(numRows, numColumns);
this.numCells = numRows * numColumns;
this.blanks = new VerticalPanel[this.numCells];
for(int i=0;i<numCells;i++) {
this.blanks[i] = new VerticalPanel();
this.blanks[i].add(new Image(BLANK_ICON_FILENAME));
this.setWidget(i, this.blanks[i]);
}
this.clearContent();
}
public InventoryGrid(int numRows, int numColumns, ItemImageCreator imageCreator) {
this(numRows, numColumns);
this.imageCreator = imageCreator;
}
public List<ItemDTO> getInventory() { return inventory; }
public void setInventory(List<ItemDTO> inventory) { this.inventory = inventory; }
// public void setInventory(List<ItemTypeDTO> inventory) {
// this.inventory = new ArrayList<ItemTypeDTO>();
// for(ItemTypeDTO itemType : inventory) {
// Item dummyItem = new Item();
// dummyItem.setItemType(itemType);
// this.inventory.add(dummyItem);
// }
// }
public ItemImageCreator getImageCreator() { return imageCreator; }
public void setImageCreator(ItemImageCreator imageCreator) { this.imageCreator = imageCreator; }
public void setImageWidth(String imageWidth) { this.imageWidth = imageWidth; }
public void setWidget(int numCell, Widget w) {
// if(numCell >= this.numCells) {
// throw new IndexOutOfBoundsException();
// }
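		// Map the linear cell index to (row, column): row = numCell / numColumns,
		// column = numCell % numColumns.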
super.setWidget((int)Math.floor(numCell/this.numColumns), numCell%this.numColumns, w);
}
public void clearContent() {
for(int i=0;i<numCells;i++) {
this.setWidget(i, this.blanks[i]);
}
}
public void refresh() {
this.clearContent();
Collections.sort(this.inventory);
for(int i=0;i<this.inventory.size();i++) {
final ItemDTO item = this.inventory.get(i);
Image image = this.imageCreator.createImage(item);
if(this.imageWidth!=null) {
image.setWidth(this.imageWidth);
}
//Label descriptionLabel = new Label(item.getItemType().getName() + " - " + item.getItemType().getBasePrice() + "G", true);
VerticalPanel itemPanel = new VerticalPanel();
itemPanel.add(image);
//itemPanel.add(descriptionLabel);
this.setWidget(i, itemPanel);
}
}
}
| grapesmoker/geogame | src/edu/cmu/cs/cimds/geogame/client/ui/InventoryGrid.java | Java | gpl-2.0 | 2,903 |
package mx.gob.sct.utic.mimappir.admseg.postgreSQL.services;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import mx.gob.sct.utic.mimappir.admseg.postgreSQL.model.SEGUSUARIO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.GrantedAuthorityImpl;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.transaction.annotation.Transactional;
/**
* A custom service for retrieving users from a custom datasource, such as a
* database.
* <p>
* This custom service must implement Spring's {@link UserDetailsService}
*/
@Transactional(value="transactionManager_ADMSEG_POSGIS",readOnly=true)
public class CustomUserDetailsService implements UserDetailsService{
private SEGUSUARIO_Service SEGUSUARIO_Service;
//private UserDAO userDAO2 = new UserDAO();
/**
* Retrieves a user record containing the user's credentials and access.
*/
public UserDetails loadUserByUsername(String username)
throws UsernameNotFoundException, DataAccessException {
// Declare a null Spring User
UserDetails user = null;
try {
// Search database for a user that matches the specified username
// You can provide a custom DAO to access your persistence layer
// Or use JDBC to access your database
			// SEGUSUARIO is our custom domain user. This is not the same as
			// Spring's User
List<SEGUSUARIO> registros = SEGUSUARIO_Service.getUsuario(username);
//List<SEGUSUARIO> registros = SEGUSUARIO_Service.getUsuariosList();
Iterator<SEGUSUARIO> it = registros.iterator();
SEGUSUARIO dbUser = null;
while(it.hasNext()){
dbUser = it.next();
}
			// Populate the Spring User object with details from the dbUser.
			// Here we just pass the username and password; the access level is
			// hardcoded to 0, so getAuthorities() grants ROLE_USER only.
user = new User(dbUser.getCUSUARIO(), dbUser.getCPASSWORD(), true, true,
true, true, getAuthorities(0));
} catch (Exception e) {
throw new UsernameNotFoundException("Error in retrieving user");
}
// Return user to Spring for processing.
		// Take note that we're not the ones evaluating whether this user is
		// authenticated or valid; we merely retrieve a user that matches the
		// specified username.
return user;
}
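	/*
	 * Possible wiring sketch (not from the original project): with the Spring Security
	 * 3.x namespace this service is typically referenced as the user-service of an
	 * authentication provider, e.g.
	 *
	 *   <authentication-manager>
	 *     <authentication-provider user-service-ref="customUserDetailsService"/>
	 *   </authentication-manager>
	 *
	 * The bean name "customUserDetailsService" is only an illustrative assumption.
	 */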
/**
 * Retrieves the correct ROLE types depending on the access level, where the
 * access level is an Integer. This interprets whether the access value
 * denotes a regular user or an admin.
*
* @param access
* an integer value representing the access of the user
* @return collection of granted authorities
*/
public Collection<GrantedAuthority> getAuthorities(Integer access) {
// Create a list of grants for this user
List<GrantedAuthority> authList = new ArrayList<GrantedAuthority>(2);
// All users are granted with ROLE_USER access
// Therefore this user gets a ROLE_USER by default
authList.add(new GrantedAuthorityImpl("ROLE_USER"));
// Check if this user has admin access
// We interpret Integer(1) as an admin user
if (access.compareTo(1) == 0) {
// User has admin access
authList.add(new GrantedAuthorityImpl("ROLE_ADMIN"));
}
// Return list of granted authorities
return authList;
}
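	// For example: getAuthorities(0) yields [ROLE_USER], while getAuthorities(1)
	// yields [ROLE_USER, ROLE_ADMIN].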
@Autowired
public void setMenuService(SEGUSUARIO_Service service) {
this.SEGUSUARIO_Service = service;
}
}
| IvanSantiago/retopublico | MiMappir/src/mx/gob/sct/utic/mimappir/admseg/postgreSQL/services/CustomUserDetailsService.java | Java | gpl-2.0 | 3,913 |
package org.ljc.adoptojdk.class_name;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class ResolveSimpleNameClassName {
private Collection<String> packages = null;
public Collection<String> getPackages() {
Set<String> returnPackages = new HashSet<String>();
for (Package aPackage : Package.getPackages()) {
returnPackages.add(aPackage.getName());
}
return returnPackages;
}
public List<String> getFullyQualifiedNames(String simpleName) {
if (this.packages == null) {
this.packages = getPackages();
}
List<String> fqns = new ArrayList<String>();
for (String aPackage : packages) {
try {
String fqn = aPackage + "." + simpleName;
Class.forName(fqn);
fqns.add(fqn);
} catch (Exception e) {
// Ignore
}
}
return fqns;
}
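	// Usage sketch (results depend on which packages the running JVM has loaded):
	//   new ResolveSimpleNameClassName().getFullyQualifiedNames("List");
	// might return [java.util.List], plus java.awt.List if AWT classes are loaded.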
}
| neomatrix369/OpenJDKProductivityTool | src/main/java/org/ljc/adoptojdk/class_name/ResolveSimpleNameClassName.java | Java | gpl-2.0 | 1,001 |
/** ======================================================================== */
/** */
/** @copyright Copyright (c) 2010-2015, S2S s.r.l. */
/** @license http://www.gnu.org/licenses/gpl-2.0.html GNU Public License v.2 */
/** @version 6.0 */
/** This file is part of SdS - Sistema della Sicurezza . */
/** SdS - Sistema della Sicurezza is free software: you can redistribute it and/or modify */
/** it under the terms of the GNU General Public License as published by */
/** the Free Software Foundation, either version 3 of the License, or */
/** (at your option) any later version. */
/** SdS - Sistema della Sicurezza is distributed in the hope that it will be useful, */
/** but WITHOUT ANY WARRANTY; without even the implied warranty of */
/** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/** GNU General Public License for more details. */
/** You should have received a copy of the GNU General Public License */
/** along with SdS - Sistema della Sicurezza . If not, see <http://www.gnu.org/licenses/gpl-2.0.html> GNU Public License v.2 */
/** */
/** ======================================================================== */
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.apconsulting.luna.ejb.Corsi;
/**
*
* @author Dario
*/
public class MaterialeCorso_View implements java.io.Serializable {
public long COD_DOC;
public String TIT_DOC;
public java.sql.Date DAT_REV_DOC;
public String RSP_DOC;
public String NOME_FILE;
}
| s2sprodotti/SDS | SistemaDellaSicurezza/src/com/apconsulting/luna/ejb/Corsi/MaterialeCorso_View.java | Java | gpl-2.0 | 1,710 |
/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2018-2020 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2020 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <[email protected]>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.features.kafka.producer;
import java.io.IOException;
import java.time.Duration;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadFactory;
import java.util.function.Consumer;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.opennms.core.ipc.common.kafka.Utils;
import org.opennms.features.kafka.producer.datasync.KafkaAlarmDataSync;
import org.opennms.features.kafka.producer.model.OpennmsModelProtos;
import org.opennms.features.situationfeedback.api.AlarmFeedback;
import org.opennms.features.situationfeedback.api.AlarmFeedbackListener;
import org.opennms.netmgt.alarmd.api.AlarmCallbackStateTracker;
import org.opennms.netmgt.alarmd.api.AlarmLifecycleListener;
import org.opennms.netmgt.events.api.EventListener;
import org.opennms.netmgt.events.api.EventSubscriptionService;
import org.opennms.netmgt.events.api.ThreadAwareEventListener;
import org.opennms.netmgt.events.api.model.IEvent;
import org.opennms.netmgt.model.OnmsAlarm;
import org.opennms.netmgt.topologies.service.api.OnmsTopologyConsumer;
import org.opennms.netmgt.topologies.service.api.OnmsTopologyDao;
import org.opennms.netmgt.topologies.service.api.OnmsTopologyEdge;
import org.opennms.netmgt.topologies.service.api.OnmsTopologyMessage;
import org.opennms.netmgt.topologies.service.api.OnmsTopologyMessage.TopologyMessageStatus;
import org.opennms.netmgt.topologies.service.api.OnmsTopologyProtocol;
import org.opennms.netmgt.topologies.service.api.OnmsTopologyVertex;
import org.opennms.netmgt.topologies.service.api.TopologyVisitor;
import org.opennms.netmgt.xml.event.Event;
import org.osgi.service.cm.ConfigurationAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.expression.Expression;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.swrve.ratelimitedlogger.RateLimitedLog;
public class OpennmsKafkaProducer implements AlarmLifecycleListener, EventListener, AlarmFeedbackListener, OnmsTopologyConsumer, ThreadAwareEventListener {
private static final Logger LOG = LoggerFactory.getLogger(OpennmsKafkaProducer.class);
private static final RateLimitedLog RATE_LIMITED_LOGGER = RateLimitedLog
.withRateLimit(LOG)
.maxRate(5).every(Duration.ofSeconds(30))
.build();
public static final String KAFKA_CLIENT_PID = "org.opennms.features.kafka.producer.client";
private static final ExpressionParser SPEL_PARSER = new SpelExpressionParser();
private final ThreadFactory nodeUpdateThreadFactory = new ThreadFactoryBuilder()
.setNameFormat("kafka-producer-node-update-%d")
.build();
private final ProtobufMapper protobufMapper;
private final NodeCache nodeCache;
private final ConfigurationAdmin configAdmin;
private final EventSubscriptionService eventSubscriptionService;
private KafkaAlarmDataSync dataSync;
private String eventTopic;
private String alarmTopic;
private String nodeTopic;
private String alarmFeedbackTopic;
private String topologyVertexTopic;
private String topologyEdgeTopic;
private boolean forwardEvents;
private boolean forwardAlarms;
private boolean forwardAlarmFeedback;
private boolean suppressIncrementalAlarms;
private boolean forwardNodes;
private Expression eventFilterExpression;
private Expression alarmFilterExpression;
private final CountDownLatch forwardedEvent = new CountDownLatch(1);
private final CountDownLatch forwardedAlarm = new CountDownLatch(1);
private final CountDownLatch forwardedNode = new CountDownLatch(1);
private final CountDownLatch forwardedAlarmFeedback = new CountDownLatch(1);
private final CountDownLatch forwardedTopologyVertexMessage = new CountDownLatch(1);
private final CountDownLatch forwardedTopologyEdgeMessage = new CountDownLatch(1);
private KafkaProducer<byte[], byte[]> producer;
private final Map<String, OpennmsModelProtos.Alarm> outstandingAlarms = new ConcurrentHashMap<>();
private final AlarmEqualityChecker alarmEqualityChecker =
AlarmEqualityChecker.with(AlarmEqualityChecker.Exclusions::defaultExclusions);
private final AlarmCallbackStateTracker stateTracker = new AlarmCallbackStateTracker();
private final OnmsTopologyDao topologyDao;
private int kafkaSendQueueCapacity;
private BlockingDeque<KafkaRecord> kafkaSendDeque;
private final ExecutorService kafkaSendQueueExecutor =
Executors.newSingleThreadExecutor(runnable -> new Thread(runnable, "KafkaSendQueueProcessor"));
private final ExecutorService nodeUpdateExecutor;
private String encoding = "UTF8";
private int numEventListenerThreads = 4;
public OpennmsKafkaProducer(ProtobufMapper protobufMapper, NodeCache nodeCache,
ConfigurationAdmin configAdmin, EventSubscriptionService eventSubscriptionService,
OnmsTopologyDao topologyDao, int nodeAsyncUpdateThreads) {
this.protobufMapper = Objects.requireNonNull(protobufMapper);
this.nodeCache = Objects.requireNonNull(nodeCache);
this.configAdmin = Objects.requireNonNull(configAdmin);
this.eventSubscriptionService = Objects.requireNonNull(eventSubscriptionService);
this.topologyDao = Objects.requireNonNull(topologyDao);
this.nodeUpdateExecutor = Executors.newFixedThreadPool(nodeAsyncUpdateThreads, nodeUpdateThreadFactory);
}
public void init() throws IOException {
// Create the Kafka producer
final Properties producerConfig = new Properties();
final Dictionary<String, Object> properties = configAdmin.getConfiguration(KAFKA_CLIENT_PID).getProperties();
if (properties != null) {
final Enumeration<String> keys = properties.keys();
while (keys.hasMoreElements()) {
final String key = keys.nextElement();
producerConfig.put(key, properties.get(key));
}
}
// Overwrite the serializers, since we rely on these
producerConfig.put("key.serializer", ByteArraySerializer.class.getCanonicalName());
producerConfig.put("value.serializer", ByteArraySerializer.class.getCanonicalName());
// Class-loader hack for accessing the kafka classes when initializing producer.
producer = Utils.runWithGivenClassLoader(() -> new KafkaProducer<>(producerConfig), KafkaProducer.class.getClassLoader());
// Start processing records that have been queued for sending
if (kafkaSendQueueCapacity <= 0) {
kafkaSendQueueCapacity = 1000;
LOG.info("Defaulted the 'kafkaSendQueueCapacity' to 1000 since no property was set");
}
kafkaSendDeque = new LinkedBlockingDeque<>(kafkaSendQueueCapacity);
kafkaSendQueueExecutor.execute(this::processKafkaSendQueue);
if (forwardEvents) {
eventSubscriptionService.addEventListener(this);
}
topologyDao.subscribe(this);
}
public void destroy() {
kafkaSendQueueExecutor.shutdownNow();
nodeUpdateExecutor.shutdownNow();
if (producer != null) {
producer.close();
producer = null;
}
if (forwardEvents) {
eventSubscriptionService.removeEventListener(this);
}
topologyDao.unsubscribe(this);
}
private void forwardTopologyMessage(OnmsTopologyMessage message) {
if (message.getProtocol() == null) {
LOG.error("forwardTopologyMessage: null protocol");
return;
}
if (message.getMessagestatus() == null) {
LOG.error("forwardTopologyMessage: null status");
return;
}
if (message.getMessagebody() == null) {
LOG.error("forwardTopologyMessage: null message");
return;
}
if (message.getMessagestatus() == TopologyMessageStatus.DELETE) {
message.getMessagebody().accept(new DeletingVisitor(message));
} else {
message.getMessagebody().accept(new UpdatingVisitor(message));
}
}
private void forwardTopologyEdgeMessage(byte[] refid, byte[] message) {
sendRecord(() -> {
return new ProducerRecord<>(topologyEdgeTopic, refid, message);
}, recordMetadata -> {
// We've got an ACK from the server that the event was forwarded
// Let other threads know when we've successfully forwarded an event
forwardedTopologyEdgeMessage.countDown();
});
}
private void forwardEvent(Event event) {
boolean shouldForwardEvent = true;
// Filtering
if (eventFilterExpression != null) {
try {
shouldForwardEvent = eventFilterExpression.getValue(event, Boolean.class);
} catch (Exception e) {
LOG.error("Event filter '{}' failed to return a result for event: {}. The event will be forwarded anyways.",
eventFilterExpression.getExpressionString(), event.toStringSimple(), e);
}
}
if (!shouldForwardEvent) {
if (LOG.isTraceEnabled()) {
LOG.trace("Event {} not forwarded due to event filter: {}",
event.toStringSimple(), eventFilterExpression.getExpressionString());
}
return;
}
// Node handling
if (forwardNodes && event.getNodeid() != null && event.getNodeid() != 0) {
updateNodeAsynchronously(event.getNodeid());
}
// Forward!
sendRecord(() -> {
final OpennmsModelProtos.Event mappedEvent = protobufMapper.toEvent(event).build();
LOG.debug("Sending event with UEI: {}", mappedEvent.getUei());
return new ProducerRecord<>(eventTopic, mappedEvent.toByteArray());
}, recordMetadata -> {
// We've got an ACK from the server that the event was forwarded
// Let other threads know when we've successfully forwarded an event
forwardedEvent.countDown();
});
}
public boolean shouldForwardAlarm(OnmsAlarm alarm) {
if (alarmFilterExpression != null) {
// The expression is not necessarily thread safe
synchronized (this) {
try {
final boolean shouldForwardAlarm = alarmFilterExpression.getValue(alarm, Boolean.class);
                    if (!shouldForwardAlarm && LOG.isTraceEnabled()) {
                        LOG.trace("Alarm {} not forwarded due to alarm filter: {}",
                                alarm, alarmFilterExpression.getExpressionString());
                    }
return shouldForwardAlarm;
} catch (Exception e) {
LOG.error("Alarm filter '{}' failed to return a result for event: {}. The alarm will be forwarded anyways.",
alarmFilterExpression.getExpressionString(), alarm, e);
}
}
}
return true;
}
private boolean isIncrementalAlarm(String reductionKey, OnmsAlarm alarm) {
OpennmsModelProtos.Alarm existingAlarm = outstandingAlarms.get(reductionKey);
return existingAlarm != null && alarmEqualityChecker.equalsExcludingOnFirst(protobufMapper.toAlarm(alarm),
existingAlarm);
}
private void recordIncrementalAlarm(String reductionKey, OnmsAlarm alarm) {
// Apply the excluded fields when putting to the map so we do not have to perform this calculation
// on each equality check
outstandingAlarms.put(reductionKey,
AlarmEqualityChecker.Exclusions.defaultExclusions(protobufMapper.toAlarm(alarm)).build());
}
private void updateAlarm(String reductionKey, OnmsAlarm alarm) {
// Always push null records, no good way to perform filtering on these
if (alarm == null) {
// The alarm has been deleted so we shouldn't track it in the map of outstanding alarms any longer
outstandingAlarms.remove(reductionKey);
// The alarm was deleted, push a null record to the reduction key
sendRecord(() -> {
LOG.debug("Deleting alarm with reduction key: {}", reductionKey);
return new ProducerRecord<>(alarmTopic, reductionKey.getBytes(encoding), null);
}, recordMetadata -> {
// We've got an ACK from the server that the alarm was forwarded
// Let other threads know when we've successfully forwarded an alarm
forwardedAlarm.countDown();
});
return;
}
// Filtering
if (!shouldForwardAlarm(alarm)) {
return;
}
if (suppressIncrementalAlarms && isIncrementalAlarm(reductionKey, alarm)) {
return;
}
// Node handling
if (forwardNodes && alarm.getNodeId() != null) {
updateNodeAsynchronously(alarm.getNodeId());
}
// Forward!
sendRecord(() -> {
final OpennmsModelProtos.Alarm mappedAlarm = protobufMapper.toAlarm(alarm).build();
LOG.debug("Sending alarm with reduction key: {}", reductionKey);
if (suppressIncrementalAlarms) {
recordIncrementalAlarm(reductionKey, alarm);
}
return new ProducerRecord<>(alarmTopic, reductionKey.getBytes(encoding), mappedAlarm.toByteArray());
}, recordMetadata -> {
// We've got an ACK from the server that the alarm was forwarded
// Let other threads know when we've successfully forwarded an alarm
forwardedAlarm.countDown();
});
}
private void updateNodeAsynchronously(long nodeId) {
// Updating node asynchronously will unblock event consumption.
nodeUpdateExecutor.execute(() -> {
maybeUpdateNode(nodeId);
});
}
private void maybeUpdateNode(long nodeId) {
nodeCache.triggerIfNeeded(nodeId, (node) -> {
final String nodeCriteria;
if (node != null && node.getForeignSource() != null && node.getForeignId() != null) {
nodeCriteria = String.format("%s:%s", node.getForeignSource(), node.getForeignId());
} else {
nodeCriteria = Long.toString(nodeId);
}
if (node == null) {
// The node was deleted, push a null record
sendRecord(() -> {
LOG.debug("Deleting node with criteria: {}", nodeCriteria);
return new ProducerRecord<>(nodeTopic, nodeCriteria.getBytes(encoding), null);
});
return;
}
sendRecord(() -> {
final OpennmsModelProtos.Node mappedNode = protobufMapper.toNode(node).build();
LOG.debug("Sending node with criteria: {}", nodeCriteria);
return new ProducerRecord<>(nodeTopic, nodeCriteria.getBytes(encoding), mappedNode.toByteArray());
}, recordMetadata -> {
// We've got an ACK from the server that the node was forwarded
// Let other threads know when we've successfully forwarded a node
forwardedNode.countDown();
});
});
}
private void sendRecord(Callable<ProducerRecord<byte[], byte[]>> callable) {
sendRecord(callable, null);
}
private void sendRecord(Callable<ProducerRecord<byte[], byte[]>> callable, Consumer<RecordMetadata> callback) {
if (producer == null) {
return;
}
final ProducerRecord<byte[], byte[]> record;
try {
record = callable.call();
} catch (Exception e) {
// Propagate
throw new RuntimeException(e);
}
        // Rather than attempt to send, we instead queue the record to avoid blocking, since KafkaProducer's
        // send() method can block if Kafka is not available while metadata is being retrieved.
        // Any offer that fails due to capacity overflow is simply dropped and will have to wait until the
        // next sync to be processed, so this is a best-effort attempt.
if (!kafkaSendDeque.offer(new KafkaRecord(record, callback))) {
RATE_LIMITED_LOGGER.warn("Dropped a Kafka record due to queue capacity being full.");
}
}
private void processKafkaSendQueue() {
//noinspection InfiniteLoopStatement
while (true) {
try {
KafkaRecord kafkaRecord = kafkaSendDeque.take();
ProducerRecord<byte[], byte[]> producerRecord = kafkaRecord.getProducerRecord();
Consumer<RecordMetadata> consumer = kafkaRecord.getConsumer();
try {
producer.send(producerRecord, (recordMetadata, e) -> {
if (e != null) {
LOG.warn("Failed to send record to producer: {}.", producerRecord, e);
if (e instanceof TimeoutException) {
                            // If Kafka is offline, re-queue the record (events only).
                            // This is a best-effort attempt to preserve ordering; in-flight records may still arrive out of order.
if (producerRecord != null &&
this.eventTopic.equalsIgnoreCase(producerRecord.topic())) {
if(!kafkaSendDeque.offerFirst(kafkaRecord)) {
RATE_LIMITED_LOGGER.warn("Dropped a Kafka record due to queue capacity being full.");
}
}
}
return;
}
if (consumer != null) {
consumer.accept(recordMetadata);
}
});
} catch (RuntimeException e) {
LOG.warn("Failed to send record to producer: {}.", producerRecord, e);
}
} catch (InterruptedException ignore) {
break;
}
}
}
@Override
public void handleAlarmSnapshot(List<OnmsAlarm> alarms) {
if (!forwardAlarms || dataSync == null) {
// Ignore
return;
}
dataSync.handleAlarmSnapshot(alarms);
}
@Override
public void preHandleAlarmSnapshot() {
stateTracker.startTrackingAlarms();
}
@Override
public void postHandleAlarmSnapshot() {
stateTracker.resetStateAndStopTrackingAlarms();
}
@Override
public void handleNewOrUpdatedAlarm(OnmsAlarm alarm) {
if (!forwardAlarms) {
// Ignore
return;
}
updateAlarm(alarm.getReductionKey(), alarm);
stateTracker.trackNewOrUpdatedAlarm(alarm.getId(), alarm.getReductionKey());
}
@Override
public void handleDeletedAlarm(int alarmId, String reductionKey) {
if (!forwardAlarms) {
// Ignore
return;
}
handleDeletedAlarm(reductionKey);
stateTracker.trackDeletedAlarm(alarmId, reductionKey);
}
private void handleDeletedAlarm(String reductionKey) {
updateAlarm(reductionKey, null);
}
@Override
public String getName() {
return OpennmsKafkaProducer.class.getName();
}
@Override
public void onEvent(IEvent event) {
forwardEvent(Event.copyFrom(event));
}
@Override
public Set<OnmsTopologyProtocol> getProtocols() {
return Collections.singleton(OnmsTopologyProtocol.allProtocols());
}
@Override
public void consume(OnmsTopologyMessage message) {
forwardTopologyMessage(message);
}
public void setTopologyVertexTopic(String topologyVertexTopic) {
this.topologyVertexTopic = topologyVertexTopic;
}
public void setTopologyEdgeTopic(String topologyEdgeTopic) {
this.topologyEdgeTopic = topologyEdgeTopic;
}
public void setEventTopic(String eventTopic) {
this.eventTopic = eventTopic;
forwardEvents = !Strings.isNullOrEmpty(eventTopic);
}
public void setAlarmTopic(String alarmTopic) {
this.alarmTopic = alarmTopic;
forwardAlarms = !Strings.isNullOrEmpty(alarmTopic);
}
public void setNodeTopic(String nodeTopic) {
this.nodeTopic = nodeTopic;
forwardNodes = !Strings.isNullOrEmpty(nodeTopic);
}
public void setAlarmFeedbackTopic(String alarmFeedbackTopic) {
this.alarmFeedbackTopic = alarmFeedbackTopic;
forwardAlarmFeedback = !Strings.isNullOrEmpty(alarmFeedbackTopic);
}
public void setEventFilter(String eventFilter) {
if (Strings.isNullOrEmpty(eventFilter)) {
eventFilterExpression = null;
} else {
eventFilterExpression = SPEL_PARSER.parseExpression(eventFilter);
}
}
public void setAlarmFilter(String alarmFilter) {
if (Strings.isNullOrEmpty(alarmFilter)) {
alarmFilterExpression = null;
} else {
alarmFilterExpression = SPEL_PARSER.parseExpression(alarmFilter);
}
}
public OpennmsKafkaProducer setDataSync(KafkaAlarmDataSync dataSync) {
this.dataSync = dataSync;
return this;
}
@Override
public void handleAlarmFeedback(List<AlarmFeedback> alarmFeedback) {
if (!forwardAlarmFeedback) {
return;
}
// NOTE: This will currently block while waiting for Kafka metadata if Kafka is not available.
alarmFeedback.forEach(feedback -> sendRecord(() -> {
LOG.debug("Sending alarm feedback with key: {}", feedback.getAlarmKey());
return new ProducerRecord<>(alarmFeedbackTopic, feedback.getAlarmKey().getBytes(encoding),
protobufMapper.toAlarmFeedback(feedback).build().toByteArray());
}, recordMetadata -> {
// We've got an ACK from the server that the alarm feedback was forwarded
// Let other threads know when we've successfully forwarded an alarm feedback
forwardedAlarmFeedback.countDown();
}));
}
public boolean isForwardingAlarms() {
return forwardAlarms;
}
public CountDownLatch getEventForwardedLatch() {
return forwardedEvent;
}
public CountDownLatch getAlarmForwardedLatch() {
return forwardedAlarm;
}
public CountDownLatch getNodeForwardedLatch() {
return forwardedNode;
}
public CountDownLatch getAlarmFeedbackForwardedLatch() {
return forwardedAlarmFeedback;
}
public void setSuppressIncrementalAlarms(boolean suppressIncrementalAlarms) {
this.suppressIncrementalAlarms = suppressIncrementalAlarms;
}
@VisibleForTesting
KafkaAlarmDataSync getDataSync() {
return dataSync;
}
public AlarmCallbackStateTracker getAlarmCallbackStateTracker() {
return stateTracker;
}
public void setKafkaSendQueueCapacity(int kafkaSendQueueCapacity) {
this.kafkaSendQueueCapacity = kafkaSendQueueCapacity;
}
@Override
public int getNumThreads() {
return numEventListenerThreads;
}
private static final class KafkaRecord {
private final ProducerRecord<byte[], byte[]> producerRecord;
private final Consumer<RecordMetadata> consumer;
KafkaRecord(ProducerRecord<byte[], byte[]> producerRecord, Consumer<RecordMetadata> consumer) {
this.producerRecord = producerRecord;
this.consumer = consumer;
}
ProducerRecord<byte[], byte[]> getProducerRecord() {
return producerRecord;
}
Consumer<RecordMetadata> getConsumer() {
return consumer;
}
}
public CountDownLatch getForwardedTopologyVertexMessage() {
return forwardedTopologyVertexMessage;
}
public CountDownLatch getForwardedTopologyEdgeMessage() {
return forwardedTopologyEdgeMessage;
}
public String getEncoding() {
return encoding;
}
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public int getNumEventListenerThreads() {
return numEventListenerThreads;
}
public void setNumEventListenerThreads(int numEventListenerThreads) {
this.numEventListenerThreads = numEventListenerThreads;
}
private class TopologyVisitorImpl implements TopologyVisitor {
final OnmsTopologyMessage onmsTopologyMessage;
TopologyVisitorImpl(OnmsTopologyMessage onmsTopologyMessage) {
this.onmsTopologyMessage = Objects.requireNonNull(onmsTopologyMessage);
}
byte[] getKeyForEdge(OnmsTopologyEdge edge) {
Objects.requireNonNull(onmsTopologyMessage);
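            // Keys take the form "topology:<protocolId>:<edgeId>", so records for the same
            // protocol and edge always map to the same Kafka key (actual ids vary by provider).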
return String.format("topology:%s:%s", onmsTopologyMessage.getProtocol().getId(), edge.getId()).getBytes();
}
}
private class DeletingVisitor extends TopologyVisitorImpl {
DeletingVisitor(OnmsTopologyMessage topologyMessage) {
super(topologyMessage);
}
@Override
public void visit(OnmsTopologyEdge edge) {
forwardTopologyEdgeMessage(getKeyForEdge(edge), null);
}
}
private class UpdatingVisitor extends TopologyVisitorImpl {
UpdatingVisitor(OnmsTopologyMessage onmsTopologyMessage) {
super(onmsTopologyMessage);
}
@Override
public void visit(OnmsTopologyVertex vertex) {
// Node handling
if (forwardNodes && vertex.getNodeid() != null) {
updateNodeAsynchronously(vertex.getNodeid());
}
}
@Override
public void visit(OnmsTopologyEdge edge) {
Objects.requireNonNull(onmsTopologyMessage);
final OpennmsModelProtos.TopologyEdge mappedTopoMsg =
protobufMapper.toEdgeTopologyMessage(onmsTopologyMessage.getProtocol(), edge);
forwardTopologyEdgeMessage(getKeyForEdge(edge), mappedTopoMsg.toByteArray());
}
}
}
| jeffgdotorg/opennms | features/kafka/producer/src/main/java/org/opennms/features/kafka/producer/OpennmsKafkaProducer.java | Java | gpl-2.0 | 28,520 |
/*
* Vulcan Build Manager
* Copyright (C) 2005-2012 Chris Eldredge
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sourceforge.vulcan.subversion;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import net.sourceforge.vulcan.RepositoryAdaptor;
import net.sourceforge.vulcan.StateManager;
import net.sourceforge.vulcan.core.BuildDetailCallback;
import net.sourceforge.vulcan.core.support.FileSystem;
import net.sourceforge.vulcan.core.support.FileSystemImpl;
import net.sourceforge.vulcan.core.support.RepositoryUtils;
import net.sourceforge.vulcan.dto.ChangeLogDto;
import net.sourceforge.vulcan.dto.ChangeSetDto;
import net.sourceforge.vulcan.dto.PathModification;
import net.sourceforge.vulcan.dto.ProjectConfigDto;
import net.sourceforge.vulcan.dto.ProjectStatusDto;
import net.sourceforge.vulcan.dto.RepositoryTagDto;
import net.sourceforge.vulcan.dto.RevisionTokenDto;
import net.sourceforge.vulcan.exception.ConfigException;
import net.sourceforge.vulcan.exception.RepositoryException;
import net.sourceforge.vulcan.integration.support.PluginSupport;
import net.sourceforge.vulcan.subversion.dto.CheckoutDepth;
import net.sourceforge.vulcan.subversion.dto.SparseCheckoutDto;
import net.sourceforge.vulcan.subversion.dto.SubversionConfigDto;
import net.sourceforge.vulcan.subversion.dto.SubversionProjectConfigDto;
import net.sourceforge.vulcan.subversion.dto.SubversionRepositoryProfileDto;
import org.apache.commons.lang.StringUtils;
import org.tigris.subversion.javahl.ClientException;
import org.tigris.subversion.javahl.Notify2;
import org.tigris.subversion.javahl.NotifyAction;
import org.tigris.subversion.javahl.NotifyInformation;
import org.tigris.subversion.javahl.PromptUserPassword2;
import org.tigris.subversion.javahl.PromptUserPassword3;
import org.tigris.subversion.javahl.Revision;
import org.tigris.subversion.javahl.SVNClient;
import org.tmatesoft.svn.core.ISVNLogEntryHandler;
import org.tmatesoft.svn.core.SVNCancelException;
import org.tmatesoft.svn.core.SVNDepth;
import org.tmatesoft.svn.core.SVNDirEntry;
import org.tmatesoft.svn.core.SVNErrorCode;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNLogEntry;
import org.tmatesoft.svn.core.SVNLogEntryPath;
import org.tmatesoft.svn.core.SVNNodeKind;
import org.tmatesoft.svn.core.SVNProperties;
import org.tmatesoft.svn.core.SVNPropertyValue;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.wc.ISVNEventHandler;
import org.tmatesoft.svn.core.wc.SVNDiffClient;
import org.tmatesoft.svn.core.wc.SVNEvent;
import org.tmatesoft.svn.core.wc.SVNLogClient;
import org.tmatesoft.svn.core.wc.SVNPropertyData;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNWCClient;
public class SubversionRepositoryAdaptor extends SubversionSupport implements RepositoryAdaptor {
private final EventHandler eventHandler = new EventHandler();
private final ProjectConfigDto projectConfig;
private final String projectName;
private final Map<String, Long> byteCounters;
final LineOfDevelopment lineOfDevelopment = new LineOfDevelopment();
final SVNClient client = new SVNClient();
private long revision = -1;
private long diffStartRevision = -1;
private final StateManager stateManager;
private FileSystem fileSystem = new FileSystemImpl();
private List<ChangeSetDto> changeSets;
boolean canceling = false;
public SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager) throws ConfigException {
this(globalConfig, projectConfig, config, stateManager, true);
}
protected SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager, boolean init) throws ConfigException {
this(
globalConfig,
projectConfig,
config,
stateManager,
init,
getSelectedEnvironment(
globalConfig.getProfiles(),
config.getRepositoryProfile(),
"svn.profile.missing"));
}
protected SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager, boolean init, SubversionRepositoryProfileDto profile) throws ConfigException {
this(
globalConfig,
projectConfig,
config,
stateManager,
profile,
createRepository(profile, init));
}
protected SubversionRepositoryAdaptor(SubversionConfigDto globalConfig, ProjectConfigDto projectConfig, SubversionProjectConfigDto config, StateManager stateManager, final SubversionRepositoryProfileDto profile, SVNRepository svnRepository) throws ConfigException {
super(config, profile, svnRepository);
this.stateManager = stateManager;
this.projectConfig = projectConfig;
this.projectName = projectConfig.getName();
if (globalConfig != null) {
this.byteCounters = globalConfig.getWorkingCopyByteCounts();
} else {
this.byteCounters = Collections.emptyMap();
}
lineOfDevelopment.setPath(config.getPath());
lineOfDevelopment.setRepositoryRoot(profile.getRootUrl());
lineOfDevelopment.setTagFolderNames(new HashSet<String>(Arrays.asList(globalConfig.getTagFolderNames())));
client.notification2(eventHandler);
if (StringUtils.isNotBlank(profile.getUsername())) {
client.setPrompt(new PromptUserPassword3() {
public String getUsername() {
return profile.getUsername();
}
public String getPassword() {
return profile.getPassword();
}
public boolean prompt(String arg0, String arg1) {
return true;
}
public boolean prompt(String arg0, String arg1, boolean arg2) {
return true;
}
public boolean userAllowedSave() {
return false;
}
public int askTrustSSLServer(String arg0, boolean arg1) {
return PromptUserPassword2.AcceptTemporary;
}
public String askQuestion(String arg0, String arg1,
boolean arg2, boolean arg3) {
throw new UnsupportedOperationException();
}
public String askQuestion(String arg0, String arg1, boolean arg2) {
throw new UnsupportedOperationException();
}
public boolean askYesNo(String arg0, String arg1, boolean arg2) {
throw new UnsupportedOperationException();
}
});
}
}
public boolean hasIncomingChanges(ProjectStatusDto previousStatus) throws RepositoryException {
RevisionTokenDto rev = previousStatus.getRevision();
if (rev == null) {
rev = previousStatus.getLastKnownRevision();
}
if (rev == null) {
return true;
}
return getLatestRevision(rev).getRevisionNum() > rev.getRevisionNum();
}
public void prepareRepository(BuildDetailCallback buildDetailCallback) throws RepositoryException, InterruptedException {
}
public RevisionTokenDto getLatestRevision(RevisionTokenDto previousRevision) throws RepositoryException {
final String path = lineOfDevelopment.getComputedRelativePath();
SVNDirEntry info;
try {
info = svnRepository.info(path, revision);
} catch (SVNException e) {
throw new RepositoryException(e);
}
if (info == null) {
throw new RepositoryException("svn.path.not.exist", null, path);
}
final long lastChangedRevision = info.getRevision();
/*
* Get the revision of the newest log entry for this path.
* See Issue 95 (http://code.google.com/p/vulcan/issues/detail?id=95).
*/
final long mostRecentLogRevision = getMostRecentLogRevision(lastChangedRevision);
revision = mostRecentLogRevision;
if (config.getCheckoutDepth() == CheckoutDepth.Infinity || previousRevision == null) {
return new RevisionTokenDto(revision, "r" + revision);
}
/* Issue 151 (http://code.google.com/p/vulcan/issues/detail?id=151):
* Need to filter out irrelevant commits from sparse working copy.
*/
try {
getChangeLog(previousRevision, new RevisionTokenDto(revision), null);
if (changeSets.size() > 0) {
String label = changeSets.get(changeSets.size()-1).getRevisionLabel();
revision = Long.valueOf(label.substring(1));
} else {
// No commit logs matched means we're effectively at the old revision.
// Need to check if the old revision exists on this path in case we're building
// from a different branch/tag.
try {
info = svnRepository.info(path, previousRevision.getRevisionNum());
} catch (SVNException e) {
throw new RepositoryException(e);
}
if (info != null) {
revision = previousRevision.getRevisionNum();
} else {
// Path doesn't exist at that revision. Use most recent commit
// even though it didn't match our sparse working copy.
revision = mostRecentLogRevision;
}
}
} catch (RepositoryException e) {
// Probably the path does not exist at the previousRevision.
revision = mostRecentLogRevision;
changeSets = null;
}
return new RevisionTokenDto(revision, "r" + revision);
}
public void createPristineWorkingCopy(BuildDetailCallback buildDetailCallback) throws RepositoryException {
final File absolutePath = new File(projectConfig.getWorkDir()).getAbsoluteFile();
new RepositoryUtils(fileSystem).createOrCleanWorkingCopy(absolutePath, buildDetailCallback);
synchronized (byteCounters) {
if (byteCounters.containsKey(projectName)) {
eventHandler.setPreviousFileCount(byteCounters.get(projectName).longValue());
}
}
eventHandler.setBuildDetailCallback(buildDetailCallback);
final Revision svnRev = Revision.getInstance(revision);
final boolean ignoreExternals = false;
final boolean allowUnverObstructions = false;
try {
client.checkout(
getCompleteSVNURL().toString(),
absolutePath.toString(),
svnRev,
svnRev,
config.getCheckoutDepth().getId(),
ignoreExternals,
allowUnverObstructions
);
configureBugtraqIfNecessary(absolutePath);
} catch (ClientException e) {
if (!canceling) {
throw new RepositoryException(e);
}
} catch (SVNException e) {
throw new RepositoryException(e);
}
final boolean depthIsSticky = true;
for (SparseCheckoutDto folder : config.getFolders()) {
sparseUpdate(folder, absolutePath, svnRev, ignoreExternals,
allowUnverObstructions, depthIsSticky);
}
synchronized (byteCounters) {
byteCounters.put(projectName, eventHandler.getFileCount());
}
}
void sparseUpdate(SparseCheckoutDto folder,
File workingCopyRootPath, final Revision svnRev,
final boolean ignoreExternals,
final boolean allowUnverObstructions, final boolean depthIsSticky)
throws RepositoryException {
final File dir = new File(workingCopyRootPath, folder.getDirectoryName());
final File parentDir = dir.getParentFile();
if (!parentDir.exists()) {
final SparseCheckoutDto parentFolder = new SparseCheckoutDto();
parentFolder.setDirectoryName(new File(folder.getDirectoryName()).getParent());
parentFolder.setCheckoutDepth(CheckoutDepth.Empty);
sparseUpdate(parentFolder, workingCopyRootPath, svnRev, ignoreExternals, allowUnverObstructions, depthIsSticky);
}
final String path = dir.toString();
try {
client.update(path, svnRev, folder.getCheckoutDepth().getId(), depthIsSticky, ignoreExternals, allowUnverObstructions);
} catch (ClientException e) {
if (!canceling) {
throw new RepositoryException("svn.sparse.checkout.error", e, folder.getDirectoryName());
}
}
}
public void updateWorkingCopy(BuildDetailCallback buildDetailCallback) throws RepositoryException {
final File absolutePath = new File(projectConfig.getWorkDir()).getAbsoluteFile();
try {
final Revision svnRev = Revision.getInstance(revision);
final boolean depthIsSticky = false;
final boolean ignoreExternals = false;
final boolean allowUnverObstructions = false;
client.update(absolutePath.toString(), svnRev, SVNDepth.UNKNOWN.getId(), depthIsSticky, ignoreExternals, allowUnverObstructions);
} catch (ClientException e) {
if (!canceling) {
throw new RepositoryException(e);
}
throw new RepositoryException(e);
}
}
public boolean isWorkingCopy() {
try {
if (client.info(new File(projectConfig.getWorkDir()).getAbsolutePath()) != null) {
return true;
}
} catch (ClientException ignore) {
}
return false;
}
public ChangeLogDto getChangeLog(RevisionTokenDto first, RevisionTokenDto last, OutputStream diffOutputStream) throws RepositoryException {
final SVNRevision r1 = SVNRevision.create(first.getRevisionNum().longValue());
final SVNRevision r2 = SVNRevision.create(last.getRevisionNum().longValue());
if (changeSets == null) {
changeSets = fetchChangeSets(r1, r2);
if (this.config.getCheckoutDepth() != CheckoutDepth.Infinity) {
final SparseChangeLogFilter filter = new SparseChangeLogFilter(this.config, this.lineOfDevelopment);
filter.removeIrrelevantChangeSets(changeSets);
}
}
if (diffOutputStream != null) {
fetchDifferences(SVNRevision.create(diffStartRevision), r2, diffOutputStream);
}
final ChangeLogDto changeLog = new ChangeLogDto();
changeLog.setChangeSets(changeSets);
return changeLog;
}
@SuppressWarnings("unchecked")
public List<RepositoryTagDto> getAvailableTagsAndBranches() throws RepositoryException {
final String projectRoot = lineOfDevelopment.getComputedTagRoot();
final List<RepositoryTagDto> tags = new ArrayList<RepositoryTagDto>();
final RepositoryTagDto trunkTag = new RepositoryTagDto();
trunkTag.setDescription("trunk");
trunkTag.setName("trunk");
tags.add(trunkTag);
try {
final Collection<SVNDirEntry> entries = svnRepository.getDir(projectRoot, -1, null, (Collection<?>) null);
for (SVNDirEntry entry : entries) {
final String folderName = entry.getName();
if (entry.getKind() == SVNNodeKind.DIR && lineOfDevelopment.isTag(folderName)) {
addTags(projectRoot, folderName, tags);
}
}
} catch (SVNException e) {
throw new RepositoryException(e);
}
Collections.sort(tags, new Comparator<RepositoryTagDto>() {
public int compare(RepositoryTagDto t1, RepositoryTagDto t2) {
return t1.getName().compareTo(t2.getName());
}
});
return tags;
}
public String getRepositoryUrl() {
try {
return getCompleteSVNURL().toString();
} catch (SVNException e) {
throw new RuntimeException(e);
}
}
public String getTagOrBranch() {
return lineOfDevelopment.getComputedTagName();
}
public void setTagOrBranch(String tagName) {
lineOfDevelopment.setAlternateTagName(tagName);
}
protected long getMostRecentLogRevision(final long lastChangedRevision) throws RepositoryException {
final long[] commitRev = new long[1];
commitRev[0] = -1;
final SVNLogClient logClient = new SVNLogClient(
svnRepository.getAuthenticationManager(), options);
final ISVNLogEntryHandler handler = new ISVNLogEntryHandler() {
public void handleLogEntry(SVNLogEntry logEntry) throws SVNException {
commitRev[0] = logEntry.getRevision();
}
};
try {
logClient.doLog(SVNURL.parseURIEncoded(profile.getRootUrl()),
new String[] {lineOfDevelopment.getComputedRelativePath()},
SVNRevision.HEAD, SVNRevision.HEAD, SVNRevision.create(lastChangedRevision),
true, false, 1, handler);
} catch (SVNException e) {
throw new RepositoryException(e);
}
// If for some reason there were zero log entries, default to Last Changed Revision.
if (commitRev[0] < 0) {
commitRev[0] = lastChangedRevision;
}
return commitRev[0];
}
protected List<ChangeSetDto> fetchChangeSets(final SVNRevision r1, final SVNRevision r2) throws RepositoryException {
final SVNLogClient logClient = new SVNLogClient(svnRepository.getAuthenticationManager(), options);
logClient.setEventHandler(eventHandler);
final List<ChangeSetDto> changeSets = new ArrayList<ChangeSetDto>();
diffStartRevision = r2.getNumber();
final ISVNLogEntryHandler handler = new ISVNLogEntryHandler() {
@SuppressWarnings("unchecked")
public void handleLogEntry(SVNLogEntry logEntry) {
final long logEntryRevision = logEntry.getRevision();
if (diffStartRevision > logEntryRevision) {
diffStartRevision = logEntryRevision;
}
if (logEntryRevision == r1.getNumber()) {
/* The log message for r1 is in the previous build report. Don't include it twice. */
return;
}
final ChangeSetDto changeSet = new ChangeSetDto();
changeSet.setRevisionLabel("r" + logEntryRevision);
changeSet.setAuthorName(logEntry.getAuthor());
changeSet.setMessage(logEntry.getMessage());
changeSet.setTimestamp(new Date(logEntry.getDate().getTime()));
final Collection<SVNLogEntryPath> paths = ((Map<String, SVNLogEntryPath>) logEntry.getChangedPaths()).values();
for (SVNLogEntryPath path : paths) {
changeSet.addModifiedPath(path.getPath(), toPathModification(path.getType()));
}
changeSets.add(changeSet);
}
private PathModification toPathModification(char type) {
switch(type) {
case SVNLogEntryPath.TYPE_ADDED:
return PathModification.Add;
case SVNLogEntryPath.TYPE_DELETED:
return PathModification.Remove;
case SVNLogEntryPath.TYPE_REPLACED:
case SVNLogEntryPath.TYPE_MODIFIED:
return PathModification.Modify;
}
return null;
}
};
try {
logClient.doLog(
SVNURL.parseURIEncoded(profile.getRootUrl()),
new String[] {lineOfDevelopment.getComputedRelativePath()},
r1, r1, r2,
true,
true,
0,
handler);
} catch (SVNCancelException e) {
} catch (SVNException e) {
if (isFatal(e)) {
throw new RepositoryException(e);
}
}
return changeSets;
}
protected void fetchDifferences(final SVNRevision r1, final SVNRevision r2, OutputStream os) throws RepositoryException {
final SVNDiffClient diffClient = new SVNDiffClient(svnRepository.getAuthenticationManager(), options);
diffClient.setEventHandler(eventHandler);
try {
diffClient.doDiff(getCompleteSVNURL(), r1, r1, r2, SVNDepth.INFINITY, true, os);
os.close();
} catch (SVNCancelException e) {
} catch (SVNException e) {
if (e.getErrorMessage().getErrorCode() == SVNErrorCode.RA_DAV_PATH_NOT_FOUND) {
// This usually happens when building from a different branch or tag that
// does not share ancestry with the previous build.
log.info("Failed to obtain diff of revisions r"
+ r1.getNumber() + ":" + r2.getNumber(), e);
} else {
throw new RepositoryException(e);
}
} catch (IOException e) {
throw new RepositoryException(e);
}
}
protected SVNURL getCompleteSVNURL() throws SVNException {
return SVNURL.parseURIEncoded(lineOfDevelopment.getAbsoluteUrl());
}
@SuppressWarnings("unchecked")
private void addTags(String projectRoot, String folderName, List<RepositoryTagDto> tags) throws SVNException {
final String path = projectRoot + "/" + folderName;
final Collection<SVNDirEntry> entries = svnRepository.getDir(path, -1, null, (Collection<?>) null);
for (SVNDirEntry entry : entries) {
final String tagName = entry.getName();
if (entry.getKind() == SVNNodeKind.DIR) {
RepositoryTagDto tag = new RepositoryTagDto();
tag.setName(folderName + "/" + tagName);
tag.setDescription(tag.getName());
tags.add(tag);
}
}
}
private void configureBugtraqIfNecessary(File absolutePath) throws SVNException {
if (!this.config.isObtainBugtraqProperties()) {
return;
}
final ProjectConfigDto orig = stateManager.getProjectConfig(projectName);
final SVNWCClient client = new SVNWCClient(svnRepository.getAuthenticationManager(), options);
final SVNProperties bugtraqProps = new SVNProperties();
getWorkingCopyProperty(client, absolutePath, BUGTRAQ_URL, bugtraqProps);
getWorkingCopyProperty(client, absolutePath, BUGTRAQ_MESSAGE, bugtraqProps);
getWorkingCopyProperty(client, absolutePath, BUGTRAQ_LOGREGEX, bugtraqProps);
final ProjectConfigDto projectConfig = (ProjectConfigDto) orig.copy();
configureBugtraq(projectConfig, bugtraqProps);
if (!orig.equals(projectConfig)) {
try {
log.info("Updating bugtraq information for project " + projectName);
stateManager.updateProjectConfig(projectName, projectConfig, false);
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
throw new RuntimeException(e);
}
}
}
private void getWorkingCopyProperty(final SVNWCClient client, File absolutePath, String propName, final SVNProperties bugtraqProps) throws SVNException {
SVNPropertyData prop;
prop = client.doGetProperty(absolutePath, propName, SVNRevision.BASE, SVNRevision.BASE);
bugtraqProps.put(propName, getValueIfNotNull(prop));
}
protected String getValueIfNotNull(SVNPropertyData prop) {
if (prop != null) {
final SVNPropertyValue value = prop.getValue();
if (value.isString()) {
return value.getString();
}
return SVNPropertyValue.getPropertyAsString(value);
}
return StringUtils.EMPTY;
}
private class EventHandler implements ISVNEventHandler, Notify2 {
private long previousFileCount = -1;
private long fileCount = 0;
private BuildDetailCallback buildDetailCallback;
public void onNotify(NotifyInformation info) {
if (info.getAction() == NotifyAction.update_add) {
fileCount++;
PluginSupport.setWorkingCopyProgress(buildDetailCallback, fileCount, previousFileCount, ProgressUnit.Files);
} else if (info.getAction() == NotifyAction.skip) {
log.warn("Skipping missing target: " + info.getPath());
}
if (Thread.interrupted()) {
try {
client.cancelOperation();
canceling = true;
} catch (ClientException e) {
log.error("Error canceling svn operation", e);
}
}
}
public void handleEvent(SVNEvent event, double progress) throws SVNException {
}
public void checkCancelled() throws SVNCancelException {
if (Thread.interrupted()) {
throw new SVNCancelException();
}
}
void setBuildDetailCallback(BuildDetailCallback buildDetailCallback) {
this.buildDetailCallback = buildDetailCallback;
}
long getFileCount() {
return fileCount;
}
void setPreviousFileCount(long previousByteCount) {
this.previousFileCount = previousByteCount;
}
}
}
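/*
* Illustrative sketch (not part of the original source) of the sparse-checkout
* filtering performed in getLatestRevision(); all revision numbers and paths
* below are hypothetical. Suppose the previous build was at r100 and the sparse
* checkout only includes "trunk/server". If the commits since r100 under the
* project path are r103 and r108 (touching only "trunk/docs") and r105
* (touching "trunk/server"), the filtered change log keeps only r105, so the
* adaptor reports r105 as the latest revision instead of r108.
*/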
| chriseldredge/vulcan | plugins/vulcan-subversion/source/main/java/net/sourceforge/vulcan/subversion/SubversionRepositoryAdaptor.java | Java | gpl-2.0 | 23,349 |
package openra.server;
import openra.core.Unit;
public class UnitAnimEvent extends ActionEvent
{
private Unit un;
private UnitAnimEvent scheduled;
public UnitAnimEvent(int p, Unit un) {
super(p);
//logger->debug("UAE cons: this:%p un:%p\n",this,un);
this.un = un;
//un.referTo();
scheduled = null;
}
void destUnitAnimEvent()
{
//logger->debug("UAE dest: this:%p un:%p sch:%p\n",this,un,scheduled);
if (scheduled != null) {
this.getAequeue().scheduleEvent(scheduled);
}
//un->unrefer();
}
protected void setSchedule(UnitAnimEvent e)
{
//logger->debug("Scheduling an event. (this: %p, e: %p)\n",this,e);
if (scheduled != null) {
scheduled.setSchedule(null);
scheduled.stop();
}
scheduled = e;
}
void stopScheduled()
{
if (scheduled != null) {
scheduled.stop();
}
}
void update()
{
}
}
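/*
* Illustrative sketch (not part of the original source): the scheduling chain
* is intended to be used roughly as below. WalkAnimEvent and TurnAnimEvent are
* hypothetical subclasses and the event-queue wiring is assumed from context.
*
* UnitAnimEvent walk = new WalkAnimEvent(1, unit);
* UnitAnimEvent turn = new TurnAnimEvent(1, unit);
* walk.setSchedule(turn); // stops and replaces any previously scheduled event
* walk.destUnitAnimEvent(); // on teardown the scheduled event is queued for execution
*/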
| damiencarol/openredalert | java/src/openra/server/UnitAnimEvent.java | Java | gpl-2.0 | 965 |
/**
* Copyright (C) 2013, Moss Computing Inc.
*
* This file is part of simpledeb.
*
* simpledeb is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* simpledeb is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with simpledeb; see the file COPYING. If not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* Linking this library statically or dynamically with other modules is
* making a combined work based on this library. Thus, the terms and
* conditions of the GNU General Public License cover the whole
* combination.
*
* As a special exception, the copyright holders of this library give you
* permission to link this library with independent modules to produce an
* executable, regardless of the license terms of these independent
* modules, and to copy and distribute the resulting executable under
* terms of your choice, provided that you also meet, for each linked
* independent module, the terms and conditions of the license of that
* module. An independent module is a module which is not derived from
* or based on this library. If you modify this library, you may extend
* this exception to your version of the library, but you are not
* obligated to do so. If you do not wish to do so, delete this
* exception statement from your version.
*/
package com.moss.simpledeb.core.action;
import java.io.File;
import java.util.LinkedList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import com.moss.simpledeb.core.DebComponent;
import com.moss.simpledeb.core.DebState;
import com.moss.simpledeb.core.path.ArchivePath;
import com.moss.simpledeb.core.path.BytesArchivePath;
import com.moss.simpledeb.core.path.DirArchivePath;
@XmlAccessorType(XmlAccessType.FIELD)
public final class LaunchScriptAction extends DebAction {
@XmlAttribute(name="class-name")
private String className;
@XmlAttribute(name="target-file")
private String targetFile;
@XmlAttribute(name="path-level")
private int pathLevel;
@Override
public void run(DebState state) throws Exception {
{
File target = new File(targetFile).getParentFile();
LinkedList<File> pathsNeeded = new LinkedList<File>();
File f = target;
while (f != null) {
pathsNeeded.addFirst(f);
f = f.getParentFile();
}
for (int i=0; i<pathLevel; i++) {
pathsNeeded.removeFirst();
}
for (File e : pathsNeeded) {
String p = "./" + e.getPath();
if (!p.endsWith("/")) {
p = p + "/";
}
TarArchiveEntry tarEntry = new TarArchiveEntry(p);
tarEntry.setGroupId(0);
tarEntry.setGroupName("root");
tarEntry.setIds(0, 0);
tarEntry.setModTime(System.currentTimeMillis());
tarEntry.setSize(0);
tarEntry.setUserId(0);
tarEntry.setUserName("root");
tarEntry.setMode(Integer.parseInt("755", 8));
ArchivePath path = new DirArchivePath(tarEntry);
state.addPath(DebComponent.CONTENT, path);
}
}
String cp;
{
StringBuffer sb = new StringBuffer();
for (String path : state.classpath) {
if (sb.length() == 0) {
sb.append(path);
}
else {
sb.append(":");
sb.append(path);
}
}
cp = sb.toString();
}
StringBuilder sb = new StringBuilder();
sb.append("#!/bin/bash\n");
sb.append("CP=\"");
sb.append(cp);
sb.append("\"\n");
sb.append("/usr/bin/java -cp $CP ");
sb.append(className);
sb.append(" $@\n");
byte[] data = sb.toString().getBytes();
String entryName = "./" + targetFile;
TarArchiveEntry tarEntry = new TarArchiveEntry(entryName);
tarEntry.setGroupId(0);
tarEntry.setGroupName("root");
tarEntry.setIds(0, 0);
tarEntry.setModTime(System.currentTimeMillis());
tarEntry.setSize(data.length);
tarEntry.setUserId(0);
tarEntry.setUserName("root");
tarEntry.setMode(Integer.parseInt("755", 8));
ArchivePath path = new BytesArchivePath(tarEntry, data);
state.addPath(DebComponent.CONTENT, path);
}
public String getClassName() {
return className;
}
public void setClassName(String className) {
this.className = className;
}
public String getTargetFile() {
return targetFile;
}
public void setTargetFile(String targetFile) {
this.targetFile = targetFile;
}
public int getPathLevel() {
return pathLevel;
}
public void setPathLevel(int assumedTargetPathLevel) {
this.pathLevel = assumedTargetPathLevel;
}
}
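/*
* Illustrative sketch (not part of the original source): with a hypothetical
* configuration of className = "com.example.Main", targetFile = "usr/bin/mytool"
* and state.classpath = ["/usr/share/mytool/a.jar", "/usr/share/mytool/b.jar"],
* run() adds a tar entry "./usr/bin/mytool" (mode 755, owned by root) whose
* content is the launch script:
*
* #!/bin/bash
* CP="/usr/share/mytool/a.jar:/usr/share/mytool/b.jar"
* /usr/bin/java -cp $CP com.example.Main $@
*/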
| mosscode/simpledeb | core/src/main/java/com/moss/simpledeb/core/action/LaunchScriptAction.java | Java | gpl-2.0 | 4,990 |
/*Iterator pattern
*
* Separates the responsibility of traversing a collection from the collection itself.
*/
import java.util.Vector;
/**White-box aggregate + external iterator*/
public interface Iterator
{
public Object first();
public Object next();
//Get the current element
public Object currentItem();
//Whether the end of the traversal has been reached
public boolean isDone();
}
//Forward (in-order) iterator
public class ConcreteIterator implements Iterator
{
private int currentIndex = 0;
//The concrete aggregate being traversed
private Aggregat aggregate = null;
public ConcreteIterator(Aggregat aggregate)
{
this.aggregate = aggregate;
}
//Implementations of the Iterator interface
@Override
public Object first()
{
currentIndex = 0;
return aggregate.getAt(currentIndex);
}
@Override
public Object next()
{
if(currentIndex < aggregate.count())
currentIndex++;
return aggregate.getAt(currentIndex);
}
@Override
public Object currentItem()
{
return aggregate.getAt(currentIndex);
}
@Override
public boolean isDone()
{
return (currentIndex >= aggregate.count());
}
}
/*----------------------------------------------------------------*/
//A white-box aggregate exposes an interface for accessing its internal elements
public interface Aggregat
{
public Iterator createIterator();
//Total number of elements in the aggregate
public int count();
//Element at the given position
public Object getAt(int index);
}
/*----------------------------------------------------------------*/
//The concrete aggregate class
public class ConcreteAggregat implements Aggregat
{
private Vector vector = null;
public Vector getVector()
{
return vector;
}
public void setVector(final Vector vector)
{
this.vector = vector;
}
public ConcreteAggregat()
{
vector = new Vector();
vector.add("item 1");
vector.add("item 2");
}
//Total number of elements in the aggregate
@Override
public int count()
{
return vector.size();
}
//Element at the given position
@Override
public Object getAt(int index)
{
if(0 <= index && index < vector.size())
return vector.get(index);
else
return null;
}
//Create a concrete iterator and hand this aggregate object to it
@Override
public Iterator createIterator()
{
//A simple factory could be used here
return new ConcreteIterator(this);
}
}
/*----------------------------------------------------------------*/
public class Client
{
public static void main(final String[] args)
{
Aggregat agg = new ConcreteAggregat();
final Iterator iterator = agg.createIterator();
System.out.println(iterator.first());
while (!iterator.isDone())
{
//Item item = (Item)iterator.currentItem();
System.out.println(iterator.next());
}
}
}
| creary/company | document/设计模式/3行为型/3迭代器.java | Java | gpl-2.0 | 2,882 |
package yuka.detectors;
import yuka.containers.News;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.HashMap;
import java.util.Map;
/**
* Created by imyuka on 18/08/2016.
*/
public class FigureDetector {
private Map<String, Integer> cmap;
private Map<String, Integer> pmap;
private Map<String, Integer> dim;
//private int height = 0;
//private int height = 0;
public FigureDetector(Map<String, String> figure) {
String url = figure.get(News.IMAGE_URL);
dim = getDimension(url);
String caption = figure.get(News.IMAGE_CAPTION);
cmap = ClubDetector.count(caption);
PlayerDetector pd = new PlayerDetector(cmap.keySet());
pmap = pd.count(caption);
}
public int getHeight() {
return dim.get("height");
}
public int getWidth() {
return dim.get("width");
}
public double getPercentage (double totalHeight) {
return (double) getWidth() / HeightDetector.COLUMN_WIDTH
* (getHeight() / totalHeight);
}
public Map<String, Integer> getClubMap() {
return cmap;
}
public Map<String, Integer> getPlayerMap() {
return pmap;
}
public static Map<String, Integer> getDimension (String source) {
//int[] dim = new int[]{0,0};
Map<String, Integer> dim = new HashMap<>();
dim.put("width", 0);
dim.put("height", 0);
try {
URL url = new URL(source);
URLConnection conn = url.openConnection();
// now you get the content length
/////int contentLength = conn.getContentLength();
// you can check size here using contentLength
InputStream in = conn.getInputStream();
BufferedImage image = ImageIO.read(in);
// you can get size dimesion
//int width = image.getWidth();
//int height = image.getHeight();
dim.put("width", image.getWidth());
dim.put("height", image.getHeight());
} catch (IOException e) {
System.out.println();
}
return dim;
}
}
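/*
* Illustrative usage sketch (not part of the original source); the URL, caption
* and height values are hypothetical and News/HeightDetector come from the
* surrounding project:
*
* Map<String, String> figure = new HashMap<>();
* figure.put(News.IMAGE_URL, "http://example.com/photo.jpg");
* figure.put(News.IMAGE_CAPTION, "Player celebrates with his club");
* FigureDetector fd = new FigureDetector(figure);
* double share = fd.getPercentage(2400.0); // fraction of the article height taken by the image
*/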
| imyuka/AFL | AFL/src/main/java/yuka/detectors/FigureDetector.java | Java | gpl-2.0 | 2,274 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package pe.edu.upeu.modelo;
import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
*
* @author hp
*/
@Entity
@Table(name = "conf_periodo")
@XmlRootElement
@NamedQueries({
@NamedQuery(name = "ConfPeriodo.findAll", query = "SELECT c FROM ConfPeriodo c")})
public class ConfPeriodo implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Basic(optional = false)
@Column(name = "id_periodo")
private Integer idPeriodo;
@Basic(optional = false)
@Column(name = "periodo")
private String periodo;
@Basic(optional = false)
@Column(name = "descripcion")
private String descripcion;
@Basic(optional = false)
@Column(name = "fecha_inicio")
@Temporal(TemporalType.DATE)
private Date fechaInicio;
@Basic(optional = false)
@Column(name = "fecha_fin")
@Temporal(TemporalType.DATE)
private Date fechaFin;
@Basic(optional = false)
@Column(name = "estado")
private String estado;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloAreaEje> gloAreaEjeCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloEstadoArea> gloEstadoAreaCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloEstadoDepartamento> gloEstadoDepartamentoCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloEstadoFilial> gloEstadoFilialCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloDepartCoordinador> gloDepartCoordinadorCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloMeta> gloMetaCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<GloDepartareaCoordinador> gloDepartareaCoordinadorCollection;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "idPeriodo")
private Collection<FinPartidapresupuestaria> finPartidapresupuestariaCollection;
@JoinColumn(name = "id_temporada", referencedColumnName = "id_temporada")
@ManyToOne(optional = false)
private ConfTemporada idTemporada;
public ConfPeriodo() {
}
public ConfPeriodo(Integer idPeriodo) {
this.idPeriodo = idPeriodo;
}
public ConfPeriodo(Integer idPeriodo, String periodo, String descripcion, Date fechaInicio, Date fechaFin, String estado) {
this.idPeriodo = idPeriodo;
this.periodo = periodo;
this.descripcion = descripcion;
this.fechaInicio = fechaInicio;
this.fechaFin = fechaFin;
this.estado = estado;
}
public Integer getIdPeriodo() {
return idPeriodo;
}
public void setIdPeriodo(Integer idPeriodo) {
this.idPeriodo = idPeriodo;
}
public String getPeriodo() {
return periodo;
}
public void setPeriodo(String periodo) {
this.periodo = periodo;
}
public String getDescripcion() {
return descripcion;
}
public void setDescripcion(String descripcion) {
this.descripcion = descripcion;
}
public Date getFechaInicio() {
return fechaInicio;
}
public void setFechaInicio(Date fechaInicio) {
this.fechaInicio = fechaInicio;
}
public Date getFechaFin() {
return fechaFin;
}
public void setFechaFin(Date fechaFin) {
this.fechaFin = fechaFin;
}
public String getEstado() {
return estado;
}
public void setEstado(String estado) {
this.estado = estado;
}
@XmlTransient
public Collection<GloAreaEje> getGloAreaEjeCollection() {
return gloAreaEjeCollection;
}
public void setGloAreaEjeCollection(Collection<GloAreaEje> gloAreaEjeCollection) {
this.gloAreaEjeCollection = gloAreaEjeCollection;
}
@XmlTransient
public Collection<GloEstadoArea> getGloEstadoAreaCollection() {
return gloEstadoAreaCollection;
}
public void setGloEstadoAreaCollection(Collection<GloEstadoArea> gloEstadoAreaCollection) {
this.gloEstadoAreaCollection = gloEstadoAreaCollection;
}
@XmlTransient
public Collection<GloEstadoDepartamento> getGloEstadoDepartamentoCollection() {
return gloEstadoDepartamentoCollection;
}
public void setGloEstadoDepartamentoCollection(Collection<GloEstadoDepartamento> gloEstadoDepartamentoCollection) {
this.gloEstadoDepartamentoCollection = gloEstadoDepartamentoCollection;
}
@XmlTransient
public Collection<GloEstadoFilial> getGloEstadoFilialCollection() {
return gloEstadoFilialCollection;
}
public void setGloEstadoFilialCollection(Collection<GloEstadoFilial> gloEstadoFilialCollection) {
this.gloEstadoFilialCollection = gloEstadoFilialCollection;
}
@XmlTransient
public Collection<GloDepartCoordinador> getGloDepartCoordinadorCollection() {
return gloDepartCoordinadorCollection;
}
public void setGloDepartCoordinadorCollection(Collection<GloDepartCoordinador> gloDepartCoordinadorCollection) {
this.gloDepartCoordinadorCollection = gloDepartCoordinadorCollection;
}
@XmlTransient
public Collection<GloMeta> getGloMetaCollection() {
return gloMetaCollection;
}
public void setGloMetaCollection(Collection<GloMeta> gloMetaCollection) {
this.gloMetaCollection = gloMetaCollection;
}
@XmlTransient
public Collection<GloDepartareaCoordinador> getGloDepartareaCoordinadorCollection() {
return gloDepartareaCoordinadorCollection;
}
public void setGloDepartareaCoordinadorCollection(Collection<GloDepartareaCoordinador> gloDepartareaCoordinadorCollection) {
this.gloDepartareaCoordinadorCollection = gloDepartareaCoordinadorCollection;
}
@XmlTransient
public Collection<FinPartidapresupuestaria> getFinPartidapresupuestariaCollection() {
return finPartidapresupuestariaCollection;
}
public void setFinPartidapresupuestariaCollection(Collection<FinPartidapresupuestaria> finPartidapresupuestariaCollection) {
this.finPartidapresupuestariaCollection = finPartidapresupuestariaCollection;
}
public ConfTemporada getIdTemporada() {
return idTemporada;
}
public void setIdTemporada(ConfTemporada idTemporada) {
this.idTemporada = idTemporada;
}
@Override
public int hashCode() {
int hash = 0;
hash += (idPeriodo != null ? idPeriodo.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
if (!(object instanceof ConfPeriodo)) {
return false;
}
ConfPeriodo other = (ConfPeriodo) object;
if ((this.idPeriodo == null && other.idPeriodo != null) || (this.idPeriodo != null && !this.idPeriodo.equals(other.idPeriodo))) {
return false;
}
return true;
}
@Override
public String toString() {
return "pe.edu.upeu.modelo.ConfPeriodo[ idPeriodo=" + idPeriodo + " ]";
}
}
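/*
* Illustrative usage sketch (not part of the original source): the named query
* declared above can be executed through a JPA EntityManager; "em" is assumed
* to be obtained from the application's persistence unit.
*
* List<ConfPeriodo> periodos = em.createNamedQuery("ConfPeriodo.findAll", ConfPeriodo.class)
* .getResultList();
*/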
| AngieChambi/ProFinal | WebSgeUPeU/src/java/pe/edu/upeu/modelo/ConfPeriodo.java | Java | gpl-2.0 | 8,395 |
package com.atlach.trafficdataloader;
import java.util.ArrayList;
/* Short Desc: Storage object for Trip info */
/* Trip Info > Trips > Routes */
public class TripInfo {
private int tripCount = 0;
public ArrayList<Trip> trips = null;
public String date;
public String origin;
public String destination;
public TripInfo() {
trips = new ArrayList<Trip>();
}
public int addTrip() {
Trip temp = new Trip();
if (trips.add(temp) == false) {
/* Failed */
return -1;
}
tripCount++;
return trips.indexOf(temp);
}
public int addRouteToTrip(int tripId, String routeName, String mode, String dist, String agency, String start, String end, String points) {
int result = -1;
Trip temp = trips.get(tripId);
if (temp != null) {
result = temp.addRoute(routeName, mode, dist, agency, start, end, points);
}
return result;
}
public int getTripCount() {
return tripCount;
}
public static class Trip {
public double totalDist = 0.0;
public double totalCost = 0.0;
public int totalTraffic = 0;
private int transfers = 0;
public ArrayList<Route> routes = null;
public Trip() {
routes = new ArrayList<Route>();
};
public int addRoute(String routeName, String mode, String dist, String agency, String start, String end, String points) {
Route temp = new Route();
temp.name = routeName;
temp.mode = mode;
temp.dist = dist;
temp.agency = agency;
temp.start = start;
temp.end = end;
temp.points = points;
if (routes.add(temp) == false) {
/* Failed */
return -1;
}
transfers++;
return routes.indexOf(temp);
}
public int getTransfers() {
return transfers;
}
}
public static class Route {
/* Object fields */
public String name = "";
public String mode = "";
public String dist = "0.0";
public String agency = "";
public String start = "";
public String end = "";
public String points = "";
public String cond = "";
//public String cost = "0.0";
public double costMatrix[] = {0.0, 0.0, 0.0, 0.0};
public double getRegularCost(boolean isDiscounted) {
if (isDiscounted) {
return costMatrix[1];
}
return costMatrix[0];
}
public double getSpecialCost(boolean isDiscounted) {
if (isDiscounted) {
return costMatrix[2];
}
return costMatrix[3];
}
}
}
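/*
* Illustrative usage sketch (not part of the original source): builds one trip
* with a single route. All route values below are hypothetical.
*/
class TripInfoExample {
public static void main(String[] args) {
TripInfo info = new TripInfo();
info.origin = "Quezon City";
info.destination = "Makati";
int tripId = info.addTrip();
info.addRouteToTrip(tripId, "EDSA Bus", "BUS", "12.5", "DOTC",
"14.6563,121.0309", "14.5547,121.0244", "encodedPolylinePoints");
// One route added, so the trip currently has a single leg
System.out.println("Legs: " + info.trips.get(tripId).getTransfers());
}
}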
| linusmotu/Viaje | ViajeUi/src/com/atlach/trafficdataloader/TripInfo.java | Java | gpl-2.0 | 2,363 |
package org.adastraeducation.liquiz.equation;
import java.util.*;
import java.util.regex.*;
import org.adastraeducation.liquiz.*;
/**
* Present equations with random variables.
* It has two ways to parse the equations in string[]. One is in infix, and the other is in the RPN.
* @author Yingzhu Wang
*
*/
public class Equation implements Displayable {
private Expression func;
private double correctAnswer;
private HashMap<String,Var> variables;
public Equation(String equation, HashMap<String,Var> variables){
this.variables=variables;
ArrayList<String> equationSplit = this.parseQuestion(equation);
this.func = this.parseInfix(equationSplit);
correctAnswer = func.eval();
}
public Equation(Expression func, HashMap<String,Var> variables){
this.func = func;
this.variables = variables;
correctAnswer=func.eval();
}
public Equation(Expression func){
this.func = func;
this.variables = new HashMap<String,Var>();
correctAnswer=func.eval();
}
public void setExpression(Expression e){
this.func=e;
correctAnswer=func.eval();
}
public void setVariables(HashMap<String,Var> variables){
this.variables = variables;
}
public String getTagName() { return "Equation"; }
public Expression parseInfix(ArrayList<String> s){
Tree t = new Tree(s);
ArrayList<String> rpn = t.traverse();
return parseRPN(rpn);
}
// Precompile all regular expressions used in parsing
private static final Pattern parseDigits =
Pattern.compile("^[0-9]+$");
private static final Pattern wordPattern =
Pattern.compile("[\\W]|([\\w]*)");
/*TODO: We can do much better than a switch statement,
* but it would require a hash map and lots of little objects
*/
//TODO: Check if binary ops are backwards? a b - ????
public Expression parseRPN(ArrayList<String> s) {
Stack<Expression> stack = new Stack<Expression>();
for(int i = 0; i<s.size(); i++){
String temp = s.get(i);
if (Functions.MATHFUNCTIONS.contains(temp)) {
Expression op1 ;
Expression op2 ;
switch(temp){
case "+":
op2=stack.pop();
op1=stack.pop();
stack.push(new Plus(op1,op2));
break;
case "-":
op2=stack.pop();
op1=stack.pop();
stack.push( new Minus(op1,op2));
break;
case "*":
op2=stack.pop();
op1=stack.pop();
stack.push( new Multi(op1,op2));break;
case "/":
op2=stack.pop();
op1=stack.pop();
stack.push( new Div(op1,op2));break;
case "sin":
op1=stack.pop();
stack.push(new Sin(op1));break;
case "cos":
op1=stack.pop();
stack.push(new Cos(op1));break;
case "tan":
op1=stack.pop();
stack.push(new Tan(op1));break;
case "abs":
op1=stack.pop();
stack.push(new Abs(op1));break;
case "Asin":
op1=stack.pop();
stack.push(new Asin(op1));break;
case "Atan":
op1=stack.pop();
stack.push(new Atan(op1));break;
case "neg":
op1=stack.pop();
stack.push(new Neg(op1));break;
case "sqrt":
op1=stack.pop();
stack.push(new Sqrt(op1));break;
default:break;
}
}
//deal with the space
else if(temp.equals(""))
;
else{
Matcher m = parseDigits.matcher(temp);
if (m.matches()){
double x = Double.parseDouble(temp);
stack.push(new Constant(x));
}
else{
stack.push(variables.get(temp));
}
}
}
return stack.pop();
}
public ArrayList<String> parseQuestion(String question){
ArrayList<String> s = new ArrayList<String>();
Matcher m = wordPattern.matcher(question);
while(m.find()){
s.add(m.group());
}
return s;
}
// public ResultSet readDatabase(String sql){
// return DatabaseMgr.select(sql);
// }
//
// public void writeDatabase(String sql){
// DatabaseMgr.update(sql);
// }
public Expression getExpression(){
return func;
}
public double getCorrectAnswer(){
return correctAnswer;
}
@Override
public void writeHTML(StringBuilder b) {
func.infixReplaceVar(b);
}
@Override
public void writeXML(StringBuilder b) {
b.append("<Equation question='");
func.infix(b);
b.append("'></Equation>");
}
@Override
public void writeJS(StringBuilder b) {
}
}
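/*
* Illustrative usage sketch (not part of the original source): evaluates the
* constant expression 2 + 3 using only types referenced in this package. The
* string-parsing constructor would additionally require a populated Var map.
*/
class EquationExample {
public static void main(String[] args) {
Equation eq = new Equation(new Plus(new Constant(2), new Constant(3)));
System.out.println(eq.getCorrectAnswer()); // expected output: 5.0
}
}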
| hydrodog/LiquiZ | LiquiZ/src/org/adastraeducation/liquiz/equation/Equation.java | Java | gpl-2.0 | 4,219 |
package com.github.luksdlt92;
import java.util.ArrayList;
/*
* DoubleJumpReload plugin
* Made by luksdlt92 and Abdalion
*/
import org.bukkit.event.Listener;
import org.bukkit.plugin.java.JavaPlugin;
import com.github.luksdlt92.commands.DoubleJumpCommand;
import com.github.luksdlt92.listeners.JumpListener;
public class DoubleJumpReload extends JavaPlugin implements Listener {
public ArrayList<String> _players = new ArrayList<String>();
public ArrayList<String> _playersDisableJump = new ArrayList<String>();
@Override
public void onEnable()
{
new JumpListener(this);
this.getCommand("jumpdelay").setExecutor(new DoubleJumpCommand(this));
}
public ArrayList<String> getPlayers()
{
return _players;
}
public ArrayList<String> getPlayersDisableJump()
{
return _playersDisableJump;
}
}
| luksdlt92/doublejumpreload | DoubleJumpReload/src/main/java/com/github/luksdlt92/DoubleJumpReload.java | Java | gpl-2.0 | 871 |
package org.vidogram.messenger.g.a;
import java.io.ByteArrayOutputStream;
public class j extends ByteArrayOutputStream
{
private final b a;
public j(b paramb, int paramInt)
{
this.a = paramb;
this.buf = this.a.a(Math.max(paramInt, 256));
}
private void a(int paramInt)
{
if (this.count + paramInt <= this.buf.length)
return;
byte[] arrayOfByte = this.a.a((this.count + paramInt) * 2);
System.arraycopy(this.buf, 0, arrayOfByte, 0, this.count);
this.a.a(this.buf);
this.buf = arrayOfByte;
}
public void close()
{
this.a.a(this.buf);
this.buf = null;
super.close();
}
public void finalize()
{
this.a.a(this.buf);
}
public synchronized void write(int paramInt)
{
a(1);
super.write(paramInt);
}
public synchronized void write(byte[] paramArrayOfByte, int paramInt1, int paramInt2)
{
a(paramInt2);
super.write(paramArrayOfByte, paramInt1, paramInt2);
}
}
/* Location: G:\programs\dex2jar-2.0\vidogram-dex2jar.jar
* Qualified Name: org.vidogram.messenger.g.a.j
* JD-Core Version: 0.6.0
*/ | Robert0Trebor/robert | TMessagesProj/src/main/java/org/vidogram/messenger/g/a/j.java | Java | gpl-2.0 | 1,439 |
package it.polito.nexa.pc;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Statement;
import java.util.List;
/**
* Created by giuseppe on 19/05/15.
*/
public interface TriplesAdder {
public Model addTriples(Model model, List<Statement> statementList);
}
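/*
* Illustrative sketch (not part of the original source): a minimal
* implementation that appends every statement to the model and returns it.
*/
class SimpleTriplesAdder implements TriplesAdder {
@Override
public Model addTriples(Model model, List<Statement> statementList) {
for (Statement statement : statementList) {
model.add(statement);
}
return model;
}
}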
| giuseppefutia/rdf-public-contracts | src/main/java/it/polito/nexa/pc/TriplesAdder.java | Java | gpl-2.0 | 290 |
package ms.aurora.browser.wrapper;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.tidy.Tidy;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* A parser / access layer for HTML pages
* @author Rick
*/
public final class HTML {
private static Logger logger = Logger.getLogger(HTML.class);
private Document dom;
public HTML(Document dom) {
this.dom = dom;
}
public Document getDOM() {
return dom;
}
public List<Node> searchXPath(String expression) {
List<Node> matchingElements = new ArrayList<Node>();
try {
XPathExpression expressionObj = getExpression(expression);
NodeList resultingNodeList = (NodeList) expressionObj.evaluate(dom,
XPathConstants.NODESET);
for (int index = 0; index < resultingNodeList.getLength(); index++) {
matchingElements.add(resultingNodeList.item(index));
}
} catch (XPathExpressionException e) {
logger.error("Incorrect XPath expression", e);
}
return matchingElements;
}
public List<Node> searchXPath(Node base, String expression) {
List<Node> matchingElements = new ArrayList<Node>();
try {
XPathExpression expressionObj = getExpression(expression);
NodeList resultingNodeList = (NodeList) expressionObj.evaluate(base,
XPathConstants.NODESET);
for (int index = 0; index < resultingNodeList.getLength(); index++) {
matchingElements.add(resultingNodeList.item(index));
}
} catch (XPathExpressionException e) {
logger.error("Incorrect XPath expression", e);
}
return matchingElements;
}
private XPathExpression getExpression(String expression) throws XPathExpressionException {
XPath xpath = XPathFactory.newInstance().newXPath();
return xpath.compile(expression);
}
public static HTML fromStream(InputStream stream) {
try {
/*
* UGLY ASS W3C API IS UGLY
*/
Tidy tidy = new Tidy();
tidy.setXHTML(true);
Document dom = tidy.parseDOM(stream, null);
dom.getDocumentElement().normalize();
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
Source xmlSource = new DOMSource(dom);
Result outputTarget = new StreamResult(outputStream);
TransformerFactory.newInstance().newTransformer().transform(xmlSource, outputTarget);
InputStream is = new ByteArrayInputStream(outputStream.toByteArray());
return new HTML(db.parse(is));
} catch (Exception e) {
logger.error("Failed to parse HTML properly", e);
}
return null;
}
}
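/*
* Illustrative usage sketch (not part of the original source); the URL is
* hypothetical:
*
* InputStream in = new java.net.URL("http://example.com/").openStream();
* HTML page = HTML.fromStream(in);
* for (Node link : page.searchXPath("//a")) {
* System.out.println(link.getTextContent());
* }
*/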
| rvbiljouw/aurorabot | src/main/java/ms/aurora/browser/wrapper/HTML.java | Java | gpl-2.0 | 3,499 |
package es.uniovi.asw.gui.util.form.validator.specific;
import es.uniovi.asw.gui.util.form.validator.composite.CheckAllValidator;
import es.uniovi.asw.gui.util.form.validator.simple.LengthValidator;
import es.uniovi.asw.gui.util.form.validator.simple.NumberValidator;
public class TelephoneValidator extends CheckAllValidator {
public TelephoneValidator() {
super(new NumberValidator(), new LengthValidator(9));
}
@Override
public String help() {
return "S�lo n�meros, 9 caracteres.";
}
}
| Arquisoft/Trivial5a | game/src/main/java/es/uniovi/asw/gui/util/form/validator/specific/TelephoneValidator.java | Java | gpl-2.0 | 507 |
package com.greenpineyu.fel.function.operator;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.greenpineyu.fel.Expression;
import com.greenpineyu.fel.common.ArrayUtils;
import com.greenpineyu.fel.common.Null;
import com.greenpineyu.fel.common.ReflectUtil;
import com.greenpineyu.fel.compile.FelMethod;
import com.greenpineyu.fel.compile.SourceBuilder;
import com.greenpineyu.fel.context.FelContext;
import com.greenpineyu.fel.function.CommonFunction;
import com.greenpineyu.fel.function.Function;
import com.greenpineyu.fel.parser.FelNode;
import com.greenpineyu.fel.security.SecurityMgr;
public class Dot implements Function {
private static final Logger logger = LoggerFactory.getLogger(Dot.class);
private SecurityMgr securityMgr;
public SecurityMgr getSecurityMgr() {
return securityMgr;
}
public void setSecurityMgr(SecurityMgr securityMgr) {
this.securityMgr = securityMgr;
}
static final Map<Class<?>, Class<?>> PRIMITIVE_TYPES;
static {
PRIMITIVE_TYPES = new HashMap<Class<?>, Class<?>>();
PRIMITIVE_TYPES.put(Boolean.class, Boolean.TYPE);
PRIMITIVE_TYPES.put(Byte.class, Byte.TYPE);
PRIMITIVE_TYPES.put(Character.class, Character.TYPE);
PRIMITIVE_TYPES.put(Double.class, Double.TYPE);
PRIMITIVE_TYPES.put(Float.class, Float.TYPE);
PRIMITIVE_TYPES.put(Integer.class, Integer.TYPE);
PRIMITIVE_TYPES.put(Long.class, Long.TYPE);
PRIMITIVE_TYPES.put(Short.class, Short.TYPE);
}
public static final String DOT = ".";
@Override
public String getName() {
return DOT;
}
@Override
public Object call(FelNode node, FelContext context) {
List<FelNode> children = node.getChildren();
Object left = children.get(0);
if (left instanceof Expression) {
Expression exp = (Expression) left;
left = exp.eval(context);
}
FelNode right = children.get(1);
FelNode exp = right;
Class<?>[] argsType = new Class<?>[0];
Object[] args = CommonFunction.evalArgs(right, context);
if (!ArrayUtils.isEmpty(args)) {
argsType = new Class[args.length];
for (int i = 0; i < args.length; i++) {
if (args[i] == null) {
argsType[i] = Null.class;
continue;
}
argsType[i] = args[i].getClass();
}
}
Method method = null;
Class<?> cls = left instanceof Class<?> ? (Class<?>) left : left.getClass();
String methodName = right.getText();
method = findMethod(cls, methodName, argsType);
if (method == null) {
String getMethod = "get";
method = findMethod(cls, getMethod, new Class<?>[] { String.class });
args = new Object[] { exp.getText() };
}
if (method != null) return invoke(left, method, args);
return null;
}
private Method findMethod(Class<?> cls, String methodName, Class<?>[] argsType) {
Method method = ReflectUtil.findMethod(cls, methodName, argsType);
return getCallableMethod(method);
}
private Method getMethod(Class<?> cls, String methodName, Class<?>[] argsType) {
Method method = ReflectUtil.getMethod(cls, methodName, argsType);
return getCallableMethod(method);
}
private Method getCallableMethod(Method m) {
if (m == null || securityMgr.isCallable(m)) return m;
throw new SecurityException("安全管理器[" + securityMgr.getClass().getSimpleName() + "]禁止调用方法[" + m.toString() + "]");
}
/**
* Invokes the given method via reflection.
*
* @param obj
* @param method
* @param args
* @return
*/
public static Object invoke(Object obj, Method method, Object[] args) {
try {
return method.invoke(obj, args);
} catch (IllegalArgumentException | IllegalAccessException e) {
logger.error("", e);
} catch (InvocationTargetException e) {
logger.error("", e.getTargetException());
}
return null;
}
@Override
public FelMethod toMethod(FelNode node, FelContext context) {
StringBuilder sb = new StringBuilder();
List<FelNode> children = node.getChildren();
FelNode l = children.get(0);
SourceBuilder leftMethod = l.toMethod(context);
Class<?> cls = leftMethod.returnType(context, l);
String leftSrc = leftMethod.source(context, l);
if (cls.isPrimitive()) {
Class<?> wrapperClass = ReflectUtil.toWrapperClass(cls);
// If the left-hand value is a primitive, cast it to its wrapper type [eg:((Integer)1).doubleValue()]
sb.append("((").append(wrapperClass.getSimpleName()).append(")").append(leftSrc).append(")");
} else {
sb.append(leftSrc);
}
sb.append(".");
Method method = null;
FelNode rightNode = children.get(1);
List<FelNode> params = rightNode.getChildren();
List<SourceBuilder> paramMethods = new ArrayList<SourceBuilder>();
Class<?>[] paramValueTypes = null;
boolean hasParam = params != null && !params.isEmpty();
String rightMethod = rightNode.getText();
String rightMethodParam = "";
if (hasParam) {
// Arguments are present
paramValueTypes = new Class<?>[params.size()];
for (int i = 0; i < params.size(); i++) {
FelNode p = params.get(i);
SourceBuilder paramMethod = p.toMethod(context);
paramMethods.add(paramMethod);
paramValueTypes[i] = paramMethod.returnType(context, p);
}
// Look up the method by argument types
method = findMethod(cls, rightNode.getText(), paramValueTypes);
if (method != null) {
Class<?>[] paramTypes = method.getParameterTypes();
for (int i = 0; i < paramTypes.length; i++) {
Class<?> paramType = paramTypes[i];
FelNode p = params.get(i);
String paramCode = getParamCode(paramType, p, context);
rightMethodParam += paramCode + ",";
}
rightMethod = method.getName();
}
} else {
method = findMethod(cls, rightNode.getText(), new Class<?>[0]);
if (method == null) {
// If no matching method was found, fall back to get(String) to read the property
method = getMethod(cls, "get", new Class<?>[] { String.class });
if (method != null) {
rightMethod = "get";
rightMethodParam = "\"" + rightNode.getText() + "\"";
}
} else {
rightMethod = method.getName();
}
}
if (method != null) {}
if (rightMethodParam.endsWith(",")) {
rightMethodParam = rightMethodParam.substring(0, rightMethodParam.length() - 1);
}
rightMethod += "(" + rightMethodParam + ")";
sb.append(rightMethod);
FelMethod returnMe = new FelMethod(method == null ? null : method.getReturnType(), sb.toString());
return returnMe;
}
/**
* Builds the source code snippet for one method argument.
*
* @param paramType
* the parameter type declared by the target method
* @param paramValueType
* the runtime type of the argument value
* @param paramMethod
* @return
*/
public static String getParamCode(Class<?> paramType, FelNode node, FelContext ctx) {
// If the types match (wrapper and primitive types such as int and Integer are treated as equal), use the argument source as-is.
String paramCode = "";
SourceBuilder paramMethod = node.toMethod(ctx);
Class<?> paramValueType = paramMethod.returnType(ctx, node);
if (ReflectUtil.isTypeMatch(paramType, paramValueType)) {
paramCode = paramMethod.source(ctx, node);
} else {
// Otherwise insert an explicit cast
String className = null;
Class<?> wrapperClass = ReflectUtil.toWrapperClass(paramType);
if (wrapperClass != null) {
className = wrapperClass.getName();
} else {
className = paramType.getName();
}
paramCode = "(" + className + ")" + paramMethod.source(ctx, node);
}
return paramCode;
}
}
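/*
* Illustrative sketch (not part of the original source) of how call() resolves
* the dot operator; the expressions and context values are hypothetical:
*
* "list.size()" -> the left operand evaluates to a java.util.List, size() is
* located by reflection and invoked.
* "row.name" -> if the left operand's class has no name() method, the call
* falls back to row.get("name"), e.g. for a Map-backed row object.
*/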
| zbutfly/albacore | expr-fel-import/src/main/java/com/greenpineyu/fel/function/operator/Dot.java | Java | gpl-2.0 | 7,449 |
package gui;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.border.EmptyBorder;
import javax.swing.table.DefaultTableModel;
import logic.DB.MongoUserManager;
import logic.model.Statistics;
import logic.model.User;
import javax.swing.JLabel;
import java.awt.Font;
import java.awt.Toolkit;
public class ListPlayers extends JFrame {
/**
*
*/
private static final long serialVersionUID = 1L;
private JPanel contentPane;
private JScrollPane spUsers;
private JTable tabUsers;
private MongoUserManager mongo = new MongoUserManager();
private List<User> users;
private JButton btnClose;
private JLabel lbListUsers;
/**
* Launch the application.
*/
/*public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
ListPlayers frame = new ListPlayers();
frame.setVisible(true);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}*/
/**
* Create the frame.
*/
public ListPlayers() {
setIconImage(Toolkit.getDefaultToolkit().getImage("C:\\Users\\Raquel\\Desktop\\ASWProject\\Trivial_i1b\\Game\\src\\main\\resources\\Images\\icono.png"));
setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
setBounds(100, 100, 532, 340);
contentPane = new JPanel();
contentPane.setBackground(new Color(0,0,139));
contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
setContentPane(contentPane);
contentPane.setLayout(null);
contentPane.add(getSpUsers());
contentPane.add(getBtnClose());
contentPane.setBackground(InitialWindow.pnFondo.getBackground());
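		// "See statistics" opens a StatisticsWindow for the user selected in the table (assumes a row is currently selected)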
JButton btnSeeStatistics = new JButton("See statistics");
btnSeeStatistics.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
users = mongo.getAllUsers();
StatisticsWindow statistics = new StatisticsWindow();
statistics.setVisible(true);
statistics.txPlayer.setText((String) tabUsers.getValueAt(tabUsers.getSelectedRow(), 0));
int row = tabUsers.getSelectedRow();
int newRow = 0;
for (User u : users){
if (u.getEmail().equals(tabUsers.getValueAt(row, 1))){
Statistics s = u.getStatistics();
statistics.tabStatistics.setValueAt(s.getQuestionsMatched(), newRow, 0);
statistics.tabStatistics.setValueAt(s.getQuestionsAnswered(), newRow, 1);
statistics.tabStatistics.setValueAt(s.getTimesPlayed(), newRow, 2);
newRow++;
}
}
}
});
btnSeeStatistics.setBounds(357, 42, 123, 23);
contentPane.add(btnSeeStatistics);
contentPane.add(getLbListUsers());
}
private JScrollPane getSpUsers() {
if (spUsers == null) {
spUsers = new JScrollPane();
spUsers.setBounds(42, 103, 306, 128);
spUsers.setViewportView(getTabUsers());
spUsers.setBackground(InitialWindow.pnFondo.getBackground());
}
return spUsers;
}
private JTable getTabUsers() {
if (tabUsers == null) {
tabUsers = new JTable();
tabUsers.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
tabUsers.setModel(new DefaultTableModel(
new Object[][] {
},
new String[] {
"Username", "Email"
}
));
}
DefaultTableModel model = (DefaultTableModel)tabUsers.getModel();
listUsers(model);
return tabUsers;
}
private JButton getBtnClose() {
if (btnClose == null) {
btnClose = new JButton("Close");
btnClose.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
dispose();
}
});
btnClose.setBounds(378, 230, 76, 23);
}
return btnClose;
}
private void listUsers(DefaultTableModel model) {
users = mongo.getAllUsers();
Object[] row = new Object[2];
for (int i = 0; i < users.size(); i++) {
row[0] = users.get(i).getUsername();
row[1] = users.get(i).getEmail();
model.addRow(row);
}
}
private JLabel getLbListUsers() {
if (lbListUsers == null) {
lbListUsers = new JLabel("List of users:");
lbListUsers.setFont(new Font("Arial", Font.PLAIN, 25));
lbListUsers.setBounds(142, 32, 195, 32);
}
return lbListUsers;
}
}
| Arquisoft/Trivial_i1b | Game/src/main/java/gui/ListPlayers.java | Java | gpl-2.0 | 4,299 |
package sabstracta;
/**
* Represents an or operation in the syntax tree.
*
*/
public class Or extends ExpresionBinariaLogica {
public Or(Expresion _izq, Expresion _dch) {
super(_izq, _dch);
}
/**
* Returns the instruction code.
*/
@Override
protected String getInst() {
return "or";
}
}
| igofunke/LPM | src/sabstracta/Or.java | Java | gpl-2.0 | 325 |
package org.oguz.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebInitParam;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
public class XMLServlet extends HttpServlet
{
/**
*
*/
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
response.setContentType("text/html");
PrintWriter out = response.getWriter();
String userName = request.getParameter("username");
String fullName = request.getParameter("fullname");
String profession = request.getParameter("profession");
// HttpSession session =request.getSession();
ServletContext context = request.getServletContext();
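		// submitted values are stored as ServletContext attributes, so they are shared across all sessions of the application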
if (userName != "" && userName != null)
{
// session.setAttribute("savedUser",userName);
context.setAttribute("savedUser", userName);
out.println("<p>Hello context parameter " + (String)context.getAttribute("savedUser") +
" from GET method</p>");
}
else
{
out.println("<p>Hello default user " +
this.getServletConfig().getInitParameter("username") + " from GET method</p>");
}
if (fullName != "" && fullName != null)
{
// session.setAttribute("savedFull", fullName);
context.setAttribute("savedFull", fullName);
out.println("<p> your full name is: " + (String)context.getAttribute("savedFull") +
"</p>");
}
else
{
out.println("<p>Hello default fullname " +
this.getServletConfig().getInitParameter("fullname") + " from GET method</p>");
}
}
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
response.setContentType("text/html");
PrintWriter out = response.getWriter();
String userName = request.getParameter("username");
String fullName = request.getParameter("fullname");
String profession = request.getParameter("profession");
// String location = request.getParameter("location");
String[] location = request.getParameterValues("location");
out.println("<p>Hello " + userName + " from POST method in XMLSERVLET response</p>");
out.println("<p> your full name is: " + fullName + "</p>");
out.println("<p>your profession is: " + profession + "</p>");
for (int i = 0; i < location.length; i++)
{
out.println("<p>your location is: " + location[i].toUpperCase() + "</p>");
}
}
}
| ogz00/Servlet-SimpleServletProject | src/org/oguz/servlet/XMLServlet.java | Java | gpl-2.0 | 2,759 |
package fr.npellegrin.xebia.mower.parser.model;
/**
* Parsed position.
*/
public class PositionDefinition {
private int x;
private int y;
private OrientationDefinition orientation;
public int getX() {
return x;
}
public void setX(final int x) {
this.x = x;
}
public int getY() {
return y;
}
public void setY(final int y) {
this.y = y;
}
public OrientationDefinition getOrientation() {
return orientation;
}
public void setOrientation(final OrientationDefinition orientation) {
this.orientation = orientation;
}
}
| npellegrin/MowItNow | src/main/java/fr/npellegrin/xebia/mower/parser/model/PositionDefinition.java | Java | gpl-2.0 | 550 |
package org.iproduct.iptpi.domain.movement;
import static java.lang.Math.PI;
import static java.lang.Math.abs;
import static java.lang.Math.atan;
import static java.lang.Math.cbrt;
import static java.lang.Math.cos;
import static java.lang.Math.hypot;
import static java.lang.Math.min;
import static java.lang.Math.pow;
import static java.lang.Math.signum;
import static java.lang.Math.sin;
import static java.lang.Math.sqrt;
import static java.lang.Math.tan;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAIN_AXE_LENGTH;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAX_ROBOT_ANGULAR_ACCELERATION;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAX_ROBOT_LINEAR_ACCELERATION;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.MAX_ROBOT_LINEAR_VELOCITY;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.ROBOT_STOPPING_DECCELERATION;
import static org.iproduct.iptpi.demo.robot.RobotParametrs.WHEEL_RADIUS;
import static org.iproduct.iptpi.domain.CommandName.STOP;
import static org.iproduct.iptpi.domain.CommandName.VOID;
import org.iproduct.iptpi.domain.Command;
import org.iproduct.iptpi.domain.arduino.LineReadings;
import org.iproduct.iptpi.domain.audio.AudioPlayer;
import org.iproduct.iptpi.domain.position.Position;
import org.iproduct.iptpi.domain.position.PositionsFlux;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import com.pi4j.wiringpi.Gpio;
import reactor.core.publisher.EmitterProcessor;
import reactor.core.publisher.Flux;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuple3;
import reactor.util.function.Tuple4;
import reactor.util.function.Tuples;
public class MovementCommandSubscriber implements Subscriber<Command> {
public static final int MAX_SPEED = 1024;
public static final int CLOCK_DIVISOR = 2;
public static final double LANDING_CURVE_PARAMETER = 0.000000005;
public static final MotorsCommand STOP_COMMAND = new MotorsCommand(0, 0, 0, 0, 0);
private Subscription subscription;
private PositionsFlux positions;
private Flux<LineReadings> lineReadings;
// private SchedulerGroup eventLoops = SchedulerGroup.async();
//Create movement command broadcaster
private EmitterProcessor<Command> commandFlux = EmitterProcessor.create();
public MovementCommandSubscriber(PositionsFlux positions, Flux<LineReadings> lineReadings) {
this.positions = positions;
this.lineReadings = lineReadings;
}
@Override
public void onNext(Command command) {
setupGpioForMovement();
switch (command.getName()) {
case MOVE_FORWARD : moveForward(command); break;
case FOLLOW_LINE : followLine(command); break;
case MOVE_RELATIVE : moveRelative(command); break;
case STOP :
System.out.println("STOPPING THE ROBOT");
runMotors(STOP_COMMAND);
break;
default:
break;
}
}
protected void moveRelative(Command command) {
RelativeMovement relMove = (RelativeMovement) command.getData();
// start moving - and think later as it comes :)
int directionL, directionR;
if(relMove.getVelocity() < 0) {
directionL = directionR = -1;
} else {
directionL = directionR = 1;
}
double targetVelocity = abs(relMove.getVelocity());
int velocity = (int)(MAX_SPEED * targetVelocity / MAX_ROBOT_LINEAR_VELOCITY); // 50 mm/s max
MotorsCommand initialCommand = new MotorsCommand(directionL, directionR, velocity, velocity, Long.MAX_VALUE); //distance still unknown
System.out.println(initialCommand);
runMotors(initialCommand);
Position startPos = positions.elementAt(1).block();
double targetDeltaX = relMove.getDeltaX();
double targetDeltaY = relMove.getDeltaY();
double targetX = startPos.getX() + targetDeltaX;
double targetY = startPos.getY() + targetDeltaY;
double distance = hypot(targetDeltaX, targetDeltaY);
System.out.println("$$$$$$$$$$$$$$ TargetX=" + targetX );
System.out.println("$$$$$$$$$$$$$$ TargetY=" + targetY );
System.out.println("$$$$$$$$$$$$$$ Target Distance=" + distance);
double targetHeading, targetDeltaHeading, targetCurvature, h = 0;
if(relMove.getDeltaHeading() == 0 ) {
targetCurvature = targetDeltaHeading = 0;
targetHeading = startPos.getHeading();
} else {
targetDeltaHeading = relMove.getDeltaHeading();
targetHeading = startPos.getHeading() + targetDeltaHeading ;
targetCurvature = (2 * sin(targetDeltaHeading / 2) ) / distance ;
h = sqrt( 1/(targetCurvature * targetCurvature) - 0.25 * distance * distance );
}
double xC, yC; //circle center coordinates
double r = hypot(distance/2, h);
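		// locate the arc centre on the perpendicular bisector of the start-target chord; the sign of the curvature selects which of the two candidate centres is used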
if(targetCurvature != 0) {
double q = hypot( targetX - startPos.getX(), targetY - startPos.getY() ),
x3 = (targetX + startPos.getX()) /2,
y3 = (targetY + startPos.getY()) /2;
if(targetCurvature > 0) {
xC = x3 + sqrt(r*r - (q*q/4)) * (startPos.getY() - targetY)/q;
yC = y3 + sqrt(r*r - (q*q/4)) * (targetX - startPos.getX() )/q;
} else {
xC = x3 - sqrt(r*r - (q*q/4)) * (startPos.getY() - targetY)/q;
yC = y3 - sqrt(r*r - (q*q/4)) * (targetX - startPos.getX() )/q;
}
} else {
xC = (targetX + startPos.getX()) /2;
yC = (targetY + startPos.getY()) /2;
}
System.out.println("$$$$$$$$$$$$$$ TargetHeading=" + targetHeading );
System.out.println("$$$$$$$$$$$$$$ TargetCurvature=" + targetCurvature );
double targetAngularVelocity;
if (targetDeltaHeading != 0 && relMove.getAngularVelocity() == 0)
targetAngularVelocity = targetVelocity * targetCurvature;
else
targetAngularVelocity = relMove.getAngularVelocity();
double startH = startPos.getHeading();
System.out.println("START POSITION: " + startPos);
Flux<Position> skip1 = positions.skip(1);
Flux.zip(positions, skip1)
.scan(initialCommand, (last, tupple) -> {
Position prevPos = ((Position)tupple.getT1());
Position currPos = ((Position)tupple.getT2());
float prevX = prevPos.getX();
float prevY = prevPos.getY();
double prevH = prevPos.getHeading();
float currX = currPos.getX();
float currY = currPos.getY();
double currH = currPos.getHeading();
System.out.println(currPos + " - " + prevPos);
				double dt = (currPos.getTimestamp() - prevPos.getTimestamp()) / 1000.0; //delta time in seconds between position readings
if(dt <= 0) return last; // if invalid sequence do nothing
double time = (currPos.getTimestamp() - startPos.getTimestamp()) /1000.0;
// calculating the ideal trajectory position
double tarX, tarY, tarH, remainingPathLength;
if(targetCurvature == 0) {
tarX = startPos.getX() + targetVelocity * time * cos(targetHeading);
tarY = startPos.getY() + targetVelocity * time * sin(targetHeading);
remainingPathLength = hypot(targetX - currX, targetY - currY) ;
tarH = targetHeading;
} else {
double deltaHeading = targetAngularVelocity * time;
double startAng = atan((startPos.getY() - yC) / (startPos.getX() - xC));
double angle = startAng + deltaHeading;
					if(signum(angle) != signum(startPos.getY() - yC))
angle -= PI;
tarX = cos(angle) * r + xC;
tarY = sin(angle) * r + yC;
tarH = startPos.getHeading() + deltaHeading;
remainingPathLength = (targetDeltaHeading - deltaHeading ) / targetCurvature;
// System.out.println(" -----> tarX=" + tarX + ", tarY=" + tarY + ", tarH=" + tarH + ", deltaHeading=" + deltaHeading + ", startAng=" + startAng + ", angle=" + angle);
// System.out.println(" -----> r=" + r + ", xC=" + xC + ", yC=" + yC );
}
//calculating current trajectory parameters
float dX = currX - prevX;
float dY = currY - prevY;
double currDist = hypot(dX, dY);
double currV = currDist / dt; // current velocity [mm/s]
double currAngV = (currH - prevH) / dt;
//calculating errors
				double errX = (tarX - currX) * cos(tarH) + (tarY - currY) * sin(tarH);
				double errY = (tarX - currX) * sin(tarH) + (tarY - currY) * cos(tarH);
double errH = tarH - currH;
//calculating landing curve
double Cx = LANDING_CURVE_PARAMETER;
double dlandY = 3 * Cx * pow(cbrt(abs(errY) / Cx), 2) * signum(errY);
double landH = tarH + atan(dlandY);
double dErrY = -targetAngularVelocity * errX + currV * sin (errH);
double landAngV = targetAngularVelocity + (2 * (1 / cbrt(abs(errY) / Cx)) * dErrY) /
(1 + tan(landH - tarH) * tan(landH - tarH));
//calculating the corrected trajectory control parameters
double switchAngV = landAngV - currAngV +
sqrt(2 * MAX_ROBOT_ANGULAR_ACCELERATION * abs(landH - currH))
* signum(landH - currH) * 0.2;
double switchAngA = min(abs(switchAngV / dt), MAX_ROBOT_ANGULAR_ACCELERATION) * signum(switchAngV);
double newAngV = currAngV + switchAngA * dt;
//calculating new velocity
double dErrX = targetVelocity - currV * cos(errH) + targetAngularVelocity * errY;
double switchV = dErrX + sqrt( 2 * MAX_ROBOT_LINEAR_ACCELERATION * abs(errX)) * signum(errX);
double switchA = min(abs(switchV / dt), MAX_ROBOT_LINEAR_ACCELERATION) * signum(switchV);
//calculating delta motor speed control values
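				// k: gain applied to the left/right wheel-speed differential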
double k = 0.1;
double newDeltaLR = k* MAX_SPEED * MAIN_AXE_LENGTH * dt * switchAngA / (2 * WHEEL_RADIUS);
//calculating new motor speed control values
int newVL = (int) (last.getVelocityL() + switchA * dt / WHEEL_RADIUS - newDeltaLR * last.getDirL());
int newVR = (int) (last.getVelocityR() + switchA * dt / WHEEL_RADIUS + newDeltaLR * last.getDirL());
System.out.println("--> errH=" + errH + ", targetHeading=" + targetHeading + ", currH=" + currH + ", dist=" + currDist
);
// System.out.println("!!! landH=" + landH + ", dErrY=" + dErrY
// + ", currAngV=" + currAngV + ", landAngV=" + landAngV + ", switchAngV=" + switchAngV
// + ", switchAngA=" + switchAngA + ", newAngV=" + newAngV );
// System.out.println("!!! remainingPathLength=" + remainingPathLength + ", dErrX=" + dErrX + ", switchV=" + switchV + ", switchA=" + switchA );
// System.out.println("!!! newDeltaV=" + switchA * dt / WHEEL_RADIUS + ", newDelatLR=" + newDeltaLR + ", newVL=" + newVL + ", newVR=" + newVR);
double remainingDeltaHeading = targetHeading - currH;
if(remainingPathLength < last.getRemainingPath()
&& remainingPathLength > currV * currV / ROBOT_STOPPING_DECCELERATION
|| targetDeltaHeading > 0.01
&& abs(remainingDeltaHeading) > 0.05 && remainingDeltaHeading * targetDeltaHeading > 0 ) { //drive until minimum distance to target
return new MotorsCommand(last.getDirL(), last.getDirR(), newVL, newVR, (float) remainingPathLength);
} else {
System.out.println("FINAL POSITION: " + currPos);
return STOP_COMMAND;
}
}).map((MotorsCommand motorsCommand) -> {
runMotors(motorsCommand);
return motorsCommand;
})
.takeUntil((MotorsCommand motorsCommand) -> motorsCommand.equals(STOP_COMMAND) )
.subscribe( (MotorsCommand motorsCommand) -> {
System.out.println(motorsCommand);
});
}
protected void followLine(Command command) {
{
ForwardMovement forwardMove = (ForwardMovement) command.getData();
// start moving - and think later as it comes :)
int directionL, directionR;
if(forwardMove.getVelocity() < 0) {
directionL = directionR = -1;
} else {
directionL = directionR = 1;
}
double targetVelocity = abs(forwardMove.getVelocity());
int velocity = (int)(MAX_SPEED * targetVelocity / MAX_ROBOT_LINEAR_VELOCITY); // 50 mm/s max
MotorsCommand initialCommand = new MotorsCommand(directionL, directionR, velocity, velocity, Long.MAX_VALUE); //distance still unknown
System.out.println(initialCommand);
runMotors(initialCommand);
Position startPos = positions.elementAt(1).block();
double distance = forwardMove.getDistance();
double targetHeading = startPos.getHeading();
double targetDeltaX = distance * cos(targetHeading);
double targetDeltaY = distance * sin(targetHeading);
double targetX = startPos.getX() + targetDeltaX;
double targetY = startPos.getY() + targetDeltaY;
System.out.println("$$$$$$$$$$$$$$ TargetX=" + targetX );
System.out.println("$$$$$$$$$$$$$$ TargetY=" + targetY );
System.out.println("$$$$$$$$$$$$$$ Target Distance=" + distance);
System.out.println("$$$$$$$$$$$$$$ TargetHeading=" + targetHeading );
double startH = startPos.getHeading();
System.out.println("START POSITION: " + startPos);
Flux<Position> skip1 = positions.skip(1);
Flux<Tuple2<Position, Position>> lastTwoPositionsFlux = Flux.zip(positions, skip1);
Flux<Tuple4<Position, Position, LineReadings, Command>> flux =
Flux.combineLatest(
lastTwoPositionsFlux,
lineReadings,
commandFlux.startWith(new Command(VOID, null)),
(Object[] args) ->
Tuples.of(((Tuple2<Position, Position>)args[0]).getT1(),
((Tuple2<Position, Position>)args[0]).getT2(),
(LineReadings)args[1],
(Command)args[2])
);
flux.scan(initialCommand, (last, tuple4) -> {
System.out.println("########## NEW EVENT !!!!!!!!!!!");
Position prevPos = tuple4.getT1();
Position currPos = tuple4.getT2();
LineReadings lastReadings = tuple4.getT3();
Command lastCommand = tuple4.getT4();
float prevX = prevPos.getX();
float prevY = prevPos.getY();
double prevH = prevPos.getHeading();
float currX = currPos.getX();
float currY = currPos.getY();
double currH = currPos.getHeading();
System.out.println(currPos + " - " + prevPos);
				double dt = (currPos.getTimestamp() - prevPos.getTimestamp()) / 1000.0; //delta time in seconds between position readings
if(dt <= 0) return last; // if invalid sequence do nothing
double time = (currPos.getTimestamp() - startPos.getTimestamp()) /1000.0;
// calculating the ideal trajectory position
double tarX, tarY, tarH, remainingPathLength;
tarX = startPos.getX() + targetVelocity * time * cos(targetHeading);
tarY = startPos.getY() + targetVelocity * time * sin(targetHeading);
remainingPathLength = hypot(targetX - currX, targetY - currY) ;
tarH = targetHeading;
//calculating current trajectory parameters
float dX = currX - prevX;
float dY = currY - prevY;
double currDist = hypot(dX, dY);
double currV = currDist / dt; // current velocity [mm/s]
double currAngV = (currH - prevH) / dt;
//calculating errors
				double errX = (tarX - currX) * cos(tarH) + (tarY - currY) * sin(tarH);
				double errY = (tarX - currX) * sin(tarH) + (tarY - currY) * cos(tarH);
double errH = tarH - currH;
//calculating landing curve
double Cx = LANDING_CURVE_PARAMETER;
double dlandY = 3 * Cx * pow(cbrt(abs(errY) / Cx), 2) * signum(errY);
double landH = tarH + atan(dlandY);
double dErrY = currV * sin (errH);
double landAngV = (2 * (1 / cbrt(abs(errY) / Cx)) * dErrY) /
(1 + tan(landH - tarH) * tan(landH - tarH));
//calculating the corrected trajectory control parameters
double switchAngV = landAngV - currAngV +
sqrt(2 * MAX_ROBOT_ANGULAR_ACCELERATION * abs(landH - currH))
* signum(landH - currH) * 0.2;
double switchAngA = min(abs(switchAngV / dt), MAX_ROBOT_ANGULAR_ACCELERATION) * signum(switchAngV);
double newAngV = currAngV + switchAngA * dt;
//calculating new velocity
double dErrX = targetVelocity - currV * cos(errH);
double switchV = dErrX + sqrt( 2 * MAX_ROBOT_LINEAR_ACCELERATION * abs(errX)) * signum(errX);
double switchA = min(abs(switchV / dt), MAX_ROBOT_LINEAR_ACCELERATION) * signum(switchV);
// double newV = currV + switchA * dt;
//calculating delta motor speed control values
double k = 0.1;
double newDeltaLR = k* MAX_SPEED * MAIN_AXE_LENGTH * dt * switchAngA / (2 * WHEEL_RADIUS);
//calculating new motor speed control values
int newVL = (int) (last.getVelocityL() + switchA * dt / WHEEL_RADIUS - newDeltaLR * last.getDirL());
int newVR = (int) (last.getVelocityR() + switchA * dt / WHEEL_RADIUS + newDeltaLR * last.getDirL());
System.out.println("!!! time=" + time + ", dt=" + dt + ", tarX=" + tarX + ", tarY=" + tarY
+ ", startH=" + startH + ", errH=" + errH + ", targetX=" + targetX + ", targetY=" + targetY + ", targetHeading=" + targetHeading
+ ", errX=" + errX + ", errY=" + errY + ", dlandY=" + dlandY + ", currV=" + currV + ", dist=" + currDist
+ ", switchAngV/dt=" + switchAngV / dt );
System.out.println("!!! remainingPathLength=" + remainingPathLength + ", dErrX=" + dErrX + ", switchV=" + switchV + ", switchA=" + switchA );
if(lastCommand.getName() != STOP && remainingPathLength < last.getRemainingPath()
&& remainingPathLength > currV * currV / ROBOT_STOPPING_DECCELERATION ) { //drive until minimum distance to target
return new MotorsCommand(last.getDirL(), last.getDirR(), newVL, newVR, (float) remainingPathLength);
} else {
System.out.println("FINAL POSITION: " + currPos);
return STOP_COMMAND;
}
}).map((MotorsCommand motorsCommand) -> {
runMotors(motorsCommand);
return motorsCommand;
})
.takeUntil((MotorsCommand motorsCommand) -> motorsCommand.equals(STOP_COMMAND) )
.subscribe( (MotorsCommand motorsCommand) -> {
System.out.println(motorsCommand);
});
}
}
protected void moveForward(Command command) {
{
ForwardMovement forwardMove = (ForwardMovement) command.getData();
// start moving - and think later as it comes :)
int directionL, directionR;
if(forwardMove.getVelocity() < 0) {
directionL = directionR = -1;
} else {
directionL = directionR = 1;
}
double targetVelocity = abs(forwardMove.getVelocity());
int velocity = (int)(MAX_SPEED * targetVelocity / MAX_ROBOT_LINEAR_VELOCITY); // 50 mm/s max
MotorsCommand initialCommand = new MotorsCommand(directionL, directionR, velocity, velocity, Long.MAX_VALUE); //distance still unknown
System.out.println(initialCommand);
runMotors(initialCommand);
Position startPos = positions.elementAt(1).block();
double distance = forwardMove.getDistance();
double targetHeading = startPos.getHeading();
double targetDeltaX = distance * cos(targetHeading);
double targetDeltaY = distance * sin(targetHeading);
double targetX = startPos.getX() + targetDeltaX;
double targetY = startPos.getY() + targetDeltaY;
System.out.println("$$$$$$$$$$$$$$ TargetX=" + targetX );
System.out.println("$$$$$$$$$$$$$$ TargetY=" + targetY );
System.out.println("$$$$$$$$$$$$$$ Target Distance=" + distance);
System.out.println("$$$$$$$$$$$$$$ TargetHeading=" + targetHeading );
double startH = startPos.getHeading();
System.out.println("START POSITION: " + startPos);
Flux<Position> skip1 = positions.skip(1);
Flux<Tuple2<Position, Position>> lastTwoPositionsFlux = Flux.zip(positions, skip1);
Flux<Tuple3<Position, Position, Command>> flux =
Flux.combineLatest(
lastTwoPositionsFlux,
commandFlux.startWith(new Command(VOID, null)),
(tuple2, lastCommand) -> Tuples.of(tuple2.getT1(), tuple2.getT2(), lastCommand)
);
flux.scan(initialCommand, (last, tuple3) -> {
System.out.println("########## NEW EVENT !!!!!!!!!!!");
Position prevPos = tuple3.getT1();
Position currPos = tuple3.getT2();
Command lastCommand = tuple3.getT3();
float prevX = prevPos.getX();
float prevY = prevPos.getY();
double prevH = prevPos.getHeading();
float currX = currPos.getX();
float currY = currPos.getY();
double currH = currPos.getHeading();
System.out.println(currPos + " - " + prevPos);
				double dt = (currPos.getTimestamp() - prevPos.getTimestamp()) / 1000.0; //delta time in seconds between position readings
if(dt <= 0) return last; // if invalid sequence do nothing
double time = (currPos.getTimestamp() - startPos.getTimestamp()) /1000.0;
// calculating the ideal trajectory position
double tarX, tarY, tarH, remainingPathLength;
tarX = startPos.getX() + targetVelocity * time * cos(targetHeading);
tarY = startPos.getY() + targetVelocity * time * sin(targetHeading);
remainingPathLength = hypot(targetX - currX, targetY - currY) ;
tarH = targetHeading;
//calculating current trajectory parameters
float dX = currX - prevX;
float dY = currY - prevY;
double currDist = hypot(dX, dY);
double currV = currDist / dt; // current velocity [mm/s]
double currAngV = (currH - prevH) / dt;
//calculating errors
				double errX = (tarX - currX) * cos(tarH) + (tarY - currY) * sin(tarH);
				double errY = (tarX - currX) * sin(tarH) + (tarY - currY) * cos(tarH);
double errH = tarH - currH;
//calculating landing curve
double Cx = LANDING_CURVE_PARAMETER;
double dlandY = 3 * Cx * pow(cbrt(abs(errY) / Cx), 2) * signum(errY);
double landH = tarH + atan(dlandY);
double dErrY = currV * sin (errH);
double landAngV = (2 * (1 / cbrt(abs(errY) / Cx)) * dErrY) /
(1 + tan(landH - tarH) * tan(landH - tarH));
//calculating the corrected trajectory control parameters
double switchAngV = landAngV - currAngV +
sqrt(2 * MAX_ROBOT_ANGULAR_ACCELERATION * abs(landH - currH))
* signum(landH - currH) * 0.2;
double switchAngA = min(abs(switchAngV / dt), MAX_ROBOT_ANGULAR_ACCELERATION) * signum(switchAngV);
double newAngV = currAngV + switchAngA * dt;
//calculating new velocity
double dErrX = targetVelocity - currV * cos(errH);
double switchV = dErrX + sqrt( 2 * MAX_ROBOT_LINEAR_ACCELERATION * abs(errX)) * signum(errX);
double switchA = min(abs(switchV / dt), MAX_ROBOT_LINEAR_ACCELERATION) * signum(switchV);
// double newV = currV + switchA * dt;
//calculating delta motor speed control values
double k = 0.1;
double newDeltaLR = k* MAX_SPEED * MAIN_AXE_LENGTH * dt * switchAngA / (2 * WHEEL_RADIUS);
//calculating new motor speed control values
int newVL = (int) (last.getVelocityL() + switchA * dt / WHEEL_RADIUS - newDeltaLR * last.getDirL());
int newVR = (int) (last.getVelocityR() + switchA * dt / WHEEL_RADIUS + newDeltaLR * last.getDirL());
System.out.println("!!! time=" + time + ", dt=" + dt + ", tarX=" + tarX + ", tarY=" + tarY
+ ", startH=" + startH + ", errH=" + errH + ", targetX=" + targetX + ", targetY=" + targetY + ", targetHeading=" + targetHeading
+ ", errX=" + errX + ", errY=" + errY + ", dlandY=" + dlandY + ", currV=" + currV + ", dist=" + currDist
+ ", switchAngV/dt=" + switchAngV / dt );
System.out.println("!!! remainingPathLength=" + remainingPathLength + ", dErrX=" + dErrX + ", switchV=" + switchV + ", switchA=" + switchA );
if(lastCommand.getName() != STOP && remainingPathLength < last.getRemainingPath()
&& remainingPathLength > currV * currV / ROBOT_STOPPING_DECCELERATION ) { //drive until minimum distance to target
return new MotorsCommand(last.getDirL(), last.getDirR(), newVL, newVR, (float) remainingPathLength);
} else {
System.out.println("FINAL POSITION: " + currPos);
return STOP_COMMAND;
}
}).map((MotorsCommand motorsCommand) -> {
runMotors(motorsCommand);
return motorsCommand;
})
.takeUntil((MotorsCommand motorsCommand) -> motorsCommand.equals(STOP_COMMAND) )
.subscribe( (MotorsCommand motorsCommand) -> {
System.out.println(motorsCommand);
});
}
}
protected void setupGpioForMovement() {
// Motor direction pins
Gpio.pinMode(5, Gpio.OUTPUT);
Gpio.pinMode(6, Gpio.OUTPUT);
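		// Motor speed pins (hardware PWM)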
Gpio.pinMode(12, Gpio.PWM_OUTPUT);
Gpio.pinMode(13, Gpio.PWM_OUTPUT);
Gpio.pwmSetMode(Gpio.PWM_MODE_MS);
Gpio.pwmSetRange(MAX_SPEED);
Gpio.pwmSetClock(CLOCK_DIVISOR);
}
private void runMotors(MotorsCommand mc) {
//setting motor directions
Gpio.digitalWrite(5, mc.getDirR() > 0 ? 1 : 0);
Gpio.digitalWrite(6, mc.getDirL() > 0 ? 1 : 0);
//setting speed
if(mc.getVelocityR() >= 0 && mc.getVelocityR() <= MAX_SPEED)
Gpio.pwmWrite(12, mc.getVelocityR()); // speed up to MAX_SPEED
if(mc.getVelocityL() >= 0 && mc.getVelocityL() <= MAX_SPEED)
Gpio.pwmWrite(13, mc.getVelocityL());
}
@Override
public void onSubscribe(Subscription s) {
subscription = s;
subscription.request(Long.MAX_VALUE);
}
@Override
public void onError(Throwable t) {
// TODO Auto-generated method stub
}
@Override
public void onComplete() {
// TODO Auto-generated method stub
}
}
| iproduct/course-social-robotics | iptpi-demo/src/main/java/org/iproduct/iptpi/domain/movement/MovementCommandSubscriber.java | Java | gpl-2.0 | 25,456 |
package cn.ac.iscas.cloudeploy.v2.puppet.transform.ast;
import java.util.List;
public class ASTCollExpr extends ASTBase{
private Object test1;
private Object test2;
private String oper;
private List<Object> children;
private String form;
private String type;
public Object getTest1() {
return test1;
}
public void setTest1(Object test1) {
this.test1 = test1;
}
public Object getTest2() {
return test2;
}
public void setTest2(Object test2) {
this.test2 = test2;
}
public String getOper() {
return oper;
}
public void setOper(String oper) {
this.oper = oper;
}
public List<Object> getChildren() {
return children;
}
public void setChildren(List<Object> children) {
this.children = children;
}
public String getForm() {
return form;
}
public void setForm(String form) {
this.form = form;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
}
| xpxstar/Cloudeploy | script-service/src/main/java/cn/ac/iscas/cloudeploy/v2/puppet/transform/ast/ASTCollExpr.java | Java | gpl-2.0 | 941 |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.launcher2;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.FocusFinder;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;
import com.android.launcher.R;
public class Cling extends FrameLayout {
static final String WORKSPACE_CLING_DISMISSED_KEY = "cling.workspace.dismissed";
static final String ALLAPPS_CLING_DISMISSED_KEY = "cling.allapps.dismissed";
static final String FOLDER_CLING_DISMISSED_KEY = "cling.folder.dismissed";
private static String WORKSPACE_PORTRAIT = "workspace_portrait";
private static String WORKSPACE_LANDSCAPE = "workspace_landscape";
private static String WORKSPACE_LARGE = "workspace_large";
private static String WORKSPACE_CUSTOM = "workspace_custom";
private static String ALLAPPS_PORTRAIT = "all_apps_portrait";
private static String ALLAPPS_LANDSCAPE = "all_apps_landscape";
private static String ALLAPPS_LARGE = "all_apps_large";
private static String FOLDER_PORTRAIT = "folder_portrait";
private static String FOLDER_LANDSCAPE = "folder_landscape";
private static String FOLDER_LARGE = "folder_large";
private Launcher mLauncher;
private boolean mIsInitialized;
private String mDrawIdentifier;
private Drawable mBackground;
private Drawable mPunchThroughGraphic;
private Drawable mHandTouchGraphic;
private int mPunchThroughGraphicCenterRadius;
private int mAppIconSize;
private int mButtonBarHeight;
private float mRevealRadius;
private int[] mPositionData;
private Paint mErasePaint;
public Cling(Context context) {
this(context, null, 0);
}
public Cling(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public Cling(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.Cling, defStyle, 0);
mDrawIdentifier = a.getString(R.styleable.Cling_drawIdentifier);
a.recycle();
setClickable(true);
}
void init(Launcher l, int[] positionData) {
if (!mIsInitialized) {
mLauncher = l;
mPositionData = positionData;
Resources r = getContext().getResources();
mPunchThroughGraphic = r.getDrawable(R.drawable.cling);
mPunchThroughGraphicCenterRadius =
r.getDimensionPixelSize(R.dimen.clingPunchThroughGraphicCenterRadius);
mAppIconSize = r.getDimensionPixelSize(R.dimen.app_icon_size);
mRevealRadius = r.getDimensionPixelSize(R.dimen.reveal_radius) * 1f;
mButtonBarHeight = r.getDimensionPixelSize(R.dimen.button_bar_height);
mErasePaint = new Paint();
mErasePaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.MULTIPLY));
mErasePaint.setColor(0xFFFFFF);
mErasePaint.setAlpha(0);
mIsInitialized = true;
}
}
void cleanup() {
mBackground = null;
mPunchThroughGraphic = null;
mHandTouchGraphic = null;
mIsInitialized = false;
}
public String getDrawIdentifier() {
return mDrawIdentifier;
}
private int[] getPunchThroughPositions() {
if (mDrawIdentifier.equals(WORKSPACE_PORTRAIT)) {
return new int[]{getMeasuredWidth() / 2, getMeasuredHeight() - (mButtonBarHeight / 2)};
} else if (mDrawIdentifier.equals(WORKSPACE_LANDSCAPE)) {
return new int[]{getMeasuredWidth() - (mButtonBarHeight / 2), getMeasuredHeight() / 2};
} else if (mDrawIdentifier.equals(WORKSPACE_LARGE)) {
final float scale = LauncherApplication.getScreenDensity();
final int cornerXOffset = (int) (scale * 15);
final int cornerYOffset = (int) (scale * 10);
return new int[]{getMeasuredWidth() - cornerXOffset, cornerYOffset};
} else if (mDrawIdentifier.equals(ALLAPPS_PORTRAIT) ||
mDrawIdentifier.equals(ALLAPPS_LANDSCAPE) ||
mDrawIdentifier.equals(ALLAPPS_LARGE)) {
return mPositionData;
}
return new int[]{-1, -1};
}
@Override
public View focusSearch(int direction) {
return this.focusSearch(this, direction);
}
@Override
public View focusSearch(View focused, int direction) {
return FocusFinder.getInstance().findNextFocus(this, focused, direction);
}
@Override
public boolean onHoverEvent(MotionEvent event) {
return (mDrawIdentifier.equals(WORKSPACE_PORTRAIT)
|| mDrawIdentifier.equals(WORKSPACE_LANDSCAPE)
|| mDrawIdentifier.equals(WORKSPACE_LARGE)
|| mDrawIdentifier.equals(ALLAPPS_PORTRAIT)
|| mDrawIdentifier.equals(ALLAPPS_LANDSCAPE)
|| mDrawIdentifier.equals(ALLAPPS_LARGE)
|| mDrawIdentifier.equals(WORKSPACE_CUSTOM));
}
@Override
public boolean onTouchEvent(android.view.MotionEvent event) {
if (mDrawIdentifier.equals(WORKSPACE_PORTRAIT) ||
mDrawIdentifier.equals(WORKSPACE_LANDSCAPE) ||
mDrawIdentifier.equals(WORKSPACE_LARGE) ||
mDrawIdentifier.equals(ALLAPPS_PORTRAIT) ||
mDrawIdentifier.equals(ALLAPPS_LANDSCAPE) ||
mDrawIdentifier.equals(ALLAPPS_LARGE)) {
int[] positions = getPunchThroughPositions();
for (int i = 0; i < positions.length; i += 2) {
double diff = Math.sqrt(Math.pow(event.getX() - positions[i], 2) +
Math.pow(event.getY() - positions[i + 1], 2));
if (diff < mRevealRadius) {
return false;
}
}
} else if (mDrawIdentifier.equals(FOLDER_PORTRAIT) ||
mDrawIdentifier.equals(FOLDER_LANDSCAPE) ||
mDrawIdentifier.equals(FOLDER_LARGE)) {
Folder f = mLauncher.getWorkspace().getOpenFolder();
if (f != null) {
Rect r = new Rect();
f.getHitRect(r);
if (r.contains((int) event.getX(), (int) event.getY())) {
return false;
}
}
}
return true;
};
@Override
protected void dispatchDraw(Canvas canvas) {
if (mIsInitialized) {
DisplayMetrics metrics = new DisplayMetrics();
mLauncher.getWindowManager().getDefaultDisplay().getMetrics(metrics);
// Initialize the draw buffer (to allow punching through)
Bitmap b = Bitmap.createBitmap(getMeasuredWidth(), getMeasuredHeight(),
Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
// Draw the background
if (mBackground == null) {
if (mDrawIdentifier.equals(WORKSPACE_PORTRAIT) ||
mDrawIdentifier.equals(WORKSPACE_LANDSCAPE) ||
mDrawIdentifier.equals(WORKSPACE_LARGE)) {
mBackground = getResources().getDrawable(R.drawable.bg_cling1);
} else if (mDrawIdentifier.equals(ALLAPPS_PORTRAIT) ||
mDrawIdentifier.equals(ALLAPPS_LANDSCAPE) ||
mDrawIdentifier.equals(ALLAPPS_LARGE)) {
mBackground = getResources().getDrawable(R.drawable.bg_cling2);
} else if (mDrawIdentifier.equals(FOLDER_PORTRAIT) ||
mDrawIdentifier.equals(FOLDER_LANDSCAPE)) {
mBackground = getResources().getDrawable(R.drawable.bg_cling3);
} else if (mDrawIdentifier.equals(FOLDER_LARGE)) {
mBackground = getResources().getDrawable(R.drawable.bg_cling4);
} else if (mDrawIdentifier.equals(WORKSPACE_CUSTOM)) {
mBackground = getResources().getDrawable(R.drawable.bg_cling5);
}
}
if (mBackground != null) {
mBackground.setBounds(0, 0, getMeasuredWidth(), getMeasuredHeight());
mBackground.draw(c);
} else {
c.drawColor(0x99000000);
}
int cx = -1;
int cy = -1;
float scale = mRevealRadius / mPunchThroughGraphicCenterRadius;
int dw = (int) (scale * mPunchThroughGraphic.getIntrinsicWidth());
int dh = (int) (scale * mPunchThroughGraphic.getIntrinsicHeight());
// Determine where to draw the punch through graphic
int[] positions = getPunchThroughPositions();
for (int i = 0; i < positions.length; i += 2) {
cx = positions[i];
cy = positions[i + 1];
if (cx > -1 && cy > -1) {
c.drawCircle(cx, cy, mRevealRadius, mErasePaint);
mPunchThroughGraphic.setBounds(cx - dw / 2, cy - dh / 2, cx + dw / 2, cy + dh / 2);
mPunchThroughGraphic.draw(c);
}
}
// Draw the hand graphic in All Apps
if (mDrawIdentifier.equals(ALLAPPS_PORTRAIT) ||
mDrawIdentifier.equals(ALLAPPS_LANDSCAPE) ||
mDrawIdentifier.equals(ALLAPPS_LARGE)) {
if (mHandTouchGraphic == null) {
mHandTouchGraphic = getResources().getDrawable(R.drawable.hand);
}
int offset = mAppIconSize / 4;
mHandTouchGraphic.setBounds(cx + offset, cy + offset,
cx + mHandTouchGraphic.getIntrinsicWidth() + offset,
cy + mHandTouchGraphic.getIntrinsicHeight() + offset);
mHandTouchGraphic.draw(c);
}
canvas.drawBitmap(b, 0, 0, null);
c.setBitmap(null);
b = null;
}
// Draw the rest of the cling
super.dispatchDraw(canvas);
};
}
| rex-xxx/mt6572_x201 | packages/apps/Launcher2/src/com/android/launcher2/Cling.java | Java | gpl-2.0 | 11,008 |
package rb;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.Timer;
import java.util.TimerTask;
import javax.persistence.TypedQuery;
import rb.helpers.ClassificationResult;
import rb.helpers.DBHandler;
import rb.helpers.StemmerHelper;
import rb.persistentobjects.Statement;
import rb.persistentobjects.Word;
import ca.uwo.csd.ai.nlp.common.SparseVector;
import ca.uwo.csd.ai.nlp.kernel.KernelManager;
import ca.uwo.csd.ai.nlp.kernel.LinearKernel;
import ca.uwo.csd.ai.nlp.libsvm.svm_model;
import ca.uwo.csd.ai.nlp.libsvm.ex.Instance;
import ca.uwo.csd.ai.nlp.libsvm.ex.SVMPredictor;
import com.cd.reddit.Reddit;
import com.cd.reddit.RedditException;
import com.cd.reddit.json.jackson.RedditJsonParser;
import com.cd.reddit.json.mapping.RedditComment;
import com.cd.reddit.json.mapping.RedditLink;
import com.cd.reddit.json.util.RedditComments;
import de.daslaboratorium.machinelearning.classifier.BayesClassifier;
import de.daslaboratorium.machinelearning.classifier.Classification;
public class Poster {
private static Random randomGenerator;
static Reddit reddit = new Reddit("machinelearningbot/0.1 by elggem");
static String subreddit = "";
static BayesClassifier<String, String> bayes = null;
static svm_model model = null;
static List<Statement> statements = null;
static List<Word> words = null;
static int max_reply_length = 25;
static int classifier = 0;
static ArrayList<String> alreadyPostedComments = new ArrayList<String>();
/**
* @param args
*/
public static void main(String[] args) {
System.out.println("Reddit BOT Final stage. Armed and ready to go!");
randomGenerator = new Random();
if (args.length < 4) {
System.out.println(" usage: java -Xmx4g -jar JAR SUBREDDIT CLASSIFIER USER PASS\n" +
" with: SUBREDDIT = the subreddit to post to\n" +
" CLASSIFIER = 1:BN, 2:SVM 3:Random\n" +
" USER/PASS = username and password\n");
System.exit(1);
}
subreddit = args[0];
classifier = Integer.valueOf(args[1]);
try {
reddit.login(args[2], args[3]);
} catch (RedditException e) {
e.printStackTrace();
}
DBHandler.initializeHandler(args[0]);
if (statements == null) {
TypedQuery<Statement> query=DBHandler.em.createQuery("Select o from Statement o where o.text != \"[deleted]\" and o.parentStatement!= null and o.length>0 and o.length < " + max_reply_length,Statement.class);
statements = query.getResultList();
}
if (words == null) {
TypedQuery<Word> queryW=DBHandler.em.createQuery("Select o from Word o",Word.class);
words = queryW.getResultList();
}
//TIMER CODE
Timer timer = new Timer();
long thirty_minutes = 30*60*1000;
long one_hour = thirty_minutes*2;
long waiting_time = (long) (one_hour+(Math.random()*one_hour));
timer.schedule(new TimerTask() {
@Override
public void run() {
System.out.println("-> Awake from Hibernation");
RedditComment commentToReplyTo = findInterestingCommentPopularity(subreddit);
System.out.println("-> Generating Reply for " + commentToReplyTo.getBody());
String generatedReply = generateCommentFor(commentToReplyTo, classifier);
System.out.println("-> Generated " + generatedReply);
System.out.println("-> Posting..");
postCommentAsReplyTo(generatedReply, commentToReplyTo);
System.out.println("-> Going Back to Sleep...");
}
}, waiting_time, waiting_time);
//-------------
System.out.println("-> Awake from Hibernation");
RedditComment commentToReplyTo = findInterestingCommentPopularity(subreddit);
System.out.println("-> Generating Reply for " + commentToReplyTo.getBody());
String generatedReply = generateCommentFor(commentToReplyTo, classifier);
System.out.println("-> Generated " + generatedReply);
System.out.println("-> Posting..");
postCommentAsReplyTo(generatedReply, commentToReplyTo);
System.out.println("-> Going Back to Sleep...");
///---------------------
//DBHandler.closeHandler();
}
public static RedditComment findInterestingCommentRelevance(String subreddit) {
//Get the first 25 posts.
try {
List<RedditLink> links = reddit.listingFor(subreddit, "");
Collections.shuffle(links);
RedditLink linkOfInterest = links.get(0);
System.out.println(" findInterestingComment post: " + linkOfInterest);
List<RedditComment> comments = getAllCommentsForLink(linkOfInterest, subreddit);
ArrayList<ClassificationResult> classifications = new ArrayList<ClassificationResult>();
for (RedditComment redditComment : comments) {
classifications.add(classifyCommentBayes(redditComment));
}
RedditComment bestComment = null;
double max_prob = 0;
int i=0;
for (RedditComment redditComment : comments) {
double prob = classifications.get(i).probability;
if (prob>max_prob && convertStringToStemmedList(redditComment.getBody()).size()>0) {
max_prob = prob;
bestComment = redditComment;
}
i++;
}
System.out.println(" findInterestingComment comment: " + bestComment);
return bestComment;
} catch (Exception e) {
System.out.println("ERROR, trying next time: " + e.getLocalizedMessage());
}
return null;
}
public static RedditComment findInterestingCommentPopularity(String subreddit) {
//Get the first 25 posts.
try {
List<RedditLink> links = reddit.listingFor(subreddit, "");
Collections.shuffle(links);
RedditLink linkOfInterest = links.get(0);
System.out.println(" findInterestingComment post: " + linkOfInterest);
List<RedditComment> comments = getAllCommentsForLink(linkOfInterest, subreddit);
RedditComment bestComment = null;
long max_karma = 0;
for (RedditComment redditComment : comments) {
if (redditComment.getUps()-redditComment.getDowns() > max_karma) {
//if (redditComment.getReplies().toString().length() <= 10) {
max_karma = redditComment.getUps()-redditComment.getDowns();
bestComment = redditComment;
//}
}
}
System.out.println(" findInterestingComment comment: " + bestComment);
return bestComment;
} catch (Exception e) {
System.out.println("ERROR, trying next time: " + e.getLocalizedMessage());
}
return null;
}
public static String generateCommentFor(RedditComment comment, int classifierID) {
String generated = "";
if (classifierID == 1) {
generated = classifyCommentBayes(comment).resultString;
} else if (classifierID == 2) {
generated = classifyCommentSVM(comment);
} else if (classifierID == 3) {
generated = classifyCommentRandom(comment);
} else {
System.out.println("Choose a valid classifier dude.");
System.exit(1);
}
return generated;
}
public static void postCommentAsReplyTo(String comment, RedditComment parent) {
try {
System.out.println(" postCommentAsReplyTo response: "+reddit.comment(comment, parent.getName()));
} catch (RedditException e) {
System.out.println("ERROR, trying next time: " + e.getLocalizedMessage());
}
}
public static String classifyCommentSVM(RedditComment comment) {
if (model == null) {
try {
model = SVMPredictor.loadModel(subreddit + ".model");
} catch (Exception e1) {
System.out.println("ERROR: Couldnt load SVM model. Exitting");
System.exit(1);
}
KernelManager.setCustomKernel(new LinearKernel());
System.out.println(" classifyCommentSVM: loaded model!!");
}
ArrayList<String> inputlist = convertStringToStemmedList(comment.getBody());
SparseVector vec = new SparseVector();
for (Word word : words) {
int value = 0;
if(inputlist.contains(word.word)) {
value = 1;
}
vec.add(words.indexOf(word), value);
}
Instance inst = new Instance(0, vec);
double result = SVMPredictor.predict(inst, model, false);
return statements.get((int) result).text;
}
public static ClassificationResult classifyCommentBayes(RedditComment comment) {
if (bayes == null) {
// Create a new bayes classifier with string categories and string features.
bayes = new BayesClassifier<String, String>();
// Change the memory capacity. New learned classifications (using
// learn method are stored in a queue with the size given here and
// used to classify unknown sentences.
bayes.setMemoryCapacity(50000);
TypedQuery<Statement> query=DBHandler.em.createQuery("Select o from Statement o where o.text != \"[deleted]\" and o.parentStatement!= null and o.length>0 and o.length < " + max_reply_length,Statement.class);
List<Statement> statements = query.getResultList();
System.out.println(" classifyCommentBayes analyzing " + statements.size() + " statements... ");
for (Statement statement : statements) {
ArrayList<String> wordStrings = new ArrayList<String>();
for (Word word : statement.parentStatement.includedWords) {
wordStrings.add(word.word);
}
//LEARN
bayes.learn(statement.text, wordStrings);
}
}
ArrayList<String> list = convertStringToStemmedList(comment.getBody());
Classification<String,String> result = bayes.classify(list);
System.out.println(" classifyCommentBayes " + list + " prob " + result.getProbability());
return new ClassificationResult(result.getCategory(), result.getProbability());
}
public static String classifyCommentRandom(RedditComment comment) {
String result = statements.get(randomGenerator.nextInt(statements.size())).text;
System.out.println(" classifyCommentRandom ");
return result;
}
public static List<RedditComment>getAllRepliesForComment(RedditComment redditComment) {
List<RedditComment> thelist = new ArrayList<RedditComment>();
try {
thelist.add(redditComment);
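			// recurse only when the replies field holds a non-trivial JSON payload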
if (redditComment.getReplies().toString().length() >= 10) {
final RedditJsonParser parser = new RedditJsonParser(redditComment.getReplies());
List<RedditComment> redditReplies = parser.parseCommentsOnly();
for (RedditComment redditCommentReply : redditReplies) {
List<RedditComment> additionalList = getAllRepliesForComment(redditCommentReply);
for (RedditComment redditComment2 : additionalList) {
redditComment2.parent = redditComment;
}
thelist.addAll(additionalList);
}
}
} catch (RedditException e) {
e.printStackTrace();
}
return thelist;
}
public static List<RedditComment>getAllCommentsForLink(RedditLink redditLink, String subreddit) {
List<RedditComment> thelist = new ArrayList<RedditComment>();
try {
if (redditLink.getNum_comments()>0) {
RedditComments comments;
comments = reddit.commentsFor(subreddit, redditLink.getId());
for (RedditComment redditComment : comments.getComments()) {
List<RedditComment> additionalList = getAllRepliesForComment(redditComment);
thelist.addAll(additionalList);
}
}
} catch (RedditException e) {
e.printStackTrace();
}
return thelist;
}
public static ArrayList<String> convertStringToStemmedList(String input) {
ArrayList<String> wordStrings = new ArrayList<String>();
for (String string : input.split("\\s+")) {
String pruned = string.replaceAll("[^a-zA-Z]", "").toLowerCase();
String stemmed = StemmerHelper.stemWord(pruned);
if (DBHandler.checkWord(stemmed)) {
wordStrings.add(stemmed);
}
}
return wordStrings;
}
}
| elggem/redditbot | src/rb/Poster.java | Java | gpl-2.0 | 11,681 |
/*
* Copyright 2006-2016 The MZmine 3 Development Team
*
* This file is part of MZmine 3.
*
* MZmine 3 is free software; you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* MZmine 3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with MZmine 3; if not,
* write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package io.github.mzmine.util;
import java.io.File;
import java.util.List;
import javax.annotation.Nonnull;
import com.google.common.io.Files;
/**
* File name utilities
*/
public class FileNameUtil {
public static @Nonnull String findCommonPrefix(@Nonnull List<File> fileNames) {
if (fileNames.size() < 2)
return "";
String firstName = fileNames.get(0).getName();
for (int prefixLen = 0; prefixLen < firstName.length(); prefixLen++) {
char c = firstName.charAt(prefixLen);
for (int i = 1; i < fileNames.size(); i++) {
String ithName = fileNames.get(i).getName();
if (prefixLen >= ithName.length() || ithName.charAt(prefixLen) != c) {
// Mismatch found
return ithName.substring(0, prefixLen);
}
}
}
return firstName;
}
public static @Nonnull String findCommonSuffix(@Nonnull List<File> fileNames) {
if (fileNames.isEmpty())
return "";
if (fileNames.size() == 1) {
// Return file extension
String ext = Files.getFileExtension(fileNames.get(0).getAbsolutePath());
return "." + ext;
}
String firstName = fileNames.get(0).getName();
for (int suffixLen = 0; suffixLen < firstName.length(); suffixLen++) {
char c = firstName.charAt(firstName.length() - 1 - suffixLen);
for (int i = 1; i < fileNames.size(); i++) {
String ithName = fileNames.get(i).getName();
if (suffixLen >= ithName.length()
|| ithName.charAt(ithName.length() - 1 - suffixLen) != c) {
// Mismatch found
return ithName.substring(ithName.length() - suffixLen);
}
}
}
return firstName;
}
}
| DrewG/mzmine3 | src/main/java/io/github/mzmine/util/FileNameUtil.java | Java | gpl-2.0 | 2,482 |
package nl.pelagic.musicTree.flac2mp3.cli;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.nio.file.Files;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import nl.pelagic.audio.conversion.flac2mp3.api.Flac2Mp3Configuration;
import nl.pelagic.audio.conversion.flac2mp3.api.FlacToMp3;
import nl.pelagic.audio.musicTree.configuration.api.MusicTreeConfiguration;
import nl.pelagic.audio.musicTree.configuration.api.MusicTreeConstants;
import nl.pelagic.audio.musicTree.syncer.api.Syncer;
import nl.pelagic.audio.musicTree.util.MusicTreeHelpers;
import nl.pelagic.musicTree.flac2mp3.cli.i18n.Messages;
import nl.pelagic.shell.script.listener.api.ShellScriptListener;
import nl.pelagic.shutdownhook.api.ShutdownHookParticipant;
import nl.pelagic.util.file.FileUtils;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.osgi.framework.BundleContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
/**
* The main program that synchronises a flac tree into a mp3 tree or just
* converts one or more flac files in mp3 files, based on a music tree
* configuration
*/
@Component(property = {
"main.thread=true" /* Signal the launcher that this is the main thread */
})
public class Main implements Runnable, ShutdownHookParticipant {
/** the application logger name */
static private final String LOGGER_APPLICATION_NAME = "nl.pelagic"; //$NON-NLS-1$
/** the application logger level to allow */
static private final Level LOGGER_APPLICATION_LEVEL = Level.SEVERE;
/** the jaudiotagger library logger name */
static private final String LOGGER_JAUDIOTAGGER_NAME = "org.jaudiotagger"; //$NON-NLS-1$
/** the jaudiotagger library logger level to allow */
static private final Level LOGGER_JAUDIOTAGGER_LEVEL = Level.SEVERE;
/** the program name */
static final String PROGRAM_NAME = "flac2mp3"; //$NON-NLS-1$
/** the application logger */
private final Logger applicationLogger;
/** the jaudiotagger library logger */
private final Logger jaudiotaggerLogger;
/** the list of extension to use in the flac tree */
private final HashSet<String> extensionsList = new HashSet<>();
/** the filenames for covers to use in the flac tree */
private final HashSet<String> coversList = new HashSet<>();
/*
* Construction
*/
/**
* Default constructor
*/
public Main() {
super();
/**
* <pre>
* 1=timestamp
* 2=level
* 3=logger
* 4=class method
* 5=message
* 6=stack trace, preceded by a newline (if exception is present)
* </pre>
*/
System.setProperty("java.util.logging.SimpleFormatter.format", "[%1$tF %1$tT] %4$10s %2$s : %5$s%6$s%n"); //$NON-NLS-1$ //$NON-NLS-2$
applicationLogger = Logger.getLogger(LOGGER_APPLICATION_NAME);
applicationLogger.setLevel(LOGGER_APPLICATION_LEVEL);
jaudiotaggerLogger = Logger.getLogger(LOGGER_JAUDIOTAGGER_NAME);
jaudiotaggerLogger.setLevel(LOGGER_JAUDIOTAGGER_LEVEL);
extensionsList.add(MusicTreeConstants.FLACEXTENSION);
coversList.add(MusicTreeConstants.COVER);
}
/*
* Consumed Services
*/
/** the shell script listener (optional) */
private ShellScriptListener shellScriptListener = null;
/**
* @param shellScriptListener the shellScriptListener to set
*/
@Reference
void setShellScriptListener(ShellScriptListener shellScriptListener) {
this.shellScriptListener = shellScriptListener;
}
/** the flac2Mp3 service */
private FlacToMp3 flacToMp3 = null;
/**
* @param flacToMp3 the flacToMp3 to set
*/
@Reference
void setFlacToMp3(FlacToMp3 flacToMp3) {
this.flacToMp3 = flacToMp3;
}
/** the syncer service */
private Syncer syncer = null;
/**
* @param syncer the syncer to set
*/
@Reference
void setSyncer(Syncer syncer) {
this.syncer = syncer;
}
/*
* Command line arguments
*/
/** the launcher arguments property name */
static final String LAUNCHER_ARGUMENTS = "launcher.arguments"; //$NON-NLS-1$
/** the command line arguments */
private String[] args = null;
/**
* The bnd launcher provides access to the command line arguments via the
* Launcher object, which is also registered as a service under Object.
*
* @param done unused
* @param parameters the launcher parameters, which includes the command line
* arguments
*/
@Reference
void setDone(@SuppressWarnings("unused") Object done, Map<String, Object> parameters) {
args = (String[]) parameters.get(LAUNCHER_ARGUMENTS);
}
/*
* Bundle
*/
/** The setting name for the stayAlive property */
public static final String SETTING_STAYALIVE = "stayAlive"; //$NON-NLS-1$
/** true when the application should NOT automatically exit when done */
private boolean stayAlive = false;
/**
* Bundle activator
*
* @param bundleContext the bundle context
*/
@Activate
void activate(BundleContext bundleContext) {
String ex = bundleContext.getProperty(PROGRAM_NAME + "." + SETTING_STAYALIVE); //$NON-NLS-1$
if (ex != null) {
stayAlive = Boolean.parseBoolean(ex);
}
}
/**
* Bundle deactivator
*/
@Deactivate
void deactivate() {
/* nothing to do */
}
/*
* Helpers
*/
/**
* Read a filelist file into a list of entries to convert
*
* @param out the stream to print the error messages to
* @param fileList the filelist file
* @param entriesToConvert a list of files to convert, to which the files read
* from the filelist file must be added
* @return true when successful
*/
static boolean readFileList(PrintStream out, File fileList, List<String> entriesToConvert) {
assert (out != null);
assert (fileList != null);
assert (entriesToConvert != null);
if (!fileList.isFile()) {
out.printf(Messages.getString("Main.8"), fileList.getPath()); //$NON-NLS-1$
return false;
}
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(Files.newInputStream(fileList.toPath()), "UTF-8"))) { //$NON-NLS-1$
String line = null;
while ((line = reader.readLine()) != null) {
/* skip empty lines */
if (line.trim().isEmpty()) {
continue;
}
entriesToConvert.add(line);
}
}
catch (IOException e) {
/* can't be covered in a test */
out.printf(Messages.getString("Main.11"), fileList, e.getLocalizedMessage()); //$NON-NLS-1$
return false;
}
return true;
}
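/*
 * Illustrative sketch, not part of the original source: the file list read by
 * readFileList is a plain UTF-8 text file with one entry (a flac file or
 * directory path) per line; empty lines are skipped and there is no comment
 * syntax. The file name and paths below are hypothetical.
 *
 * <pre>
 * /music/flac/Some Artist/Some Album
 * /music/flac/Another Artist/track01.flac
 * </pre>
 *
 * List<String> entries = new java.util.ArrayList<>();
 * boolean ok = readFileList(System.err, new File("flacs.lst"), entries);
 */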
/**
* Validate the entry to convert to filter out non-existing directories and
* files. An entry to convert must exist and be below the flac base directory.
*
* @param out the stream to print the error messages to
* @param musicTreeConfiguration the music tree configuration
* @param entryToConvert the entry to convert
* @return true when validation is successful
*/
static boolean validateEntryToConvert(PrintStream out, MusicTreeConfiguration musicTreeConfiguration,
File entryToConvert) {
assert (out != null);
assert (musicTreeConfiguration != null);
assert (entryToConvert != null);
/* check that the entry exists */
if (!entryToConvert.exists()) {
out.printf(Messages.getString("Main.5"), entryToConvert.getPath()); //$NON-NLS-1$
return false;
}
/*
* check that entry is below the flac base directory so that it doesn't
* escape the base directory by doing a ../../..
*/
if (!FileUtils.isFileBelowDirectory(musicTreeConfiguration.getFlacBaseDir(), entryToConvert, true)) {
out.printf(Messages.getString("Main.6"), //$NON-NLS-1$
entryToConvert.getPath(), musicTreeConfiguration.getFlacBaseDir().getPath());
return false;
}
return true;
}
/**
* Convert a flac file into an mp3 file.
*
* @param err the stream to print to
* @param flac2Mp3Configuration the conversion configuration; must not be null
* @param musicTreeConfiguration the music tree configuration
* @param simulate true to simulate conversion
* @param fileToConvert the flac file to convert
*
* @return true when successful
*/
boolean convertFile(PrintStream err, Flac2Mp3Configuration flac2Mp3Configuration,
MusicTreeConfiguration musicTreeConfiguration, boolean simulate, File fileToConvert) {
assert (err != null);
assert (flac2Mp3Configuration != null);
assert (musicTreeConfiguration != null);
assert (fileToConvert != null);
assert (fileToConvert.isFile());
File mp3File = MusicTreeHelpers.flacFileToMp3File(musicTreeConfiguration, fileToConvert);
if (mp3File == null) {
err.printf(Messages.getString("Main.12"), fileToConvert.getPath(), //$NON-NLS-1$
musicTreeConfiguration.getMp3BaseDir().getPath());
return false;
}
boolean doConversion = !mp3File.exists() || (fileToConvert.lastModified() > mp3File.lastModified());
if (!doConversion) {
return true;
}
boolean converted = false;
try {
converted = flacToMp3.convert(flac2Mp3Configuration, fileToConvert, mp3File, simulate);
if (!converted) {
err.printf(Messages.getString("Main.1"), fileToConvert.getPath()); //$NON-NLS-1$
}
}
catch (IOException e) {
converted = false;
err.printf(Messages.getString("Main.2"), fileToConvert.getPath(), e.getLocalizedMessage()); //$NON-NLS-1$
}
return converted;
}
/**
* Stay alive, if needed (which is when the component has a SETTING_STAYALIVE
* property set to true).
*
* @param err the stream to print a 'staying alive' message to
*/
void stayAlive(PrintStream err) {
if (!stayAlive) {
return;
}
err.printf(Messages.getString("Main.7")); //$NON-NLS-1$
try {
Thread.sleep(Long.MAX_VALUE);
}
catch (InterruptedException e) {
/* swallow */
}
}
/*
* ShutdownHookParticipant
*/
/** true when we have to stop */
private AtomicBoolean stop = new AtomicBoolean(false);
@Override
public void shutdownHook() {
stop.set(true);
}
/*
* Main
*/
/**
* Run the main program
*
* @param err the stream to print errors to
* @return true when successful
*/
boolean doMain(PrintStream err) {
if (args == null) {
/*
* the launcher didn't set our command line options so set empty arguments
* (use defaults)
*/
args = new String[0];
}
/*
* Parse the command line
*/
CommandLineOptions commandLineOptions = new CommandLineOptions();
CmdLineParser parser = new CmdLineParser(commandLineOptions);
try {
parser.parseArgument(args);
}
catch (CmdLineException e) {
err.printf(Messages.getString("Main.4"), e.getLocalizedMessage()); //$NON-NLS-1$
commandLineOptions.setErrorReported(true);
}
/*
* Process command-line options
*/
/* print usage when so requested and exit */
if (commandLineOptions.isHelp() || commandLineOptions.isErrorReported()) {
try {
/* can't be covered by a test */
int cols = Integer.parseInt(System.getenv("COLUMNS")); //$NON-NLS-1$
if (cols > 80) {
parser.getProperties().withUsageWidth(cols);
}
}
catch (NumberFormatException e) {
/* swallow, can't be covered by a test */
}
CommandLineOptions.usage(err, PROGRAM_NAME, parser);
return false;
}
/*
* Setup verbose modes in the shell script listener
*/
shellScriptListener.setVerbose(commandLineOptions.isVerbose(), commandLineOptions.isExtraVerbose(),
commandLineOptions.isQuiet());
/*
* Setup & validate the music tree configuration
*/
MusicTreeConfiguration musicTreeConfiguration =
new MusicTreeConfiguration(commandLineOptions.getFlacBaseDir(), commandLineOptions.getMp3BaseDir());
List<String> errors = musicTreeConfiguration.validate(true);
if (errors != null) {
for (String error : errors) {
err.println(error);
}
return false;
}
/*
* Setup & validate the flac2mp3 configuration
*/
Flac2Mp3Configuration flac2Mp3Configuration = new Flac2Mp3Configuration();
flac2Mp3Configuration.setFlacExecutable(commandLineOptions.getFlacExecutable().getPath());
flac2Mp3Configuration.setLameExecutable(commandLineOptions.getLameExecutable().getPath());
flac2Mp3Configuration.setFlacOptions(commandLineOptions.getFlacOptions());
flac2Mp3Configuration.setLameOptions(commandLineOptions.getLameOptions());
flac2Mp3Configuration.setUseId3V1Tags(commandLineOptions.isUseID3v1());
flac2Mp3Configuration.setUseId3V24Tags(commandLineOptions.isUseID3v24());
flac2Mp3Configuration.setForceConversion(commandLineOptions.isForceConversion());
flac2Mp3Configuration.setRunFlacLame(!commandLineOptions.isDoNotRunFlacAndLame());
flac2Mp3Configuration.setCopyTag(!commandLineOptions.isDoNotCopyTag());
flac2Mp3Configuration.setCopyTimestamp(!commandLineOptions.isDoNotCopyTimestamp());
errors = flac2Mp3Configuration.validate();
if (errors != null) {
/* can't be covered by a test */
for (String error : errors) {
err.println(error);
}
return false;
}
/*
* Setup the entries to convert: first get them from the command-line (if
* specified) and then add those in the file list (if set)
*/
List<String> entriesToConvert = commandLineOptions.getEntriesToConvert();
File fileList = commandLineOptions.getFileList();
if (fileList != null) {
readFileList(err, fileList, entriesToConvert);
}
if (entriesToConvert.isEmpty()) {
/*
* no entries to convert, so default to the flac base directory: sync the
* whole tree
*/
entriesToConvert.add(musicTreeConfiguration.getFlacBaseDir().getAbsolutePath());
}
/*
* Run
*/
boolean result = true;
for (String entryToConvert : entriesToConvert) {
if (stop.get()) {
break;
}
File entryToConvertFile = new File(entryToConvert);
boolean validationResult = validateEntryToConvert(err, musicTreeConfiguration, entryToConvertFile);
if (validationResult) {
if (entryToConvertFile.isDirectory()) {
result = result && syncer.syncFlac2Mp3(flac2Mp3Configuration, musicTreeConfiguration, entryToConvertFile,
extensionsList, coversList, commandLineOptions.isSimulate());
} else if (entryToConvertFile.isFile()) {
result = result && convertFile(err, flac2Mp3Configuration, musicTreeConfiguration,
commandLineOptions.isSimulate(), entryToConvertFile);
} else {
/* can't be covered by a test */
err.printf(Messages.getString("Main.3"), entryToConvert); //$NON-NLS-1$
}
} else {
result = false;
}
}
return result;
}
/*
* Since we're registered as a Runnable with the main.thread property we get
* called when the system is fully initialised.
*/
@Override
public void run() {
boolean success = doMain(System.err);
stayAlive(System.err);
if (!success) {
/* can't be covered by a test */
System.exit(1);
}
}
}
| fhuberts/musicTreePrograms | nl.pelagic.musicTree.flac2mp3.cli/src/nl/pelagic/musicTree/flac2mp3/cli/Main.java | Java | gpl-2.0 | 15,761 |
package ch.dritz.remedy2redmine;
import java.io.File;
import java.io.IOException;
import ch.dritz.common.Config;
import ch.dritz.remedy2redmine.modules.SyncModule;
/**
* Main class for Remedy2Redmine
* @author D.Ritz
*/
public class Main
{
private static void usage(String msg)
{
if (msg != null)
System.out.println("ERROR: " + msg);
System.out.println("Remedy2Redmine " + Version.getVersion());
System.out.println("Usage: Remedy2Redmine <config.properties> <command> [<command specific args>]");
System.out.println(" <command> : one of (sync)");
System.out.println(" <mode specific args> for each mode:");
System.out.println(" - sync: none");
System.out.println("OR: Remedy2Redmine -version");
System.exit(1);
}
/**
* main() entry point
* @param args command line arguments: the configuration file followed by the command
* @throws Exception when loading the configuration or running the command fails
*/
public static void main(String[] args)
throws Exception
{
if (args.length == 1 && "-version".equals(args[0])) {
System.out.println("Remedy2Redmine " + Version.getVersion());
return;
}
if (args.length < 2)
usage("Not enough arguments");
File configFile = new File(args[0]);
String command = args[1];
Config config = new Config();
config.loadFromFile(configFile);
if ("sync".equals(command)) {
File syncConfig = new File(configFile.getParentFile(),
config.getString("sync.config", "sync.properties"));
config.loadFromFile(syncConfig);
SyncModule sync = new SyncModule(config);
try {
sync.start();
} finally {
sync.shutdown();
}
} else {
usage("Unknown command");
}
}
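/*
 * Illustrative sketch, not part of the original source. A typical invocation
 * (class path omitted) would be:
 *
 *   java ch.dritz.remedy2redmine.Main config.properties sync
 *
 * The only key this class reads itself is "sync.config", which names the
 * sync-specific properties file resolved relative to the directory of
 * config.properties (default "sync.properties"); all further settings are
 * consumed by SyncModule and are not shown here.
 *
 *   # config.properties (hypothetical minimal example)
 *   sync.config=sync.properties
 */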
}
| dr-itz/redmine_remedy_view | java/src/main/java/ch/dritz/remedy2redmine/Main.java | Java | gpl-2.0 | 1,560 |
package com.gilecode.langlocker;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "com.gilecode.langlocker"; //$NON-NLS-1$
// The shared instance
private static Activator plugin;
/**
* The constructor
*/
public Activator() {
super();
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext
* )
*/
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext
* )
*/
public void stop(BundleContext context) throws Exception {
plugin = null;
super.stop(context);
}
/**
* Returns the shared instance
*
* @return the shared instance
*/
public static Activator getDefault() {
return plugin;
}
/**
* Returns an image descriptor for the image file at the given plug-in
* relative path
*
* @param path
* the path
* @return the image descriptor
*/
public static ImageDescriptor getImageDescriptor(String path) {
return imageDescriptorFromPlugin(PLUGIN_ID, path);
}
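/*
 * Illustrative usage, not part of the original source; the icon path is
 * hypothetical. Paths are resolved relative to this plug-in's bundle root:
 *
 *   ImageDescriptor lockIcon = Activator.getImageDescriptor("icons/lock.png");
 */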
}
| amogilev/ide-lang-locker | eclipse-plugin/src/com/gilecode/langlocker/Activator.java | Java | gpl-2.0 | 1,423 |
package org.iatoki.judgels.jerahmeel.user.item;
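/**
 * Immutable value object that associates a user JID with an item JID and the
 * user's {@link UserItemStatus} for that item.
 */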
public final class UserItem {
private final String userJid;
private final String itemJid;
private final UserItemStatus status;
public UserItem(String userJid, String itemJid, UserItemStatus status) {
this.userJid = userJid;
this.itemJid = itemJid;
this.status = status;
}
public String getUserJid() {
return userJid;
}
public String getItemJid() {
return itemJid;
}
public UserItemStatus getStatus() {
return status;
}
}
| judgels/jerahmeel | app/org/iatoki/judgels/jerahmeel/user/item/UserItem.java | Java | gpl-2.0 | 567 |
/**
*
*/
package co.innovate.rentavoz.services.almacen.impl;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import co.innovate.rentavoz.model.Tercero;
import co.innovate.rentavoz.model.almacen.Cuota;
import co.innovate.rentavoz.model.almacen.venta.Venta;
import co.innovate.rentavoz.repositories.GenericRepository;
import co.innovate.rentavoz.repositories.almacen.CuotaDao;
import co.innovate.rentavoz.services.almacen.CuotaService;
import co.innovate.rentavoz.services.impl.GenericServiceImpl;
/**
* @author <a href="mailto:[email protected]">Elmer Jose Diaz Lazo</a>
* @project rentavoz3
* @class CuotaServiceImpl
* @date 7/02/2014
*
*/
@Service("cuotaService")
public class CuotaServiceImpl extends GenericServiceImpl<Cuota, Integer> implements CuotaService,Serializable {
/**
* 7/02/2014
* @author <a href="mailto:[email protected]">Elmer Jose Diaz Lazo</a>
* serialVersionUID
*/
private static final long serialVersionUID = 1L;
@Autowired
private CuotaDao cuotaDao;
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.impl.GenericServiceImpl#getDao()
*/
@Override
public GenericRepository<Cuota, Integer> getDao() {
return cuotaDao;
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#buscarCuotasPendientesPorCliente(co.innovate.rentavoz.model.Tercero)
*/
@Override
public List<Cuota> buscarCuotasPendientesPorCliente(Tercero cliente) {
return cuotaDao.buscarCuotasPendientesPorCliente(cliente);
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#buscarRutaDeCuotasPorCobrador(co.innovate.rentavoz.model.Tercero)
*/
@Override
public List<Cuota> buscarRutaDeCuotasPorCobrador(Tercero cobrador) {
return cuotaDao.buscarRutaDeCuotasPorCobrador(cobrador);
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#findByVenta(co.innovate.rentavoz.model.almacen.venta.Venta)
*/
@Override
public List<Cuota> findByVenta(Venta venta) {
return cuotaDao.findByVenta(venta);
}
/* (non-Javadoc)
* @see co.innovate.rentavoz.services.almacen.CuotaService#findDeudoresMorosos(java.util.Date)
*/
@Override
public List<Tercero> findDeudoresMorosos(Date fechaCierre) {
return cuotaDao.findDeudoresMorosos(fechaCierre);
}
}
| kaisenlean/rentavoz3 | src/main/java/co/innovate/rentavoz/services/almacen/impl/CuotaServiceImpl.java | Java | gpl-2.0 | 2,414 |
package oo.Prototype;
/*
* A Symbol Loader that registers all prototype instances
*/
import java.util.*;
public class SymbolLoader {
private Hashtable symbols = new Hashtable();
public SymbolLoader() {
symbols.put("Line", new LineSymbol());
symbols.put("Note", new NoteSymbol());
}
public Hashtable getSymbols() {
return symbols;
}
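/*
 * Illustrative sketch, not part of the original source: in the Prototype
 * pattern a client looks a registered prototype up by name and clones it
 * instead of constructing it directly. Whether LineSymbol/NoteSymbol expose
 * clone() (or another copy method) is an assumption here.
 *
 *   SymbolLoader loader = new SymbolLoader();
 *   Object linePrototype = loader.getSymbols().get("Line");
 *   // e.g. LineSymbol copy = (LineSymbol) ((LineSymbol) linePrototype).clone();
 */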
}
| longluo/DesignPatterns | src/oo/Prototype/SymbolLoader.java | Java | gpl-2.0 | 385
package com.djtu.signExam.util;
import java.io.IOException;
import com.djtu.signExam.model.support.EntityGenerator;
/**
* Use this class to bootstrap the project.
* There is no need for developers to write model classes themselves.
* Whenever the database is updated or modified,
* run this class as a Java application to regenerate the model class files.
*
* @author lihe
*
*/
public class Bootstrap {
public static void main(String[] args){
EntityGenerator generator = new EntityGenerator();
try {
generator.generateModel();
} catch (IOException e) {
e.printStackTrace();
}
}
}
| doomdagger/CompetitionHub | src/main/java/com/djtu/signExam/util/Bootstrap.java | Java | gpl-2.0 | 625 |
package com.ues21.ferreteria.login;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.ues21.ferreteria.productos.Productos;
import com.ues21.ferreteria.productos.ProductosDAO;
import com.ues21.ferreteria.usuarios.Usuarios;
import com.ues21.ferreteria.usuarios.UsuariosDAO;
@Controller
public class LoginController {
@Autowired private LoginDAO loginDAO;
@Autowired private UsuariosDAO usuariosDAO;
/*
@RequestMapping(value = "/login", method = RequestMethod.GET)
public String listaHome(Model model) {
model.addAttribute("login", null);
return "login";
}
*/
@RequestMapping(value = "/login", method = RequestMethod.GET)
public String viewRegistration(Map<String, Object> model) {
Login userForm = new Login();
model.put("userForm", userForm);
return "login";
}
@RequestMapping(value = "/login", method = RequestMethod.POST)
public String processRegistration(@ModelAttribute("userForm") Login user,
Model model, HttpSession session) {
// verify the supplied credentials against the database
Login login = loginDAO.verificarUsuario(user);
// for testing purposes only:
System.out.println("username: " + user.getDni());
System.out.println("password: " + user.getContrasena());
if (login==null){
model.addAttribute("loginError", "Error logging in. Please try again");
return "index";
}
else {
Usuarios usuario = usuariosDAO.getUsuario(user.getDni());
session.setAttribute("loggedInUser", usuario);
return "home";
}
}
@RequestMapping(value = "/logout", method = RequestMethod.GET)
public String logout(HttpSession session){
session.removeAttribute("loggedInUser");
return "index";
}
}
| srubbiolo/SYSO | src/main/java/com/ues21/ferreteria/login/LoginController.java | Java | gpl-2.0 | 2,293 |
/* This code is part of Freenet. It is distributed under the GNU General
* Public License, version 2 (or at your option any later version). See
* http://www.gnu.org/ for further details of the GPL. */
/* Freenet 0.7 node. */
package freenet.node;
import static freenet.node.stats.DataStoreKeyType.CHK;
import static freenet.node.stats.DataStoreKeyType.PUB_KEY;
import static freenet.node.stats.DataStoreKeyType.SSK;
import static freenet.node.stats.DataStoreType.CACHE;
import static freenet.node.stats.DataStoreType.CLIENT;
import static freenet.node.stats.DataStoreType.SLASHDOT;
import static freenet.node.stats.DataStoreType.STORE;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.RandomAccessFile;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Random;
import java.util.Set;
import freenet.config.*;
import freenet.node.useralerts.*;
import org.tanukisoftware.wrapper.WrapperManager;
import freenet.client.FetchContext;
import freenet.clients.fcp.FCPMessage;
import freenet.clients.fcp.FeedMessage;
import freenet.clients.http.SecurityLevelsToadlet;
import freenet.clients.http.SimpleToadletServer;
import freenet.crypt.DSAPublicKey;
import freenet.crypt.ECDH;
import freenet.crypt.MasterSecret;
import freenet.crypt.PersistentRandomSource;
import freenet.crypt.RandomSource;
import freenet.crypt.Yarrow;
import freenet.io.comm.DMT;
import freenet.io.comm.DisconnectedException;
import freenet.io.comm.FreenetInetAddress;
import freenet.io.comm.IOStatisticCollector;
import freenet.io.comm.Message;
import freenet.io.comm.MessageCore;
import freenet.io.comm.MessageFilter;
import freenet.io.comm.Peer;
import freenet.io.comm.PeerParseException;
import freenet.io.comm.ReferenceSignatureVerificationException;
import freenet.io.comm.TrafficClass;
import freenet.io.comm.UdpSocketHandler;
import freenet.io.xfer.PartiallyReceivedBlock;
import freenet.keys.CHKBlock;
import freenet.keys.CHKVerifyException;
import freenet.keys.ClientCHK;
import freenet.keys.ClientCHKBlock;
import freenet.keys.ClientKey;
import freenet.keys.ClientKeyBlock;
import freenet.keys.ClientSSK;
import freenet.keys.ClientSSKBlock;
import freenet.keys.Key;
import freenet.keys.KeyBlock;
import freenet.keys.KeyVerifyException;
import freenet.keys.NodeCHK;
import freenet.keys.NodeSSK;
import freenet.keys.SSKBlock;
import freenet.keys.SSKVerifyException;
import freenet.l10n.BaseL10n;
import freenet.l10n.NodeL10n;
import freenet.node.DarknetPeerNode.FRIEND_TRUST;
import freenet.node.DarknetPeerNode.FRIEND_VISIBILITY;
import freenet.node.NodeDispatcher.NodeDispatcherCallback;
import freenet.node.OpennetManager.ConnectionType;
import freenet.node.SecurityLevels.NETWORK_THREAT_LEVEL;
import freenet.node.SecurityLevels.PHYSICAL_THREAT_LEVEL;
import freenet.node.probe.Listener;
import freenet.node.probe.Type;
import freenet.node.stats.DataStoreInstanceType;
import freenet.node.stats.DataStoreStats;
import freenet.node.stats.NotAvailNodeStoreStats;
import freenet.node.stats.StoreCallbackStats;
import freenet.node.updater.NodeUpdateManager;
import freenet.pluginmanager.ForwardPort;
import freenet.pluginmanager.PluginDownLoaderOfficialHTTPS;
import freenet.pluginmanager.PluginManager;
import freenet.store.BlockMetadata;
import freenet.store.CHKStore;
import freenet.store.FreenetStore;
import freenet.store.KeyCollisionException;
import freenet.store.NullFreenetStore;
import freenet.store.PubkeyStore;
import freenet.store.RAMFreenetStore;
import freenet.store.SSKStore;
import freenet.store.SlashdotStore;
import freenet.store.StorableBlock;
import freenet.store.StoreCallback;
import freenet.store.caching.CachingFreenetStore;
import freenet.store.caching.CachingFreenetStoreTracker;
import freenet.store.saltedhash.ResizablePersistentIntBuffer;
import freenet.store.saltedhash.SaltedHashFreenetStore;
import freenet.support.Executor;
import freenet.support.Fields;
import freenet.support.HTMLNode;
import freenet.support.HexUtil;
import freenet.support.JVMVersion;
import freenet.support.LogThresholdCallback;
import freenet.support.Logger;
import freenet.support.Logger.LogLevel;
import freenet.support.PooledExecutor;
import freenet.support.PrioritizedTicker;
import freenet.support.ShortBuffer;
import freenet.support.SimpleFieldSet;
import freenet.support.Ticker;
import freenet.support.TokenBucket;
import freenet.support.api.BooleanCallback;
import freenet.support.api.IntCallback;
import freenet.support.api.LongCallback;
import freenet.support.api.ShortCallback;
import freenet.support.api.StringCallback;
import freenet.support.io.ArrayBucketFactory;
import freenet.support.io.Closer;
import freenet.support.io.FileUtil;
import freenet.support.io.NativeThread;
import freenet.support.math.MersenneTwister;
import freenet.support.transport.ip.HostnameSyntaxException;
/**
* @author amphibian
*/
public class Node implements TimeSkewDetectorCallback {
public class MigrateOldStoreData implements Runnable {
private final boolean clientCache;
public MigrateOldStoreData(boolean clientCache) {
this.clientCache = clientCache;
if(clientCache) {
oldCHKClientCache = chkClientcache;
oldPKClientCache = pubKeyClientcache;
oldSSKClientCache = sskClientcache;
} else {
oldCHK = chkDatastore;
oldPK = pubKeyDatastore;
oldSSK = sskDatastore;
oldCHKCache = chkDatastore;
oldPKCache = pubKeyDatastore;
oldSSKCache = sskDatastore;
}
}
@Override
public void run() {
System.err.println("Migrating old "+(clientCache ? "client cache" : "datastore"));
if(clientCache) {
migrateOldStore(oldCHKClientCache, chkClientcache, true);
StoreCallback<? extends StorableBlock> old;
synchronized(Node.this) {
old = oldCHKClientCache;
oldCHKClientCache = null;
}
closeOldStore(old);
migrateOldStore(oldPKClientCache, pubKeyClientcache, true);
synchronized(Node.this) {
old = oldPKClientCache;
oldPKClientCache = null;
}
closeOldStore(old);
migrateOldStore(oldSSKClientCache, sskClientcache, true);
synchronized(Node.this) {
old = oldSSKClientCache;
oldSSKClientCache = null;
}
closeOldStore(old);
} else {
migrateOldStore(oldCHK, chkDatastore, false);
oldCHK = null;
migrateOldStore(oldPK, pubKeyDatastore, false);
oldPK = null;
migrateOldStore(oldSSK, sskDatastore, false);
oldSSK = null;
migrateOldStore(oldCHKCache, chkDatacache, false);
oldCHKCache = null;
migrateOldStore(oldPKCache, pubKeyDatacache, false);
oldPKCache = null;
migrateOldStore(oldSSKCache, sskDatacache, false);
oldSSKCache = null;
}
System.err.println("Finished migrating old "+(clientCache ? "client cache" : "datastore"));
}
}
volatile CHKStore oldCHK;
volatile PubkeyStore oldPK;
volatile SSKStore oldSSK;
volatile CHKStore oldCHKCache;
volatile PubkeyStore oldPKCache;
volatile SSKStore oldSSKCache;
volatile CHKStore oldCHKClientCache;
volatile PubkeyStore oldPKClientCache;
volatile SSKStore oldSSKClientCache;
private <T extends StorableBlock> void migrateOldStore(StoreCallback<T> old, StoreCallback<T> newStore, boolean canReadClientCache) {
FreenetStore<T> store = old.getStore();
if(store instanceof RAMFreenetStore) {
RAMFreenetStore<T> ramstore = (RAMFreenetStore<T>)store;
try {
ramstore.migrateTo(newStore, canReadClientCache);
} catch (IOException e) {
Logger.error(this, "Caught migrating old store: "+e, e);
}
ramstore.clear();
} else if(store instanceof SaltedHashFreenetStore) {
Logger.error(this, "Migrating from from a saltedhashstore not fully supported yet: will not keep old keys");
}
}
public <T extends StorableBlock> void closeOldStore(StoreCallback<T> old) {
FreenetStore<T> store = old.getStore();
if(store instanceof SaltedHashFreenetStore) {
SaltedHashFreenetStore<T> saltstore = (SaltedHashFreenetStore<T>) store;
saltstore.close();
saltstore.destruct();
}
}
private static volatile boolean logMINOR;
private static volatile boolean logDEBUG;
static {
Logger.registerLogThresholdCallback(new LogThresholdCallback(){
@Override
public void shouldUpdate(){
logMINOR = Logger.shouldLog(LogLevel.MINOR, this);
logDEBUG = Logger.shouldLog(LogLevel.DEBUG, this);
}
});
}
private static MeaningfulNodeNameUserAlert nodeNameUserAlert;
private static TimeSkewDetectedUserAlert timeSkewDetectedUserAlert;
public class NodeNameCallback extends StringCallback {
NodeNameCallback() {
}
@Override
public String get() {
String name;
synchronized(this) {
name = myName;
}
if(name.startsWith("Node id|")|| name.equals("MyFirstFreenetNode") || name.startsWith("Freenet node with no name #")){
clientCore.alerts.register(nodeNameUserAlert);
}else{
clientCore.alerts.unregister(nodeNameUserAlert);
}
return name;
}
@Override
public void set(String val) throws InvalidConfigValueException {
if(get().equals(val)) return;
else if(val.length() > 128)
throw new InvalidConfigValueException("The given node name is too long ("+val+')');
else if("".equals(val))
val = "~none~";
synchronized(this) {
myName = val;
}
// We'll broadcast the new name to our connected darknet peers via a differential node reference
SimpleFieldSet fs = new SimpleFieldSet(true);
fs.putSingle("myName", myName);
peers.locallyBroadcastDiffNodeRef(fs, true, false);
// We call the callback once again to ensure MeaningfulNodeNameUserAlert
// has been unregistered ... see #1595
get();
}
}
private class StoreTypeCallback extends StringCallback implements EnumerableOptionCallback {
@Override
public String get() {
synchronized(Node.this) {
return storeType;
}
}
@Override
public void set(String val) throws InvalidConfigValueException, NodeNeedRestartException {
boolean found = false;
for (String p : getPossibleValues()) {
if (p.equals(val)) {
found = true;
break;
}
}
if (!found)
throw new InvalidConfigValueException("Invalid store type");
String type;
synchronized(Node.this) {
type = storeType;
}
if(type.equals("ram")) {
synchronized(this) { // Serialise this part.
makeStore(val);
}
} else {
synchronized(Node.this) {
storeType = val;
}
throw new NodeNeedRestartException("Store type cannot be changed on the fly");
}
}
@Override
public String[] getPossibleValues() {
return new String[] { "salt-hash", "ram" };
}
}
private class ClientCacheTypeCallback extends StringCallback implements EnumerableOptionCallback {
@Override
public String get() {
synchronized(Node.this) {
return clientCacheType;
}
}
@Override
public void set(String val) throws InvalidConfigValueException, NodeNeedRestartException {
boolean found = false;
for (String p : getPossibleValues()) {
if (p.equals(val)) {
found = true;
break;
}
}
if (!found)
throw new InvalidConfigValueException("Invalid store type");
synchronized(this) { // Serialise this part.
String suffix = getStoreSuffix();
if (val.equals("salt-hash")) {
byte[] key;
try {
synchronized(Node.this) {
if(keys == null) throw new MasterKeysWrongPasswordException();
key = keys.clientCacheMasterKey;
clientCacheType = val;
}
} catch (MasterKeysWrongPasswordException e1) {
setClientCacheAwaitingPassword();
throw new InvalidConfigValueException("You must enter the password");
}
try {
initSaltHashClientCacheFS(suffix, true, key);
} catch (NodeInitException e) {
Logger.error(this, "Unable to create new store", e);
System.err.println("Unable to create new store: "+e);
e.printStackTrace();
// FIXME l10n both on the NodeInitException and the wrapper message
throw new InvalidConfigValueException("Unable to create new store: "+e);
}
} else if(val.equals("ram")) {
initRAMClientCacheFS();
} else /*if(val.equals("none")) */{
initNoClientCacheFS();
}
synchronized(Node.this) {
clientCacheType = val;
}
}
}
@Override
public String[] getPossibleValues() {
return new String[] { "salt-hash", "ram", "none" };
}
}
private static class L10nCallback extends StringCallback implements EnumerableOptionCallback {
@Override
public String get() {
return NodeL10n.getBase().getSelectedLanguage().fullName;
}
@Override
public void set(String val) throws InvalidConfigValueException {
if(val == null || get().equalsIgnoreCase(val)) return;
try {
NodeL10n.getBase().setLanguage(BaseL10n.LANGUAGE.mapToLanguage(val));
} catch (MissingResourceException e) {
throw new InvalidConfigValueException(e.getLocalizedMessage());
}
PluginManager.setLanguage(NodeL10n.getBase().getSelectedLanguage());
}
@Override
public String[] getPossibleValues() {
return BaseL10n.LANGUAGE.valuesWithFullNames();
}
}
/** Encryption key for client.dat.crypt or client.dat.bak.crypt */
private DatabaseKey databaseKey;
/** Encryption keys, if loaded, null if waiting for a password. We must be able to write them,
* and they're all used elsewhere anyway, so there's no point trying not to keep them in memory. */
private MasterKeys keys;
/** Stats */
public final NodeStats nodeStats;
/** Config object for the whole node. */
public final PersistentConfig config;
// Static stuff related to logger
/** Directory to log to */
static File logDir;
/** Maximum size of gzipped logfiles */
static long maxLogSize;
/** Log config handler */
public static LoggingConfigHandler logConfigHandler;
public static final int PACKETS_IN_BLOCK = 32;
public static final int PACKET_SIZE = 1024;
public static final double DECREMENT_AT_MIN_PROB = 0.25;
public static final double DECREMENT_AT_MAX_PROB = 0.5;
// Send keepalives every 7-14 seconds. Will be acked and if necessary resent.
// Old behaviour was keepalives every 14-28. Even that was adequate for a 30 second
// timeout. Most nodes don't need to send keepalives because they are constantly busy,
// this is only an issue for disabled darknet connections, very quiet private networks
// etc.
public static final long KEEPALIVE_INTERVAL = SECONDS.toMillis(7);
// If no activity for 30 seconds, node is dead
// 35 seconds allows plenty of time for resends etc even if above is 14 sec as it is on older nodes.
public static final long MAX_PEER_INACTIVITY = SECONDS.toMillis(35);
/** Time after which a handshake is assumed to have failed. */
public static final int HANDSHAKE_TIMEOUT = (int) MILLISECONDS.toMillis(4800); // Keep the below within the 30 second assumed timeout.
// Inter-handshake time must be at least 2x handshake timeout
public static final int MIN_TIME_BETWEEN_HANDSHAKE_SENDS = HANDSHAKE_TIMEOUT*2; // 10-20 secs
public static final int RANDOMIZED_TIME_BETWEEN_HANDSHAKE_SENDS = HANDSHAKE_TIMEOUT*2; // avoid overlap when the two handshakes are at the same time
public static final int MIN_TIME_BETWEEN_VERSION_PROBES = HANDSHAKE_TIMEOUT*4;
public static final int RANDOMIZED_TIME_BETWEEN_VERSION_PROBES = HANDSHAKE_TIMEOUT*2; // 20-30 secs
public static final int MIN_TIME_BETWEEN_VERSION_SENDS = HANDSHAKE_TIMEOUT*4;
public static final int RANDOMIZED_TIME_BETWEEN_VERSION_SENDS = HANDSHAKE_TIMEOUT*2; // 20-30 secs
public static final int MIN_TIME_BETWEEN_BURSTING_HANDSHAKE_BURSTS = HANDSHAKE_TIMEOUT*24; // 2-5 minutes
public static final int RANDOMIZED_TIME_BETWEEN_BURSTING_HANDSHAKE_BURSTS = HANDSHAKE_TIMEOUT*36;
public static final int MIN_BURSTING_HANDSHAKE_BURST_SIZE = 1; // 1-4 handshake sends per burst
public static final int RANDOMIZED_BURSTING_HANDSHAKE_BURST_SIZE = 3;
// If we don't receive any packets at all in this period, from any node, tell the user
public static final long ALARM_TIME = MINUTES.toMillis(1);
static final long MIN_INTERVAL_BETWEEN_INCOMING_SWAP_REQUESTS = MILLISECONDS.toMillis(900);
static final long MIN_INTERVAL_BETWEEN_INCOMING_PROBE_REQUESTS = MILLISECONDS.toMillis(1000);
public static final int SYMMETRIC_KEY_LENGTH = 32; // 256 bits - note that this isn't used everywhere to determine it
/** Datastore directory */
private final ProgramDirectory storeDir;
/** Datastore properties */
private String storeType;
private boolean storeUseSlotFilters;
private boolean storeSaltHashResizeOnStart;
/** Minimum total datastore size */
static final long MIN_STORE_SIZE = 32 * 1024 * 1024;
/** Default datastore size (must be at least MIN_STORE_SIZE) */
static final long DEFAULT_STORE_SIZE = 32 * 1024 * 1024;
/** Minimum client cache size */
static final long MIN_CLIENT_CACHE_SIZE = 0;
/** Default client cache size (must be at least MIN_CLIENT_CACHE_SIZE) */
static final long DEFAULT_CLIENT_CACHE_SIZE = 10 * 1024 * 1024;
/** Minimum slashdot cache size */
static final long MIN_SLASHDOT_CACHE_SIZE = 0;
/** Default slashdot cache size (must be at least MIN_SLASHDOT_CACHE_SIZE) */
static final long DEFAULT_SLASHDOT_CACHE_SIZE = 10 * 1024 * 1024;
/** The number of bytes per key total in all the different datastores. All the datastores
* are always the same size in number of keys. */
public static final int sizePerKey = CHKBlock.DATA_LENGTH + CHKBlock.TOTAL_HEADERS_LENGTH +
DSAPublicKey.PADDED_SIZE + SSKBlock.DATA_LENGTH + SSKBlock.TOTAL_HEADERS_LENGTH;
/** The maximum number of keys stored in each of the datastores, cache and store combined. */
private long maxTotalKeys;
long maxCacheKeys;
long maxStoreKeys;
/** The maximum size of the datastore. Kept to avoid rounding turning 5G into 5368698672 */
private long maxTotalDatastoreSize;
/** If true, store shrinks occur immediately even if they are over 10% of the store size. If false,
* we just set the storeSize and do an offline shrink on the next startup. Online shrinks do not
* preserve the most recently used data so are not recommended. */
private boolean storeForceBigShrinks;
private final SemiOrderedShutdownHook shutdownHook;
/** The CHK datastore. Long term storage; data should only be inserted here if
* this node is the closest location on the chain so far, and it is on an
* insert (because inserts will always reach the most specialized node; if we
* allow requests to store here, then we get pollution by inserts for keys not
* close to our specialization). These conclusions were derived from Oskar's simulations. */
private CHKStore chkDatastore;
/** The SSK datastore. See description for chkDatastore. */
private SSKStore sskDatastore;
/** The store of DSAPublicKeys (by hash). See description for chkDatastore. */
private PubkeyStore pubKeyDatastore;
/** Client cache store type */
private String clientCacheType;
/** Client cache could not be opened so is a RAMFS until the correct password is entered */
private boolean clientCacheAwaitingPassword;
private boolean databaseAwaitingPassword;
/** Client cache maximum cached keys for each type */
long maxClientCacheKeys;
/** Maximum size of the client cache. Kept to avoid rounding problems. */
private long maxTotalClientCacheSize;
/** The CHK datacache. Short term cache which stores everything that passes
* through this node. */
private CHKStore chkDatacache;
/** The SSK datacache. Short term cache which stores everything that passes
* through this node. */
private SSKStore sskDatacache;
/** The public key datacache (by hash). Short term cache which stores
* everything that passes through this node. */
private PubkeyStore pubKeyDatacache;
/** The CHK client cache. Caches local requests only. */
private CHKStore chkClientcache;
/** The SSK client cache. Caches local requests only. */
private SSKStore sskClientcache;
/** The pubkey client cache. Caches local requests only. */
private PubkeyStore pubKeyClientcache;
// These only cache keys for 30 minutes.
// FIXME make the first two configurable
private long maxSlashdotCacheSize;
private int maxSlashdotCacheKeys;
static final long PURGE_INTERVAL = SECONDS.toMillis(60);
private CHKStore chkSlashdotcache;
private SlashdotStore<CHKBlock> chkSlashdotcacheStore;
private SSKStore sskSlashdotcache;
private SlashdotStore<SSKBlock> sskSlashdotcacheStore;
private PubkeyStore pubKeySlashdotcache;
private SlashdotStore<DSAPublicKey> pubKeySlashdotcacheStore;
/** If false, only ULPRs will use the slashdot cache. If true, everything does. */
private boolean useSlashdotCache;
/** If true, we write stuff to the datastore even though we shouldn't because the HTL is
* too high. However it is flagged as old so it won't be included in the Bloom filter for
* sharing purposes. */
private boolean writeLocalToDatastore;
final NodeGetPubkey getPubKey;
/** FetchContext for ARKs */
public final FetchContext arkFetcherContext;
/** IP detector */
public final NodeIPDetector ipDetector;
/** For debugging/testing, set this to true to stop the
* probabilistic decrement at the edges of the HTLs. */
boolean disableProbabilisticHTLs;
public final RequestTracker tracker;
/** Semi-unique ID for swap requests. Used to identify us so that the
* topology can be reconstructed. */
public long swapIdentifier;
private String myName;
public final LocationManager lm;
/** My peers */
public final PeerManager peers;
/** Node-reference directory (node identity, peers, etc) */
final ProgramDirectory nodeDir;
/** Config directory (l10n overrides, etc) */
final ProgramDirectory cfgDir;
/** User data directory (bookmarks, download lists, etc) */
final ProgramDirectory userDir;
/** Run-time state directory (bootID, PRNG seed, etc) */
final ProgramDirectory runDir;
/** Plugin directory */
final ProgramDirectory pluginDir;
/** File to write crypto master keys into, possibly passworded */
final File masterKeysFile;
/** Directory to put extra peer data into */
final File extraPeerDataDir;
private volatile boolean hasPanicked;
/** Strong RNG */
public final RandomSource random;
/** JCA-compliant strong RNG. WARNING: DO NOT CALL THIS ON THE MAIN NETWORK
* HANDLING THREADS! In some configurations it can block, potentially
* forever, on nextBytes()! */
public final SecureRandom secureRandom;
/** Weak but fast RNG */
public final Random fastWeakRandom;
/** The object which handles incoming messages and allows us to wait for them */
final MessageCore usm;
// Darknet stuff
NodeCrypto darknetCrypto;
// Back compat
private boolean showFriendsVisibilityAlert;
// Opennet stuff
private final NodeCryptoConfig opennetCryptoConfig;
OpennetManager opennet;
private volatile boolean isAllowedToConnectToSeednodes;
private int maxOpennetPeers;
private boolean acceptSeedConnections;
private boolean passOpennetRefsThroughDarknet;
// General stuff
public final Executor executor;
public final PacketSender ps;
public final PrioritizedTicker ticker;
final DNSRequester dnsr;
final NodeDispatcher dispatcher;
public final UptimeEstimator uptime;
public final TokenBucket outputThrottle;
public boolean throttleLocalData;
private int outputBandwidthLimit;
private int inputBandwidthLimit;
private long amountOfDataToCheckCompressionRatio;
private int minimumCompressionPercentage;
private int maxTimeForSingleCompressor;
private boolean connectionSpeedDetection;
boolean inputLimitDefault;
final boolean enableARKs;
final boolean enablePerNodeFailureTables;
final boolean enableULPRDataPropagation;
final boolean enableSwapping;
private volatile boolean publishOurPeersLocation;
private volatile boolean routeAccordingToOurPeersLocation;
boolean enableSwapQueueing;
boolean enablePacketCoalescing;
public static final short DEFAULT_MAX_HTL = (short)18;
private short maxHTL;
private boolean skipWrapperWarning;
private int maxPacketSize;
/** Should inserts ignore low backoff times by default? */
public static final boolean IGNORE_LOW_BACKOFF_DEFAULT = false;
/** Definition of "low backoff times" for above. */
public static final long LOW_BACKOFF = SECONDS.toMillis(30);
/** Should inserts be fairly blatantly prioritised on accept by default? */
public static final boolean PREFER_INSERT_DEFAULT = false;
/** Should inserts fork when the HTL reaches cacheability? */
public static final boolean FORK_ON_CACHEABLE_DEFAULT = true;
public final IOStatisticCollector collector;
/** Type identifier for fproxy node to node messages, as sent on DMT.nodeToNodeMessage's */
public static final int N2N_MESSAGE_TYPE_FPROXY = 1;
/** Type identifier for differential node reference messages, as sent on DMT.nodeToNodeMessage's */
public static final int N2N_MESSAGE_TYPE_DIFFNODEREF = 2;
/** Identifier within fproxy messages for simple, short text messages to be displayed on the homepage as useralerts */
public static final int N2N_TEXT_MESSAGE_TYPE_USERALERT = 1;
/** Identifier within fproxy messages for an offer to transfer a file */
public static final int N2N_TEXT_MESSAGE_TYPE_FILE_OFFER = 2;
/** Identifier within fproxy messages for accepting an offer to transfer a file */
public static final int N2N_TEXT_MESSAGE_TYPE_FILE_OFFER_ACCEPTED = 3;
/** Identifier within fproxy messages for rejecting an offer to transfer a file */
public static final int N2N_TEXT_MESSAGE_TYPE_FILE_OFFER_REJECTED = 4;
/** Identifier within friend feed for the recommendation of a bookmark */
public static final int N2N_TEXT_MESSAGE_TYPE_BOOKMARK = 5;
/** Identifier within friend feed for the recommendation of a file */
public static final int N2N_TEXT_MESSAGE_TYPE_DOWNLOAD = 6;
public static final int EXTRA_PEER_DATA_TYPE_N2NTM = 1;
public static final int EXTRA_PEER_DATA_TYPE_PEER_NOTE = 2;
public static final int EXTRA_PEER_DATA_TYPE_QUEUED_TO_SEND_N2NM = 3;
public static final int EXTRA_PEER_DATA_TYPE_BOOKMARK = 4;
public static final int EXTRA_PEER_DATA_TYPE_DOWNLOAD = 5;
public static final int PEER_NOTE_TYPE_PRIVATE_DARKNET_COMMENT = 1;
/** The bootID of the last time the node booted up. Or -1 if we don't know due to
* permissions problems, or we suspect that the node has been booted and not
* written the file e.g. if we can't write it. So if we want to compare data
* gathered in the last session and only recorded to disk on a clean shutdown
* to data we have now, we just include the lastBootID. */
public final long lastBootID;
public final long bootID;
public final long startupTime;
private SimpleToadletServer toadlets;
public final NodeClientCore clientCore;
// ULPRs, RecentlyFailed, per node failure tables, are all managed by FailureTable.
final FailureTable failureTable;
// The version we were before we restarted.
public int lastVersion;
/** NodeUpdater **/
public final NodeUpdateManager nodeUpdater;
public final SecurityLevels securityLevels;
// Things that's needed to keep track of
public final PluginManager pluginManager;
// Helpers
public final InetAddress localhostAddress;
public final FreenetInetAddress fLocalhostAddress;
// The node starter
private static NodeStarter nodeStarter;
// The watchdog will be silenced until it's true
private boolean hasStarted;
private boolean isStopping = false;
/**
* Minimum uptime for us to consider a node an acceptable place to store a key. We store a key
* to the datastore only if it's from an insert, and we are a sink, but when calculating whether
* we are a sink we ignore nodes which have less uptime (percentage) than this parameter.
*/
static final int MIN_UPTIME_STORE_KEY = 40;
private volatile boolean isPRNGReady = false;
private boolean storePreallocate;
private boolean enableRoutedPing;
private boolean peersOffersDismissed;
/**
* Minimum bandwidth limit in bytes considered usable: 10 KiB. If there is an attempt to set a limit below this -
* excluding the reserved -1 for input bandwidth - the callback will throw. See the callbacks for
* outputBandwidthLimit and inputBandwidthLimit. 10 KiB are equivalent to 50 GiB traffic per month.
*/
private static final int minimumBandwidth = 10 * 1024;
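// Rough arithmetic behind the "50 GiB per month" figure above (the assumption
// being that it counts input and output together): 10 KiB/s = 10240 bytes/s,
// a 30-day month has 2,592,000 seconds, so one direction is about
// 10240 * 2592000 ~= 26.5 GB ~= 24.7 GiB per month, i.e. ~50 GiB combined.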
/** Quality of Service mark we will use for all outgoing packets (opennet/darknet) */
private TrafficClass trafficClass;
public TrafficClass getTrafficClass() {
return trafficClass;
}
/**
* Gets minimum bandwidth in bytes considered usable.
*
* @see #minimumBandwidth
*/
public static int getMinimumBandwidth() {
return minimumBandwidth;
}
/**
* Dispatches a probe request with the specified settings
* @see freenet.node.probe.Probe#start(byte, long, Type, Listener)
*/
public void startProbe(final byte htl, final long uid, final Type type, final Listener listener) {
dispatcher.probe.start(htl, uid, type, listener);
}
/**
* Read all storable settings (identity etc) from the node file.
* @param filename The name of the file to read from.
* @throws IOException thrown when an I/O error occurs
*/
private void readNodeFile(String filename) throws IOException {
// REDFLAG: Any way to share this code with NodePeer?
FileInputStream fis = new FileInputStream(filename);
InputStreamReader isr = new InputStreamReader(fis, "UTF-8");
BufferedReader br = new BufferedReader(isr);
SimpleFieldSet fs = new SimpleFieldSet(br, false, true);
br.close();
// Read contents
String[] udp = fs.getAll("physical.udp");
if((udp != null) && (udp.length > 0)) {
for(String udpAddr : udp) {
// Just keep the first one with the correct port number.
Peer p;
try {
p = new Peer(udpAddr, false, true);
} catch (HostnameSyntaxException e) {
Logger.error(this, "Invalid hostname or IP Address syntax error while parsing our darknet node reference: "+udpAddr);
System.err.println("Invalid hostname or IP Address syntax error while parsing our darknet node reference: "+udpAddr);
continue;
} catch (PeerParseException e) {
throw (IOException)new IOException().initCause(e);
}
if(p.getPort() == getDarknetPortNumber()) {
// DNSRequester doesn't deal with our own node
ipDetector.setOldIPAddress(p.getFreenetAddress());
break;
}
}
}
darknetCrypto.readCrypto(fs);
swapIdentifier = Fields.bytesToLong(darknetCrypto.identityHashHash);
String loc = fs.get("location");
double locD = Location.getLocation(loc);
if (locD == -1.0)
throw new IOException("Invalid location: " + loc);
lm.setLocation(locD);
myName = fs.get("myName");
if(myName == null) {
myName = newName();
}
String verString = fs.get("version");
if(verString == null) {
Logger.error(this, "No version!");
System.err.println("No version!");
} else {
lastVersion = Version.getArbitraryBuildNumber(verString, -1);
}
}
public void makeStore(String val) throws InvalidConfigValueException {
String suffix = getStoreSuffix();
if (val.equals("salt-hash")) {
try {
initSaltHashFS(suffix, true, null);
} catch (NodeInitException e) {
Logger.error(this, "Unable to create new store", e);
System.err.println("Unable to create new store: "+e);
e.printStackTrace();
// FIXME l10n both on the NodeInitException and the wrapper message
throw new InvalidConfigValueException("Unable to create new store: "+e);
}
} else {
initRAMFS();
}
synchronized(Node.this) {
storeType = val;
}
}
private String newName() {
return "Freenet node with no name #"+random.nextLong();
}
private final Object writeNodeFileSync = new Object();
public void writeNodeFile() {
synchronized(writeNodeFileSync) {
writeNodeFile(nodeDir.file("node-"+getDarknetPortNumber()), nodeDir.file("node-"+getDarknetPortNumber()+".bak"));
}
}
public void writeOpennetFile() {
OpennetManager om = opennet;
if(om != null) om.writeFile();
}
private void writeNodeFile(File orig, File backup) {
SimpleFieldSet fs = darknetCrypto.exportPrivateFieldSet();
if(orig.exists()) backup.delete();
FileOutputStream fos = null;
try {
fos = new FileOutputStream(backup);
fs.writeTo(fos);
fos.close();
fos = null;
FileUtil.renameTo(backup, orig);
} catch (IOException ioe) {
Logger.error(this, "IOE :"+ioe.getMessage(), ioe);
return;
} finally {
Closer.close(fos);
}
}
private void initNodeFileSettings() {
Logger.normal(this, "Creating new node file from scratch");
// Don't need to set getDarknetPortNumber()
// FIXME use a real IP!
darknetCrypto.initCrypto();
swapIdentifier = Fields.bytesToLong(darknetCrypto.identityHashHash);
myName = newName();
}
/**
* Read the config file from the arguments.
* Then create a node.
* Anything that needs static init should ideally be in here.
* @param args
*/
public static void main(String[] args) throws IOException {
NodeStarter.main(args);
}
public boolean isUsingWrapper(){
if(nodeStarter!=null && WrapperManager.isControlledByNativeWrapper())
return true;
else
return false;
}
public NodeStarter getNodeStarter(){
return nodeStarter;
}
/**
* Create a Node from a Config object.
* @param config The Config object for this node.
* @param r The random number generator for this node. Passed in because we may want
* to use a non-secure RNG for e.g. one-JVM live-code simulations. Should be a Yarrow in
* a production node. Yarrow will be used if that parameter is null
* @param weakRandom The fast random number generator the node will use. If null a MT
* instance will be used, seeded from the secure PRNG.
* @param lc logging config Handler
* @param ns NodeStarter
* @param executor Executor
* @throws NodeInitException If the node initialization fails.
*/
Node(PersistentConfig config, RandomSource r, RandomSource weakRandom, LoggingConfigHandler lc, NodeStarter ns, Executor executor) throws NodeInitException {
this.shutdownHook = SemiOrderedShutdownHook.get();
// Easy stuff
String tmp = "Initializing Node using Freenet Build #"+Version.buildNumber()+" r"+Version.cvsRevision()+" and freenet-ext Build #"+NodeStarter.extBuildNumber+" r"+NodeStarter.extRevisionNumber+" with "+System.getProperty("java.vendor")+" JVM version "+System.getProperty("java.version")+" running on "+System.getProperty("os.arch")+' '+System.getProperty("os.name")+' '+System.getProperty("os.version");
fixCertsFiles();
Logger.normal(this, tmp);
System.out.println(tmp);
collector = new IOStatisticCollector();
this.executor = executor;
nodeStarter=ns;
if(logConfigHandler != lc)
logConfigHandler=lc;
getPubKey = new NodeGetPubkey(this);
startupTime = System.currentTimeMillis();
SimpleFieldSet oldConfig = config.getSimpleFieldSet();
// Setup node-specific configuration
final SubConfig nodeConfig = config.createSubConfig("node");
final SubConfig installConfig = config.createSubConfig("node.install");
int sortOrder = 0;
// Directory for node-related files other than store
this.userDir = setupProgramDir(installConfig, "userDir", ".",
"Node.userDir", "Node.userDirLong", nodeConfig);
this.cfgDir = setupProgramDir(installConfig, "cfgDir", getUserDir().toString(),
"Node.cfgDir", "Node.cfgDirLong", nodeConfig);
this.nodeDir = setupProgramDir(installConfig, "nodeDir", getUserDir().toString(),
"Node.nodeDir", "Node.nodeDirLong", nodeConfig);
this.runDir = setupProgramDir(installConfig, "runDir", getUserDir().toString(),
"Node.runDir", "Node.runDirLong", nodeConfig);
this.pluginDir = setupProgramDir(installConfig, "pluginDir", userDir().file("plugins").toString(),
"Node.pluginDir", "Node.pluginDirLong", nodeConfig);
// l10n stuffs
nodeConfig.register("l10n", Locale.getDefault().getLanguage().toLowerCase(), sortOrder++, false, true,
"Node.l10nLanguage",
"Node.l10nLanguageLong",
new L10nCallback());
try {
new NodeL10n(BaseL10n.LANGUAGE.mapToLanguage(nodeConfig.getString("l10n")), getCfgDir());
} catch (MissingResourceException e) {
try {
new NodeL10n(BaseL10n.LANGUAGE.mapToLanguage(nodeConfig.getOption("l10n").getDefault()), getCfgDir());
} catch (MissingResourceException e1) {
new NodeL10n(BaseL10n.LANGUAGE.mapToLanguage(BaseL10n.LANGUAGE.getDefault().shortCode), getCfgDir());
}
}
// FProxy config needs to be here too
SubConfig fproxyConfig = config.createSubConfig("fproxy");
try {
toadlets = new SimpleToadletServer(fproxyConfig, new ArrayBucketFactory(), executor, this);
fproxyConfig.finishedInitialization();
toadlets.start();
} catch (IOException e4) {
Logger.error(this, "Could not start web interface: "+e4, e4);
System.err.println("Could not start web interface: "+e4);
e4.printStackTrace();
throw new NodeInitException(NodeInitException.EXIT_COULD_NOT_START_FPROXY, "Could not start FProxy: "+e4);
} catch (InvalidConfigValueException e4) {
System.err.println("Invalid config value, cannot start web interface: "+e4);
e4.printStackTrace();
throw new NodeInitException(NodeInitException.EXIT_COULD_NOT_START_FPROXY, "Could not start FProxy: "+e4);
}
final NativeThread entropyGatheringThread = new NativeThread(new Runnable() {
long tLastAdded = -1;
private void recurse(File f) {
if(isPRNGReady)
return;
extendTimeouts();
File[] subDirs = f.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.exists() && pathname.canRead() && pathname.isDirectory();
}
});
// @see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5086412
if(subDirs != null)
for(File currentDir : subDirs)
recurse(currentDir);
}
@Override
public void run() {
try {
// Delay entropy generation helper hack if enough entropy available
Thread.sleep(100);
} catch (InterruptedException e) {
}
if(isPRNGReady)
return;
System.out.println("Not enough entropy available.");
System.out.println("Trying to gather entropy (randomness) by reading the disk...");
if(File.separatorChar == '/') {
if(new File("/dev/hwrng").exists())
System.out.println("/dev/hwrng exists - have you installed rng-tools?");
else
System.out.println("You should consider installing a better random number generator e.g. haveged.");
}
extendTimeouts();
for(File root : File.listRoots()) {
if(isPRNGReady)
return;
recurse(root);
}
}
/** This is ridiculous, but for some users it can take more than an hour, and repeatedly timing out,
* reading a few bytes, and then timing out again is painful. :( */
static final int EXTEND_BY = 60*60*1000;
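// Repeatedly pushes the wrapper's startup deadline forward in one-hour (EXTEND_BY) increments so the
// service wrapper does not kill the JVM while we are blocked waiting for entropy.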
private void extendTimeouts() {
long now = System.currentTimeMillis();
if(now - tLastAdded < EXTEND_BY/2) return;
long target = tLastAdded + EXTEND_BY;
while(target < now)
target += EXTEND_BY;
long extend = target - now;
assert(extend < Integer.MAX_VALUE);
assert(extend > 0);
WrapperManager.signalStarting((int)extend);
tLastAdded = now;
}
}, "Entropy Gathering Thread", NativeThread.MIN_PRIORITY, true);
// Setup RNG if needed : DO NOT USE IT BEFORE THAT POINT!
if (r == null) {
// Preload required freenet.crypt.Util and freenet.crypt.Rijndael classes (the self-test can delay Yarrow startup and trigger a false lack-of-entropy message)
freenet.crypt.Util.mdProviders.size();
freenet.crypt.ciphers.Rijndael.getProviderName();
File seed = userDir.file("prng.seed");
FileUtil.setOwnerRW(seed);
entropyGatheringThread.start();
// Can block.
this.random = new Yarrow(seed);
// http://bugs.sun.com/view_bug.do;jsessionid=ff625daf459fdffffffffcd54f1c775299e0?bug_id=4705093
// This might block on /dev/random while doing new SecureRandom(). Once it's created, it won't block.
ECDH.blockingInit();
} else {
this.random = r;
// if it's not null it's because we are running in the simulator
}
// This can block too.
this.secureRandom = NodeStarter.getGlobalSecureRandom();
isPRNGReady = true;
toadlets.getStartupToadlet().setIsPRNGReady();
if(weakRandom == null) {
byte buffer[] = new byte[16];
random.nextBytes(buffer);
this.fastWeakRandom = new MersenneTwister(buffer);
} else
this.fastWeakRandom = weakRandom;
nodeNameUserAlert = new MeaningfulNodeNameUserAlert(this);
this.config = config;
lm = new LocationManager(random, this);
try {
localhostAddress = InetAddress.getByName("127.0.0.1");
} catch (UnknownHostException e3) {
// Does not do a reverse lookup, so this is impossible
throw new Error(e3);
}
fLocalhostAddress = new FreenetInetAddress(localhostAddress);
this.securityLevels = new SecurityLevels(this, config);
// Location of master key
nodeConfig.register("masterKeyFile", "master.keys", sortOrder++, true, true, "Node.masterKeyFile", "Node.masterKeyFileLong",
new StringCallback() {
@Override
public String get() {
if(masterKeysFile == null) return "none";
else return masterKeysFile.getPath();
}
@Override
public void set(String val) throws InvalidConfigValueException, NodeNeedRestartException {
// FIXME l10n
// FIXME wipe the old one and move
throw new InvalidConfigValueException("Node.masterKeyFile cannot be changed on the fly, you must shutdown, wipe the old file and reconfigure");
}
});
String value = nodeConfig.getString("masterKeyFile");
File f;
if (value.equalsIgnoreCase("none")) {
f = null;
} else {
f = new File(value);
if(f.exists() && !(f.canWrite() && f.canRead()))
throw new NodeInitException(NodeInitException.EXIT_CANT_WRITE_MASTER_KEYS, "Cannot read from and write to master keys file "+f);
}
masterKeysFile = f;
FileUtil.setOwnerRW(masterKeysFile);
nodeConfig.register("showFriendsVisibilityAlert", false, sortOrder++, true, false, "Node.showFriendsVisibilityAlert", "Node.showFriendsVisibilityAlert", new BooleanCallback() {
@Override
public Boolean get() {
synchronized(Node.this) {
return showFriendsVisibilityAlert;
}
}
@Override
public void set(Boolean val) throws InvalidConfigValueException,
NodeNeedRestartException {
synchronized(Node.this) {
if(val == showFriendsVisibilityAlert) return;
if(val) return;
}
unregisterFriendsVisibilityAlert();
}
});
showFriendsVisibilityAlert = nodeConfig.getBoolean("showFriendsVisibilityAlert");
byte[] clientCacheKey = null;
MasterSecret persistentSecret = null;
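// Load (or, at MAXIMUM physical threat level, freshly generate) the master keys. A wrong password or
// I/O error simply leaves the keys unset; a corrupt file size deletes the file and retries once.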
for(int i=0;i<2; i++) {
try {
if(securityLevels.physicalThreatLevel == PHYSICAL_THREAT_LEVEL.MAXIMUM) {
keys = MasterKeys.createRandom(secureRandom);
} else {
keys = MasterKeys.read(masterKeysFile, secureRandom, "");
}
clientCacheKey = keys.clientCacheMasterKey;
persistentSecret = keys.getPersistentMasterSecret();
databaseKey = keys.createDatabaseKey(secureRandom);
if(securityLevels.getPhysicalThreatLevel() == PHYSICAL_THREAT_LEVEL.HIGH) {
System.err.println("Physical threat level is set to HIGH but no password, resetting to NORMAL - probably timing glitch");
securityLevels.resetPhysicalThreatLevel(PHYSICAL_THREAT_LEVEL.NORMAL);
}
break;
} catch (MasterKeysWrongPasswordException e) {
break;
} catch (MasterKeysFileSizeException e) {
System.err.println("Impossible: master keys file "+masterKeysFile+" too " + e.sizeToString() + "! Deleting to enable startup, but you will lose your client cache.");
masterKeysFile.delete();
} catch (IOException e) {
break;
}
}
// Boot ID
bootID = random.nextLong();
// Fixed length file containing boot ID. Accessed with random access file. So hopefully it will always be
// written. Note that we set lastBootID to -1 if we can't _write_ our ID as well as if we can't read it,
// because if we can't write it then we probably couldn't write it on the last bootup either.
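// The file simply holds the boot ID as 16 hex characters, e.g. "1a2b3c4d5e6f7a8b" (illustrative value).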
File bootIDFile = runDir.file("bootID");
int BOOT_FILE_LENGTH = 64 / 4; // a 64-bit long rendered as 16 padded hex characters
long oldBootID = -1;
RandomAccessFile raf = null;
try {
raf = new RandomAccessFile(bootIDFile, "rw");
if(raf.length() < BOOT_FILE_LENGTH) {
oldBootID = -1;
} else {
byte[] buf = new byte[BOOT_FILE_LENGTH];
raf.readFully(buf);
String s = new String(buf, "ISO-8859-1");
try {
oldBootID = Fields.bytesToLong(HexUtil.hexToBytes(s));
} catch (NumberFormatException e) {
oldBootID = -1;
}
raf.seek(0);
}
String s = HexUtil.bytesToHex(Fields.longToBytes(bootID));
byte[] buf = s.getBytes("ISO-8859-1");
if(buf.length != BOOT_FILE_LENGTH)
System.err.println("Not 16 bytes for boot ID "+bootID+" - WTF??");
raf.write(buf);
} catch (IOException e) {
oldBootID = -1;
// If we have an error in reading, *or in writing*, we don't reliably know the last boot ID.
} finally {
Closer.close(raf);
}
lastBootID = oldBootID;
nodeConfig.register("disableProbabilisticHTLs", false, sortOrder++, true, false, "Node.disablePHTLS", "Node.disablePHTLSLong",
new BooleanCallback() {
@Override
public Boolean get() {
return disableProbabilisticHTLs;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
disableProbabilisticHTLs = val;
}
});
disableProbabilisticHTLs = nodeConfig.getBoolean("disableProbabilisticHTLs");
nodeConfig.register("maxHTL", DEFAULT_MAX_HTL, sortOrder++, true, false, "Node.maxHTL", "Node.maxHTLLong", new ShortCallback() {
@Override
public Short get() {
return maxHTL;
}
@Override
public void set(Short val) throws InvalidConfigValueException {
if(val < 0) throw new InvalidConfigValueException("Impossible max HTL");
maxHTL = val;
}
}, false);
maxHTL = nodeConfig.getShort("maxHTL");
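// Exposes the IP traffic class (TOS/DSCP) used for outgoing packets as an enumerable config option;
// a change only takes effect after a restart.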
class TrafficClassCallback extends StringCallback implements EnumerableOptionCallback {
@Override
public String get() {
return trafficClass.name();
}
@Override
public void set(String tcName) throws InvalidConfigValueException, NodeNeedRestartException {
try {
trafficClass = TrafficClass.fromNameOrValue(tcName);
} catch (IllegalArgumentException e) {
throw new InvalidConfigValueException(e);
}
throw new NodeNeedRestartException("TrafficClass cannot change on the fly");
}
@Override
public String[] getPossibleValues() {
ArrayList<String> array = new ArrayList<String>();
for (TrafficClass tc : TrafficClass.values())
array.add(tc.name());
return array.toArray(new String[0]);
}
}
nodeConfig.register("trafficClass", TrafficClass.getDefault().name(), sortOrder++, true, false,
"Node.trafficClass", "Node.trafficClassLong",
new TrafficClassCallback());
String trafficClassValue = nodeConfig.getString("trafficClass");
try {
trafficClass = TrafficClass.fromNameOrValue(trafficClassValue);
} catch (IllegalArgumentException e) {
Logger.error(this, "Invalid trafficClass:"+trafficClassValue+" resetting the value to default.", e);
trafficClass = TrafficClass.getDefault();
}
// FIXME maybe these should persist? They need to be private.
decrementAtMax = random.nextDouble() <= DECREMENT_AT_MAX_PROB;
decrementAtMin = random.nextDouble() <= DECREMENT_AT_MIN_PROB;
// Determine where to bind to
usm = new MessageCore(executor);
// FIXME maybe these configs should actually be under a node.ip subconfig?
ipDetector = new NodeIPDetector(this);
sortOrder = ipDetector.registerConfigs(nodeConfig, sortOrder);
// ARKs enabled?
nodeConfig.register("enableARKs", true, sortOrder++, true, false, "Node.enableARKs", "Node.enableARKsLong", new BooleanCallback() {
@Override
public Boolean get() {
return enableARKs;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
throw new InvalidConfigValueException("Cannot change on the fly");
}
@Override
public boolean isReadOnly() {
return true;
}
});
enableARKs = nodeConfig.getBoolean("enableARKs");
nodeConfig.register("enablePerNodeFailureTables", true, sortOrder++, true, false, "Node.enablePerNodeFailureTables", "Node.enablePerNodeFailureTablesLong", new BooleanCallback() {
@Override
public Boolean get() {
return enablePerNodeFailureTables;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
throw new InvalidConfigValueException("Cannot change on the fly");
}
@Override
public boolean isReadOnly() {
return true;
}
});
enablePerNodeFailureTables = nodeConfig.getBoolean("enablePerNodeFailureTables");
nodeConfig.register("enableULPRDataPropagation", true, sortOrder++, true, false, "Node.enableULPRDataPropagation", "Node.enableULPRDataPropagationLong", new BooleanCallback() {
@Override
public Boolean get() {
return enableULPRDataPropagation;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
throw new InvalidConfigValueException("Cannot change on the fly");
}
@Override
public boolean isReadOnly() {
return true;
}
});
enableULPRDataPropagation = nodeConfig.getBoolean("enableULPRDataPropagation");
nodeConfig.register("enableSwapping", true, sortOrder++, true, false, "Node.enableSwapping", "Node.enableSwappingLong", new BooleanCallback() {
@Override
public Boolean get() {
return enableSwapping;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
throw new InvalidConfigValueException("Cannot change on the fly");
}
@Override
public boolean isReadOnly() {
return true;
}
});
enableSwapping = nodeConfig.getBoolean("enableSwapping");
/*
* Publishing our peers' locations is enabled, even in MAXIMUM network security and/or HIGH friends security,
* because a node which doesn't publish its peers' locations will get dramatically less traffic.
*
* Publishing our peers' locations does make us slightly more vulnerable to some attacks, but I don't think
* it's a big difference: swapping reveals the same information, it just doesn't update as quickly. This
* may help slightly, but probably not dramatically against a clever attacker.
*
* FIXME review this decision.
*/
nodeConfig.register("publishOurPeersLocation", true, sortOrder++, true, false, "Node.publishOurPeersLocation", "Node.publishOurPeersLocationLong", new BooleanCallback() {
@Override
public Boolean get() {
return publishOurPeersLocation;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
publishOurPeersLocation = val;
}
});
publishOurPeersLocation = nodeConfig.getBoolean("publishOurPeersLocation");
nodeConfig.register("routeAccordingToOurPeersLocation", true, sortOrder++, true, false, "Node.routeAccordingToOurPeersLocation", "Node.routeAccordingToOurPeersLocation", new BooleanCallback() {
@Override
public Boolean get() {
return routeAccordingToOurPeersLocation;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
routeAccordingToOurPeersLocation = val;
}
});
routeAccordingToOurPeersLocation = nodeConfig.getBoolean("routeAccordingToOurPeersLocation");
nodeConfig.register("enableSwapQueueing", true, sortOrder++, true, false, "Node.enableSwapQueueing", "Node.enableSwapQueueingLong", new BooleanCallback() {
@Override
public Boolean get() {
return enableSwapQueueing;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
enableSwapQueueing = val;
}
});
enableSwapQueueing = nodeConfig.getBoolean("enableSwapQueueing");
nodeConfig.register("enablePacketCoalescing", true, sortOrder++, true, false, "Node.enablePacketCoalescing", "Node.enablePacketCoalescingLong", new BooleanCallback() {
@Override
public Boolean get() {
return enablePacketCoalescing;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
enablePacketCoalescing = val;
}
});
enablePacketCoalescing = nodeConfig.getBoolean("enablePacketCoalescing");
// Determine the port number
// @see #191
if(oldConfig != null && "-1".equals(oldConfig.get("node.listenPort")))
throw new NodeInitException(NodeInitException.EXIT_COULD_NOT_BIND_USM, "Your freenet.ini file is corrupted! 'listenPort=-1'");
NodeCryptoConfig darknetConfig = new NodeCryptoConfig(nodeConfig, sortOrder++, false, securityLevels);
sortOrder += NodeCryptoConfig.OPTION_COUNT;
darknetCrypto = new NodeCrypto(this, false, darknetConfig, startupTime, enableARKs);
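// Roughly: darknetCrypto holds this node's darknet identity keys and the UDP transport used for friend-to-friend connections.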
// Must be created after darknetCrypto
dnsr = new DNSRequester(this);
ps = new PacketSender(this);
ticker = new PrioritizedTicker(executor, getDarknetPortNumber());
if(executor instanceof PooledExecutor)
((PooledExecutor)executor).setTicker(ticker);
Logger.normal(Node.class, "Creating node...");
shutdownHook.addEarlyJob(new Thread() {
@Override
public void run() {
if (opennet != null)
opennet.stop(false);
}
});
shutdownHook.addEarlyJob(new Thread() {
@Override
public void run() {
darknetCrypto.stop();
}
});
// Bandwidth limit
nodeConfig.register("outputBandwidthLimit", "15K", sortOrder++, false, true, "Node.outBWLimit", "Node.outBWLimitLong", new IntCallback() {
@Override
public Integer get() {
//return BlockTransmitter.getHardBandwidthLimit();
return outputBandwidthLimit;
}
@Override
public void set(Integer obwLimit) throws InvalidConfigValueException {
BandwidthManager.checkOutputBandwidthLimit(obwLimit);
try {
outputThrottle.changeNanosAndBucketSize(SECONDS.toNanos(1) / obwLimit, obwLimit/2);
} catch (IllegalArgumentException e) {
throw new InvalidConfigValueException(e);
}
synchronized(Node.this) {
outputBandwidthLimit = obwLimit;
}
}
});
int obwLimit = nodeConfig.getInt("outputBandwidthLimit");
if (obwLimit < minimumBandwidth) {
obwLimit = minimumBandwidth; // upgrade slow nodes automatically
Logger.normal(Node.class, "Output bandwidth was lower than minimum bandwidth. Increased to minimum bandwidth.");
}
outputBandwidthLimit = obwLimit;
try {
BandwidthManager.checkOutputBandwidthLimit(outputBandwidthLimit);
} catch (InvalidConfigValueException e) {
throw new NodeInitException(NodeInitException.EXIT_BAD_BWLIMIT, e.getMessage());
}
// Bucket size of 0.5 seconds' worth of bytes.
// Add them at a rate determined by the obwLimit.
// Maximum forced bytes 80%, in other words, 20% of the bandwidth is reserved for
// block transfers, so we will use that 20% for block transfers even if more than 80% of the limit is used for non-limited data (resends etc).
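// For example (illustrative numbers): with obwLimit = 16384 bytes/s the bucket holds 8192 bytes and
// one byte-token is added roughly every 61 microseconds (1e9 / 16384 ns).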
int bucketSize = obwLimit/2;
// Must have at least space for ONE PACKET.
// FIXME: make compatible with alternate transports.
bucketSize = Math.max(bucketSize, 2048);
try {
outputThrottle = new TokenBucket(bucketSize, SECONDS.toNanos(1) / obwLimit, obwLimit/2);
} catch (IllegalArgumentException e) {
throw new NodeInitException(NodeInitException.EXIT_BAD_BWLIMIT, e.getMessage());
}
nodeConfig.register("inputBandwidthLimit", "-1", sortOrder++, false, true, "Node.inBWLimit", "Node.inBWLimitLong", new IntCallback() {
@Override
public Integer get() {
if(inputLimitDefault) return -1;
return inputBandwidthLimit;
}
@Override
public void set(Integer ibwLimit) throws InvalidConfigValueException {
synchronized(Node.this) {
BandwidthManager.checkInputBandwidthLimit(ibwLimit);
if(ibwLimit == -1) {
inputLimitDefault = true;
ibwLimit = outputBandwidthLimit * 4;
} else {
inputLimitDefault = false;
}
inputBandwidthLimit = ibwLimit;
}
}
});
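// -1 means auto: the input limit then defaults to four times the output limit.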
int ibwLimit = nodeConfig.getInt("inputBandwidthLimit");
if(ibwLimit == -1) {
inputLimitDefault = true;
ibwLimit = obwLimit * 4;
}
else if (ibwLimit < minimumBandwidth) {
ibwLimit = minimumBandwidth; // upgrade slow nodes automatically
Logger.normal(Node.class, "Input bandwidth was lower than minimum bandwidth. Increased to minimum bandwidth.");
}
inputBandwidthLimit = ibwLimit;
try {
BandwidthManager.checkInputBandwidthLimit(inputBandwidthLimit);
} catch (InvalidConfigValueException e) {
throw new NodeInitException(NodeInitException.EXIT_BAD_BWLIMIT, e.getMessage());
}
nodeConfig.register("amountOfDataToCheckCompressionRatio", "8MiB", sortOrder++,
true, true, "Node.amountOfDataToCheckCompressionRatio",
"Node.amountOfDataToCheckCompressionRatioLong", new LongCallback() {
@Override
public Long get() {
return amountOfDataToCheckCompressionRatio;
}
@Override
public void set(Long amountOfDataToCheckCompressionRatio) {
synchronized(Node.this) {
Node.this.amountOfDataToCheckCompressionRatio = amountOfDataToCheckCompressionRatio;
}
}
}, true);
amountOfDataToCheckCompressionRatio = nodeConfig.getLong("amountOfDataToCheckCompressionRatio");
nodeConfig.register("minimumCompressionPercentage", "10", sortOrder++,
true, true, "Node.minimumCompressionPercentage",
"Node.minimumCompressionPercentageLong", new IntCallback() {
@Override
public Integer get() {
return minimumCompressionPercentage;
}
@Override
public void set(Integer minimumCompressionPercentage) {
synchronized(Node.this) {
if (minimumCompressionPercentage < 0 || minimumCompressionPercentage > 100) {
Logger.normal(Node.class, "Wrong minimum compression percentage" + minimumCompressionPercentage);
return;
}
Node.this.minimumCompressionPercentage = minimumCompressionPercentage;
}
}
}, Dimension.NOT);
minimumCompressionPercentage = nodeConfig.getInt("minimumCompressionPercentage");
nodeConfig.register("maxTimeForSingleCompressor", "20m", sortOrder++,
true, true, "Node.maxTimeForSingleCompressor",
"Node.maxTimeForSingleCompressorLong", new IntCallback() {
@Override
public Integer get() {
return maxTimeForSingleCompressor;
}
@Override
public void set(Integer maxTimeForSingleCompressor) {
synchronized(Node.this) {
Node.this.maxTimeForSingleCompressor = maxTimeForSingleCompressor;
}
}
}, Dimension.DURATION);
maxTimeForSingleCompressor = nodeConfig.getInt("maxTimeForSingleCompressor");
nodeConfig.register("connectionSpeedDetection", true, sortOrder++,
true, true, "Node.connectionSpeedDetection",
"Node.connectionSpeedDetectionLong", new BooleanCallback() {
@Override
public Boolean get() {
return connectionSpeedDetection;
}
@Override
public void set(Boolean connectionSpeedDetection) {
synchronized(Node.this) {
Node.this.connectionSpeedDetection = connectionSpeedDetection;
}
}
});
connectionSpeedDetection = nodeConfig.getBoolean("connectionSpeedDetection");
nodeConfig.register("throttleLocalTraffic", false, sortOrder++, true, false, "Node.throttleLocalTraffic", "Node.throttleLocalTrafficLong", new BooleanCallback() {
@Override
public Boolean get() {
return throttleLocalData;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
throttleLocalData = val;
}
});
throttleLocalData = nodeConfig.getBoolean("throttleLocalTraffic");
String s = "Testnet mode DISABLED. You may have some level of anonymity. :)\n"+
"Note that this version of Freenet is still a very early alpha, and may well have numerous bugs and design flaws.\n"+
"In particular: YOU ARE WIDE OPEN TO YOUR IMMEDIATE PEERS! They can eavesdrop on your requests with relatively little difficulty at present (correlation attacks etc).";
Logger.normal(this, s);
System.err.println(s);
File nodeFile = nodeDir.file("node-"+getDarknetPortNumber());
File nodeFileBackup = nodeDir.file("node-"+getDarknetPortNumber()+".bak");
// After we have set up testnet and IP address, load the node file
try {
// FIXME should take file directly?
readNodeFile(nodeFile.getPath());
} catch (IOException e) {
try {
System.err.println("Trying to read node file backup ...");
readNodeFile(nodeFileBackup.getPath());
} catch (IOException e1) {
if(nodeFile.exists() || nodeFileBackup.exists()) {
System.err.println("No node file or cannot read, (re)initialising crypto etc");
System.err.println(e1.toString());
e1.printStackTrace();
System.err.println("After:");
System.err.println(e.toString());
e.printStackTrace();
} else {
System.err.println("Creating new cryptographic keys...");
}
initNodeFileSettings();
}
}
// Then read the peers
peers = new PeerManager(this, shutdownHook);
tracker = new RequestTracker(peers, ticker);
usm.setDispatcher(dispatcher=new NodeDispatcher(this));
uptime = new UptimeEstimator(runDir, ticker, darknetCrypto.identityHash);
// ULPRs
failureTable = new FailureTable(this);
nodeStats = new NodeStats(this, sortOrder, config.createSubConfig("node.load"), obwLimit, ibwLimit, lastVersion);
// clientCore needs new load management and other settings from stats.
clientCore = new NodeClientCore(this, config, nodeConfig, installConfig, getDarknetPortNumber(), sortOrder, oldConfig, fproxyConfig, toadlets, databaseKey, persistentSecret);
toadlets.setCore(clientCore);
if (JVMVersion.isEOL()) {
clientCore.alerts.register(new JVMVersionAlert());
}
if(showFriendsVisibilityAlert)
registerFriendsVisibilityAlert();
// Node updater support
System.out.println("Initializing Node Updater");
try {
nodeUpdater = NodeUpdateManager.maybeCreate(this, config);
} catch (InvalidConfigValueException e) {
e.printStackTrace();
throw new NodeInitException(NodeInitException.EXIT_COULD_NOT_START_UPDATER, "Could not create Updater: "+e);
}
// Opennet
final SubConfig opennetConfig = config.createSubConfig("node.opennet");
opennetConfig.register("connectToSeednodes", true, 0, true, false, "Node.withAnnouncement", "Node.withAnnouncementLong", new BooleanCallback() {
@Override
public Boolean get() {
return isAllowedToConnectToSeednodes;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException, NodeNeedRestartException {
if (get().equals(val))
return;
synchronized(Node.this) {
isAllowedToConnectToSeednodes = val;
if(opennet != null)
throw new NodeNeedRestartException(l10n("connectToSeednodesCannotBeChangedMustDisableOpennetOrReboot"));
}
}
});
isAllowedToConnectToSeednodes = opennetConfig.getBoolean("connectToSeednodes");
// Can be enabled on the fly
opennetConfig.register("enabled", false, 0, true, true, "Node.opennetEnabled", "Node.opennetEnabledLong", new BooleanCallback() {
@Override
public Boolean get() {
synchronized(Node.this) {
return opennet != null;
}
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
OpennetManager o;
synchronized(Node.this) {
if(val == (opennet != null)) return;
if(val) {
try {
o = opennet = new OpennetManager(Node.this, opennetCryptoConfig, System.currentTimeMillis(), isAllowedToConnectToSeednodes);
} catch (NodeInitException e) {
opennet = null;
throw new InvalidConfigValueException(e.getMessage());
}
} else {
o = opennet;
opennet = null;
}
}
if(val) o.start();
else o.stop(true);
ipDetector.ipDetectorManager.notifyPortChange(getPublicInterfacePorts());
}
});
boolean opennetEnabled = opennetConfig.getBoolean("enabled");
opennetConfig.register("maxOpennetPeers", OpennetManager.MAX_PEERS_FOR_SCALING, 1, true, false, "Node.maxOpennetPeers",
"Node.maxOpennetPeersLong", new IntCallback() {
@Override
public Integer get() {
return maxOpennetPeers;
}
@Override
public void set(Integer inputMaxOpennetPeers) throws InvalidConfigValueException {
if(inputMaxOpennetPeers < 0) throw new InvalidConfigValueException(l10n("mustBePositive"));
if(inputMaxOpennetPeers > OpennetManager.MAX_PEERS_FOR_SCALING) throw new InvalidConfigValueException(l10n("maxOpennetPeersMustBeTwentyOrLess", "maxpeers", Integer.toString(OpennetManager.MAX_PEERS_FOR_SCALING)));
maxOpennetPeers = inputMaxOpennetPeers;
}
}, false);
maxOpennetPeers = opennetConfig.getInt("maxOpennetPeers");
if(maxOpennetPeers > OpennetManager.MAX_PEERS_FOR_SCALING) {
Logger.error(this, "maxOpennetPeers may not be over "+OpennetManager.MAX_PEERS_FOR_SCALING);
maxOpennetPeers = OpennetManager.MAX_PEERS_FOR_SCALING;
}
opennetCryptoConfig = new NodeCryptoConfig(opennetConfig, 2 /* 0 = enabled */, true, securityLevels);
if(opennetEnabled) {
opennet = new OpennetManager(this, opennetCryptoConfig, System.currentTimeMillis(), isAllowedToConnectToSeednodes);
// Will be started later
} else {
opennet = null;
}
securityLevels.addNetworkThreatLevelListener(new SecurityLevelListener<NETWORK_THREAT_LEVEL>() {
@Override
public void onChange(NETWORK_THREAT_LEVEL oldLevel, NETWORK_THREAT_LEVEL newLevel) {
if(newLevel == NETWORK_THREAT_LEVEL.HIGH
|| newLevel == NETWORK_THREAT_LEVEL.MAXIMUM) {
OpennetManager om;
synchronized(Node.this) {
om = opennet;
if(om != null)
opennet = null;
}
if(om != null) {
om.stop(true);
ipDetector.ipDetectorManager.notifyPortChange(getPublicInterfacePorts());
}
} else if(newLevel == NETWORK_THREAT_LEVEL.NORMAL
|| newLevel == NETWORK_THREAT_LEVEL.LOW) {
OpennetManager o = null;
synchronized(Node.this) {
if(opennet == null) {
try {
o = opennet = new OpennetManager(Node.this, opennetCryptoConfig, System.currentTimeMillis(), isAllowedToConnectToSeednodes);
} catch (NodeInitException e) {
opennet = null;
Logger.error(this, "UNABLE TO ENABLE OPENNET: "+e, e);
clientCore.alerts.register(new SimpleUserAlert(false, l10n("enableOpennetFailedTitle"), l10n("enableOpennetFailed", "message", e.getLocalizedMessage()), l10n("enableOpennetFailed", "message", e.getLocalizedMessage()), UserAlert.ERROR));
}
}
}
if(o != null) {
o.start();
ipDetector.ipDetectorManager.notifyPortChange(getPublicInterfacePorts());
}
}
Node.this.config.store();
}
});
opennetConfig.register("acceptSeedConnections", false, 2, true, true, "Node.acceptSeedConnectionsShort", "Node.acceptSeedConnections", new BooleanCallback() {
@Override
public Boolean get() {
return acceptSeedConnections;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
acceptSeedConnections = val;
}
});
acceptSeedConnections = opennetConfig.getBoolean("acceptSeedConnections");
if(acceptSeedConnections && opennet != null)
opennet.crypto.socket.getAddressTracker().setHugeTracker();
opennetConfig.finishedInitialization();
nodeConfig.register("passOpennetPeersThroughDarknet", true, sortOrder++, true, false, "Node.passOpennetPeersThroughDarknet", "Node.passOpennetPeersThroughDarknetLong",
new BooleanCallback() {
@Override
public Boolean get() {
synchronized(Node.this) {
return passOpennetRefsThroughDarknet;
}
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
synchronized(Node.this) {
passOpennetRefsThroughDarknet = val;
}
}
});
passOpennetRefsThroughDarknet = nodeConfig.getBoolean("passOpennetPeersThroughDarknet");
this.extraPeerDataDir = userDir.file("extra-peer-data-"+getDarknetPortNumber());
if (!((extraPeerDataDir.exists() && extraPeerDataDir.isDirectory()) || (extraPeerDataDir.mkdir()))) {
String msg = "Could not find or create extra peer data directory";
throw new NodeInitException(NodeInitException.EXIT_BAD_DIR, msg);
}
// Name
nodeConfig.register("name", myName, sortOrder++, false, true, "Node.nodeName", "Node.nodeNameLong",
new NodeNameCallback());
myName = nodeConfig.getString("name");
// Datastore
nodeConfig.register("storeForceBigShrinks", false, sortOrder++, true, false, "Node.forceBigShrink", "Node.forceBigShrinkLong",
new BooleanCallback() {
@Override
public Boolean get() {
synchronized(Node.this) {
return storeForceBigShrinks;
}
}
@Override
public void set(Boolean val) throws InvalidConfigValueException {
synchronized(Node.this) {
storeForceBigShrinks = val;
}
}
});
// Datastore
nodeConfig.register("storeType", "ram", sortOrder++, true, true, "Node.storeType", "Node.storeTypeLong", new StoreTypeCallback());
storeType = nodeConfig.getString("storeType");
/*
* Very small initial store size, since the node will preallocate it when starting up for the first time,
* BLOCKING STARTUP, and since everyone goes through the wizard anyway...
*/
nodeConfig.register("storeSize", DEFAULT_STORE_SIZE, sortOrder++, false, true, "Node.storeSize", "Node.storeSizeLong",
new LongCallback() {
@Override
public Long get() {
return maxTotalDatastoreSize;
}
@Override
public void set(Long storeSize) throws InvalidConfigValueException {
if(storeSize < MIN_STORE_SIZE)
throw new InvalidConfigValueException(l10n("invalidStoreSize"));
long newMaxStoreKeys = storeSize / sizePerKey;
if(newMaxStoreKeys == maxTotalKeys) return;
// Update each datastore
synchronized(Node.this) {
maxTotalDatastoreSize = storeSize;
maxTotalKeys = newMaxStoreKeys;
maxStoreKeys = maxTotalKeys / 2;
maxCacheKeys = maxTotalKeys - maxStoreKeys;
}
try {
chkDatastore.setMaxKeys(maxStoreKeys, storeForceBigShrinks);
chkDatacache.setMaxKeys(maxCacheKeys, storeForceBigShrinks);
pubKeyDatastore.setMaxKeys(maxStoreKeys, storeForceBigShrinks);
pubKeyDatacache.setMaxKeys(maxCacheKeys, storeForceBigShrinks);
sskDatastore.setMaxKeys(maxStoreKeys, storeForceBigShrinks);
sskDatacache.setMaxKeys(maxCacheKeys, storeForceBigShrinks);
} catch (IOException e) {
// FIXME we need to be able to tell the user.
Logger.error(this, "Caught "+e+" resizing the datastore", e);
System.err.println("Caught "+e+" resizing the datastore");
e.printStackTrace();
}
// Perhaps a bit hackish...? Seems like this should be near its definition in NodeStats.
nodeStats.avgStoreCHKLocation.changeMaxReports((int)maxStoreKeys);
nodeStats.avgCacheCHKLocation.changeMaxReports((int)maxCacheKeys);
nodeStats.avgSlashdotCacheCHKLocation.changeMaxReports((int)maxCacheKeys);
nodeStats.avgClientCacheCHKLocation.changeMaxReports((int)maxCacheKeys);
nodeStats.avgStoreSSKLocation.changeMaxReports((int)maxStoreKeys);
nodeStats.avgCacheSSKLocation.changeMaxReports((int)maxCacheKeys);
nodeStats.avgSlashdotCacheSSKLocation.changeMaxReports((int)maxCacheKeys);
nodeStats.avgClientCacheSSKLocation.changeMaxReports((int)maxCacheKeys);
}
}, true);
maxTotalDatastoreSize = nodeConfig.getLong("storeSize");
if(maxTotalDatastoreSize < MIN_STORE_SIZE && !storeType.equals("ram")) { // totally arbitrary minimum!
throw new NodeInitException(NodeInitException.EXIT_INVALID_STORE_SIZE, "Store size too small");
}
maxTotalKeys = maxTotalDatastoreSize / sizePerKey;
nodeConfig.register("storeUseSlotFilters", true, sortOrder++, true, false, "Node.storeUseSlotFilters", "Node.storeUseSlotFiltersLong", new BooleanCallback() {
public Boolean get() {
synchronized(Node.this) {
return storeUseSlotFilters;
}
}
public void set(Boolean val) throws InvalidConfigValueException,
NodeNeedRestartException {
synchronized(Node.this) {
storeUseSlotFilters = val;
}
// FIXME l10n
throw new NodeNeedRestartException("Need to restart to change storeUseSlotFilters");
}
});
storeUseSlotFilters = nodeConfig.getBoolean("storeUseSlotFilters");
nodeConfig.register("storeSaltHashSlotFilterPersistenceTime", ResizablePersistentIntBuffer.DEFAULT_PERSISTENCE_TIME, sortOrder++, true, false,
"Node.storeSaltHashSlotFilterPersistenceTime", "Node.storeSaltHashSlotFilterPersistenceTimeLong", new IntCallback() {
@Override
public Integer get() {
return ResizablePersistentIntBuffer.getPersistenceTime();
}
@Override
public void set(Integer val)
throws InvalidConfigValueException,
NodeNeedRestartException {
if(val >= -1)
ResizablePersistentIntBuffer.setPersistenceTime(val);
else
throw new InvalidConfigValueException(l10n("slotFilterPersistenceTimeError"));
}
}, false);
nodeConfig.register("storeSaltHashResizeOnStart", false, sortOrder++, true, false,
"Node.storeSaltHashResizeOnStart", "Node.storeSaltHashResizeOnStartLong", new BooleanCallback() {
@Override
public Boolean get() {
return storeSaltHashResizeOnStart;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException, NodeNeedRestartException {
storeSaltHashResizeOnStart = val;
}
});
storeSaltHashResizeOnStart = nodeConfig.getBoolean("storeSaltHashResizeOnStart");
this.storeDir = setupProgramDir(installConfig, "storeDir", userDir().file("datastore").getPath(), "Node.storeDirectory", "Node.storeDirectoryLong", nodeConfig);
installConfig.finishedInitialization();
final String suffix = getStoreSuffix();
maxStoreKeys = maxTotalKeys / 2;
maxCacheKeys = maxTotalKeys - maxStoreKeys;
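// Half of the keys go to the long-term datastore, the rest to the datacache (roughly: the store holds
// data close to our location, the cache recently routed data).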
/*
* On Windows, setting the file length normally involves writing lots of zeros.
* So it's an uninterruptible system call that takes a loooong time. On OS/X,
* presumably the same is true. If the RNG is fast enough, this means that
* setting the length and writing random data take exactly the same amount
* of time. On most versions of Unix, holes can be created. However on all
* systems, predictable disk usage is a good thing. So let's turn it on by
* default for now, on all systems. The datastore can be read but mostly not
* written while the random data is being written.
*/
nodeConfig.register("storePreallocate", true, sortOrder++, true, true, "Node.storePreallocate", "Node.storePreallocateLong",
new BooleanCallback() {
@Override
public Boolean get() {
return storePreallocate;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException, NodeNeedRestartException {
storePreallocate = val;
if (storeType.equals("salt-hash")) {
setPreallocate(chkDatastore, val);
setPreallocate(chkDatacache, val);
setPreallocate(pubKeyDatastore, val);
setPreallocate(pubKeyDatacache, val);
setPreallocate(sskDatastore, val);
setPreallocate(sskDatacache, val);
}
}
private void setPreallocate(StoreCallback<?> datastore,
boolean val) {
// Avoid race conditions by checking first.
FreenetStore<?> store = datastore.getStore();
if(store instanceof SaltedHashFreenetStore)
((SaltedHashFreenetStore<?>)store).setPreallocate(val);
}
});
storePreallocate = nodeConfig.getBoolean("storePreallocate");
if(File.separatorChar == '/' && System.getProperty("os.name").toLowerCase().indexOf("mac os") < 0) {
securityLevels.addPhysicalThreatLevelListener(new SecurityLevelListener<SecurityLevels.PHYSICAL_THREAT_LEVEL>() {
@Override
public void onChange(PHYSICAL_THREAT_LEVEL oldLevel, PHYSICAL_THREAT_LEVEL newLevel) {
try {
if(newLevel == PHYSICAL_THREAT_LEVEL.LOW)
nodeConfig.set("storePreallocate", false);
else
nodeConfig.set("storePreallocate", true);
} catch (NodeNeedRestartException e) {
// Ignore
} catch (InvalidConfigValueException e) {
// Ignore
}
}
});
}
securityLevels.addPhysicalThreatLevelListener(new SecurityLevelListener<SecurityLevels.PHYSICAL_THREAT_LEVEL>() {
@Override
public void onChange(PHYSICAL_THREAT_LEVEL oldLevel, PHYSICAL_THREAT_LEVEL newLevel) {
if(newLevel == PHYSICAL_THREAT_LEVEL.MAXIMUM) {
synchronized(this) {
clientCacheAwaitingPassword = false;
databaseAwaitingPassword = false;
}
try {
killMasterKeysFile();
clientCore.clientLayerPersister.disableWrite();
clientCore.clientLayerPersister.waitForNotWriting();
clientCore.clientLayerPersister.deleteAllFiles();
} catch (IOException e) {
masterKeysFile.delete();
Logger.error(this, "Unable to securely delete "+masterKeysFile);
System.err.println(NodeL10n.getBase().getString("SecurityLevels.cantDeletePasswordFile", "filename", masterKeysFile.getAbsolutePath()));
clientCore.alerts.register(new SimpleUserAlert(true, NodeL10n.getBase().getString("SecurityLevels.cantDeletePasswordFileTitle"), NodeL10n.getBase().getString("SecurityLevels.cantDeletePasswordFile"), NodeL10n.getBase().getString("SecurityLevels.cantDeletePasswordFileTitle"), UserAlert.CRITICAL_ERROR));
}
}
if(oldLevel == PHYSICAL_THREAT_LEVEL.MAXIMUM && newLevel != PHYSICAL_THREAT_LEVEL.HIGH) {
// Not passworded.
// Create the master.keys.
// Keys must exist.
try {
MasterKeys keys;
synchronized(this) {
keys = Node.this.keys;
}
keys.changePassword(masterKeysFile, "", secureRandom);
} catch (IOException e) {
Logger.error(this, "Unable to create encryption keys file: "+masterKeysFile+" : "+e, e);
System.err.println("Unable to create encryption keys file: "+masterKeysFile+" : "+e);
e.printStackTrace();
}
}
}
});
if(securityLevels.physicalThreatLevel == PHYSICAL_THREAT_LEVEL.MAXIMUM) {
try {
killMasterKeysFile();
} catch (IOException e) {
String msg = "Unable to securely delete old master.keys file when switching to MAXIMUM seclevel!!";
System.err.println(msg);
throw new NodeInitException(NodeInitException.EXIT_CANT_WRITE_MASTER_KEYS, msg);
}
}
long defaultCacheSize;
long memoryLimit = NodeStarter.getMemoryLimitBytes();
// This is tricky because systems with low memory probably also have slow disks, but using
// up too much memory can be catastrophic...
// Total alchemy, FIXME!
if(memoryLimit == Long.MAX_VALUE || memoryLimit < 0)
defaultCacheSize = 1024*1024;
else if(memoryLimit <= 128*1024*1024)
defaultCacheSize = 0; // Turn off completely for very small memory.
else {
// 9 stores, total should be 5% of memory, up to maximum of 1MB per store at 308MB+
defaultCacheSize = Math.min(1024*1024, (memoryLimit - 128*1024*1024) / (20*9));
}
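// Worked example (illustrative): a 256 MiB memory limit gives (256 - 128) MiB / 180 ~= 728 KiB per
// caching store; from roughly 308 MiB upwards the 1 MiB cap applies.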
nodeConfig.register("cachingFreenetStoreMaxSize", defaultCacheSize, sortOrder++, true, false, "Node.cachingFreenetStoreMaxSize", "Node.cachingFreenetStoreMaxSizeLong",
new LongCallback() {
@Override
public Long get() {
synchronized(Node.this) {
return cachingFreenetStoreMaxSize;
}
}
@Override
public void set(Long val) throws InvalidConfigValueException, NodeNeedRestartException {
if(val < 0) throw new InvalidConfigValueException(l10n("invalidMemoryCacheSize"));
// Any positive value is legal. In particular, e.g. 1200 bytes would cause us to cache SSKs but not CHKs.
synchronized(Node.this) {
cachingFreenetStoreMaxSize = val;
}
throw new NodeNeedRestartException("Caching Maximum Size cannot be changed on the fly");
}
}, true);
cachingFreenetStoreMaxSize = nodeConfig.getLong("cachingFreenetStoreMaxSize");
if(cachingFreenetStoreMaxSize < 0)
throw new NodeInitException(NodeInitException.EXIT_BAD_CONFIG, l10n("invalidMemoryCacheSize"));
nodeConfig.register("cachingFreenetStorePeriod", "300k", sortOrder++, true, false, "Node.cachingFreenetStorePeriod", "Node.cachingFreenetStorePeriod",
new LongCallback() {
@Override
public Long get() {
synchronized(Node.this) {
return cachingFreenetStorePeriod;
}
}
@Override
public void set(Long val) throws InvalidConfigValueException, NodeNeedRestartException {
synchronized(Node.this) {
cachingFreenetStorePeriod = val;
}
throw new NodeNeedRestartException("Caching Period cannot be changed on the fly");
}
}, true);
cachingFreenetStorePeriod = nodeConfig.getLong("cachingFreenetStorePeriod");
if(cachingFreenetStoreMaxSize > 0 && cachingFreenetStorePeriod > 0) {
cachingFreenetStoreTracker = new CachingFreenetStoreTracker(cachingFreenetStoreMaxSize, cachingFreenetStorePeriod, ticker);
}
boolean shouldWriteConfig = false;
if(storeType.equals("bdb-index")) {
System.err.println("Old format Berkeley DB datastore detected.");
System.err.println("This datastore format is no longer supported.");
System.err.println("The old datastore will be securely deleted.");
storeType = "salt-hash";
shouldWriteConfig = true;
deleteOldBDBIndexStoreFiles();
}
if (storeType.equals("salt-hash")) {
initRAMFS();
initSaltHashFS(suffix, false, null);
} else {
initRAMFS();
}
if(databaseAwaitingPassword) createPasswordUserAlert();
// Client cache
// Default is 10MB, in memory only. The wizard will change this.
nodeConfig.register("clientCacheType", "ram", sortOrder++, true, true, "Node.clientCacheType", "Node.clientCacheTypeLong", new ClientCacheTypeCallback());
clientCacheType = nodeConfig.getString("clientCacheType");
nodeConfig.register("clientCacheSize", DEFAULT_CLIENT_CACHE_SIZE, sortOrder++, false, true, "Node.clientCacheSize", "Node.clientCacheSizeLong",
new LongCallback() {
@Override
public Long get() {
return maxTotalClientCacheSize;
}
@Override
public void set(Long storeSize) throws InvalidConfigValueException {
if(storeSize < MIN_CLIENT_CACHE_SIZE)
throw new InvalidConfigValueException(l10n("invalidStoreSize"));
long newMaxStoreKeys = storeSize / sizePerKey;
if(newMaxStoreKeys == maxClientCacheKeys) return;
// Update each datastore
synchronized(Node.this) {
maxTotalClientCacheSize = storeSize;
maxClientCacheKeys = newMaxStoreKeys;
}
try {
chkClientcache.setMaxKeys(maxClientCacheKeys, storeForceBigShrinks);
pubKeyClientcache.setMaxKeys(maxClientCacheKeys, storeForceBigShrinks);
sskClientcache.setMaxKeys(maxClientCacheKeys, storeForceBigShrinks);
} catch (IOException e) {
// FIXME we need to be able to tell the user.
Logger.error(this, "Caught "+e+" resizing the clientcache", e);
System.err.println("Caught "+e+" resizing the clientcache");
e.printStackTrace();
}
}
}, true);
maxTotalClientCacheSize = nodeConfig.getLong("clientCacheSize");
if(maxTotalClientCacheSize < MIN_CLIENT_CACHE_SIZE) {
throw new NodeInitException(NodeInitException.EXIT_INVALID_STORE_SIZE, "Client cache size too small");
}
maxClientCacheKeys = maxTotalClientCacheSize / sizePerKey;
boolean startedClientCache = false;
if (clientCacheType.equals("salt-hash")) {
if(clientCacheKey == null) {
System.err.println("Cannot open client-cache, it is passworded");
setClientCacheAwaitingPassword();
} else {
initSaltHashClientCacheFS(suffix, false, clientCacheKey);
startedClientCache = true;
}
} else if(clientCacheType.equals("none")) {
initNoClientCacheFS();
startedClientCache = true;
} else { // ram
initRAMClientCacheFS();
startedClientCache = true;
}
if(!startedClientCache)
initRAMClientCacheFS();
if(!clientCore.loadedDatabase() && databaseKey != null) {
try {
lateSetupDatabase(databaseKey);
} catch (MasterKeysWrongPasswordException e2) {
System.err.println("Impossible: "+e2);
e2.printStackTrace();
} catch (MasterKeysFileSizeException e2) {
System.err.println("Impossible: "+e2);
e2.printStackTrace();
} catch (IOException e2) {
System.err.println("Unable to load database: "+e2);
e2.printStackTrace();
}
}
nodeConfig.register("useSlashdotCache", true, sortOrder++, true, false, "Node.useSlashdotCache", "Node.useSlashdotCacheLong", new BooleanCallback() {
@Override
public Boolean get() {
return useSlashdotCache;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException, NodeNeedRestartException {
useSlashdotCache = val;
}
});
useSlashdotCache = nodeConfig.getBoolean("useSlashdotCache");
nodeConfig.register("writeLocalToDatastore", false, sortOrder++, true, false, "Node.writeLocalToDatastore", "Node.writeLocalToDatastoreLong", new BooleanCallback() {
@Override
public Boolean get() {
return writeLocalToDatastore;
}
@Override
public void set(Boolean val) throws InvalidConfigValueException, NodeNeedRestartException {
writeLocalToDatastore = val;
}
});
writeLocalToDatastore = nodeConfig.getBoolean("writeLocalToDatastore");
// LOW network *and* physical seclevel = writeLocalToDatastore
securityLevels.addNetworkThreatLevelListener(new SecurityLevelListener<NETWORK_THREAT_LEVEL>() {
@Override
public void onChange(NETWORK_THREAT_LEVEL oldLevel, NETWORK_THREAT_LEVEL newLevel) {
if(newLevel == NETWORK_THREAT_LEVEL.LOW && securityLevels.getPhysicalThreatLevel() == PHYSICAL_THREAT_LEVEL.LOW)
writeLocalToDatastore = true;
else
writeLocalToDatastore = false;
}
});
securityLevels.addPhysicalThreatLevelListener(new SecurityLevelListener<PHYSICAL_THREAT_LEVEL>() {
@Override
public void onChange(PHYSICAL_THREAT_LEVEL oldLevel, PHYSICAL_THREAT_LEVEL newLevel) {
if(newLevel == PHYSICAL_THREAT_LEVEL.LOW && securityLevels.getNetworkThreatLevel() == NETWORK_THREAT_LEVEL.LOW)
writeLocalToDatastore = true;
else
writeLocalToDatastore = false;
}
});
nodeConfig.register("slashdotCacheLifetime", MINUTES.toMillis(30), sortOrder++, true, false, "Node.slashdotCacheLifetime", "Node.slashdotCacheLifetimeLong", new LongCallback() {
@Override
public Long get() {
return chkSlashdotcacheStore.getLifetime();
}
@Override
public void set(Long val) throws InvalidConfigValueException, NodeNeedRestartException {
if(val < 0) throw new InvalidConfigValueException("Must be positive!");
chkSlashdotcacheStore.setLifetime(val);
pubKeySlashdotcacheStore.setLifetime(val);
sskSlashdotcacheStore.setLifetime(val);
}
}, false);
long slashdotCacheLifetime = nodeConfig.getLong("slashdotCacheLifetime");
nodeConfig.register("slashdotCacheSize", DEFAULT_SLASHDOT_CACHE_SIZE, sortOrder++, false, true, "Node.slashdotCacheSize", "Node.slashdotCacheSizeLong",
new LongCallback() {
@Override
public Long get() {
return maxSlashdotCacheSize;
}
@Override
public void set(Long storeSize) throws InvalidConfigValueException {
if(storeSize < MIN_SLASHDOT_CACHE_SIZE)
throw new InvalidConfigValueException(l10n("invalidStoreSize"));
int newMaxStoreKeys = (int) Math.min(storeSize / sizePerKey, Integer.MAX_VALUE);
if(newMaxStoreKeys == maxSlashdotCacheKeys) return;
// Update each datastore
synchronized(Node.this) {
maxSlashdotCacheSize = storeSize;
maxSlashdotCacheKeys = newMaxStoreKeys;
}
try {
chkSlashdotcache.setMaxKeys(maxSlashdotCacheKeys, storeForceBigShrinks);
pubKeySlashdotcache.setMaxKeys(maxSlashdotCacheKeys, storeForceBigShrinks);
sskSlashdotcache.setMaxKeys(maxSlashdotCacheKeys, storeForceBigShrinks);
} catch (IOException e) {
// FIXME we need to be able to tell the user.
Logger.error(this, "Caught "+e+" resizing the slashdotcache", e);
System.err.println("Caught "+e+" resizing the slashdotcache");
e.printStackTrace();
}
}
}, true);
maxSlashdotCacheSize = nodeConfig.getLong("slashdotCacheSize");
if(maxSlashdotCacheSize < MIN_SLASHDOT_CACHE_SIZE) {
throw new NodeInitException(NodeInitException.EXIT_INVALID_STORE_SIZE, "Slashdot cache size too small");
}
maxSlashdotCacheKeys = (int) Math.min(maxSlashdotCacheSize / sizePerKey, Integer.MAX_VALUE);
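// The "slashdot cache" is a small, short-lived cache (default lifetime 30 minutes) intended to absorb
// sudden bursts of requests for the same data without polluting the main datastore.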
chkSlashdotcache = new CHKStore();
chkSlashdotcacheStore = new SlashdotStore<CHKBlock>(chkSlashdotcache, maxSlashdotCacheKeys, slashdotCacheLifetime, PURGE_INTERVAL, ticker, this.clientCore.tempBucketFactory);
pubKeySlashdotcache = new PubkeyStore();
pubKeySlashdotcacheStore = new SlashdotStore<DSAPublicKey>(pubKeySlashdotcache, maxSlashdotCacheKeys, slashdotCacheLifetime, PURGE_INTERVAL, ticker, this.clientCore.tempBucketFactory);
getPubKey.setLocalSlashdotcache(pubKeySlashdotcache);
sskSlashdotcache = new SSKStore(getPubKey);
sskSlashdotcacheStore = new SlashdotStore<SSKBlock>(sskSlashdotcache, maxSlashdotCacheKeys, slashdotCacheLifetime, PURGE_INTERVAL, ticker, this.clientCore.tempBucketFactory);
// MAXIMUM seclevel = no slashdot cache.
securityLevels.addNetworkThreatLevelListener(new SecurityLevelListener<NETWORK_THREAT_LEVEL>() {
@Override
public void onChange(NETWORK_THREAT_LEVEL oldLevel, NETWORK_THREAT_LEVEL newLevel) {
if(newLevel == NETWORK_THREAT_LEVEL.MAXIMUM)
useSlashdotCache = false;
else if(oldLevel == NETWORK_THREAT_LEVEL.MAXIMUM)
useSlashdotCache = true;
}
});
nodeConfig.register("skipWrapperWarning", false, sortOrder++, true, false, "Node.skipWrapperWarning", "Node.skipWrapperWarningLong", new BooleanCallback() {
@Override
public void set(Boolean value) throws InvalidConfigValueException, NodeNeedRestartException {
skipWrapperWarning = value;
}
@Override
public Boolean get() {
return skipWrapperWarning;
}
});
skipWrapperWarning = nodeConfig.getBoolean("skipWrapperWarning");
nodeConfig.register("maxPacketSize", 1280, sortOrder++, true, true, "Node.maxPacketSize", "Node.maxPacketSizeLong", new IntCallback() {
@Override
public Integer get() {
synchronized(Node.this) {
return maxPacketSize;
}
}
@Override
public void set(Integer val) throws InvalidConfigValueException,
NodeNeedRestartException {
synchronized(Node.this) {
if(val == maxPacketSize) return;
if(val < UdpSocketHandler.MIN_MTU) throw new InvalidConfigValueException("Must be over 576");
if(val > 1492) throw new InvalidConfigValueException("Larger than ethernet frame size unlikely to work!");
maxPacketSize = val;
}
updateMTU();
}
}, true);
maxPacketSize = nodeConfig.getInt("maxPacketSize");
nodeConfig.register("enableRoutedPing", false, sortOrder++, true, false, "Node.enableRoutedPing", "Node.enableRoutedPingLong", new BooleanCallback() {
@Override
public Boolean get() {
synchronized(Node.this) {
return enableRoutedPing;
}
}
@Override
public void set(Boolean val) throws InvalidConfigValueException,
NodeNeedRestartException {
synchronized(Node.this) {
enableRoutedPing = val;
}
}
});
enableRoutedPing = nodeConfig.getBoolean("enableRoutedPing");
updateMTU();
// peers-offers/*.fref files
peersOffersFrefFilesConfiguration(nodeConfig, sortOrder++);
if (!peersOffersDismissed && checkPeersOffersFrefFiles())
PeersOffersUserAlert.createAlert(this);
/* Take care that no configuration options are registered after this point; they will not persist
* between restarts.
*/
nodeConfig.finishedInitialization();
if(shouldWriteConfig) config.store();
writeNodeFile();
// Initialize the plugin manager
Logger.normal(this, "Initializing Plugin Manager");
System.out.println("Initializing Plugin Manager");
pluginManager = new PluginManager(this, lastVersion);
shutdownHook.addEarlyJob(new NativeThread("Shutdown plugins", NativeThread.HIGH_PRIORITY, true) {
@Override
public void realRun() {
pluginManager.stop(SECONDS.toMillis(30)); // FIXME make it configurable??
}
});
// FIXME
// Short timeouts and JVM timeouts with nothing more said than the above have been seen...
// I don't know why... need a stack dump...
// For now just give it an extra 2 minutes. If it doesn't start in that time,
// it's likely (on reports so far) that a restart will fix it.
// And we have to get a build out because ALL plugins are now failing to load,
// including the absolutely essential (for most nodes) JSTUN and UPnP.
WrapperManager.signalStarting((int) MINUTES.toMillis(2));
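// Build a deliberately restrictive FetchContext for fetching ARKs (address resolution keys):
// no splitfiles or archives, tiny size limits, since ARK noderefs are small.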
FetchContext ctx = clientCore.makeClient((short)0, true, false).getFetchContext();
ctx.allowSplitfiles = false;
ctx.dontEnterImplicitArchives = true;
ctx.maxArchiveRestarts = 0;
ctx.maxMetadataSize = 256;
ctx.maxNonSplitfileRetries = 10;
ctx.maxOutputLength = 4096;
ctx.maxRecursionLevel = 2;
ctx.maxTempLength = 4096;
this.arkFetcherContext = ctx;
registerNodeToNodeMessageListener(N2N_MESSAGE_TYPE_FPROXY, fproxyN2NMListener);
registerNodeToNodeMessageListener(Node.N2N_MESSAGE_TYPE_DIFFNODEREF, diffNoderefListener);
// FIXME this is a hack
// toadlet server should start after all initialized
// see NodeClientCore line 437
if (toadlets.isEnabled()) {
toadlets.finishStart();
toadlets.createFproxy();
toadlets.removeStartupToadlet();
}
Logger.normal(this, "Node constructor completed");
System.out.println("Node constructor completed");
new BandwidthManager(this).start();
}
private void peersOffersFrefFilesConfiguration(SubConfig nodeConfig, int configOptionSortOrder) {
final Node node = this;
nodeConfig.register("peersOffersDismissed", false, configOptionSortOrder, true, true,
"Node.peersOffersDismissed", "Node.peersOffersDismissedLong", new BooleanCallback() {
@Override
public Boolean get() {
return peersOffersDismissed;
}
@Override
public void set(Boolean val) {
if (val) {
for (UserAlert alert : clientCore.alerts.getAlerts())
if (alert instanceof PeersOffersUserAlert)
clientCore.alerts.unregister(alert);
} else
PeersOffersUserAlert.createAlert(node);
peersOffersDismissed = val;
}
});
peersOffersDismissed = nodeConfig.getBoolean("peersOffersDismissed");
}
private boolean checkPeersOffersFrefFiles() {
File[] files = runDir.file("peers-offers").listFiles();
if (files != null && files.length > 0) {
for (File file : files) {
if (file.isFile()) {
String filename = file.getName();
if (filename.endsWith(".fref"))
return true;
}
}
}
return false;
}
/** Delete files from old BDB-index datastore. */
private void deleteOldBDBIndexStoreFiles() {
File dbDir = storeDir.file("database-"+getDarknetPortNumber());
FileUtil.removeAll(dbDir);
File dir = storeDir.dir();
File[] list = dir.listFiles();
for(File f : list) {
String name = f.getName();
if(f.isFile() &&
name.toLowerCase().matches("((chk)|(ssk)|(pubkey))-[0-9]*\\.((store)|(cache))(\\.((keys)|(lru)))?")) {
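// Matches legacy store files such as "chk-12345.store" or "ssk-12345.cache.lru" (example names).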
System.out.println("Deleting old datastore file \""+f+"\"");
try {
FileUtil.secureDelete(f);
} catch (IOException e) {
System.err.println("Failed to delete old datastore file \""+f+"\": "+e);
e.printStackTrace();
}
}
}
}
private void fixCertsFiles() {
// Hack to update certificates file to fix update.cmd
// startssl.pem: Might be useful for old versions of update.sh too?
File certs = new File(PluginDownLoaderOfficialHTTPS.certfileOld);
fixCertsFile(certs);
if(FileUtil.detectedOS.isWindows) {
// updater\startssl.pem: Needed for Windows update.cmd.
certs = new File("updater", PluginDownLoaderOfficialHTTPS.certfileOld);
fixCertsFile(certs);
}
}
private void fixCertsFile(File certs) {
long oldLength = certs.exists() ? certs.length() : -1;
try {
File tmpFile = File.createTempFile(PluginDownLoaderOfficialHTTPS.certfileOld, ".tmp", new File("."));
PluginDownLoaderOfficialHTTPS.writeCertsTo(tmpFile);
if(FileUtil.renameTo(tmpFile, certs)) {
long newLength = certs.length();
if(newLength != oldLength)
System.err.println("Updated "+certs+" so that update scripts will work");
} else {
if(certs.length() != tmpFile.length()) {
System.err.println("Cannot update "+certs+" : last-resort update scripts (in particular update.cmd on Windows) may not work");
File manual = new File(PluginDownLoaderOfficialHTTPS.certfileOld+".new");
manual.delete();
if(tmpFile.renameTo(manual))
System.err.println("Please delete "+certs+" and rename "+manual+" over it");
else
tmpFile.delete();
}
}
} catch (IOException e) {
}
}
/**
** Sets up a program directory using the config value defined by the given
** parameters.
*/
public ProgramDirectory setupProgramDir(SubConfig installConfig,
String cfgKey, String defaultValue, String shortdesc, String longdesc, String moveErrMsg,
SubConfig oldConfig) throws NodeInitException {
ProgramDirectory dir = new ProgramDirectory(moveErrMsg);
int sortOrder = ProgramDirectory.nextOrder();
// forceWrite=true because currently it can't be changed on the fly, also for packages
installConfig.register(cfgKey, defaultValue, sortOrder, true, true, shortdesc, longdesc, dir.getStringCallback());
String dirName = installConfig.getString(cfgKey);
try {
dir.move(dirName);
} catch (IOException e) {
throw new NodeInitException(NodeInitException.EXIT_BAD_DIR, "could not set up directory: " + longdesc);
}
return dir;
}
protected ProgramDirectory setupProgramDir(SubConfig installConfig,
String cfgKey, String defaultValue, String shortdesc, String longdesc,
SubConfig oldConfig) throws NodeInitException {
return setupProgramDir(installConfig, cfgKey, defaultValue, shortdesc, longdesc, null, oldConfig);
}
public void lateSetupDatabase(DatabaseKey databaseKey) throws MasterKeysWrongPasswordException, MasterKeysFileSizeException, IOException {
if(clientCore.loadedDatabase()) return;
System.out.println("Starting late database initialisation");
try {
if(!clientCore.lateInitDatabase(databaseKey))
failLateInitDatabase();
} catch (NodeInitException e) {
failLateInitDatabase();
}
}
private void failLateInitDatabase() {
System.err.println("Failed late initialisation of database, closing...");
}
public void killMasterKeysFile() throws IOException {
MasterKeys.killMasterKeys(masterKeysFile);
}
private void setClientCacheAwaitingPassword() {
createPasswordUserAlert();
synchronized(this) {
clientCacheAwaitingPassword = true;
}
}
/** Called when the client layer needs the decryption password. */
void setDatabaseAwaitingPassword() {
synchronized(this) {
databaseAwaitingPassword = true;
}
}
private final UserAlert masterPasswordUserAlert = new UserAlert() {
final long creationTime = System.currentTimeMillis();
@Override
public String anchor() {
return "password";
}
@Override
public String dismissButtonText() {
return null;
}
@Override
public long getUpdatedTime() {
return creationTime;
}
@Override
public FCPMessage getFCPMessage() {
return new FeedMessage(getTitle(), getShortText(), getText(), getPriorityClass(), getUpdatedTime());
}
@Override
public HTMLNode getHTMLText() {
HTMLNode content = new HTMLNode("div");
SecurityLevelsToadlet.generatePasswordFormPage(false, clientCore.getToadletContainer(), content, false, false, false, null, null);
return content;
}
@Override
public short getPriorityClass() {
return UserAlert.ERROR;
}
@Override
public String getShortText() {
return NodeL10n.getBase().getString("SecurityLevels.enterPassword");
}
@Override
public String getText() {
return NodeL10n.getBase().getString("SecurityLevels.enterPassword");
}
@Override
public String getTitle() {
return NodeL10n.getBase().getString("SecurityLevels.enterPassword");
}
@Override
public boolean isEventNotification() {
return false;
}
@Override
public boolean isValid() {
synchronized(Node.this) {
return clientCacheAwaitingPassword || databaseAwaitingPassword;
}
}
@Override
public void isValid(boolean validity) {
// Ignore
}
@Override
public void onDismiss() {
// Ignore
}
@Override
public boolean shouldUnregisterOnDismiss() {
return false;
}
@Override
public boolean userCanDismiss() {
return false;
}
};
private void createPasswordUserAlert() {
this.clientCore.alerts.register(masterPasswordUserAlert);
}
private void initRAMClientCacheFS() {
chkClientcache = new CHKStore();
new RAMFreenetStore<CHKBlock>(chkClientcache, (int) Math.min(Integer.MAX_VALUE, maxClientCacheKeys));
pubKeyClientcache = new PubkeyStore();
new RAMFreenetStore<DSAPublicKey>(pubKeyClientcache, (int) Math.min(Integer.MAX_VALUE, maxClientCacheKeys));
sskClientcache = new SSKStore(getPubKey);
new RAMFreenetStore<SSKBlock>(sskClientcache, (int) Math.min(Integer.MAX_VALUE, maxClientCacheKeys));
}
private void initNoClientCacheFS() {
chkClientcache = new CHKStore();
new NullFreenetStore<CHKBlock>(chkClientcache);
pubKeyClientcache = new PubkeyStore();
new NullFreenetStore<DSAPublicKey>(pubKeyClientcache);
sskClientcache = new SSKStore(getPubKey);
new NullFreenetStore<SSKBlock>(sskClientcache);
}
private String getStoreSuffix() {
return "-" + getDarknetPortNumber();
}
private void finishInitSaltHashFS(final String suffix, NodeClientCore clientCore) {
if(clientCore.alerts == null) throw new NullPointerException();
chkDatastore.getStore().setUserAlertManager(clientCore.alerts);
chkDatacache.getStore().setUserAlertManager(clientCore.alerts);
pubKeyDatastore.getStore().setUserAlertManager(clientCore.alerts);
pubKeyDatacache.getStore().setUserAlertManager(clientCore.alerts);
sskDatastore.getStore().setUserAlertManager(clientCore.alerts);
sskDatacache.getStore().setUserAlertManager(clientCore.alerts);
}
private void initRAMFS() {
chkDatastore = new CHKStore();
new RAMFreenetStore<CHKBlock>(chkDatastore, (int) Math.min(Integer.MAX_VALUE, maxStoreKeys));
chkDatacache = new CHKStore();
new RAMFreenetStore<CHKBlock>(chkDatacache, (int) Math.min(Integer.MAX_VALUE, maxCacheKeys));
pubKeyDatastore = new PubkeyStore();
new RAMFreenetStore<DSAPublicKey>(pubKeyDatastore, (int) Math.min(Integer.MAX_VALUE, maxStoreKeys));
pubKeyDatacache = new PubkeyStore();
getPubKey.setDataStore(pubKeyDatastore, pubKeyDatacache);
new RAMFreenetStore<DSAPublicKey>(pubKeyDatacache, (int) Math.min(Integer.MAX_VALUE, maxCacheKeys));
sskDatastore = new SSKStore(getPubKey);
new RAMFreenetStore<SSKBlock>(sskDatastore, (int) Math.min(Integer.MAX_VALUE, maxStoreKeys));
sskDatacache = new SSKStore(getPubKey);
new RAMFreenetStore<SSKBlock>(sskDatacache, (int) Math.min(Integer.MAX_VALUE, maxCacheKeys));
}
private long cachingFreenetStoreMaxSize;
private long cachingFreenetStorePeriod;
private CachingFreenetStoreTracker cachingFreenetStoreTracker;
private void initSaltHashFS(final String suffix, boolean dontResizeOnStart, byte[] masterKey) throws NodeInitException {
try {
final CHKStore chkDatastore = new CHKStore();
final FreenetStore<CHKBlock> chkDataFS = makeStore("CHK", true, chkDatastore, dontResizeOnStart, masterKey);
final CHKStore chkDatacache = new CHKStore();
final FreenetStore<CHKBlock> chkCacheFS = makeStore("CHK", false, chkDatacache, dontResizeOnStart, masterKey);
((SaltedHashFreenetStore<CHKBlock>) chkCacheFS.getUnderlyingStore()).setAltStore(((SaltedHashFreenetStore<CHKBlock>) chkDataFS.getUnderlyingStore()));
final PubkeyStore pubKeyDatastore = new PubkeyStore();
final FreenetStore<DSAPublicKey> pubkeyDataFS = makeStore("PUBKEY", true, pubKeyDatastore, dontResizeOnStart, masterKey);
final PubkeyStore pubKeyDatacache = new PubkeyStore();
final FreenetStore<DSAPublicKey> pubkeyCacheFS = makeStore("PUBKEY", false, pubKeyDatacache, dontResizeOnStart, masterKey);
((SaltedHashFreenetStore<DSAPublicKey>) pubkeyCacheFS.getUnderlyingStore()).setAltStore(((SaltedHashFreenetStore<DSAPublicKey>) pubkeyDataFS.getUnderlyingStore()));
final SSKStore sskDatastore = new SSKStore(getPubKey);
final FreenetStore<SSKBlock> sskDataFS = makeStore("SSK", true, sskDatastore, dontResizeOnStart, masterKey);
final SSKStore sskDatacache = new SSKStore(getPubKey);
final FreenetStore<SSKBlock> sskCacheFS = makeStore("SSK", false, sskDatacache, dontResizeOnStart, masterKey);
((SaltedHashFreenetStore<SSKBlock>) sskCacheFS.getUnderlyingStore()).setAltStore(((SaltedHashFreenetStore<SSKBlock>) sskDataFS.getUnderlyingStore()));
boolean delay =
chkDataFS.start(ticker, false) |
chkCacheFS.start(ticker, false) |
pubkeyDataFS.start(ticker, false) |
pubkeyCacheFS.start(ticker, false) |
sskDataFS.start(ticker, false) |
sskCacheFS.start(ticker, false);
if(delay) {
System.err.println("Delayed init of datastore");
initRAMFS();
final Runnable migrate = new MigrateOldStoreData(false);
this.getTicker().queueTimedJob(new Runnable() {
@Override
public void run() {
System.err.println("Starting delayed init of datastore");
try {
chkDataFS.start(ticker, true);
chkCacheFS.start(ticker, true);
pubkeyDataFS.start(ticker, true);
pubkeyCacheFS.start(ticker, true);
sskDataFS.start(ticker, true);
sskCacheFS.start(ticker, true);
} catch (IOException e) {
Logger.error(this, "Failed to start datastore: "+e, e);
System.err.println("Failed to start datastore: "+e);
e.printStackTrace();
return;
}
Node.this.chkDatastore = chkDatastore;
Node.this.chkDatacache = chkDatacache;
Node.this.pubKeyDatastore = pubKeyDatastore;
Node.this.pubKeyDatacache = pubKeyDatacache;
getPubKey.setDataStore(pubKeyDatastore, pubKeyDatacache);
Node.this.sskDatastore = sskDatastore;
Node.this.sskDatacache = sskDatacache;
finishInitSaltHashFS(suffix, clientCore);
System.err.println("Finishing delayed init of datastore");
migrate.run();
}
}, "Start store", 0, true, false); // Use Ticker to guarantee that this runs *after* constructors have completed.
} else {
Node.this.chkDatastore = chkDatastore;
Node.this.chkDatacache = chkDatacache;
Node.this.pubKeyDatastore = pubKeyDatastore;
Node.this.pubKeyDatacache = pubKeyDatacache;
getPubKey.setDataStore(pubKeyDatastore, pubKeyDatacache);
Node.this.sskDatastore = sskDatastore;
Node.this.sskDatacache = sskDatacache;
this.getTicker().queueTimedJob(new Runnable() {
@Override
public void run() {
Node.this.chkDatastore = chkDatastore;
Node.this.chkDatacache = chkDatacache;
Node.this.pubKeyDatastore = pubKeyDatastore;
Node.this.pubKeyDatacache = pubKeyDatacache;
getPubKey.setDataStore(pubKeyDatastore, pubKeyDatacache);
Node.this.sskDatastore = sskDatastore;
Node.this.sskDatacache = sskDatacache;
finishInitSaltHashFS(suffix, clientCore);
}
}, "Start store", 0, true, false);
}
} catch (IOException e) {
System.err.println("Could not open store: " + e);
e.printStackTrace();
throw new NodeInitException(NodeInitException.EXIT_STORE_OTHER, e.getMessage());
}
}
private void initSaltHashClientCacheFS(final String suffix, boolean dontResizeOnStart, byte[] clientCacheMasterKey) throws NodeInitException {
try {
final CHKStore chkClientcache = new CHKStore();
final FreenetStore<CHKBlock> chkDataFS = makeClientcache("CHK", true, chkClientcache, dontResizeOnStart, clientCacheMasterKey);
final PubkeyStore pubKeyClientcache = new PubkeyStore();
final FreenetStore<DSAPublicKey> pubkeyDataFS = makeClientcache("PUBKEY", true, pubKeyClientcache, dontResizeOnStart, clientCacheMasterKey);
final SSKStore sskClientcache = new SSKStore(getPubKey);
final FreenetStore<SSKBlock> sskDataFS = makeClientcache("SSK", true, sskClientcache, dontResizeOnStart, clientCacheMasterKey);
boolean delay =
chkDataFS.start(ticker, false) |
pubkeyDataFS.start(ticker, false) |
sskDataFS.start(ticker, false);
if(delay) {
System.err.println("Delayed init of client-cache");
initRAMClientCacheFS();
final Runnable migrate = new MigrateOldStoreData(true);
getTicker().queueTimedJob(new Runnable() {
@Override
public void run() {
System.err.println("Starting delayed init of client-cache");
try {
chkDataFS.start(ticker, true);
pubkeyDataFS.start(ticker, true);
sskDataFS.start(ticker, true);
} catch (IOException e) {
Logger.error(this, "Failed to start client-cache: "+e, e);
System.err.println("Failed to start client-cache: "+e);
e.printStackTrace();
return;
}
Node.this.chkClientcache = chkClientcache;
Node.this.pubKeyClientcache = pubKeyClientcache;
getPubKey.setLocalDataStore(pubKeyClientcache);
Node.this.sskClientcache = sskClientcache;
System.err.println("Finishing delayed init of client-cache");
migrate.run();
}
}, "Migrate store", 0, true, false);
} else {
Node.this.chkClientcache = chkClientcache;
Node.this.pubKeyClientcache = pubKeyClientcache;
getPubKey.setLocalDataStore(pubKeyClientcache);
Node.this.sskClientcache = sskClientcache;
}
} catch (IOException e) {
System.err.println("Could not open store: " + e);
e.printStackTrace();
throw new NodeInitException(NodeInitException.EXIT_STORE_OTHER, e.getMessage());
}
}
private <T extends StorableBlock> FreenetStore<T> makeClientcache(String type, boolean isStore, StoreCallback<T> cb, boolean dontResizeOnStart, byte[] clientCacheMasterKey) throws IOException {
FreenetStore<T> store = makeStore(type, "clientcache", maxClientCacheKeys, cb, dontResizeOnStart, clientCacheMasterKey);
return store;
}
private <T extends StorableBlock> FreenetStore<T> makeStore(String type, boolean isStore, StoreCallback<T> cb, boolean dontResizeOnStart, byte[] clientCacheMasterKey) throws IOException {
String store = isStore ? "store" : "cache";
long maxKeys = isStore ? maxStoreKeys : maxCacheKeys;
return makeStore(type, store, maxKeys, cb, dontResizeOnStart, clientCacheMasterKey);
}
private <T extends StorableBlock> FreenetStore<T> makeStore(String type, String store, long maxKeys, StoreCallback<T> cb, boolean lateStart, byte[] clientCacheMasterKey) throws IOException {
Logger.normal(this, "Initializing "+type+" Data"+store);
System.out.println("Initializing "+type+" Data"+store+" (" + maxStoreKeys + " keys)");
SaltedHashFreenetStore<T> fs = SaltedHashFreenetStore.<T>construct(getStoreDir(), type+"-"+store, cb,
random, maxKeys, storeUseSlotFilters, shutdownHook, storePreallocate, storeSaltHashResizeOnStart && !lateStart, lateStart ? ticker : null, clientCacheMasterKey);
cb.setStore(fs);
if(cachingFreenetStoreMaxSize > 0)
return new CachingFreenetStore<T>(cb, fs, cachingFreenetStoreTracker);
else
return fs;
}
public void start(boolean noSwaps) throws NodeInitException {
// IMPORTANT: Read the peers only after we have finished initializing Node.
// Peer constructors are complex and can call methods on Node.
peers.tryReadPeers(nodeDir.file("peers-"+getDarknetPortNumber()).getPath(), darknetCrypto, null, false, false);
peers.updatePMUserAlert();
dispatcher.start(nodeStats); // must be before usm
dnsr.start();
peers.start(); // must be before usm
nodeStats.start();
uptime.start();
failureTable.start();
darknetCrypto.start();
if(opennet != null)
opennet.start();
ps.start(nodeStats);
ticker.start();
scheduleVersionTransition();
usm.start(ticker);
if(isUsingWrapper()) {
Logger.normal(this, "Using wrapper correctly: "+nodeStarter);
System.out.println("Using wrapper correctly: "+nodeStarter);
} else {
Logger.error(this, "NOT using wrapper (at least not correctly). Your freenet-ext.jar <http://downloads.freenetproject.org/alpha/freenet-ext.jar> and/or wrapper.conf <https://emu.freenetproject.org/svn/trunk/apps/installer/installclasspath/config/wrapper.conf> need to be updated.");
System.out.println("NOT using wrapper (at least not correctly). Your freenet-ext.jar <http://downloads.freenetproject.org/alpha/freenet-ext.jar> and/or wrapper.conf <https://emu.freenetproject.org/svn/trunk/apps/installer/installclasspath/config/wrapper.conf> need to be updated.");
}
Logger.normal(this, "Freenet 0.7.5 Build #"+Version.buildNumber()+" r"+Version.cvsRevision());
System.out.println("Freenet 0.7.5 Build #"+Version.buildNumber()+" r"+Version.cvsRevision());
Logger.normal(this, "FNP port is on "+darknetCrypto.getBindTo()+ ':' +getDarknetPortNumber());
System.out.println("FNP port is on "+darknetCrypto.getBindTo()+ ':' +getDarknetPortNumber());
// Start services
// SubConfig pluginManagerConfig = new SubConfig("pluginmanager3", config);
// pluginManager3 = new freenet.plugin_new.PluginManager(pluginManagerConfig);
ipDetector.start();
// Start sending swaps
lm.start();
// Node Updater
try{
Logger.normal(this, "Starting the node updater");
nodeUpdater.start();
}catch (Exception e) {
e.printStackTrace();
throw new NodeInitException(NodeInitException.EXIT_COULD_NOT_START_UPDATER, "Could not start Updater: "+e);
}
/* TODO: Make sure that this is called BEFORE any instances of HTTPFilter are created.
* HTTPFilter uses checkForGCJCharConversionBug() which returns the value of the static
* variable jvmHasGCJCharConversionBug - and this is initialized in the following function.
* If this is not possible then create a separate function to check for the GCJ bug and
* call this function earlier.
*/
checkForEvilJVMBugs();
if(!NativeThread.HAS_ENOUGH_NICE_LEVELS)
clientCore.alerts.register(new NotEnoughNiceLevelsUserAlert());
this.clientCore.start(config);
tracker.startDeadUIDChecker();
// After everything has been created, write the config file back to disk.
if(config instanceof FreenetFilePersistentConfig) {
FreenetFilePersistentConfig cfg = (FreenetFilePersistentConfig) config;
cfg.finishedInit(this.ticker);
cfg.setHasNodeStarted();
}
config.store();
// Process any data in the extra peer data directory
peers.readExtraPeerData();
Logger.normal(this, "Started node");
hasStarted = true;
}
private void scheduleVersionTransition() {
long now = System.currentTimeMillis();
long transition = Version.transitionTime();
if(now < transition)
ticker.queueTimedJob(new Runnable() {
@Override
public void run() {
freenet.support.Logger.OSThread.logPID(this);
for(PeerNode pn: peers.myPeers()) {
pn.updateVersionRoutablity();
}
}
}, transition - now);
}
private static boolean jvmHasGCJCharConversionBug=false;
private void checkForEvilJVMBugs() {
// Now check whether we are likely to get the EvilJVMBug.
// If we are running a Sun/Oracle or Blackdown JVM, on Linux, and LD_ASSUME_KERNEL is not set, then we are.
String jvmVendor = System.getProperty("java.vm.vendor");
String jvmSpecVendor = System.getProperty("java.specification.vendor","");
String javaVersion = System.getProperty("java.version");
String jvmName = System.getProperty("java.vm.name");
String osName = System.getProperty("os.name");
String osVersion = System.getProperty("os.version");
boolean isOpenJDK = false;
//boolean isOracle = false;
if(logMINOR) Logger.minor(this, "JVM vendor: "+jvmVendor+", JVM name: "+jvmName+", JVM version: "+javaVersion+", OS name: "+osName+", OS version: "+osVersion);
if(jvmName.startsWith("OpenJDK ")) {
isOpenJDK = true;
}
//Add some checks for "Oracle" to future-proof against them renaming from "Sun".
//This should have no effect: if a user has downloaded a file new enough for Oracle to have changed the name, these bugs shouldn't apply.
//Still, one never knows, and this code might be extended to cover future bugs.
if((!isOpenJDK) && (jvmVendor.startsWith("Sun ") || jvmVendor.startsWith("Oracle ")) || (jvmVendor.startsWith("The FreeBSD Foundation") && (jvmSpecVendor.startsWith("Sun ") || jvmSpecVendor.startsWith("Oracle "))) || (jvmVendor.startsWith("Apple "))) {
//isOracle = true;
// Sun/Oracle bugs
// Spurious OOMs
// http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4855795
// http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=2138757
// http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=2138759
// Fixed in 1.5.0_10 and 1.4.2_13
boolean is150 = javaVersion.startsWith("1.5.0_");
boolean is160 = javaVersion.startsWith("1.6.0_");
if(is150 || is160) {
String[] split = javaVersion.split("_");
String secondPart = split[1];
if(secondPart.indexOf("-") != -1) {
split = secondPart.split("-");
secondPart = split[0];
}
int subver = Integer.parseInt(secondPart);
Logger.minor(this, "JVM version: "+javaVersion+" subver: "+subver+" from "+secondPart);
}
} else if (jvmVendor.startsWith("Apple ") || jvmVendor.startsWith("\"Apple ")) {
//Note that Sun/Oracle does not produce VMs for the Macintosh operating system, so don't ask the user to find one...
} else if(!isOpenJDK) {
if(jvmVendor.startsWith("Free Software Foundation")) {
// GCJ/GIJ.
try {
javaVersion = System.getProperty("java.version").split(" ")[0].replaceAll("[.]","");
int jvmVersionInt = Integer.parseInt(javaVersion);
if(jvmVersionInt <= 422 && jvmVersionInt >= 100) // make sure that no bogus values cause true
jvmHasGCJCharConversionBug=true;
}
catch(Throwable t) {
Logger.error(this, "GCJ version check is broken!", t);
}
clientCore.alerts.register(new SimpleUserAlert(true, l10n("usingGCJTitle"), l10n("usingGCJ"), l10n("usingGCJTitle"), UserAlert.WARNING));
}
}
if(!isUsingWrapper() && !skipWrapperWarning) {
clientCore.alerts.register(new SimpleUserAlert(true, l10n("notUsingWrapperTitle"), l10n("notUsingWrapper"), l10n("notUsingWrapperShort"), UserAlert.WARNING));
}
// Unfortunately Debian's version of OpenJDK appears to have segfaulting issues,
// which are presumably exploitable.
// So we can't recommend people switch just yet. :(
// if(isOracle && Rijndael.AesCtrProvider == null) {
// if(!(FileUtil.detectedOS == FileUtil.OperatingSystem.Windows || FileUtil.detectedOS == FileUtil.OperatingSystem.MacOS))
// clientCore.alerts.register(new SimpleUserAlert(true, l10n("usingOracleTitle"), l10n("usingOracle"), l10n("usingOracleTitle"), UserAlert.WARNING));
// }
}
public static boolean checkForGCJCharConversionBug() {
return jvmHasGCJCharConversionBug; // should be initialized on early startup
}
private String l10n(String key) {
return NodeL10n.getBase().getString("Node."+key);
}
private String l10n(String key, String pattern, String value) {
return NodeL10n.getBase().getString("Node."+key, pattern, value);
}
private String l10n(String key, String[] pattern, String[] value) {
return NodeL10n.getBase().getString("Node."+key, pattern, value);
}
/**
* Export volatile data about the node as a SimpleFieldSet
*/
public SimpleFieldSet exportVolatileFieldSet() {
return nodeStats.exportVolatileFieldSet();
}
/**
* Do a routed ping of another node on the network by its location.
* @param loc2 The location of the other node to ping. It must match
* exactly.
* @param pubKeyHash The hash of the pubkey of the target node. We match
* by location; this is just a shortcut if we get close.
* @return The number of hops it took to find the node, if it was found.
* Otherwise -1.
*/
public int routedPing(double loc2, byte[] pubKeyHash) {
long uid = random.nextLong();
int initialX = random.nextInt();
Message m = DMT.createFNPRoutedPing(uid, loc2, maxHTL, initialX, pubKeyHash);
Logger.normal(this, "Message: "+m);
dispatcher.handleRouted(m, null);
// FIXME: might be rejected
MessageFilter mf1 = MessageFilter.create().setField(DMT.UID, uid).setType(DMT.FNPRoutedPong).setTimeout(5000);
try {
//MessageFilter mf2 = MessageFilter.create().setField(DMT.UID, uid).setType(DMT.FNPRoutedRejected).setTimeout(5000);
// Ignore Rejected - let it be retried on other peers
m = usm.waitFor(mf1/*.or(mf2)*/, null);
} catch (DisconnectedException e) {
Logger.normal(this, "Disconnected in waiting for pong");
return -1;
}
if(m == null) return -1;
if(m.getSpec() == DMT.FNPRoutedRejected) return -1;
return m.getInt(DMT.COUNTER) - initialX;
}
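// Illustrative usage sketch (the location and pubkey hash below are placeholders):
//
//   double targetLoc = 0.42;                // must match the target node's location exactly
//   byte[] targetPubKeyHash = new byte[32]; // placeholder: hash of the target node's pubkey
//   int hops = routedPing(targetLoc, targetPubKeyHash);
//   System.out.println(hops >= 0 ? "Found target after " + hops + " hops" : "Not found");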
/**
* Look for a block in the datastore, as part of a request.
* @param key The key to fetch.
* @param uid The UID of the request (for logging only).
* @param promoteCache Whether to promote the key if found.
* @param canReadClientCache If the request is local, we can read the client cache.
* @param canWriteClientCache If the request is local, and the client hasn't turned off
* writing to the client cache, we can write to the client cache.
* @param canWriteDatastore If the request HTL is too high, including if it is local, we
* cannot write to the datastore.
* @return A KeyBlock for the key requested or null.
*/
private KeyBlock makeRequestLocal(Key key, long uid, boolean canReadClientCache, boolean canWriteClientCache, boolean canWriteDatastore, boolean offersOnly) {
KeyBlock kb = null;
if (key instanceof NodeCHK) {
kb = fetch(key, false, canReadClientCache, canWriteClientCache, canWriteDatastore, null);
} else if (key instanceof NodeSSK) {
NodeSSK sskKey = (NodeSSK) key;
DSAPublicKey pubKey = sskKey.getPubKey();
if (pubKey == null) {
pubKey = getPubKey.getKey(sskKey.getPubKeyHash(), canReadClientCache, offersOnly, null);
if (logMINOR)
Logger.minor(this, "Fetched pubkey: " + pubKey);
try {
sskKey.setPubKey(pubKey);
} catch (SSKVerifyException e) {
Logger.error(this, "Error setting pubkey: " + e, e);
}
}
if (pubKey != null) {
if (logMINOR)
Logger.minor(this, "Got pubkey: " + pubKey);
kb = fetch(sskKey, canReadClientCache, canWriteClientCache, canWriteDatastore, false, null);
} else {
if (logMINOR)
Logger.minor(this, "Not found because no pubkey: " + uid);
}
} else
throw new IllegalStateException("Unknown key type: " + key.getClass());
if (kb != null) {
// Probably somebody waiting for it. Trip it.
if (clientCore != null && clientCore.requestStarters != null) {
if (kb instanceof CHKBlock) {
clientCore.requestStarters.chkFetchSchedulerBulk.tripPendingKey(kb);
clientCore.requestStarters.chkFetchSchedulerRT.tripPendingKey(kb);
} else {
clientCore.requestStarters.sskFetchSchedulerBulk.tripPendingKey(kb);
clientCore.requestStarters.sskFetchSchedulerRT.tripPendingKey(kb);
}
}
failureTable.onFound(kb);
return kb;
}
return null;
}
/**
* Check the datastore, then if the key is not in the store,
* check whether another node is requesting the same key at
* the same HTL, and if all else fails, create a new
* RequestSender for the key/htl.
* @param localOnly If true, only check the datastore.
* @return A KeyBlock if the data is in the store, otherwise
* a RequestSender, unless the HTL is 0, in which case NULL.
*/
public Object makeRequestSender(Key key, short htl, long uid, RequestTag tag, PeerNode source, boolean localOnly, boolean ignoreStore, boolean offersOnly, boolean canReadClientCache, boolean canWriteClientCache, boolean realTimeFlag) {
boolean canWriteDatastore = canWriteDatastoreRequest(htl);
if(logMINOR) Logger.minor(this, "makeRequestSender("+key+ ',' +htl+ ',' +uid+ ',' +source+") on "+getDarknetPortNumber());
// In store?
if(!ignoreStore) {
KeyBlock kb = makeRequestLocal(key, uid, canReadClientCache, canWriteClientCache, canWriteDatastore, offersOnly);
if (kb != null)
return kb;
}
if(localOnly) return null;
if(logMINOR) Logger.minor(this, "Not in store locally");
// Transfer coalescing - match key only as HTL irrelevant
RequestSender sender = key instanceof NodeCHK ?
tracker.getTransferringRequestSenderByKey((NodeCHK)key, realTimeFlag) : null;
if(sender != null) {
if(logMINOR) Logger.minor(this, "Data already being transferred: "+sender);
sender.setTransferCoalesced();
tag.setSender(sender, true);
return sender;
}
// HTL == 0 => Don't search further
if(htl == 0) {
if(logMINOR) Logger.minor(this, "No HTL");
return null;
}
sender = new RequestSender(key, null, htl, uid, tag, this, source, offersOnly, canWriteClientCache, canWriteDatastore, realTimeFlag);
tag.setSender(sender, false);
sender.start();
if(logMINOR) Logger.minor(this, "Created new sender: "+sender);
return sender;
}
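// Condensed, illustrative sketch of how callers typically interpret the return value
// above (parameter values are placeholders):
//
//   Object r = makeRequestSender(key, htl, uid, tag, source,
//           false, false, false, false, false, realTimeFlag);
//   if(r == null) {
//       // not in the store, and either localOnly was set or the HTL was exhausted
//   } else if(r instanceof KeyBlock) {
//       // served directly from the local datastore
//   } else {
//       // a new or coalesced RequestSender handling the remote request
//   }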
/** Can we write to the datastore for a given request?
* We do not write to the datastore until 2 hops below maximum. This is an average of 4
* hops from the originator. Thus, data returned from local requests is never cached,
* finally solving The Register's attack, Bloom filter sharing doesn't give away your local
* requests and inserts, and *anything starting at high HTL* is not cached, including stuff
* from other nodes which hasn't been decremented far enough yet, so it's not ONLY local
* requests that don't get cached. */
boolean canWriteDatastoreRequest(short htl) {
return htl <= (maxHTL - 2);
}
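// Illustrative example, assuming the usual default maxHTL of 18:
//
//   canWriteDatastoreRequest((short) 18); // false - within 2 hops of the maximum
//   canWriteDatastoreRequest((short) 17); // false
//   canWriteDatastoreRequest((short) 16); // true  - at least 2 hops below the maximum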
/** Can we write to the datastore for a given insert?
* We do not write to the datastore until 3 hops below maximum. This is an average of 5
* hops from the originator. Thus, data sent by local inserts is never cached,
* finally solving The Register's attack, Bloom filter sharing doesn't give away your local
* requests and inserts, and *anything starting at high HTL* is not cached, including stuff
* from other nodes which hasn't been decremented far enough yet, so it's not ONLY local
* inserts that don't get cached. */
boolean canWriteDatastoreInsert(short htl) {
return htl <= (maxHTL - 3);
}
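// With the same assumed maxHTL of 18, inserts are held back one hop further than
// requests (illustrative only):
//
//   canWriteDatastoreInsert((short) 16); // false - only 2 hops below the maximum
//   canWriteDatastoreInsert((short) 15); // true  - 3 or more hops below the maximum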
/**
* Fetch a block from the datastore.
* @param key
* @param canReadClientCache
* @param canWriteClientCache
* @param canWriteDatastore
* @param forULPR
* @param mustBeMarkedAsPostCachingChanges If true, the key must have the
* ENTRY_NEW_BLOCK flag (if saltedhash), indicating that it a) has been added
* since the caching changes in 1224 (since we didn't delete the stores), and b)
* that it wasn't added due to low network security caching everything, unless we
* are currently in low network security mode. Only applies to main store.
*/
public KeyBlock fetch(Key key, boolean canReadClientCache, boolean canWriteClientCache, boolean canWriteDatastore, boolean forULPR, BlockMetadata meta) {
if(key instanceof NodeSSK)
return fetch((NodeSSK)key, false, canReadClientCache, canWriteClientCache, canWriteDatastore, forULPR, meta);
else if(key instanceof NodeCHK)
return fetch((NodeCHK)key, false, canReadClientCache, canWriteClientCache, canWriteDatastore, forULPR, meta);
else throw new IllegalArgumentException();
}
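// Illustrative call (values are placeholders): a typical remote request reads only the
// shared stores, so both client-cache flags are false:
//
//   KeyBlock block = fetch(key, false /*canReadClientCache*/, false /*canWriteClientCache*/,
//           canWriteDatastoreRequest(htl), false /*forULPR*/, null /*meta*/);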
public SSKBlock fetch(NodeSSK key, boolean dontPromote, boolean canReadClientCache, boolean canWriteClientCache, boolean canWriteDatastore, boolean forULPR, BlockMetadata meta) {
double loc=key.toNormalizedDouble();
double dist=Location.distance(lm.getLocation(), loc);
if(canReadClientCache) {
try {
SSKBlock block = sskClientcache.fetch(key, dontPromote || !canWriteClientCache, canReadClientCache, forULPR, false, meta);
if(block != null) {
nodeStats.avgClientCacheSSKSuccess.report(loc);
if (dist > nodeStats.furthestClientCacheSSKSuccess)
nodeStats.furthestClientCacheSSKSuccess=dist;
if(logDEBUG) Logger.debug(this, "Found key "+key+" in client-cache");
return block;
}
} catch (IOException e) {
Logger.error(this, "Could not read from client cache: "+e, e);
}
}
if(forULPR || useSlashdotCache || canReadClientCache) {
try {
SSKBlock block = sskSlashdotcache.fetch(key, dontPromote, canReadClientCache, forULPR, false, meta);
if(block != null) {
nodeStats.avgSlashdotCacheSSKSuccess.report(loc);
if (dist > nodeStats.furthestSlashdotCacheSSKSuccess)
nodeStats.furthestSlashdotCacheSSKSuccess=dist;
if(logDEBUG) Logger.debug(this, "Found key "+key+" in slashdot-cache");
return block;
}
} catch (IOException e) {
Logger.error(this, "Could not read from slashdot/ULPR cache: "+e, e);
}
}
boolean ignoreOldBlocks = !writeLocalToDatastore;
if(canReadClientCache) ignoreOldBlocks = false;
if(logMINOR) dumpStoreHits();
try {
nodeStats.avgRequestLocation.report(loc);
SSKBlock block = sskDatastore.fetch(key, dontPromote || !canWriteDatastore, canReadClientCache, forULPR, ignoreOldBlocks, meta);
if(block == null) {
SSKStore store = oldSSK;
if(store != null)
block = store.fetch(key, dontPromote || !canWriteDatastore, canReadClientCache, forULPR, ignoreOldBlocks, meta);
}
if(block != null) {
nodeStats.avgStoreSSKSuccess.report(loc);
if (dist > nodeStats.furthestStoreSSKSuccess)
nodeStats.furthestStoreSSKSuccess=dist;
if(logDEBUG) Logger.debug(this, "Found key "+key+" in store");
return block;
}
block=sskDatacache.fetch(key, dontPromote || !canWriteDatastore, canReadClientCache, forULPR, ignoreOldBlocks, meta);
if(block == null) {
SSKStore store = oldSSKCache;
if(store != null)
block = store.fetch(key, dontPromote || !canWriteDatastore, canReadClientCache, forULPR, ignoreOldBlocks, meta);
}
if (block != null) {
nodeStats.avgCacheSSKSuccess.report(loc);
if (dist > nodeStats.furthestCacheSSKSuccess)
nodeStats.furthestCacheSSKSuccess=dist;
if(logDEBUG) Logger.debug(this, "Found key "+key+" in cache");
}
return block;
} catch (IOException e) {
Logger.error(this, "Cannot fetch data: "+e, e);
return null;
}
}
public CHKBlock fetch(NodeCHK key, boolean dontPromote, boolean canReadClientCache, boolean canWriteClientCache, boolean canWriteDatastore, boolean forULPR, BlockMetadata meta) {
double loc=key.toNormalizedDouble();
double dist=Location.distance(lm.getLocation(), loc);
if(canReadClientCache) {
try {
CHKBlock block = chkClientcache.fetch(key, dontPromote || !canWriteClientCache, false, meta);
if(block != null) {
nodeStats.avgClientCacheCHKSuccess.report(loc);
if (dist > nodeStats.furthestClientCacheCHKSuccess)
nodeStats.furthestClientCacheCHKSuccess=dist;
return block;
}
} catch (IOException e) {
Logger.error(this, "Could not read from client cache: "+e, e);
}
}
if(forULPR || useSlashdotCache || canReadClientCache) {
try {
CHKBlock block = chkSlashdotcache.fetch(key, dontPromote, false, meta);
if(block != null) {
nodeStats.avgSlashdotCacheCHKSucess.report(loc);
if (dist > nodeStats.furthestSlashdotCacheCHKSuccess)
nodeStats.furthestSlashdotCacheCHKSuccess=dist;
return block;
}
} catch (IOException e) {
Logger.error(this, "Could not read from slashdot/ULPR cache: "+e, e);
}
}
boolean ignoreOldBlocks = !writeLocalToDatastore;
if(canReadClientCache) ignoreOldBlocks = false;
if(logMINOR) dumpStoreHits();
try {
nodeStats.avgRequestLocation.report(loc);
CHKBlock block = chkDatastore.fetch(key, dontPromote || !canWriteDatastore, ignoreOldBlocks, meta);
if(block == null) {
CHKStore store = oldCHK;
if(store != null)
block = store.fetch(key, dontPromote || !canWriteDatastore, ignoreOldBlocks, meta);
}
if (block != null) {
nodeStats.avgStoreCHKSuccess.report(loc);
if (dist > nodeStats.furthestStoreCHKSuccess)
nodeStats.furthestStoreCHKSuccess=dist;
return block;
}
block=chkDatacache.fetch(key, dontPromote || !canWriteDatastore, ignoreOldBlocks, meta);
if(block == null) {
CHKStore store = oldCHKCache;
if(store != null)
block = store.fetch(key, dontPromote || !canWriteDatastore, ignoreOldBlocks, meta);
}
if (block != null) {
nodeStats.avgCacheCHKSuccess.report(loc);
if (dist > nodeStats.furthestCacheCHKSuccess)
nodeStats.furthestCacheCHKSuccess=dist;
}
return block;
} catch (IOException e) {
Logger.error(this, "Cannot fetch data: "+e, e);
return null;
}
}
CHKStore getChkDatacache() {
return chkDatacache;
}
CHKStore getChkDatastore() {
return chkDatastore;
}
SSKStore getSskDatacache() {
return sskDatacache;
}
SSKStore getSskDatastore() {
return sskDatastore;
}
CHKStore getChkSlashdotCache() {
return chkSlashdotcache;
}
CHKStore getChkClientCache() {
return chkClientcache;
}
SSKStore getSskSlashdotCache() {
return sskSlashdotcache;
}
SSKStore getSskClientCache() {
return sskClientcache;
}
/**
* This method returns all statistics info for our data store stats table
*
* @return map that has an entry for each data store instance type and corresponding stats
*/
public Map<DataStoreInstanceType, DataStoreStats> getDataStoreStats() {
Map<DataStoreInstanceType, DataStoreStats> map = new LinkedHashMap<DataStoreInstanceType, DataStoreStats>();
map.put(new DataStoreInstanceType(CHK, STORE), new StoreCallbackStats(chkDatastore, nodeStats.chkStoreStats()));
map.put(new DataStoreInstanceType(CHK, CACHE), new StoreCallbackStats(chkDatacache, nodeStats.chkCacheStats()));
map.put(new DataStoreInstanceType(CHK, SLASHDOT), new StoreCallbackStats(chkSlashdotcache,nodeStats.chkSlashDotCacheStats()));
map.put(new DataStoreInstanceType(CHK, CLIENT), new StoreCallbackStats(chkClientcache, nodeStats.chkClientCacheStats()));
map.put(new DataStoreInstanceType(SSK, STORE), new StoreCallbackStats(sskDatastore, nodeStats.sskStoreStats()));
map.put(new DataStoreInstanceType(SSK, CACHE), new StoreCallbackStats(sskDatacache, nodeStats.sskCacheStats()));
map.put(new DataStoreInstanceType(SSK, SLASHDOT), new StoreCallbackStats(sskSlashdotcache, nodeStats.sskSlashDotCacheStats()));
map.put(new DataStoreInstanceType(SSK, CLIENT), new StoreCallbackStats(sskClientcache, nodeStats.sskClientCacheStats()));
map.put(new DataStoreInstanceType(PUB_KEY, STORE), new StoreCallbackStats(pubKeyDatastore, new NotAvailNodeStoreStats()));
map.put(new DataStoreInstanceType(PUB_KEY, CACHE), new StoreCallbackStats(pubKeyDatacache, new NotAvailNodeStoreStats()));
map.put(new DataStoreInstanceType(PUB_KEY, SLASHDOT), new StoreCallbackStats(pubKeySlashdotcache, new NotAvailNodeStoreStats()));
map.put(new DataStoreInstanceType(PUB_KEY, CLIENT), new StoreCallbackStats(pubKeyClientcache, new NotAvailNodeStoreStats()));
return map;
}
public long getMaxTotalKeys() {
return maxTotalKeys;
}
long timeLastDumpedHits;
public void dumpStoreHits() {
long now = System.currentTimeMillis();
if(now - timeLastDumpedHits > 5000) {
timeLastDumpedHits = now;
} else return;
Logger.minor(this, "Distribution of hits and misses over stores:\n"+
"CHK Datastore: "+chkDatastore.hits()+ '/' +(chkDatastore.hits()+chkDatastore.misses())+ '/' +chkDatastore.keyCount()+
"\nCHK Datacache: "+chkDatacache.hits()+ '/' +(chkDatacache.hits()+chkDatacache.misses())+ '/' +chkDatacache.keyCount()+
"\nSSK Datastore: "+sskDatastore.hits()+ '/' +(sskDatastore.hits()+sskDatastore.misses())+ '/' +sskDatastore.keyCount()+
"\nSSK Datacache: "+sskDatacache.hits()+ '/' +(sskDatacache.hits()+sskDatacache.misses())+ '/' +sskDatacache.keyCount());
}
public void storeShallow(CHKBlock block, boolean canWriteClientCache, boolean canWriteDatastore, boolean forULPR) {
store(block, false, canWriteClientCache, canWriteDatastore, forULPR);
}
/**
* Store a datum.
* @param block
* a KeyBlock
* @param deep If true, insert to the store as well as the cache. Do not set
* this to true unless the store results from an insert, and this node is the
* closest node to the target; see the description of chkDatastore.
*/
public void store(KeyBlock block, boolean deep, boolean canWriteClientCache, boolean canWriteDatastore, boolean forULPR) throws KeyCollisionException {
if(block instanceof CHKBlock)
store((CHKBlock)block, deep, canWriteClientCache, canWriteDatastore, forULPR);
else if(block instanceof SSKBlock)
store((SSKBlock)block, deep, false, canWriteClientCache, canWriteDatastore, forULPR);
else throw new IllegalArgumentException("Unknown keytype ");
}
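// Illustrative sketch of the two storage paths (values are placeholders): a request that
// may cache the block stores it shallowly; only an insert for which this node is the sink
// passes deep=true:
//
//   storeShallow(chkBlock, canWriteClientCache, canWriteDatastoreRequest(htl), false);
//   // versus, at the end of a successful insert where we are the closest node:
//   // store(chkBlock, true, canWriteClientCache, canWriteDatastoreInsert(htl), false);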
private void store(CHKBlock block, boolean deep, boolean canWriteClientCache, boolean canWriteDatastore, boolean forULPR) {
try {
double loc = block.getKey().toNormalizedDouble();
if (canWriteClientCache) {
chkClientcache.put(block, false);
nodeStats.avgClientCacheCHKLocation.report(loc);
}
if ((forULPR || useSlashdotCache) && !(canWriteDatastore || writeLocalToDatastore)) {
chkSlashdotcache.put(block, false);
nodeStats.avgSlashdotCacheCHKLocation.report(loc);
}
if (canWriteDatastore || writeLocalToDatastore) {
if (deep) {
chkDatastore.put(block, !canWriteDatastore);
nodeStats.avgStoreCHKLocation.report(loc);
}
chkDatacache.put(block, !canWriteDatastore);
nodeStats.avgCacheCHKLocation.report(loc);
}
if (canWriteDatastore || forULPR || useSlashdotCache)
failureTable.onFound(block);
} catch (IOException e) {
Logger.error(this, "Cannot store data: "+e, e);
} catch (Throwable t) {
System.err.println(t);
t.printStackTrace();
Logger.error(this, "Caught "+t+" storing data", t);
}
if(clientCore != null && clientCore.requestStarters != null) {
clientCore.requestStarters.chkFetchSchedulerBulk.tripPendingKey(block);
clientCore.requestStarters.chkFetchSchedulerRT.tripPendingKey(block);
}
}
/** Store the block if this is a sink. Call for inserts. */
public void storeInsert(SSKBlock block, boolean deep, boolean overwrite, boolean canWriteClientCache, boolean canWriteDatastore) throws KeyCollisionException {
store(block, deep, overwrite, canWriteClientCache, canWriteDatastore, false);
}
/** Store only to the cache, and not the store. Called by requests,
* as only inserts cause data to be added to the store. */
public void storeShallow(SSKBlock block, boolean canWriteClientCache, boolean canWriteDatastore, boolean fromULPR) throws KeyCollisionException {
store(block, false, canWriteClientCache, canWriteDatastore, fromULPR);
}
public void store(SSKBlock block, boolean deep, boolean overwrite, boolean canWriteClientCache, boolean canWriteDatastore, boolean forULPR) throws KeyCollisionException {
try {
// Store the pubkey before storing the data, otherwise we can get a race condition and
// end up deleting the SSK data.
double loc = block.getKey().toNormalizedDouble();
getPubKey.cacheKey((block.getKey()).getPubKeyHash(), (block.getKey()).getPubKey(), deep, canWriteClientCache, canWriteDatastore, forULPR || useSlashdotCache, writeLocalToDatastore);
if(canWriteClientCache) {
sskClientcache.put(block, overwrite, false);
nodeStats.avgClientCacheSSKLocation.report(loc);
}
if((forULPR || useSlashdotCache) && !(canWriteDatastore || writeLocalToDatastore)) {
sskSlashdotcache.put(block, overwrite, false);
nodeStats.avgSlashdotCacheSSKLocation.report(loc);
}
if(canWriteDatastore || writeLocalToDatastore) {
if(deep) {
sskDatastore.put(block, overwrite, !canWriteDatastore);
nodeStats.avgStoreSSKLocation.report(loc);
}
sskDatacache.put(block, overwrite, !canWriteDatastore);
nodeStats.avgCacheSSKLocation.report(loc);
}
if(canWriteDatastore || forULPR || useSlashdotCache)
failureTable.onFound(block);
} catch (IOException e) {
Logger.error(this, "Cannot store data: "+e, e);
} catch (KeyCollisionException e) {
throw e;
} catch (Throwable t) {
System.err.println(t);
t.printStackTrace();
Logger.error(this, "Caught "+t+" storing data", t);
}
if(clientCore != null && clientCore.requestStarters != null) {
clientCore.requestStarters.sskFetchSchedulerBulk.tripPendingKey(block);
clientCore.requestStarters.sskFetchSchedulerRT.tripPendingKey(block);
}
}
final boolean decrementAtMax;
final boolean decrementAtMin;
/**
* Decrement the HTL according to the policy of the given
* PeerNode if it is non-null, or apply this node's own
* decrement policy if it is null.
*/
public short decrementHTL(PeerNode source, short htl) {
if(source != null)
return source.decrementHTL(htl);
// Otherwise...
if(htl >= maxHTL) htl = maxHTL;
if(htl <= 0) {
return 0;
}
if(htl == maxHTL) {
if(decrementAtMax || disableProbabilisticHTLs) htl--;
return htl;
}
if(htl == 1) {
if(decrementAtMin || disableProbabilisticHTLs) htl--;
return htl;
}
return --htl;
}
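// Illustrative behaviour for a locally originated request (source == null):
//
//   decrementHTL(null, maxHTL());    // stays at maxHTL unless decrementAtMax is set
//   decrementHTL(null, (short) 10);  // intermediate values always decrement, here to 9
//   decrementHTL(null, (short) 1);   // stays at 1 unless decrementAtMin is set
//
// (decrementAtMax/decrementAtMin, or disableProbabilisticHTLs, control whether the
// endpoint values are decremented deterministically.)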
/**
* Fetch or create a CHKInsertSender for a given key/htl.
* @param key The key to be inserted.
* @param htl The current HTL. We can't coalesce inserts across
* HTLs.
* @param uid The UID of the caller's request chain, or a new
* one. This is obviously not used if there is already a
* CHKInsertSender running.
* @param source The node that sent the InsertRequest, or null
* if it originated locally.
* @param ignoreLowBackoff
* @param preferInsert
*/
public CHKInsertSender makeInsertSender(NodeCHK key, short htl, long uid, InsertTag tag, PeerNode source,
byte[] headers, PartiallyReceivedBlock prb, boolean fromStore, boolean canWriteClientCache, boolean forkOnCacheable, boolean preferInsert, boolean ignoreLowBackoff, boolean realTimeFlag) {
if(logMINOR) Logger.minor(this, "makeInsertSender("+key+ ',' +htl+ ',' +uid+ ',' +source+",...,"+fromStore);
CHKInsertSender is = null;
is = new CHKInsertSender(key, uid, tag, headers, htl, source, this, prb, fromStore, canWriteClientCache, forkOnCacheable, preferInsert, ignoreLowBackoff,realTimeFlag);
is.start();
// CHKInsertSender adds itself to insertSenders
return is;
}
/**
* Fetch or create an SSKInsertSender for a given key/htl.
* @param key The key to be inserted.
* @param htl The current HTL. We can't coalesce inserts across
* HTLs.
* @param uid The UID of the caller's request chain, or a new
* one. This is obviously not used if there is already an
* SSKInsertSender running.
* @param source The node that sent the InsertRequest, or null
* if it originated locally.
* @param ignoreLowBackoff
* @param preferInsert
*/
public SSKInsertSender makeInsertSender(SSKBlock block, short htl, long uid, InsertTag tag, PeerNode source,
boolean fromStore, boolean canWriteClientCache, boolean canWriteDatastore, boolean forkOnCacheable, boolean preferInsert, boolean ignoreLowBackoff, boolean realTimeFlag) {
NodeSSK key = block.getKey();
if(key.getPubKey() == null) {
throw new IllegalArgumentException("No pub key when inserting");
}
getPubKey.cacheKey(key.getPubKeyHash(), key.getPubKey(), false, canWriteClientCache, canWriteDatastore, false, writeLocalToDatastore);
Logger.minor(this, "makeInsertSender("+key+ ',' +htl+ ',' +uid+ ',' +source+",...,"+fromStore);
SSKInsertSender is = null;
is = new SSKInsertSender(block, uid, tag, htl, source, this, fromStore, canWriteClientCache, forkOnCacheable, preferInsert, ignoreLowBackoff, realTimeFlag);
is.start();
return is;
}
/**
* @return Some status information.
*/
public String getStatus() {
StringBuilder sb = new StringBuilder();
if (peers != null)
sb.append(peers.getStatus());
else
sb.append("No peers yet");
sb.append(tracker.getNumTransferringRequestSenders());
sb.append('\n');
return sb.toString();
}
/**
* @return TMCI peer list
*/
public String getTMCIPeerList() {
StringBuilder sb = new StringBuilder();
if (peers != null)
sb.append(peers.getTMCIPeerList());
else
sb.append("No peers yet");
return sb.toString();
}
/** Length of signature parameters R and S */
static final int SIGNATURE_PARAMETER_LENGTH = 32;
public ClientKeyBlock fetchKey(ClientKey key, boolean canReadClientCache, boolean canWriteClientCache, boolean canWriteDatastore) throws KeyVerifyException {
if(key instanceof ClientCHK)
return fetch((ClientCHK)key, canReadClientCache, canWriteClientCache, canWriteDatastore);
else if(key instanceof ClientSSK)
return fetch((ClientSSK)key, canReadClientCache, canWriteClientCache, canWriteDatastore);
else
throw new IllegalStateException("Don't know what to do with "+key);
}
public ClientKeyBlock fetch(ClientSSK clientSSK, boolean canReadClientCache, boolean canWriteClientCache, boolean canWriteDatastore) throws SSKVerifyException {
DSAPublicKey key = clientSSK.getPubKey();
if(key == null) {
key = getPubKey.getKey(clientSSK.pubKeyHash, canReadClientCache, false, null);
}
if(key == null) return null;
clientSSK.setPublicKey(key);
SSKBlock block = fetch((NodeSSK)clientSSK.getNodeKey(true), false, canReadClientCache, canWriteClientCache, canWriteDatastore, false, null);
if(block == null) {
if(logMINOR)
Logger.minor(this, "Could not find key for "+clientSSK);
return null;
}
// Move the pubkey to the top of the LRU, and fix it if it
// was corrupt.
getPubKey.cacheKey(clientSSK.pubKeyHash, key, false, canWriteClientCache, canWriteDatastore, false, writeLocalToDatastore);
return ClientSSKBlock.construct(block, clientSSK);
}
private ClientKeyBlock fetch(ClientCHK clientCHK, boolean canReadClientCache, boolean canWriteClientCache, boolean canWriteDatastore) throws CHKVerifyException {
CHKBlock block = fetch(clientCHK.getNodeCHK(), false, canReadClientCache, canWriteClientCache, canWriteDatastore, false, null);
if(block == null) return null;
return new ClientCHKBlock(block, clientCHK);
}
public void exit(int reason) {
try {
this.park();
System.out.println("Goodbye.");
System.out.println(reason);
} finally {
System.exit(reason);
}
}
public void exit(String reason){
try {
this.park();
System.out.println("Goodbye. from "+this+" ("+reason+ ')');
} finally {
System.exit(0);
}
}
/**
* Returns true if the node is shutting down.
* The packet receiver calls this for every packet; reads of a boolean are atomic, so this method is not synchronized.
*/
public boolean isStopping() {
return isStopping;
}
/**
* Get the node into a state where it can be stopped safely
* May be called twice - once in exit (above) and then again
* from the wrapper triggered by calling System.exit(). Beware!
*/
public void park() {
synchronized(this) {
if(isStopping) return;
isStopping = true;
}
try {
Message msg = DMT.createFNPDisconnect(false, false, -1, new ShortBuffer(new byte[0]));
peers.localBroadcast(msg, true, false, peers.ctrDisconn);
} catch (Throwable t) {
try {
// E.g. if we haven't finished startup
Logger.error(this, "Failed to tell peers we are going down: "+t, t);
} catch (Throwable t1) {
// Ignore. We don't want to mess up the exit process!
}
}
config.store();
if(random instanceof PersistentRandomSource) {
((PersistentRandomSource) random).write_seed(true);
}
}
public NodeUpdateManager getNodeUpdater(){
return nodeUpdater;
}
public DarknetPeerNode[] getDarknetConnections() {
return peers.getDarknetPeers();
}
public boolean addPeerConnection(PeerNode pn) {
boolean retval = peers.addPeer(pn);
peers.writePeersUrgent(pn.isOpennet());
return retval;
}
public void removePeerConnection(PeerNode pn) {
peers.disconnectAndRemove(pn, true, false, false);
}
public void onConnectedPeer() {
if(logMINOR) Logger.minor(this, "onConnectedPeer()");
ipDetector.onConnectedPeer();
}
public int getFNPPort(){
return this.getDarknetPortNumber();
}
public boolean isOudated() {
return peers.isOutdated();
}
private Map<Integer, NodeToNodeMessageListener> n2nmListeners = new HashMap<Integer, NodeToNodeMessageListener>();
public synchronized void registerNodeToNodeMessageListener(int type, NodeToNodeMessageListener listener) {
n2nmListeners.put(type, listener);
}
/**
* Handle a received node to node message
*/
public void receivedNodeToNodeMessage(Message m, PeerNode src) {
int type = ((Integer) m.getObject(DMT.NODE_TO_NODE_MESSAGE_TYPE)).intValue();
ShortBuffer messageData = (ShortBuffer) m.getObject(DMT.NODE_TO_NODE_MESSAGE_DATA);
receivedNodeToNodeMessage(src, type, messageData, false);
}
public void receivedNodeToNodeMessage(PeerNode src, int type, ShortBuffer messageData, boolean partingMessage) {
boolean fromDarknet = src instanceof DarknetPeerNode;
NodeToNodeMessageListener listener = null;
synchronized(this) {
listener = n2nmListeners.get(type);
}
if(listener == null) {
Logger.error(this, "Unknown n2nm ID: "+type+" - discarding packet length "+messageData.getLength());
return;
}
listener.handleMessage(messageData.getData(), fromDarknet, src, type);
}
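// Illustrative registration sketch (the message type constant and 'node' reference are
// placeholders):
//
//   node.registerNodeToNodeMessageListener(MY_N2NM_TYPE, new NodeToNodeMessageListener() {
//       @Override
//       public void handleMessage(byte[] data, boolean fromDarknet, PeerNode src, int type) {
//           // decode 'data' and act on it; 'fromDarknet' tells us whether src is a friend
//       }
//   });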
private NodeToNodeMessageListener diffNoderefListener = new NodeToNodeMessageListener() {
@Override
public void handleMessage(byte[] data, boolean fromDarknet, PeerNode src, int type) {
Logger.normal(this, "Received differential node reference node to node message from "+src.getPeer());
SimpleFieldSet fs = null;
try {
fs = new SimpleFieldSet(new String(data, "UTF-8"), false, true, false);
} catch (IOException e) {
Logger.error(this, "IOException while parsing node to node message data", e);
return;
}
if(fs.get("n2nType") != null) {
fs.removeValue("n2nType");
}
try {
src.processDiffNoderef(fs);
} catch (FSParseException e) {
Logger.error(this, "FSParseException while parsing node to node message data", e);
return;
}
}
};
private NodeToNodeMessageListener fproxyN2NMListener = new NodeToNodeMessageListener() {
@Override
public void handleMessage(byte[] data, boolean fromDarknet, PeerNode src, int type) {
if(!fromDarknet) {
Logger.error(this, "Got N2NTM from non-darknet node ?!?!?!: from "+src);
return;
}
DarknetPeerNode darkSource = (DarknetPeerNode) src;
Logger.normal(this, "Received N2NTM from '"+darkSource.getPeer()+"'");
SimpleFieldSet fs = null;
try {
fs = new SimpleFieldSet(new String(data, "UTF-8"), false, true, false);
} catch (UnsupportedEncodingException e) {
throw new Error("Impossible: JVM doesn't support UTF-8: " + e, e);
} catch (IOException e) {
Logger.error(this, "IOException while parsing node to node message data", e);
return;
}
fs.putOverwrite("n2nType", Integer.toString(type));
fs.putOverwrite("receivedTime", Long.toString(System.currentTimeMillis()));
fs.putOverwrite("receivedAs", "nodeToNodeMessage");
int fileNumber = darkSource.writeNewExtraPeerDataFile( fs, EXTRA_PEER_DATA_TYPE_N2NTM);
if( fileNumber == -1 ) {
Logger.error( this, "Failed to write N2NTM to extra peer data file for peer "+darkSource.getPeer());
}
// Keep track of the fileNumber so we can potentially delete the extra peer data file later; the file is authoritative
try {
handleNodeToNodeTextMessageSimpleFieldSet(fs, darkSource, fileNumber);
} catch (FSParseException e) {
// Shouldn't happen
throw new Error(e);
}
}
};
/**
* Handle a node to node text message SimpleFieldSet
* @throws FSParseException
*/
public void handleNodeToNodeTextMessageSimpleFieldSet(SimpleFieldSet fs, DarknetPeerNode source, int fileNumber) throws FSParseException {
if(logMINOR)
Logger.minor(this, "Got node to node message: \n"+fs);
int overallType = fs.getInt("n2nType");
fs.removeValue("n2nType");
if(overallType == Node.N2N_MESSAGE_TYPE_FPROXY) {
handleFproxyNodeToNodeTextMessageSimpleFieldSet(fs, source, fileNumber);
} else {
Logger.error(this, "Received unknown node to node message type '"+overallType+"' from "+source.getPeer());
}
}
private void handleFproxyNodeToNodeTextMessageSimpleFieldSet(SimpleFieldSet fs, DarknetPeerNode source, int fileNumber) throws FSParseException {
int type = fs.getInt("type");
if(type == Node.N2N_TEXT_MESSAGE_TYPE_USERALERT) {
source.handleFproxyN2NTM(fs, fileNumber);
} else if(type == Node.N2N_TEXT_MESSAGE_TYPE_FILE_OFFER) {
source.handleFproxyFileOffer(fs, fileNumber);
} else if(type == Node.N2N_TEXT_MESSAGE_TYPE_FILE_OFFER_ACCEPTED) {
source.handleFproxyFileOfferAccepted(fs, fileNumber);
} else if(type == Node.N2N_TEXT_MESSAGE_TYPE_FILE_OFFER_REJECTED) {
source.handleFproxyFileOfferRejected(fs, fileNumber);
} else if(type == Node.N2N_TEXT_MESSAGE_TYPE_BOOKMARK) {
source.handleFproxyBookmarkFeed(fs, fileNumber);
} else if(type == Node.N2N_TEXT_MESSAGE_TYPE_DOWNLOAD) {
source.handleFproxyDownloadFeed(fs, fileNumber);
} else {
Logger.error(this, "Received unknown fproxy node to node message sub-type '"+type+"' from "+source.getPeer());
}
}
public String getMyName() {
return myName;
}
public MessageCore getUSM() {
return usm;
}
public LocationManager getLocationManager() {
return lm;
}
public int getSwaps() {
return LocationManager.swaps;
}
public int getNoSwaps() {
return LocationManager.noSwaps;
}
public int getStartedSwaps() {
return LocationManager.startedSwaps;
}
public int getSwapsRejectedAlreadyLocked() {
return LocationManager.swapsRejectedAlreadyLocked;
}
public int getSwapsRejectedNowhereToGo() {
return LocationManager.swapsRejectedNowhereToGo;
}
public int getSwapsRejectedRateLimit() {
return LocationManager.swapsRejectedRateLimit;
}
public int getSwapsRejectedRecognizedID() {
return LocationManager.swapsRejectedRecognizedID;
}
public PeerNode[] getPeerNodes() {
return peers.myPeers();
}
public PeerNode[] getConnectedPeers() {
return peers.connectedPeers();
}
/**
* Return a peer of this node given its IP address and port, name, or identity, as a String
*/
public PeerNode getPeerNode(String nodeIdentifier) {
for(PeerNode pn: peers.myPeers()) {
Peer peer = pn.getPeer();
String nodeIpAndPort = "";
if(peer != null) {
nodeIpAndPort = peer.toString();
}
String identity = pn.getIdentityString();
if(pn instanceof DarknetPeerNode) {
DarknetPeerNode dpn = (DarknetPeerNode) pn;
String name = dpn.myName;
if(identity.equals(nodeIdentifier) || nodeIpAndPort.equals(nodeIdentifier) || name.equals(nodeIdentifier)) {
return pn;
}
} else {
if(identity.equals(nodeIdentifier) || nodeIpAndPort.equals(nodeIdentifier)) {
return pn;
}
}
}
return null;
}
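// Illustrative lookups (identifier strings are placeholders); any of the three forms works:
//
//   PeerNode byAddress  = getPeerNode("192.168.1.10:12345"); // ip:port as printed by Peer.toString()
//   PeerNode byName     = getPeerNode("alice");              // darknet peer name
//   PeerNode byIdentity = getPeerNode(somePeer.getIdentityString());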
public boolean isHasStarted() {
return hasStarted;
}
public void queueRandomReinsert(KeyBlock block) {
clientCore.queueRandomReinsert(block);
}
public String getExtraPeerDataDir() {
return extraPeerDataDir.getPath();
}
public boolean noConnectedPeers() {
return !peers.anyConnectedPeers();
}
public double getLocation() {
return lm.getLocation();
}
public double getLocationChangeSession() {
return lm.getLocChangeSession();
}
public int getAverageOutgoingSwapTime() {
return lm.getAverageSwapTime();
}
public long getSendSwapInterval() {
return lm.getSendSwapInterval();
}
public int getNumberOfRemotePeerLocationsSeenInSwaps() {
return lm.numberOfRemotePeerLocationsSeenInSwaps;
}
public boolean isAdvancedModeEnabled() {
if(clientCore == null) return false;
return clientCore.isAdvancedModeEnabled();
}
public boolean isFProxyJavascriptEnabled() {
return clientCore.isFProxyJavascriptEnabled();
}
// FIXME convert this kind of thread to Checkpointed's and implement a handler
// using the PacketSender/Ticker. Would save a few threads.
public int getNumARKFetchers() {
int x = 0;
for(PeerNode p: peers.myPeers()) {
if(p.isFetchingARK()) x++;
}
return x;
}
// FIXME put this somewhere else
private volatile Object statsSync = new Object();
/** The total number of bytes of real data i.e. payload sent by the node */
private long totalPayloadSent;
public void sentPayload(int len) {
synchronized(statsSync) {
totalPayloadSent += len;
}
}
/**
* Get the total number of bytes of payload (real data) sent by the node
*
* @return Total payload sent in bytes
*/
public long getTotalPayloadSent() {
synchronized(statsSync) {
return totalPayloadSent;
}
}
public void setName(String key) throws InvalidConfigValueException, NodeNeedRestartException {
config.get("node").getOption("name").setValue(key);
}
public Ticker getTicker() {
return ticker;
}
public int getUnclaimedFIFOSize() {
return usm.getUnclaimedFIFOSize();
}
/**
* Connect this node to another node (for purposes of testing)
*/
public void connectToSeednode(SeedServerTestPeerNode node) throws OpennetDisabledException, FSParseException, PeerParseException, ReferenceSignatureVerificationException {
peers.addPeer(node,false,false);
}
public void connect(Node node, FRIEND_TRUST trust, FRIEND_VISIBILITY visibility) throws FSParseException, PeerParseException, ReferenceSignatureVerificationException, PeerTooOldException {
peers.connect(node.darknetCrypto.exportPublicFieldSet(), darknetCrypto.packetMangler, trust, visibility);
}
public short maxHTL() {
return maxHTL;
}
public int getDarknetPortNumber() {
return darknetCrypto.portNumber;
}
public synchronized int getOutputBandwidthLimit() {
return outputBandwidthLimit;
}
public synchronized int getInputBandwidthLimit() {
if(inputLimitDefault)
return outputBandwidthLimit * 4;
return inputBandwidthLimit;
}
/**
* @return total datastore size in bytes.
*/
public synchronized long getStoreSize() {
return maxTotalDatastoreSize;
}
@Override
public synchronized void setTimeSkewDetectedUserAlert() {
if(timeSkewDetectedUserAlert == null) {
timeSkewDetectedUserAlert = new TimeSkewDetectedUserAlert();
clientCore.alerts.register(timeSkewDetectedUserAlert);
}
}
public File getNodeDir() { return nodeDir.dir(); }
public File getCfgDir() { return cfgDir.dir(); }
public File getUserDir() { return userDir.dir(); }
public File getRunDir() { return runDir.dir(); }
public File getStoreDir() { return storeDir.dir(); }
public File getPluginDir() { return pluginDir.dir(); }
public ProgramDirectory nodeDir() { return nodeDir; }
public ProgramDirectory cfgDir() { return cfgDir; }
public ProgramDirectory userDir() { return userDir; }
public ProgramDirectory runDir() { return runDir; }
public ProgramDirectory storeDir() { return storeDir; }
public ProgramDirectory pluginDir() { return pluginDir; }
public DarknetPeerNode createNewDarknetNode(SimpleFieldSet fs, FRIEND_TRUST trust, FRIEND_VISIBILITY visibility) throws FSParseException, PeerParseException, ReferenceSignatureVerificationException, PeerTooOldException {
return new DarknetPeerNode(fs, this, darknetCrypto, false, trust, visibility);
}
public OpennetPeerNode createNewOpennetNode(SimpleFieldSet fs) throws FSParseException, OpennetDisabledException, PeerParseException, ReferenceSignatureVerificationException, PeerTooOldException {
if(opennet == null) throw new OpennetDisabledException("Opennet is not currently enabled");
return new OpennetPeerNode(fs, this, opennet.crypto, opennet, false);
}
public SeedServerTestPeerNode createNewSeedServerTestPeerNode(SimpleFieldSet fs) throws FSParseException, OpennetDisabledException, PeerParseException, ReferenceSignatureVerificationException, PeerTooOldException {
if(opennet == null) throw new OpennetDisabledException("Opennet is not currently enabled");
return new SeedServerTestPeerNode(fs, this, opennet.crypto, true);
}
public OpennetPeerNode addNewOpennetNode(SimpleFieldSet fs, ConnectionType connectionType) throws FSParseException, PeerParseException, ReferenceSignatureVerificationException {
// FIXME: perhaps this should throw OpennetDisabledException rather than returning null?
if(opennet == null) return null;
return opennet.addNewOpennetNode(fs, connectionType, false);
}
public byte[] getOpennetPubKeyHash() {
return opennet.crypto.ecdsaPubKeyHash;
}
public byte[] getDarknetPubKeyHash() {
return darknetCrypto.ecdsaPubKeyHash;
}
public synchronized boolean isOpennetEnabled() {
return opennet != null;
}
public SimpleFieldSet exportDarknetPublicFieldSet() {
return darknetCrypto.exportPublicFieldSet();
}
public SimpleFieldSet exportOpennetPublicFieldSet() {
return opennet.crypto.exportPublicFieldSet();
}
public SimpleFieldSet exportDarknetPrivateFieldSet() {
return darknetCrypto.exportPrivateFieldSet();
}
public SimpleFieldSet exportOpennetPrivateFieldSet() {
return opennet.crypto.exportPrivateFieldSet();
}
/**
* Should the IP detection code only use the IP address override and the bindTo information,
* rather than doing a full detection?
*/
public synchronized boolean dontDetect() {
// Only return true if bindTo is set on all ports which are in use
if(!darknetCrypto.getBindTo().isRealInternetAddress(false, true, false)) return false;
if(opennet != null) {
if(opennet.crypto.getBindTo().isRealInternetAddress(false, true, false)) return false;
}
return true;
}
public int getOpennetFNPPort() {
if(opennet == null) return -1;
return opennet.crypto.portNumber;
}
public OpennetManager getOpennet() {
return opennet;
}
public synchronized boolean passOpennetRefsThroughDarknet() {
return passOpennetRefsThroughDarknet;
}
/**
* Get the set of public ports that need to be forwarded. These are internal
* ports, not necessarily external - they may be rewritten by the NAT.
	 * @return A Set of ForwardPort objects to be fed to port forward plugins.
*/
public Set<ForwardPort> getPublicInterfacePorts() {
HashSet<ForwardPort> set = new HashSet<ForwardPort>();
// FIXME IPv6 support
set.add(new ForwardPort("darknet", false, ForwardPort.PROTOCOL_UDP_IPV4, darknetCrypto.portNumber));
if(opennet != null) {
NodeCrypto crypto = opennet.crypto;
if(crypto != null) {
set.add(new ForwardPort("opennet", false, ForwardPort.PROTOCOL_UDP_IPV4, crypto.portNumber));
}
}
return set;
}
/**
* Get the time since the node was started in milliseconds.
*
* @return Uptime in milliseconds
*/
public long getUptime() {
return System.currentTimeMillis() - usm.getStartedTime();
}
public synchronized UdpSocketHandler[] getPacketSocketHandlers() {
// FIXME better way to get these!
if(opennet != null) {
return new UdpSocketHandler[] { darknetCrypto.socket, opennet.crypto.socket };
} else {
return new UdpSocketHandler[] { darknetCrypto.socket };
}
}
public int getMaxOpennetPeers() {
return maxOpennetPeers;
}
public void onAddedValidIP() {
OpennetManager om;
synchronized(this) {
om = opennet;
}
if(om != null) {
Announcer announcer = om.announcer;
if(announcer != null) {
announcer.maybeSendAnnouncement();
}
}
}
public boolean isSeednode() {
return acceptSeedConnections;
}
/**
	 * Returns true if the packet receiver should try to decode/process packets that are not from a peer (i.e. from a seed connection).
* The packet receiver calls this upon receiving an unrecognized packet.
*/
public boolean wantAnonAuth(boolean isOpennet) {
if(isOpennet)
return opennet != null && acceptSeedConnections;
else
return false;
}
// FIXME make this configurable
// Probably should wait until we have non-opennet anon auth so we can add it to NodeCrypto.
public boolean wantAnonAuthChangeIP(boolean isOpennet) {
return !isOpennet;
}
public boolean opennetDefinitelyPortForwarded() {
OpennetManager om;
synchronized(this) {
om = this.opennet;
}
if(om == null) return false;
NodeCrypto crypto = om.crypto;
if(crypto == null) return false;
return crypto.definitelyPortForwarded();
}
public boolean darknetDefinitelyPortForwarded() {
if(darknetCrypto == null) return false;
return darknetCrypto.definitelyPortForwarded();
}
public boolean hasKey(Key key, boolean canReadClientCache, boolean forULPR) {
// FIXME optimise!
if(key instanceof NodeCHK)
return fetch((NodeCHK)key, true, canReadClientCache, false, false, forULPR, null) != null;
else
return fetch((NodeSSK)key, true, canReadClientCache, false, false, forULPR, null) != null;
}
/**
* Warning: does not announce change in location!
*/
public void setLocation(double loc) {
lm.setLocation(loc);
}
public boolean peersWantKey(Key key) {
return failureTable.peersWantKey(key, null);
}
private SimpleUserAlert alertMTUTooSmall;
public final RequestClient nonPersistentClientBulk = new RequestClientBuilder().build();
public final RequestClient nonPersistentClientRT = new RequestClientBuilder().realTime().build();
public void setDispatcherHook(NodeDispatcherCallback cb) {
this.dispatcher.setHook(cb);
}
public boolean shallWePublishOurPeersLocation() {
return publishOurPeersLocation;
}
public boolean shallWeRouteAccordingToOurPeersLocation(int htl) {
return routeAccordingToOurPeersLocation && htl > 1;
}
/** Can be called to decrypt client.dat* etc, or can be called when switching from another
* security level to HIGH. */
public void setMasterPassword(String password, boolean inFirstTimeWizard) throws AlreadySetPasswordException, MasterKeysWrongPasswordException, MasterKeysFileSizeException, IOException {
MasterKeys k;
synchronized(this) {
if(keys == null) {
// Decrypting.
keys = MasterKeys.read(masterKeysFile, secureRandom, password);
databaseKey = keys.createDatabaseKey(secureRandom);
} else {
// Setting password when changing to HIGH from another mode.
keys.changePassword(masterKeysFile, password, secureRandom);
return;
}
k = keys;
}
setPasswordInner(k, inFirstTimeWizard);
}
private void setPasswordInner(MasterKeys keys, boolean inFirstTimeWizard) throws MasterKeysWrongPasswordException, MasterKeysFileSizeException, IOException {
MasterSecret secret = keys.getPersistentMasterSecret();
clientCore.setupMasterSecret(secret);
boolean wantClientCache = false;
boolean wantDatabase = false;
synchronized(this) {
wantClientCache = clientCacheAwaitingPassword;
wantDatabase = databaseAwaitingPassword;
databaseAwaitingPassword = false;
}
if(wantClientCache)
activatePasswordedClientCache(keys);
if(wantDatabase)
lateSetupDatabase(keys.createDatabaseKey(secureRandom));
}
private void activatePasswordedClientCache(MasterKeys keys) {
synchronized(this) {
if(clientCacheType.equals("ram")) {
System.err.println("RAM client cache cannot be passworded!");
return;
}
if(!clientCacheType.equals("salt-hash")) {
System.err.println("Unknown client cache type, cannot activate passworded store: "+clientCacheType);
return;
}
}
Runnable migrate = new MigrateOldStoreData(true);
String suffix = getStoreSuffix();
try {
initSaltHashClientCacheFS(suffix, true, keys.clientCacheMasterKey);
} catch (NodeInitException e) {
Logger.error(this, "Unable to activate passworded client cache", e);
System.err.println("Unable to activate passworded client cache: "+e);
e.printStackTrace();
return;
}
synchronized(this) {
clientCacheAwaitingPassword = false;
}
executor.execute(migrate, "Migrate data from previous store");
}
public void changeMasterPassword(String oldPassword, String newPassword, boolean inFirstTimeWizard) throws MasterKeysWrongPasswordException, MasterKeysFileSizeException, IOException, AlreadySetPasswordException {
if(securityLevels.getPhysicalThreatLevel() == PHYSICAL_THREAT_LEVEL.MAXIMUM)
Logger.error(this, "Changing password while physical threat level is at MAXIMUM???");
if(masterKeysFile.exists()) {
keys.changePassword(masterKeysFile, newPassword, secureRandom);
setPasswordInner(keys, inFirstTimeWizard);
} else {
setMasterPassword(newPassword, inFirstTimeWizard);
}
}
public static class AlreadySetPasswordException extends Exception {
final private static long serialVersionUID = -7328456475029374032L;
}
public synchronized File getMasterPasswordFile() {
return masterKeysFile;
}
boolean hasPanicked() {
return hasPanicked;
}
public void panic() {
hasPanicked = true;
clientCore.clientLayerPersister.panic();
clientCore.clientLayerPersister.killAndWaitForNotRunning();
try {
MasterKeys.killMasterKeys(getMasterPasswordFile());
} catch (IOException e) {
System.err.println("Unable to wipe master passwords key file!");
System.err.println("Please delete " + getMasterPasswordFile()
+ " to ensure that nobody can recover your old downloads.");
}
// persistent-temp will be cleaned on restart.
}
public void finishPanic() {
WrapperManager.restart();
System.exit(0);
}
public boolean awaitingPassword() {
if(clientCacheAwaitingPassword) return true;
if(databaseAwaitingPassword) return true;
return false;
}
public boolean wantEncryptedDatabase() {
return this.securityLevels.getPhysicalThreatLevel() != PHYSICAL_THREAT_LEVEL.LOW;
}
public boolean wantNoPersistentDatabase() {
return this.securityLevels.getPhysicalThreatLevel() == PHYSICAL_THREAT_LEVEL.MAXIMUM;
}
public boolean hasDatabase() {
return !clientCore.clientLayerPersister.isKilledOrNotLoaded();
}
/**
* @return canonical path of the database file in use.
*/
public String getDatabasePath() throws IOException {
return clientCore.clientLayerPersister.getWriteFilename().toString();
}
/** Should we commit the block to the store rather than the cache?
*
* <p>We used to check whether we are a sink by checking whether any peer has
* a closer location than we do. Then we made low-uptime nodes exempt from
* this calculation: if we route to a low uptime node with a closer location,
	 * we want to store it anyway since it may go offline. The problem was that
* if we routed to a low-uptime node, and there was another option that wasn't
* low-uptime but was closer to the target than we were, then we would not
* store in the store. Also, routing isn't always by the closest peer location:
* FOAF and per-node failure tables change it. So now, we consider the nodes
* we have actually routed to:</p>
*
* <p>Store in datastore if our location is closer to the target than:</p><ol>
* <li>the source location (if any, and ignoring if low-uptime)</li>
* <li>the locations of the nodes we just routed to (ditto)</li>
* </ol>
*
* @param key
* @param source
* @param routedTo
* @return
*/
public boolean shouldStoreDeep(Key key, PeerNode source, PeerNode[] routedTo) {
double myLoc = getLocation();
double target = key.toNormalizedDouble();
double myDist = Location.distance(myLoc, target);
// First, calculate whether we would have stored it using the old formula.
if(logMINOR) Logger.minor(this, "Should store for "+key+" ?");
// Don't sink store if any of the nodes we routed to, or our predecessor, is both high-uptime and closer to the target than we are.
if(source != null && !source.isLowUptime()) {
if(Location.distance(source, target) < myDist) {
if(logMINOR) Logger.minor(this, "Not storing because source is closer to target for "+key+" : "+source);
return false;
}
}
for(PeerNode pn : routedTo) {
if(Location.distance(pn, target) < myDist && !pn.isLowUptime()) {
if(logMINOR) Logger.minor(this, "Not storing because peer "+pn+" is closer to target for "+key+" his loc "+pn.getLocation()+" my loc "+myLoc+" target is "+target);
return false;
} else {
if(logMINOR) Logger.minor(this, "Should store maybe, peer "+pn+" loc = "+pn.getLocation()+" my loc is "+myLoc+" target is "+target+" low uptime is "+pn.isLowUptime());
}
}
if(logMINOR) Logger.minor(this, "Should store returning true for "+key+" target="+target+" myLoc="+myLoc+" peers: "+routedTo.length);
return true;
}
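	/*
	 * Illustrative sketch (editor's note, not part of the original code): assuming
	 * Location.distance() is the circular distance on [0.0, 1.0), the decision above
	 * plays out roughly like this for hypothetical values:
	 *
	 *   myLoc = 0.30, target = 0.32            -> myDist = 0.02
	 *   source at 0.31, high uptime            -> distance 0.01 < 0.02, so do NOT store deep
	 *   routedTo peer at 0.40, high uptime     -> distance 0.08 > 0.02, does not veto storing
	 *   routedTo peer at 0.315, low uptime     -> closer, but low uptime, so it is ignored
	 *
	 * Only when neither the (high-uptime) source nor any high-uptime routed-to peer is
	 * strictly closer to the target does shouldStoreDeep() return true.
	 */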
public boolean getWriteLocalToDatastore() {
return writeLocalToDatastore;
}
public boolean getUseSlashdotCache() {
return useSlashdotCache;
}
// FIXME remove the visibility alert after a few builds.
public void createVisibilityAlert() {
synchronized(this) {
if(showFriendsVisibilityAlert) return;
showFriendsVisibilityAlert = true;
}
// Wait until startup completed.
this.getTicker().queueTimedJob(new Runnable() {
@Override
public void run() {
config.store();
}
}, 0);
registerFriendsVisibilityAlert();
}
private UserAlert visibilityAlert = new SimpleUserAlert(true, l10n("pleaseSetPeersVisibilityAlertTitle"), l10n("pleaseSetPeersVisibilityAlert"), l10n("pleaseSetPeersVisibilityAlert"), UserAlert.ERROR) {
@Override
public void onDismiss() {
synchronized(Node.this) {
showFriendsVisibilityAlert = false;
}
config.store();
unregisterFriendsVisibilityAlert();
}
};
private void registerFriendsVisibilityAlert() {
if(clientCore == null || clientCore.alerts == null) {
// Wait until startup completed.
this.getTicker().queueTimedJob(new Runnable() {
@Override
public void run() {
registerFriendsVisibilityAlert();
}
}, 0);
return;
}
clientCore.alerts.register(visibilityAlert);
}
private void unregisterFriendsVisibilityAlert() {
clientCore.alerts.unregister(visibilityAlert);
}
public int getMinimumMTU() {
int mtu;
synchronized(this) {
mtu = maxPacketSize;
}
if(ipDetector != null) {
int detected = ipDetector.getMinimumDetectedMTU();
if(detected < mtu) return detected;
}
return mtu;
}
public void updateMTU() {
this.darknetCrypto.socket.calculateMaxPacketSize();
OpennetManager om = opennet;
if(om != null) {
om.crypto.socket.calculateMaxPacketSize();
}
}
public static boolean isTestnetEnabled() {
return false;
}
public MersenneTwister createRandom() {
byte[] buf = new byte[16];
random.nextBytes(buf);
return new MersenneTwister(buf);
}
public boolean enableNewLoadManagement(boolean realTimeFlag) {
NodeStats stats = this.nodeStats;
if(stats == null) {
Logger.error(this, "Calling enableNewLoadManagement before Node constructor completes! FIX THIS!", new Exception("error"));
return false;
}
return stats.enableNewLoadManagement(realTimeFlag);
}
/** FIXME move to Probe.java? */
public boolean enableRoutedPing() {
return enableRoutedPing;
}
public boolean updateIsUrgent() {
OpennetManager om = getOpennet();
if(om != null) {
if(om.announcer != null && om.announcer.isWaitingForUpdater())
return true;
}
if(peers.getPeerNodeStatusSize(PeerManager.PEER_NODE_STATUS_TOO_NEW, true) > PeerManager.OUTDATED_MIN_TOO_NEW_DARKNET)
return true;
return false;
}
public byte[] getPluginStoreKey(String storeIdentifier) {
DatabaseKey key;
synchronized(this) {
key = databaseKey;
}
if(key != null)
return key.getPluginStoreKey(storeIdentifier);
else
return null;
}
public PluginManager getPluginManager() {
return pluginManager;
}
DatabaseKey getDatabaseKey() {
return databaseKey;
}
}
| nextgens/fred | src/freenet/node/Node.java | Java | gpl-2.0 | 177,426 |
package org.adempiere.impexp.impl;
/*
* #%L
* de.metas.adempiere.adempiere.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.util.HashMap;
import java.util.Map;
import org.adempiere.exceptions.AdempiereException;
import org.adempiere.impexp.BPartnerImportProcess;
import org.adempiere.impexp.IImportProcess;
import org.adempiere.impexp.IImportProcessFactory;
import org.adempiere.impexp.ProductImportProcess;
import org.adempiere.impexp.spi.IAsyncImportProcessBuilder;
import org.adempiere.model.InterfaceWrapperHelper;
import org.adempiere.util.Check;
import org.compiere.model.I_I_BPartner;
import org.compiere.model.I_I_Product;
import com.google.common.base.Supplier;
public class ImportProcessFactory implements IImportProcessFactory
{
private final Map<Class<?>, Class<?>> modelImportClass2importProcessClasses = new HashMap<>();
private final Map<String, Class<?>> tableName2importProcessClasses = new HashMap<>();
private Supplier<IAsyncImportProcessBuilder> asyncImportProcessBuilderSupplier;
public ImportProcessFactory()
{
// Register standard import processes
registerImportProcess(I_I_BPartner.class, BPartnerImportProcess.class);
registerImportProcess(I_I_Product.class, ProductImportProcess.class);
}
@Override
public <ImportRecordType> void registerImportProcess(final Class<ImportRecordType> modelImportClass, final Class<? extends IImportProcess<ImportRecordType>> importProcessClass)
{
Check.assumeNotNull(modelImportClass, "modelImportClass not null");
Check.assumeNotNull(importProcessClass, "importProcessClass not null");
modelImportClass2importProcessClasses.put(modelImportClass, importProcessClass);
final String tableName = InterfaceWrapperHelper.getTableName(modelImportClass);
tableName2importProcessClasses.put(tableName, importProcessClass);
}
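	/*
	 * Usage sketch (editor's note): registering and resolving a custom import process.
	 * I_I_MyTable and MyTableImportProcess are hypothetical names used purely for
	 * illustration; they are not part of this code base.
	 *
	 *   final IImportProcessFactory factory = new ImportProcessFactory();
	 *   factory.registerImportProcess(I_I_MyTable.class, MyTableImportProcess.class);
	 *
	 *   // later, resolve either by model class or by table name
	 *   final IImportProcess<I_I_MyTable> p1 = factory.newImportProcess(I_I_MyTable.class);
	 *   final IImportProcess<I_I_MyTable> p2 = factory.newImportProcessForTableNameOrNull("I_MyTable");
	 */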
@Override
public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcess(final Class<ImportRecordType> modelImportClass)
{
final IImportProcess<ImportRecordType> importProcess = newImportProcessOrNull(modelImportClass);
Check.assumeNotNull(importProcess, "importProcess not null for {}", modelImportClass);
return importProcess;
}
@Override
public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcessOrNull(final Class<ImportRecordType> modelImportClass)
{
Check.assumeNotNull(modelImportClass, "modelImportClass not null");
final Class<?> importProcessClass = modelImportClass2importProcessClasses.get(modelImportClass);
if (importProcessClass == null)
{
return null;
}
return newInstance(importProcessClass);
}
private <ImportRecordType> IImportProcess<ImportRecordType> newInstance(final Class<?> importProcessClass)
{
try
{
@SuppressWarnings("unchecked")
final IImportProcess<ImportRecordType> importProcess = (IImportProcess<ImportRecordType>)importProcessClass.newInstance();
return importProcess;
}
catch (Exception e)
{
throw new AdempiereException("Failed instantiating " + importProcessClass, e);
}
}
@Override
public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcessForTableNameOrNull(final String tableName)
{
Check.assumeNotNull(tableName, "tableName not null");
final Class<?> importProcessClass = tableName2importProcessClasses.get(tableName);
if (importProcessClass == null)
{
return null;
}
return newInstance(importProcessClass);
}
@Override
public <ImportRecordType> IImportProcess<ImportRecordType> newImportProcessForTableName(final String tableName)
{
final IImportProcess<ImportRecordType> importProcess = newImportProcessForTableNameOrNull(tableName);
Check.assumeNotNull(importProcess, "importProcess not null for {}", tableName);
return importProcess;
}
@Override
public IAsyncImportProcessBuilder newAsyncImportProcessBuilder()
{
Check.assumeNotNull(asyncImportProcessBuilderSupplier, "A supplier for {} shall be registered first", IAsyncImportProcessBuilder.class);
return asyncImportProcessBuilderSupplier.get();
}
@Override
public void setAsyncImportProcessBuilderSupplier(Supplier<IAsyncImportProcessBuilder> asyncImportProcessBuilderSupplier)
{
Check.assumeNotNull(asyncImportProcessBuilderSupplier, "asyncImportProcessBuilderSupplier not null");
this.asyncImportProcessBuilderSupplier = asyncImportProcessBuilderSupplier;
}
}
| klst-com/metasfresh | de.metas.business/src/main/java/org/adempiere/impexp/impl/ImportProcessFactory.java | Java | gpl-2.0 | 5,016 |
package irc.bot;
import java.io.*;
public class OutHandler implements Runnable {
OutHandler(Connection connection) {
this.connection = connection;
}
private Connection connection;
public void run() {}
}
| propheh/IRCBot | irc/bot/OutHandler.java | Java | gpl-2.0 | 225 |
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* aapt tool from the resource data it found. It
* should not be modified by hand.
*/
package com.ellcs.stack.android;
public final class R {
public static final class attr {
}
public static final class drawable {
public static final int ic_launcher=0x7f020000;
}
public static final class string {
public static final int app_name=0x7f030000;
}
public static final class style {
public static final int GdxTheme=0x7f040000;
}
}
| ellcs/point | android/build/generated/source/r/debug/com/ellcs/stack/android/R.java | Java | gpl-2.0 | 582 |
/**
* OWASP Benchmark Project v1.1
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://www.owasp.org/index.php/Benchmark">https://www.owasp.org/index.php/Benchmark</a>.
*
* The Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details
*
* @author Nick Sanidas <a href="https://www.aspectsecurity.com">Aspect Security</a>
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/BenchmarkTest06598")
public class BenchmarkTest06598 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
org.owasp.benchmark.helpers.SeparateClassRequest scr = new org.owasp.benchmark.helpers.SeparateClassRequest( request );
String param = scr.getTheValue("foo");
java.util.List<String> valuesList = new java.util.ArrayList<String>( );
valuesList.add("safe");
valuesList.add( param );
valuesList.add( "moresafe" );
valuesList.remove(0); // remove the 1st safe value
String bar = valuesList.get(0); // get the param value
try {
java.util.Properties Benchmarkprops = new java.util.Properties();
Benchmarkprops.load(this.getClass().getClassLoader().getResourceAsStream("Benchmark.properties"));
String algorithm = Benchmarkprops.getProperty("cryptoAlg2", "AES/ECB/PKCS5Padding");
javax.crypto.Cipher c = javax.crypto.Cipher.getInstance(algorithm);
} catch (java.security.NoSuchAlgorithmException e) {
System.out.println("Problem executing crypto - javax.crypto.Cipher.getInstance(java.lang.String) Test Case");
throw new ServletException(e);
} catch (javax.crypto.NoSuchPaddingException e) {
System.out.println("Problem executing crypto - javax.crypto.Cipher.getInstance(java.lang.String) Test Case");
throw new ServletException(e);
}
response.getWriter().println("Crypto Test javax.crypto.Cipher.getInstance(java.lang.String) executed");
}
}
| iammyr/Benchmark | src/main/java/org/owasp/benchmark/testcode/BenchmarkTest06598.java | Java | gpl-2.0 | 2,819 |
/**
* Copyright (c) 2008-2012 Indivica Inc.
*
* This software is made available under the terms of the
* GNU General Public License, Version 2, 1991 (GPLv2).
* License details are available via "indivica.ca/gplv2"
* and "gnu.org/licenses/gpl-2.0.html".
*/
package org.oscarehr.document.web;
import java.io.File;
import java.io.FileInputStream;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.pdfbox.pdfparser.PDFParser;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.actions.DispatchAction;
import org.oscarehr.common.dao.CtlDocumentDao;
import org.oscarehr.common.dao.DocumentDao;
import org.oscarehr.common.dao.PatientLabRoutingDao;
import org.oscarehr.common.dao.ProviderInboxRoutingDao;
import org.oscarehr.common.dao.ProviderLabRoutingDao;
import org.oscarehr.common.dao.QueueDocumentLinkDao;
import org.oscarehr.common.model.CtlDocument;
import org.oscarehr.common.model.CtlDocumentPK;
import org.oscarehr.common.model.Document;
import org.oscarehr.common.model.PatientLabRouting;
import org.oscarehr.common.model.ProviderInboxItem;
import org.oscarehr.common.model.ProviderLabRoutingModel;
import org.oscarehr.util.LoggedInInfo;
import org.oscarehr.util.SpringUtils;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
import oscar.dms.EDoc;
import oscar.dms.EDocUtil;
import oscar.oscarLab.ca.all.upload.ProviderLabRouting;
public class SplitDocumentAction extends DispatchAction {
private DocumentDao documentDao = SpringUtils.getBean(DocumentDao.class);
public ActionForward split(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
String docNum = request.getParameter("document");
String[] commands = request.getParameterValues("page[]");
Document doc = documentDao.getDocument(docNum);
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
new File(docdownload);
String newFilename = doc.getDocfilename();
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());
PDFParser parser = new PDFParser(input);
parser.parse();
PDDocument pdf = parser.getPDDocument();
PDDocument newPdf = new PDDocument();
List pages = pdf.getDocumentCatalog().getAllPages();
if (commands != null) {
for (String c : commands) {
String[] command = c.split(",");
int pageNum = Integer.parseInt(command[0]);
int rotation = Integer.parseInt(command[1]);
PDPage p = (PDPage)pages.get(pageNum-1);
p.setRotation(rotation);
newPdf.addPage(p);
}
}
//newPdf.save(docdownload + newFilename);
if (newPdf.getNumberOfPages() > 0) {
LoggedInInfo loggedInInfo=LoggedInInfo.loggedInInfo.get();
EDoc newDoc = new EDoc("","", newFilename, "", loggedInInfo.loggedInProvider.getProviderNo(), doc.getDoccreator(), "", 'A', oscar.util.UtilDateUtilities.getToday("yyyy-MM-dd"), "", "", "demographic", "-1",0);
newDoc.setDocPublic("0");
newDoc.setContentType("application/pdf");
newDoc.setNumberOfPages(newPdf.getNumberOfPages());
String newDocNo = EDocUtil.addDocumentSQL(newDoc);
newPdf.save(docdownload + newDoc.getFileName());
newPdf.close();
WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(request.getSession().getServletContext());
ProviderInboxRoutingDao providerInboxRoutingDao = (ProviderInboxRoutingDao) ctx.getBean("providerInboxRoutingDAO");
providerInboxRoutingDao.addToProviderInbox("0", Integer.parseInt(newDocNo), "DOC");
List<ProviderInboxItem> routeList = providerInboxRoutingDao.getProvidersWithRoutingForDocument("DOC", Integer.parseInt(docNum));
for (ProviderInboxItem i : routeList) {
providerInboxRoutingDao.addToProviderInbox(i.getProviderNo(), Integer.parseInt(newDocNo), "DOC");
}
providerInboxRoutingDao.addToProviderInbox(loggedInInfo.loggedInProvider.getProviderNo(), Integer.parseInt(newDocNo), "DOC");
QueueDocumentLinkDao queueDocumentLinkDAO = (QueueDocumentLinkDao) ctx.getBean("queueDocumentLinkDAO");
Integer qid = 1;
Integer did= Integer.parseInt(newDocNo.trim());
queueDocumentLinkDAO.addToQueueDocumentLink(qid,did);
ProviderLabRoutingDao providerLabRoutingDao = (ProviderLabRoutingDao) SpringUtils.getBean("providerLabRoutingDao");
List<ProviderLabRoutingModel> result = providerLabRoutingDao.getProviderLabRoutingDocuments(Integer.parseInt(docNum));
if (!result.isEmpty()) {
new ProviderLabRouting().route(newDocNo,
result.get(0).getProviderNo(),"DOC");
}
PatientLabRoutingDao patientLabRoutingDao = (PatientLabRoutingDao) SpringUtils.getBean("patientLabRoutingDao");
List<PatientLabRouting> result2 = patientLabRoutingDao.findDocByDemographic(Integer.parseInt(docNum));
if (!result2.isEmpty()) {
PatientLabRouting newPatientRoute = new PatientLabRouting();
newPatientRoute.setDemographicNo(result2.get(0).getDemographicNo());
newPatientRoute.setLabNo(Integer.parseInt(newDocNo));
newPatientRoute.setLabType("DOC");
patientLabRoutingDao.persist(newPatientRoute);
}
CtlDocumentDao ctlDocumentDao = SpringUtils.getBean(CtlDocumentDao.class);
CtlDocument result3 = ctlDocumentDao.getCtrlDocument(Integer.parseInt(docNum));
if (result3!=null) {
CtlDocumentPK ctlDocumentPK = new CtlDocumentPK(Integer.parseInt(newDocNo), "demographic");
CtlDocument newCtlDocument = new CtlDocument();
newCtlDocument.setId(ctlDocumentPK);
newCtlDocument.getId().setModuleId(result3.getId().getModuleId());
newCtlDocument.setStatus(result3.getStatus());
documentDao.persist(newCtlDocument);
}
}
pdf.close();
input.close();
return mapping.findForward("success");
}
public ActionForward rotate180(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
Document doc = documentDao.getDocument(request.getParameter("document"));
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());
PDFParser parser = new PDFParser(input);
parser.parse();
PDDocument pdf = parser.getPDDocument();
int x = 1;
for (Object p : pdf.getDocumentCatalog().getAllPages()) {
PDPage pg = (PDPage)p;
Integer r = (pg.getRotation() != null ? pg.getRotation() : 0);
pg.setRotation((r+180)%360);
ManageDocumentAction.deleteCacheVersion(doc, x);
x++;
}
pdf.save(docdownload + doc.getDocfilename());
pdf.close();
input.close();
return null;
}
public ActionForward rotate90(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
Document doc = documentDao.getDocument(request.getParameter("document"));
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());
PDFParser parser = new PDFParser(input);
parser.parse();
PDDocument pdf = parser.getPDDocument();
int x = 1;
for (Object p : pdf.getDocumentCatalog().getAllPages()) {
PDPage pg = (PDPage)p;
Integer r = (pg.getRotation() != null ? pg.getRotation() : 0);
pg.setRotation((r+90)%360);
ManageDocumentAction.deleteCacheVersion(doc, x);
x++;
}
pdf.save(docdownload + doc.getDocfilename());
pdf.close();
input.close();
return null;
}
public ActionForward removeFirstPage(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
Document doc = documentDao.getDocument(request.getParameter("document"));
String docdownload = oscar.OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
FileInputStream input = new FileInputStream(docdownload + doc.getDocfilename());
PDFParser parser = new PDFParser(input);
parser.parse();
PDDocument pdf = parser.getPDDocument();
		// Documents must have at least 2 pages for the first page to be removed.
if (pdf.getNumberOfPages() <= 1) { return null; }
int x = 1;
for (Object p : pdf.getDocumentCatalog().getAllPages()) {
ManageDocumentAction.deleteCacheVersion(doc, x);
x++;
}
pdf.removePage(0);
EDocUtil.subtractOnePage(request.getParameter("document"));
pdf.save(docdownload + doc.getDocfilename());
pdf.close();
input.close();
return null;
}
} | hexbinary/landing | src/main/java/org/oscarehr/document/web/SplitDocumentAction.java | Java | gpl-2.0 | 8,755 |
package edu.ucsd.ncmir.WIB.client.core.components;
import com.google.gwt.user.client.ui.Widget;
import edu.ucsd.ncmir.WIB.client.core.message.Message;
import edu.ucsd.ncmir.WIB.client.core.message.MessageListener;
import edu.ucsd.ncmir.WIB.client.core.message.MessageManager;
import edu.ucsd.ncmir.WIB.client.core.messages.ResetMessage;
/**
* HorizontalSlider bar.
* @author spl
*/
public class HorizontalSlider
extends SliderHorizontal
implements HorizontalSliderInterface,
MessageListener,
SliderValueUpdateHandler
{
private final Message _message;
/**
     * Creates a <code>HorizontalSlider</code> object.
     * @param message the message sent whenever the slider value changes
*/
public HorizontalSlider( Message message )
{
super( "150px" );
this._message = message;
super.addSliderValueUpdateHandler( this );
MessageManager.registerListener( ResetMessage.class, this );
}
private int _min_value = 0;
private int _max_value = 1;
private int _default_value = 0;
@Override
public final void setSliderParameters( int min_value, int max_value,
int default_value )
{
this._min_value = min_value;
this._max_value = max_value;
this._default_value = default_value;
this.setMaxValue( this._max_value - this._min_value );
this.setSliderValue( default_value );
super.setMinMarkStep( 1 );
}
@Override
public void setWidth( String size )
{
this._transmit_value = false;
double value = super.getValue();
super.setWidth( size );
super.setValue( value );
this._transmit_value = true;
}
private boolean _transmit_value = true;
/**
* Updates the value of the slider without firing the handler.
* @param value The value to be set.
*/
@Override
public void setSliderValueOnly( int value )
{
// Turn off the handler.
this._transmit_value = false;
this.setSliderValue( value );
this._transmit_value = true;
}
@Override
public void setSliderValue( double value )
{
super.setValue( value - this._min_value );
}
@Override
public void action( Message m, Object o )
{
this.setSliderValue( this._default_value );
}
private boolean _initial = true; // To prevent premature Message firing.
/**
* Fired when the bar value changes.
     * @param event The <code>SliderValueUpdateEvent</code>.
*/
@Override
public void onBarValueChanged( SliderValueUpdateEvent event )
{
if ( this._transmit_value && !this._initial )
this.updateHandler( event.getValue() + this._min_value );
// Turn off the initial flag. The SliderBar object fires a
// spurious BarValueChangedEvent when the object is loaded.
// This prevents it being propagated.
this._initial = false;
}
@Override
public void updateHandler( double value )
{
this._message.send( value );
}
@Override
public Widget widget()
{
return this;
}
@Override
public double getSliderValue()
{
return this.getValue();
}
}
| imclab/WebImageBrowser | WIB/src/java/edu/ucsd/ncmir/WIB/client/core/components/HorizontalSlider.java | Java | gpl-2.0 | 3,160 |
package org.nla.tarotdroid.lib.helpers;
import static com.google.common.base.Preconditions.checkArgument;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
/**
* Network and internet connexion helper class.
*/
public class ConnexionHelper {
/**
	 * Checks all available network providers for an active connection.
	 * @param context the context used to look up the connectivity service
	 * @return true if any network is currently connected
	 */
public static boolean isConnectedToInternet(Context context) {
checkArgument(context != null, "context is null");
boolean toReturn = false;
ConnectivityManager connectivity = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (connectivity != null)
{
NetworkInfo[] info = connectivity.getAllNetworkInfo();
if (info != null)
for (int i = 0; i < info.length; i++)
if (info[i].getState() == NetworkInfo.State.CONNECTED)
{
toReturn = true;
break;
}
}
return toReturn;
}
} | daffycricket/tarotdroid | tarotDroidUiLib/src/main/java/org/nla/tarotdroid/lib/helpers/ConnexionHelper.java | Java | gpl-2.0 | 1,094 |
//: containers/MapDataTest.java
package course.containers; /* Added by Eclipse.py */
import java.util.*;
import net.mindview.util.*;
import static net.mindview.util.Print.*;
class Letters implements Generator<Pair<Integer,String>>,
Iterable<Integer> {
private int size = 9;
private int number = 1;
private char letter = 'A';
public Pair<Integer,String> next() {
return new Pair<Integer,String>(
number++, "" + letter++);
}
public Iterator<Integer> iterator() {
return new Iterator<Integer>() {
public Integer next() { return number++; }
public boolean hasNext() { return number < size; }
public void remove() {
throw new UnsupportedOperationException();
}
};
}
}
public class MapDataTest {
public static void main(String[] args) {
// Pair Generator:
print(MapData.map(new Letters(), 11));
// Two separate generators:
print(MapData.map(new CountingGenerator.Character(),
new RandomGenerator.String(3), 8));
// A key Generator and a single value:
print(MapData.map(new CountingGenerator.Character(),
"Value", 6));
// An Iterable and a value Generator:
print(MapData.map(new Letters(),
new RandomGenerator.String(3)));
// An Iterable and a single value:
print(MapData.map(new Letters(), "Pop"));
}
} /* Output:
{1=A, 2=B, 3=C, 4=D, 5=E, 6=F, 7=G, 8=H, 9=I, 10=J, 11=K}
{a=YNz, b=brn, c=yGc, d=FOW, e=ZnT, f=cQr, g=Gse, h=GZM}
{a=Value, b=Value, c=Value, d=Value, e=Value, f=Value}
{1=mJM, 2=RoE, 3=suE, 4=cUO, 5=neO, 6=EdL, 7=smw, 8=HLG}
{1=Pop, 2=Pop, 3=Pop, 4=Pop, 5=Pop, 6=Pop, 7=Pop, 8=Pop}
*///:~
| fengzhongdege/TIJ4 | src/main/java/course/containers/MapDataTest.java | Java | gpl-2.0 | 1,677 |
/*
* Copyright (C) 2013-2019 52°North Initiative for Geospatial Open Source
* Software GmbH
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*
* If the program is linked with libraries which are licensed under one of
* the following licenses, the combination of the program with the linked
* library is not considered a "derivative work" of the program:
*
* - Apache License, version 2.0
* - Apache Software License, version 1.0
* - GNU Lesser General Public License, version 3
* - Mozilla Public License, versions 1.0, 1.1 and 2.0
* - Common Development and Distribution License (CDDL), version 1.0
*
* Therefore the distribution of the program linked with libraries licensed
* under the aforementioned licenses, is permitted by the copyright holders
* if the distribution is compliant with both the GNU General Public License
* version 2 and the aforementioned licenses.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
package org.n52.series.spi.geo;
import org.locationtech.jts.geom.Geometry;
import org.n52.io.crs.CRSUtils;
import org.n52.io.request.IoParameters;
import org.n52.io.response.dataset.StationOutput;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TransformationService {
private static final Logger LOGGER = LoggerFactory.getLogger(TransformationService.class);
/**
* @param station
* the station to transform
* @param parameters
* the query containing CRS and how to handle axes order
*/
protected void transformInline(StationOutput station, IoParameters parameters) {
String crs = parameters.getCrs();
if (CRSUtils.DEFAULT_CRS.equals(crs)) {
// no need to transform
return;
}
Geometry geometry = transform(station.getGeometry(), parameters);
station.setValue(StationOutput.GEOMETRY, geometry, parameters, station::setGeometry);
}
public Geometry transform(Geometry geometry, IoParameters query) {
String crs = query.getCrs();
if (CRSUtils.DEFAULT_CRS.equals(crs)) {
// no need to transform
return geometry;
}
return transformGeometry(query, geometry, crs);
}
private Geometry transformGeometry(IoParameters query,
Geometry geometry,
String crs)
throws RuntimeException {
try {
CRSUtils crsUtils = query.isForceXY()
? CRSUtils.createEpsgForcedXYAxisOrder()
: CRSUtils.createEpsgStrictAxisOrder();
return geometry != null
? crsUtils.transformInnerToOuter(geometry, crs)
: geometry;
} catch (TransformException e) {
throwRuntimeException(crs, e);
} catch (FactoryException e) {
LOGGER.debug("Couldn't create geometry factory", e);
}
return geometry;
}
private void throwRuntimeException(String crs, TransformException e) throws RuntimeException {
throw new RuntimeException("Could not transform to requested CRS: " + crs, e);
}
}
| ridoo/series-rest-api | spi/src/main/java/org/n52/series/spi/geo/TransformationService.java | Java | gpl-2.0 | 3,637 |
/* Copyright (C) 2022, Specify Collections Consortium
*
* Specify Collections Consortium, Biodiversity Institute, University of Kansas,
* 1345 Jayhawk Boulevard, Lawrence, Kansas, 66045, USA, [email protected]
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package edu.ku.brc.af.ui.forms.persist;
import static edu.ku.brc.helpers.XMLHelper.getAttr;
import static edu.ku.brc.ui.UIHelper.createDuplicateJGoodiesDef;
import static edu.ku.brc.ui.UIRegistry.getResourceString;
import static org.apache.commons.lang.StringUtils.isEmpty;
import static org.apache.commons.lang.StringUtils.isNotEmpty;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Vector;
import javax.swing.JLabel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.ScrollPaneConstants;
import javax.swing.table.DefaultTableModel;
import org.apache.commons.betwixt.XMLIntrospector;
import org.apache.commons.betwixt.io.BeanWriter;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dom4j.Element;
import org.dom4j.Node;
import com.jgoodies.forms.builder.PanelBuilder;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;
import edu.ku.brc.af.core.db.DBFieldInfo;
import edu.ku.brc.af.core.db.DBRelationshipInfo;
import edu.ku.brc.af.core.db.DBTableChildIFace;
import edu.ku.brc.af.core.db.DBTableIdMgr;
import edu.ku.brc.af.core.db.DBTableInfo;
import edu.ku.brc.af.prefs.AppPreferences;
import edu.ku.brc.af.ui.forms.FormDataObjIFace;
import edu.ku.brc.af.ui.forms.FormHelper;
import edu.ku.brc.af.ui.forms.formatters.UIFieldFormatterIFace;
import edu.ku.brc.af.ui.forms.formatters.UIFieldFormatterMgr;
import edu.ku.brc.af.ui.forms.validation.TypeSearchForQueryFactory;
import edu.ku.brc.ui.CustomFrame;
import edu.ku.brc.ui.UIHelper;
import edu.ku.brc.ui.UIRegistry;
import edu.ku.brc.helpers.XMLHelper;
/**
* Factory that creates Views from ViewSet files. This class uses the singleton ViewSetMgr to verify the View Set Name is unique.
 * If it is not unique then it throws an exception.<br> In this case a "form" is really the definition of a form. The form's object hierarchy
 * is used to create the forms using Swing UI objects. The classes will also be used by the forms editor.
 * @code_status Beta
 *
* @author rods
*/
public class ViewLoader
{
public static final int DEFAULT_ROWS = 4;
public static final int DEFAULT_COLS = 10;
public static final int DEFAULT_SUBVIEW_ROWS = 5;
// Statics
private static final Logger log = Logger.getLogger(ViewLoader.class);
private static final ViewLoader instance = new ViewLoader();
private static final String ID = "id";
private static final String NAME = "name";
private static final String TYPE = "type";
private static final String LABEL = "label";
private static final String DESC = "desc";
private static final String TITLE = "title";
private static final String CLASSNAME = "class";
private static final String GETTABLE = "gettable";
private static final String SETTABLE = "settable";
private static final String INITIALIZE = "initialize";
private static final String DSPUITYPE = "dspuitype";
private static final String VALIDATION = "validation";
private static final String ISREQUIRED = "isrequired";
private static final String RESOURCELABELS = "useresourcelabels";
// Data Members
protected boolean doingResourceLabels = false;
protected String viewSetName = null;
// Members needed for verification
protected static boolean doFieldVerification = true;
protected static boolean isTreeClass = false;
protected static DBTableInfo fldVerTableInfo = null;
protected static FormViewDef fldVerFormViewDef = null;
protected static String colDefType = null;
protected static CustomFrame verifyDlg = null;
protected FieldVerifyTableModel fldVerTableModel = null;
// Debug
//protected static ViewDef gViewDef = null;
static
{
doFieldVerification = AppPreferences.getLocalPrefs().getBoolean("verify_field_names", false);
}
/**
* Default Constructor
*
*/
protected ViewLoader()
{
// do nothing
}
/**
* Creates the view.
* @param element the element to build the View from
* @param altViewsViewDefName the hashtable to track the AltView's ViewDefName
* @return the View
* @throws Exception
*/
protected static ViewIFace createView(final Element element,
final Hashtable<AltViewIFace, String> altViewsViewDefName) throws Exception
{
String name = element.attributeValue(NAME);
String objTitle = getAttr(element, "objtitle", null);
String className = element.attributeValue(CLASSNAME);
String desc = getDesc(element);
String businessRules = getAttr(element, "busrules", null);
boolean isInternal = getAttr(element, "isinternal", true);
DBTableInfo ti = DBTableIdMgr.getInstance().getByClassName(className);
if (ti != null && StringUtils.isEmpty(objTitle))
{
objTitle = ti.getTitle();
}
View view = new View(instance.viewSetName,
name,
objTitle,
className,
businessRules != null ? businessRules.trim() : null,
getAttr(element, "usedefbusrule", true),
isInternal,
desc);
// Later we should get this from a properties file.
if (ti != null)
{
view.setTitle(ti.getTitle());
}
/*if (!isInternal)
{
System.err.println(StringUtils.replace(name, " ", "_")+"="+UIHelper.makeNamePretty(name));
}*/
Element altviews = (Element)element.selectSingleNode("altviews");
if (altviews != null)
{
AltViewIFace defaultAltView = null;
AltView.CreationMode defaultMode = AltView.parseMode(getAttr(altviews, "mode", ""), AltViewIFace.CreationMode.VIEW);
String selectorName = altviews.attributeValue("selector");
view.setDefaultMode(defaultMode);
view.setSelectorName(selectorName);
Hashtable<String, Boolean> nameCheckHash = new Hashtable<String, Boolean>();
// iterate through child elements
for ( Iterator<?> i = altviews.elementIterator( "altview" ); i.hasNext(); )
{
Element altElement = (Element) i.next();
AltView.CreationMode mode = AltView.parseMode(getAttr(altElement, "mode", ""), AltViewIFace.CreationMode.VIEW);
String altName = altElement.attributeValue(NAME);
String viewDefName = altElement.attributeValue("viewdef");
String title = altElement.attributeValue(TITLE);
boolean isValidated = getAttr(altElement, "validated", mode == AltViewIFace.CreationMode.EDIT);
boolean isDefault = getAttr(altElement, "default", false);
// Make sure we only have one default view
if (defaultAltView != null && isDefault)
{
isDefault = false;
}
// Check to make sure all the AlViews have different names.
Boolean nameExists = nameCheckHash.get(altName);
if (nameExists == null) // no need to check the boolean
{
AltView altView = new AltView(view, altName, title, mode, isValidated, isDefault, null); // setting a null viewdef
altViewsViewDefName.put(altView, viewDefName);
if (StringUtils.isNotEmpty(selectorName))
{
altView.setSelectorName(selectorName);
String selectorValue = altElement.attributeValue("selector_value");
if (StringUtils.isNotEmpty(selectorValue))
{
altView.setSelectorValue(selectorValue);
} else
{
FormDevHelper.appendFormDevError("Selector Value is missing for viewDefName["+viewDefName+"] altName["+altName+"]");
}
}
if (defaultAltView == null && isDefault)
{
defaultAltView = altView;
}
view.addAltView(altView);
nameCheckHash.put(altName, true);
} else
{
log.error("The altView name["+altName+"] already exists!");
}
nameCheckHash.clear(); // why not?
}
// No default Alt View was indicated, so choose the first one (if there is one)
if (defaultAltView == null && view.getAltViews() != null && view.getAltViews().size() > 0)
{
view.getAltViews().get(0).setDefault(true);
}
}
return view;
}
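    /*
     * Editor's note: a minimal sketch of the XML shape createView() expects, inferred
     * from the attribute names read above (element and attribute values are illustrative only):
     *
     *   <view name="MyView" class="com.example.MyDataObj" busrules="..." isinternal="true">
     *     <desc>Optional description</desc>
     *     <altviews selector="...">
     *       <altview name="MyView View" viewdef="MyViewDef" title="View" mode="view" default="true"/>
     *       <altview name="MyView Edit" viewdef="MyViewDef" title="Edit" mode="edit" validated="true"/>
     *     </altviews>
     *   </view>
     */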
/**
* Creates a ViewDef
* @param element the element to build the ViewDef from
* @return a viewdef
* @throws Exception
*/
private static ViewDef createViewDef(final Element element) throws Exception
{
String name = element.attributeValue(NAME);
String className = element.attributeValue(CLASSNAME);
String gettableClassName = element.attributeValue(GETTABLE);
String settableClassName = element.attributeValue(SETTABLE);
String desc = getDesc(element);
String resLabels = getAttr(element, RESOURCELABELS, "false");
boolean useResourceLabels = resLabels.equals("true");
if (isEmpty(name))
{
FormDevHelper.appendFormDevError("Name is null for element["+element.asXML()+"]");
return null;
}
if (isEmpty(className))
{
FormDevHelper.appendFormDevError("className is null. name["+name+"] for element["+element.asXML()+"]");
return null;
}
if (isEmpty(gettableClassName))
{
FormDevHelper.appendFormDevError("gettableClassName Name is null.name["+name+"] classname["+className+"]");
return null;
}
DBTableInfo tableinfo = DBTableIdMgr.getInstance().getByClassName(className);
ViewDef.ViewType type = null;
try
{
type = ViewDefIFace.ViewType.valueOf(element.attributeValue(TYPE));
} catch (Exception ex)
{
String msg = "view["+name+"] has illegal type["+element.attributeValue(TYPE)+"]";
log.error(msg, ex);
FormDevHelper.appendFormDevError(msg, ex);
return null;
}
ViewDef viewDef = null;//new ViewDef(type, name, className, gettableClassName, settableClassName, desc);
switch (type)
{
case rstable:
case formtable :
case form :
viewDef = createFormViewDef(element, type, name, className, gettableClassName, settableClassName, desc, useResourceLabels, tableinfo);
break;
case table :
//view = createTableView(element, id, name, className, gettableClassName, settableClassName,
// desc, instance.doingResourceLabels, isValidated);
break;
case field :
//view = createFormView(FormView.ViewType.field, element, id, name, gettableClassName, settableClassName,
// className, desc, instance.doingResourceLabels, isValidated);
break;
case iconview:
viewDef = createIconViewDef(type, name, className, gettableClassName, settableClassName, desc, useResourceLabels);
break;
}
return viewDef;
}
/**
* Gets the optional description text
* @param element the parent element of the desc node
* @return the string of the text or null
*/
protected static String getDesc(final Element element)
{
String desc = null;
Element descElement = (Element)element.selectSingleNode(DESC);
if (descElement != null)
{
desc = descElement.getTextTrim();
}
return desc;
}
/**
     * Fills the hashtable with all the views from the DOM document
     * @param doc the DOM document conforming to form.xsd
     * @param views the hashtable to be filled
     * @param altViewsViewDefName maps each AltView to the name of its ViewDef
     * @return the view set name
     * @throws Exception for duplicate view set names or if a Form ID is not unique
*/
public static String getViews(final Element doc,
final Hashtable<String, ViewIFace> views,
final Hashtable<AltViewIFace, String> altViewsViewDefName) throws Exception
{
instance.viewSetName = doc.attributeValue(NAME);
/*
System.err.println("#################################################");
System.err.println("# "+instance.viewSetName);
System.err.println("#################################################");
*/
Element viewsElement = (Element)doc.selectSingleNode("views");
if (viewsElement != null)
{
for ( Iterator<?> i = viewsElement.elementIterator( "view" ); i.hasNext(); )
{
Element element = (Element) i.next(); // assume element is NOT null, if it is null it will cause an exception
ViewIFace view = createView(element, altViewsViewDefName);
if (view != null)
{
if (views.get(view.getName()) == null)
{
views.put(view.getName(), view);
} else
{
String msg = "View Set ["+instance.viewSetName+"] ["+view.getName()+"] is not unique.";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
}
}
}
}
return instance.viewSetName;
}
/**
     * Fills the hashtable with all the view definitions from the DOM document
     * @param doc the DOM document conforming to form.xsd
     * @param viewDefs the hashtable to be filled
     * @param views the views hashtable (currently unused)
* @param doMapDefinitions tells it to map and clone the definitions for formtables (use false for the FormEditor)
* @return the viewset name
* @throws Exception for duplicate view set names or if a ViewDef name is not unique
*/
public static String getViewDefs(final Element doc,
final Hashtable<String, ViewDefIFace> viewDefs,
@SuppressWarnings("unused") final Hashtable<String, ViewIFace> views,
final boolean doMapDefinitions) throws Exception
{
colDefType = AppPreferences.getLocalPrefs().get("ui.formatting.formtype", UIHelper.getOSTypeAsStr());
instance.viewSetName = doc.attributeValue(NAME);
Element viewDefsElement = (Element)doc.selectSingleNode("viewdefs");
if (viewDefsElement != null)
{
for ( Iterator<?> i = viewDefsElement.elementIterator( "viewdef" ); i.hasNext(); )
{
Element element = (Element) i.next(); // assume element is NOT null, if it is null it will cause an exception
ViewDef viewDef = createViewDef(element);
if (viewDef != null)
{
if (viewDefs.get(viewDef.getName()) == null)
{
viewDefs.put(viewDef.getName(), viewDef);
} else
{
String msg = "View Set ["+instance.viewSetName+"] the View Def Name ["+viewDef.getName()+"] is not unique.";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
}
}
}
if (doMapDefinitions)
{
mapDefinitionViewDefs(viewDefs);
}
}
return instance.viewSetName;
}
/**
* Re-maps and clones the definitions.
* @param viewDefs the hash table to be mapped
* @throws Exception
*/
public static void mapDefinitionViewDefs(final Hashtable<String, ViewDefIFace> viewDefs) throws Exception
{
// Now that all the definitions have been read in
// cycle thru and have all the tableform objects clone there definitions
for (ViewDefIFace viewDef : new Vector<ViewDefIFace>(viewDefs.values()))
{
if (viewDef.getType() == ViewDefIFace.ViewType.formtable)
{
String viewDefName = ((FormViewDefIFace)viewDef).getDefinitionName();
if (viewDefName != null)
{
//log.debug(viewDefName);
ViewDefIFace actualDef = viewDefs.get(viewDefName);
if (actualDef != null)
{
viewDefs.remove(viewDef.getName());
actualDef = (ViewDef)actualDef.clone();
actualDef.setType(ViewDefIFace.ViewType.formtable);
actualDef.setName(viewDef.getName());
viewDefs.put(actualDef.getName(), actualDef);
} else
{
String msg = "Couldn't find the ViewDef for formtable definition name["+((FormViewDefIFace)viewDef).getDefinitionName()+"]";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
}
}
}
}
}
/**
     * Processes the enable rules for an element.
     * @param element the parent element containing the enableRules node
     * @return a map of rule id to rule expression
*/
public static Hashtable<String, String> getEnableRules(final Element element)
{
Hashtable<String, String> rulesList = new Hashtable<String, String>();
if (element != null)
{
Element enableRules = (Element)element.selectSingleNode("enableRules");
if (enableRules != null)
{
// iterate through child elements of root with element name "foo"
for ( Iterator<?> i = enableRules.elementIterator( "rule" ); i.hasNext(); )
{
Element ruleElement = (Element) i.next();
String id = getAttr(ruleElement, ID, "");
if (isNotEmpty(id))
{
rulesList.put(id, ruleElement.getTextTrim());
} else
{
String msg = "The name is missing for rule["+ruleElement.getTextTrim()+"] is missing.";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
}
}
}
} else
{
log.error("View Set ["+instance.viewSetName+"] element ["+element+"] is null.");
}
return rulesList;
}
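    /*
     * Editor's note: the enable rules element parsed above is expected to look roughly
     * like the following (ids and expressions are illustrative only):
     *
     *   <enableRules>
     *     <rule id="someComponentId">someOtherComponentId.isNotEmpty()</rule>
     *   </enableRules>
     *
     * Each rule id is mapped to the trimmed rule text; rules without an id are reported
     * as form development errors.
     */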
/**
* Gets the string (or creates one) from a columnDef
* @param element the DOM element to process
* @param attrName the name of the element to go get all the elements (strings) from
* @param numRows the number of rows
     * @param item the JGoodies definition item to be filled in
* @return the String representing the column definition for JGoodies
*/
protected static String createDef(final Element element,
final String attrName,
final int numRows,
final FormViewDef.JGDefItem item)
{
Element cellDef = null;
if (attrName.equals("columnDef"))
{
// For columnDef(s) we can mark one or more as being platform specific
// but if we can't find a default one (no 'os' defined)
// then we ultimately pick the first one.
List<?> list = element.selectNodes(attrName);
if (list.size() == 1)
{
cellDef = (Element)list.get(0); // pick the first one if there is only one.
} else
{
String osTypeStr = UIHelper.getOSTypeAsStr();
Element defCD = null;
Element defOSCD = null;
Element ovrOSCD = null;
for (Object obj : list)
{
Element ce = (Element)obj;
String osType = getAttr(ce, "os", null);
if (osType == null)
{
defCD = ce; // ok we found the default one
} else
{
if (osType.equals(osTypeStr))
{
defOSCD = ce; // we found the matching our OS
}
if (colDefType != null && osType.equals(colDefType))
{
ovrOSCD = ce; // we found the one matching prefs
}
}
}
if (ovrOSCD != null)
{
cellDef = ovrOSCD;
} else if (defOSCD != null)
{
cellDef = defOSCD;
} else if (defCD != null)
{
cellDef = defCD;
} else
{
// ok, we couldn't find one for our platform, so use the default
// or pick the first one.
cellDef = (Element)list.get(0);
}
}
} else
{
// this is for rowDef
cellDef = (Element)element.selectSingleNode(attrName);
}
if (cellDef != null)
{
String cellText = cellDef.getText();
String cellStr = getAttr(cellDef, "cell", null);
String sepStr = getAttr(cellDef, "sep", null);
item.setDefStr(cellText);
item.setCellDefStr(cellStr);
item.setSepDefStr(sepStr);
if (StringUtils.isNotEmpty(cellStr) && StringUtils.isNotEmpty(sepStr))
{
boolean auto = getAttr(cellDef, "auto", false);
item.setAuto(auto);
if (auto)
{
String autoStr = createDuplicateJGoodiesDef(cellStr, sepStr, numRows) +
(StringUtils.isNotEmpty(cellText) ? ("," + cellText) : "");
item.setDefStr(autoStr);
return autoStr;
}
// else
FormDevHelper.appendFormDevError("Element ["+element.getName()+"] Cell or Sep is null for 'dup' or 'auto 'on column def.");
return "";
}
// else
item.setAuto(false);
return cellText;
}
// else
String msg = "Element ["+element.getName()+"] must have a columnDef";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
return "";
}
/**
* Returns the resource string for the label key when resource labels are in use, otherwise the label itself
* @param label the label or the label key
* @return the resolved label, or an empty string for blank input
*/
protected static String getResourceLabel(final String label)
{
if (isNotEmpty(label) && StringUtils.deleteWhitespace(label).length() > 0)
{
return instance.doingResourceLabels ? getResourceString(label) : label;
}
// else
return "";
}
/**
* Returns a Label from the cell and gets the resource string for it if necessary
* @param cellElement the cell
* @return the localized string (if necessary)
*/
protected static String getLabel(final Element cellElement)
{
String lbl = getAttr(cellElement, LABEL, null);
if (lbl == null || lbl.equals("##"))
{
return "##";
}
return getResourceLabel(lbl);
}
/**
* Processes all the rows
* @param element the parent DOM element of the rows
* @param cellRows the list the rows are to be added to
*/
protected static void processRows(final Element element,
final List<FormRowIFace> cellRows,
final DBTableInfo tableinfo)
{
Element rowsElement = (Element)element.selectSingleNode("rows");
if (rowsElement != null)
{
byte rowNumber = 0;
for ( Iterator<?> i = rowsElement.elementIterator( "row" ); i.hasNext(); )
{
Element rowElement = (Element) i.next();
FormRow formRow = new FormRow();
formRow.setRowNumber(rowNumber);
for ( Iterator<?> cellIter = rowElement.elementIterator( "cell" ); cellIter.hasNext(); )
{
Element cellElement = (Element)cellIter.next();
String cellId = getAttr(cellElement, ID, "");
String cellName = getAttr(cellElement, NAME, cellId); // let the name default to the id if it doesn't have a name
int colspan = getAttr(cellElement, "colspan", 1);
int rowspan = getAttr(cellElement, "rowspan", 1);
/*boolean isReq = getAttr(cellElement, ISREQUIRED, false);
if (isReq)
{
System.err.println(String.format("%s\t%s\t%s\t%s", gViewDef.getName(), cellId, cellName, tableinfo != null ? tableinfo.getTitle() : "N/A"));
}*/
FormCell.CellType cellType = null;
FormCellIFace cell = null;
try
{
cellType = FormCellIFace.CellType.valueOf(cellElement.attributeValue(TYPE));
} catch (java.lang.IllegalArgumentException ex)
{
FormDevHelper.appendFormDevError(ex.toString());
FormDevHelper.appendFormDevError(String.format("Cell Name[%s] Id[%s] Type[%s]", cellName, cellId, cellElement.attributeValue(TYPE)));
return;
}
if (doFieldVerification &&
fldVerTableInfo != null &&
cellType == FormCellIFace.CellType.field &&
StringUtils.isNotEmpty(cellId) &&
!cellName.equals("this"))
{
processFieldVerify(cellName, cellId, rowNumber);
}
switch (cellType)
{
case label:
{
cell = formRow.addCell(new FormCellLabel(cellId,
cellName,
getLabel(cellElement),
getAttr(cellElement, "labelfor", ""),
getAttr(cellElement, "icon", null),
getAttr(cellElement, "recordobj", false),
colspan));
String initialize = getAttr(cellElement, INITIALIZE, null);
if (StringUtils.isNotEmpty(initialize))
{
cell.setProperties(UIHelper.parseProperties(initialize));
}
break;
}
case separator:
{
cell = formRow.addCell(new FormCellSeparator(cellId,
cellName,
getLabel(cellElement),
getAttr(cellElement, "collapse", ""),
colspan));
String initialize = getAttr(cellElement, INITIALIZE, null);
if (StringUtils.isNotEmpty(initialize))
{
cell.setProperties(UIHelper.parseProperties(initialize));
}
break;
}
case field:
{
String uitypeStr = getAttr(cellElement, "uitype", "");
String format = getAttr(cellElement, "format", "");
String formatName = getAttr(cellElement, "formatname", "");
String uiFieldFormatterName = getAttr(cellElement, "uifieldformatter", "");
int cols = getAttr(cellElement, "cols", DEFAULT_COLS); // XXX PREF for default width of text field
int rows = getAttr(cellElement, "rows", DEFAULT_ROWS); // XXX PREF for default heightof text area
String validationType = getAttr(cellElement, "valtype", "Changed");
String validationRule = getAttr(cellElement, VALIDATION, "");
String initialize = getAttr(cellElement, INITIALIZE, "");
boolean isRequired = getAttr(cellElement, ISREQUIRED, false);
String pickListName = getAttr(cellElement, "picklist", "");
if (isNotEmpty(format) && isNotEmpty(formatName))
{
String msg = "Both format and formatname cannot both be set! ["+cellName+"] ignoring format";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
format = "";
}
Properties properties = UIHelper.parseProperties(initialize);
if (isEmpty(uitypeStr))
{
// XXX DEBUG ONLY Please REMOVE LATER
//log.debug("***************************************************************************");
//log.debug("***** Cell Id["+cellId+"] Name["+cellName+"] uitype is empty and should be 'text'. (Please Fix!)");
//log.debug("***************************************************************************");
uitypeStr = "text";
}
// This switch is used to get the "display type" and
// set up other vars needed for creating the controls
FormCellFieldIFace.FieldType uitype = null;
try
{
uitype = FormCellFieldIFace.FieldType.valueOf(uitypeStr);
} catch (java.lang.IllegalArgumentException ex)
{
FormDevHelper.appendFormDevError(ex.toString());
FormDevHelper.appendFormDevError(String.format("Cell Name[%s] Id[%s] uitype[%s] is in error", cellName, cellId, uitypeStr));
uitype = FormCellFieldIFace.FieldType.text; // default to text
}
String dspUITypeStr = null;
switch (uitype)
{
case textarea:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, "dsptextarea");
break;
case textareabrief:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, "textareabrief");
break;
case querycbx:
{
dspUITypeStr = getAttr(cellElement, DSPUITYPE, "textfieldinfo");
String fmtName = TypeSearchForQueryFactory.getInstance().getDataObjFormatterName(properties.getProperty("name"));
if (isEmpty(formatName) && isNotEmpty(fmtName))
{
formatName = fmtName;
}
break;
}
case formattedtext:
{
validationRule = getAttr(cellElement, VALIDATION, "formatted"); // XXX Is this OK?
dspUITypeStr = getAttr(cellElement, DSPUITYPE, "formattedtext");
//-------------------------------------------------------
// This part should be moved to the ViewFactory
// because it is the only part that needs the Schema Information
//-------------------------------------------------------
if (isNotEmpty(uiFieldFormatterName))
{
UIFieldFormatterIFace uiFormatter = UIFieldFormatterMgr.getInstance().getFormatter(uiFieldFormatterName);
if (uiFormatter == null)
{
String msg = "Couldn't find formatter["+uiFieldFormatterName+"]";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
uiFieldFormatterName = "";
uitype = FormCellFieldIFace.FieldType.text;
}
} else // ok now check the schema for the UI formatter
{
if (tableinfo != null)
{
DBFieldInfo fieldInfo = tableinfo.getFieldByName(cellName);
if (fieldInfo != null)
{
if (fieldInfo.getFormatter() != null)
{
uiFieldFormatterName = fieldInfo.getFormatter().getName();
} else if (fieldInfo.getDataClass().isAssignableFrom(Date.class) ||
fieldInfo.getDataClass().isAssignableFrom(Calendar.class))
{
String msg = "Missing Date Formatter for ["+cellName+"]";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
uiFieldFormatterName = "Date";
UIFieldFormatterIFace uiFormatter = UIFieldFormatterMgr.getInstance().getFormatter(uiFieldFormatterName);
if (uiFormatter == null)
{
uiFieldFormatterName = "";
uitype = FormCellFieldIFace.FieldType.text;
}
} else
{
uiFieldFormatterName = "";
uitype = FormCellFieldIFace.FieldType.text;
}
}
}
}
break;
}
case url:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, uitypeStr);
properties = UIHelper.parseProperties(initialize);
break;
case list:
case image:
case tristate:
case checkbox:
case password:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, uitypeStr);
break;
case plugin:
case button:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, uitypeStr);
properties = UIHelper.parseProperties(initialize);
String ttl = properties.getProperty(TITLE);
if (ttl != null)
{
properties.put(TITLE, getResourceLabel(ttl));
}
break;
case spinner:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, "dsptextfield");
properties = UIHelper.parseProperties(initialize);
break;
case combobox:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, "textpl");
if (tableinfo != null)
{
DBFieldInfo fieldInfo = tableinfo.getFieldByName(cellName);
if (fieldInfo != null)
{
if (StringUtils.isNotEmpty(pickListName))
{
fieldInfo.setPickListName(pickListName);
} else
{
pickListName = fieldInfo.getPickListName();
}
}
}
break;
default:
dspUITypeStr = getAttr(cellElement, DSPUITYPE, "dsptextfield");
break;
} //switch
FormCellFieldIFace.FieldType dspUIType;
try
{
dspUIType = FormCellFieldIFace.FieldType.valueOf(dspUITypeStr);
} catch (java.lang.IllegalArgumentException ex)
{
FormDevHelper.appendFormDevError(ex.toString());
FormDevHelper.appendFormDevError(String.format("Cell Name[%s] Id[%s] dspuitype[%s] is in error", cellName, cellId, dspUITypeStr));
dspUIType = FormCellFieldIFace.FieldType.label; // fall back to a label display type
}
// check to see if the validation is a node in the cell
if (isEmpty(validationRule))
{
Element valNode = (Element)cellElement.selectSingleNode(VALIDATION);
if (valNode != null)
{
String str = valNode.getTextTrim();
if (isNotEmpty(str))
{
validationRule = str;
}
}
}
boolean isEncrypted = getAttr(cellElement, "isencrypted", false);
boolean isReadOnly = uitype == FormCellFieldIFace.FieldType.dsptextfield ||
uitype == FormCellFieldIFace.FieldType.dsptextarea ||
uitype == FormCellFieldIFace.FieldType.label;
FormCellField field = new FormCellField(FormCellIFace.CellType.field, cellId,
cellName, uitype, dspUIType, format, formatName, uiFieldFormatterName, isRequired,
cols, rows, colspan, rowspan, validationType, validationRule, isEncrypted);
String labelStr = uitype == FormCellFieldIFace.FieldType.checkbox ? getLabel(cellElement) : getAttr(cellElement, "label", "");
field.setLabel(labelStr);
field.setReadOnly(getAttr(cellElement, "readonly", isReadOnly));
field.setDefaultValue(getAttr(cellElement, "default", ""));
field.setPickListName(pickListName);
field.setChangeListenerOnly(getAttr(cellElement, "changesonly", true) && !isRequired);
field.setProperties(properties);
cell = formRow.addCell(field);
break;
}
case command:
{
cell = formRow.addCell(new FormCellCommand(cellId, cellName,
getLabel(cellElement),
getAttr(cellElement, "commandtype", ""),
getAttr(cellElement, "action", "")));
String initialize = getAttr(cellElement, INITIALIZE, null);
if (StringUtils.isNotEmpty(initialize))
{
cell.setProperties(UIHelper.parseProperties(initialize));
}
break;
}
case panel:
{
FormCellPanel cellPanel = new FormCellPanel(cellId, cellName,
getAttr(cellElement, "paneltype", ""),
getAttr(cellElement, "coldef", "p"),
getAttr(cellElement, "rowdef", "p"),
colspan, rowspan);
String initialize = getAttr(cellElement, INITIALIZE, null);
if (StringUtils.isNotEmpty(initialize))
{
cellPanel.setProperties(UIHelper.parseProperties(initialize));
}
processRows(cellElement, cellPanel.getRows(), tableinfo);
fixLabels(cellPanel.getName(), cellPanel.getRows(), tableinfo);
cell = formRow.addCell(cellPanel);
break;
}
case subview:
{
Properties properties = UIHelper.parseProperties(getAttr(cellElement, INITIALIZE, null));
String svViewSetName = cellElement.attributeValue("viewsetname");
if (isEmpty(svViewSetName))
{
svViewSetName = null;
}
if (instance.doingResourceLabels && properties != null)
{
String title = properties.getProperty(TITLE);
if (title != null)
{
properties.setProperty(TITLE, UIRegistry.getResourceString(title));
}
}
String viewName = getAttr(cellElement, "viewname", null);
cell = formRow.addCell(new FormCellSubView(cellId,
cellName,
svViewSetName,
viewName,
cellElement.attributeValue("class"),
getAttr(cellElement, "desc", ""),
getAttr(cellElement, "defaulttype", null),
getAttr(cellElement, "rows", DEFAULT_SUBVIEW_ROWS),
colspan,
rowspan,
getAttr(cellElement, "single", false)));
cell.setProperties(properties);
break;
}
case iconview:
{
String vsName = cellElement.attributeValue("viewsetname");
if (isEmpty(vsName))
{
vsName = instance.viewSetName;
}
String viewName = getAttr(cellElement, "viewname", null);
cell = formRow.addCell(new FormCellSubView(cellId, cellName,
vsName,
viewName,
cellElement.attributeValue("class"),
getAttr(cellElement, "desc", ""),
colspan,
rowspan));
break;
}
case statusbar:
{
cell = formRow.addCell(new FormCell(FormCellIFace.CellType.statusbar, cellId, cellName, colspan, rowspan));
break;
}
default:
{
// what is this?
log.error("Encountered unknown cell type");
continue;
}
} // switch
cell.setIgnoreSetGet(getAttr(cellElement, "ignore", false));
}
cellRows.add(formRow);
rowNumber++;
}
}
}
/**
* @param cellName
* @param cellId
* @param rowNumber
*/
private static void processFieldVerify(final String cellName, final String cellId, final int rowNumber)
{
try
{
boolean isOK = false;
if (StringUtils.contains(cellName, '.'))
{
DBTableInfo tblInfo = fldVerTableInfo;
String[] fieldNames = StringUtils.split(cellName, ".");
for (int i=0;i<fieldNames.length-1;i++)
{
String type = null;
DBTableChildIFace child = tblInfo.getItemByName(fieldNames[i]);
if (child instanceof DBFieldInfo)
{
DBFieldInfo fldInfo = (DBFieldInfo)child;
type = fldInfo.getType();
if (type != null)
{
DBTableInfo tInfo = DBTableIdMgr.getInstance().getByClassName(type);
tblInfo = tInfo != null ? tInfo : tblInfo;
}
isOK = tblInfo.getItemByName(fieldNames[fieldNames.length-1]) != null;
} else if (child instanceof DBRelationshipInfo)
{
DBRelationshipInfo relInfo = (DBRelationshipInfo)child;
type = relInfo.getDataClass().getName();
if (type != null)
{
tblInfo = DBTableIdMgr.getInstance().getByClassName(type);
}
}
//System.out.println(type);
}
if (tblInfo != null)
{
isOK = tblInfo.getItemByName(fieldNames[fieldNames.length-1]) != null;
}
} else
{
isOK = fldVerTableInfo.getItemByName(cellName) != null;
}
if (!isOK)
{
String msg = " ViewSet["+instance.viewSetName+"]\n ViewDef["+fldVerFormViewDef.getName()+"]\n The cell name ["+cellName+"] for cell with Id ["+cellId+"] is not a field\n in Data Object["+fldVerTableInfo.getName()+"]\n on Row ["+rowNumber+"]";
if (!isTreeClass)
{
instance.fldVerTableModel.addRow(instance.viewSetName, fldVerFormViewDef.getName(), cellId, cellName, Integer.toString(rowNumber));
}
log.error(msg);
}
} catch (Exception ex)
{
log.error(ex);
}
}
/**
* @param element the DOM element for building the form
* @param type the type of form to be built
* @param name the name of the form
* @param className the class name of the data object
* @param gettableClassName the class name of the getter
* @param settableClassName the class name of the setter
* @param desc the description
* @param useResourceLabels whether to use resource labels
* @param tableinfo table info
* @return a form view of type "form"
*/
protected static FormViewDef createFormViewDef(final Element element,
final ViewDef.ViewType type,
final String name,
final String className,
final String gettableClassName,
final String settableClassName,
final String desc,
final boolean useResourceLabels,
final DBTableInfo tableinfo)
{
FormViewDef formViewDef = new FormViewDef(type, name, className, gettableClassName, settableClassName, desc,
useResourceLabels, XMLHelper.getAttr(element, "editableDlg", true));
fldVerTableInfo = null;
if (type != ViewDefIFace.ViewType.formtable)
{
if (doFieldVerification)
{
if (instance.fldVerTableModel == null)
{
instance.createFieldVerTableModel();
}
try
{
//log.debug(className);
Class<?> classObj = Class.forName(className);
if (FormDataObjIFace.class.isAssignableFrom(classObj))
{
fldVerTableInfo = DBTableIdMgr.getInstance().getByClassName(className);
isTreeClass = fldVerTableInfo != null && fldVerTableInfo.getFieldByName("highestChildNodeNumber") != null;
fldVerFormViewDef = formViewDef;
}
} catch (ClassNotFoundException ex)
{
String msg = "ClassNotFoundException["+className+"] Name["+name+"]";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
//edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
//edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(ViewLoader.class, comments, ex);
} catch (Exception ex)
{
edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(ViewLoader.class, ex);
}
}
List<FormRowIFace> rows = formViewDef.getRows();
instance.doingResourceLabels = useResourceLabels;
//gViewDef = formViewDef;
processRows(element, rows, tableinfo);
instance.doingResourceLabels = false;
createDef(element, "columnDef", rows.size(), formViewDef.getColumnDefItem());
createDef(element, "rowDef", rows.size(), formViewDef.getRowDefItem());
formViewDef.setEnableRules(getEnableRules(element));
fixLabels(formViewDef.getName(), rows, tableinfo);
} else
{
Node defNode = element.selectSingleNode("definition");
if (defNode != null) {
String defName = defNode.getText();
if (StringUtils.isNotEmpty(defName)) {
formViewDef.setDefinitionName(defName);
return formViewDef;
}
}
String msg = "formtable is missing or has empty <defintion> node";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
return null;
}
return formViewDef;
}
/**
* @param fieldName
* @param tableInfo
* @return
*/
protected static String getTitleFromFieldName(final String fieldName,
final DBTableInfo tableInfo)
{
DBTableChildIFace derivedCI = null;
if (fieldName.indexOf(".") > -1)
{
derivedCI = FormHelper.getChildInfoFromPath(fieldName, tableInfo);
if (derivedCI == null)
{
String msg = "The name 'path' ["+fieldName+"] was not valid in ViewSet ["+instance.viewSetName+"]";
FormDevHelper.appendFormDevError(msg);
log.error(msg);
return "";
}
}
DBTableChildIFace tblChild = derivedCI != null ? derivedCI : tableInfo.getItemByName(fieldName);
if (tblChild == null)
{
String msg = "The Field Name ["+fieldName+"] was not in the Table ["+tableInfo.getTitle()+"] in ViewSet ["+instance.viewSetName+"]";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
return "";
}
return tblChild.getTitle();
}
/**
* @param rows
* @param tableInfo
*/
protected static void fixLabels(final String name,
final List<FormRowIFace> rows,
final DBTableInfo tableInfo)
{
if (tableInfo == null)
{
return;
}
Hashtable<String, String> fldIdMap = new Hashtable<String, String>();
for (FormRowIFace row : rows)
{
for (FormCellIFace cell : row.getCells())
{
if (cell.getType() == FormCellIFace.CellType.field ||
cell.getType() == FormCellIFace.CellType.subview)
{
fldIdMap.put(cell.getIdent(), cell.getName());
}/* else
{
System.err.println("Skipping ["+cell.getIdent()+"] " + cell.getType());
}*/
}
}
for (FormRowIFace row : rows)
{
for (FormCellIFace cell : row.getCells())
{
if (cell.getType() == FormCellIFace.CellType.label)
{
FormCellLabelIFace lblCell = (FormCellLabelIFace)cell;
String label = lblCell.getLabel();
if (label.length() == 0 || label.equals("##"))
{
String idFor = lblCell.getLabelFor();
if (StringUtils.isNotEmpty(idFor))
{
String fieldName = fldIdMap.get(idFor);
if (StringUtils.isNotEmpty(fieldName))
{
if (!fieldName.equals("this"))
{
//FormCellFieldIFace fcf = get
lblCell.setLabel(getTitleFromFieldName(fieldName, tableInfo));
}
} else
{
String msg = "Setting Label - Form control with id["+idFor+"] is not in ViewDef or Panel ["+name+"] in ViewSet ["+instance.viewSetName+"]";
log.error(msg);
FormDevHelper.appendFormDevError(msg);
}
}
}
} else if (cell.getType() == FormCellIFace.CellType.field && cell instanceof FormCellFieldIFace &&
((((FormCellFieldIFace)cell).getUiType() == FormCellFieldIFace.FieldType.checkbox) ||
(((FormCellFieldIFace)cell).getUiType() == FormCellFieldIFace.FieldType.tristate)))
{
FormCellFieldIFace fcf = (FormCellFieldIFace)cell;
if (fcf.getLabel().equals("##"))
{
fcf.setLabel(getTitleFromFieldName(cell.getName(), tableInfo));
}
}
}
}
}
/**
* @param type the type of form to be built
* @param name the name of the form
* @param className the class name of the data object
* @param gettableClassName the class name of the getter
* @param settableClassName the class name of the setter
* @param desc the description
* @param useResourceLabels whether to use resource labels
* @return a form view of type "form"
*/
protected static ViewDef createIconViewDef(final ViewDef.ViewType type,
final String name,
final String className,
final String gettableClassName,
final String settableClassName,
final String desc,
final boolean useResourceLabels)
{
ViewDef formView = new ViewDef(type, name, className, gettableClassName, settableClassName, desc, useResourceLabels);
//formView.setEnableRules(getEnableRules(element));
return formView;
}
/**
* Creates a Table Form View
* @param element the DOM element for building the form
* @param name the name of the form
* @param className the class name of the data object
* @param gettableClassName the class name of the getter
* @param settableClassName the class name of the setter
* @param desc the description
* @param useResourceLabels whether to use resource labels
* @return a form view of type "table"
*/
protected static TableViewDefIFace createTableView(final Element element,
final String name,
final String className,
final String gettableClassName,
final String settableClassName,
final String desc,
final boolean useResourceLabels)
{
TableViewDefIFace tableView = new TableViewDef( name, className, gettableClassName, settableClassName, desc, useResourceLabels);
//tableView.setResourceLabels(resLabels);
Element columns = (Element)element.selectSingleNode("columns");
if (columns != null)
{
for ( Iterator<?> i = columns.elementIterator( "column" ); i.hasNext(); ) {
Element colElement = (Element) i.next();
FormColumn column = new FormColumn(colElement.attributeValue(NAME),
colElement.attributeValue(LABEL),
getAttr(colElement, "dataobjformatter", null),
getAttr(colElement, "format", null)
);
tableView.addColumn(column);
}
}
return tableView;
}
/**
* Save out a viewSet to a file
* @param viewSet the viewSet to save
* @param filename the filename (full path) as to where to save it
*/
public static void save(final ViewSet viewSet, final String filename)
{
try
{
Vector<ViewSet> viewsets = new Vector<ViewSet>();
viewsets.add(viewSet);
File file = new File(filename);
FileWriter fw = new FileWriter(file);
fw.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
BeanWriter beanWriter = new BeanWriter(fw);
XMLIntrospector introspector = beanWriter.getXMLIntrospector();
introspector.getConfiguration().setWrapCollectionsInElement(false);
beanWriter.getBindingConfiguration().setMapIDs(false);
beanWriter.setWriteEmptyElements(false);
beanWriter.enablePrettyPrint();
beanWriter.write(viewSet);
fw.close();
} catch(Exception ex)
{
log.error("error writing views", ex);
}
}
//--------------------------------------------------------------------------------------------
//-- Field Verify Methods, Classes, Helpers
//--------------------------------------------------------------------------------------------
public void createFieldVerTableModel()
{
fldVerTableModel = new FieldVerifyTableModel();
}
/**
* @return the doFieldVerification
*/
public static boolean isDoFieldVerification()
{
return doFieldVerification;
}
/**
* @param doFieldVerification the doFieldVerification to set
*/
public static void setDoFieldVerification(boolean doFieldVerification)
{
ViewLoader.doFieldVerification = doFieldVerification;
}
public static void clearFieldVerInfo()
{
if (instance.fldVerTableModel != null)
{
instance.fldVerTableModel.clear();
}
}
/**
* Displays the dialog that lists field names found on forms but not in the database.
*/
public static void displayFieldVerInfo()
{
if (verifyDlg != null)
{
verifyDlg.setVisible(false);
verifyDlg.dispose();
verifyDlg = null;
}
System.err.println("------------- "+(instance.fldVerTableModel != null ? instance.fldVerTableModel.getRowCount() : "null"));
if (instance.fldVerTableModel != null && instance.fldVerTableModel.getRowCount() > 0)
{
JLabel lbl = UIHelper.createLabel("<html><i>(Some of the fields are special buttons or label names. Review them to make sure you have not <br>mis-named any of the fields you are working with.)");
final JTable table = new JTable(instance.fldVerTableModel);
UIHelper.calcColumnWidths(table);
CellConstraints cc = new CellConstraints();
JScrollPane sp = new JScrollPane(table, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED);
PanelBuilder pb = new PanelBuilder(new FormLayout("f:p:g", "f:p:g,4px,p"));
pb.add(sp, cc.xy(1, 1));
pb.add(lbl, cc.xy(1, 3));
pb.setDefaultDialogBorder();
verifyDlg = new CustomFrame("Field Names on Form, but not in Database : "+instance.fldVerTableModel.getRowCount(), CustomFrame.OK_BTN, pb.getPanel())
{
@Override
protected void okButtonPressed()
{
super.okButtonPressed();
table.setModel(new DefaultTableModel());
dispose();
verifyDlg = null;
}
};
verifyDlg.setOkLabel(getResourceString("CLOSE"));
verifyDlg.createUI();
verifyDlg.setVisible(true);
}
}
class FieldVerifyTableModel extends DefaultTableModel
{
protected Vector<List<String>> rowData = new Vector<List<String>>();
protected String[] colNames = {"ViewSet", "View Def", "Cell Id", "Cell Name", "Row"};
protected Hashtable<String, Boolean> nameHash = new Hashtable<String, Boolean>();
public FieldVerifyTableModel()
{
super();
}
public void clear()
{
for (List<String> list : rowData)
{
list.clear();
}
rowData.clear();
nameHash.clear();
}
public void addRow(final String viewSet,
final String viewDef,
final String cellId,
final String cellName,
final String rowInx)
{
String key = viewSet + viewDef + cellId;
if (nameHash.get(key) == null)
{
List<String> row = new ArrayList<String>(5);
row.add(viewSet);
row.add(viewDef);
row.add(cellId);
row.add(cellName);
row.add(rowInx);
rowData.add(row);
nameHash.put(key, true);
}
}
/* (non-Javadoc)
* @see javax.swing.table.DefaultTableModel#getColumnCount()
*/
@Override
public int getColumnCount()
{
return colNames.length;
}
/* (non-Javadoc)
* @see javax.swing.table.DefaultTableModel#getColumnName(int)
*/
@Override
public String getColumnName(int column)
{
return colNames[column];
}
/* (non-Javadoc)
* @see javax.swing.table.DefaultTableModel#getRowCount()
*/
@Override
public int getRowCount()
{
return rowData == null ? 0 : rowData.size();
}
/* (non-Javadoc)
* @see javax.swing.table.DefaultTableModel#getValueAt(int, int)
*/
@Override
public Object getValueAt(int row, int column)
{
List<String> rowList = rowData.get(row);
return rowList.get(column);
}
}
}
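// Illustrative usage sketch (not part of the original source): driving the public static helpers
// of ViewLoader from caller code. The ViewSet instance is assumed to come from the application's
// view registry; only the ViewLoader calls themselves are taken from the class above.
//
// ViewLoader.setDoFieldVerification(true); // collect unknown field names while parsing
// ViewSet viewSet = ...; // obtained elsewhere
// ViewLoader.save(viewSet, "/tmp/views.xml"); // serialize the ViewSet to XML
// ViewLoader.displayFieldVerInfo(); // show any field-name problems that were found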
| specify/specify6 | src/edu/ku/brc/af/ui/forms/persist/ViewLoader.java | Java | gpl-2.0 | 72,771 |
/**
* OWASP Benchmark Project v1.1
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://www.owasp.org/index.php/Benchmark">https://www.owasp.org/index.php/Benchmark</a>.
*
* The Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details
*
* @author Nick Sanidas <a href="https://www.aspectsecurity.com">Aspect Security</a>
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/BenchmarkTest04982")
public class BenchmarkTest04982 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
org.owasp.benchmark.helpers.SeparateClassRequest scr = new org.owasp.benchmark.helpers.SeparateClassRequest( request );
String param = scr.getTheParameter("foo");
String bar;
String guess = "ABC";
char switchTarget = guess.charAt(1); // condition 'B', which is safe
// Simple case statement that assigns param to bar on conditions 'A' or 'C'
switch (switchTarget) {
case 'A':
bar = param;
break;
case 'B':
bar = "bob";
break;
case 'C':
case 'D':
bar = param;
break;
default:
bar = "bob's your uncle";
break;
}
Object[] obj = { bar, "b"};
response.getWriter().printf("notfoo",obj);
}
}
| iammyr/Benchmark | src/main/java/org/owasp/benchmark/testcode/BenchmarkTest04982.java | Java | gpl-2.0 | 2,239 |
/**************************************************************************************
* Copyright (C) 2008 EsperTech, Inc. All rights reserved. *
* http://esper.codehaus.org *
* http://www.espertech.com *
* ---------------------------------------------------------------------------------- *
* The software in this package is published under the terms of the GPL license *
* a copy of which has been included with this distribution in the license.txt file. *
**************************************************************************************/
package com.espertech.esper.epl.join.pollindex;
import com.espertech.esper.epl.join.table.EventTable;
import com.espertech.esper.epl.join.table.UnindexedEventTableList;
import com.espertech.esper.epl.join.table.PropertyIndexedEventTable;
import com.espertech.esper.client.EventBean;
import com.espertech.esper.client.EventType;
import java.util.Arrays;
import java.util.List;
/**
* Strategy for building an index out of poll-results knowing the properties to base the index on.
*/
public class PollResultIndexingStrategyIndex implements PollResultIndexingStrategy
{
private final int streamNum;
private final EventType eventType;
private final String[] propertyNames;
/**
* Ctor.
* @param streamNum is the stream number of the indexed stream
* @param eventType is the event type of the indexed stream
* @param propertyNames is the property names to be indexed
*/
public PollResultIndexingStrategyIndex(int streamNum, EventType eventType, String[] propertyNames)
{
this.streamNum = streamNum;
this.eventType = eventType;
this.propertyNames = propertyNames;
}
public EventTable index(List<EventBean> pollResult, boolean isActiveCache)
{
if (!isActiveCache)
{
return new UnindexedEventTableList(pollResult);
}
PropertyIndexedEventTable table = new PropertyIndexedEventTable(streamNum, eventType, propertyNames);
table.add(pollResult.toArray(new EventBean[pollResult.size()]));
return table;
}
public String toQueryPlan() {
return this.getClass().getSimpleName() + " properties " + Arrays.toString(propertyNames);
}
}
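// Illustrative usage sketch (not part of the original source): indexing a poll result with this
// strategy. The stream number, property names, the eventType and the pollResult list are example
// assumptions; only the constructor and index(...) signatures are taken from the class above.
//
// PollResultIndexingStrategy strategy =
// new PollResultIndexingStrategyIndex(1, eventType, new String[] {"symbol", "feed"});
// EventTable indexed = strategy.index(pollResult, true); // PropertyIndexedEventTable when the cache is active
// EventTable passThrough = strategy.index(pollResult, false); // UnindexedEventTableList otherwise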
| intelie/esper | esper/src/main/java/com/espertech/esper/epl/join/pollindex/PollResultIndexingStrategyIndex.java | Java | gpl-2.0 | 2,448 |
package com.ht.halo.hibernate3.base;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map.Entry;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.ht.halo.hibernate3.HaloDao;
import com.ht.halo.map.HaloMap;
public class MyEntityUtils {
private static final Log logger = LogFactory.getLog(MyEntityUtils.class);
/**
* @Title: setEntity
* @Description: Sets the entity's field values from a HaloMap (map-to-entity conversion), typically called from the action layer.
* @param entity
* @param parameter
*/
public static Object setEntity(Object entity,HaloMap parameter){
if(null!=parameter){
for (Entry<String, ?> entry : parameter.entrySet()) {
try {
BeanUtils.setProperty(entity, entry.getKey(), entry.getValue());
} catch (IllegalAccessException e) {
logger.error("Failed to set property [" + entry.getKey() + "]", e);
} catch (InvocationTargetException e) {
logger.error("Failed to set property [" + entry.getKey() + "]", e);
}
// MyBeanUtils.setFieldValue(entity, entry.getKey(), entry.getValue());
}
}
return entity;
}
public static <T> T toEntity(Class<T> clazz, HaloMap map) {
T obj = null;
try {
BeanInfo beanInfo = Introspector.getBeanInfo(clazz);
obj = clazz.newInstance(); // create the JavaBean instance
// populate the JavaBean properties
PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
for (int i = 0; i < propertyDescriptors.length; i++) {
PropertyDescriptor descriptor = propertyDescriptors[i];
String propertyName = descriptor.getName();
if (map.containsKey(propertyName)) {
// wrap the assignment in try/catch so a failure on one property does not affect the others
Object value = map.get(propertyName);
if ("".equals(value)) {
value = null;
}
Object[] args = new Object[1];
args[0] = value;
try {
descriptor.getWriteMethod().invoke(obj, args);
} catch (InvocationTargetException e) {
logger.warn("Failed to map field [" + propertyName + "]");
}
}
}
} catch (IllegalAccessException e) {
logger.error("Failed to instantiate the JavaBean");
} catch (IntrospectionException e) {
logger.error("Failed to introspect the class properties");
} catch (IllegalArgumentException e) {
logger.error("Mapping error: illegal argument");
} catch (InstantiationException e) {
logger.error("Failed to instantiate the JavaBean");
}
return (T) obj;
}
public static HaloMap toHaloMap(Object bean) {
Class<? extends Object> clazz = bean.getClass();
HaloMap returnMap = new HaloMap();
BeanInfo beanInfo = null;
try {
beanInfo = Introspector.getBeanInfo(clazz);
PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
for (int i = 0; i < propertyDescriptors.length; i++) {
PropertyDescriptor descriptor = propertyDescriptors[i];
String propertyName = descriptor.getName();
if (!propertyName.equals("class")) {
Method readMethod = descriptor.getReadMethod();
Object result = readMethod.invoke(bean, new Object[0]);
if (null == result) {
continue;
}
// store every value as its string representation
returnMap.put(propertyName, result.toString());
}
}
} catch (IntrospectionException e) {
logger.error("Failed to introspect the class properties");
} catch (IllegalAccessException e) {
logger.error("Failed to access the JavaBean property");
} catch (IllegalArgumentException e) {
logger.error("Mapping error: illegal argument");
} catch (InvocationTargetException e) {
logger.error("Failed to invoke the property's getter method");
}
return returnMap;
}
public static HaloMap toFindHaloMap(Object bean) {
Class<? extends Object> clazz = bean.getClass();
HaloMap returnMap = new HaloMap();
BeanInfo beanInfo = null;
try {
beanInfo = Introspector.getBeanInfo(clazz);
PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
for (int i = 0; i < propertyDescriptors.length; i++) {
PropertyDescriptor descriptor = propertyDescriptors[i];
String propertyName = descriptor.getName();
if (!propertyName.equals("class")) {
Method readMethod = descriptor.getReadMethod();
Object result = readMethod.invoke(bean, new Object[0]);
if (null == result) {
continue;
}
// skip the "json" property
if (propertyName.equals("json")) {
continue;
}
returnMap.put(propertyName + HaloDao.MYSPACE + HaloDao.PRM, result.toString());
}
}
} catch (IntrospectionException e) {
logger.error("Failed to introspect the class properties");
} catch (IllegalAccessException e) {
logger.error("Failed to access the JavaBean property");
} catch (IllegalArgumentException e) {
logger.error("Mapping error: illegal argument");
} catch (InvocationTargetException e) {
logger.error("Failed to invoke the property's getter method");
}
return returnMap;
}
}
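// Illustrative usage sketch (not part of the original source): converting between a HaloMap and a
// bean with the helpers above. "Customer" and its "name" property are hypothetical examples, and
// HaloMap.put(String, Object) is assumed to behave like a normal Map put.
//
// HaloMap map = new HaloMap();
// map.put("name", "Alice");
// Customer c = MyEntityUtils.toEntity(Customer.class, map); // map -> bean
// HaloMap back = MyEntityUtils.toHaloMap(c); // bean -> map, values as strings
// HaloMap query = MyEntityUtils.toFindHaloMap(c); // keys suffixed with HaloDao.MYSPACE + HaloDao.PRM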
| VonChange/haloDao-Hibernate3 | src/main/java/com/ht/halo/hibernate3/base/MyEntityUtils.java | Java | gpl-2.0 | 6,670 |
package visitors;
/**
* Created by stratosphr on 20/11/15.
*/
public interface IVisitedSort extends IVisited {
Object accept(ISortVisitor visitor);
}
| stratosphr/Logic | src/visitors/IVisitedSort.java | Java | gpl-2.0 | 159 |
/*
* aTunes
* Copyright (C) Alex Aranda, Sylvain Gaudard and contributors
*
* See http://www.atunes.org/wiki/index.php?title=Contributing for information about contributors
*
* http://www.atunes.org
* http://sourceforge.net/projects/atunes
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package net.sourceforge.atunes.kernel.modules.navigator;
import java.awt.Component;
import java.util.List;
import javax.swing.Action;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import net.sourceforge.atunes.kernel.actions.CustomAbstractAction;
import net.sourceforge.atunes.model.IAudioObject;
import net.sourceforge.atunes.model.IPlayListHandler;
import net.sourceforge.atunes.model.ITreeNode;
/**
* Enables or disables navigator actions given selection
*
* @author alex
*
*/
public class NavigatorActionsStateController {
private IPlayListHandler playListHandler;
/**
* @param playListHandler
*/
public void setPlayListHandler(final IPlayListHandler playListHandler) {
this.playListHandler = playListHandler;
}
/**
* Enables or disables tree popup actions
*
* @param rootSelected
* @param components
* @param nodes
*/
void updateTreePopupMenuWithTreeSelection(final boolean rootSelected,
final Component[] components, final List<ITreeNode> nodes) {
for (Component c : components) {
updateMenuComponent(rootSelected, nodes, c);
}
}
/**
* Enables or disables table popup actions
*
* @param rootSelected
* @param components
* @param selection
*/
void updateTablePopupMenuWithTableSelection(final boolean rootSelected,
final Component[] components, final List<IAudioObject> selection) {
for (Component c : components) {
updateTableMenuComponent(rootSelected, selection, c);
}
}
/**
* @param rootSelected
* @param selection
* @param c
*/
private void updateMenuComponent(final boolean rootSelected,
final List<ITreeNode> selection, final Component c) {
if (c != null) {
if (c instanceof JMenu) {
for (int i = 0; i < ((JMenu) c).getItemCount(); i++) {
updateMenuComponent(rootSelected, selection,
((JMenu) c).getItem(i));
}
} else if (c instanceof JMenuItem) {
updateMenuItem(rootSelected, selection, (JMenuItem) c);
}
}
}
/**
* @param rootSelected
* @param selection
* @param c
*/
private void updateTableMenuComponent(final boolean rootSelected,
final List<IAudioObject> selection, final Component c) {
if (c != null) {
if (c instanceof JMenu) {
for (int i = 0; i < ((JMenu) c).getItemCount(); i++) {
updateTableMenuComponent(rootSelected, selection,
((JMenu) c).getItem(i));
}
} else if (c instanceof JMenuItem) {
updateTableMenuItem(rootSelected, selection, (JMenuItem) c);
}
}
}
/**
* @param rootSelected
* @param selection
* @param menuItem
*/
private void updateMenuItem(final boolean rootSelected,
final List<ITreeNode> selection, final JMenuItem menuItem) {
Action a = menuItem.getAction();
if (a instanceof CustomAbstractAction) {
CustomAbstractAction customAction = (CustomAbstractAction) a;
if (!customAction.isEnabledForPlayList(this.playListHandler
.getVisiblePlayList())) {
customAction.setEnabled(false);
} else {
customAction.setEnabled(customAction
.isEnabledForNavigationTreeSelection(rootSelected,
selection));
}
}
}
/**
* @param rootSelected
* @param selection
* @param menuItem
*/
private void updateTableMenuItem(final boolean rootSelected,
final List<IAudioObject> selection, final JMenuItem menuItem) {
Action a = menuItem.getAction();
if (a instanceof CustomAbstractAction) {
CustomAbstractAction customAction = (CustomAbstractAction) a;
if (!customAction.isEnabledForPlayList(this.playListHandler
.getVisiblePlayList())) {
customAction.setEnabled(false);
} else {
customAction.setEnabled(customAction
.isEnabledForNavigationTableSelection(selection));
}
}
}
}
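// Illustrative usage sketch (not part of the original source): how a navigation view in the same
// package might refresh the popup-menu actions when the tree selection changes. The popup menu,
// selection list and playListHandler are assumptions; the setter and the package-private update
// method come from the class above.
//
// NavigatorActionsStateController controller = new NavigatorActionsStateController();
// controller.setPlayListHandler(playListHandler);
// controller.updateTreePopupMenuWithTreeSelection(isRootSelected,
// treePopupMenu.getComponents(), selectedTreeNodes);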
| PDavid/aTunes | aTunes/src/main/java/net/sourceforge/atunes/kernel/modules/navigator/NavigatorActionsStateController.java | Java | gpl-2.0 | 4,445 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.mum.ea.mb;
import edu.mum.ea.ejb.ProjectEJB;
import edu.mum.ea.ejb.ReleaseBacklogEJB;
import edu.mum.ea.ejb.SprintEJB;
import edu.mum.ea.ejb.TaskEJB;
import edu.mum.ea.entity.Project;
import edu.mum.ea.entity.ReleaseBacklog;
import edu.mum.ea.entity.Sprint;
import edu.mum.ea.entity.Task;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ManagedProperty;
import javax.faces.bean.RequestScoped;
import javax.faces.context.FacesContext;
/**
*
* @author Syed
*/
@ManagedBean
@RequestScoped
public class SprintMB {
private Sprint sprint;
@EJB
private SprintEJB sprintEJB;
@EJB
private ProjectEJB projectEJB;
@EJB
private ReleaseBacklogEJB releaseBacklogEJB;
@EJB
private TaskEJB taskEJB;
@ManagedProperty(value = "#{sessionMB}")
private SessionMB sessionMB;
private List<Task> taskList = new ArrayList<Task>();
private List<ReleaseBacklog> releaseBacklogList = new ArrayList<ReleaseBacklog>();
private long relBacklogId;
private List<String> selectedTasks = new ArrayList<String>();
private List<Sprint> sprintList;
/**
* Creates a new instance of SprintMB
*/
public SprintMB() {
sprint = new Sprint();
sprintList = new ArrayList<Sprint>();
}
@PostConstruct
public void init() {
//Map<String, Object> sessionMap = FacesContext.getCurrentInstance().getExternalContext().getSessionMap();
//Project project = projectEJB.find(sessionMB.getUserSelectedProject().getId());
releaseBacklogList = releaseBacklogEJB.findAllRelBakByProject(sessionMB.getUserSelectedProject().getId());//project.getReleaseBacklogList();
}
public SessionMB getSessionMB() {
return sessionMB;
}
public void setSessionMB(SessionMB sessionMB) {
this.sessionMB = sessionMB;
}
public Sprint getSprint() {
return sprint;
}
public void setSprint(Sprint sprint) {
this.sprint = sprint;
}
public List<Sprint> getSprintList() {
sprintList = sprintEJB.findAllSprintByProject(sessionMB.getUserSelectedProject().getId());//sprintEJB.findAll();
return sprintList;
}
public void setSprintList(List<Sprint> sprintList) {
this.sprintList = sprintList;
}
public List<ReleaseBacklog> getReleaseBacklogList() {
return releaseBacklogList;
}
public void setReleaseBacklogList(List<ReleaseBacklog> releaseBacklogList) {
this.releaseBacklogList = releaseBacklogList;
}
public long getRelBacklogId() {
return relBacklogId;
}
public void setRelBacklogId(long relBacklogId) {
this.relBacklogId = relBacklogId;
}
public List<Task> getTaskList() {
return taskList;
}
public void setTaskList(List<Task> taskList) {
this.taskList = taskList;
}
public List<String> getSelectedTasks() {
return selectedTasks;
}
public void setSelectedTasks(List<String> selectedTasks) {
this.selectedTasks = selectedTasks;
}
public String createSprint() {
sprint.setReleaseBacklog(releaseBacklogEJB.find(getRelBacklogId()));
sprintEJB.save(sprint);
return "sprint-list";
}
public String gotoUpdatePage(Long id){
sprint = sprintEJB.find(id);
try {
setRelBacklogId(sprint.getReleaseBacklog().getId());
} catch(Exception e) {
//System.out.println("-----" + e.getMessage());
}
return "sprint-update";
}
public String updateSprint(){
try {
sprint.setReleaseBacklog(releaseBacklogEJB.find(getRelBacklogId()));
} catch (Exception e) {
//System.out.println("-----" + e.getMessage());
}
sprintEJB.edit(sprint);
return "sprint-list";
}
public String deleteSprint(Long sprintId){
sprintEJB.delete(sprintId);
return "sprint-list";
}
public String sprintDetail(Long id) {
sprint = sprintEJB.find(id);
taskList = taskEJB.findAll();
for (Task t : sprint.getTasks()) {
selectedTasks.add(t.getId().toString());
}
return "sprint-view";
}
public String addTaskToSprint() {
long sprintId = sprint.getId();
sprint = sprintEJB.find(sprintId);
taskList = sprint.getTasks();
// remove every task currently attached to the sprint before re-adding the selection
taskList.clear();
for (String taskId : selectedTasks) {
if (!taskList.contains(taskEJB.find(Long.parseLong(taskId)))) {
sprint.getTasks().add(taskEJB.find(Long.parseLong(taskId)));
}
}
sprintEJB.edit(sprint);
return "/sprint/sprint-list";
}
}
| ruzdi/ProjectManagement | ProjectManagement-war/src/java/edu/mum/ea/mb/SprintMB.java | Java | gpl-2.0 | 5,482 |
package mrdev023.opengl;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.system.MemoryUtil.*;
import java.awt.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.glfw.*;
import org.lwjgl.opengl.*;
import mrdev023.exception.*;
public class Display {
private static DisplayMode displayMode;
private static String TITLE = "";
private static long window;
private static boolean hasResized = false;
public static void create(String title,int width,int height){
if ( !glfwInit() )
throw new IllegalStateException("Unable to initialize GLFW");
TITLE = title;
displayMode = new DisplayMode(width,height);
window = glfwCreateWindow(displayMode.getWidth(),displayMode.getHeight(), TITLE, NULL, NULL);
}
public static void create(String title,int width,int height,int major,int minor){
if ( !glfwInit() )
throw new IllegalStateException("Unable to initialize GLFW");
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, major); // request the desired OpenGL version (e.g. 3.3)
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, minor);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
TITLE = title;
displayMode = new DisplayMode(width,height);
window = glfwCreateWindow(displayMode.getWidth(),displayMode.getHeight(), TITLE, NULL, NULL);
}
public static void setMouseGrabbed(boolean a){
if(a){
glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
}else{
glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_NORMAL);
}
}
public static void setVSync(boolean a) throws DisplayException{
if(a)glfwSwapInterval(1);
else glfwSwapInterval(0);
}
public static void create(String title,int width,int height,int major,int minor,int sample){
if ( !glfwInit() )
throw new IllegalStateException("Unable to initialize GLFW");
glfwWindowHint(GLFW_SAMPLES, sample); // multisample antialiasing (e.g. 4x)
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, major); // request the desired OpenGL version (e.g. 3.3)
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, minor);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
TITLE = title;
displayMode = new DisplayMode(width,height);
window = glfwCreateWindow(displayMode.getWidth(),displayMode.getHeight(), TITLE, NULL, NULL);
}
public static void setSample(int sample){
glfwWindowHint(GLFW_SAMPLES, sample);
}
public static void setResizable(boolean a){
if(a)glfwWindowHint(GLFW_RESIZABLE, GL_TRUE);
else glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
}
public static void setTitle(String title){
TITLE = title;
glfwSetWindowTitle(window, TITLE);
}
public static String getTitle(){
return TITLE;
}
public static boolean wasResized(){
IntBuffer w = BufferUtils.createIntBuffer(1);
IntBuffer h = BufferUtils.createIntBuffer(1);
glfwGetWindowSize(window, w, h);
int width = w.get(0);
int height = h.get(0);
if(Display.getDisplayMode().getWidth() != width || Display.getDisplayMode().getHeight() != height || hasResized){
setDisplayMode(new DisplayMode(width, height));
hasResized = false;
return true;
}else{
return false;
}
}
public static void printMonitorsInfo(){
PointerBuffer monitors = glfwGetMonitors();
GLFWVidMode m;
if(monitors == null){
System.out.println("No monitor detected !");
return;
}
for(int i = 0;i < monitors.capacity();i++){
m = glfwGetVideoMode(monitors.get(i));
System.out.println(glfwGetMonitorName(monitors.get(i)) + "(" + i + ") : " + m.width() + "x" + m.height() + ":" + m.refreshRate() + "Hz");
}
}
public static boolean isCloseRequested(){
return glfwWindowShouldClose(window);
}
public static void createContext(){
glfwMakeContextCurrent(window);
GL.createCapabilities();
}
public static void updateEvent(){
glfwPollEvents();
}
public static void updateFrame(){
glfwSwapBuffers(window);
}
public static DisplayMode getDisplayMode() {
return displayMode;
}
public static void setDisplayMode(DisplayMode displayMode) {
if(Display.displayMode == null || displayMode == null)return;
Display.displayMode.setDisplayMode(displayMode);
hasResized = true;
}
public static void destroy(){
glfwDestroyWindow(window);
glfwTerminate();
}
public static long getWindow() {
return window;
}
}
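// Illustrative usage sketch (not part of the original source): a minimal render loop built only
// from the static methods defined above. The title, window size and GL version are arbitrary
// example values.
//
// Display.create("Demo", 800, 600, 3, 3);
// Display.createContext();
// while (!Display.isCloseRequested()) {
// Display.updateEvent();
// if (Display.wasResized()) {
// // e.g. adjust the viewport to Display.getDisplayMode().getWidth()/getHeight()
// }
// Display.updateFrame();
// }
// Display.destroy();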
| mrdev023/Modern-Game-Engine | src/mrdev023/opengl/Display.java | Java | gpl-2.0 | 4,476 |
// Copyright (C) 2012 Markus Fischer
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; version 2 of the License.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
//
// Contact: [email protected]
package ch.dbs.actions.bestellung;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.lang.StringEscapeUtils;
import org.jsoup.Jsoup;
import org.jsoup.safety.Whitelist;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import util.CodeUrl;
import util.Http;
import ch.dbs.form.JournalDetails;
import enums.Connect;
/**
* This class reads answers from the normal EZB UI searched with the parameter
* xmloutput=1 to get XML.
*/
public class EZBXML {
private static final Logger LOG = LoggerFactory.getLogger(EZBXML.class);
public List<JournalDetails> searchByTitle(final String jtitle, final String bibid) {
final Http http = new Http();
final CodeUrl coder = new CodeUrl();
final StringBuffer link = new StringBuffer(
"http://ezb.uni-regensburg.de/ezeit/searchres.phtml?xmloutput=1&colors=7&lang=de&jq_type1=KT&jq_bool2=AND&jq_not2=+&jq_type2=KS&jq_term2=&jq_bool3=AND&jq_not3=+&jq_type3=PU&jq_term3=&offset=-1&hits_per_page=30&search_journal=Suche+starten&Notations%5B%5D=all&selected_colors%5B%5D=1&selected_colors%5B%5D=2&selected_colors%5B%5D=4&bibid=");
link.append(bibid);
link.append("&jq_term1=");
link.append(coder.encode(jtitle, "ISO-8859-1"));
String content = http.getContent(link.toString(), Connect.TIMEOUT_2.getValue(), Connect.TRIES_2.getValue(),
null);
// if we have > 30 hits, try a more concise search using: &jq_type1=KS (title starts with) instead of &jq_type1=KT (words in title)
if (content != null && content.contains("<search_count>")) {
final int x = Integer.parseInt(content.substring(content.indexOf("<search_count>") + 14,
content.indexOf("</search_count>")));
if (x > 30) {
final StringBuffer link2 = new StringBuffer(
"http://ezb.uni-regensburg.de/ezeit/searchres.phtml?xmloutput=1&colors=7&lang=de&jq_type1=KS&jq_bool2=AND&jq_not2=+&jq_type2=KS&jq_term2=&jq_bool3=AND&jq_not3=+&jq_type3=PU&jq_term3=&offset=-1&hits_per_page=30&search_journal=Suche+starten&Notations%5B%5D=all&selected_colors%5B%5D=1&selected_colors%5B%5D=2&selected_colors%5B%5D=4&bibid=");
link2.append(bibid);
link2.append("&jq_term1=");
link2.append(coder.encode(jtitle, "ISO-8859-1"));
content = http.getContent(link2.toString(), Connect.TIMEOUT_2.getValue(), Connect.TRIES_2.getValue(),
null);
}
}
final List<String> jourids = getJourids(content);
return searchByJourids(jourids, bibid);
}
public List<JournalDetails> searchByIssn(final String issn, final String bibid) {
final Http http = new Http();
final StringBuffer link = new StringBuffer(
"http://ezb.uni-regensburg.de/ezeit/searchres.phtml?xmloutput=1&colors=5&lang=de&jq_type1=KT&jq_term1=&jq_bool2=AND&jq_not2=+&jq_type2=KS&jq_term2=&jq_bool3=AND&jq_not3=+&jq_type3=PU&jq_term3=&jq_bool4=AND&jq_not4=+&jq_type4=IS&offset=-1&hits_per_page=50&search_journal=Suche+starten&Notations%5B%5D=all&selected_colors%5B%5D=1&selected_colors%5B%5D=2&selected_colors%5B%5D=4&bibid=");
link.append(bibid);
link.append("&jq_term4=");
link.append(issn);
final String content = http.getContent(link.toString(), Connect.TIMEOUT_2.getValue(),
Connect.TRIES_2.getValue(), null);
final List<String> jourids = getJourids(content);
return searchByJourids(jourids, bibid);
}
public List<JournalDetails> searchByJourids(final List<String> jourids, final String bibid) {
final List<JournalDetails> list = new ArrayList<JournalDetails>();
final Http http = new Http();
final StringBuffer link = new StringBuffer(
"http://rzblx1.uni-regensburg.de/ezeit/detail.phtml?xmloutput=1&colors=7&lang=de&bibid=");
link.append(bibid);
link.append("&jour_id=");
final StringBuffer infoLink = new StringBuffer(
"http://ezb.uni-regensburg.de/ezeit/detail.phtml?colors=7&lang=de&bibid=");
infoLink.append(bibid);
infoLink.append("&jour_id=");
try {
for (final String jourid : jourids) {
final JournalDetails jd = new JournalDetails();
final String content = http.getContent(link.toString() + jourid, Connect.TIMEOUT_1.getValue(),
Connect.TRIES_1.getValue(), null);
if (content != null) {
final DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
domFactory.setNamespaceAware(true);
final DocumentBuilder builder = domFactory.newDocumentBuilder();
final Document doc = builder.parse(new InputSource(new StringReader(content)));
final XPathFactory factory = XPathFactory.newInstance();
final XPath xpath = factory.newXPath();
final XPathExpression exprJournal = xpath.compile("//journal");
final XPathExpression exprPissns = xpath.compile("//journal/detail/P_ISSNs");
final XPathExpression exprEissns = xpath.compile("//journal/detail/E_ISSNs");
final NodeList resultJournal = (NodeList) exprJournal.evaluate(doc, XPathConstants.NODESET);
for (int i = 0; i < resultJournal.getLength(); i++) {
final Node firstResultNode = resultJournal.item(i);
final Element journal = (Element) firstResultNode;
// Title
String title = getValue(journal.getElementsByTagName("title"));
if (title != null) {
title = Jsoup.clean(title, Whitelist.none());
title = Jsoup.parse(title).text();
}
jd.setZeitschriftentitel(title);
// P-ISSNs
final NodeList resultPissns = (NodeList) exprPissns.evaluate(doc, XPathConstants.NODESET);
// get first pissn
for (int z = 0; z < resultPissns.getLength(); z++) {
final Node firstPissnsNode = resultPissns.item(i);
final Element pissnElement = (Element) firstPissnsNode;
final String pissn = getValue(pissnElement.getElementsByTagName("P_ISSN"));
jd.setIssn(pissn);
}
// try to get Eissn if we have no Pissn
if (jd.getIssn() == null) {
// E-ISSNs
final NodeList resultEissns = (NodeList) exprEissns.evaluate(doc, XPathConstants.NODESET);
// take the E_ISSN of the first E_ISSNs block
if (resultEissns.getLength() > 0) {
final Element eissnElement = (Element) resultEissns.item(0);
jd.setIssn(getValue(eissnElement.getElementsByTagName("E_ISSN")));
}
}
// add info link
jd.setLink(infoLink.toString() + jourid);
list.add(jd);
}
}
}
} catch (final XPathExpressionException e) {
LOG.error(e.toString());
} catch (final SAXParseException e) {
LOG.error(e.toString());
} catch (final SAXException e) {
LOG.error(e.toString());
} catch (final IOException e) {
LOG.error(e.toString());
} catch (final ParserConfigurationException e) {
LOG.error(e.toString());
} catch (final Exception e) {
LOG.error(e.toString());
}
return list;
}
private List<String> getJourids(final String content) {
final List<String> result = new ArrayList<String>();
try {
if (content != null) {
final DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance();
domFactory.setNamespaceAware(true);
final DocumentBuilder builder = domFactory.newDocumentBuilder();
final Document doc = builder.parse(new InputSource(new StringReader(content)));
final XPathFactory factory = XPathFactory.newInstance();
final XPath xpath = factory.newXPath();
final XPathExpression exprJournals = xpath.compile("//journals/journal");
final NodeList journals = (NodeList) exprJournals.evaluate(doc, XPathConstants.NODESET);
for (int i = 0; i < journals.getLength(); i++) {
final Node firstResultNode = journals.item(i);
final Element journal = (Element) firstResultNode;
final String id = journal.getAttribute("jourid");
if (id != null) {
result.add(id);
}
}
}
} catch (final XPathExpressionException e) {
LOG.error(e.toString());
} catch (final SAXParseException e) {
LOG.error(e.toString());
} catch (final SAXException e) {
LOG.error(e.toString());
} catch (final IOException e) {
LOG.error(e.toString());
} catch (final ParserConfigurationException e) {
LOG.error(e.toString());
} catch (final Exception e) {
LOG.error(e.toString());
}
return result;
}
private String getValue(final NodeList list) {
String result = null;
final Element listElement = (Element) list.item(0);
if (listElement != null) {
final NodeList textList = listElement.getChildNodes();
if (textList.getLength() > 0) {
result = StringEscapeUtils.unescapeXml(textList.item(0).getNodeValue());
}
}
return result;
}
}
| gbv/doctor-doc | source/ch/dbs/actions/bestellung/EZBXML.java | Java | gpl-2.0 | 11,803 |
// External imports
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.BufferedInputStream;
import javax.imageio.ImageIO;
import javax.swing.*;
// Local imports
import org.j3d.renderer.aviatrix3d.texture.TextureCreateUtils;
/**
* Example application that demonstrates how to use the loader interface
* to load a file into the scene graph.
* <p>
*
* @author Justin Couch
* @version $Revision: 1.1 $
*/
public class NormalMapDemo extends JFrame
implements ActionListener
{
private JFileChooser openDialog;
/** Renderer for the basic image */
private ImageIcon srcIcon;
private JLabel srcLabel;
/** Renderer for the normal map version */
private ImageIcon mapIcon;
private JLabel mapLabel;
/** Utility for munging textures to power of 2 size */
private TextureCreateUtils textureUtils;
public NormalMapDemo()
{
super("Normal map conversion demo");
setSize(1280, 1024);
setLocation(0, 0);
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
textureUtils = new TextureCreateUtils();
JPanel p1 = new JPanel(new BorderLayout());
srcIcon = new ImageIcon();
srcLabel = new JLabel();
srcLabel.setVerticalTextPosition(SwingConstants.BOTTOM);
srcLabel.setText("Source Image");
mapIcon = new ImageIcon();
mapLabel = new JLabel();
mapLabel.setVerticalTextPosition(SwingConstants.BOTTOM);
mapLabel.setText("NormalMap Image");
JButton b = new JButton("Open A file");
b.addActionListener(this);
p1.add(b, BorderLayout.SOUTH);
p1.add(srcLabel, BorderLayout.WEST);
p1.add(mapLabel, BorderLayout.EAST);
getContentPane().add(p1);
}
//---------------------------------------------------------------
    // Methods defined by ActionListener
//---------------------------------------------------------------
/**
* Process the action event from the open button
*/
public void actionPerformed(ActionEvent evt)
{
if(openDialog == null)
openDialog = new JFileChooser();
int ret_val = openDialog.showOpenDialog(this);
if(ret_val != JFileChooser.APPROVE_OPTION)
return;
File file = openDialog.getSelectedFile();
try
{
System.out.println("Loading external file: " + file);
FileInputStream is = new FileInputStream(file);
BufferedInputStream stream = new BufferedInputStream(is);
BufferedImage img = ImageIO.read(stream);
if(img == null)
{
System.out.println("Image load barfed");
return;
}
srcIcon.setImage(img);
srcLabel.setIcon(srcIcon);
BufferedImage map_img = textureUtils.createNormalMap(img, null);
mapIcon.setImage(map_img);
mapLabel.setIcon(mapIcon);
}
catch(IOException ioe)
{
System.out.println("crashed " + ioe.getMessage());
ioe.printStackTrace();
}
}
//---------------------------------------------------------------
// Local methods
//---------------------------------------------------------------
public static void main(String[] args)
{
NormalMapDemo demo = new NormalMapDemo();
demo.setVisible(true);
}
}
| Norkart/NK-VirtualGlobe | aviatrix3d/examples/texture/NormalMapDemo.java | Java | gpl-2.0 | 3,599 |
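Usage note (not from the original source): the demo above funnels a user-selected image through TextureCreateUtils.createNormalMap. A minimal headless sketch of that same conversion step, assuming the aviatrix3d utility class is on the classpath; the file names are placeholders.

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import org.j3d.renderer.aviatrix3d.texture.TextureCreateUtils;

public class NormalMapBatchSketch
{
    public static void main(String[] args) throws IOException
    {
        // "height.png" and "normal.png" are placeholder paths, not part of the original demo
        BufferedImage heightMap = ImageIO.read(new File("height.png"));
        TextureCreateUtils utils = new TextureCreateUtils();
        // same call the demo issues after the "Open A file" button is pressed
        BufferedImage normalMap = utils.createNormalMap(heightMap, null);
        ImageIO.write(normalMap, "png", new File("normal.png"));
    }
}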
/*
* $RCSfile: OrCRIF.java,v $
*
* Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved.
*
* Use is subject to license terms.
*
* $Revision: 1.1 $
* $Date: 2005/02/11 04:56:38 $
* $State: Exp $
*/
package com.sun.media.jai.opimage;
import java.awt.RenderingHints;
import java.awt.geom.Rectangle2D;
import java.awt.image.RenderedImage;
import java.awt.image.renderable.RenderContext;
import java.awt.image.renderable.ParameterBlock;
import java.awt.image.renderable.RenderableImage;
import javax.media.jai.CRIFImpl;
import javax.media.jai.ImageLayout;
import java.util.Map;
/**
* A <code>CRIF</code> supporting the "Or" operation in the
* rendered and renderable image layers.
*
* @since EA2
* @see javax.media.jai.operator.OrDescriptor
* @see OrOpImage
*
*/
public class OrCRIF extends CRIFImpl {
/** Constructor. */
public OrCRIF() {
super("or");
}
/**
* Creates a new instance of <code>OrOpImage</code> in the
     * rendered layer. This method satisfies the implementation of RIF.
*
* @param paramBlock The two source images to be "Ored" together.
* @param renderHints Optionally contains destination image layout.
*/
public RenderedImage create(ParameterBlock paramBlock,
RenderingHints renderHints) {
// Get ImageLayout from renderHints if any.
ImageLayout layout = RIFUtil.getImageLayoutHint(renderHints);
return new OrOpImage(paramBlock.getRenderedSource(0),
paramBlock.getRenderedSource(1),
renderHints,
layout);
}
}
| RoProducts/rastertheque | JAILibrary/src/com/sun/media/jai/opimage/OrCRIF.java | Java | gpl-2.0 | 1,740 |
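Usage note (not from the original source): OrCRIF registers itself for the "or" operation name passed to super(), so client code normally reaches it through the JAI operation registry rather than instantiating it directly. A hedged sketch under that assumption, with two pre-loaded, size- and type-compatible sources:

import java.awt.image.RenderedImage;
import java.awt.image.renderable.ParameterBlock;
import javax.media.jai.JAI;
import javax.media.jai.RenderedOp;

class OrOperationSketch {
    // src0 and src1 are assumed to be compatible images obtained elsewhere
    static RenderedOp bitwiseOr(RenderedImage src0, RenderedImage src1) {
        ParameterBlock pb = new ParameterBlock();
        pb.addSource(src0);
        pb.addSource(src1);
        // "or" is the operation name OrCRIF passes to its superclass constructor
        return JAI.create("or", pb);
    }
}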
package org.janelia.alignment.match;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* List of {@link CanvasMatches} with associated tileIds mapped for fast lookup.
*
* @author Eric Trautman
*/
public class TileIdsWithMatches {
private final Set<String> tileIds;
private final List<CanvasMatches> canvasMatchesList;
public TileIdsWithMatches() {
this.canvasMatchesList = new ArrayList<>();
this.tileIds = new HashSet<>();
}
/**
*
* @param canvasMatchesList list of matches for section (could include tiles not in stack).
* @param stackTileIds set of tile ids in stack.
* To be kept, match pair must have both tiles in stack.
*/
public void addMatches(final List<CanvasMatches> canvasMatchesList,
final Set<String> stackTileIds) {
for (final CanvasMatches canvasMatches : canvasMatchesList) {
final String pId = canvasMatches.getpId();
final String qId = canvasMatches.getqId();
if (stackTileIds.contains(pId) && stackTileIds.contains(qId)) {
this.canvasMatchesList.add(canvasMatches);
this.tileIds.add(pId);
this.tileIds.add(qId);
}
}
}
public boolean contains(final String tileId) {
return tileIds.contains(tileId);
}
public List<CanvasMatches> getCanvasMatchesList() {
return canvasMatchesList;
}
}
| saalfeldlab/render | render-app/src/main/java/org/janelia/alignment/match/TileIdsWithMatches.java | Java | gpl-2.0 | 1,543 |
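Usage note (not from the original source): as the javadoc of addMatches states, a pair is kept only when both of its tiles belong to the stack. A small sketch of that filtering step; the tile ids are hypothetical and the incoming CanvasMatches list is assumed to come from a match service elsewhere in render:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.janelia.alignment.match.CanvasMatches;
import org.janelia.alignment.match.TileIdsWithMatches;

class TileIdsWithMatchesSketch {
    static List<CanvasMatches> keepPairsInsideStack(final List<CanvasMatches> sectionMatches) {
        // hypothetical tile ids; in practice these come from the stack's resolved tiles
        final Set<String> stackTileIds = new HashSet<>(Arrays.asList("tile.a", "tile.b", "tile.c"));
        final TileIdsWithMatches filtered = new TileIdsWithMatches();
        // drops any pair that touches a tile outside the stack
        filtered.addMatches(sectionMatches, stackTileIds);
        return filtered.getCanvasMatchesList();
    }
}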
package org.wordpress.android.ui.notifications;
import com.android.volley.VolleyError;
import org.wordpress.android.models.Note;
import java.util.List;
public class NotificationEvents {
public static class NotificationsChanged {
final public boolean hasUnseenNotes;
public NotificationsChanged() {
this.hasUnseenNotes = false;
}
public NotificationsChanged(boolean hasUnseenNotes) {
this.hasUnseenNotes = hasUnseenNotes;
}
}
public static class NoteModerationFailed {}
public static class NoteModerationStatusChanged {
final boolean isModerating;
final String noteId;
public NoteModerationStatusChanged(String noteId, boolean isModerating) {
this.noteId = noteId;
this.isModerating = isModerating;
}
}
public static class NoteLikeStatusChanged {
final String noteId;
public NoteLikeStatusChanged(String noteId) {
this.noteId = noteId;
}
}
public static class NoteVisibilityChanged {
final boolean isHidden;
final String noteId;
public NoteVisibilityChanged(String noteId, boolean isHidden) {
this.noteId = noteId;
this.isHidden = isHidden;
}
}
public static class NotificationsSettingsStatusChanged {
final String mMessage;
public NotificationsSettingsStatusChanged(String message) {
mMessage = message;
}
public String getMessage() {
return mMessage;
}
}
public static class NotificationsUnseenStatus {
final public boolean hasUnseenNotes;
public NotificationsUnseenStatus(boolean hasUnseenNotes) {
this.hasUnseenNotes = hasUnseenNotes;
}
}
public static class NotificationsRefreshCompleted {
final List<Note> notes;
public NotificationsRefreshCompleted(List<Note> notes) {
this.notes = notes;
}
}
public static class NotificationsRefreshError {
VolleyError error;
public NotificationsRefreshError(VolleyError error) {
this.error = error;
}
public NotificationsRefreshError() {
}
}
}
| mzorz/WordPress-Android | WordPress/src/main/java/org/wordpress/android/ui/notifications/NotificationEvents.java | Java | gpl-2.0 | 2,258 |
/*
* Copyright (c) 2017 Red Hat, Inc. and/or its affiliates.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
/**
* @test TestMemoryMXBeans
* @key gc
* @summary Test JMX memory beans
* @modules java.base/jdk.internal.misc
* java.management
* @run main/othervm -XX:+UseShenandoahGC -Xmx1g TestMemoryMXBeans -1 1024
* @run main/othervm -XX:+UseShenandoahGC -Xms1g -Xmx1g TestMemoryMXBeans 1024 1024
* @run main/othervm -XX:+UseShenandoahGC -Xms128m -Xmx1g TestMemoryMXBeans 128 1024
*/
import java.lang.management.*;
import java.util.*;
public class TestMemoryMXBeans {
public static void main(String[] args) throws Exception {
if (args.length < 2) {
throw new IllegalStateException("Should provide expected heap sizes");
}
long initSize = 1L * Integer.parseInt(args[0]) * 1024 * 1024;
long maxSize = 1L * Integer.parseInt(args[1]) * 1024 * 1024;
testMemoryBean(initSize, maxSize);
}
public static void testMemoryBean(long initSize, long maxSize) {
MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
long heapInit = memoryMXBean.getHeapMemoryUsage().getInit();
long heapMax = memoryMXBean.getHeapMemoryUsage().getMax();
long nonHeapInit = memoryMXBean.getNonHeapMemoryUsage().getInit();
long nonHeapMax = memoryMXBean.getNonHeapMemoryUsage().getMax();
if (initSize > 0 && heapInit != initSize) {
throw new IllegalStateException("Init heap size is wrong: " + heapInit + " vs " + initSize);
}
if (maxSize > 0 && heapMax != maxSize) {
throw new IllegalStateException("Max heap size is wrong: " + heapMax + " vs " + maxSize);
}
}
}
| ojdkbuild/lookaside_java-1.8.0-openjdk | hotspot/test/gc/shenandoah/TestMemoryMXBeans.java | Java | gpl-2.0 | 2,609 |
package com.github.esadmin.meta.model;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import javax.persistence.Table;
import org.guess.core.orm.IdEntity;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
 * Index object entity
* @author Joe.zhang
* @version 2015-12-08
*/
@Entity
@Table(name = "meta_dbindex")
@JsonIgnoreProperties(value = {"hibernateLazyInitializer","handler", "columns"})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class DBIndex extends IdEntity {
/**
     * Data tables
*/
@ManyToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE }, targetEntity = DBTable.class)
@JoinTable(name = "meta_table_index", joinColumns = { @JoinColumn(name = "index_id") }, inverseJoinColumns = { @JoinColumn(name = "table_id") })
@JsonIgnoreProperties(value = { "hibernateLazyInitializer","handler","datasource"})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
private Set<DBTable> tables = new HashSet<DBTable>(0);
/**
     * Index name
*/
@Column(name="index_name")
private String index_name;
/**
     * Index type name
*/
@Column(name="type_name")
private String type_name;
/**
     * Index category
*/
@Column(name="index_type")
private Integer indexType;
/**
     * Creator
*/
@Column(name="createby_id")
private Long createbyId;
/**
     * Updater
*/
@Column(name="updateby_id")
private Long updatebyId;
/**
     * Creation time
*/
@Column(name="create_date")
private Date createDate;
/**
     * Update time
*/
@Column(name="update_date")
private Date updateDate;
/**
     * Remark
*/
@Column(name="remark")
private String remark;
@OneToMany(targetEntity = DbColumn.class, fetch = FetchType.LAZY, cascade = CascadeType.ALL,mappedBy="dbindex")
@OrderBy("id ASC")
private Set<DbColumn> columns;
@Column(name="check_label")
private Integer checkLabel;
public Integer getCheckLabel() {
return checkLabel;
}
public void setCheckLabel(Integer checkLabel) {
this.checkLabel = checkLabel;
}
public Set<DBTable> getTables() {
return tables;
}
public void setTables(Set<DBTable> tables) {
this.tables = tables;
}
public String getIndex_name() {
return index_name;
}
public void setIndex_name(String index_name) {
this.index_name = index_name;
}
public String getType_name() {
return type_name;
}
public void setType_name(String type_name) {
this.type_name = type_name;
}
public Integer getIndexType() {
return indexType;
}
public void setIndexType(Integer indexType) {
this.indexType = indexType;
}
public Long getCreatebyId() {
return createbyId;
}
public void setCreatebyId(Long createbyId) {
this.createbyId = createbyId;
}
public Set<DbColumn> getColumns() {
return columns;
}
public void setColumns(Set<DbColumn> columns) {
this.columns = columns;
}
public Long getUpdatebyId() {
return updatebyId;
}
public void setUpdatebyId(Long updatebyId) {
this.updatebyId = updatebyId;
}
public Date getCreateDate() {
return createDate;
}
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
public Date getUpdateDate() {
return updateDate;
}
public void setUpdateDate(Date updateDate) {
this.updateDate = updateDate;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}
} | joezxh/DATAX-UI | eshbase-proxy/src/main/java/com/github/esadmin/meta/model/DBIndex.java | Java | gpl-2.0 | 3,790 |
package com.cluit.util.dataTypes;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import com.cluit.util.Const;
import com.cluit.util.AoP.MethodMapper;
import com.cluit.util.methods.ClusteringUtils;
import com.cluit.util.structures.KeyPriorityQueue_Max;
import com.cluit.util.structures.Pair;
/**A cluster is a collection of entries.
*
* The class has a lot of utility functions related to clusters such as calculating centoid, finding the entry furthest
* from the centoid and so on.
*
* @author Simon
*
*/
public class Cluster {
//*******************************************************************************************************
//region VARIABLES
//*******************************************************************************************************
private Entry centoid;
public Pair<Double, Entry> cache = new Pair<Double, Entry>( 0.0, new Entry() );
private final int dimensions;
private final Set<Entry> members = new HashSet<>();
private final KeyPriorityQueue_Max<Entry> distanceQueue = new KeyPriorityQueue_Max<Entry>();
//endregion *********************************************************************************************
	//region CONSTRUCTOR
//*******************************************************************************************************
	/**Creates a cluster whose centoid starts at the given position.
	 *
	 * @param position coordinates of the initial centoid (must have at least 1 dimension)
	 */
public Cluster(double[] position) {
if( position.length < 1){
API_Exeption("A cluster's position must be defined and have 1 or more dimenstions!");
}
this.centoid = new Entry(position);
this.dimensions = centoid.getDimensions();
};
//endregion *********************************************************************************************
//region STATIC METHODS
//*******************************************************************************************************
	/**Calculates a central point (centoid) from a collection of entries. Note that all entries must have the same dimensionality.
	 *
	 * @param entries
	 * @return A new entry, with a position that is the mean of all parameter entries (NULL if entries.length == 0)
*/
public static Entry calculateCentoid(Entry[] entries){
if( entries.length == 0)
return null;
//Fetch dimensionality for the entries and set up the coordinate array
int dim = entries[0].getDimensions();
double[] centoidCoordinates = new double[dim];
//Add all entries positions together (for example, add all entries x-values together in one array slot,
//and all y-values together in the next array slot).
for( Entry p : entries ){
for( int i = 0; i < p.getDimensions(); i++ )
centoidCoordinates[i] += p.getCoordinateAt(i);
}
//Divide each position by the number of entries (to get the mean of each dimension's position
for( int i = 0; i < centoidCoordinates.length; i++)
centoidCoordinates[i] /= entries.length;
return new Entry(centoidCoordinates);
}
/**Calculates the sum of squared errors for a given set of entries, given a centoid.<br>
* The calculation is simply: For each point, calculate the euclidian distance from that point to the centoid, and square the distance
*
	 * @param centoid The mean position of the entries (see {@link Cluster#calculateCentoid})
* @param entries
* @return
*/
public static double calculateSquaredError(Entry centoid, Entry[] entries){
double out = 0;
double dist = 0;
for(Entry e : entries ){
dist = ClusteringUtils.eucDistance(centoid, e);
out += (dist*dist);
}
return out;
}
//endregion *********************************************************************************************
//region PUBLIC
//*******************************************************************************************************
public int getNumberOfMembers(){
return distanceQueue.size() == members.size() ? distanceQueue.size() : -1;
}
/**Returns the distance to the centoid for the point which is farthest from the centoid
*
* @return The distance, if there are any members of the cluster. -1 otherwise
*/
public double getFurthestMembersDistance(){
if( distanceQueue.size() == 0 )
return -1;
return distanceQueue.peekKey();
}
/** Calculates a new centoid for the cluster. This method also update each points distance to the centoid
* <br><br>
* Complexity = <b>O(n * d)</b>,
* where <b>n</b> is the number of elements in the cluster
* where <b>d</b> the number of dimensions for each point
*/
public void calculateCentoid(){
int dim = centoid.getDimensions();
double[] newCentoidCoordinates = new double[dim];
for( Entry p : distanceQueue.values() ){
for( int i = 0; i < p.getDimensions(); i++ )
newCentoidCoordinates[i] += p.getCoordinateAt(i);
}
for( int i = 0; i < newCentoidCoordinates.length; i++)
newCentoidCoordinates[i] /= distanceQueue.size();
centoid = new Entry(newCentoidCoordinates );
updateMemberDistances();
}
/**Fetches a <b>copy</b> of the centoid of the cluster
*
* @return A new Entry, which is a copy of the cluster's centoid
*/
public Entry getCentoid(){
return new Entry(centoid);
}
/**Adds an entry to the cluster. The same entry cannot be added twice to the same cluster.
	 * This does not automatically update the cluster centoid. To do that, call {@code calculateCentoid()}.
*
* @param e
* @return True if the entry was added, false if it was not
*/
public boolean add(Entry e){
if( e.getDimensions() != dimensions ){
API_Exeption("An entry cannot be added to a cluster if their dimenstions does not match! Cluster.dim = "+dimensions+" Entry.dim = "+e.getDimensions() );
return false;
}
if( members.contains(e) ){
API_Exeption("An entry cannot be added to a cluster twice! The entry "+e+" is already present in the cluster" );
return false;
}
double dist;
if( e == cache.right )
dist = cache.left;
else
dist = ClusteringUtils.eucDistance(e, centoid);
boolean a = distanceQueue.put(dist, e);
boolean b = members.add(e);
return a & b;
}
/**Removes a point from the cluster
*
* @param e The point to be removed
* @return True if it was found. False if the point wasn't found.
*/
public boolean removeEntry(Entry e){
boolean a = distanceQueue.remove(e);
boolean b = members.remove(e);
return a & b;
}
/**Calculates a points distance to the clusters centoid.
* The result is cached (the cache stores only 1 element), to prevent
* the result from having to be re-computed in the near future.
* <br>It is therefore recommended that whenever a point checks its distance to
* all clusters, it should be added to a cluster before another point checks
* it's distances.
*
* @param p The point
* @return Distance to the centoid
*/
public double distanceToCentoid(Entry p){
double dist = ClusteringUtils.eucDistance(p, centoid);
cache = new Pair<Double, Entry>(dist, p);
return dist;
}
	/**Checks whether a given entry is a member of this cluster or not
	 *
	 * @param e The entry to check
* @return True if the point is found within the cluster
*/
public boolean isMember(Entry e) {
return members.contains(e);
}
	/**Fetches an array of all entries that are present within this cluster. This array can have a length of 0, in case no
* entries are registered within this cluster
*/
public Entry[] getMembers() {
return members.toArray( new Entry[0] );
}
/**Calculates the sum of squared errors for this cluster
*
* @return
*/
public double getSquaredError(){
return Cluster.calculateSquaredError(centoid, getMembers()) ;
}
public String toString(){
String out = "[ ";
for( Entry e : members ){
out += e.toString() + " : ";
}
return members.size() > 0 ? out.substring(0, out.length() - 3) + " ]" : "[ ]";
}
//endregion *********************************************************************************************
//region PRIVATE
//*******************************************************************************************************
/**Update each member's distance to the centoid
*
*/
private void updateMemberDistances() {
ArrayList<Entry> list = distanceQueue.values();
distanceQueue.clear();
for(Entry p : list){
double newDistance = ClusteringUtils.eucDistance(centoid, p);
distanceQueue.add(newDistance, p);
}
}
private int API_Exeption(String s){
MethodMapper.invoke(Const.METHOD_EXCEPTION_GENERAL, "Error in Cluster.java! " + s +" " + com.cluit.util.methods.MiscUtils.getStackPos(), new Exception() );
return -1;
}
//endregion *********************************************************************************************
//*******************************************************************************************************
}
| Gikkman/CluIt | CluIt/src/com/cluit/util/dataTypes/Cluster.java | Java | gpl-2.0 | 8,826 |
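Usage note (not from the original source): a short sketch of the Cluster/Entry API used above, assuming Entry lives next to Cluster and exposes the double[] constructor that Cluster itself calls:

import com.cluit.util.dataTypes.Cluster;
import com.cluit.util.dataTypes.Entry;

class ClusterUsageSketch {
	static double clusterTwoPoints() {
		// start the cluster at the origin of a 2-dimensional space
		Cluster cluster = new Cluster(new double[] { 0.0, 0.0 });
		cluster.add(new Entry(new double[] { 1.0, 1.0 }));
		cluster.add(new Entry(new double[] { 3.0, 1.0 }));
		// recompute the centoid from the two members, then report the sum of squared errors
		cluster.calculateCentoid();
		return cluster.getSquaredError();
	}
}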
package storm.starter.trident.homework.state;
import storm.trident.operation.TridentCollector;
import storm.trident.state.BaseStateUpdater;
import storm.trident.tuple.TridentTuple;
import java.util.ArrayList;
import java.util.List;
/**
* Updater class that updates the state with the new tweets.
* Created by Parth Satra on 4/5/15.
*/
public class TopKStateUpdater extends BaseStateUpdater<TopKState> {
@Override
public void updateState(TopKState topKState, List<TridentTuple> list, TridentCollector tridentCollector) {
for(TridentTuple tuple : list) {
// Gets all the space separated hashtags.
String hashTags = tuple.getString(0);
String[] tag = hashTags.split(" ");
// Creates the list to be added to the state
List<TopTweet> tweetList = new ArrayList<TopTweet>();
for(String t : tag) {
if(t != null && t.trim().length() != 0) {
TopTweet tt = new TopTweet(t, 1);
tweetList.add(tt);
}
}
// Adds the list to the state.
topKState.add(tweetList);
}
}
}
| parthsatra/TwitterTopKTrends | apache-storm-0.9.3/examples/storm-starter/src/jvm/storm/starter/trident/homework/state/TopKStateUpdater.java | Java | gpl-2.0 | 1,167 |
package eu.ttbox.geoping.ui.admob;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import com.google.android.gms.ads.AdListener;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.google.android.gms.ads.InterstitialAd;
import eu.ttbox.geoping.BuildConfig;
import eu.ttbox.geoping.R;
import eu.ttbox.geoping.core.AppConstants;
public class AdmobHelper {
private static final String TAG = "AdmobHelper";
// ===========================================================
// AdView : https://developers.google.com/mobile-ads-sdk/docs/admob/fundamentals
// https://groups.google.com/forum/#!msg/google-admob-ads-sdk/8MCNsiVAc7A/pkRLcQ9zPtYJ
// ===========================================================
public static AdView bindAdMobView(Activity context) {
// Admob
final View admob = context.findViewById(R.id.admob);
final AdView adView = (AdView) context.findViewById(R.id.adView);
if (isAddBlocked(context)) {
Log.d(TAG, "### is Add Blocked adsContainer : " + admob);
if (admob != null) {
admob.setVisibility(View.GONE);
Log.d(TAG, "### is Add Blocked adsContainer ==> GONE");
}
} else {
// Container
Log.d(TAG, "### is Add Not Blocked adsContainer : " + admob);
if (admob != null) {
admob.setVisibility(View.VISIBLE);
Log.d(TAG, "### is Add Not Blocked adsContainer ==> VISIBLE");
}
}
// Request Ad
if (adView != null) {
// http://stackoverflow.com/questions/11790376/animated-mopub-admob-native-ads-overlayed-on-a-game-black-out-screen
//adView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
// Listener
adView.setAdListener(new AdListener() {
public void onAdOpened() {
Log.d(TAG, "### AdListener onAdOpened AdView");
}
public void onAdLoaded() {
Log.d(TAG, "### AdListener onAdLoaded AdView");
}
public void onAdFailedToLoad(int errorcode) {
if (admob!=null) {
Log.d(TAG, "### AdListener onAdFailedToLoad ==> HIDE adsContainer : " + admob);
admob.setVisibility(View.GONE);
}
switch (errorcode) {
case AdRequest.ERROR_CODE_INTERNAL_ERROR:
Log.d(TAG, "### ########################################################################## ###");
Log.d(TAG, "### AdListener onAdFailedToLoad AdView : errorcode = ERROR_CODE_INTERNAL_ERROR ###");
Log.d(TAG, "### ########################################################################## ###");
break;
case AdRequest.ERROR_CODE_INVALID_REQUEST:
Log.d(TAG, "### ########################################################################### ###");
Log.d(TAG, "### AdListener onAdFailedToLoad AdView : errorcode = ERROR_CODE_INVALID_REQUEST ###");
Log.d(TAG, "### ########################################################################### ###");
break;
case AdRequest.ERROR_CODE_NETWORK_ERROR:
Log.d(TAG, "### ######################################################################### ###");
Log.d(TAG, "### AdListener onAdFailedToLoad AdView : errorcode = ERROR_CODE_NETWORK_ERROR ###");
Log.d(TAG, "### ######################################################################### ###");
break;
case AdRequest.ERROR_CODE_NO_FILL:
Log.d(TAG, "### ################################################################### ###");
Log.d(TAG, "### AdListener onAdFailedToLoad AdView : errorcode = ERROR_CODE_NO_FILL ###");
Log.d(TAG, "### ################################################################### ###");
break;
default:
Log.d(TAG, "### ########################################################################### ###");
Log.d(TAG, "### AdListener onAdFailedToLoad AdView : errorcode = " + errorcode + " ###");
Log.d(TAG, "### ########################################################################### ###");
}
}
});
// adView.setAdUnitId(context.getString(R.string.admob_key));
// adView.setAdSize(AdSize.SMART_BANNER);
AdRequest.Builder adRequestBuilder = new AdRequest.Builder();
if (BuildConfig.DEBUG) {
adRequestBuilder
.addTestDevice(AdRequest.DEVICE_ID_EMULATOR)
.addTestDevice("149D6C776DC12F380715698A396A64C4");
}
AdRequest adRequest = adRequestBuilder.build();
adView.loadAd(adRequest);
Log.d(TAG, "### Load adRequest AdView");
} else {
Log.e(TAG, "### Null AdView");
}
return adView;
}
public static boolean isAddBlocked(Context context) {
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
boolean isAddBlocked = sharedPreferences != null ? sharedPreferences.getBoolean(AppConstants.PREFS_ADD_BLOCKED, false) : false;
return isAddBlocked;
}
// ===========================================================
// InterstitialAd
// ===========================================================
public static class AppAdListener extends AdListener {
InterstitialAd interstitial;
public AppAdListener() {
}
public AppAdListener(InterstitialAd interstitial) {
this.interstitial = interstitial;
}
@Override
public void onAdLoaded() {
Log.i(TAG, "### AdListener : onAdLoaded");
super.onAdLoaded();
interstitial.show();
}
}
public static InterstitialAd displayInterstitialAd(Context context) {
return displayInterstitialAd(context, new AppAdListener());
}
public static InterstitialAd displayInterstitialAd(Context context, AppAdListener adListener) {
final InterstitialAd interstitial = new InterstitialAd(context);
interstitial.setAdUnitId(context.getString(R.string.admob_key));
// Add Listener
adListener.interstitial = interstitial;
interstitial.setAdListener(adListener);
// Create ad request.
AdRequest adRequest = new AdRequest.Builder().build();
// Begin loading your interstitial.
interstitial.loadAd(adRequest);
return interstitial;
}
}
| gabuzomeu/geoPingProject | geoPing/src/main/java/eu/ttbox/geoping/ui/admob/AdmobHelper.java | Java | gpl-2.0 | 7,327 |
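Usage note (not from the original source): a hedged sketch of how an Activity might drive AdmobHelper; only bindAdMobView, isAddBlocked and displayInterstitialAd come from the class above, while the layout resource and the lifecycle placement are assumptions.

import android.app.Activity;
import android.os.Bundle;
import com.google.android.gms.ads.AdView;
import eu.ttbox.geoping.ui.admob.AdmobHelper;

public class AdsHostActivity extends Activity {

    private AdView adView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // hypothetical layout that contains the R.id.admob container and R.id.adView banner
        setContentView(R.layout.activity_ads_host);
        // hides the container when ads are blocked in the shared preferences
        adView = AdmobHelper.bindAdMobView(this);
    }

    private void showInterstitialIfAllowed() {
        if (!AdmobHelper.isAddBlocked(this)) {
            // the interstitial shows itself once loaded, via AppAdListener
            AdmobHelper.displayInterstitialAd(this);
        }
    }
}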
package org.cohorte.utilities.security;
/**
* @author ogattaz
*
*/
public class CXPassphraseBuilder {
/**
	 * @param aPassphrase
* @return
* @throws InstantiationException
* @throws CXPassphraseSchemeException
*/
public static IXPassphrase buildB64(final IXPassphrase aPassphrase)
throws InstantiationException, CXPassphraseSchemeException {
return new CXPassphraseB64(aPassphrase);
}
/**
* @param aValue
* @return
* @throws InstantiationException
* @throws CXPassphraseSchemeException
*/
public static IXPassphrase buildB64(final String aValue)
throws InstantiationException, CXPassphraseSchemeException {
return new CXPassphraseB64(aValue);
}
/**
* @param aValue
* @return
* @throws InstantiationException
* @throws CXPassphraseSchemeException
*/
public static IXPassphrase buildB64OBFRDM(final String aValue)
throws InstantiationException, CXPassphraseSchemeException {
return new CXPassphraseB64(new CXPassphraseOBF(new CXPassphraseRDM(
aValue)));
}
/**
	 * @param aPassphrase
* @return
* @throws InstantiationException
* @throws CXPassphraseSchemeException
*/
public static IXPassphrase buildOBF(final IXPassphrase aPassphrase)
throws InstantiationException, CXPassphraseSchemeException {
return new CXPassphraseOBF(aPassphrase);
}
/**
* @param aValue
* @return
* @throws InstantiationException
* @throws CXPassphraseSchemeException
*/
public static IXPassphrase buildOBF(final String aValue)
throws InstantiationException, CXPassphraseSchemeException {
return new CXPassphraseOBF(aValue);
}
/**
* @param aPassphrase
* @return
* @throws InstantiationException
* @throws CXPassphraseSchemeException
*/
public static IXPassphrase buildRDM(final IXPassphrase aPassphrase)
throws InstantiationException, CXPassphraseSchemeException {
return new CXPassphraseRDM(aPassphrase);
}
/**
* @param aValue
* @return
* @throws InstantiationException
* @throws CXPassphraseSchemeException
*/
public static IXPassphrase buildRDM(final String aValue)
throws InstantiationException, CXPassphraseSchemeException {
return new CXPassphraseRDM(aValue);
}
}
| isandlaTech/cohorte-utilities | org.cohorte.utilities/src/org/cohorte/utilities/security/CXPassphraseBuilder.java | Java | gpl-2.0 | 2,174 |
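Usage note (not from the original source): the builder just layers the RDM, OBF and Base64 schemes. A minimal sketch of encoding a secret with the combined scheme; exception handling is collapsed to a single throws clause:

import org.cohorte.utilities.security.CXPassphraseBuilder;
import org.cohorte.utilities.security.IXPassphrase;

class PassphraseSketch {
	static IXPassphrase encode(final String clearText) throws Exception {
		// wraps the value as RDM, then OBF, then Base64, exactly as buildB64OBFRDM chains them
		return CXPassphraseBuilder.buildB64OBFRDM(clearText);
	}
}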
package com.tachys.moneyshare.dataaccess.db.contracts;
import android.provider.BaseColumns;
public class SettlementContract {
public SettlementContract() {
}
public static class SettlementEntry implements BaseColumns {
public static final String TABLE_NAME = "settlement";
public static final String COLUMN_NAME_PAYERID = "payer";
public static final String COLUMN_NAME_PAYEEID = "payee";
public static final String COLUMN_NAME_AMOUNT = "amount";
}
}
| StrawHatPirates/MoneyShare | src/app/src/main/java/com/tachys/moneyshare/dataaccess/db/contracts/SettlementContract.java | Java | gpl-2.0 | 502 |
package mnm.mcpackager.gui;
import java.io.File;
import javax.swing.filechooser.FileFilter;
public class JarFilter extends FileFilter {
@Override
public boolean accept(File f) {
if (f.isDirectory())
return true;
		return "jar".equalsIgnoreCase(getExtension(f)); // avoids an NPE when the file has no extension
}
@Override
public String getDescription() {
return "Jar Archives";
}
private String getExtension(File f) {
String ext = null;
String s = f.getName();
int i = s.lastIndexOf('.');
if (i > 0 && i < s.length() - 1)
ext = s.substring(i + 1).toLowerCase();
return ext;
}
}
| killjoy1221/MCPatcher-Repackager | src/main/java/mnm/mcpackager/gui/JarFilter.java | Java | gpl-2.0 | 656 |
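Usage note (not from the original source): JarFilter is a plain Swing FileFilter, so it plugs straight into a JFileChooser:

import javax.swing.JFileChooser;
import mnm.mcpackager.gui.JarFilter;

class JarChooserSketch {
	static JFileChooser buildChooser() {
		JFileChooser chooser = new JFileChooser();
		// only directories and *.jar files are shown
		chooser.setFileFilter(new JarFilter());
		return chooser;
	}
}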
/* This file is part of calliope.
*
* calliope is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* calliope is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with calliope. If not, see <http://www.gnu.org/licenses/>.
*/
package calliope.handler.post;
import calliope.Connector;
import calliope.exception.AeseException;
import calliope.handler.post.importer.*;
import calliope.constants.Formats;
import calliope.importer.Archive;
import calliope.constants.Config;
import calliope.json.JSONDocument;
import calliope.constants.Database;
import calliope.constants.Params;
import calliope.constants.JSONKeys;
import java.util.ArrayList;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
/**
* Handle import of a set of XML files from a tool like mmpupload.
* @author desmond 23-7-2012
*/
public class AeseXMLImportHandler extends AeseImportHandler
{
public void handle( HttpServletRequest request,
HttpServletResponse response, String urn ) throws AeseException
{
try
{
if (ServletFileUpload.isMultipartContent(request) )
{
parseImportParams( request );
Archive cortex = new Archive(docID.getWork(),
docID.getAuthor(),Formats.MVD_TEXT,encoding);
Archive corcode = new Archive(docID.getWork(),
docID.getAuthor(),Formats.MVD_STIL,encoding);
cortex.setStyle( style );
corcode.setStyle( style );
StageOne stage1 = new StageOne( files );
log.append( stage1.process(cortex,corcode) );
if ( stage1.hasFiles() )
{
String suffix = "";
StageTwo stage2 = new StageTwo( stage1, false );
stage2.setEncoding( encoding );
log.append( stage2.process(cortex,corcode) );
StageThreeXML stage3Xml = new StageThreeXML( stage2,
style, dict, hhExceptions );
stage3Xml.setStripConfig( getConfig(Config.stripper,
stripperName) );
stage3Xml.setSplitConfig( getConfig(Config.splitter,
splitterName) );
if ( stage3Xml.hasTEI() )
{
ArrayList<File> notes = stage3Xml.getNotes();
if ( notes.size()> 0 )
{
Archive nCorTex = new Archive(docID.getWork(),
docID.getAuthor(),Formats.MVD_TEXT,encoding);
nCorTex.setStyle( style );
Archive nCorCode = new Archive(docID.getWork(),
docID.getAuthor(),Formats.MVD_STIL,encoding);
StageThreeXML s3notes = new StageThreeXML(
style,dict, hhExceptions);
s3notes.setStripConfig(
getConfig(Config.stripper, stripperName) );
s3notes.setSplitConfig(
getConfig(Config.splitter, splitterName) );
for ( int j=0;j<notes.size();j++ )
s3notes.add(notes.get(j));
log.append( s3notes.process(nCorTex,nCorCode) );
addToDBase(nCorTex, "cortex", "notes" );
addToDBase( nCorCode, "corcode", "notes" );
// differentiate base from notes
suffix = "base";
}
if ( xslt == null )
xslt = Params.XSLT_DEFAULT;
String transform = getConfig(Config.xslt,xslt);
JSONDocument jDoc = JSONDocument.internalise(
transform );
stage3Xml.setTransform( (String)
jDoc.get(JSONKeys.BODY) );
}
log.append( stage3Xml.process(cortex,corcode) );
addToDBase( cortex, "cortex", suffix );
addToDBase( corcode, "corcode", suffix );
// now get the json docs and add them at the right docid
// Connector.getConnection().putToDb( Database.CORTEX,
// docID.get(), cortex.toMVD("cortex") );
// log.append( cortex.getLog() );
// String fullAddress = docID.get()+"/"+Formats.DEFAULT;
// log.append( Connector.getConnection().putToDb(
// Database.CORCODE,fullAddress, corcode.toMVD("corcode")) );
// log.append( corcode.getLog() );
}
response.setContentType("text/html;charset=UTF-8");
response.getWriter().println( wrapLog() );
}
}
catch ( Exception e )
{
throw new AeseException( e );
}
}
}
| discoverygarden/calliope | src/calliope/handler/post/AeseXMLImportHandler.java | Java | gpl-2.0 | 5,684 |
///**
// * ClassName: MainTest.java
// * Date: 2017-05-16
// */
//package com.ojdbc.sql.test;
//
//import java.sql.Connection;
//import java.sql.SQLException;
//
//import com.ojdbc.sql.ConnectionObject;
//import com.ojdbc.sql.DataBase;
//import com.ojdbc.sql.DataBaseEnum;
//import com.ojdbc.sql.DataBaseManager;
//import com.ojdbc.sql.SQLResultSet;
//import com.ojdbc.sql.connection.MongoDataBaseConnection;
//import com.ojdbc.sql.connection.MySQLDataBaseConnection;
//import com.ojdbc.sql.connection.OracleDataBaseConnection;
//import com.ojdbc.sql.connection.SQLiteDataBaseConnection;
//
///**
// * Author: ShaoGaige
// * Description: Main test class
// * Log:
// */
//public class MainTest {
//
// /**
// * @param args
// * @throws SQLException
// */
// public static void main(String[] args) throws SQLException {
// // TODO Auto-generated method stub
// ConnectionObject conn;
//
// SQLiteDataBaseConnection sqlite = new SQLiteDataBaseConnection();
// String dataBaseURL = "jdbc:sqlite://E:/shaogaige/iNote/iNoteRun/data/iNoteData.note";
// String userName = "";
// String passWord = "";
// conn = sqlite.createConnection(dataBaseURL, userName, passWord);
// System.out.println(conn.getMetaData().getURL());
//
// DataBase database = new DataBase(conn);
// SQLResultSet rs =database.exeSQLSelect("select * from noteinfo");
// System.out.println(rs.getRowNum());
//
// MongoDataBaseConnection mongo = new MongoDataBaseConnection();
// dataBaseURL = "jdbc:mongo://172.15.103.42:10001/geoglobe";
// userName = "data";
// passWord = "data";
// conn = mongo.createConnection(dataBaseURL, userName, passWord);
// System.out.println(conn.getMetaData().getURL());
//
// MySQLDataBaseConnection mysql = new MySQLDataBaseConnection();
// dataBaseURL = "jdbc:mysql://172.15.103.42:3306/geoglobe";
// userName = "root";
// passWord = "root";
// conn = mysql.createConnection(dataBaseURL, userName, passWord);
// System.out.println(conn.getMetaData().getURL());
//
// OracleDataBaseConnection oracle = new OracleDataBaseConnection();
// dataBaseURL = "jdbc:oracle:thin:@172.15.103.43:1521:geoglobe";
// userName = "autotest";
// passWord = "autotest";
// conn = oracle.createConnection(dataBaseURL, userName, passWord);
// System.out.println(conn.getMetaData().getURL());
//
// DataBaseManager.getDataBase(DataBaseEnum.ORACLE, "", "", "");
// }
//
//}
| shaogaige/iDataBaseConnection | src/com/ojdbc/sql/test/MainTest.java | Java | gpl-2.0 | 2,468 |
package org.mo.game.editor.face.apl.logic.report;
import org.mo.jfa.common.page.FAbstractFormPage;
public class FWebReportPage
extends FAbstractFormPage{
private static final long serialVersionUID = 1L;
private String _tempName;
public String getTempName(){
return _tempName;
}
public void setTempName(String tempName){
_tempName = tempName;
}
}
| favedit/MoPlatform | mo-gm-develop/src/editor-face/org/mo/game/editor/face/apl/logic/report/FWebReportPage.java | Java | gpl-2.0 | 389 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ch.quantasy.iot.mqtt.tinkerforge.device.deviceHandler.IMU.status;
import ch.quantasy.iot.mqtt.base.AHandler;
import ch.quantasy.iot.mqtt.base.message.AStatus;
import ch.quantasy.iot.mqtt.tinkerforge.device.deviceHandler.IMU.IMU;
import org.eclipse.paho.client.mqttv3.MqttAsyncClient;
/**
*
* @author Reto E. Koenig <[email protected]>
*/
public class AllDataPeriodStatus extends AStatus {
public AllDataPeriodStatus(AHandler deviceHandler, String statusTopic, MqttAsyncClient mqttClient) {
super(deviceHandler, statusTopic, "allData", mqttClient);
super.addDescription(IMU.PERIOD, Long.class, "JSON", "0", "..", "" + Long.MAX_VALUE);
}
}
| knr1/ch.bfh.mobicomp | ch.quantasy.iot.gateway.tinkerforge/src/main/java/ch/quantasy/iot/mqtt/tinkerforge/device/deviceHandler/IMU/status/AllDataPeriodStatus.java | Java | gpl-2.0 | 848 |
package com.karniyarik.common.util;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import org.apache.commons.lang.StringUtils;
import com.karniyarik.common.KarniyarikRepository;
import com.karniyarik.common.config.system.DeploymentConfig;
import com.karniyarik.common.config.system.WebConfig;
public class IndexMergeUtil
{
public static final String SITE_NAME_PARAMETER = "s";
public static final void callMergeSiteIndex(String siteName) throws Throwable
{
callMergeSiteIndex(siteName, false);
}
public static final void callMergeSiteIndex(String siteName, boolean reduceBoost) throws Throwable
{
WebConfig webConfig = KarniyarikRepository.getInstance().getConfig().getConfigurationBundle().getWebConfig();
DeploymentConfig config = KarniyarikRepository.getInstance().getConfig().getConfigurationBundle().getDeploymentConfig();
//String url = "http://www.karniyarik.com";
String url = config.getMasterWebUrl();
URL servletURL = null;
URLConnection connection = null;
InputStream is = null;
String tail = webConfig.getMergeIndexServlet() + "?" + SITE_NAME_PARAMETER + "=" + siteName;
if (StringUtils.isNotBlank(url))
{
if(!tail.startsWith("/") && !url.endsWith("/"))
{
url += "/";
}
url += tail;
if(reduceBoost)
{
url += "&rb=true";
}
servletURL = new URL(url);
connection = servletURL.openConnection();
connection.connect();
is = connection.getInputStream();
is.close();
}
servletURL = null;
connection = null;
is = null;
tail = null;
}
public static void callReduceSiteIndex(String siteName) throws Throwable{
callMergeSiteIndex(siteName, true);
}
public static void main(String[] args) throws Throwable{
String[] sites = new String[]{
"hataystore",
"damakzevki", "robertopirlanta", "bebekken", "elektrikmalzemem", "starsexshop", "altinsarrafi", "budatoys", "taffybaby", "medikalcim", "beyazdepo", "tasarimbookshop", "boviza",
"evdepo", "bonnyfood", "beyazkutu", "koctas", "bizimmarket", "narbebe", "gonayakkabi", "tgrtpazarlama", "pasabahce", "vatanbilgisayar", "egerate-store", "dr", "hipernex", "ensarshop",
"yesil", "dealextreme", "petsrus", "otoyedekparcaburada", "elektrikdeposu", "alisveris", "radikalteknoloji", "ekopasaj", "strawberrynet", "yenisayfa", "adresimegelsin",
"juenpetmarket", "nadirkitap"};
for(String site: sites)
{
System.out.println(site);
callMergeSiteIndex(site);
Thread.sleep(10000);
}
}
}
| Karniyarik/karniyarik | karniyarik-common/src/main/java/com/karniyarik/common/util/IndexMergeUtil.java | Java | gpl-2.0 | 2,586 |
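Usage note (not from the original source): callMergeSiteIndex assembles the merge-servlet URL from the deployment config and issues a plain GET. A sketch of triggering a merge followed by a boost reduction for a given site name:

import com.karniyarik.common.util.IndexMergeUtil;

class IndexMergeSketch {
	static void remergeSite(final String siteName) throws Throwable {
		// plain merge call; the target URL comes from the master web deployment config
		IndexMergeUtil.callMergeSiteIndex(siteName);
		// same servlet call with rb=true appended, reducing boost afterwards
		IndexMergeUtil.callReduceSiteIndex(siteName);
	}
}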
package org.mo.game.editor.face.apl.logic.form;
import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.Element;
import com.lowagie.text.Font;
import com.lowagie.text.HeaderFooter;
import com.lowagie.text.PageSize;
import com.lowagie.text.Paragraph;
import com.lowagie.text.Phrase;
import com.lowagie.text.Rectangle;
import com.lowagie.text.pdf.BaseFont;
import com.lowagie.text.pdf.PdfPTable;
import com.lowagie.text.pdf.PdfWriter;
import java.awt.Color;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.mo.com.lang.FFatalError;
import org.mo.com.lang.RString;
import org.mo.com.logging.ILogger;
import org.mo.com.logging.RLogger;
import org.mo.com.validator.RStringValidator;
import org.mo.com.xml.FXmlNode;
import org.mo.com.xml.IXmlObject;
import org.mo.core.aop.face.ALink;
import org.mo.eng.data.common.ISqlContext;
import org.mo.web.core.servlet.common.IWebServletResponse;
import org.mo.web.core.webform.FWebFormDatasetArgs;
import org.mo.web.core.webform.IWebFormConsole;
import org.mo.web.core.webform.IWebFormDatasetConsole;
import org.mo.web.protocol.context.IWebContext;
public class FWebFormPdfServlet
implements
IWebFormPdfServlet{
private static ILogger _logger = RLogger.find(FWebFormPdfServlet.class);
public static byte[] creatPdf(FXmlNode dsNode,
IXmlObject formNode){
      // create a Document object
      Rectangle rectPageSize = new Rectangle(PageSize.A4);// define the A4 page size
      rectPageSize = rectPageSize.rotate();// rotate the A4 page to landscape
      Document document = new Document(rectPageSize, 50, 30, 30, 30);// the 4 parameters
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      // set the 4 page margins
try{
         // attach a PdfWriter that renders the document into the in-memory buffer
PdfWriter.getInstance(document, buffer);
BaseFont bfChinese;
try{
bfChinese = BaseFont.createFont("STSongStd-Light", "UniGB-UCS2-H", false);
}catch(IOException e){
throw new FFatalError(e);
}
Font fontChinese = new Font(bfChinese, 12, Font.NORMAL, Color.red);
document.open();
         // insert a paragraph
         // get the module name
Paragraph par = new Paragraph(formNode.innerGet("label"), fontChinese);
document.add(par);
int tableColumns = formNode.children().count();
         // define the table content
         PdfPTable datatable = new PdfPTable(tableColumns); // create a new table.
         // int headerwidths[] = { 9, 4, 8, 10, 8, 7 }; // percentage widths for the header columns
// datatable.setWidths(headerwidths);
datatable.setWidthPercentage(100);
         // set the header cell padding
datatable.getDefaultCell().setPadding(2);
         // set the header border width
datatable.getDefaultCell().setBorderWidth(1);
datatable.getDefaultCell().setGrayFill(0.8f);
datatable.getDefaultCell().setHorizontalAlignment(Element.ALIGN_CENTER);
         // fill in the table header below
for(int i = 0; i < tableColumns; i++){
datatable.addCell(new Phrase(formNode.children().get(i).innerGet("label"), fontChinese));
}
datatable.setHeaderRows(1);
         // end of the table header
         // set up the page numbering
HeaderFooter footer = new HeaderFooter(new Phrase("页码:", fontChinese), true);
footer.setBorder(Rectangle.NO_BORDER);
document.setFooter(footer);
         // end of the page-number setup
         // table row color parameters
int i = 1;
datatable.getDefaultCell().setBorderWidth(1);
for(FXmlNode row : dsNode.nodes()){
if(i % 2 == 1){
            // set the row background color
datatable.getDefaultCell().setGrayFill(1.0f);
}
         // for each data column, fetch the value for that column
for(int x = 0; x < tableColumns; x++){
String columnName = formNode.children().get(x).innerGet("data_name");
datatable.addCell(new Phrase(row.get(columnName), fontChinese));
}
if(i % 2 == 1){
            // restore the row background color
datatable.getDefaultCell().setGrayFill(0.9f);
}
i++;
}
      document.add(datatable);// add the finished table
}catch(DocumentException de){
de.printStackTrace();
System.err.println("document: " + de.getMessage());
}finally{
document.close();
}
return buffer.toByteArray();
}
@ALink
protected IWebFormConsole _webformConsole;
@ALink
IWebFormDatasetConsole _webFormDataConsole;
@Override
public void build(IWebContext context,
ISqlContext sqlContext,
IWebServletResponse response){
String formName = context.parameter("form_name");
RStringValidator.checkEmpty(formName, "form_name");
IXmlObject xform = findForm(formName);
// 查找数据集
FWebFormDatasetArgs args = new FWebFormDatasetArgs(context, sqlContext);
args.setPageSize(-1);
xform.children().get(0).innerGet("label");
args.setForm(xform);
FXmlNode dsNode = _webFormDataConsole.fetchNode(args);
// 生成PDF文件
byte[] bytes = creatPdf(dsNode, xform);
_logger.debug(this, "build", "Make form pdf file. (form={0}, pdf size={1})", xform.name(), bytes.length);
response.write(bytes);
}
public IXmlObject findForm(String formName){
IXmlObject xform = null;
if(RString.isNotEmpty(formName)){
xform = _webformConsole.find(formName);
if(null == xform){
throw new FFatalError("Show form is null. (name={0})", formName);
}
}
return xform;
}
}
| favedit/MoPlatform | mo-gm-develop/src/editor-face/org/mo/game/editor/face/apl/logic/form/FWebFormPdfServlet.java | Java | gpl-2.0 | 5,836 |
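Usage note (not from the original source): creatPdf follows the usual iText 2.x flow (Document, then PdfWriter, then open/add/close). A stripped-down sketch of that flow using only the com.lowagie classes the servlet already imports:

import java.io.ByteArrayOutputStream;
import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.PageSize;
import com.lowagie.text.Paragraph;
import com.lowagie.text.pdf.PdfPTable;
import com.lowagie.text.pdf.PdfWriter;

class SimplePdfSketch {
   static byte[] twoColumnTable() throws DocumentException {
      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      // landscape A4 with the same margins the servlet uses
      Document document = new Document(PageSize.A4.rotate(), 50, 30, 30, 30);
      PdfWriter.getInstance(document, buffer);
      document.open();
      document.add(new Paragraph("Report"));
      PdfPTable table = new PdfPTable(2);
      table.addCell("header A");
      table.addCell("header B");
      table.setHeaderRows(1);
      table.addCell("value 1");
      table.addCell("value 2");
      document.add(table);
      document.close();
      return buffer.toByteArray();
   }
}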
/**
*
* Copyright (c) 2009-2016 Freedomotic team http://freedomotic.com
*
* This file is part of Freedomotic
*
* This Program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2, or (at your option) any later version.
*
* This Program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* Freedomotic; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*/
package com.freedomotic.api;
import com.google.inject.AbstractModule;
import com.google.inject.Singleton;
/**
*
* @author Enrico Nicoletti
*/
public class InjectorApi extends AbstractModule {
@Override
protected void configure() {
bind(API.class).to(APIStandardImpl.class).in(Singleton.class);
}
}
| abollaert/freedomotic | framework/freedomotic-core/src/main/java/com/freedomotic/api/InjectorApi.java | Java | gpl-2.0 | 1,096 |
package com.lonebytesoft.thetaleclient.api.model;
import android.os.Parcel;
import android.os.Parcelable;
import com.lonebytesoft.thetaleclient.api.dictionary.CompanionSpecies;
import com.lonebytesoft.thetaleclient.util.ObjectUtils;
import org.json.JSONException;
import org.json.JSONObject;
/**
* @author Hamster
* @since 17.02.2015
*/
public class CompanionInfo implements Parcelable {
public final CompanionSpecies species;
public final String name;
public final int healthCurrent;
public final int healthMax;
public final int coherence;
public final int experienceCurrent;
public final int experienceForNextLevel;
public CompanionInfo(final JSONObject json) throws JSONException {
species = ObjectUtils.getEnumForCode(CompanionSpecies.class, json.getInt("type"));
name = json.getString("name");
healthCurrent = json.getInt("health");
healthMax = json.getInt("max_health");
coherence = json.getInt("coherence");
experienceCurrent = json.getInt("experience");
experienceForNextLevel = json.getInt("experience_to_level");
}
// parcelable stuff
private CompanionInfo(final Parcel in) {
final int index = in.readInt();
species = index == -1 ? null : CompanionSpecies.values()[index];
name = in.readString();
healthCurrent = in.readInt();
healthMax = in.readInt();
coherence = in.readInt();
experienceCurrent = in.readInt();
experienceForNextLevel = in.readInt();
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel out, int flags) {
out.writeInt(species == null ? -1 : species.ordinal());
out.writeString(name);
out.writeInt(healthCurrent);
out.writeInt(healthMax);
out.writeInt(coherence);
out.writeInt(experienceCurrent);
out.writeInt(experienceForNextLevel);
}
public static final Parcelable.Creator<CompanionInfo> CREATOR = new Parcelable.Creator<CompanionInfo>() {
@Override
public CompanionInfo createFromParcel(Parcel source) {
return new CompanionInfo(source);
}
@Override
public CompanionInfo[] newArray(int size) {
return new CompanionInfo[size];
}
};
}
| hamsterxc/TheTaleClient | app/src/main/java/com/lonebytesoft/thetaleclient/api/model/CompanionInfo.java | Java | gpl-2.0 | 2,364 |