public class OStorageRemote extends OStorageAbstract implements OStorageProxy {
// Option key (matched lower-cased) selecting the CONNECTION_STRATEGY to use.
public static final String PARAM_CONNECTION_STRATEGY = "connectionStrategy";
// Connection defaults used when the URL omits host/port.
private static final String DEFAULT_HOST = "localhost";
private static final int DEFAULT_PORT = 2424;
private static final int DEFAULT_SSL_PORT = 2434;
// Separator between multiple server addresses in the connection URL.
private static final String ADDRESS_SEPARATOR = ";";
public static final String DRIVER_NAME = "OrientDB Java";
private static final String LOCAL_IP = "127.0.0.1";
private static final String LOCALHOST = "localhost";
// Shared across all remote storages: generates unique (negative-starting) session ids.
private static AtomicInteger sessionSerialId = new AtomicInteger(-1);
// How requests are routed across the known servers.
public enum CONNECTION_STRATEGY {
STICKY, ROUND_ROBIN_CONNECT, ROUND_ROBIN_REQUEST
}
private CONNECTION_STRATEGY connectionStrategy = CONNECTION_STRATEGY.STICKY;
private final OSBTreeCollectionManagerRemote sbTreeCollectionManager = new OSBTreeCollectionManagerRemote(this);
// Known server URLs (mutated under its own monitor) and the cluster lookup tables.
protected final List<String> serverURLs = new ArrayList<String>();
protected final Map<String, OCluster> clusterMap = new ConcurrentHashMap<String, OCluster>();
// Single-thread executor that consumes asynchronous responses (see asyncNetworkOperation).
private final ExecutorService asynchExecutor;
private final ODocument clusterConfiguration = new ODocument();
private final String clientId;
// Reference count of database instances using this storage (see addUser/removeUser).
private final AtomicInteger users = new AtomicInteger(0);
private OContextConfiguration clientConfiguration;
// Retry policy read from NETWORK_SOCKET_RETRY / NETWORK_SOCKET_RETRY_DELAY.
private int connectionRetry;
private int connectionRetryDelay;
private OCluster[] clusters = OCommonConst.EMPTY_CLUSTER_ARRAY;
private int defaultClusterId;
private OStorageRemoteAsynchEventListener asynchEventListener;
// Defensive copy of the options passed to open().
private Map<String, Object> connectionOptions;
private String recordFormat;
protected ORemoteConnectionManager connectionManager;
// All sessions ever attached to this storage; iterated on close/offline cleanup.
private final Set<OStorageRemoteSession> sessions = Collections
.newSetFromMap(new ConcurrentHashMap<OStorageRemoteSession, Boolean>());
/**
 * Creates a remote storage with push-message handling enabled. Delegates to the full constructor.
 */
public OStorageRemote(final String iClientId, final String iURL, final String iMode) throws IOException {
this(iClientId, iURL, iMode, null, true);
}
/**
 * Creates a remote storage.
 *
 * @param iClientId          client identifier, stored for later use
 * @param iURL               connection URL (may list multiple servers)
 * @param iMode              storage mode forwarded to the superclass
 * @param status             optional initial status; kept at the superclass default when null
 * @param managePushMessages when true, registers an async event listener to receive server push messages
 */
public OStorageRemote(final String iClientId, final String iURL, final String iMode, final STATUS status,
final boolean managePushMessages) throws IOException {
super(iURL, iURL, iMode, 0); // NO TIMEOUT @SINCE 1.5
if (status != null)
this.status = status;
clientId = iClientId;
configuration = null;
clientConfiguration = new OContextConfiguration();
connectionRetry = clientConfiguration.getValueAsInteger(OGlobalConfiguration.NETWORK_SOCKET_RETRY);
connectionRetryDelay = clientConfiguration.getValueAsInteger(OGlobalConfiguration.NETWORK_SOCKET_RETRY_DELAY);
if (managePushMessages)
asynchEventListener = new OStorageRemoteAsynchEventListener(this);
// Populate serverURLs from the connection URL before any network activity.
parseServerURLs();
asynchExecutor = Executors.newSingleThreadScheduledExecutor();
// The remote engine must already be running; it owns the shared connection pool.
OEngineRemote engine = (OEngineRemote) Orient.instance().getRunningEngine(OEngineRemote.NAME);
connectionManager = engine.getConnectionManager();
}
/**
 * Executes a request/response pair honoring the requested synchronization mode:
 * mode 0 = synchronous (read inline), mode 1 = asynchronous (read on the executor, result via callback),
 * mode 2 = fire-and-forget (no response read).
 *
 * @param write        sends the request on the channel
 * @param read         decodes the response (skipped in mode 2)
 * @param mode         requested mode; a mode-1 call without a callback is downgraded to mode 2
 * @param recordId     record id passed to the callback in async mode
 * @param callback     invoked with the decoded result in async mode
 * @param errorMessage message used when wrapping failures
 * @return the decoded result in sync mode, null otherwise
 */
public <T> T asyncNetworkOperation(final OStorageRemoteOperationWrite write, final OStorageRemoteOperationRead<T> read, int mode,
final ORecordId recordId, final ORecordCallback<T> callback, final String errorMessage) {
final int pMode;
if (mode == 1 && callback == null)
// ASYNCHRONOUS MODE NO ANSWER
pMode = 2;
else
pMode = mode;
return baseNetworkOperation(new OStorageRemoteOperation<T>() {
@Override
public T execute(final OChannelBinaryAsynchClient network, final OStorageRemoteSession session) throws IOException {
// Send The request
write.execute(network, session, pMode);
final T res;
if (pMode == 0) {
// SYNC
res = read.execute(network, session);
connectionManager.release(network);
} else if (pMode == 1) {
// ASYNC
res = null;
// The channel is released (or removed on failure) by the executor task, not by this thread.
OStorageRemote.this.asynchExecutor.submit(new Runnable() {
@Override
public void run() {
try {
T inRes = read.execute(network, session);
callback.call(recordId, inRes);
connectionManager.release(network);
} catch (Throwable e) {
// Channel state is unknown after a failure: drop it from the pool.
connectionManager.remove(network);
OLogManager.instance().error(this, "Exception on async query", e);
}
}
});
} else {
// NO RESPONSE
connectionManager.release(network);
res = null;
}
return res;
}
}, errorMessage, connectionRetry);
}
/**
 * Executes a synchronous operation with an explicit retry count, releasing the channel back to the
 * pool after a successful execution. Failure-path release/removal is handled by baseNetworkOperation.
 */
public <T> T networkOperationRetry(final OStorageRemoteOperation<T> operation, final String errorMessage, int retry) {
return baseNetworkOperation(new OStorageRemoteOperation<T>() {
@Override
public T execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
final T res = operation.execute(network, session);
connectionManager.release(network);
return res;
}
}, errorMessage, retry);
}
/**
 * Executes a synchronous operation using the configured connection retry count.
 */
public <T> T networkOperation(final OStorageRemoteOperation<T> operation, final String errorMessage) {
return networkOperationRetry(operation, errorMessage, connectionRetry);
}
/**
 * Core request loop: picks a server, obtains a pooled channel, (re)opens the database session on that
 * server when needed, and runs the operation, retrying or failing over according to the exception type.
 * <p>
 * Failure handling: redirects reconnect to the suggested server; token failures drop the node session
 * and retry; offline nodes are removed from the URL list; I/O errors retry with a delay; any other
 * exception is wrapped in an OStorageException with {@code errorMessage}.
 *
 * @param operation    the request/response exchange to run
 * @param errorMessage message used when wrapping unrecoverable failures
 * @param retry        remaining retry budget for token and I/O failures
 */
public <T> T baseNetworkOperation(final OStorageRemoteOperation<T> operation, final String errorMessage, int retry) {
OStorageRemoteSession session = getCurrentSession();
if (session.commandExecuting)
throw new ODatabaseException(
"Cannot execute the request because an asynchronous operation is in progress. Please use a different connection");
String serverUrl = null;
do {
OChannelBinaryAsynchClient network = null;
if (serverUrl == null)
serverUrl = getNextAvailableServerURL(false, session);
// Walk the server list until a channel can be obtained; rethrow when no server is left.
do {
try {
network = getNetwork(serverUrl);
} catch (OException e) {
serverUrl = useNewServerURL(serverUrl);
if (serverUrl == null)
throw e;
}
} while (network == null);
try {
// In case i do not have a token or i'm switching between server i've to execute a open operation.
OStorageRemoteNodeSession nodeSession = session.getServerSession(network.getServerURL());
if (nodeSession == null || !nodeSession.isValid()) {
openRemoteDatabase(network);
if (!network.tryLock()) {
connectionManager.release(network);
continue;
}
}
return operation.execute(network, session);
} catch (ODistributedRedirectException e) {
connectionManager.release(network);
// FIX: log the redirect destination; previously e.toString() was passed instead of the target server.
OLogManager.instance()
.debug(this, "Redirecting the request from server '%s' to the server '%s' because %s", e.getFromServer(),
e.getToServerAddress(), e.getMessage());
// RECONNECT TO THE SERVER SUGGESTED IN THE EXCEPTION
serverUrl = e.getToServerAddress();
} catch (OModificationOperationProhibitedException mope) {
connectionManager.release(network);
handleDBFreeze();
serverUrl = null;
} catch (OTokenException e) {
// The session token is no longer valid for this server: drop it and retry on a fresh session.
connectionManager.release(network);
session.removeServerSession(network.getServerURL());
if (--retry <= 0)
throw OException.wrapException(new OStorageException(errorMessage), e);
serverUrl = null;
} catch (OTokenSecurityException e) {
connectionManager.release(network);
session.removeServerSession(network.getServerURL());
if (--retry <= 0)
throw OException.wrapException(new OStorageException(errorMessage), e);
serverUrl = null;
} catch (OOfflineNodeException e) {
connectionManager.release(network);
// Remove the current url because the node is offline
synchronized (serverURLs) {
serverURLs.remove(serverUrl);
}
for (OStorageRemoteSession activeSession : sessions) {
// Not thread Safe ...
activeSession.removeServerSession(serverUrl);
}
serverUrl = null;
} catch (IOException e) {
connectionManager.release(network);
retry = handleIOException(retry, network, e);
serverUrl = null;
} catch (OIOException e) {
connectionManager.release(network);
retry = handleIOException(retry, network, e);
serverUrl = null;
} catch (OException e) {
connectionManager.release(network);
throw e;
} catch (Exception e) {
connectionManager.release(network);
throw OException.wrapException(new OStorageException(errorMessage), e);
}
} while (true);
}
/**
 * Handles a network I/O failure: logs it, removes the broken channel from the pool, and either throws
 * (retry budget exhausted) or sleeps for the configured delay before the caller retries.
 *
 * @return the decremented retry budget
 */
private int handleIOException(int retry, final OChannelBinaryAsynchClient network, final Exception e) {
OLogManager.instance()
.info(this, "Caught Network I/O errors on %s, trying an automatic reconnection... (error: %s)", network.getServerURL(),
e.getMessage());
OLogManager.instance().debug(this, "I/O error stack: ", e);
// The channel is broken: remove it from the pool rather than releasing it for reuse.
connectionManager.remove(network);
if (--retry <= 0)
throw OException.wrapException(new OIOException(e.getMessage()), e);
else {
try {
Thread.sleep(connectionRetryDelay);
} catch (InterruptedException e1) {
// FIX: restore the interrupt flag and chain the actual InterruptedException as the cause
// (previously the unrelated I/O exception was chained instead).
Thread.currentThread().interrupt();
throw OException.wrapException(new OInterruptedException(e1.getMessage()), e1);
}
}
return retry;
}
@Override
public boolean isAssigningClusterIds() {
// Cluster ids are assigned by the server, never by this client-side storage.
return false;
}
/**
 * Returns the id of the session bound to the current context, or -1 when no session exists.
 */
public int getSessionId() {
final OStorageRemoteSession currentSession = getCurrentSession();
if (currentSession == null)
return -1;
return currentSession.getSessionId();
}
/**
 * Returns the server URL of the session bound to the current context, or null when no session exists.
 */
public String getServerURL() {
final OStorageRemoteSession currentSession = getCurrentSession();
if (currentSession == null)
return null;
return currentSession.getServerUrl();
}
/**
 * Opens the remote database, establishing a new session when the storage is closed, the credentials
 * changed, or no server session exists; otherwise reopens the existing one. Increments the user count;
 * on failure the count is rolled back and the exception rethrown (runtime as-is, checked wrapped).
 */
public void open(final String iUserName, final String iUserPassword, final Map<String, Object> iOptions) {
stateLock.acquireWriteLock();
addUser();
try {
OStorageRemoteSession session = getCurrentSession();
if (status == STATUS.CLOSED || !iUserName.equals(session.connectionUserName) || !iUserPassword
.equals(session.connectionUserPassword) || session.sessions.isEmpty()) {
// A credential interceptor (e.g. for Kerberos/SSO) may rewrite the credentials before use.
OCredentialInterceptor ci = OSecurityManager.instance().newCredentialInterceptor();
if (ci != null) {
ci.intercept(getURL(), iUserName, iUserPassword);
session.connectionUserName = ci.getUsername();
session.connectionUserPassword = ci.getPassword();
} else // Do Nothing
{
session.connectionUserName = iUserName;
session.connectionUserPassword = iUserPassword;
}
parseOptions(iOptions);
openRemoteDatabase();
final OStorageConfiguration storageConfiguration = new OStorageRemoteConfiguration(this, recordFormat);
storageConfiguration.load(iOptions);
configuration = storageConfiguration;
componentsFactory = new OCurrentStorageComponentsFactory(configuration);
} else {
reopenRemoteDatabase();
}
} catch (Exception e) {
// Roll back the user count taken above before propagating.
removeUser();
if (e instanceof RuntimeException)
// PASS THROUGH
throw (RuntimeException) e;
else
throw OException.wrapException(new OStorageException("Cannot open the remote storage: " + name), e);
} finally {
stateLock.releaseWriteLock();
}
}
/**
 * Reads the connection options: applies the "connectionStrategy" option (when present) and keeps a
 * defensive copy of the full map.
 */
private void parseOptions(final Map<String, Object> iOptions) {
if (iOptions == null || iOptions.size() == 0)
return;
// FIX: pin the locale for case conversion so the enum lookup does not break under
// locale-sensitive defaults (e.g. the Turkish dotless 'i' would turn "sticky" into "STICKY" wrongly).
final Object connType = iOptions.get(PARAM_CONNECTION_STRATEGY.toLowerCase(java.util.Locale.ENGLISH));
if (connType != null)
connectionStrategy = CONNECTION_STRATEGY.valueOf(connType.toString().toUpperCase(java.util.Locale.ENGLISH));
// CREATE A COPY TO AVOID POST OPEN MANIPULATION BY USER
connectionOptions = new HashMap<String, Object>(iOptions);
}
@Override
public OSBTreeCollectionManager getSBtreeCollectionManager() {
// Remote variant: collection changes are resolved against the server.
return sbTreeCollectionManager;
}
/**
 * Reloads the database information (clusters, configuration) from the server under the state write lock.
 */
public void reload() {
networkOperation(new OStorageRemoteOperation<Void>() {
@Override
public Void execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
stateLock.acquireWriteLock();
try {
// Request and response phases are bracketed separately so the channel is always unlocked.
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_DB_RELOAD, session);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
readDatabaseInformation(network);
} finally {
endResponse(network);
}
configuration.load(new HashMap<String, Object>());
return null;
} finally {
stateLock.releaseWriteLock();
}
}
}, "Error on reloading database information");
}
/**
 * Not supported remotely: database creation must go through OServerAdmin or the console.
 */
public void create(final Map<String, Object> iOptions) {
throw new UnsupportedOperationException(
"Cannot create a database in a remote server. Please use the console or the OServerAdmin class.");
}
/**
 * Not supported remotely: existence checks must go through OServerAdmin or the console.
 */
public boolean exists() {
throw new UnsupportedOperationException(
"Cannot check the existence of a database in a remote server. Please use the console or the OServerAdmin class.");
}
/**
 * Closes the storage: sends a DB_CLOSE to every server the current session is attached to (errors are
 * logged and ignored — a dead server closes the session on its own), then, when forced or no users
 * remain, closes the connection pools and unregisters the storage.
 */
public void close(final boolean iForce, boolean onDelete) {
if (status == STATUS.CLOSED)
return;
stateLock.acquireWriteLock();
try {
// Re-check under the lock: another thread may have closed the storage meanwhile.
if (status == STATUS.CLOSED)
return;
final OStorageRemoteSession session = getCurrentSession();
if (session != null) {
final Collection<OStorageRemoteNodeSession> nodes = session.getAllServerSessions();
if (!nodes.isEmpty()) {
for (OStorageRemoteNodeSession nodeSession : nodes) {
OChannelBinaryAsynchClient network = null;
try {
network = getNetwork(nodeSession.getServerURL());
network.beginRequest(OChannelBinaryProtocol.REQUEST_DB_CLOSE, session);
endRequest(network);
connectionManager.release(network);
} catch (OIOException ex) {
// IGNORING IF THE SERVER IS DOWN OR NOT REACHABLE THE SESSION IS AUTOMATICALLY CLOSED.
OLogManager.instance().debug(this, "Impossible to comunicate to the server for close: %s", ex);
connectionManager.remove(network);
} catch (IOException ex) {
// IGNORING IF THE SERVER IS DOWN OR NOT REACHABLE THE SESSION IS AUTOMATICALLY CLOSED.
OLogManager.instance().debug(this, "Impossible to comunicate to the server for close: %s", ex);
connectionManager.remove(network);
}
}
session.close();
sessions.remove(session);
if (!checkForClose(iForce))
return;
} else {
if (!iForce)
return;
}
}
status = STATUS.CLOSING;
// CLOSE ALL THE CONNECTIONS
for (String url : serverURLs) {
connectionManager.closePool(url);
}
sbTreeCollectionManager.close();
super.close(iForce, onDelete);
status = STATUS.CLOSED;
Orient.instance().unregisterStorage(this);
} finally {
stateLock.releaseWriteLock();
}
}
/**
 * Decides whether the storage should proceed with closing: false when already closed; otherwise
 * decrements the user count (when positive) and returns true when forced or no users remain.
 */
private boolean checkForClose(final boolean force) {
// FIX: removed a literally duplicated "status == STATUS.CLOSED" check.
if (status == STATUS.CLOSED)
return false;
final int remainingUsers = getUsers() > 0 ? removeUser() : 0;
return force || remainingUsers == 0;
}
@Override
public int getUsers() {
// Current number of database instances using this storage.
return users.get();
}
@Override
public int addUser() {
return users.incrementAndGet();
}
@Override
public int removeUser() {
// Guard against unbalanced removeUser() calls.
if (users.get() < 1)
throw new IllegalStateException("Cannot remove user of the remote storage '" + toString() + "' because no user is using it");
return users.decrementAndGet();
}
/**
 * Not supported remotely: database deletion must go through OServerAdmin or the console.
 */
public void delete() {
throw new UnsupportedOperationException(
"Cannot delete a database in a remote server. Please use the console or the OServerAdmin class.");
}
/**
 * Returns a snapshot of the known cluster names, copied under the read lock so callers cannot
 * observe concurrent modifications.
 */
public Set<String> getClusterNames() {
stateLock.acquireReadLock();
try {
return new HashSet<String>(clusterMap.keySet());
} finally {
stateLock.releaseReadLock();
}
}
/**
 * Creates a record on the server. Supports sync (iMode 0), async with callback (1) and
 * fire-and-forget (2) via asyncNetworkOperation. The returned OPhysicalPosition is the object the
 * response handler fills in; in async modes it may still be unpopulated when this method returns.
 */
public OStorageOperationResult<OPhysicalPosition> createRecord(final ORecordId iRid, final byte[] iContent,
final int iRecordVersion, final byte iRecordType, final int iMode, final ORecordCallback<Long> iCallback) {
final OSBTreeCollectionManager collectionManager = ODatabaseRecordThreadLocal.INSTANCE.get().getSbTreeCollectionManager();
// Adapt the caller's Long callback to the OPhysicalPosition produced by the response reader.
ORecordCallback<OPhysicalPosition> realCallback = null;
if (iCallback != null) {
realCallback = new ORecordCallback<OPhysicalPosition>() {
@Override
public void call(ORecordId iRID, OPhysicalPosition iParameter) {
iCallback.call(iRID, iParameter.clusterPosition);
}
};
}
final ORecordId idCopy = iRid.copy();
// The Upper layer require to return this also if it not really receive response from the network
final OPhysicalPosition ppos = new OPhysicalPosition(iRecordType);
asyncNetworkOperation(new OStorageRemoteOperationWrite() {
@Override
public void execute(final OChannelBinaryAsynchClient network, final OStorageRemoteSession session, int mode)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_CREATE, session);
network.writeShort((short) iRid.getClusterId());
network.writeBytes(iContent);
network.writeByte(iRecordType);
network.writeByte((byte) mode);
} finally {
endRequest(network);
}
}
}, new OStorageRemoteOperationRead<OPhysicalPosition>() {
@Override
public OPhysicalPosition execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
// SYNCHRONOUS
try {
beginResponse(network, session);
// FIRST READ THE ENTIRE RESPONSE
short clusterId = network.readShort();
final long clPos = network.readLong();
final int recVer = network.readVersion();
final Map<OBonsaiCollectionPointer, OPair<Long, Long>> collectionChanges = readCollectionChanges(network);
// APPLY CHANGES
ppos.clusterPosition = clPos;
ppos.recordVersion = recVer;
// THIS IS A COMPATIBILITY FIX TO AVOID TO FILL THE CLUSTER ID IN CASE OF ASYNC
if (iMode == 0) {
iRid.setClusterId(clusterId);
iRid.setClusterPosition(ppos.clusterPosition);
}
idCopy.setClusterId(clusterId);
idCopy.setClusterPosition(ppos.clusterPosition);
updateCollection(collectionChanges, collectionManager);
return ppos;
} finally {
endResponse(network);
}
}
}, iMode, idCopy, realCallback, "Error on create record in cluster " + iRid.getClusterId());
return new OStorageOperationResult<OPhysicalPosition>(ppos);
}
/**
 * Reads the metadata (rid + version) of a record from the server.
 */
@Override
public ORecordMetadata getRecordMetadata(final ORID rid) {
return networkOperation(new OStorageRemoteOperation<ORecordMetadata>() {
@Override
public ORecordMetadata execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_METADATA, session);
network.writeRID(rid);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
final ORID responseRid = network.readRID();
final int responseVersion = network.readVersion();
return new ORecordMetadata(responseRid, responseVersion);
} finally {
endResponse(network);
}
}
}, "Error on record metadata read " + rid);
}
/**
 * Loads a record only when the server's version differs from {@code recordVersion}.
 * Returns a result wrapping null when the record is unchanged / absent, or when a network
 * operation is already in progress on this session. Records the server pushes alongside the
 * response (fetch-plan prefetch) are stored in the client-side local cache.
 */
@Override
public OStorageOperationResult<ORawBuffer> readRecordIfVersionIsNotLatest(final ORecordId rid, final String fetchPlan,
final boolean ignoreCache, final int recordVersion) throws ORecordNotFoundException {
if (getCurrentSession().commandExecuting)
// PENDING NETWORK OPERATION, CAN'T EXECUTE IT NOW
return new OStorageOperationResult<ORawBuffer>(null);
return networkOperation(new OStorageRemoteOperation<OStorageOperationResult<ORawBuffer>>() {
@Override
public OStorageOperationResult<ORawBuffer> execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_LOAD_IF_VERSION_NOT_LATEST, session);
network.writeRID(rid);
network.writeVersion(recordVersion);
network.writeString(fetchPlan != null ? fetchPlan : "");
network.writeByte((byte) (ignoreCache ? 1 : 0));
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
// 0 = no payload follows (record not returned).
if (network.readByte() == 0)
return new OStorageOperationResult<ORawBuffer>(null);
byte type = network.readByte();
int recVersion = network.readVersion();
byte[] bytes = network.readBytes();
ORawBuffer buffer = new ORawBuffer(bytes, recVersion, type);
final ODatabaseDocument database = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
ORecord record;
// Status byte 2 marks a prefetched record following in the stream.
while (network.readByte() == 2) {
record = (ORecord) OChannelBinaryProtocol.readIdentifiable(network);
if (database != null)
// PUT IN THE CLIENT LOCAL CACHE
database.getLocalCache().updateRecord(record);
}
return new OStorageOperationResult<ORawBuffer>(buffer);
} finally {
endResponse(network);
}
}
}, "Error on read record " + rid);
}
/**
 * Loads a record from the server. Returns a result wrapping null when the record is absent or a
 * network operation is already in progress on this session. Prefetched records pushed with the
 * response are stored in the client-side local cache. Wire layout differs per protocol version.
 */
public OStorageOperationResult<ORawBuffer> readRecord(final ORecordId iRid, final String iFetchPlan, final boolean iIgnoreCache,
boolean prefetchRecords, final ORecordCallback<ORawBuffer> iCallback) {
if (getCurrentSession().commandExecuting)
// PENDING NETWORK OPERATION, CAN'T EXECUTE IT NOW
return new OStorageOperationResult<ORawBuffer>(null);
return networkOperation(new OStorageRemoteOperation<OStorageOperationResult<ORawBuffer>>() {
@Override
public OStorageOperationResult<ORawBuffer> execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_LOAD, session);
network.writeRID(iRid);
network.writeString(iFetchPlan != null ? iFetchPlan : "");
// Optional flags were introduced in later protocol versions.
if (network.getSrvProtocolVersion() >= 9)
network.writeByte((byte) (iIgnoreCache ? 1 : 0));
if (network.getSrvProtocolVersion() >= 13)
network.writeByte((byte) 0);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
// 0 = record not found / no payload.
if (network.readByte() == 0)
return new OStorageOperationResult<ORawBuffer>(null);
final ORawBuffer buffer;
// Field order on the wire changed after protocol version 27.
if (network.getSrvProtocolVersion() <= 27)
buffer = new ORawBuffer(network.readBytes(), network.readVersion(), network.readByte());
else {
final byte type = network.readByte();
final int recVersion = network.readVersion();
final byte[] bytes = network.readBytes();
buffer = new ORawBuffer(bytes, recVersion, type);
}
final ODatabaseDocument database = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
ORecord record;
// Status byte 2 marks a prefetched record following in the stream.
while (network.readByte() == 2) {
record = (ORecord) OChannelBinaryProtocol.readIdentifiable(network);
if (database != null)
// PUT IN THE CLIENT LOCAL CACHE
database.getLocalCache().updateRecord(record);
}
return new OStorageOperationResult<ORawBuffer>(buffer);
} finally {
endResponse(network);
}
}
}, "Error on read record " + iRid);
}
/**
 * Triggers a server-side incremental backup into the given directory and returns the produced file name.
 */
@Override
public String incrementalBackup(final String backupDirectory) {
return networkOperation(new OStorageRemoteOperation<String>() {
@Override
public String execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
network = beginRequest(network, OChannelBinaryProtocol.REQUEST_INCREMENTAL_BACKUP, session);
network.writeString(backupDirectory);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
String fileName = network.readString();
return fileName;
} finally {
endResponse(network);
}
}
}, "Error on incremental backup");
}
/**
 * Internal API only; not available through the remote storage.
 */
@Override
public void restoreFromIncrementalBackup(final String filePath) {
throw new UnsupportedOperationException("This operations is part of internal API and is not supported in remote storage");
}
/**
 * Updates a record on the server (sync/async/fire-and-forget per iMode). Returns the new record
 * version; when no answer is received (async modes), the caller-supplied version is returned instead.
 */
public OStorageOperationResult<Integer> updateRecord(final ORecordId iRid, final boolean updateContent, final byte[] iContent,
final int iVersion, final byte iRecordType, final int iMode, final ORecordCallback<Integer> iCallback) {
final OSBTreeCollectionManager collectionManager = ODatabaseRecordThreadLocal.INSTANCE.get().getSbTreeCollectionManager();
Integer resVersion = asyncNetworkOperation(new OStorageRemoteOperationWrite() {
@Override
public void execute(final OChannelBinaryAsynchClient network, final OStorageRemoteSession session, int mode)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_UPDATE, session);
network.writeRID(iRid);
network.writeBoolean(updateContent);
network.writeBytes(iContent);
network.writeVersion(iVersion);
network.writeByte(iRecordType);
network.writeByte((byte) mode);
} finally {
endRequest(network);
}
}
}, new OStorageRemoteOperationRead<Integer>() {
@Override
public Integer execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginResponse(network, session);
final Integer r = network.readVersion();
// Apply any ridbag/collection tree changes piggybacked on the response.
final Map<OBonsaiCollectionPointer, OPair<Long, Long>> collectionChanges = readCollectionChanges(network);
updateCollection(collectionChanges, collectionManager);
return r;
} finally {
endResponse(network);
}
}
}, iMode, iRid, iCallback, "Error on update record " + iRid);
if (resVersion == null)
// Returning given version in case of no answer from server
resVersion = iVersion;
return new OStorageOperationResult<Integer>(resVersion);
}
/**
 * Not supported by the remote storage.
 */
@Override
public OStorageOperationResult<Integer> recyclePosition(ORecordId iRecordId, byte[] iContent, int iVersion, byte recordType) {
throw new UnsupportedOperationException("recyclePosition");
}
/**
 * Deletes a record (sync/async/fire-and-forget per iMode). The wrapped Boolean reports whether the
 * server actually removed the record; null when no answer is received.
 */
public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRid, final int iVersion, final int iMode,
final ORecordCallback<Boolean> iCallback) {
Boolean resDelete = asyncNetworkOperation(new OStorageRemoteOperationWrite() {
@Override
public void execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session, int mode) throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_DELETE, session);
network.writeRID(iRid);
network.writeVersion(iVersion);
network.writeByte((byte) mode);
} finally {
endRequest(network);
}
}
}, new OStorageRemoteOperationRead<Boolean>() {
@Override
public Boolean execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginResponse(network, session);
// 1 = deleted, anything else = not deleted.
return network.readByte() == 1;
} finally {
endResponse(network);
}
}
}, iMode, iRid, iCallback, "Error on delete record " + iRid);
return new OStorageOperationResult<Boolean>(resDelete);
}
/**
 * Hides a record (sync/async/fire-and-forget per mode). The wrapped Boolean reports whether the
 * server hid the record; null when no answer is received.
 */
@Override
public OStorageOperationResult<Boolean> hideRecord(final ORecordId recordId, final int mode,
final ORecordCallback<Boolean> callback) {
Boolean resHide = asyncNetworkOperation(new OStorageRemoteOperationWrite() {
@Override
public void execute(final OChannelBinaryAsynchClient network, final OStorageRemoteSession session, int mode)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_HIDE, session);
network.writeRID(recordId);
network.writeByte((byte) mode);
} finally {
endRequest(network);
}
}
}, new OStorageRemoteOperationRead<Boolean>() {
@Override
public Boolean execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginResponse(network, session);
return network.readByte() == 1;
} finally {
endResponse(network);
}
}
}, mode, recordId, callback, "Error on hide record " + recordId);
return new OStorageOperationResult<Boolean>(resHide);
}
/**
 * Cleans out (physically removes) a record on the server (sync/async/fire-and-forget per iMode).
 * Returns whether the server performed the clean-out.
 */
@Override
public boolean cleanOutRecord(final ORecordId recordId, final int recordVersion, final int iMode,
final ORecordCallback<Boolean> callback) {
return asyncNetworkOperation(new OStorageRemoteOperationWrite() {
@Override
public void execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session, int mode) throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_RECORD_CLEAN_OUT, session);
network.writeRID(recordId);
network.writeVersion(recordVersion);
network.writeByte((byte) mode);
} finally {
endRequest(network);
}
}
}, new OStorageRemoteOperationRead<Boolean>() {
@Override
public Boolean execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginResponse(network, session);
return network.readByte() == 1;
} finally {
endResponse(network);
}
}
// FIX: error message said "delete record" — copy/paste from deleteRecord; this is a clean-out.
}, iMode, recordId, callback, "Error on clean out record " + recordId);
}
/**
 * Full backup is not supported over the remote protocol; use plocal or incremental backup.
 */
@Override
public List<String> backup(OutputStream out, Map<String, Object> options, Callable<Object> callable,
final OCommandOutputListener iListener, int compressionLevel, int bufferSize) throws IOException {
throw new UnsupportedOperationException(
"backup is not supported against remote storage. Open the database with plocal or use the incremental backup in the Enterprise Edition");
}
/**
 * Restore is not supported over the remote protocol; use plocal.
 */
@Override
public void restore(InputStream in, Map<String, Object> options, Callable<Object> callable,
final OCommandOutputListener iListener) throws IOException {
throw new UnsupportedOperationException(
"restore is not supported against remote storage. Open the database with plocal or use Enterprise Edition");
}
/**
 * Returns the client-side context configuration (retry counts, timeouts, etc.).
 */
public OContextConfiguration getClientConfiguration() {
return clientConfiguration;
}
/**
 * Counts the records in a single cluster by delegating to the multi-cluster variant.
 */
public long count(final int iClusterId) {
final int[] singleCluster = { iClusterId };
return count(singleCluster);
}
/**
 * Counts the records in a single cluster, optionally including tombstones, by delegating to the
 * multi-cluster variant.
 */
@Override
public long count(int iClusterId, boolean countTombstones) {
final int[] singleCluster = { iClusterId };
return count(singleCluster, countTombstones);
}
/**
 * Returns the [first, last] record positions of the given cluster as reported by the server.
 */
public long[] getClusterDataRange(final int iClusterId) {
return networkOperation(new OStorageRemoteOperation<long[]>() {
@Override
public long[] execute(final OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_DATACLUSTER_DATARANGE, session);
network.writeShort((short) iClusterId);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
return new long[] { network.readLong(), network.readLong() };
} finally {
endResponse(network);
}
}
}, "Error on getting last entry position count in cluster: " + iClusterId);
}
/**
 * Returns the physical positions strictly after the given one in the cluster (empty array when none).
 */
@Override
public OPhysicalPosition[] higherPhysicalPositions(final int iClusterId, final OPhysicalPosition iClusterPosition) {
return networkOperation(new OStorageRemoteOperation<OPhysicalPosition[]>() {
@Override
public OPhysicalPosition[] execute(final OChannelBinaryAsynchClient network, OStorageRemoteSession session)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_POSITIONS_HIGHER, session);
network.writeInt(iClusterId);
network.writeLong(iClusterPosition.clusterPosition);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
final int positionsCount = network.readInt();
if (positionsCount == 0) {
return OCommonConst.EMPTY_PHYSICAL_POSITIONS_ARRAY;
} else {
return readPhysicalPositions(network, positionsCount);
}
} finally {
endResponse(network);
}
}
}, "Error on retrieving higher positions after " + iClusterPosition.clusterPosition);
}
/**
 * Returns the physical positions at or after the given one in the cluster (empty array when none).
 */
@Override
public OPhysicalPosition[] ceilingPhysicalPositions(final int clusterId, final OPhysicalPosition physicalPosition) {
return networkOperation(new OStorageRemoteOperation<OPhysicalPosition[]>() {
@Override
public OPhysicalPosition[] execute(final OChannelBinaryAsynchClient network, OStorageRemoteSession session)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_POSITIONS_CEILING, session);
network.writeInt(clusterId);
network.writeLong(physicalPosition.clusterPosition);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
final int positionsCount = network.readInt();
if (positionsCount == 0) {
return OCommonConst.EMPTY_PHYSICAL_POSITIONS_ARRAY;
} else {
return readPhysicalPositions(network, positionsCount);
}
} finally {
endResponse(network);
}
}
}, "Error on retrieving ceiling positions after " + physicalPosition.clusterPosition);
}
/**
 * Returns the physical positions strictly before the given one in the cluster (empty array when none).
 */
@Override
public OPhysicalPosition[] lowerPhysicalPositions(final int iClusterId, final OPhysicalPosition physicalPosition) {
return networkOperation(new OStorageRemoteOperation<OPhysicalPosition[]>() {
@Override
public OPhysicalPosition[] execute(final OChannelBinaryAsynchClient network, OStorageRemoteSession session)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_POSITIONS_LOWER, session);
network.writeInt(iClusterId);
network.writeLong(physicalPosition.clusterPosition);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
final int positionsCount = network.readInt();
if (positionsCount == 0) {
return OCommonConst.EMPTY_PHYSICAL_POSITIONS_ARRAY;
} else {
return readPhysicalPositions(network, positionsCount);
}
} finally {
endResponse(network);
}
}
}, "Error on retrieving lower positions after " + physicalPosition.clusterPosition);
}
/**
 * Returns the physical positions at or before the given one in the cluster (empty array when none).
 */
@Override
public OPhysicalPosition[] floorPhysicalPositions(final int clusterId, final OPhysicalPosition physicalPosition) {
return networkOperation(new OStorageRemoteOperation<OPhysicalPosition[]>() {
@Override
public OPhysicalPosition[] execute(final OChannelBinaryAsynchClient network, OStorageRemoteSession session)
throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_POSITIONS_FLOOR, session);
network.writeInt(clusterId);
network.writeLong(physicalPosition.clusterPosition);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
final int positionsCount = network.readInt();
if (positionsCount == 0) {
return OCommonConst.EMPTY_PHYSICAL_POSITIONS_ARRAY;
} else {
return readPhysicalPositions(network, positionsCount);
}
} finally {
endResponse(network);
}
}
}, "Error on retrieving floor positions after " + physicalPosition.clusterPosition);
}
/**
 * Returns the database size in bytes as reported by the server.
 */
public long getSize() {
return networkOperation(new OStorageRemoteOperation<Long>() {
@Override
public Long execute(final OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_DB_SIZE, session);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
return network.readLong();
} finally {
endResponse(network);
}
}
}, "Error on read database size");
}
/**
 * Returns the total number of records in the database as reported by the server.
 */
@Override
public long countRecords() {
return networkOperation(new OStorageRemoteOperation<Long>() {
@Override
public Long execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
try {
beginRequest(network, OChannelBinaryProtocol.REQUEST_DB_COUNTRECORDS, session);
} finally {
endRequest(network);
}
try {
beginResponse(network, session);
return network.readLong();
} finally {
endResponse(network);
}
}
}, "Error on read database record count");
}
/**
 * Counts the records stored in the given clusters, excluding tombstones.
 *
 * @param iClusterIds ids of the clusters to count
 * @return the total number of live records in those clusters
 */
public long count(final int[] iClusterIds) {
  // Delegate to the full variant; tombstoned records are not included.
  final boolean includeTombstones = false;
  return count(iClusterIds, includeTombstones);
}
/**
 * Counts the records stored in the given clusters (REQUEST_DATACLUSTER_COUNT).
 *
 * @param iClusterIds     ids of the clusters to count
 * @param countTombstones when true, tombstoned (deleted) records are included; only honored by
 *                        servers with protocol version &gt;= 13
 * @return the total number of records in those clusters
 */
public long count(final int[] iClusterIds, final boolean countTombstones) {
  return networkOperation(new OStorageRemoteOperation<Long>() {
    @Override
    public Long execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
      // Request: cluster count, then each id as a short, then the optional tombstone flag.
      try {
        beginRequest(network, OChannelBinaryProtocol.REQUEST_DATACLUSTER_COUNT, session);
        network.writeShort((short) iClusterIds.length);
        for (int iClusterId : iClusterIds)
          network.writeShort((short) iClusterId);
        // Older servers do not understand the tombstone flag, so it is sent conditionally.
        if (network.getSrvProtocolVersion() >= 13)
          network.writeByte(countTombstones ? (byte) 1 : (byte) 0);
      } finally {
        endRequest(network);
      }
      try {
        beginResponse(network, session);
        return network.readLong();
      } finally {
        endResponse(network);
      }
    }
  }, "Error on read record count in clusters: " + Arrays.toString(iClusterIds));
}
/**
 * Executes the command remotely and gets the results back (REQUEST_COMMAND).
 * <p>
 * Three execution modes are encoded in the first payload byte: 'l' for live queries, 'a' for
 * asynchronous commands (records are streamed one at a time to the command's result listener)
 * and 's' for synchronous commands (the whole result is read by {@link #readSynchResult}).
 *
 * @param iCommand the command to execute; must implement {@link OSerializableStream}
 * @return the command result, or null for asynchronous execution
 * @throws OCommandExecutionException when the command cannot be serialized
 */
public Object command(final OCommandRequestText iCommand) {
  if (!(iCommand instanceof OSerializableStream))
    throw new OCommandExecutionException("Cannot serialize the command to be executed to the server side.");
  final boolean live = iCommand instanceof OLiveQuery;
  // Captured from the calling thread: results are cached into this database's local cache.
  final ODatabaseDocument database = ODatabaseRecordThreadLocal.INSTANCE.get();
  return networkOperation(new OStorageRemoteOperation<Object>() {
    @Override
    public Object execute(final OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
      Object result = null;
      // Flag the session so retry logic knows a command is in flight.
      session.commandExecuting = true;
      try {
        final boolean asynch = iCommand instanceof OCommandRequestAsynch && ((OCommandRequestAsynch) iCommand).isAsynchronous();
        try {
          beginRequest(network, OChannelBinaryProtocol.REQUEST_COMMAND, session);
          // Mode marker byte: 'l' live, 'a' asynchronous, 's' synchronous.
          if (live) {
            network.writeByte((byte) 'l');
          } else {
            network.writeByte((byte) (asynch ? 'a' : 's')); // ASYNC / SYNC
          }
          network.writeBytes(OStreamSerializerAnyStreamable.INSTANCE.toStream(iCommand));
        } finally {
          endRequest(network);
        }
        try {
          beginResponse(network, session);
          // Collection of prefetched temporary record (nested projection record), to refer for avoid garbage collection.
          List<ORecord> temporaryResults = new ArrayList<ORecord>();
          boolean addNextRecord = true;
          if (asynch) {
            byte status;
            // ASYNCH: READ ONE RECORD AT TIME
            while ((status = network.readByte()) > 0) {
              final ORecord record = (ORecord) OChannelBinaryProtocol.readIdentifiable(network);
              if (record == null)
                continue;
              switch (status) {
              case 1:
                // PUT AS PART OF THE RESULT SET. INVOKE THE LISTENER
                if (addNextRecord) {
                  // When the listener returns false, stop delivering but keep draining the stream.
                  addNextRecord = iCommand.getResultListener().result(record);
                  database.getLocalCache().updateRecord(record);
                }
                break;
              case 2:
                // Cluster id -2 marks a temporary (projection) record: keep it referenced.
                if (record.getIdentity().getClusterId() == -2)
                  temporaryResults.add(record);
                // PUT IN THE CLIENT LOCAL CACHE
                database.getLocalCache().updateRecord(record);
              }
            }
          } else {
            result = readSynchResult(network, database, temporaryResults);
            if (live) {
              // Live queries return a single document carrying the subscription token.
              final ODocument doc = ((List<ODocument>) result).get(0);
              final Integer token = doc.field("token");
              final Boolean unsubscribe = doc.field("unsubscribe");
              if (token != null) {
                if (Boolean.TRUE.equals(unsubscribe)) {
                  if (OStorageRemote.this.asynchEventListener != null)
                    OStorageRemote.this.asynchEventListener.unregisterLiveListener(token);
                } else {
                  final OLiveResultListener listener = (OLiveResultListener) iCommand.getResultListener();
                  // Events arrive on a push thread: use a copy of the database bound to that thread.
                  ODatabaseDocumentInternal current = ODatabaseRecordThreadLocal.INSTANCE.get();
                  final ODatabaseDocument dbCopy = current.copy();
                  ORemoteConnectionPool pool = OStorageRemote.this.connectionManager.getPool(network.getServerURL());
                  OStorageRemote.this.asynchEventListener.registerLiveListener(pool, token, new OLiveResultListener() {
                    @Override
                    public void onUnsubscribe(int iLiveToken) {
                      listener.onUnsubscribe(iLiveToken);
                      dbCopy.close();
                    }
                    @Override
                    public void onLiveResult(int iLiveToken, ORecordOperation iOp) throws OException {
                      dbCopy.activateOnCurrentThread();
                      listener.onLiveResult(iLiveToken, iOp);
                    }
                    @Override
                    public void onError(int iLiveToken) {
                      listener.onError(iLiveToken);
                      dbCopy.close();
                    }
                  });
                }
              } else {
                throw new OStorageException("Cannot execute live query, returned null token");
              }
            }
          }
          // Attach the temporary records to the result set so they stay reachable alongside it.
          if (!temporaryResults.isEmpty()) {
            if (result instanceof OBasicResultSet<?>) {
              ((OBasicResultSet<?>) result).setTemporaryRecordCache(temporaryResults);
            }
          }
          return result;
        } finally {
          endResponse(network);
        }
      } finally {
        session.commandExecuting = false;
        // Live listeners keep receiving events, so end() is only fired for regular commands.
        if (iCommand.getResultListener() != null && !live)
          iCommand.getResultListener().end();
      }
    }
  }, "Error on executing command: " + iCommand);
}
/**
 * Reads the result of a synchronous command from the wire.
 * <p>
 * The first byte selects the payload shape: 'n' null, 'r' single record, 'l' list,
 * 's' set, 'i' iterator-style stream of records, 'w' a wrapped scalar inside a document's
 * "result" field. For protocol &gt;= 17 a trailing stream of prefetched records follows and
 * is loaded into the local cache.
 *
 * @param network          the channel positioned at the result payload
 * @param database         database whose local cache receives the fetched records
 * @param temporaryResults accumulator for temporary (cluster id -2) records, kept referenced
 *                         to protect them from garbage collection
 * @return the deserialized result, whose type depends on the payload marker
 */
protected Object readSynchResult(final OChannelBinaryAsynchClient network, final ODatabaseDocument database,
    List<ORecord> temporaryResults) throws IOException {
  final Object result;
  final byte type = network.readByte();
  switch (type) {
  case 'n':
    result = null;
    break;
  case 'r':
    result = OChannelBinaryProtocol.readIdentifiable(network);
    if (result instanceof ORecord)
      database.getLocalCache().updateRecord((ORecord) result);
    break;
  case 'l':
  case 's':
    // List and set share the read loop; only the collection implementation differs.
    final int tot = network.readInt();
    final Collection<OIdentifiable> coll;
    coll = type == 's' ? new HashSet<OIdentifiable>(tot) : new OBasicResultSet<OIdentifiable>(tot);
    for (int i = 0; i < tot; ++i) {
      final OIdentifiable resultItem = OChannelBinaryProtocol.readIdentifiable(network);
      if (resultItem instanceof ORecord)
        database.getLocalCache().updateRecord((ORecord) resultItem);
      coll.add(resultItem);
    }
    result = coll;
    break;
  case 'i':
    // Streamed result: records arrive with a status byte until a non-positive terminator.
    coll = new OBasicResultSet<OIdentifiable>();
    byte status;
    while ((status = network.readByte()) > 0) {
      final OIdentifiable record = OChannelBinaryProtocol.readIdentifiable(network);
      if (record == null)
        continue;
      if (status == 1) {
        if (record instanceof ORecord)
          database.getLocalCache().updateRecord((ORecord) record);
        coll.add(record);
      }
    }
    result = coll;
    break;
  case 'w':
    // Scalar wrapped in a document: unwrap the "result" field.
    final OIdentifiable record = OChannelBinaryProtocol.readIdentifiable(network);
    // ((ODocument) record).setLazyLoad(false);
    result = ((ODocument) record).field("result");
    break;
  default:
    OLogManager.instance().warn(this, "Received unexpected result from query: %d", type);
    result = null;
  }
  if (network.getSrvProtocolVersion() >= 17) {
    // LOAD THE FETCHED RECORDS IN CACHE
    byte status;
    while ((status = network.readByte()) > 0) {
      final ORecord record = (ORecord) OChannelBinaryProtocol.readIdentifiable(network);
      if (record != null && status == 2) {
        // PUT IN THE CLIENT LOCAL CACHE
        database.getLocalCache().updateRecord(record);
        // Keep temporary (cluster id -2) records referenced until the caller is done.
        if (record.getIdentity().getClusterId() == -2)
          temporaryResults.add(record);
      }
    }
  }
  return result;
}
/**
 * Commits the given transaction on the server (REQUEST_TX_COMMIT) and applies the returned
 * record identity and version changes back to the local transaction.
 *
 * @param iTx      the transaction to commit
 * @param callback unused by the remote storage: the commit happens entirely server side
 * @return always {@code null}; the remote protocol does not return the committed entries
 */
public List<ORecordOperation> commit(final OTransaction iTx, final Runnable callback) {
  networkOperation(new OStorageRemoteOperation<Void>() {
    @Override
    public Void execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
      try {
        session.commandExecuting = true;
        // REQUEST: tx id, use-log flag, every record entry, a 0 terminator, then index changes.
        try {
          beginRequest(network, OChannelBinaryProtocol.REQUEST_TX_COMMIT, session);
          network.writeInt(iTx.getId());
          network.writeByte((byte) (iTx.isUsingLog() ? 1 : 0));
          for (ORecordOperation txEntry : iTx.getAllRecordEntries()) {
            commitEntry(network, txEntry);
          }
          // END OF RECORD ENTRIES
          network.writeByte((byte) 0);
          // SEND EMPTY TX CHANGES, TRACKING MADE SERVER SIDE
          network.writeBytes(iTx.getIndexChanges().toStream());
        } finally {
          endRequest(network);
        }
        try {
          // READ THE ENTIRE RESPONSE FIRST
          beginResponse(network, session);
          // NEW RECORDS: temporary client-side rid -> definitive server-assigned rid.
          final int createdRecords = network.readInt();
          final Map<ORecordId, ORecordId> createdRecordsMap = new HashMap<ORecordId, ORecordId>(createdRecords);
          for (int i = 0; i < createdRecords; i++)
            createdRecordsMap.put(network.readRID(), network.readRID());
          // UPDATED RECORDS: rid -> new version assigned by the server.
          final int updatedRecords = network.readInt();
          final Map<ORecordId, Integer> updatedRecordsMap = new HashMap<ORecordId, Integer>(updatedRecords);
          for (int i = 0; i < updatedRecords; ++i)
            updatedRecordsMap.put(network.readRID(), network.readVersion());
          Map<OBonsaiCollectionPointer, OPair<Long, Long>> collectionChanges = null;
          if (network.getSrvProtocolVersion() >= 20)
            collectionChanges = readCollectionChanges(network);
          // APPLY CHANGES
          for (Map.Entry<ORecordId, ORecordId> entry : createdRecordsMap.entrySet())
            iTx.updateIdentityAfterCommit(entry.getKey(), entry.getValue());
          createdRecordsMap.clear();
          for (Map.Entry<ORecordId, Integer> entry : updatedRecordsMap.entrySet()) {
            final ORecordOperation rop = iTx.getRecordEntry(entry.getKey());
            if (rop != null) {
              if (entry.getValue() > rop.getRecord().getVersion() + 1)
                // IN CASE OF REMOTE CONFLICT STRATEGY FORCE UNLOAD DUE TO INVALID CONTENT
                rop.getRecord().unload();
              ORecordInternal.setVersion(rop.getRecord(), entry.getValue());
            }
          }
          updatedRecordsMap.clear();
          if (collectionChanges != null)
            updateCollection(collectionChanges, ODatabaseRecordThreadLocal.INSTANCE.get().getSbTreeCollectionManager());
        } finally {
          endResponse(network);
        }
        // SET ALL THE RECORDS AS UNDIRTY
        for (ORecordOperation txEntry : iTx.getAllRecordEntries())
          ORecordInternal.unsetDirty(txEntry.getRecord());
        // UPDATE THE CACHE ONLY IF THE ITERATOR ALLOWS IT. USE THE STRATEGY TO ALWAYS REMOVE ALL THE RECORDS SINCE THEY COULD BE
        // CHANGED AS CONTENT IN CASE OF TREE AND GRAPH DUE TO CROSS REFERENCES
        OTransactionAbstract.updateCacheFromEntries(iTx, iTx.getAllRecordEntries(), true);
        return null;
      } finally {
        session.commandExecuting = false;
      }
    }
  }, "Error on commit");
  return null;
}
/**
 * No-op on the remote storage: nothing was applied locally before commit, so there is
 * nothing to undo here; the server discards the transaction on its side.
 *
 * @param iTx the transaction being rolled back (ignored)
 */
public void rollback(OTransaction iTx) {
}
/**
 * Resolves a cluster name to its id. A name made of digits is parsed directly as an id.
 *
 * @param iClusterName cluster name, a numeric id as a string, or null
 * @return the cluster id, or -1 when the name is null, empty or unknown
 */
public int getClusterIdByName(final String iClusterName) {
  stateLock.acquireReadLock();
  try {
    // Guard against null AND empty input: charAt(0) below would throw on "".
    if (iClusterName == null || iClusterName.isEmpty())
      return -1;
    if (Character.isDigit(iClusterName.charAt(0)))
      return Integer.parseInt(iClusterName);
    // Cluster names are stored lower-cased in the map.
    final OCluster cluster = clusterMap.get(iClusterName.toLowerCase());
    if (cluster == null)
      return -1;
    return cluster.getId();
  } finally {
    stateLock.releaseReadLock();
  }
}
/**
 * Returns the id of the default cluster, used when no cluster is specified.
 */
public int getDefaultClusterId() {
  return defaultClusterId;
}
/**
 * Sets the id of the default cluster, used when no cluster is specified.
 *
 * @param defaultClusterId the new default cluster id
 */
public void setDefaultClusterId(int defaultClusterId) {
  this.defaultClusterId = defaultClusterId;
}
/**
 * Adds a new cluster letting the server choose its id.
 *
 * @param iClusterName   name of the cluster to create
 * @param forceListBased unused by the remote storage, forwarded as-is
 * @param iArguments     extra cluster arguments, forwarded as-is
 * @return the id assigned to the new cluster
 */
public int addCluster(final String iClusterName, boolean forceListBased, final Object... iArguments) {
  // -1 asks the server to auto-assign the next available cluster id.
  final int autoAssignedId = -1;
  return addCluster(iClusterName, autoAssignedId, forceListBased, iArguments);
}
/**
 * Adds a new cluster on the server (REQUEST_DATACLUSTER_ADD) and registers it locally.
 *
 * @param iClusterName   name of the cluster to create
 * @param iRequestedId   requested cluster id, or -1 to let the server choose; only sent to
 *                       servers with protocol version &gt;= 18
 * @param forceListBased unused by the remote storage
 * @param iParameters    extra cluster parameters, unused by the remote storage
 * @return the id assigned to the new cluster
 */
public int addCluster(final String iClusterName, final int iRequestedId, final boolean forceListBased,
    final Object... iParameters) {
  return networkOperation(new OStorageRemoteOperation<Integer>() {
    @Override
    public Integer execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
      // Write lock protects the local 'clusters' array and 'clusterMap' updates below.
      stateLock.acquireWriteLock();
      try {
        try {
          beginRequest(network, OChannelBinaryProtocol.REQUEST_DATACLUSTER_ADD, session);
          network.writeString(iClusterName);
          if (network.getSrvProtocolVersion() >= 18)
            network.writeShort((short) iRequestedId);
        } finally {
          endRequest(network);
        }
        try {
          beginResponse(network, session);
          final int clusterId = network.readShort();
          // Mirror the new cluster in the local registry, growing the array if needed.
          final OClusterRemote cluster = new OClusterRemote();
          cluster.configure(OStorageRemote.this, clusterId, iClusterName.toLowerCase());
          if (clusters.length <= clusterId)
            clusters = Arrays.copyOf(clusters, clusterId + 1);
          clusters[cluster.getId()] = cluster;
          clusterMap.put(cluster.getName().toLowerCase(), cluster);
          return clusterId;
        } finally {
          endResponse(network);
        }
      } finally {
        stateLock.releaseWriteLock();
      }
    }
  }, "Error on add new cluster");
}
/**
 * Drops a cluster on the server (REQUEST_DATACLUSTER_DROP) and, on success, removes it from
 * the local registry and configuration.
 *
 * @param iClusterId id of the cluster to drop
 * @param iTruncate  unused by the remote storage
 * @return true when the server confirmed the drop, false otherwise
 */
public boolean dropCluster(final int iClusterId, final boolean iTruncate) {
  return networkOperation(new OStorageRemoteOperation<Boolean>() {
    @Override
    public Boolean execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
      stateLock.acquireWriteLock();
      try {
        try {
          beginRequest(network, OChannelBinaryProtocol.REQUEST_DATACLUSTER_DROP, session);
          network.writeShort((short) iClusterId);
        } finally {
          endRequest(network);
        }
        // Read the whole response before mutating local state: see ordering note below.
        byte result = 0;
        try {
          beginResponse(network, session);
          result = network.readByte();
        } finally {
          endResponse(network);
        }
        if (result == 1) {
          // REMOVE THE CLUSTER LOCALLY
          final OCluster cluster = clusters[iClusterId];
          clusters[iClusterId] = null;
          clusterMap.remove(cluster.getName());
          if (configuration.clusters.size() > iClusterId)
            configuration.dropCluster(iClusterId); // endResponse must be called before this line, which call updateRecord
          return true;
        }
        return false;
      } finally {
        stateLock.releaseWriteLock();
      }
    }
  }, "Error on removing of cluster");
}
/**
 * No-op on the remote storage: durability is handled by the server, so there is nothing
 * to flush from the client side.
 */
public void synch() {
}
/**
 * Returns the name of the cluster with the given id.
 *
 * @param iClusterId cluster id to look up
 * @return the cluster name, or null when the id is out of range or unassigned
 */
public String getPhysicalClusterNameById(final int iClusterId) {
  stateLock.acquireReadLock();
  try {
    // Guard both bounds: a negative id would otherwise throw ArrayIndexOutOfBoundsException.
    if (iClusterId < 0 || iClusterId >= clusters.length)
      return null;
    final OCluster cluster = clusters[iClusterId];
    return cluster != null ? cluster.getName() : null;
  } finally {
    stateLock.releaseReadLock();
  }
}
/**
 * Returns the number of registered clusters.
 * <p>
 * NOTE(review): despite the name this returns the map's size, not the map itself; kept for
 * interface compatibility.
 */
public int getClusterMap() {
  stateLock.acquireReadLock();
  try {
    return clusterMap.size();
  } finally {
    stateLock.releaseReadLock();
  }
}
/**
 * Returns a snapshot of the registered clusters.
 * <p>
 * The returned collection is an independent copy: mutating it cannot corrupt the storage's
 * internal cluster array (the previous {@code Arrays.asList} view allowed element writes
 * straight through to internal state). Entries may be null for dropped cluster slots.
 *
 * @return a copy of the cluster array as a collection
 */
public Collection<OCluster> getClusterInstances() {
  stateLock.acquireReadLock();
  try {
    // Defensive copy instead of the fixed-size live view over the internal array.
    return new ArrayList<OCluster>(Arrays.asList(clusters));
  } finally {
    stateLock.releaseReadLock();
  }
}
/**
 * Returns the cluster with the given id, falling back to the default cluster when the id
 * is {@link ORID#CLUSTER_ID_INVALID}.
 * <p>
 * NOTE(review): an id outside the array bounds throws ArrayIndexOutOfBoundsException;
 * callers are expected to pass a valid id.
 *
 * @param iClusterId cluster id, or ORID.CLUSTER_ID_INVALID for the default cluster
 * @return the cluster instance, possibly null for a dropped slot
 */
public OCluster getClusterById(int iClusterId) {
  stateLock.acquireReadLock();
  try {
    if (iClusterId == ORID.CLUSTER_ID_INVALID)
      // GET THE DEFAULT CLUSTER
      iClusterId = defaultClusterId;
    return clusters[iClusterId];
  } finally {
    stateLock.releaseReadLock();
  }
}
/**
 * Not available on the remote storage: the storage version lives on the server.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public long getVersion() {
  throw new UnsupportedOperationException("getVersion");
}
/**
 * Returns the distributed cluster configuration document.
 * <p>
 * The same mutable instance is reused and updated under {@code synchronized (clusterConfiguration)}
 * elsewhere in this class; callers that iterate it should synchronize on it too.
 */
public ODocument getClusterConfiguration() {
  return clusterConfiguration;
}
/**
 * Ends the request: flushes the pending bytes to the server and releases the channel's
 * write lock so other threads can send requests.
 *
 * @param iNetwork the channel to finalize; silently ignored when null
 * @throws IOException if flushing the channel fails
 */
public void endRequest(final OChannelBinaryAsynchClient iNetwork) throws IOException {
  if (iNetwork == null)
    return;
  iNetwork.flush();
  iNetwork.releaseWriteLock();
}
/**
 * End response reached: release the channel in the pool to being reused.
 *
 * @param iNetwork the channel whose response has been fully consumed
 * @throws IOException if finalizing the response fails
 */
public void endResponse(final OChannelBinaryAsynchClient iNetwork) throws IOException {
  iNetwork.endResponse();
}
/**
 * Always true: this storage proxies a remote server.
 */
@Override
public boolean isRemote() {
  return true;
}
/**
 * Always false: connections are pooled and released after each operation rather than
 * held permanently by this requester.
 */
public boolean isPermanentRequester() {
  return false;
}
/**
 * Updates the distributed cluster configuration from the payload sent by the server and
 * refreshes the known server URL list from the members' binary listeners.
 *
 * @param iConnectedURL url of the server the payload came from; registered first when not null
 * @param obj           serialized cluster configuration document; ignored when null
 */
@SuppressWarnings("unchecked")
public void updateClusterConfiguration(final String iConnectedURL, final byte[] obj) {
  if (obj == null)
    return;
  // TEMPORARY FIX: DISTRIBUTED MODE DOESN'T SUPPORT TREE BONSAI, KEEP ALWAYS EMBEDDED RIDS
  OGlobalConfiguration.RID_BAG_EMBEDDED_TO_SBTREEBONSAI_THRESHOLD.setValue(Integer.MAX_VALUE);
  final List<ODocument> members;
  synchronized (clusterConfiguration) {
    clusterConfiguration.fromStream(obj);
    // Forces eager deserialization of the document while still under the lock.
    clusterConfiguration.toString();
    members = clusterConfiguration.field("members");
  }
  // UPDATE IT
  synchronized (serverURLs) {
    if (members != null) {
      // ADD CURRENT SERVER AS FIRST
      if (iConnectedURL != null) {
        addHost(iConnectedURL);
      }
      for (ODocument m : members) {
        if (m == null)
          continue;
        // Skip offline members; register the binary listener address of the others.
        // (The redundant 'm != null' re-check after the continue above was removed.)
        final String nodeStatus = m.field("status");
        if (!"OFFLINE".equals(nodeStatus)) {
          final Collection<Map<String, Object>> listeners = ((Collection<Map<String, Object>>) m.field("listeners"));
          if (listeners != null)
            for (Map<String, Object> listener : listeners) {
              // Constant-first equals: tolerates listener entries without a "protocol" field,
              // which previously caused a NullPointerException on the cast+equals.
              if ("ONetworkProtocolBinary".equals(listener.get("protocol"))) {
                String url = (String) listener.get("listen");
                if (!serverURLs.contains(url))
                  addHost(url);
              }
            }
        }
      }
    }
  }
}
/**
 * Forgets a server: removes its url from the known server list and drops the per-server
 * session of every active storage session.
 *
 * @param url server url (host:port) to remove
 */
public void removeSessions(final String url) {
  synchronized (serverURLs) {
    serverURLs.remove(url);
  }
  // 'sessions' is a concurrent set, so iterating without a lock is safe here.
  for (OStorageRemoteSession session : sessions) {
    session.removeServerSession(url + "/" + getName());
  }
}
/**
 * Not supported by the remote storage: clusters are resolved by id or via the cluster map.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public OCluster getClusterByName(final String iClusterName) {
  throw new UnsupportedOperationException("getClusterByName()");
}
/**
 * Not supported by the remote storage: conflict resolution is configured server side.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public ORecordConflictStrategy getConflictStrategy() {
  throw new UnsupportedOperationException("getConflictStrategy");
}
/**
 * Not supported by the remote storage: conflict resolution is configured server side.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void setConflictStrategy(final ORecordConflictStrategy iResolver) {
  throw new UnsupportedOperationException("setConflictStrategy");
}
/**
 * Returns the full storage URL, prefixed with the remote engine name (e.g. "remote:host/db").
 */
@Override
public String getURL() {
  return OEngineRemote.NAME + ":" + url;
}
/**
 * Returns the number of registered clusters (same value as {@link #getClusterMap()}).
 */
public int getClusters() {
  stateLock.acquireReadLock();
  try {
    return clusterMap.size();
  } finally {
    stateLock.releaseReadLock();
  }
}
/**
 * Returns the storage type, i.e. the remote engine name.
 */
@Override
public String getType() {
  return OEngineRemote.NAME;
}
/**
 * Returns the user name of the session bound to the current thread.
 *
 * @return the connected user name, or null when no session is active
 */
@Override
public String getUserName() {
  final OStorageRemoteSession currentSession = getCurrentSession();
  return currentSession == null ? null : currentSession.connectionUserName;
}
/**
 * Re-establishes the database session after a connection failure (REQUEST_DB_REOPEN).
 * <p>
 * Tries every available connection of the current server before moving on to the next server
 * in the list. When no valid session exists for a server, a full open is performed instead of
 * a reopen. When every server fails, the original server list is re-parsed and an
 * {@link OStorageException} is thrown.
 *
 * @return the url of the server the session was (re)opened against
 * @throws IOException       on low-level channel errors
 * @throws OStorageException when no server can be reached
 */
protected String reopenRemoteDatabase() throws IOException {
  String currentURL = getCurrentServerURL();
  do {
    do {
      final OChannelBinaryAsynchClient network = getNetwork(currentURL);
      try {
        OStorageRemoteSession session = getCurrentSession();
        OStorageRemoteNodeSession nodeSession = session.getOrCreateServerSession(network.getServerURL());
        if (nodeSession == null || !nodeSession.isValid()) {
          // No valid session for this server: perform a full open instead of a reopen.
          openRemoteDatabase(network);
          connectionManager.release(network);
          return network.getServerURL();
        } else {
          // Reopen request: session id plus the authentication token.
          try {
            network.writeByte(OChannelBinaryProtocol.REQUEST_DB_REOPEN);
            network.writeInt(nodeSession.getSessionId());
            network.writeBytes(nodeSession.getToken());
          } finally {
            endRequest(network);
          }
          final int sessionId;
          try {
            byte[] newToken = network.beginResponse(nodeSession.getSessionId(), true);
            sessionId = network.readInt();
            // The server may renew the token; otherwise keep the previous one.
            if (newToken != null && newToken.length > 0) {
              nodeSession.setSession(sessionId, newToken);
            } else {
              nodeSession.setSession(sessionId, nodeSession.getToken());
            }
            OLogManager.instance().debug(this, "Client connected to %s with session id=%d", network.getServerURL(), sessionId);
            return currentURL;
          } finally {
            endResponse(network);
            connectionManager.release(network);
          }
        }
      } catch (OIOException e) {
        if (network != null) {
          // REMOVE THE NETWORK CONNECTION IF ANY
          connectionManager.remove(network);
        }
        OLogManager.instance().error(this, "Cannot open database with url " + currentURL, e);
      } catch (OOfflineNodeException e) {
        if (network != null) {
          // REMOVE THE NETWORK CONNECTION IF ANY
          connectionManager.remove(network);
        }
        OLogManager.instance().debug(this, "Cannot open database with url " + currentURL, e);
      } catch (OSecurityException ex) {
        // Token rejected: invalidate the local session for this server and retry.
        OLogManager.instance().debug(this, "Invalidate token for url=%s", ex, currentURL);
        OStorageRemoteSession session = getCurrentSession();
        session.removeServerSession(currentURL);
        if (network != null) {
          // REMOVE THE NETWORK CONNECTION IF ANY
          try {
            connectionManager.remove(network);
          } catch (Exception e) {
            // IGNORE ANY EXCEPTION
            OLogManager.instance().debug(this, "Cannot remove connection or database url=" + currentURL, e);
          }
        }
      } catch (OException e) {
        connectionManager.release(network);
        // PROPAGATE ANY OTHER ORIENTDB EXCEPTION
        throw e;
      } catch (Exception e) {
        OLogManager.instance().debug(this, "Cannot open database with url " + currentURL, e);
        if (network != null) {
          // REMOVE THE NETWORK CONNECTION IF ANY
          try {
            connectionManager.remove(network);
          } catch (Exception ex) {
            // IGNORE ANY EXCEPTION
            // FIX: log the removal failure (ex), not the open failure already logged above.
            OLogManager.instance().debug(this, "Cannot remove connection or database url=" + currentURL, ex);
          }
        }
      }
    } while (connectionManager.getAvailableConnections(currentURL) > 0);
    currentURL = useNewServerURL(currentURL);
  } while (currentURL != null);
  // REFILL ORIGINAL SERVER LIST
  parseServerURLs();
  synchronized (serverURLs) {
    throw new OStorageException("Cannot create a connection to remote server address(es): " + serverURLs);
  }
}
/**
 * Opens the database against the next server selected by the connection strategy.
 *
 * @return the url of the server the database was opened against
 * @throws IOException on connection errors
 */
protected synchronized String openRemoteDatabase() throws IOException {
  final String currentURL = getNextAvailableServerURL(true, getCurrentSession());
  return openRemoteDatabase(currentURL);
}
/**
 * Performs the REQUEST_DB_OPEN handshake on an already-acquired channel: sends the client
 * info and credentials, then stores the session id/token and reads the database and cluster
 * configuration. On success the storage status becomes OPEN.
 *
 * @param network channel to the target server
 * @throws IOException on protocol or connection errors
 */
public void openRemoteDatabase(OChannelBinaryAsynchClient network) throws IOException {
  stateLock.acquireWriteLock();
  try {
    OStorageRemoteSession session = getCurrentSession();
    OStorageRemoteNodeSession nodeSession = session.getOrCreateServerSession(network.getServerURL());
    try {
      network.writeByte(OChannelBinaryProtocol.REQUEST_DB_OPEN);
      network.writeInt(nodeSession.getSessionId());
      // @SINCE 1.0rc8
      sendClientInfo(network, DRIVER_NAME, true, true);
      network.writeString(name);
      network.writeString(session.connectionUserName);
      network.writeString(session.connectionUserPassword);
    } finally {
      endRequest(network);
    }
    final int sessionId;
    try {
      network.beginResponse(nodeSession.getSessionId(), false);
      sessionId = network.readInt();
      // An empty token means the server runs without token-based authentication.
      byte[] token = network.readBytes();
      if (token.length == 0) {
        token = null;
      }
      nodeSession.setSession(sessionId, token);
      OLogManager.instance().debug(this, "Client connected to %s with session id=%d", network.getServerURL(), sessionId);
      readDatabaseInformation(network);
      // READ CLUSTER CONFIGURATION
      updateClusterConfiguration(network.getServerURL(), network.readBytes());
      // read OrientDB release info
      if (network.getSrvProtocolVersion() >= 14)
        network.readString();
      status = STATUS.OPEN;
    } finally {
      endResponse(network);
    }
  } finally {
    stateLock.releaseWriteLock();
  }
}
/**
 * Opens the database starting from the given url, retrying over every reusable connection of
 * a server before falling back to the next server in the list. When every server fails, the
 * original server list is re-parsed and an {@link OStorageException} is thrown.
 *
 * @param currentURL first server url to try
 * @return the url of the server the database was opened against
 * @throws OStorageException when no server can be reached
 */
protected String openRemoteDatabase(String currentURL) {
  do {
    do {
      OChannelBinaryAsynchClient network = null;
      try {
        network = getNetwork(currentURL);
        openRemoteDatabase(network);
        connectionManager.release(network);
        return currentURL;
      } catch (OIOException e) {
        if (network != null) {
          // REMOVE THE NETWORK CONNECTION IF ANY
          connectionManager.remove(network);
        }
        OLogManager.instance().debug(this, "Cannot open database with url " + currentURL, e);
      } catch (OException e) {
        connectionManager.release(network);
        // PROPAGATE ANY OTHER ORIENTDB EXCEPTION
        throw e;
      } catch (Exception e) {
        if (network != null) {
          // REMOVE THE NETWORK CONNECTION IF ANY
          try {
            connectionManager.remove(network);
          } catch (Exception ex) {
            // IGNORE ANY EXCEPTION
            // FIX: log the removal failure (ex), not the open failure logged below.
            OLogManager.instance().debug(this, "Cannot remove connection or database url=" + currentURL, ex);
          }
        }
        OLogManager.instance().error(this, "Cannot open database url=" + currentURL, e);
      }
    } while (connectionManager.getReusableConnections(currentURL) > 0);
    currentURL = useNewServerURL(currentURL);
  } while (currentURL != null);
  // REFILL ORIGINAL SERVER LIST
  parseServerURLs();
  synchronized (serverURLs) {
    throw new OStorageException("Cannot create a connection to remote server address(es): " + serverURLs);
  }
}
/**
 * Drops a failed server from the known list and returns the next candidate url, preserving
 * any "/database" suffix of the original url.
 *
 * @param iUrl the url (optionally with "/db" suffix) that just failed
 * @return the next server url with the same suffix, or null when the list is exhausted
 */
protected String useNewServerURL(final String iUrl) {
  int pos = iUrl.indexOf('/');
  if (pos >= iUrl.length() - 1)
    // IGNORE ENDING /
    pos = -1;
  // Split "host:port/db" into the host part and the "/db" postfix to re-append later.
  final String postFix = pos > -1 ? iUrl.substring(pos) : "";
  final String url = pos > -1 ? iUrl.substring(0, pos) : iUrl;
  synchronized (serverURLs) {
    // REMOVE INVALID URL
    serverURLs.remove(url);
    for (OStorageRemoteSession activeSession : sessions) {
      // Not thread Safe ...
      activeSession.removeServerSession(url + "/" + getName());
    }
    OLogManager.instance().debug(this, "Updated server list: %s...", serverURLs);
    if (!serverURLs.isEmpty())
      return serverURLs.get(0) + postFix;
  }
  return null;
}
/**
 * Sends the client identification and capability flags during the open/connect handshake.
 * Each field is gated on the server's protocol version for backward compatibility, and the
 * record serializer name to use ({@code recordFormat}) is negotiated here.
 *
 * @param network              channel in request phase
 * @param driverName           driver name reported to the server
 * @param supportsPushMessages whether this client accepts push messages (protocol &gt; 33)
 * @param collectStats         whether the server should collect stats for this client (protocol &gt; 33)
 * @throws IOException on write errors
 */
protected void sendClientInfo(final OChannelBinaryAsynchClient network, final String driverName,
    final boolean supportsPushMessages, final boolean collectStats) throws IOException {
  if (network.getSrvProtocolVersion() >= 7) {
    // @COMPATIBILITY 1.0rc8
    network.writeString(driverName).writeString(OConstants.ORIENT_VERSION)
        .writeShort((short) OChannelBinaryProtocol.CURRENT_PROTOCOL_VERSION).writeString(clientId);
  }
  if (network.getSrvProtocolVersion() > OChannelBinaryProtocol.PROTOCOL_VERSION_21) {
    // Newer servers accept the client's preferred record serializer.
    network.writeString(ODatabaseDocumentTx.getDefaultSerializer().toString());
    recordFormat = ODatabaseDocumentTx.getDefaultSerializer().toString();
  } else
    // Older servers only speak the CSV serializer.
    recordFormat = ORecordSerializerSchemaAware2CSV.NAME;
  if (network.getSrvProtocolVersion() > OChannelBinaryProtocol.PROTOCOL_VERSION_26)
    network.writeBoolean(true);
  if (network.getSrvProtocolVersion() > OChannelBinaryProtocol.PROTOCOL_VERSION_33) {
    network.writeBoolean(supportsPushMessages);
    network.writeBoolean(collectStats);
  }
}
/**
 * Parse the URLs. Multiple URLs must be separated by semicolon (;).
 * <p>
 * The storage {@code url} is either a bare host ("host") or "host1;host2;.../dbname". Each
 * host is registered via {@link #addHost}. When only one host is configured and DNS
 * load-balancing is enabled, a DNS TXT record of the form {@code s=host1 s=host2 ...} can
 * replace the server list.
 */
protected void parseServerURLs() {
  String lastHost = null;
  int dbPos = url.indexOf('/');
  if (dbPos == -1) {
    // SHORT FORM
    addHost(url);
    lastHost = url;
    name = url;
  } else {
    // Database name is the last path segment; everything before '/' is the host list.
    name = url.substring(url.lastIndexOf("/") + 1);
    for (String host : url.substring(0, dbPos).split(ADDRESS_SEPARATOR)) {
      lastHost = host;
      addHost(host);
    }
  }
  synchronized (serverURLs) {
    if (serverURLs.size() == 1 && OGlobalConfiguration.NETWORK_BINARY_DNS_LOADBALANCING_ENABLED.getValueAsBoolean()) {
      // LOOK FOR LOAD BALANCING DNS TXT RECORD
      final String primaryServer = lastHost;
      OLogManager.instance().debug(this, "Retrieving URLs from DNS '%s' (timeout=%d)...", primaryServer,
          OGlobalConfiguration.NETWORK_BINARY_DNS_LOADBALANCING_TIMEOUT.getValueAsInteger());
      try {
        final Hashtable<String, String> env = new Hashtable<String, String>();
        env.put("java.naming.factory.initial", "com.sun.jndi.dns.DnsContextFactory");
        env.put("com.sun.jndi.ldap.connect.timeout",
            OGlobalConfiguration.NETWORK_BINARY_DNS_LOADBALANCING_TIMEOUT.getValueAsString());
        final DirContext ictx = new InitialDirContext(env);
        // Strip the port before the DNS lookup.
        final String hostName = !primaryServer.contains(":") ?
            primaryServer :
            primaryServer.substring(0, primaryServer.indexOf(":"));
        final Attributes attrs = ictx.getAttributes(hostName, new String[] { "TXT" });
        final Attribute attr = attrs.get("TXT");
        if (attr != null) {
          for (int i = 0; i < attr.size(); ++i) {
            String configuration = (String) attr.get(i);
            // TXT values may be quoted: strip the surrounding quotes.
            if (configuration.startsWith("\""))
              configuration = configuration.substring(1, configuration.length() - 1);
            if (configuration != null) {
              // Replace the configured host list with the servers from the TXT record.
              serverURLs.clear();
              final String[] parts = configuration.split(" ");
              for (String part : parts) {
                if (part.startsWith("s=")) {
                  addHost(part.substring("s=".length()));
                }
              }
            }
          }
        }
      } catch (NamingException ignore) {
        // DNS lookup is best-effort: keep the statically configured host on failure.
      }
    }
  }
}
/**
 * Registers the remote server with port.
 * <p>
 * Normalizes the host: "localhost" becomes 127.0.0.1, any "/db" suffix is stripped, and the
 * default (or default SSL) port is appended when missing. The normalized host is added to
 * {@code serverURLs} if not already present.
 *
 * @param host host, optionally with ":port" and "/database" parts
 * @return the normalized "host:port" string
 */
protected String addHost(String host) {
  if (host.startsWith(LOCALHOST))
    host = LOCAL_IP + host.substring("localhost".length());
  // Drop any trailing "/database" part.
  if (host.contains("/"))
    host = host.substring(0, host.indexOf("/"));
  // REGISTER THE REMOTE SERVER+PORT
  if (!host.contains(":"))
    host += ":" + (clientConfiguration.getValueAsBoolean(OGlobalConfiguration.CLIENT_USE_SSL) ?
        getDefaultSSLPort() :
        getDefaultPort());
  else if (host.split(":").length < 2 || host.split(":")[1].trim().length() == 0)
    // Host ends with a bare ':' — append the port number after it.
    host += (clientConfiguration.getValueAsBoolean(OGlobalConfiguration.CLIENT_USE_SSL) ? getDefaultSSLPort() : getDefaultPort());
  // DISABLED BECAUSE THIS DID NOT ALLOW TO CONNECT TO LOCAL HOST ANYMORE IF THE SERVER IS BOUND TO 127.0.0.1
  // CONVERT 127.0.0.1 TO THE PUBLIC IP IF POSSIBLE
  // if (host.startsWith(LOCAL_IP)) {
  // try {
  // final String publicIP = InetAddress.getLocalHost().getHostAddress();
  // host = publicIP + host.substring(LOCAL_IP.length());
  // } catch (UnknownHostException e) {
  // // IGNORE IT
  // }
  // }
  synchronized (serverURLs) {
    if (!serverURLs.contains(host)) {
      serverURLs.add(host);
      OLogManager.instance().debug(this, "Registered the new available server '%s'", host);
    }
  }
  return host;
}
/**
 * Returns the default binary-protocol port (2424), used when a host has no explicit port.
 */
protected int getDefaultPort() {
  return DEFAULT_PORT;
}
/**
 * Returns the default SSL binary-protocol port (2434), used when SSL is enabled and a host
 * has no explicit port.
 */
protected int getDefaultSSLPort() {
  return DEFAULT_SSL_PORT;
}
/**
 * Begins a request on the given channel: acquires its write lock and writes the command byte
 * plus the session data. Must be paired with {@link #endRequest}.
 *
 * @param network  the channel to write the request on
 * @param iCommand command id, as described in {@link OChannelBinaryProtocol}
 * @param session  current storage session, providing session id and token
 * @return the same channel, for call chaining
 * @throws IOException on write errors
 */
public OChannelBinaryAsynchClient beginRequest(final OChannelBinaryAsynchClient network, final byte iCommand,
    OStorageRemoteSession session) throws IOException {
  network.beginRequest(iCommand, session);
  return network;
}
/**
 * Picks the server url for the next operation according to the configured connection strategy:
 * STICKY always reuses the session's server, ROUND_ROBIN_CONNECT rotates only on connect
 * operations, ROUND_ROBIN_REQUEST rotates on every request.
 *
 * @param iIsConnectOperation true when the operation is a (re)connect
 * @param session             current session, may be null
 * @return the chosen server url (including the "/database" suffix)
 * @throws OConfigurationException for an unknown strategy
 */
protected String getNextAvailableServerURL(boolean iIsConnectOperation, OStorageRemoteSession session) {
  String url = null;
  switch (connectionStrategy) {
  case STICKY:
    url = session != null ? session.getServerUrl() : null;
    if (url == null)
      url = getServerURFromList(false, session);
    break;
  case ROUND_ROBIN_CONNECT:
    // Keep the session's server for regular requests; rotate only when connecting.
    if (!iIsConnectOperation)
      url = session != null ? session.getServerUrl() : null;
    if (url == null)
      url = getServerURFromList(iIsConnectOperation, session);
    OLogManager.instance()
        .debug(this, "ROUND_ROBIN_CONNECT: Next remote operation will be executed on server: %s (isConnectOperation=%s)", url,
            iIsConnectOperation);
    break;
  case ROUND_ROBIN_REQUEST:
    url = getServerURFromList(true, session);
    OLogManager.instance()
        .debug(this, "ROUND_ROBIN_REQUEST: Next remote operation will be executed on server: %s (isConnectOperation=%s)", url,
            iIsConnectOperation);
    break;
  default:
    throw new OConfigurationException("Connection mode " + connectionStrategy + " is not supported");
  }
  return url;
}
/**
 * Returns the server url currently associated with this thread's session, without advancing
 * the round-robin index.
 */
protected String getCurrentServerURL() {
  return getServerURFromList(false, getCurrentSession());
}
/**
 * Returns a server url from the known list, appending "/database".
 * <p>
 * The per-session index makes the selection sticky per session; when {@code iNextAvailable}
 * is true the index advances (round robin), wrapping to 0 when out of range. If the list is
 * empty it is rebuilt from the storage url first.
 *
 * @param iNextAvailable when true, advance to the next server in the list
 * @param session        session carrying the sticky index; may be null (index 0 is used)
 * @return "host:port/database" of the selected server
 * @throws OStorageException when no server url is configured
 */
protected String getServerURFromList(final boolean iNextAvailable, OStorageRemoteSession session) {
  synchronized (serverURLs) {
    if (serverURLs.isEmpty()) {
      parseServerURLs();
      if (serverURLs.isEmpty())
        throw new OStorageException("Cannot create a connection to remote server because url list is empty");
    }
    // GET CURRENT THREAD INDEX
    int serverURLIndex;
    if (session != null)
      serverURLIndex = session.serverURLIndex;
    else
      serverURLIndex = 0;
    if (iNextAvailable)
      serverURLIndex++;
    if (serverURLIndex < 0 || serverURLIndex >= serverURLs.size())
      // RESET INDEX
      serverURLIndex = 0;
    final String serverURL = serverURLs.get(serverURLIndex) + "/" + getName();
    if (session != null)
      session.serverURLIndex = serverURLIndex;
    return serverURL;
  }
}
/**
 * Acquires a network channel for the given url from the connection manager, retrying until a
 * lockable channel is obtained. Channels that cannot be locked (likely left locked by a
 * previous failed user) are removed from the pool and a new one is acquired.
 *
 * @param iCurrentURL server url to connect to
 * @return a locked, ready-to-use channel
 * @throws OStorageException when the connection cannot be established
 */
public OChannelBinaryAsynchClient getNetwork(final String iCurrentURL) {
  OChannelBinaryAsynchClient network;
  do {
    try {
      network = connectionManager.acquire(iCurrentURL, clientConfiguration, connectionOptions, asynchEventListener);
    } catch (OIOException cause) {
      // I/O failures are propagated unchanged so retry logic upstream can classify them.
      throw cause;
    } catch (Exception cause) {
      throw OException.wrapException(new OStorageException("Cannot open a connection to remote server: " + iCurrentURL), cause);
    }
    if (!network.tryLock()) {
      // CANNOT LOCK IT, MAYBE HASN'T BE CORRECTLY UNLOCKED BY PREVIOUS USER?
      OLogManager.instance()
          .error(this, "Removing locked network channel '%s' (connected=%s)...", iCurrentURL, network.isConnected());
      connectionManager.remove(network);
      network = null;
    }
  } while (network == null);
  return network;
}
/**
 * Begins reading a response for this session's server, storing any renewed authentication
 * token sent back by the server.
 *
 * @param iNetwork the channel to read the response from
 * @param session  session owning the per-server node session
 * @throws IOException on read errors
 */
public void beginResponse(OChannelBinaryAsynchClient iNetwork, OStorageRemoteSession session) throws IOException {
  OStorageRemoteNodeSession nodeSession = session.getServerSession(iNetwork.getServerURL());
  byte[] newToken = iNetwork.beginResponse(nodeSession.getSessionId(), true);
  if (newToken != null && newToken.length > 0) {
    nodeSession.setSession(nodeSession.getSessionId(), newToken);
  }
}
/**
 * Consumes a response that carries no payload: begins and immediately ends the response,
 * which still processes status/errors and any token renewal.
 *
 * @param iNetwork the channel to drain
 * @param session  session owning the per-server node session
 * @throws IOException on read errors or a remote error response
 */
protected void getResponse(final OChannelBinaryAsynchClient iNetwork, OStorageRemoteSession session) throws IOException {
  try {
    beginResponse(iNetwork, session);
  } finally {
    endResponse(iNetwork);
  }
}
/**
 * Deserializes {@code positionsCount} physical positions from the channel. Each entry is
 * read in wire order: cluster position (long), record size (int), record version.
 *
 * @param network        channel positioned at the first entry
 * @param positionsCount number of positions to read
 * @return the deserialized positions
 * @throws IOException on read errors
 */
private OPhysicalPosition[] readPhysicalPositions(OChannelBinaryAsynchClient network, int positionsCount) throws IOException {
  final OPhysicalPosition[] result = new OPhysicalPosition[positionsCount];
  for (int index = 0; index < positionsCount; ++index) {
    final OPhysicalPosition entry = new OPhysicalPosition();
    // Field order must match the server's serialization exactly.
    entry.clusterPosition = network.readLong();
    entry.recordSize = network.readInt();
    entry.recordVersion = network.readVersion();
    result[index] = entry;
  }
  return result;
}
/**
 * Reads the ridbag (SB-tree bonsai) collection changes sent after a commit: for each entry
 * the UUID of the temporary collection (as two longs) followed by its definitive pointer.
 *
 * @param network channel positioned at the changes block
 * @return map of collection pointer to the (most, least) significant bits of its UUID
 * @throws IOException on read errors
 */
private Map<OBonsaiCollectionPointer, OPair<Long, Long>> readCollectionChanges(final OChannelBinaryAsynchClient network)
    throws IOException {
  final int count = network.readInt();
  final Map<OBonsaiCollectionPointer, OPair<Long, Long>> changes = new HashMap<OBonsaiCollectionPointer, OPair<Long, Long>>(
      count);
  for (int i = 0; i < count; i++) {
    // Wire order: UUID most-significant bits, least-significant bits, then the pointer.
    final long mBitsOfId = network.readLong();
    final long lBitsOfId = network.readLong();
    final OBonsaiCollectionPointer pointer = OCollectionNetworkSerializer.INSTANCE.readCollectionPointer(network);
    changes.put(pointer, new OPair<Long, Long>(mBitsOfId, lBitsOfId));
  }
  return changes;
}
/**
 * Applies the collection-pointer changes returned by a commit to the local SB-tree
 * collection manager, mapping each temporary UUID to its definitive pointer. Pending
 * collections are cleared once the outermost serialization context is reached.
 *
 * @param changes           pointer -&gt; (most, least) UUID bits, as read off the wire
 * @param collectionManager target manager; a null manager makes this a no-op
 * @throws IOException declared for interface symmetry with the wire-reading callers
 */
private void updateCollection(final Map<OBonsaiCollectionPointer, OPair<Long, Long>> changes,
    final OSBTreeCollectionManager collectionManager) throws IOException {
  if (collectionManager == null)
    return;
  for (final Map.Entry<OBonsaiCollectionPointer, OPair<Long, Long>> change : changes.entrySet()) {
    final OPair<Long, Long> uuidBits = change.getValue();
    final UUID collectionId = new UUID(uuidBits.getKey(), uuidBits.getValue());
    collectionManager.updateCollectionPointer(collectionId, change.getKey());
  }
  // Only clear once we are back at the outermost (depth <= 1) serialization context.
  if (ORecordSerializationContext.getDepth() <= 1)
    collectionManager.clearPendingCollections();
}
/**
 * Serializes one transaction entry onto the channel as part of a tx-commit
 * request. Entries of type LOADED are skipped. The wire layout per entry is:
 * marker byte (1), operation type, record id, record type, then a
 * type-specific payload (content bytes, version, and — for updates on
 * protocol >= 23 — a content-changed flag).
 *
 * @param iNetwork channel the commit request is being written to
 * @param txEntry  the record operation to serialize
 * @throws IOException if writing to the channel fails
 */
private void commitEntry(final OChannelBinaryAsynchClient iNetwork, final ORecordOperation txEntry) throws IOException {
  if (txEntry.type == ORecordOperation.LOADED)
    // JUMP LOADED OBJECTS
    return;
  // SERIALIZE THE RECORD IF NEEDED. THIS IS DONE HERE TO CATCH EXCEPTION AND SEND A -1 AS ERROR TO THE SERVER TO SIGNAL THE ABORT
  // OF TX COMMIT
  byte[] stream = null;
  try {
    switch (txEntry.type) {
    case ORecordOperation.CREATED:
    case ORecordOperation.UPDATED:
      stream = txEntry.getRecord().toStream();
      break;
    }
  } catch (Exception e) {
    // ABORT TX COMMIT: -1 tells the server to abandon the transaction.
    iNetwork.writeByte((byte) -1);
    throw OException.wrapException(new OTransactionException("Error on transaction commit"), e);
  }
  // Marker byte 1 announces that another entry follows.
  iNetwork.writeByte((byte) 1);
  iNetwork.writeByte(txEntry.type);
  iNetwork.writeRID(txEntry.getRecord().getIdentity());
  iNetwork.writeByte(ORecordInternal.getRecordType(txEntry.getRecord()));
  switch (txEntry.type) {
  case ORecordOperation.CREATED:
    iNetwork.writeBytes(stream);
    break;
  case ORecordOperation.UPDATED:
    iNetwork.writeVersion(txEntry.getRecord().getVersion());
    iNetwork.writeBytes(stream);
    // Content-changed flag is only understood by server protocol >= 23.
    if (iNetwork.getSrvProtocolVersion() >= 23)
      iNetwork.writeBoolean(ORecordInternal.isContentChanged(txEntry.getRecord()));
    break;
  case ORecordOperation.DELETED:
    // Deletes only need the version for MVCC conflict detection.
    iNetwork.writeVersion(txEntry.getRecord().getVersion());
    break;
  }
}
/**
 * Handles a server-side "database frozen" condition by waiting the
 * configured release timeout before signalling the caller to retry.
 *
 * @return {@code true} if the caller should retry the operation,
 *         {@code false} if the wait was interrupted
 */
private boolean handleDBFreeze() {
  // Read the timeout once so the logged value and the actual wait always agree
  // (previously the configuration was queried twice via two different accessors).
  final int releaseWaitTimeout = OGlobalConfiguration.CLIENT_DB_RELEASE_WAIT_TIMEOUT.getValueAsInteger();
  OLogManager.instance().warn(this, "DB is frozen will wait for " + releaseWaitTimeout + " ms. and then retry.");
  try {
    Thread.sleep(releaseWaitTimeout);
  } catch (InterruptedException ie) {
    // Restore the interrupt flag and tell the caller not to retry.
    Thread.currentThread().interrupt();
    return false;
  }
  return true;
}
/**
 * Reads the cluster configuration sent by the server and rebuilds the local
 * cluster array and name map under the storage write lock.
 * Wire format depends on the server protocol version (see inline notes).
 *
 * @param network channel positioned at the cluster list
 * @throws IOException if reading from the channel fails
 */
private void readDatabaseInformation(final OChannelBinaryAsynchClient network) throws IOException {
  // @COMPATIBILITY 1.0rc8
  final int tot = network.getSrvProtocolVersion() >= 7 ? network.readShort() : network.readInt();
  stateLock.acquireWriteLock();
  try {
    // Rebuild from scratch: the server sends the full cluster set each time.
    clusters = new OCluster[tot];
    clusterMap.clear();
    for (int i = 0; i < tot; ++i) {
      final OClusterRemote cluster = new OClusterRemote();
      String clusterName = network.readString();
      final int clusterId = network.readShort();
      if (clusterName != null) {
        clusterName = clusterName.toLowerCase();
        // Older protocols (< 24) also transferred a cluster type string; discard it.
        if (network.getSrvProtocolVersion() < 24)
          network.readString();
        // Protocols 12..23 transferred a data-segment id; it is read only to advance the stream.
        final int dataSegmentId =
            network.getSrvProtocolVersion() >= 12 && network.getSrvProtocolVersion() < 24 ? (int) network.readShort() : 0;
        cluster.configure(this, clusterId, clusterName);
        // Cluster ids may be sparse; grow the array so the id can be used as index.
        if (clusterId >= clusters.length)
          clusters = Arrays.copyOf(clusters, clusterId + 1);
        clusters[clusterId] = cluster;
        clusterMap.put(clusterName, cluster);
      }
    }
    final OCluster defaultCluster = clusterMap.get(CLUSTER_DEFAULT_NAME);
    if (defaultCluster != null)
      defaultClusterId = clusterMap.get(CLUSTER_DEFAULT_NAME).getId();
  } finally {
    stateLock.releaseWriteLock();
  }
}
/**
 * Sends a record-delete style request (delete / hide / cleanout) and handles
 * the response according to the requested mode.
 * <p>
 * Mode 0 (synchronous): waits for the response and returns whether the
 * record was actually deleted. Mode 1 (asynchronous): schedules the response
 * read on the async executor, notifies {@code iCallback}, and returns
 * {@code false} immediately. Any other mode sends the request fire-and-forget
 * and returns {@code false}.
 *
 * @param command  protocol request id to send
 * @param iRid     record to delete
 * @param iVersion expected record version (MVCC)
 * @param iMode    0 = synchronous, 1 = asynchronous
 * @param iCallback invoked with the result in asynchronous mode, may be null
 * @param network  channel to use
 * @param session  session issuing the request
 * @return the deletion result in synchronous mode, {@code false} otherwise
 * @throws IOException if the request cannot be written
 */
private boolean deleteRecord(byte command, final ORecordId iRid, final int iVersion, int iMode,
    final ORecordCallback<Boolean> iCallback, final OChannelBinaryAsynchClient network, final OStorageRemoteSession session)
    throws IOException {
  try {
    beginRequest(network, command, session);
    network.writeRID(iRid);
    network.writeVersion(iVersion);
    network.writeByte((byte) iMode);
  } finally {
    // Release the request side of the channel regardless of write failures.
    endRequest(network);
  }
  switch (iMode) {
  case 0:
    // SYNCHRONOUS
    try {
      beginResponse(network, session);
      // Server answers 1 when the record was deleted, 0 otherwise.
      return network.readByte() == 1;
    } finally {
      endResponse(network);
    }
  case 1:
    // ASYNCHRONOUS
    if (iCallback != null) {
      Callable<Object> response = new Callable<Object>() {
        public Object call() throws Exception {
          Boolean result;
          try {
            beginResponse(network, session);
            result = network.readByte() == 1;
          } finally {
            endResponse(network);
          }
          // Deliver the outcome to the caller-supplied callback.
          iCallback.call(iRid, result);
          return null;
        }
      };
      asynchExecutor.submit(new FutureTask<Object>(response));
    }
  }
  // Asynchronous and fire-and-forget modes report false to the caller.
  return false;
}
/**
 * Returns the remote session bound to the database on the current thread,
 * lazily creating and registering one on first access.
 *
 * @return the session, or {@code null} when no database is attached to this thread
 */
protected OStorageRemoteSession getCurrentSession() {
  final ODatabaseDocumentTx db = (ODatabaseDocumentTx) ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
  if (db == null)
    return null;
  OStorageRemoteSession session = (OStorageRemoteSession) ODatabaseDocumentTxInternal.getSessionMetadata(db);
  if (session != null)
    return session;
  // First access from this database: allocate a new (negative, unique) session id,
  // track it in the storage-wide set and attach it to the database instance.
  final OStorageRemoteSession created = new OStorageRemoteSession(sessionSerialId.decrementAndGet());
  sessions.add(created);
  ODatabaseDocumentTxInternal.setSessionMetadata(db, created);
  return created;
}
/**
 * Reports whether this storage is closed, taking both the storage-wide state
 * and the current thread's session into account.
 */
@Override
public boolean isClosed() {
  if (super.isClosed())
    return true;
  final OStorageRemoteSession session = getCurrentSession();
  // No session bound to this thread means the storage is considered open.
  return session != null && session.isClosed();
}
/**
 * Copies the session credentials from {@code source} to {@code dest} and
 * reopens the remote database on the current thread. The thread-local
 * database is restored afterwards.
 *
 * @param source database whose session metadata is copied
 * @param dest   database receiving the new session
 * @return this storage instance
 */
@Override
public OStorageRemote copy(final ODatabaseDocumentTx source, final ODatabaseDocumentTx dest) {
  ODatabaseDocumentInternal origin = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
  final OStorageRemoteSession session = (OStorageRemoteSession) ODatabaseDocumentTxInternal.getSessionMetadata(source);
  if (session != null) {
    // TODO:may run a session reopen
    final OStorageRemoteSession newSession = new OStorageRemoteSession(sessionSerialId.decrementAndGet());
    newSession.connectionUserName = session.connectionUserName;
    newSession.connectionUserPassword = session.connectionUserPassword;
    ODatabaseDocumentTxInternal.setSessionMetadata(dest, newSession);
  }
  try {
    dest.activateOnCurrentThread();
    openRemoteDatabase();
  } catch (IOException e) {
    // Route through the storage logger instead of dumping to stderr so the
    // failure is visible in the regular log stream.
    OLogManager.instance().error(this, "Error during remote database copy", e);
  } finally {
    // Always restore the database that was active before the copy.
    ODatabaseRecordThreadLocal.INSTANCE.set(origin);
  }
  return this;
}
/**
 * Streams a database import to the server and forwards progress messages to
 * {@code listener}. The input stream content is sent in 1 KB chunks followed
 * by a null terminator; the response is a sequence of status strings ending
 * with null. The socket timeout is temporarily widened while reading the
 * response because the server emits messages for as long as the import runs.
 *
 * @param options     import options string passed to the server
 * @param inputStream source of the database export to import
 * @param name        name of the import
 * @param listener    receives server-side progress messages
 */
public void importDatabase(final String options, final InputStream inputStream, final String name,
    final OCommandOutputListener listener) {
  networkOperationRetry(new OStorageRemoteOperation<Void>() {
    @Override
    public Void execute(OChannelBinaryAsynchClient network, OStorageRemoteSession session) throws IOException {
      try {
        beginRequest(network, OChannelBinaryProtocol.REQUEST_DB_IMPORT, session);
        network.writeString(options);
        network.writeString(name);
        byte[] buffer = new byte[1024];
        int size;
        while ((size = inputStream.read(buffer)) > 0) {
          network.writeBytes(buffer, size);
        }
        // Null byte array marks the end of the import payload.
        network.writeBytes(null);
      } finally {
        endRequest(network);
      }
      int timeout = network.getSocketTimeout();
      try {
        // Import messages are sent while import is running, using the request timeout instead of message timeout to avoid early
        // reading interrupt.
        network.setSocketTimeout(OGlobalConfiguration.NETWORK_REQUEST_TIMEOUT.getValueAsInteger());
        beginResponse(network, session);
        String message;
        // Null string marks the end of the server's progress messages.
        while ((message = network.readString()) != null) {
          listener.onMessage(message);
        }
      } finally {
        endResponse(network);
        // Restore the original timeout for subsequent operations on this channel.
        network.setSocketTimeout(timeout);
      }
      return null;
    }
  }, "Error sending import request", 0);
}
}
Java | public class DoFnRunnerWithMetrics<InT, OutT> implements DoFnRunner<InT, OutT> {
private final DoFnRunner<InT, OutT> underlying;
private final SamzaMetricsContainer metricsContainer;
private final FnWithMetricsWrapper metricsWrapper;
private DoFnRunnerWithMetrics(
DoFnRunner<InT, OutT> underlying, SamzaMetricsContainer metricsContainer, String stepName) {
this.underlying = underlying;
this.metricsContainer = metricsContainer;
this.metricsWrapper = new FnWithMetricsWrapper(metricsContainer, stepName);
}
public static <InT, OutT> DoFnRunner<InT, OutT> wrap(
DoFnRunner<InT, OutT> doFnRunner, SamzaMetricsContainer metricsContainer, String stepName) {
return new DoFnRunnerWithMetrics<>(doFnRunner, metricsContainer, stepName);
}
@Override
public void startBundle() {
withMetrics(() -> underlying.startBundle());
}
@Override
public void processElement(WindowedValue<InT> elem) {
withMetrics(() -> underlying.processElement(elem));
}
@Override
public void onTimer(
String timerId, BoundedWindow window, Instant timestamp, TimeDomain timeDomain) {
withMetrics(() -> underlying.onTimer(timerId, window, timestamp, timeDomain));
}
@Override
public void finishBundle() {
withMetrics(() -> underlying.finishBundle());
metricsContainer.updateMetrics();
}
@Override
public DoFn<InT, OutT> getFn() {
return underlying.getFn();
}
private void withMetrics(Runnable runnable) {
try {
metricsWrapper.wrap(
() -> {
runnable.run();
return (Void) null;
});
} catch (Exception e) {
throw new RuntimeException(e);
}
}
} |
Java | static class IncomingMessageHandler extends Handler {
private final WeakReference<AudioPlayer> mService;
IncomingMessageHandler(AudioPlayer service) {
mService = new WeakReference<>(service);
}
@Override
public void handleMessage(Message msg) {
AudioPlayer service = mService.get();
if (service != null && service.audioServiceBinder != null) {
/* The update process message is sent from AudioServiceBinder class's thread object */
if (msg.what == service.audioServiceBinder.UPDATE_AUDIO_PROGRESS_BAR) {
try {
int position = service.audioServiceBinder.getCurrentAudioPosition();
int duration = service.audioServiceBinder.getAudioPlayer().getDuration();
if (position <= duration) {
JSONObject message = new JSONObject();
message.put("name", "onTime");
message.put("time",
service.audioServiceBinder.getCurrentAudioPosition() / 1000);
service.eventSink.success(message);
}
} catch (Exception e) { /* ignore */ }
} else if (msg.what == service.audioServiceBinder.UPDATE_PLAYER_STATE_TO_PAUSE) {
service.notifyDartOnPause();
} else if (msg.what == service.audioServiceBinder.UPDATE_PLAYER_STATE_TO_PLAY) {
service.notifyDartOnPlay();
} else if (msg.what == service.audioServiceBinder.UPDATE_PLAYER_STATE_TO_COMPLETE) {
service.notifyDartOnComplete();
} else if (msg.what == service.audioServiceBinder.UPDATE_PLAYER_STATE_TO_ERROR) {
service.notifyDartOnError(msg.obj.toString());
} else if (msg.what == service.audioServiceBinder.UPDATE_AUDIO_DURATION) {
service.onDuration();
}
}
}
} |
Java | public abstract class Constants {
private static final String apiKey = BuildConfig.APIKey;
public static final String popularSortLink = "https://api.themoviedb.org/3/movie/popular?api_key=" + apiKey ;
public static final String topRatingSortLink = "https://api.themoviedb.org/3/movie/top_rated?api_key=" + apiKey;
public static final String tLink1 = "https://api.themoviedb.org/3/movie/";
public static final String tLink2 = "/videos?api_key=" + apiKey;
public static final String rLink1 = "https://api.themoviedb.org/3/movie/";
public static final String rLink2 = "/reviews?api_key=" + apiKey;
public static final String imageUrl1 = "http://image.tmdb.org/t/p/" + "w780";
public static final String imageUrl = "http://image.tmdb.org/t/p/" + "w342" ;
public static final String trailerVideo = "https://www.youtube.com/watch?v=";
public static final String FAVORITES_KEY = "FAVORITES-KEY";
public static final String RECYCLER_STATE = "poster_recycler_state";
public static final String FAVORITES_VIEW_STATE = "favorites-view-state";
} |
Java | public class SecurityLongCellTransformer
implements CellTransformer<Map.Entry<Key,Value>, SecurityLongValueCell> {
private boolean recordTsAndColVis;
private boolean recordColFamilies;
public SecurityLongCellTransformer(boolean recordTimestampAndColVis, boolean recordColFam){
recordTsAndColVis = recordTimestampAndColVis;
recordColFamilies = recordColFam;
}
public SecurityLongCellTransformer() {
recordTsAndColVis = false;
recordColFamilies = false;
}
public CellGroup<SecurityLongValueCell> apply(Map.Entry<Key, Value> dbItem,
CellGroup<SecurityLongValueCell> cellGroup) {
String activeRowId = dbItem.getKey().getRow().toString();
if (!cellGroup.getTag().equals(activeRowId)) {
cellGroup = new CellGroup<SecurityLongValueCell>(activeRowId);
}
String label = dbItem.getKey().getColumnQualifier().toString();
byte [] valueBytes = dbItem.getValue().get();
long value = valueBytes.length > 0 ? ByteBuffer.wrap(valueBytes).asLongBuffer().get() : Defaults.defaultValue(long.class);
String colVis = dbItem.getKey().getColumnVisibility().toString();
String colFam = dbItem.getKey().getColumnFamily().toString();
long timestamp = dbItem.getKey().getTimestamp();
SecurityLongValueCell cell;
if(recordColFamilies & recordTsAndColVis) {
cell = new SecurityLongValueCell(label, value, timestamp, colVis, colFam);
} else if (recordColFamilies) {
cell = new SecurityLongValueCell(label, value, colFam);
} else if (recordTsAndColVis) {
cell = new SecurityLongValueCell(label, value, timestamp, colVis);
} else {
cell = new SecurityLongValueCell(label, value);
}
cellGroup.addCell(cell);
return cellGroup;
}
} |
Java | @Named
@SessionScoped
public class IndexView implements Serializable{
@PostConstruct
private void init(){
question = "Hi Frank! How is your mood today?";
step=0;
selectedDomains= new ArrayList<>();
}
private String question;
private int step;
private List<String> selectedDomains;
public String getQuestion() {
return question;
}
public void setQuestion(String question) {
this.question = question;
}
public void save(int step){
this.step=step+1;
if (this.step==1){
question ="Good to hear that. Why are you feeling so good?";
} else {
question ="...not surprisingly..I know that your mates feels the same way.";
}
}
public void reset(){
init();
}
public int getStep() {
return step;
}
public List<String> getSelectedDomains() {
return selectedDomains;
}
public void setSelectedDomains(List<String> selectedDomains) {
this.selectedDomains = selectedDomains;
}
} |
/**
 * Monte Carlo simulation comparing two maintenance policies for a machine
 * with four components, each with a random lifetime drawn from TiempoU.
 * Policy 1 replaces only the failed component; policy 2 replaces all four
 * whenever any one fails. Costs and replacement counts are accumulated per
 * run and compared in {@link #promediocostos()}.
 * <p>
 * NOTE(review): the loop index {@code i} is incremented inside the failure
 * branches to model replacement downtime — the simulation horizon therefore
 * shrinks with each replacement. The exact statement order is the behavior;
 * do not reorder.
 */
public class Equipo {
int comp1,comp2,comp3,comp4;  // remaining lifetime of each component, in time units
TiempoU t= new TiempoU();     // random lifetime generator
ArrayList costop1= new ArrayList();      // total cost of each policy-1 run
ArrayList costop2= new ArrayList();      // total cost of each policy-2 run
ArrayList suspencionp2 = new ArrayList();  // replacement count per policy-2 run
ArrayList suspencionp1 = new ArrayList();  // replacement count per policy-1 run
public Equipo(){
t.variablesaleatorias();
}
/**
 * Simulates policy 1 over 20000 time units: when a component fails, only
 * that component is replaced (cost 200 + penalty 100 per replacement, plus
 * one time unit of downtime via the inner i++).
 */
public void politica1(){
int suspenciones1=0;
comp1=t.retornarVida_u();
comp2=t.retornarVida_u();
comp3=t.retornarVida_u();
comp4=t.retornarVida_u();
int penalizacion=0;
int costoxc=0;
int costo=0;
for(int i=0;i<20000;i++){
// Age all components one unit only while every component is alive.
if(comp1!=0 && comp2!=0 &&comp3!=0 && comp4!=0){
comp1--;
comp2--;
comp3--;
comp4--;
}
if(comp1==0){
comp1=t.retornarVida_u();
costoxc+=200;
penalizacion+=100;
i++;
suspenciones1++;
}
if(comp2==0){
comp2=t.retornarVida_u();
costoxc+=200;
penalizacion+=100;
i++;
suspenciones1++;
}
if(comp3==0){
comp3=t.retornarVida_u();
costoxc+=200;
penalizacion+=100;
i++;
suspenciones1++;
}
if(comp4==0){
comp4=t.retornarVida_u();
costoxc+=200;
penalizacion+=100;
i++;
suspenciones1++;
}
}
costo=costoxc+penalizacion;
costop1.add(costo);
suspencionp1.add(suspenciones1);
}
/**
 * Simulates policy 2 over 20000 time units: when any component fails, all
 * four are replaced together (cost 800 + penalty 200 per event, plus two
 * time units of downtime via the inner i+=2).
 */
public void politica2(){
int suspenciones2=0;
comp1=t.retornarVida_u();
comp2=t.retornarVida_u();
comp3=t.retornarVida_u();
comp4=t.retornarVida_u();
int penalizacion=0;
int costoxc=0;
int costo=0;
for(int i=0;i<20000;i++){
if(comp1!=0 && comp2!=0 &&comp3!=0 && comp4!=0){
comp1--;
comp2--;
comp3--;
comp4--;
}
if(comp1==0){
comp1=t.retornarVida_u();
comp2=t.retornarVida_u();
comp3=t.retornarVida_u();
comp4=t.retornarVida_u();
costoxc+=800;
penalizacion+=200;
i+=2;
suspenciones2++;
}
if(comp2==0){
comp1=t.retornarVida_u();
comp2=t.retornarVida_u();
comp3=t.retornarVida_u();
comp4=t.retornarVida_u();
costoxc+=800;
penalizacion+=200;
i+=2;
suspenciones2++;
}
if(comp3==0){
comp1=t.retornarVida_u();
comp2=t.retornarVida_u();
comp3=t.retornarVida_u();
comp4=t.retornarVida_u();
costoxc+=800;
penalizacion+=200;
i+=2;
suspenciones2++;
}
if(comp4==0){
comp1=t.retornarVida_u();
comp2=t.retornarVida_u();
comp3=t.retornarVida_u();
comp4=t.retornarVida_u();
costoxc+=800;
penalizacion+=200;
i+=2;
suspenciones2++;
}
}
costo=costoxc+penalizacion;
costop2.add(costo);
suspencionp2.add(suspenciones2);
}
/**
 * Averages the accumulated costs of both policies and prints which one is
 * cheaper. Assumes both cost lists are non-empty (politica1/politica2 were
 * run at least once) — TODO confirm callers guarantee this.
 */
public void promediocostos(){
int n = 0,m=0,s1=0,s2=0;
for(int i=0;i<costop1.size();i++){
n+=(int) costop1.get(i);
}
n=n/costop1.size();
for(int i=0;i<costop2.size();i++){
m+=(int) costop2.get(i);
}
m=m/costop2.size();
if(n<m){
System.out.println("La politica 1 tiene un costo menor promedio por ende es la mejor opción a implementar " + "\n"
+"Politica 1: "+n + "\n"+ "Politica 2: "+m);
}else{
System.out.println("La politica 2 tiene un costo menor promedio por ende es la mejor opción a implementar " + "\n"
+"Politica 2: "+m + "\n"+ "Politica 1: "+n);
}
}
}
Java | public class WebSensors {
/**
* Método da main principal para execução do programa
* Faz a leitura do arquivo com a entrada do dados e seta os valores em um objeto Coordenada
* Após passa esse objeto a um ArrayList, finalizando assim a leitura
* Em seguida é chamado o KMeans para a execução.
* @author Paulo Henrique Lima de Paula
* @throws java.lang.Exception
*/
public static void main(String[] args) throws Exception {
BufferedReader br = new BufferedReader(new FileReader(System.getProperty("user.dir")+"\\entrada.csv"));
ArrayList posicaoCoordenada = new ArrayList();
while(br.ready()){
String[] linha = br.readLine().split(";");
Coordenada c = new Coordenada();
c.setId(linha[0]);
c.setLat(Double.parseDouble(linha[1]));
c.setLon(Double.parseDouble(linha[2]));
posicaoCoordenada.add(c);
}
br.close();
KMeans kmeans = new KMeans(5,posicaoCoordenada);
kmeans.segmentar(100);
}
} |
Java | public class GetNewsDocumentExample {
public static void main(String[] args) {
AlchemyDataNews service = new AlchemyDataNews();
service.setApiKey("<api_key>");
Map<String, Object> params = new HashMap<String, Object>();
String[] fields =
new String[] { "enriched.url.title", "enriched.url.url", "enriched.url.author", "enriched.url.publicationDate",
"enriched.url.enrichedTitle.entities", "enriched.url.enrichedTitle.docSentiment"};
params.put(AlchemyDataNews.RETURN, StringUtils.join(fields, ","));
params.put(AlchemyDataNews.START, "1440720000");
params.put(AlchemyDataNews.END, "1441407600");
params.put(AlchemyDataNews.COUNT, 7);
// Query on adjacent nested fields:
params.put("q.enriched.url.enrichedTitle.entities.entity", "|text=IBM,type=company|");
params.put("q.enriched.url.enrichedTitle.docSentiment.type", "positive");
params.put("q.enriched.url.enrichedTitle.taxonomy.taxonomy_.label", "technology and computing");
DocumentsResult result = service.getNewsDocuments(params).execute();
System.out.println(result);
}
} |
public class Problem72 {
    /**
     * Computes the Levenshtein (edit) distance between {@code word1} and
     * {@code word2}: the minimum number of single-character insertions,
     * deletions and replacements needed to turn one into the other.
     *
     * Let following be the function definition :-
     *
     * f(i, j) := minimum cost (or steps) required to convert first i characters of word1 to first j characters of word2
     *
     * Case 1: word1[i] == word2[j], i.e. the ith the jth character matches.
     * f(i, j) = f(i - 1, j - 1)
     *
     * Case 2: word1[i] != word2[j], then we must either insert, delete or replace, whichever is cheaper
     * f(i, j) = 1 + min { f(i, j - 1), f(i - 1, j), f(i - 1, j - 1) }
     *
     * f(i, j - 1) represents insert operation
     * f(i - 1, j) represents delete operation
     * f(i - 1, j - 1) represents replace operation
     *
     * The problem is symmetric, so either direction of conversion works.
     * Runs in O(m*n) time and space for input lengths m and n.
     *
     * Package-private (was private) so the algorithm can be reused and
     * unit-tested instead of being reachable only through {@link #main}.
     *
     * @param word1 source word, must not be null
     * @param word2 target word, must not be null
     * @return the edit distance between the two words
     */
    static int minDistance(String word1, String word2) {
        int m = word1.length(), n = word2.length();
        int[][] dp = new int[m + 1][n + 1];
        // Base cases: converting a prefix to/from the empty string costs its length.
        IntStream.range(1, m + 1).forEach(i -> dp[i][0] = i);
        IntStream.range(1, n + 1).forEach(i -> dp[0][i] = i);
        for (int i = 1; i <= m; i++) {
            for (int j = 1; j <= n; j++) {
                if (word1.charAt(i - 1) == word2.charAt(j - 1)) {
                    dp[i][j] = dp[i - 1][j - 1];
                } else {
                    // Cheapest of replace, delete, insert — plus the operation itself.
                    dp[i][j] = Math.min(dp[i - 1][j - 1], Math.min(dp[i - 1][j], dp[i][j - 1])) + 1;
                }
            }
        }
        return dp[m][n];
    }
    /**
     * Main method for test cases
     * @param args
     */
    public static void main(String[] args) {
        String word1 = "horse", word2 = "ros";
        System.out.println(minDistance(word1, word2));
    }
}
/**
 * Telegram Bot API {@code sendPoll} method payload. Built fluently via the
 * {@code create()} factory and the chaining setters; serialized to JSON by
 * Jackson, omitting null fields.
 */
@NoArgsConstructor
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
public class SendPoll implements MethodObject {
    /**
     * Unique identifier for the target chat or username of the target channel (in the format @channelusername)
     */
    @JsonProperty(value = "chat_id", required = true)
    private Object chatId;
    /**
     * Poll question, 1-255 characters
     */
    @JsonProperty(required = true)
    private String question;
    /**
     * List of answer options, 2-10 strings 1-100 characters each
     */
    @JsonProperty(required = true)
    private List<String> options;
    /**
     * Sends the message silently. Users will receive a notification with no sound.
     */
    @JsonProperty(value = "disable_notification", required = false)
    private Boolean disableNotification;
    /**
     * If the message is a reply, ID of the original message
     */
    @JsonProperty(value = "reply_to_message_id", required = false)
    private Integer replyToMessageId;
    /**
     * Additional interface options. A JSON-serialized object for an inline keyboard, custom reply keyboard, instructions to remove reply keyboard or to force a reply from the user.
     * <p>
     * InlineKeyboardMarkup or ReplyKeyboardMarkup or ReplyKeyboardRemove or ForceReply
     */
    @JsonProperty(value = "reply_markup", required = false)
    private Object replyMarkup;

    /** Factory entry point for fluent construction. */
    public static SendPoll create() {
        return new SendPoll();
    }

    /** Returns the Bot API endpoint name for this method. */
    @Override
    @JsonIgnore
    public String getPathMethod() {
        return "sendPoll";
    }

    /** Sets the target chat id or @channelusername; returns this for chaining. */
    public SendPoll chatId(Object chatId) {
        this.chatId = chatId;
        return this;
    }

    /** Sets the poll question; returns this for chaining. */
    public SendPoll question(String question) {
        this.question = question;
        return this;
    }

    /** Sets the answer options; returns this for chaining. */
    public SendPoll options(List<String> options) {
        this.options = options;
        return this;
    }

    /** Enables/disables silent delivery; returns this for chaining. */
    public SendPoll disableNotification(Boolean disableNotification) {
        this.disableNotification = disableNotification;
        return this;
    }

    /** Sets the replied-to message id; returns this for chaining. */
    public SendPoll replyToMessageId(Integer replyToMessageId) {
        this.replyToMessageId = replyToMessageId;
        return this;
    }

    /** Sets the reply-markup object; returns this for chaining. */
    public SendPoll replyMarkup(Object replyMarkup) {
        this.replyMarkup = replyMarkup;
        return this;
    }
}
/**
 * Ribbon server predicate that routes a request to service instances whose
 * Eureka metadata "version" matches a version chosen once per request.
 * The version is picked according to configured traffic-splitting rules
 * (percentage ranges per version) and cached in the per-request predicate
 * context so every retry/filter in the same request agrees.
 */
@Slf4j
@Component
public class MetadataVersionPredicate extends AbstractServerPredicate {
    // Key under which the chosen version is cached in the request context.
    public final static String CURRENT_REQUEST_CONTEXT_VERSION;
    // Shared RNG for weighted version selection.
    private final static Random RANDOM;
    static {
        CURRENT_REQUEST_CONTEXT_VERSION = "current-request-context-version";
        RANDOM = new Random(System.currentTimeMillis());
    }
    @Autowired
    private MetadataBalancingProperties metadataBalancingProperties;
    @Autowired
    private DiscoveryClient discoveryClient;
    /**
     * Accepts the server when its metadata version equals the version chosen
     * for the current request. Non-Eureka servers are always accepted.
     */
    @Override
    public boolean apply(PredicateKey predicateKey) {
        val server = predicateKey.getServer();
        if (!(server instanceof DiscoveryEnabledServer)) {
            return true;
        }
        val instanceInfo = ((DiscoveryEnabledServer) server).getInstanceInfo();
        val appName = instanceInfo.getAppName().toLowerCase();
        // Instances without a version tag are treated as version "0".
        val appVersion = instanceInfo.getMetadata().getOrDefault("version", "0");
        log.debug("Checking service '{}:{}'", appName, appVersion);
        val requestContextVersion = getRequestContextServiceVersion(appName, appVersion);
        val result = requestContextVersion.equals(appVersion);
        if (result) {
            log.info("Service '{}:{}' was chosen", appName, appVersion);
        } else {
            log.info("Service '{}:{}' was not chosen", appName, appVersion);
        }
        return result;
    }
    /**
     * Returns the version chosen for the current request, computing and
     * caching it on first call. Selection: if no traffic rules exist for the
     * app, the candidate's own version wins; otherwise a weighted random draw
     * over the configured percentage ranges picks among the versions that are
     * actually UP, falling back to the latest available version.
     */
    private String getRequestContextServiceVersion(String appName, String appVersion) {
        PredicateContextHolder currentContext = PredicateContextHolder.getCurrentContext();
        // Reuse the version already chosen earlier in this request.
        if (currentContext.containsKey(CURRENT_REQUEST_CONTEXT_VERSION)) {
            val version = currentContext.getString(CURRENT_REQUEST_CONTEXT_VERSION);
            log.debug("Request context version is {}", version);
            return version;
        }
        // Versions of all UP instances of the app, per Eureka.
        val availableVersions = discoveryClient.getInstances(appName)
            .stream()
            .filter(it -> it instanceof EurekaServiceInstance)
            .map(it -> ((EurekaServiceInstance) it).getInstanceInfo())
            .filter(it -> it.getStatus() == UP)
            .map(it -> it.getMetadata().get("version"))
            .filter(Objects::nonNull)
            .collect(toSet());
        // Highest semantic version among the available ones; used as fallback.
        val latestVersion = availableVersions
            .stream()
            .map(DefaultArtifactVersion::new)
            .max(Comparator.naturalOrder())
            .map(DefaultArtifactVersion::toString)
            .orElse(appVersion);
        log.debug("Latest version of {} is {}", appName, latestVersion);
        val trafficRules = metadataBalancingProperties.getRules();
        if (trafficRules == null || !trafficRules.containsKey(appName)) {
            log.debug("No traffic rules for {}, set context version as {}",
                appName, appVersion);
            currentContext.put(CURRENT_REQUEST_CONTEXT_VERSION, appVersion);
            return appVersion;
        }
        val ranges = new HashMap<>(metadataBalancingProperties.getRanges().get(appName));
        if (isMigration(appName)) {
            // During migration, untagged instances compete as synthetic version "0"
            // with the remaining share of the traffic.
            // NOTE(review): this mutates the shared rule map in place — verify intended.
            log.debug("{} has migration version", appName);
            val rangeRule = trafficRules.get(appName);
            val value = rangeRule.values().iterator().next();
            rangeRule.put("0", 100 - value);
            ranges.put("0", new Range(value, 100));
            availableVersions.add("0");
        }
        // Weighted random draw over the total configured percentage.
        val bound = trafficRules.get(appName).values().stream().mapToInt(it -> it).sum();
        val value = RANDOM.nextInt(bound);
        log.debug("Random value is {}", value);
        val version = ranges
            .entrySet()
            .stream()
            .filter(it -> {
                Range range = it.getValue();
                return range.getFrom() <= value && value < range.getTo() &&
                    availableVersions.contains(it.getKey());
            })
            .findFirst()
            .map(Entry::getKey)
            .orElse(latestVersion);
        log.debug("Set context version for {} as {}", appName, version);
        currentContext.put(CURRENT_REQUEST_CONTEXT_VERSION, version);
        return version;
    }
    /**
     * Heuristic for "migration in progress": only one version is configured
     * in the ranges while some registered instances still lack a version tag.
     * Missing configuration is conservatively treated as migration.
     */
    private boolean isMigration(String appName) {
        val allRanges = metadataBalancingProperties.getRanges();
        if (allRanges == null) {
            return true;
        }
        val ranges = allRanges.get(appName);
        if (ranges == null) {
            return true;
        }
        val hasOldVersions = discoveryClient.getInstances(appName).stream()
            .map(it -> it.getMetadata().get("version"))
            .anyMatch(Objects::isNull);
        return ranges.size() == 1 && hasOldVersions;
    }
}
/**
 * SLogo command node that reports the number of turtles managed by the
 * current {@link TurtleManager}. Takes no arguments.
 */
public class Turtles extends CommandNode {
    private TurtleManager turtleManager;

    /** Returns the current turtle count. */
    @Override
    protected double execute() {
        return turtleManager.numTurtles();
    }

    /** Captures the turtle manager from the expression tree before execution. */
    @Override
    public void handleSpecific(ExpressionTree tree) {
        this.turtleManager = tree.getTurtleManager();
    }
}
/**
 * Associates a variable trace callback with the condition flags under which
 * it should fire. Plain data holder used by the variable tracing machinery.
 */
class TraceRecord {
    /**
     * Stores info about the conditions under which this trace should be
     * triggered. Should be a combination of TCL.TRACE_READS, TCL.TRACE_WRITES
     * or TCL.TRACE_UNSETS.
     */
    int flags;

    /**
     * Stores the trace procedure to invoke when a trace is fired.
     */
    VarTrace trace;
}
/**
 * Splits {@link ParcelableData} objects into byte-array fragments small
 * enough for IPC, and reassembles such fragments back into objects.
 * An instance works in exactly one direction, depending on which callback
 * interface it was constructed with.
 */
public class QeoParceler
{
    private static final Logger LOG = Logger.getLogger("QeoParceler");
    /** ID to indicate ON_DATA call. */
    public static final int ID_ON_DATA = 0;
    /** ID to indicate ON_REMOVE call. */
    public static final int ID_ON_REMOVE = 1;
    /** ID to indicate ON_READ_OR_TAKE call. */
    public static final int ID_ON_READ_OR_TAKE = 2;
    /** Data size (in bytes) that parcels will be fragmented in. */
    private static final int PARCEL_DATA_SIZE = 128 * 1024; // 128KB
    // Number of bytes of the current object received so far (join direction).
    private int mCurrentSize;
    // Reassembly buffer for the object currently being joined.
    private byte[] mAllData;
    private final SplitCallbacks mSplitCallbacks;
    private final JoinCallbacks mJoinCallbacks;
    /**
     * Create a QeoParceler that can join fragments.
     *
     * @param callbacks The callbacks that will be called if a packet is fully reconstructed.
     */
    public QeoParceler(JoinCallbacks callbacks)
    {
        mJoinCallbacks = callbacks;
        mSplitCallbacks = null;
    }
    /**
     * Create a QeoParceler that can split data into fragments.
     *
     * @param callbacks The callbacks that will be called for each fragment.
     */
    public QeoParceler(SplitCallbacks callbacks)
    {
        mJoinCallbacks = null;
        mSplitCallbacks = callbacks;
    }
    /**
     * Join fragments into a ParcelableData object.<br/>
     * Fragments have to be passed in order.<br/>
     * Fragments from different readers/ids can't be mixed. One ParcelableData object should be fully reconstructed
     * before starting reconstruction on a 2nd one.<br/>
     * It will call JoinCallbacks.partCreated() if an object is created.
     *
     * @param id An id that can be passed. This will be passed to the partCreated() callback.
     * @param firstBlock Indicate that this is the first block.
     * @param lastBlock Indicate that this is the last block.
     * @param totalSize The total size of the data to be joined.
     * @param data The data block.
     */
    public void join(int id, boolean firstBlock, boolean lastBlock, int totalSize, byte[] data)
    {
        // the unmarshall function of parcel needs to have the complete byte array at once, so reconstruct big array
        // first
        int dataSize = data.length;
        if (firstBlock) {
            // first block, assign memory for the complete array.
            LOG.finest("Creating new parcel for writer");
            mAllData = new byte[totalSize];
            mCurrentSize = 0;
        }
        // put the next block of data in the big array
        System.arraycopy(data, 0, mAllData, mCurrentSize, dataSize);
        data = null;
        LOG.finest("Unmarshall " + dataSize + " bytes at position " + mCurrentSize);
        mCurrentSize += dataSize;
        if (lastBlock) {
            // this was the last block, so reconstruct parcel
            Parcel parcel;
            // get a parcel
            parcel = Parcel.obtain();
            // unmarshall data
            parcel.unmarshall(mAllData, 0, mCurrentSize);
            // free memory, big array is not needed anymore now.
            mAllData = null;
            // reset parcel position, very important
            parcel.setDataPosition(0);
            // create ParcelabelData from parcel
            ParcelableData pd = ParcelableData.CREATOR.createFromParcel(parcel);
            // give parcel object back to os.
            parcel.recycle();
            // part created, call callback to let it handle it.
            mJoinCallbacks.onFragmentsJoined(id, pd);
        }
    }
    /**
     * Split ObjectData objects into fragments (byte arrays).<br/>
     * It will call SplitCallbacks.writePart() for every fragment created.
     *
     * @param id An id that can be passed. This will be passed to the writePart() callback.
     * @param data The data to be splitted
     * @return Will return a ParcelableException. It will contain an exception if something went wrong.
     * @throws RemoteException If sending data over the wire fails.
     */
    public ParcelableException split(int id, ObjectData data)
        throws RemoteException
    {
        // Convenience overload: wrap and delegate to the ParcelableData variant.
        return split(id, new ParcelableData(data));
    }
    /**
     * Split ParcelableData objects into fragments (byte arrays).<br/>
     * It will call SplitCallbacks.writePart() for every fragment created.
     *
     * @param id An id that can be passed. This will be passed to the writePart() callback.
     * @param pd The data to be splitted
     * @return Will return a ParcelableException. It will contain an exception if something went wrong.
     * @throws RemoteException If sending data over the wire fails.
     */
    public ParcelableException split(int id, ParcelableData pd)
        throws RemoteException
    {
        // get a parcel from the os.
        Parcel p = Parcel.obtain();
        // create the parcel.
        pd.writeToParcel(p, 0);
        pd = null;
        // query how big the parcel is
        int size = p.dataSize();
        LOG.finest("Writing " + size + " bytes");
        // create empty exception, might get filled if something wrong happens
        final ParcelableException exception = new ParcelableException();
        // create byte array from the parcel
        byte[] buf = p.marshall();
        // give parcel back to the os, no longer needed
        p.recycle();
        p = null;
        if (size < PARCEL_DATA_SIZE) {
            // can send in 1 go
            mSplitCallbacks.onWriteFragment(id, true, true, size, buf, exception);
        }
        else {
            // need to fragment
            int start = 0;
            boolean last = false;
            boolean first = true;
            int i = 0;
            // allocate buffer of parcel size to avoid having to create a new buffer in every iteration.
            byte[] buf2 = new byte[PARCEL_DATA_SIZE];
            do {
                i++;
                int blockSize = PARCEL_DATA_SIZE;
                if (i * PARCEL_DATA_SIZE >= size) {
                    // last block
                    blockSize = size - ((i - 1) * PARCEL_DATA_SIZE);
                    last = true;
                    // Shrink the buffer so the callback receives exactly the remaining bytes.
                    buf2 = new byte[blockSize];
                }
                LOG.finest("Writing parcel " + i + "(" + start + " to " + (start + blockSize) + " of " + size + ")");
                // copy chunk of the big array into a small array
                System.arraycopy(buf, start, buf2, 0, blockSize);
                // call the callback with the created fragment.
                mSplitCallbacks.onWriteFragment(id, first, last, buf.length, buf2, exception);
                first = false;
                start += PARCEL_DATA_SIZE;
            }
            while (!last);
            buf2 = null;
        }
        buf = null;
        return exception;
    }
    /**
     * Interface to be used for splitting ObjectData into fragments.
     */
    public interface SplitCallbacks
    {
        /**
         * Will be called for every fragment created.
         *
         * @param id The id passed to QeoParceler.split()
         * @param firstBlock Indicates that this is the first block.
         * @param lastBlock Indicated that this is the last block.
         * @param totalSize The total size of all the blocks.
         * @param data The block itself.
         * @param exception An exception that can be set by the remote end.
         * @throws RemoteException If writing over the wire fails.
         */
        void onWriteFragment(int id, boolean firstBlock, boolean lastBlock, int totalSize, byte[] data,
            ParcelableException exception)
            throws RemoteException;
    }
    /**
     * Interface to be used for joining fragments into ParcelableData.
     */
    public interface JoinCallbacks
    {
        /**
         * Will be called for every object created.
         *
         * @param id the id passed to QeoParceler.join()
         * @param pd The data itself.
         */
        void onFragmentsJoined(int id, ParcelableData pd);
    }
}
Java | class ABStore {
private SharedPreferences sharedPref;
ABStore(SharedPreferences sharedPref) {
this.sharedPref = sharedPref;
}
int getInt(String name, int defaultValue) {
return sharedPref.getInt(name, defaultValue);
}
void set(String name, int value) {
sharedPref.edit()
.putInt(name, value)
.apply();
}
} |
Java | public class SignalEventDispatcher {
private static final SchemaNameAdjuster schemaNameAdjuster = SchemaNameAdjuster.create();
public static final String DATABASE_NAME = "db";
public static final String TABLE_NAME = "table";
public static final String WATERMARK_SIGNAL = "_split_watermark_signal_";
public static final String SPLIT_ID_KEY = "split_id";
public static final String BINLOG_FILENAME_OFFSET_KEY = "file";
public static final String BINLOG_POSITION_OFFSET_KEY = "pos";
public static final String WATERMARK_KIND = "watermark_kind";
public static final String SIGNAL_EVENT_KEY_SCHEMA_NAME =
"io.debezium.connector.flink.cdc.embedded.watermark.key";
public static final String SIGNAL_EVENT_VALUE_SCHEMA_NAME =
"io.debezium.connector.flink.cdc.embedded.watermark.value";
private final Schema signalEventKeySchema;
private final Schema signalEventValueSchema;
private final MySqlOffsetContext offsetContext;
private final String topic;
private final ChangeEventQueue<DataChangeEvent> queue;
public SignalEventDispatcher(
MySqlOffsetContext offsetContext,
String topic,
ChangeEventQueue<DataChangeEvent> queue) {
this.offsetContext = offsetContext;
this.topic = topic;
this.queue = queue;
this.signalEventKeySchema =
SchemaBuilder.struct()
.name(schemaNameAdjuster.adjust(SIGNAL_EVENT_KEY_SCHEMA_NAME))
.field(SPLIT_ID_KEY, Schema.STRING_SCHEMA)
.field(WATERMARK_SIGNAL, Schema.BOOLEAN_SCHEMA)
.build();
this.signalEventValueSchema =
SchemaBuilder.struct()
.name(schemaNameAdjuster.adjust(SIGNAL_EVENT_VALUE_SCHEMA_NAME))
.field(SPLIT_ID_KEY, Schema.STRING_SCHEMA)
.field(WATERMARK_KIND, Schema.STRING_SCHEMA)
.field(BINLOG_FILENAME_OFFSET_KEY, Schema.STRING_SCHEMA)
.field(BINLOG_POSITION_OFFSET_KEY, Schema.INT64_SCHEMA)
.build();
}
public void dispatchWatermarkEvent(
MySqlSplit mySqlSplit, BinlogOffset watermark, WatermarkKind watermarkKind)
throws InterruptedException {
SourceRecord sourceRecord =
new SourceRecord(
offsetContext.getPartition(),
offsetContext.getPartition(),
topic,
signalEventKeySchema,
signalRecordKey(mySqlSplit.splitId()),
signalEventValueSchema,
signalRecordValue(mySqlSplit.splitId(), watermark, watermarkKind));
queue.enqueue(new DataChangeEvent(sourceRecord));
}
private Struct signalRecordKey(String splitId) {
Struct result = new Struct(signalEventKeySchema);
result.put(SPLIT_ID_KEY, splitId);
result.put(WATERMARK_SIGNAL, true);
return result;
}
private Struct signalRecordValue(
String splitId, BinlogOffset binlogOffset, WatermarkKind watermarkKind) {
Struct result = new Struct(signalEventValueSchema);
result.put(SPLIT_ID_KEY, splitId);
result.put(WATERMARK_KIND, watermarkKind.toString());
result.put(BINLOG_FILENAME_OFFSET_KEY, binlogOffset.getFilename());
result.put(BINLOG_POSITION_OFFSET_KEY, binlogOffset.getPosition());
return result;
}
/** The watermark kind. */
public enum WatermarkKind {
LOW,
HIGH,
BINLOG_END;
public WatermarkKind fromString(String kindString) {
if ("LOW".equalsIgnoreCase(kindString)) {
return LOW;
} else if ("HIGH".equalsIgnoreCase(kindString)) {
return HIGH;
} else {
return BINLOG_END;
}
}
}
} |
Java | public class DisplayCommand extends Command {
public static final String COMMAND_WORD = "display";
public static final String MESSAGE_USAGE = COMMAND_WORD
+ ": Changes the display board to show the specified system.\n"
+ "Parameter: SYSTEM (must be p (pets), s (schedule), c (calendar) or i (inventory)).\n"
+ "Example: display p";
public static final String MESSAGE_SUCCESS = "Display changed to %s. \nShowing all.";
public static final String ADDITIONAL_MESSAGE_INVENTORY = "Double click on each item to view list breakdown.";
public static final String MESSAGE_INVALID_SYSTEM_TYPE = "Invalid system type specified.";
private final DisplaySystemType type;
public DisplayCommand(DisplaySystemType type) {
this.type = type;
}
@Override
public CommandResult execute(Model model) throws CommandException {
requireNonNull(model);
try {
model.updateAll();
model.changeDisplaySystem(type);
} catch (IllegalValueException e) {
throw new CommandException(MESSAGE_INVALID_SYSTEM_TYPE);
}
String message = String.format(getMessageSuccess(), type);
return new CommandResult(message, false, false, type);
}
public String getMessageSuccess() {
if (type.equals(DisplaySystemType.INVENTORY)) {
return MESSAGE_SUCCESS + "\n" + ADDITIONAL_MESSAGE_INVENTORY;
} else {
return MESSAGE_SUCCESS;
}
}
} |
/**
 * Read-only holder for the profile details shown on a user screen.
 * The no-arg constructor fills in hard-coded placeholder data.
 */
public class User {

    private String name;
    private String followers;
    private String following;
    private String projects;
    private String email;
    private String location;
    private String phone;
    private String website;
    private String intro;
    private String status;

    // TODO(review): these placeholder defaults should be removed once real
    // profile data is wired in.
    public User() {
        name = "Cleverchuk";
        status = "Online";
        intro = "My intro, everything special";
        email = "[email protected]";
        phone = "000-000-0000";
        website = "cleverchuk.github.io";
        location = "Death Valley";
        followers = "27K";
        following = "425";
        projects = "434";
    }

    /** Creates a user with only a name; every other field stays {@code null}. */
    public User(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }

    public String getFollowers() {
        return followers;
    }

    public String getFollowing() {
        return following;
    }

    public String getProjects() {
        return projects;
    }

    public String getEmail() {
        return email;
    }

    public String getLocation() {
        return location;
    }

    public String getPhone() {
        return phone;
    }

    public String getWebsite() {
        return website;
    }

    public String getIntro() {
        return intro;
    }

    public String getStatus() {
        return status;
    }
}
Java | public class RestUIPrompt implements UIPrompt
{
private final Deque<String> input = new LinkedList<String>();
public RestUIPrompt()
{
}
public RestUIPrompt(Collection<String> inputs)
{
input.addAll(inputs);
}
@Override
public String prompt(String message)
{
if (input.isEmpty())
throw new InputRequiredException(message);
return input.pop();
}
@Override
public String promptSecret(String message)
{
if (input.isEmpty())
throw new InputRequiredException(message);
return input.pop();
}
@Override
public boolean promptBoolean(String message)
{
return promptBoolean(message, false);
}
@Override
public boolean promptBoolean(String message, boolean defaultValue)
{
if (input.isEmpty())
return defaultValue;
return "Y".equalsIgnoreCase(input.pop());
}
} |
Java | @XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
@XmlRootElement(name = "props")
public class Props
extends PropsType
{
@XmlAttribute(name = "merge")
protected DefaultableBoolean merge;
/**
* Gets the value of the merge property.
*
* @return
* possible object is
* {@link DefaultableBoolean }
*
*/
public DefaultableBoolean getMerge() {
if (merge == null) {
return DefaultableBoolean.DEFAULT;
} else {
return merge;
}
}
/**
* Sets the value of the merge property.
*
* @param value
* allowed object is
* {@link DefaultableBoolean }
*
*/
public void setMerge(DefaultableBoolean value) {
this.merge = value;
}
} |
Java | @VsoObject(create = false, name = "VsApicExtension")
@VsoFinder(name = Constants.FINDER_VRO_VSAPICEXTENSION)
@JsonIgnoreProperties(ignoreUnknown = true)
@Service
public class VsApicExtension extends AviRestResource {
@JsonProperty("se_uuid")
@JsonInclude(Include.NON_NULL)
private String seUuid = null;
@JsonProperty("txn_uuid")
@JsonInclude(Include.NON_NULL)
private String txnUuid = null;
@JsonProperty("uuid")
@JsonInclude(Include.NON_NULL)
private String uuid = null;
@JsonProperty("vnic")
@JsonInclude(Include.NON_NULL)
private List<VsSeVnic> vnic = null;
/**
* This is the getter method this will return the attribute value.
* Unique object identifier of se.
* @return seUuid
*/
@VsoMethod
public String getSeUuid() {
return seUuid;
}
/**
* This is the setter method to the attribute.
* Unique object identifier of se.
* @param seUuid set the seUuid.
*/
@VsoMethod
public void setSeUuid(String seUuid) {
this.seUuid = seUuid;
}
/**
* This is the getter method this will return the attribute value.
* Unique object identifier of txn.
* @return txnUuid
*/
@VsoMethod
public String getTxnUuid() {
return txnUuid;
}
/**
* This is the setter method to the attribute.
* Unique object identifier of txn.
* @param txnUuid set the txnUuid.
*/
@VsoMethod
public void setTxnUuid(String txnUuid) {
this.txnUuid = txnUuid;
}
/**
* This is the getter method this will return the attribute value.
* Unique object identifier of the object.
* @return uuid
*/
@VsoMethod
public String getUuid() {
return uuid;
}
/**
* This is the setter method to the attribute.
* Unique object identifier of the object.
* @param uuid set the uuid.
*/
@VsoMethod
public void setUuid(String uuid) {
this.uuid = uuid;
}
/**
* This is the getter method this will return the attribute value.
* Placeholder for description of property vnic of obj type vsapicextension field type str type array.
* @return vnic
*/
@VsoMethod
public List<VsSeVnic> getVnic() {
return vnic;
}
/**
* This is the setter method. this will set the vnic
* Placeholder for description of property vnic of obj type vsapicextension field type str type array.
* @return vnic
*/
@VsoMethod
public void setVnic(List<VsSeVnic> vnic) {
this.vnic = vnic;
}
/**
* This is the setter method this will set the vnic
* Placeholder for description of property vnic of obj type vsapicextension field type str type array.
* @return vnic
*/
@VsoMethod
public VsApicExtension addVnicItem(VsSeVnic vnicItem) {
if (this.vnic == null) {
this.vnic = new ArrayList<VsSeVnic>();
}
this.vnic.add(vnicItem);
return this;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
VsApicExtension objVsApicExtension = (VsApicExtension) o;
return Objects.equals(this.uuid, objVsApicExtension.uuid)&&
Objects.equals(this.txnUuid, objVsApicExtension.txnUuid)&&
Objects.equals(this.seUuid, objVsApicExtension.seUuid)&&
Objects.equals(this.vnic, objVsApicExtension.vnic);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class VsApicExtension {\n");
sb.append(" seUuid: ").append(toIndentedString(seUuid)).append("\n");
sb.append(" txnUuid: ").append(toIndentedString(txnUuid)).append("\n");
sb.append(" uuid: ").append(toIndentedString(uuid)).append("\n");
sb.append(" vnic: ").append(toIndentedString(vnic)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
} |
Java | public final class FSErrorTestUtils {
private FSErrorTestUtils() {}
private static final MethodHandle FSERROR_NEW_INSTANCE_HANDLE;
static {
try {
Constructor<FSError> constructor = FSError.class.getDeclaredConstructor(Throwable.class);
constructor.setAccessible(true);
FSERROR_NEW_INSTANCE_HANDLE = MethodHandles.lookup().unreflectConstructor(constructor);
} catch (NoSuchMethodException | IllegalAccessException e) {
throw new RuntimeException(e);
}
}
/**
* Create a new {@code FSError} through reflection
* @param t
* @return
*/
public static FSError newFSError(Throwable t) {
// Thanks Hadoop for making it so difficult!
try {
return (FSError) FSERROR_NEW_INSTANCE_HANDLE.invokeExact(t);
} catch (Throwable e) {
throw Throwables.propagate(e);
}
}
/**
* Create dummy arguments for invoking the provided method
* @param method
* @return
*/
public static Object[] getDummyArguments(Method method) {
final Class<?>[] parameterTypes = method.getParameterTypes();
Object[] params = new Object[parameterTypes.length];
for(int i = 0; i<params.length; i++) {
final Class<?> parameterType = parameterTypes[i];
if (!parameterType.isPrimitive()) {
params[i] = null;
continue;
}
if (parameterType == boolean.class) {
params[i] = false;
} else if (parameterType == byte.class) {
params[i] = (byte) 0;
} else if (parameterType == char.class) {
params[i] = (char) 0;
} else if (parameterType == short.class) {
params[i] = (short) 0;
} else if (parameterType == int.class) {
params[i] = 0;
} else if (parameterType == long.class) {
params[i] = 0L;
} else if (parameterType == float.class) {
params[i] = 0f;
} else if (parameterType == double.class) {
params[i] = 0d;
}
}
return params;
}
} |
Java | public abstract class BaseRules implements Rules {
protected TableConfiguration tableConfiguration;
protected IntrospectedTable introspectedTable;
protected final boolean isModelOnly;
public BaseRules(IntrospectedTable introspectedTable) {
super();
this.introspectedTable = introspectedTable;
this.tableConfiguration = introspectedTable.getTableConfiguration();
String modelOnly = tableConfiguration.getProperty(PropertyRegistry.TABLE_MODEL_ONLY);
isModelOnly = StringUtility.isTrue(modelOnly);
}
/**
* Implements the rule for generating the insert SQL Map element and DAO method. If the insert statement is allowed,
* then generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateInsert() {
if (isModelOnly) {
return false;
}
return tableConfiguration.isInsertStatementEnabled();
}
/**
* Implements the rule for generating the insert selective SQL Map element and DAO method. If the insert statement
* is allowed, then generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateInsertSelective() {
if (isModelOnly) {
return false;
}
return tableConfiguration.isInsertStatementEnabled();
}
/**
* Calculates the class that contains all fields. This class is used as the insert statement parameter, as well as
* the returned value from the select by primary key method. The actual class depends on how the domain model is
* generated.
*
* @return the type of the class that holds all fields
*/
@Override
public FullyQualifiedJavaType calculateAllFieldsClass() {
String answer;
if (generateRecordWithBLOBsClass()) {
answer = introspectedTable.getRecordWithBLOBsType();
} else if (generateBaseRecordClass()) {
answer = introspectedTable.getBaseRecordType();
} else {
answer = introspectedTable.getPrimaryKeyType();
}
return new FullyQualifiedJavaType(answer);
}
/**
* Implements the rule for generating the update by primary key without BLOBs SQL Map element and DAO method. If the
* table has a primary key as well as other non-BLOB fields, and the updateByPrimaryKey statement is allowed, then
* generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateUpdateByPrimaryKeyWithoutBLOBs() {
if (isModelOnly) {
return false;
}
if (ListUtilities.removeGeneratedAlwaysColumns(introspectedTable.getBaseColumns()).isEmpty()) {
return false;
}
boolean rc = tableConfiguration.isUpdateByPrimaryKeyStatementEnabled()
&& introspectedTable.hasPrimaryKeyColumns() && introspectedTable.hasBaseColumns();
return rc;
}
/**
* Implements the rule for generating the update by primary key with BLOBs SQL Map element and DAO method. If the
* table has a primary key as well as other BLOB fields, and the updateByPrimaryKey statement is allowed, then
* generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateUpdateByPrimaryKeyWithBLOBs() {
if (isModelOnly) {
return false;
}
if (ListUtilities.removeGeneratedAlwaysColumns(introspectedTable.getNonPrimaryKeyColumns()).isEmpty()) {
return false;
}
boolean rc = tableConfiguration.isUpdateByPrimaryKeyStatementEnabled()
&& introspectedTable.hasPrimaryKeyColumns() && introspectedTable.hasBLOBColumns();
return rc;
}
/**
* Implements the rule for generating the update by primary key selective SQL Map element and DAO method. If the
* table has a primary key as well as other fields, and the updateByPrimaryKey statement is allowed, then generate
* the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateUpdateByPrimaryKeySelective() {
if (isModelOnly) {
return false;
}
if (ListUtilities.removeGeneratedAlwaysColumns(introspectedTable.getNonPrimaryKeyColumns()).isEmpty()) {
return false;
}
boolean rc = tableConfiguration.isUpdateByPrimaryKeyStatementEnabled()
&& introspectedTable.hasPrimaryKeyColumns()
&& (introspectedTable.hasBLOBColumns() || introspectedTable.hasBaseColumns());
return rc;
}
/**
* Implements the rule for generating the delete by primary key SQL Map element and DAO method. If the table has a
* primary key, and the deleteByPrimaryKey statement is allowed, then generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateDeleteByPrimaryKey() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isDeleteByPrimaryKeyStatementEnabled()
&& introspectedTable.hasPrimaryKeyColumns();
return rc;
}
/**
* Implements the rule for generating the delete by example SQL Map element and DAO method. If the deleteByExample
* statement is allowed, then generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateDeleteByExample() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isDeleteByExampleStatementEnabled();
return rc;
}
/**
* Implements the rule for generating the result map without BLOBs. If either select method is allowed, then
* generate the result map.
*
* @return true if the result map should be generated
*/
@Override
public boolean generateBaseResultMap() {
if (isModelOnly) {
return true;
}
boolean rc = tableConfiguration.isSelectByExampleStatementEnabled()
|| tableConfiguration.isSelectByPrimaryKeyStatementEnabled();
return rc;
}
/**
* Implements the rule for generating the result map with BLOBs. If the table has BLOB columns, and either select
* method is allowed, then generate the result map.
*
* @return true if the result map should be generated
*/
@Override
public boolean generateResultMapWithBLOBs() {
boolean rc;
if (introspectedTable.hasBLOBColumns()) {
if (isModelOnly) {
rc = true;
} else {
rc = tableConfiguration.isSelectByExampleStatementEnabled()
|| tableConfiguration.isSelectByPrimaryKeyStatementEnabled();
}
} else {
rc = false;
}
return rc;
}
/**
* Implements the rule for generating the SQL example where clause element.
*
* <p>
* In MyBatis3, generate the element if the selectByExample, deleteByExample, or countByExample statements are
* allowed.
*
* @return true if the SQL where clause element should be generated
*/
@Override
public boolean generateSQLExampleWhereClause() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isSelectByExampleStatementEnabled()
|| tableConfiguration.isDeleteByExampleStatementEnabled()
|| tableConfiguration.isCountByExampleStatementEnabled();
return rc;
}
/**
* Implements the rule for generating the SQL example where clause element specifically for use in the update by
* example methods.
*
* <p>
* In MyBatis3, generate the element if the updateByExample statements are allowed.
*
* @return true if the SQL where clause element should be generated
*/
@Override
public boolean generateMyBatis3UpdateByExampleWhereClause() {
if (isModelOnly) {
return false;
}
return introspectedTable.getTargetRuntime() == TargetRuntime.MYBATIS3
&& tableConfiguration.isUpdateByExampleStatementEnabled();
}
/**
* Implements the rule for generating the select by primary key SQL Map element and DAO method. If the table has a
* primary key as well as other fields, and the selectByPrimaryKey statement is allowed, then generate the element
* and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateSelectByPrimaryKey() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isSelectByPrimaryKeyStatementEnabled()
&& introspectedTable.hasPrimaryKeyColumns()
&& (introspectedTable.hasBaseColumns() || introspectedTable.hasBLOBColumns());
return rc;
}
/**
* Implements the rule for generating the select by example without BLOBs SQL Map element and DAO method. If the
* selectByExample statement is allowed, then generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateSelectByExampleWithoutBLOBs() {
if (isModelOnly) {
return false;
}
return tableConfiguration.isSelectByExampleStatementEnabled();
}
/**
* Implements the rule for generating the select by example with BLOBs SQL Map element and DAO method. If the table
* has BLOB fields and the selectByExample statement is allowed, then generate the element and method.
*
* @return true if the element and method should be generated
*/
@Override
public boolean generateSelectByExampleWithBLOBs() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isSelectByExampleStatementEnabled() && introspectedTable.hasBLOBColumns();
return rc;
}
@Override
public boolean generateSelectPaginationByExampleWithoutBLOBs() {
if (isModelOnly) {
return false;
}
return tableConfiguration.isSelectByExampleStatementEnabled();
}
@Override
public boolean generateSelectPaginationByExampleWithBLOBs() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isSelectByExampleStatementEnabled() && introspectedTable.hasBLOBColumns();
return rc;
}
/**
* Implements the rule for generating an example class. The class should be generated if the selectByExample or
* deleteByExample or countByExample methods are allowed.
*
* @return true if the example class should be generated
*/
@Override
public boolean generateExampleClass() {
if (introspectedTable.getContext().getSqlMapGeneratorConfiguration() == null
&& introspectedTable.getContext().getJavaClientGeneratorConfiguration() == null) {
// this is a model only context - don't generate the example class
return false;
}
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isSelectByExampleStatementEnabled()
|| tableConfiguration.isDeleteByExampleStatementEnabled()
|| tableConfiguration.isCountByExampleStatementEnabled()
|| tableConfiguration.isUpdateByExampleStatementEnabled();
return rc;
}
@Override
public boolean generateCountByExample() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isCountByExampleStatementEnabled();
return rc;
}
@Override
public boolean generateUpdateByExampleSelective() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isUpdateByExampleStatementEnabled();
return rc;
}
@Override
public boolean generateUpdateByExampleWithoutBLOBs() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isUpdateByExampleStatementEnabled()
&& (introspectedTable.hasPrimaryKeyColumns() || introspectedTable.hasBaseColumns());
return rc;
}
@Override
public boolean generateUpdateByExampleWithBLOBs() {
if (isModelOnly) {
return false;
}
boolean rc = tableConfiguration.isUpdateByExampleStatementEnabled() && introspectedTable.hasBLOBColumns();
return rc;
}
@Override
public IntrospectedTable getIntrospectedTable() {
return introspectedTable;
}
@Override
public boolean generateBaseColumnList() {
if (isModelOnly) {
return false;
}
return generateSelectByPrimaryKey() || generateSelectByExampleWithoutBLOBs();
}
@Override
public boolean generateBlobColumnList() {
if (isModelOnly) {
return false;
}
return introspectedTable.hasBLOBColumns() && (tableConfiguration.isSelectByExampleStatementEnabled()
|| tableConfiguration.isSelectByPrimaryKeyStatementEnabled());
}
@Override
public boolean generateJavaClient() {
return !isModelOnly;
}
} |
Java | public class BundlerInstallConfigurator extends AbstractRubyTaskConfigurator {
private static final Set<String> FIELDS_TO_COPY = Sets.newHashSet(
RUBY_KEY,
TaskConfigConstants.CFG_WORKING_SUB_DIRECTORY,
BundlerInstallTask.PATH,
BundlerInstallTask.ENVIRONMENT,
BundlerInstallTask.BIN_STUBS );
@Override
protected Set<String> getFieldsToCopy() {
return FIELDS_TO_COPY;
}
@Override
public void populateContextForCreate( @NotNull Map<String, Object> context ) {
super.populateContextForCreate( context );
// context.put( BundlerInstallTask.PATH, "vendor/bundle" );
}
} |
Java | public abstract class BaseChannelCount implements Serializable {
public static String REF = "ChannelCount";
public static String PROP_CHANNEL = "channel";
public static String PROP_VIEWS_MONTH = "viewsMonth";
public static String PROP_VIEWS_WEEK = "viewsWeek";
public static String PROP_VIEWS = "views";
public static String PROP_ID = "id";
public static String PROP_VIEWS_DAY = "viewsDay";
// constructors
public BaseChannelCount () {
initialize();
}
/**
* Constructor for primary key
*/
public BaseChannelCount (java.lang.Integer id) {
this.setId(id);
initialize();
}
/**
* Constructor for required fields
*/
public BaseChannelCount (
java.lang.Integer id,
java.lang.Integer views,
java.lang.Integer viewsMonth,
java.lang.Integer viewsWeek,
java.lang.Integer viewsDay) {
this.setId(id);
this.setViews(views);
this.setViewsMonth(viewsMonth);
this.setViewsWeek(viewsWeek);
this.setViewsDay(viewsDay);
initialize();
}
protected void initialize () {}
private int hashCode = Integer.MIN_VALUE;
// primary key
private java.lang.Integer id;
// fields
private java.lang.Integer views;
private java.lang.Integer viewsMonth;
private java.lang.Integer viewsWeek;
private java.lang.Integer viewsDay;
// one to one
private com.jeecms.cms.entity.main.Channel channel;
/**
* Return the unique identifier of this class
* @hibernate.id
* generator-class="foreign"
* column="channel_id"
*/
public java.lang.Integer getId () {
return id;
}
/**
* Set the unique identifier of this class
* @param id the new ID
*/
public void setId (java.lang.Integer id) {
this.id = id;
this.hashCode = Integer.MIN_VALUE;
}
/**
* Return the value associated with the column: views
*/
public java.lang.Integer getViews () {
return views;
}
/**
* Set the value related to the column: views
* @param views the views value
*/
public void setViews (java.lang.Integer views) {
this.views = views;
}
/**
* Return the value associated with the column: views_month
*/
public java.lang.Integer getViewsMonth () {
return viewsMonth;
}
/**
* Set the value related to the column: views_month
* @param viewsMonth the views_month value
*/
public void setViewsMonth (java.lang.Integer viewsMonth) {
this.viewsMonth = viewsMonth;
}
/**
* Return the value associated with the column: views_week
*/
public java.lang.Integer getViewsWeek () {
return viewsWeek;
}
/**
* Set the value related to the column: views_week
* @param viewsWeek the views_week value
*/
public void setViewsWeek (java.lang.Integer viewsWeek) {
this.viewsWeek = viewsWeek;
}
/**
* Return the value associated with the column: views_day
*/
public java.lang.Integer getViewsDay () {
return viewsDay;
}
/**
* Set the value related to the column: views_day
* @param viewsDay the views_day value
*/
public void setViewsDay (java.lang.Integer viewsDay) {
this.viewsDay = viewsDay;
}
/**
* Return the value associated with the column: channel
*/
public com.jeecms.cms.entity.main.Channel getChannel () {
return channel;
}
/**
* Set the value related to the column: channel
* @param channel the channel value
*/
public void setChannel (com.jeecms.cms.entity.main.Channel channel) {
this.channel = channel;
}
public boolean equals (Object obj) {
if (null == obj) return false;
if (!(obj instanceof com.jeecms.cms.entity.main.ChannelCount)) return false;
else {
com.jeecms.cms.entity.main.ChannelCount channelCount = (com.jeecms.cms.entity.main.ChannelCount) obj;
if (null == this.getId() || null == channelCount.getId()) return false;
else return (this.getId().equals(channelCount.getId()));
}
}
public int hashCode () {
if (Integer.MIN_VALUE == this.hashCode) {
if (null == this.getId()) return super.hashCode();
else {
String hashStr = this.getClass().getName() + ":" + this.getId().hashCode();
this.hashCode = hashStr.hashCode();
}
}
return this.hashCode;
}
public String toString () {
return super.toString();
}
} |
Java | public class NaiveBayes
implements Estimator<NaiveBayes, NaiveBayesModel>, NaiveBayesParams<NaiveBayes> {
private final Map<Param<?>, Object> paramMap = new HashMap<>();
public NaiveBayes() {
    // Seed paramMap with the default value of every parameter declared on this stage.
    ParamUtils.initializeMapWithDefaultValues(paramMap, this);
}
@Override
public NaiveBayesModel fit(Table... inputs) {
    // Trains from exactly one input table of (features, label) rows.
    Preconditions.checkArgument(inputs.length == 1);
    final String featuresCol = getFeaturesCol();
    final String labelCol = getLabelCol();
    final double smoothing = getSmoothing();
    StreamTableEnvironment tEnv =
            (StreamTableEnvironment) ((TableImpl) inputs[0]).getTableEnvironment();
    // Convert each row into (featureVector, labelValue); the label must be a
    // non-null, integral-valued number (i.e. an indexed class label).
    DataStream<Tuple2<Vector, Double>> input =
            tEnv.toDataStream(inputs[0])
                    .map(
                            new MapFunction<Row, Tuple2<Vector, Double>>() {
                                @Override
                                public Tuple2<Vector, Double> map(Row row) throws Exception {
                                    Number number = (Number) row.getField(labelCol);
                                    Preconditions.checkNotNull(
                                            number, "Input data should contain label value.");
                                    Preconditions.checkArgument(
                                            number.intValue() == number.doubleValue(),
                                            "Label value should be indexed number.");
                                    return new Tuple2<>(
                                            (Vector) row.getField(featuresCol),
                                            number.doubleValue());
                                }
                            });
    // Pipeline stages: explode rows into (label, featureIndex, featureValue)
    // triples; per (label, featureIndex) build featureValue->weight maps; gather
    // one label's maps into an array; finally build the model data with the
    // configured smoothing on a single (parallelism-1) task.
    DataStream<NaiveBayesModelData> modelData =
            input.flatMap(new ExtractFeatureFunction())
                    .keyBy(value -> new Tuple2<>(value.f0, value.f1).hashCode())
                    .transform(
                            "GenerateFeatureWeightMapFunction",
                            Types.TUPLE(
                                    Types.DOUBLE,
                                    Types.INT,
                                    Types.MAP(Types.DOUBLE, Types.DOUBLE),
                                    Types.INT),
                            new MapPartitionFunctionWrapper<>(
                                    new GenerateFeatureWeightMapFunction()))
                    .keyBy(value -> value.f0)
                    .transform(
                            "AggregateIntoArrayFunction",
                            Types.TUPLE(
                                    Types.DOUBLE,
                                    Types.INT,
                                    Types.OBJECT_ARRAY(Types.MAP(Types.DOUBLE, Types.DOUBLE))),
                            new MapPartitionFunctionWrapper<>(new AggregateIntoArrayFunction()))
                    .transform(
                            "GenerateModelFunction",
                            TypeInformation.of(NaiveBayesModelData.class),
                            new MapPartitionFunctionWrapper<>(
                                    new GenerateModelFunction(smoothing)))
                    .setParallelism(1);
    // Wrap the model data stream in a model stage carrying the same parameters.
    NaiveBayesModel model =
            new NaiveBayesModel()
                    .setModelData(NaiveBayesModelData.getModelDataTable(modelData));
    ReadWriteUtils.updateExistingParams(model, paramMap);
    return model;
}
@Override
public void save(String path) throws IOException {
    // Only stage metadata (parameters) is persisted; the estimator holds no model state.
    ReadWriteUtils.saveMetadata(this, path);
}
/**
 * Restores a NaiveBayes estimator whose parameters were written by {@code save}.
 * The {@code env} argument is unused here but required by the load convention.
 */
public static NaiveBayes load(StreamExecutionEnvironment env, String path) throws IOException {
    return ReadWriteUtils.loadStageParam(path);
}
@Override
public Map<Param<?>, Object> getParamMap() {
    // Live view of this stage's parameters, not a defensive copy.
    return paramMap;
}
/**
 * Explodes each (featureVector, label) pair into one record per feature.
 *
 * <p>Output records are tuples with the following fields in order:
 *
 * <ul>
 *   <li>label value
 *   <li>feature column index
 *   <li>feature value
 * </ul>
 */
private static class ExtractFeatureFunction
        implements FlatMapFunction<Tuple2<Vector, Double>, Tuple3<Double, Integer, Double>> {
    @Override
    public void flatMap(
            Tuple2<Vector, Double> value,
            Collector<Tuple3<Double, Integer, Double>> collector) {
        Preconditions.checkNotNull(value.f1);
        final Double label = value.f1;
        final Vector features = value.f0;
        final int numFeatures = features.size();
        for (int index = 0; index < numFeatures; index++) {
            collector.collect(new Tuple3<>(label, index, features.get(index)));
        }
    }
}
/**
 * Function that aggregates entries of feature value and weight into maps.
 *
 * <p>Input records should have the same label value and feature column index.
 *
 * <p>Input records are tuples with the following fields in order:
 *
 * <ul>
 *   <li>label value
 *   <li>feature column index
 *   <li>feature value
 * </ul>
 *
 * <p>Output records are tuples with the following fields in order:
 *
 * <ul>
 *   <li>label value
 *   <li>feature column index
 *   <li>map of (feature value, weight)
 *   <li>number of records
 * </ul>
 */
private static class GenerateFeatureWeightMapFunction
        implements MapPartitionFunction<
                Tuple3<Double, Integer, Double>,
                Tuple4<Double, Integer, Map<Double, Double>, Integer>> {

    @Override
    public void mapPartition(
            Iterable<Tuple3<Double, Integer, Double>> iterable,
            Collector<Tuple4<Double, Integer, Map<Double, Double>, Integer>> collector) {
        // Aggregate in a single pass over the iterable; the previous version
        // first copied the entire partition into a List for no benefit.
        Map<Tuple2<Double, Integer>, Map<Double, Double>> accMap = new HashMap<>();
        Map<Tuple2<Double, Integer>, Integer> numMap = new HashMap<>();
        for (Tuple3<Double, Integer, Double> value : iterable) {
            Tuple2<Double, Integer> key = new Tuple2<>(value.f0, value.f1);
            // Count occurrences of each distinct feature value under this key.
            Map<Double, Double> acc = accMap.computeIfAbsent(key, x -> new HashMap<>());
            acc.put(value.f2, acc.getOrDefault(value.f2, 0.) + 1.0);
            numMap.put(key, numMap.getOrDefault(key, 0) + 1);
        }
        for (Map.Entry<Tuple2<Double, Integer>, Map<Double, Double>> entry :
                accMap.entrySet()) {
            collector.collect(
                    new Tuple4<>(
                            entry.getKey().f0,
                            entry.getKey().f1,
                            entry.getValue(),
                            numMap.get(entry.getKey())));
        }
    }
}
/**
 * Function that aggregates maps under the same label into arrays.
 *
 * <p>Length of the generated array equals to the number of feature columns.
 *
 * <p>Input records are tuples with the following fields in order:
 *
 * <ul>
 *   <li>label value
 *   <li>feature column index
 *   <li>map of (feature value, weight)
 *   <li>number of records
 * </ul>
 *
 * <p>Output records are tuples with the following fields in order:
 *
 * <ul>
 *   <li>label value
 *   <li>number of records
 *   <li>array of featureValue-weight maps of each feature
 * </ul>
 */
private static class AggregateIntoArrayFunction
        implements MapPartitionFunction<
                Tuple4<Double, Integer, Map<Double, Double>, Integer>,
                Tuple3<Double, Integer, Map<Double, Double>[]>> {
    @Override
    public void mapPartition(
            Iterable<Tuple4<Double, Integer, Map<Double, Double>, Integer>> iterable,
            Collector<Tuple3<Double, Integer, Map<Double, Double>[]>> collector) {
        // Group the incoming tuples by label value (f0).
        Map<Double, List<Tuple4<Double, Integer, Map<Double, Double>, Integer>>> map =
                new HashMap<>();
        for (Tuple4<Double, Integer, Map<Double, Double>, Integer> value : iterable) {
            map.computeIfAbsent(value.f0, x -> new ArrayList<>()).add(value);
        }
        for (List<Tuple4<Double, Integer, Map<Double, Double>, Integer>> list : map.values()) {
            // Feature count = largest feature column index seen for this label, plus one.
            final int featureSize =
                    list.stream().map(x -> x.f1).max(Integer::compareTo).orElse(-1) + 1;
            // Every feature column of a label must report the same record count; a mismatch
            // means the original input vectors had unequal lengths.
            int minDocNum =
                    list.stream()
                            .map(x -> x.f3)
                            .min(Integer::compareTo)
                            .orElse(Integer.MAX_VALUE);
            int maxDocNum =
                    list.stream()
                            .map(x -> x.f3)
                            .max(Integer::compareTo)
                            .orElse(Integer.MIN_VALUE);
            Preconditions.checkArgument(
                    minDocNum == maxDocNum, "Feature vectors should be of equal length.");
            Map<Double, Integer> numMap = new HashMap<>();
            // Raw HashMap[] because Java cannot instantiate generic arrays directly.
            Map<Double, Map<Double, Double>[]> featureWeightMap = new HashMap<>();
            for (Tuple4<Double, Integer, Map<Double, Double>, Integer> value : list) {
                Map<Double, Double>[] featureWeight =
                        featureWeightMap.computeIfAbsent(
                                value.f0, x -> new HashMap[featureSize]);
                numMap.put(value.f0, value.f3);
                featureWeight[value.f1] = value.f2;
            }
            // Each grouped list holds a single label, so these maps have exactly one key.
            for (double key : featureWeightMap.keySet()) {
                collector.collect(
                        new Tuple3<>(key, numMap.get(key), featureWeightMap.get(key)));
            }
        }
    }
}
/**
 * Function to generate Naive Bayes model data.
 *
 * <p>Consumes one (label, recordCount, featureValue-&gt;weight map array) tuple per label and
 * emits a single {@code NaiveBayesModelData} holding the smoothed log class priors and log
 * conditional probabilities.
 */
private static class GenerateModelFunction
        implements MapPartitionFunction<
                Tuple3<Double, Integer, Map<Double, Double>[]>, NaiveBayesModelData> {
    /** Additive (Laplace) smoothing factor applied to counts before taking logs. */
    private final double smoothing;

    private GenerateModelFunction(double smoothing) {
        this.smoothing = smoothing;
    }

    @Override
    public void mapPartition(
            Iterable<Tuple3<Double, Integer, Map<Double, Double>[]>> iterable,
            Collector<NaiveBayesModelData> collector) {
        ArrayList<Tuple3<Double, Integer, Map<Double, Double>[]>> list = new ArrayList<>();
        iterable.iterator().forEachRemaining(list::add);
        if (list.isEmpty()) {
            // No labels reached this partition (empty training input). Emit nothing instead
            // of failing on list.get(0) below with an IndexOutOfBoundsException.
            return;
        }
        // All labels must agree on the number of feature columns.
        final int featureSize = list.get(0).f2.length;
        for (Tuple3<Double, Integer, Map<Double, Double>[]> tup : list) {
            Preconditions.checkArgument(
                    featureSize == tup.f2.length, "Feature vectors should be of equal length.");
        }
        // numDocs[i]: record count summed over all labels for feature column i.
        double[] numDocs = new double[featureSize];
        // categoryNumbers[i]: distinct feature values observed in column i across all labels.
        HashSet<Double>[] categoryNumbers = new HashSet[featureSize];
        for (int i = 0; i < featureSize; i++) {
            categoryNumbers[i] = new HashSet<>();
        }
        for (Tuple3<Double, Integer, Map<Double, Double>[]> tup : list) {
            for (int i = 0; i < featureSize; i++) {
                numDocs[i] += tup.f1;
                categoryNumbers[i].addAll(tup.f2[i].keySet());
            }
        }
        int[] categoryNumber = new int[featureSize];
        double piLog = 0;
        int numLabels = list.size();
        for (int i = 0; i < featureSize; i++) {
            categoryNumber[i] = categoryNumbers[i].size();
            piLog += numDocs[i];
        }
        // Denominator of the smoothed class prior, in log space.
        piLog = Math.log(piLog + numLabels * smoothing);
        Map<Double, Double>[][] theta = new HashMap[numLabels][featureSize];
        double[] piArray = new double[numLabels];
        double[] labels = new double[numLabels];
        // Consider smoothing.
        for (int i = 0; i < numLabels; i++) {
            Map<Double, Double>[] param = list.get(i).f2;
            for (int j = 0; j < featureSize; j++) {
                Map<Double, Double> squareData = new HashMap<>();
                double thetaLog =
                        Math.log(list.get(i).f1 * 1.0 + smoothing * categoryNumber[j]);
                for (Double cate : categoryNumbers[j]) {
                    double value = param[j].getOrDefault(cate, 0.0);
                    // Smoothed log conditional probability of this feature value given label i.
                    squareData.put(cate, Math.log(value + smoothing) - thetaLog);
                }
                theta[i][j] = squareData;
            }
            labels[i] = list.get(i).f0;
            double weightSum = list.get(i).f1 * featureSize;
            piArray[i] = Math.log(weightSum + smoothing) - piLog;
        }
        NaiveBayesModelData modelData = new NaiveBayesModelData(theta, piArray, labels);
        collector.collect(modelData);
    }
}
} |
/**
 * Function to extract feature values from input rows.
 *
 * <p>Emits one tuple per feature dimension, containing in order: the label value, the feature
 * column index, and the feature value at that index.
 */
private static class ExtractFeatureFunction
        implements FlatMapFunction<Tuple2<Vector, Double>, Tuple3<Double, Integer, Double>> {
    @Override
    public void flatMap(
            Tuple2<Vector, Double> value,
            Collector<Tuple3<Double, Integer, Double>> collector) {
        // The label (f1) is required; fail fast on unlabeled rows.
        Preconditions.checkNotNull(value.f1);
        for (int i = 0; i < value.f0.size(); i++) {
            collector.collect(new Tuple3<>(value.f1, i, value.f0.get(i)));
        }
    }
}
/**
 * Aggregates records that share a (label, feature index) key into a featureValue -&gt; weight
 * map plus a record count.
 *
 * <p>Input tuples carry, in order: label value, feature column index, feature value. Output
 * tuples carry, in order: label value, feature column index, map of (feature value,
 * accumulated weight), number of records aggregated for that key.
 */
private static class GenerateFeatureWeightMapFunction
        implements MapPartitionFunction<
                Tuple3<Double, Integer, Double>,
                Tuple4<Double, Integer, Map<Double, Double>, Integer>> {
    @Override
    public void mapPartition(
            Iterable<Tuple3<Double, Integer, Double>> iterable,
            Collector<Tuple4<Double, Integer, Map<Double, Double>, Integer>> collector) {
        // Materialize the partition, then aggregate in a single pass.
        List<Tuple3<Double, Integer, Double>> list = new ArrayList<>();
        iterable.iterator().forEachRemaining(list::add);
        // accMap: (label, featureIndex) -> (featureValue -> accumulated weight).
        Map<Tuple2<Double, Integer>, Map<Double, Double>> accMap = new HashMap<>();
        // numMap: (label, featureIndex) -> number of records seen.
        Map<Tuple2<Double, Integer>, Integer> numMap = new HashMap<>();
        for (Tuple3<Double, Integer, Double> value : list) {
            Tuple2<Double, Integer> key = new Tuple2<>(value.f0, value.f1);
            Map<Double, Double> acc = accMap.computeIfAbsent(key, x -> new HashMap<>());
            // Each record contributes a fixed weight of 1.0 to its feature value.
            acc.put(value.f2, acc.getOrDefault(value.f2, 0.) + 1.0);
            numMap.put(key, numMap.getOrDefault(key, 0) + 1);
        }
        for (Map.Entry<Tuple2<Double, Integer>, Map<Double, Double>> entry :
                accMap.entrySet()) {
            collector.collect(
                    new Tuple4<>(
                            entry.getKey().f0,
                            entry.getKey().f1,
                            entry.getValue(),
                            numMap.get(entry.getKey())));
        }
    }
}
/**
 * Aggregates per-(label, featureIndex) weight maps into one array per label.
 *
 * <p>The array length equals the number of feature columns. Output tuples carry, in order:
 * label value, number of records, and the array of featureValue-weight maps (one per feature
 * column).
 */
private static class AggregateIntoArrayFunction
        implements MapPartitionFunction<
                Tuple4<Double, Integer, Map<Double, Double>, Integer>,
                Tuple3<Double, Integer, Map<Double, Double>[]>> {
    @Override
    public void mapPartition(
            Iterable<Tuple4<Double, Integer, Map<Double, Double>, Integer>> iterable,
            Collector<Tuple3<Double, Integer, Map<Double, Double>[]>> collector) {
        // Group the incoming tuples by label value (f0).
        Map<Double, List<Tuple4<Double, Integer, Map<Double, Double>, Integer>>> map =
                new HashMap<>();
        for (Tuple4<Double, Integer, Map<Double, Double>, Integer> value : iterable) {
            map.computeIfAbsent(value.f0, x -> new ArrayList<>()).add(value);
        }
        for (List<Tuple4<Double, Integer, Map<Double, Double>, Integer>> list : map.values()) {
            // Feature count = largest feature column index seen for this label, plus one.
            final int featureSize =
                    list.stream().map(x -> x.f1).max(Integer::compareTo).orElse(-1) + 1;
            // Every feature column of a label must report the same record count; a mismatch
            // means the original input vectors had unequal lengths.
            int minDocNum =
                    list.stream()
                            .map(x -> x.f3)
                            .min(Integer::compareTo)
                            .orElse(Integer.MAX_VALUE);
            int maxDocNum =
                    list.stream()
                            .map(x -> x.f3)
                            .max(Integer::compareTo)
                            .orElse(Integer.MIN_VALUE);
            Preconditions.checkArgument(
                    minDocNum == maxDocNum, "Feature vectors should be of equal length.");
            Map<Double, Integer> numMap = new HashMap<>();
            // Raw HashMap[] because Java cannot instantiate generic arrays directly.
            Map<Double, Map<Double, Double>[]> featureWeightMap = new HashMap<>();
            for (Tuple4<Double, Integer, Map<Double, Double>, Integer> value : list) {
                Map<Double, Double>[] featureWeight =
                        featureWeightMap.computeIfAbsent(
                                value.f0, x -> new HashMap[featureSize]);
                numMap.put(value.f0, value.f3);
                featureWeight[value.f1] = value.f2;
            }
            // Each grouped list holds a single label, so these maps have exactly one key.
            for (double key : featureWeightMap.keySet()) {
                collector.collect(
                        new Tuple3<>(key, numMap.get(key), featureWeightMap.get(key)));
            }
        }
    }
}
/** Function to generate Naive Bayes model data from per-label feature weight arrays. */
private static class GenerateModelFunction
        implements MapPartitionFunction<
                Tuple3<Double, Integer, Map<Double, Double>[]>, NaiveBayesModelData> {
    /** Additive (Laplace) smoothing factor applied to counts before taking logs. */
    private final double smoothing;

    private GenerateModelFunction(double smoothing) {
        this.smoothing = smoothing;
    }

    @Override
    public void mapPartition(
            Iterable<Tuple3<Double, Integer, Map<Double, Double>[]>> iterable,
            Collector<NaiveBayesModelData> collector) {
        ArrayList<Tuple3<Double, Integer, Map<Double, Double>[]>> list = new ArrayList<>();
        iterable.iterator().forEachRemaining(list::add);
        // NOTE(review): assumes at least one tuple reaches this partition; list.get(0)
        // throws IndexOutOfBoundsException on empty input — confirm upstream guarantees.
        final int featureSize = list.get(0).f2.length;
        // All labels must agree on the number of feature columns.
        for (Tuple3<Double, Integer, Map<Double, Double>[]> tup : list) {
            Preconditions.checkArgument(
                    featureSize == tup.f2.length, "Feature vectors should be of equal length.");
        }
        // numDocs[i]: record count summed over all labels for feature column i.
        double[] numDocs = new double[featureSize];
        // categoryNumbers[i]: distinct feature values observed in column i across all labels.
        HashSet<Double>[] categoryNumbers = new HashSet[featureSize];
        for (int i = 0; i < featureSize; i++) {
            categoryNumbers[i] = new HashSet<>();
        }
        for (Tuple3<Double, Integer, Map<Double, Double>[]> tup : list) {
            for (int i = 0; i < featureSize; i++) {
                numDocs[i] += tup.f1;
                categoryNumbers[i].addAll(tup.f2[i].keySet());
            }
        }
        int[] categoryNumber = new int[featureSize];
        double piLog = 0;
        int numLabels = list.size();
        for (int i = 0; i < featureSize; i++) {
            categoryNumber[i] = categoryNumbers[i].size();
            piLog += numDocs[i];
        }
        // Denominator of the smoothed class prior, in log space.
        piLog = Math.log(piLog + numLabels * smoothing);
        Map<Double, Double>[][] theta = new HashMap[numLabels][featureSize];
        double[] piArray = new double[numLabels];
        double[] labels = new double[numLabels];
        // Consider smoothing.
        for (int i = 0; i < numLabels; i++) {
            Map<Double, Double>[] param = list.get(i).f2;
            for (int j = 0; j < featureSize; j++) {
                Map<Double, Double> squareData = new HashMap<>();
                double thetaLog =
                        Math.log(list.get(i).f1 * 1.0 + smoothing * categoryNumber[j]);
                for (Double cate : categoryNumbers[j]) {
                    double value = param[j].getOrDefault(cate, 0.0);
                    // Smoothed log conditional probability of this feature value given label i.
                    squareData.put(cate, Math.log(value + smoothing) - thetaLog);
                }
                theta[i][j] = squareData;
            }
            labels[i] = list.get(i).f0;
            double weightSum = list.get(i).f1 * featureSize;
            piArray[i] = Math.log(weightSum + smoothing) - piLog;
        }
        NaiveBayesModelData modelData = new NaiveBayesModelData(theta, piArray, labels);
        collector.collect(modelData);
    }
}
/**
 * Data transfer object describing a message producer.
 *
 * <p>NOTE(review): the {@code @Column} aliases indicate these fields are persistence-mapped;
 * renaming a field or alias requires a matching mapping/schema change — verify before editing.
 */
public class ProducerDTO extends BaseDTO {
    /** Producer identifier. */
    private String id;
    /** Namespace the producer belongs to. */
    private String namespace;
    /** Topic the producer writes to. */
    private String topic;
    /** Application name of the producer. */
    private String app;
    /** Client type code; the meaning of each byte value is defined elsewhere — confirm. */
    @Column(alias = "client_type")
    private Byte clientType;
    /** Serialized produce policy; format not visible here — presumably JSON, confirm. */
    @Column(alias = "produce_policy")
    private String producePolicy;
    /** Serialized limit policy; format not visible here — presumably JSON, confirm. */
    @Column(alias = "limit_policy")
    private String limitPolicy;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getNamespace() {
        return namespace;
    }

    public void setNamespace(String namespace) {
        this.namespace = namespace;
    }

    public String getTopic() {
        return topic;
    }

    public void setTopic(String topic) {
        this.topic = topic;
    }

    public String getApp() {
        return app;
    }

    public void setApp(String app) {
        this.app = app;
    }

    public Byte getClientType() {
        return clientType;
    }

    public void setClientType(Byte clientType) {
        this.clientType = clientType;
    }

    public void setProducePolicy(String producePolicy) {
        this.producePolicy = producePolicy;
    }

    public String getProducePolicy() {
        return producePolicy;
    }

    public String getLimitPolicy() {
        return limitPolicy;
    }

    public void setLimitPolicy(String limitPolicy) {
        this.limitPolicy = limitPolicy;
    }
}
/**
 * A user that orders naturally by age, ascending.
 *
 * <p>NOTE(review): this ordering is not consistent with {@code equals} (which is not
 * overridden) — distinct users with equal ages compare as 0, so avoid using instances in
 * sorted sets/maps unless that is intended.
 */
public class UserComparable implements Comparable<UserComparable> {
    /**
     * The user's name.
     */
    String name;
    /**
     * The user's age.
     */
    Integer age;

    /**
     * Default constructor.
     * @param name - user's name.
     * @param age - user's age.
     */
    public UserComparable(String name, Integer age) {
        this.name = name;
        this.age = age;
    }

    /**
     * Compares this user with the specified user by age.
     * @throws NullPointerException if {@code o} is null.
     * @param o the object to be compared.
     * @return a negative integer, zero, or a positive integer as this user's age is less
     * than, equal to, or greater than the specified user's age.
     */
    @Override
    public int compareTo(UserComparable o) {
        // Guard clause first: fail fast with a descriptive message instead of nesting the
        // success path and throwing a bare NPE at the end of the method.
        if (o == null) {
            throw new NullPointerException("cannot compare to a null UserComparable");
        }
        return this.age.compareTo(o.age);
    }
}
/**
 * NOTE(review): every method body here throws {@code RuntimeException} and Scala artifacts
 * such as {@code FileEntry$.MODULE$} are present — this appears to be a generated Java API
 * stub mirroring the Scala class {@code org.apache.spark.sql.execution.streaming.FileStreamSource};
 * confirm against the Scala sources before treating these bodies as real behavior.
 */
public class FileStreamSource implements org.apache.spark.sql.execution.streaming.Source, org.apache.spark.internal.Logging {
  /** One entry of the file-stream log: a file path plus its timestamp and batch id. */
  static public class FileEntry implements scala.Serializable, scala.Product {
    public java.lang.String path () { throw new RuntimeException(); }
    public long timestamp () { throw new RuntimeException(); }
    public long batchId () { throw new RuntimeException(); }
    // not preceding
    public FileEntry (java.lang.String path, long timestamp, long batchId) { throw new RuntimeException(); }
  }
  /** Stub of the Scala companion object for {@code FileEntry}. */
  static public class FileEntry$ extends scala.runtime.AbstractFunction3<java.lang.String, java.lang.Object, java.lang.Object, org.apache.spark.sql.execution.streaming.FileStreamSource.FileEntry> implements scala.Serializable {
    /**
     * Static reference to the singleton instance of this Scala object.
     */
    public static final FileEntry$ MODULE$ = null;
    public FileEntry$ () { throw new RuntimeException(); }
  }
  /**
   * A custom hash map used to track the list of files seen. This map is not thread-safe.
   * <p>
   * To prevent the hash map from growing indefinitely, a purge function is available to
   * remove files "maxAgeMs" older than the latest file.
   */
  static public class SeenFilesMap {
    public SeenFilesMap (long maxAgeMs, boolean fileNameOnly) { throw new RuntimeException(); }
    /** Add a new file to the map. */
    public void add (java.lang.String path, long timestamp) { throw new RuntimeException(); }
    /**
     * Returns true if we should consider this file a new file. The file is only considered "new"
     * if it is new enough that we are still tracking, and we have not seen it before.
     * @param path (undocumented)
     * @param timestamp (undocumented)
     * @return (undocumented)
     */
    public boolean isNewFile (java.lang.String path, long timestamp) { throw new RuntimeException(); }
    /** Timestamp for the last purge operation. */
    private long lastPurgeTimestamp () { throw new RuntimeException(); }
    /** Timestamp of the latest file. */
    private long latestTimestamp () { throw new RuntimeException(); }
    /** Mapping from file to its timestamp. */
    private java.util.HashMap<java.lang.String, java.lang.Object> map () { throw new RuntimeException(); }
    /** Removes aged entries and returns the number of files removed. */
    public int purge () { throw new RuntimeException(); }
    public int size () { throw new RuntimeException(); }
    private java.lang.String stripPathIfNecessary (java.lang.String path) { throw new RuntimeException(); }
  }
  // Logging facade stubs inherited from org.apache.spark.internal.Logging.
  static protected java.lang.String logName () { throw new RuntimeException(); }
  static protected org.slf4j.Logger log () { throw new RuntimeException(); }
  static protected void logInfo (scala.Function0<java.lang.String> msg) { throw new RuntimeException(); }
  static protected void logDebug (scala.Function0<java.lang.String> msg) { throw new RuntimeException(); }
  static protected void logTrace (scala.Function0<java.lang.String> msg) { throw new RuntimeException(); }
  static protected void logWarning (scala.Function0<java.lang.String> msg) { throw new RuntimeException(); }
  static protected void logError (scala.Function0<java.lang.String> msg) { throw new RuntimeException(); }
  static protected void logInfo (scala.Function0<java.lang.String> msg, java.lang.Throwable throwable) { throw new RuntimeException(); }
  static protected void logDebug (scala.Function0<java.lang.String> msg, java.lang.Throwable throwable) { throw new RuntimeException(); }
  static protected void logTrace (scala.Function0<java.lang.String> msg, java.lang.Throwable throwable) { throw new RuntimeException(); }
  static protected void logWarning (scala.Function0<java.lang.String> msg, java.lang.Throwable throwable) { throw new RuntimeException(); }
  static protected void logError (scala.Function0<java.lang.String> msg, java.lang.Throwable throwable) { throw new RuntimeException(); }
  static protected boolean isTraceEnabled () { throw new RuntimeException(); }
  static protected void initializeLogIfNecessary (boolean isInterpreter) { throw new RuntimeException(); }
  static protected boolean initializeLogIfNecessary (boolean isInterpreter, boolean silent) { throw new RuntimeException(); }
  static protected boolean initializeLogIfNecessary$default$2 () { throw new RuntimeException(); }
  public org.apache.spark.sql.types.StructType schema () { throw new RuntimeException(); }
  // not preceding
  public FileStreamSource (org.apache.spark.sql.SparkSession sparkSession, java.lang.String path, java.lang.String fileFormatClassName, org.apache.spark.sql.types.StructType schema, scala.collection.Seq<java.lang.String> partitionColumns, java.lang.String metadataPath, scala.collection.immutable.Map<java.lang.String, java.lang.String> options) { throw new RuntimeException(); }
  private org.apache.spark.sql.execution.streaming.FileStreamOptions sourceOptions () { throw new RuntimeException(); }
  private org.apache.hadoop.conf.Configuration hadoopConf () { throw new RuntimeException(); }
  private org.apache.hadoop.fs.FileSystem fs () { throw new RuntimeException(); }
  private org.apache.hadoop.fs.Path qualifiedBasePath () { throw new RuntimeException(); }
  private scala.collection.immutable.Map<java.lang.String, java.lang.String> optionsWithPartitionBasePath () { throw new RuntimeException(); }
  private org.apache.spark.sql.execution.streaming.FileStreamSourceLog metadataLog () { throw new RuntimeException(); }
  private long metadataLogCurrentOffset () { throw new RuntimeException(); }
  /** Maximum number of new files to be considered in each batch */
  private scala.Option<java.lang.Object> maxFilesPerBatch () { throw new RuntimeException(); }
  private scala.math.Ordering<java.lang.Object> fileSortOrder () { throw new RuntimeException(); }
  private long maxFileAgeMs () { throw new RuntimeException(); }
  private boolean fileNameOnly () { throw new RuntimeException(); }
  /** A mapping from a file that we have processed to some timestamp it was last modified. */
  public org.apache.spark.sql.execution.streaming.FileStreamSource.SeenFilesMap seenFiles () { throw new RuntimeException(); }
  /**
   * Returns the maximum offset that can be retrieved from the source.
   * <p>
   * <code>synchronized</code> on this method is for solving race conditions in tests. In the normal usage,
   * there is no race here, so the cost of <code>synchronized</code> should be rare.
   * @return (undocumented)
   */
  private org.apache.spark.sql.execution.streaming.FileStreamSourceOffset fetchMaxOffset () { throw new RuntimeException(); }
  /**
   * For test only. Run <code>func</code> with the internal lock to make sure when <code>func</code> is running,
   * the current offset won't be changed and no new batch will be emitted.
   * @param func (undocumented)
   * @return (undocumented)
   */
  public <T extends java.lang.Object> T withBatchingLocked (scala.Function0<T> func) { throw new RuntimeException(); }
  /** Return the latest offset in the {@link FileStreamSourceLog} */
  public long currentLogOffset () { throw new RuntimeException(); }
  /**
   * Returns the data that is between the offsets (<code>start</code>, <code>end</code>].
   * @param start (undocumented)
   * @param end (undocumented)
   * @return (undocumented)
   */
  public org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> getBatch (scala.Option<org.apache.spark.sql.execution.streaming.Offset> start, org.apache.spark.sql.execution.streaming.Offset end) { throw new RuntimeException(); }
  /**
   * If the source has a metadata log indicating which files should be read, then we should use it.
   * Only when user gives a non-glob path that will we figure out whether the source has some
   * metadata log
   * <p>
   * None means we don't know at the moment
   * Some(true) means we know for sure the source DOES have metadata
   * Some(false) means we know for sure the source DOES NOT have metadata
   * @return (undocumented)
   */
  scala.Option<java.lang.Object> sourceHasMetadata () { throw new RuntimeException(); }
  private scala.collection.Seq<org.apache.hadoop.fs.FileStatus> allFilesUsingInMemoryFileIndex () { throw new RuntimeException(); }
  private scala.collection.Seq<org.apache.hadoop.fs.FileStatus> allFilesUsingMetadataLogFileIndex () { throw new RuntimeException(); }
  /**
   * Returns a list of files found, sorted by their timestamp.
   * @return (undocumented)
   */
  private scala.collection.Seq<scala.Tuple2<java.lang.String, java.lang.Object>> fetchAllFiles () { throw new RuntimeException(); }
  public scala.Option<org.apache.spark.sql.execution.streaming.Offset> getOffset () { throw new RuntimeException(); }
  public java.lang.String toString () { throw new RuntimeException(); }
  /**
   * Informs the source that Spark has completed processing all data for offsets less than or
   * equal to <code>end</code> and will only request offsets greater than <code>end</code> in the future.
   * @param end (undocumented)
   */
  public void commit (org.apache.spark.sql.execution.streaming.Offset end) { throw new RuntimeException(); }
  public void stop () { throw new RuntimeException(); }
}
/**
 * A custom hash map used to track the list of files seen.
 *
 * <p>NOTE(review): all bodies throw {@code RuntimeException} — this appears to be a generated
 * API stub; confirm against the real (Scala) implementation. The enclosing source documents
 * this map as NOT thread-safe, with entries older than {@code maxAgeMs} relative to the latest
 * file removable via {@link #purge()} to keep the map bounded.
 */
static public class SeenFilesMap {
  public SeenFilesMap (long maxAgeMs, boolean fileNameOnly) { throw new RuntimeException(); }
  /** Add a new file to the map. */
  public void add (java.lang.String path, long timestamp) { throw new RuntimeException(); }
  /**
   * Returns true if we should consider this file a new file. The file is only considered "new"
   * if it is new enough that we are still tracking, and we have not seen it before.
   * @param path (undocumented)
   * @param timestamp (undocumented)
   * @return (undocumented)
   */
  public boolean isNewFile (java.lang.String path, long timestamp) { throw new RuntimeException(); }
  /** Timestamp for the last purge operation. */
  private long lastPurgeTimestamp () { throw new RuntimeException(); }
  /** Timestamp of the latest file. */
  private long latestTimestamp () { throw new RuntimeException(); }
  /** Mapping from file to its timestamp. */
  private java.util.HashMap<java.lang.String, java.lang.Object> map () { throw new RuntimeException(); }
  /** Removes aged entries and returns the number of files removed. */
  public int purge () { throw new RuntimeException(); }
  public int size () { throw new RuntimeException(); }
  private java.lang.String stripPathIfNecessary (java.lang.String path) { throw new RuntimeException(); }
}
/**
 * NOTE(review): EMF-generated implementation of the {@code EventUsage} model object. All
 * members are tagged {@code @generated} and will be overwritten when the code is regenerated
 * from the model, so behavioral changes belong in the Ecore model, not in this file.
 */
public class EventUsageImpl extends EObjectImpl implements EventUsage {
    /**
     * The default value of the '{@link #getNotes() <em>Notes</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getNotes()
     * @generated
     * @ordered
     */
    protected static final String NOTES_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getNotes() <em>Notes</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getNotes()
     * @generated
     * @ordered
     */
    protected String notes = NOTES_EDEFAULT;
    /**
     * The cached value of the '{@link #getRequestResources() <em>Request Resources</em>}' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getRequestResources()
     * @generated
     * @ordered
     */
    protected EList<String> requestResources;
    /**
     * The cached value of the '{@link #getRequestAggregations() <em>Request Aggregations</em>}' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getRequestAggregations()
     * @generated
     * @ordered
     */
    protected EList<String> requestAggregations;
    /**
     * The cached value of the '{@link #getResponseResources() <em>Response Resources</em>}' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getResponseResources()
     * @generated
     * @ordered
     */
    protected EList<String> responseResources;
    /**
     * The cached value of the '{@link #getResponseAggregations() <em>Response Aggregations</em>}' attribute list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getResponseAggregations()
     * @generated
     * @ordered
     */
    protected EList<String> responseAggregations;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected EventUsageImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return FhirPackage.Literals.EVENT_USAGE;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getNotes() {
        return notes;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void setNotes(String newNotes) {
        String oldNotes = notes;
        notes = newNotes;
        // Notify EMF adapters of the attribute change only when someone is listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, FhirPackage.EVENT_USAGE__NOTES, oldNotes, notes));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EList<String> getRequestResources() {
        // Lazily created; the returned list is live and notifies on mutation.
        if (requestResources == null) {
            requestResources = new EDataTypeUniqueEList<String>(String.class, this, FhirPackage.EVENT_USAGE__REQUEST_RESOURCES);
        }
        return requestResources;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EList<String> getRequestAggregations() {
        if (requestAggregations == null) {
            requestAggregations = new EDataTypeUniqueEList<String>(String.class, this, FhirPackage.EVENT_USAGE__REQUEST_AGGREGATIONS);
        }
        return requestAggregations;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EList<String> getResponseResources() {
        if (responseResources == null) {
            responseResources = new EDataTypeUniqueEList<String>(String.class, this, FhirPackage.EVENT_USAGE__RESPONSE_RESOURCES);
        }
        return responseResources;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EList<String> getResponseAggregations() {
        if (responseAggregations == null) {
            responseAggregations = new EDataTypeUniqueEList<String>(String.class, this, FhirPackage.EVENT_USAGE__RESPONSE_AGGREGATIONS);
        }
        return responseAggregations;
    }
    // Reflective feature accessor used by the EMF runtime.
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case FhirPackage.EVENT_USAGE__NOTES:
                return getNotes();
            case FhirPackage.EVENT_USAGE__REQUEST_RESOURCES:
                return getRequestResources();
            case FhirPackage.EVENT_USAGE__REQUEST_AGGREGATIONS:
                return getRequestAggregations();
            case FhirPackage.EVENT_USAGE__RESPONSE_RESOURCES:
                return getResponseResources();
            case FhirPackage.EVENT_USAGE__RESPONSE_AGGREGATIONS:
                return getResponseAggregations();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    // Reflective feature mutator used by the EMF runtime.
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case FhirPackage.EVENT_USAGE__NOTES:
                setNotes((String)newValue);
                return;
            case FhirPackage.EVENT_USAGE__REQUEST_RESOURCES:
                getRequestResources().clear();
                getRequestResources().addAll((Collection<? extends String>)newValue);
                return;
            case FhirPackage.EVENT_USAGE__REQUEST_AGGREGATIONS:
                getRequestAggregations().clear();
                getRequestAggregations().addAll((Collection<? extends String>)newValue);
                return;
            case FhirPackage.EVENT_USAGE__RESPONSE_RESOURCES:
                getResponseResources().clear();
                getResponseResources().addAll((Collection<? extends String>)newValue);
                return;
            case FhirPackage.EVENT_USAGE__RESPONSE_AGGREGATIONS:
                getResponseAggregations().clear();
                getResponseAggregations().addAll((Collection<? extends String>)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    // Resets a feature to its default value.
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case FhirPackage.EVENT_USAGE__NOTES:
                setNotes(NOTES_EDEFAULT);
                return;
            case FhirPackage.EVENT_USAGE__REQUEST_RESOURCES:
                getRequestResources().clear();
                return;
            case FhirPackage.EVENT_USAGE__REQUEST_AGGREGATIONS:
                getRequestAggregations().clear();
                return;
            case FhirPackage.EVENT_USAGE__RESPONSE_RESOURCES:
                getResponseResources().clear();
                return;
            case FhirPackage.EVENT_USAGE__RESPONSE_AGGREGATIONS:
                getResponseAggregations().clear();
                return;
        }
        super.eUnset(featureID);
    }
    // Reports whether a feature differs from its default value.
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case FhirPackage.EVENT_USAGE__NOTES:
                return NOTES_EDEFAULT == null ? notes != null : !NOTES_EDEFAULT.equals(notes);
            case FhirPackage.EVENT_USAGE__REQUEST_RESOURCES:
                return requestResources != null && !requestResources.isEmpty();
            case FhirPackage.EVENT_USAGE__REQUEST_AGGREGATIONS:
                return requestAggregations != null && !requestAggregations.isEmpty();
            case FhirPackage.EVENT_USAGE__RESPONSE_RESOURCES:
                return responseResources != null && !responseResources.isEmpty();
            case FhirPackage.EVENT_USAGE__RESPONSE_AGGREGATIONS:
                return responseAggregations != null && !responseAggregations.isEmpty();
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (notes: ");
        result.append(notes);
        result.append(", requestResources: ");
        result.append(requestResources);
        result.append(", requestAggregations: ");
        result.append(requestAggregations);
        result.append(", responseResources: ");
        result.append(responseResources);
        result.append(", responseAggregations: ");
        result.append(responseAggregations);
        result.append(')');
        return result.toString();
    }
}
Java | @Slf4j
@ControllerAdvice
@SuppressWarnings("unused")
@Priority(HIGHEST_PRECEDENCE)
public class FeignExceptionHandler {
private Messages messages;
@Autowired
public FeignExceptionHandler(Messages messages) {
this.messages = messages;
}
/**
* Re-maps FeignExceptions to ExceptionResponse.
* <p>
* Currently, we want to remap only 404s and hide others by presenting a general 500.
*/
@ResponseBody
@ExceptionHandler(FeignException.class)
public ExceptionResponse handleFeignException(FeignException exception, HttpServletResponse response) {
UUID uuid = UUID.randomUUID();
log.error("{}", uuid, exception);
int status = exception.status();
ExceptionResponse.ExceptionResponseBuilder builder = ExceptionResponse.builder();
builder.uuid(uuid);
switch (status) {
case 404:
response.setStatus(404);
builder.message(messages.get("omw.exception.city"));
break;
default:
response.setStatus(500);
break;
}
return builder.build();
}
} |
Java | public abstract class UserProfile implements Serializable, Externalizable {
private static final long serialVersionUID = 9020114478664816338L;
protected transient final Logger logger = LoggerFactory.getLogger(getClass());
private String id;
private Map<String, Object> attributes = new HashMap<>();
private Map<String, Object> authenticationAttributes = new HashMap<>();
public transient static final String SEPARATOR = "#";
private boolean isRemembered = false;
private Set<String> roles = new HashSet<>();
private Set<String> permissions = new HashSet<>();
private String clientName;
private String linkedId;
private final boolean canAttributesBeMerged;
/** Creates a profile that overwrites (does not merge) same-named collection attributes. */
protected UserProfile() {
    this( false ); //backwards compatibility
}
/**
 * Create a profile with the possibility to merge attributes with the same name and
 * collection-type values.
 * @param canAttributesBeMerged if true - merge attributes with the same name and collection-type values, if false - overwrite them
 * @since 3.1.0
 */
protected UserProfile( boolean canAttributesBeMerged ) {
    this.canAttributesBeMerged = canAttributesBeMerged;
}
/**
* Build a profile from user identifier and attributes.
*
* @param id user identifier
* @param attributes user attributes
*/
public void build(final Object id, final Map<String, Object> attributes) {
setId(ProfileHelper.sanitizeIdentifier(this, id));
addAttributes(attributes);
}
/**
* Build a profile from user identifier, attributes, and authentication attributes.
*
* @param id user identifier
* @param attributes user attributes
* @param authenticationAttributes authentication attributes
*/
public void build(final Object id, final Map<String, Object> attributes, final Map<String, Object> authenticationAttributes ) {
build(id, attributes);
addAuthenticationAttributes(authenticationAttributes);
}
private void addAttributeToMap(final Map<String, Object> map, final String key, Object value)
{
if (value != null) {
logger.debug("adding => key: {} / value: {} / {}", key, value, value.getClass());
Object valueForMap = getValueForMap(map, key, ProfileHelper.getInternalAttributeHandler().prepare(value));
map.put(key, valueForMap);
}
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private Object getValueForMap(final Map<String, Object> map, final String key, Object preparedValue) {
// support multiple attribute values (e.g. roles can be received as separate attributes and require merging)
// https://github.com/pac4j/pac4j/issues/1145
if (canMergeAttributes(map, key, preparedValue))
{
Collection existingCollection = (Collection) map.get(key);
Collection newCollection = (Collection) preparedValue;
return mergeCollectionAttributes(existingCollection, newCollection);
} else
{
return preparedValue;
}
}
private boolean canMergeAttributes(final Map<String, Object> map, final String key, Object preparedValue)
{
return this.canAttributesBeMerged && preparedValue instanceof Collection && map.get(key) instanceof Collection;
}
private <T> Collection<T> mergeCollectionAttributes(Collection<T> existingCollection, Collection<T> newCollection)
{
return Streams.concat(existingCollection.stream(), newCollection.stream()).collect(Collectors.toList());
}
/**
* Add an attribute.
*
* If existing attribute value is collection and the new value is collection - merge the collections
*
* @param key key of the attribute
* @param value value of the attribute
*/
public void addAttribute(final String key, Object value) {
addAttributeToMap(this.attributes, key, value);
}
/**
* Add an authentication-related attribute
*
* @param key the attribute key
* @param value the attribute value
*/
public void addAuthenticationAttribute(final String key, Object value) {
addAttributeToMap(this.authenticationAttributes, key, value);
}
/**
* Add attributes.
*
* @param attributes use attributes
*/
public void addAttributes(final Map<String, Object> attributes) {
if (attributes != null) {
for (final Map.Entry<String, Object> entry : attributes.entrySet()) {
addAttribute(entry.getKey(), entry.getValue());
}
}
}
/**
* Add authentication attributes.
*
* @param attributeMap the authentication attributes
*/
public void addAuthenticationAttributes(Map<String, Object> attributeMap) {
if (attributeMap != null) {
for (final Map.Entry<String, Object> entry : attributeMap.entrySet()) {
addAuthenticationAttribute(entry.getKey(), entry.getValue());
}
}
}
/**
* Remove an attribute by its key.
*
* @param key the key
*/
public void removeAttribute(final String key) {
CommonHelper.assertNotNull("key", key);
attributes.remove(key);
}
/**
* Remove an authentication attribute by its key
*
* @param key the key
*/
public void removeAuthenticationAttribute(final String key) {
CommonHelper.assertNotNull("key", key);
authenticationAttributes.remove(key);
}
/**
* Set the identifier.
*
* @param id user identifier
*/
public void setId(final String id) {
CommonHelper.assertNotBlank("id", id);
this.id = id;
}
/**
* Get the user identifier. This identifier is unique for this provider but not necessarily through all providers.
*
* @return the user identifier
*/
public String getId() {
return this.id;
}
/**
* Get the user identifier with a prefix which is the profile type (full class name with package).
* This identifier is unique through all providers.
*
* @return the typed user identifier
*/
public String getTypedId() {
return this.getClass().getName() + SEPARATOR + this.id;
}
/**
* Get all attributes as immutable map.
*
* @return the immutable attributes
*/
public Map<String, Object> getAttributes() {
return getAttributeMap(this.attributes);
}
/**
* Get all authentication attributes as an immutable map
*
* @return the immutable authentication attributes
*/
public Map<String, Object> getAuthenticationAttributes() {
return getAttributeMap(this.authenticationAttributes);
}
private static Map<String, Object> getAttributeMap(Map<String, Object> attributeMap) {
final Map<String, Object> newAttributes = new HashMap<>();
for (Map.Entry<String, Object> entries : attributeMap.entrySet()) {
final String key = entries.getKey();
final Object value = ProfileHelper.getInternalAttributeHandler().restore(attributeMap.get(key));
newAttributes.put(key, value);
}
return newAttributes;
}
/**
* Return the attribute with name.
*
* @param name attribute name
* @return the attribute with name
*/
public Object getAttribute(final String name) {
return ProfileHelper.getInternalAttributeHandler().restore(this.attributes.get(name));
}
/**
* Return the authentication attribute with name.
*
* @param name authentication attribute name
* @return the authentication attribute with name
*/
public Object getAuthenticationAttribute(final String name) {
return ProfileHelper.getInternalAttributeHandler().restore(this.authenticationAttributes.get(name));
}
/**
* Check to see if profile contains attribute name.
*
* @param name the name
* @return true/false
*/
public boolean containsAttribute(final String name) {
CommonHelper.assertNotNull("name", name);
return this.attributes.containsKey(name);
}
/**
* Check to see if profile contains attribute name.
*
* @param name the name
* @return true/false
*/
public boolean containsAuthenicationAttribute(final String name) {
CommonHelper.assertNotNull("name", name);
return this.authenticationAttributes.containsKey(name);
}
/**
* Return the attribute with name.
*
* @param name the attribute name
* @param clazz the class of the attribute
* @param <T> the type of the attribute
* @return the attribute by its name
* @since 1.8
*/
public <T> T getAttribute(final String name, final Class<T> clazz) {
final Object attribute = getAttribute(name);
return getAttributeByType(name, clazz, attribute);
}
/**
* Return authentication attribute with name
*
* @param name Name of authentication attribute
* @param clazz The class of the authentication attribute
* @param <T> The type of the authentication attribute
* @return the named attribute
*/
public <T> T getAuthenticationAttribute(final String name, final Class<T> clazz)
{
final Object attribute = getAuthenticationAttribute(name);
return getAttributeByType(name, clazz, attribute);
}
private <T> T getAttributeByType(String name, Class<T> clazz, Object attribute) {
if (attribute == null) {
return null;
}
if (!clazz.isAssignableFrom(attribute.getClass())) {
throw new ClassCastException("Attribute [" + name
+ " is of type " + attribute.getClass()
+ " when we were expecting " + clazz);
}
return (T) attribute;
}
/**
* Add a role.
*
* @param role the role to add.
*/
public void addRole(final String role) {
CommonHelper.assertNotBlank("role", role);
this.roles.add(role);
}
/**
* Add roles.
*
* @param roles the roles to add.
*/
public void addRoles(final Collection<String> roles) {
CommonHelper.assertNotNull("roles", roles);
this.roles.addAll(roles);
}
/**
* Add roles.
*
* @param roles the roles to add.
*/
public void addRoles(final Set<String> roles) {
CommonHelper.assertNotNull("roles", roles);
this.roles.addAll(roles);
}
/**
* Add a permission.
*
* @param permission the permission to add.
*/
public void addPermission(final String permission) {
CommonHelper.assertNotBlank("permission", permission);
this.permissions.add(permission);
}
/** Add permissions.
*
* @param permissions the permissions to add.
*/
public void addPermissions(final Collection<String> permissions) {
CommonHelper.assertNotNull("permissions", permissions);
this.permissions.addAll(permissions);
}
/**
* Define if this profile is remembered.
*
* @param rme whether the user is remembered.
*/
public void setRemembered(final boolean rme) {
this.isRemembered = rme;
}
/**
* Get the roles of the user.
*
* @return the user roles.
*/
public Set<String> getRoles() {
return new LinkedHashSet<>(this.roles);
}
public void setRoles(Set<String> roles) {
CommonHelper.assertNotNull("roles", roles);
this.roles = roles;
}
/**
* Get the permissions of the user.
*
* @return the user permissions.
*/
public Set<String> getPermissions() {
return new LinkedHashSet<>(this.permissions);
}
public void setPermissions(Set<String> permissions) {
CommonHelper.assertNotNull("permissions", permissions);
this.permissions = permissions;
}
/**
* Is the user remembered?
*
* @return whether the user is remembered.
*/
public boolean isRemembered() {
return this.isRemembered;
}
@Override
public String toString() {
return CommonHelper.toNiceString(this.getClass(), "id", this.id, "attributes", this.attributes, "roles",
this.roles, "permissions", this.permissions, "isRemembered", this.isRemembered,
"clientName", this.clientName, "linkedId", this.linkedId);
}
@Override
public void writeExternal(final ObjectOutput out) throws IOException {
out.writeObject(this.id);
out.writeObject(this.attributes);
out.writeObject(this.authenticationAttributes);
out.writeBoolean(this.isRemembered);
out.writeObject(this.roles);
out.writeObject(this.permissions);
out.writeObject(this.clientName);
out.writeObject(this.linkedId);
}
@Override
public void readExternal(final ObjectInput in) throws IOException, ClassNotFoundException {
this.id = (String) in.readObject();
this.attributes = (Map) in.readObject();
this.authenticationAttributes = (Map) in.readObject();
this.isRemembered = in.readBoolean();
this.roles = (Set) in.readObject();
this.permissions = (Set) in.readObject();
this.clientName = (String) in.readObject();
this.linkedId = (String) in.readObject();
}
public void clearSensitiveData() {
// No-op. Allow subtypes to specify which state should be cleared out.
}
public String getClientName() {
return clientName;
}
public void setClientName(final String clientName) {
this.clientName = clientName;
}
public String getLinkedId() {
return linkedId;
}
public void setLinkedId(final String linkedId) {
this.linkedId = linkedId;
}
} |
Java | @Command(scope = "camel", name = "endpoint-list", description = "Lists all Camel endpoints available in CamelContexts.")
public class EndpointList extends CamelCommandSupport {
private static final String CONTEXT_COLUMN_LABEL = "Context";
private static final String URI_COLUMN_LABEL = "Uri";
private static final String STATUS_COLUMN_LABEL = "Status";
private static final int DEFAULT_COLUMN_WIDTH_INCREMENT = 0;
private static final String DEFAULT_FIELD_PREAMBLE = " ";
private static final String DEFAULT_FIELD_POSTAMBLE = " ";
private static final String DEFAULT_HEADER_PREAMBLE = " ";
private static final String DEFAULT_HEADER_POSTAMBLE = " ";
private static final int DEFAULT_FORMAT_BUFFER_LENGTH = 24;
// endpoint uris can be very long so clip by default after 120 chars
private static final int MAX_COLUMN_WIDTH = 120;
private static final int MIN_COLUMN_WIDTH = 12;
@Argument(index = 0, name = "name", description = "The Camel context name where to look for the endpoints", required = false, multiValued = false)
String name;
@Option(name = "--decode", aliases = "-d", description = "Whether to decode the endpoint uri so its human readable",
required = false, multiValued = false, valueToShowInHelp = "true")
Boolean decode;
@Option(name = "--verbose", aliases = "-v", description = "Verbose output which does not limit the length of the uri shown",
required = false, multiValued = false, valueToShowInHelp = "false")
Boolean verbose;
protected Object doExecute() throws Exception {
List<Endpoint> endpoints = camelController.getEndpoints(name);
final Map<String, Integer> columnWidths = computeColumnWidths(endpoints);
final String headerFormat = buildFormatString(columnWidths, true);
final String rowFormat = buildFormatString(columnWidths, false);
final PrintStream out = System.out;
if (endpoints.size() > 0) {
out.println(String.format(headerFormat, CONTEXT_COLUMN_LABEL, URI_COLUMN_LABEL, STATUS_COLUMN_LABEL));
out.println(String.format(headerFormat, "-------", "---", "------"));
for (final Endpoint endpoint : endpoints) {
String contextId = endpoint.getCamelContext().getName();
String uri = endpoint.getEndpointUri();
if (decode == null || decode) {
// decode uri so its more human readable
uri = URLDecoder.decode(uri, "UTF-8");
}
// sanitize and mask uri so we dont see passwords
uri = URISupport.sanitizeUri(uri);
String state = getEndpointState(endpoint);
out.println(String.format(rowFormat, contextId, uri, state));
}
}
return null;
}
private Map<String, Integer> computeColumnWidths(final Iterable<Endpoint> endpoints) throws Exception {
if (endpoints == null) {
throw new IllegalArgumentException("Unable to determine column widths from null Iterable<Endpoint>");
} else {
int maxContextLen = 0;
int maxUriLen = 0;
int maxStatusLen = 0;
for (final Endpoint endpoint : endpoints) {
final String name = endpoint.getCamelContext().getName();
maxContextLen = java.lang.Math.max(maxContextLen, name == null ? 0 : name.length());
String uri = endpoint.getEndpointUri();
if (decode == null || decode) {
// decode uri so its more human readable
uri = URLDecoder.decode(uri, "UTF-8");
}
// sanitize and mask uri so we dont see passwords
uri = URISupport.sanitizeUri(uri);
maxUriLen = java.lang.Math.max(maxUriLen, uri == null ? 0 : uri.length());
final String status = getEndpointState(endpoint);
maxStatusLen = java.lang.Math.max(maxStatusLen, status == null ? 0 : status.length());
}
final Map<String, Integer> retval = new Hashtable<String, Integer>(3);
retval.put(CONTEXT_COLUMN_LABEL, maxContextLen);
retval.put(URI_COLUMN_LABEL, maxUriLen);
retval.put(STATUS_COLUMN_LABEL, maxStatusLen);
return retval;
}
}
private String buildFormatString(final Map<String, Integer> columnWidths, final boolean isHeader) {
final String fieldPreamble;
final String fieldPostamble;
final int columnWidthIncrement;
if (isHeader) {
fieldPreamble = DEFAULT_HEADER_PREAMBLE;
fieldPostamble = DEFAULT_HEADER_POSTAMBLE;
} else {
fieldPreamble = DEFAULT_FIELD_PREAMBLE;
fieldPostamble = DEFAULT_FIELD_POSTAMBLE;
}
columnWidthIncrement = DEFAULT_COLUMN_WIDTH_INCREMENT;
int contextLen = java.lang.Math.min(columnWidths.get(CONTEXT_COLUMN_LABEL) + columnWidthIncrement, getMaxColumnWidth());
int uriLen = java.lang.Math.min(columnWidths.get(URI_COLUMN_LABEL) + columnWidthIncrement, getMaxColumnWidth());
int statusLen = java.lang.Math.min(columnWidths.get(STATUS_COLUMN_LABEL) + columnWidthIncrement, getMaxColumnWidth());
contextLen = Math.max(MIN_COLUMN_WIDTH, contextLen);
uriLen = Math.max(MIN_COLUMN_WIDTH, uriLen);
// last row does not have min width
final StringBuilder retval = new StringBuilder(DEFAULT_FORMAT_BUFFER_LENGTH);
retval.append(fieldPreamble).append("%-").append(contextLen).append('.').append(contextLen).append('s').append(fieldPostamble).append(' ');
retval.append(fieldPreamble).append("%-").append(uriLen).append('.').append(uriLen).append('s').append(fieldPostamble).append(' ');
retval.append(fieldPreamble).append("%-").append(statusLen).append('.').append(statusLen).append('s').append(fieldPostamble).append(' ');
return retval.toString();
}
private int getMaxColumnWidth() {
if (verbose != null && verbose) {
return Integer.MAX_VALUE;
} else {
return MAX_COLUMN_WIDTH;
}
}
private static String getEndpointState(Endpoint endpoint) {
// must use String type to be sure remote JMX can read the attribute without requiring Camel classes.
if (endpoint instanceof StatefulService) {
ServiceStatus status = ((StatefulService) endpoint).getStatus();
return status.name();
}
// assume started if not a ServiceSupport instance
return ServiceStatus.Started.name();
}
} |
Java | @Configuration
@Order(-1)
public class CorsConfig {
@Bean
public WebMvcConfigurer corsConfigurer() {
return new WebMvcConfigurer() {
@Override
public void addCorsMappings(CorsRegistry registry) {
// 添加映射路径
registry.addMapping("/**")
// 允许哪些原始域
.allowedOrigins("*")
// 是否发送 Cookie 信息
.allowCredentials(true)
// 允许哪些请求方法
.allowedMethods("*")
// 允许哪些头信息
.allowedHeaders("*");
}
};
}
@Bean
public CorsConfigurationSource corsConfigurationSource() {
var config = new CorsConfiguration();
config.addAllowedOrigin("*");
config.addAllowedMethod("*");
config.addAllowedHeader("*");
config.setAllowCredentials(true);
var source = new UrlBasedCorsConfigurationSource();
source.registerCorsConfiguration("/**", config);
return source;
}
} |
/**
 * Deprecated backwards-compatibility shim: exposes the old
 * {@code org.jets3t.service.multithread} API on top of the replacement
 * {@code org.jets3t.service.multi.ThreadWatcher}. Every member simply
 * delegates to the superclass; only the legacy types in the signatures differ.
 */
@Deprecated
public class ThreadWatcher extends org.jets3t.service.multi.ThreadWatcher {
    protected ThreadWatcher(BytesProgressWatcher[] progressWatchers) {
        super(progressWatchers);
    }
    protected ThreadWatcher(long threadCount) {
        super(threadCount);
    }
    // Widens access to the superclass method while accepting the legacy
    // CancelEventTrigger type (a subtype of the new one).
    public void updateThreadsCompletedCount(long completedThreads,
        org.jets3t.service.multithread.CancelEventTrigger cancelEventListener) {
        super.updateThreadsCompletedCount(completedThreads, cancelEventListener);
    }
    // Covariant override: narrows the return type to the legacy trigger type.
    // The cast assumes the listener was registered through this legacy class —
    // TODO(review): confirm no caller registers a new-style trigger directly.
    @Override
    public org.jets3t.service.multithread.CancelEventTrigger getCancelEventListener() {
        return (org.jets3t.service.multithread.CancelEventTrigger) super.getCancelEventListener();
    }
}
/**
 * Creates, initializes, and caches {@code ConstraintValidator} instances.
 * <p>
 * Caching policy: instances created with the default factory/initialization
 * context are cached forever; instances created with any other factory/context
 * pair are cached only for the single most recently used non-default pair
 * (older entries are released and evicted when that pair changes).
 */
public class ConstraintValidatorManager {
	private static final Log LOG = LoggerFactory.make( MethodHandles.lookup() );
	/**
	 * Dummy {@code ConstraintValidator} used as placeholder for the case that for a given context there exists
	 * no matching constraint validator instance
	 */
	// Caching this sentinel (instead of null) lets the "no validator found" result be memoized too.
	static ConstraintValidator<?, ?> DUMMY_CONSTRAINT_VALIDATOR = new ConstraintValidator<Null, Object>() {

		@Override
		public boolean isValid(Object value, ConstraintValidatorContext context) {
			return false;
		}
	};
	/**
	 * The explicit or implicit default constraint validator factory. We always cache {@code ConstraintValidator}
	 * instances if they are created via the default instance and with the default initialization context. Constraint
	 * validator instances created via other factory instances (specified eg via {@code ValidatorFactory#usingContext()}
	 * or initialization context are only cached for the most recently used factory and context.
	 */
	private final ConstraintValidatorFactory defaultConstraintValidatorFactory;
	/**
	 * The explicit or implicit default constraint validator initialization context. We always cache
	 * {@code ConstraintValidator} instances if they are created via the default instance and with the default context.
	 * Constraint validator instances created via other factory instances (specified eg via
	 * {@code ValidatorFactory#usingContext()} or initialization context are only cached for the most recently used
	 * factory and context.
	 */
	private final HibernateConstraintValidatorInitializationContext defaultConstraintValidatorInitializationContext;
	/**
	 * The most recently used non default constraint validator factory.
	 */
	private volatile ConstraintValidatorFactory mostRecentlyUsedNonDefaultConstraintValidatorFactory;
	/**
	 * The most recently used non default constraint validator initialization context.
	 */
	private volatile HibernateConstraintValidatorInitializationContext mostRecentlyUsedNonDefaultConstraintValidatorInitializationContext;
	/**
	 * Used for synchronizing access to {@link #mostRecentlyUsedNonDefaultConstraintValidatorFactory} (which can be
	 * null itself).
	 */
	private final Object mostRecentlyUsedNonDefaultConstraintValidatorFactoryAndInitializationContextMutex = new Object();
	/**
	 * Cache of initialized {@code ConstraintValidator} instances keyed against validated type, annotation,
	 * constraint validator factory and constraint validator initialization context ({@code CacheKey}).
	 */
	private final ConcurrentHashMap<CacheKey, ConstraintValidator<?, ?>> constraintValidatorCache;
	/**
	 * Creates a new {@code ConstraintValidatorManager}.
	 *
	 * @param defaultConstraintValidatorFactory the default validator factory
	 * @param defaultConstraintValidatorInitializationContext the default initialization context
	 */
	public ConstraintValidatorManager(ConstraintValidatorFactory defaultConstraintValidatorFactory,
			HibernateConstraintValidatorInitializationContext defaultConstraintValidatorInitializationContext) {
		this.defaultConstraintValidatorFactory = defaultConstraintValidatorFactory;
		this.defaultConstraintValidatorInitializationContext = defaultConstraintValidatorInitializationContext;
		this.constraintValidatorCache = new ConcurrentHashMap<>();
	}
	/**
	 * @param validatedValueType the type of the value to be validated. Cannot be {@code null}.
	 * @param descriptor the constraint descriptor for which to get an initialized constraint validator. Cannot be {@code null}
	 * @param constraintValidatorFactory constraint factory used to instantiate the constraint validator. Cannot be {@code null}.
	 * @param initializationContext context used on constraint validator initialization
	 * @param <A> the annotation type
	 *
	 * @return an initialized constraint validator for the given type and annotation of the value to be validated.
	 * {@code null} is returned if no matching constraint validator could be found.
	 */
	public <A extends Annotation> ConstraintValidator<A, ?> getInitializedValidator(
			Type validatedValueType,
			ConstraintDescriptorImpl<A> descriptor,
			ConstraintValidatorFactory constraintValidatorFactory,
			HibernateConstraintValidatorInitializationContext initializationContext) {
		Contracts.assertNotNull( validatedValueType );
		Contracts.assertNotNull( descriptor );
		Contracts.assertNotNull( constraintValidatorFactory );
		Contracts.assertNotNull( initializationContext );

		CacheKey key = new CacheKey( descriptor.getAnnotationDescriptor(), validatedValueType, constraintValidatorFactory, initializationContext );

		@SuppressWarnings("unchecked")
		ConstraintValidator<A, ?> constraintValidator = (ConstraintValidator<A, ?>) constraintValidatorCache.get( key );

		if ( constraintValidator == null ) {
			constraintValidator = createAndInitializeValidator( validatedValueType, descriptor, constraintValidatorFactory, initializationContext );
			// cacheValidator may return an instance concurrently cached by another thread
			constraintValidator = cacheValidator( key, constraintValidator );
		}
		else {
			LOG.tracef( "Constraint validator %s found in cache.", constraintValidator );
		}

		// translate the "no matching validator" sentinel back to null for callers
		return DUMMY_CONSTRAINT_VALIDATOR == constraintValidator ? null : constraintValidator;
	}
	// Inserts the validator into the cache, first evicting all entries of the previous
	// non-default factory/context pair if this key introduces a new non-default pair.
	private <A extends Annotation> ConstraintValidator<A, ?> cacheValidator(CacheKey key,
			ConstraintValidator<A, ?> constraintValidator) {
		// we only cache constraint validator instances for the default and most recently used factory
		if ( ( key.getConstraintValidatorFactory() != defaultConstraintValidatorFactory
				&& key.getConstraintValidatorFactory() != mostRecentlyUsedNonDefaultConstraintValidatorFactory ) ||
				( key.getConstraintValidatorInitializationContext() != defaultConstraintValidatorInitializationContext
						&& key.getConstraintValidatorInitializationContext() != mostRecentlyUsedNonDefaultConstraintValidatorInitializationContext ) ) {
			synchronized ( mostRecentlyUsedNonDefaultConstraintValidatorFactoryAndInitializationContextMutex ) {
				// re-check under the lock: another thread may already have switched the pair
				if ( key.constraintValidatorFactory != mostRecentlyUsedNonDefaultConstraintValidatorFactory ||
						key.constraintValidatorInitializationContext != mostRecentlyUsedNonDefaultConstraintValidatorInitializationContext ) {
					clearEntries( mostRecentlyUsedNonDefaultConstraintValidatorFactory, mostRecentlyUsedNonDefaultConstraintValidatorInitializationContext );
					mostRecentlyUsedNonDefaultConstraintValidatorFactory = key.getConstraintValidatorFactory();
					mostRecentlyUsedNonDefaultConstraintValidatorInitializationContext = key.getConstraintValidatorInitializationContext();
				}
			}
		}

		// putIfAbsent: on a race, prefer the instance that won so all threads share one validator
		@SuppressWarnings("unchecked")
		ConstraintValidator<A, ?> cached = (ConstraintValidator<A, ?>) constraintValidatorCache.putIfAbsent( key, constraintValidator );

		return cached != null ? cached : constraintValidator;
	}
	@SuppressWarnings("unchecked")
	private <A extends Annotation> ConstraintValidator<A, ?> createAndInitializeValidator(
			Type validatedValueType,
			ConstraintDescriptorImpl<A> descriptor,
			ConstraintValidatorFactory constraintValidatorFactory,
			HibernateConstraintValidatorInitializationContext initializationContext) {
		ConstraintValidatorDescriptor<A> validatorDescriptor = findMatchingValidatorDescriptor( descriptor, validatedValueType );
		ConstraintValidator<A, ?> constraintValidator;

		if ( validatorDescriptor == null ) {
			// cache the sentinel so the (failed) resolution is not repeated
			constraintValidator = (ConstraintValidator<A, ?>) DUMMY_CONSTRAINT_VALIDATOR;
		}
		else {
			constraintValidator = validatorDescriptor.newInstance( constraintValidatorFactory );
			initializeValidator( descriptor, constraintValidator, initializationContext );
		}

		return constraintValidator;
	}
	// Releases (via the owning factory) and removes all cache entries created for the
	// given factory/context pair.
	private void clearEntries(ConstraintValidatorFactory constraintValidatorFactory, HibernateConstraintValidatorInitializationContext constraintValidatorInitializationContext) {
		Iterator<Entry<CacheKey, ConstraintValidator<?, ?>>> cacheEntries = constraintValidatorCache.entrySet().iterator();

		while ( cacheEntries.hasNext() ) {
			Entry<CacheKey, ConstraintValidator<?, ?>> cacheEntry = cacheEntries.next();
			if ( cacheEntry.getKey().getConstraintValidatorFactory() == constraintValidatorFactory &&
					cacheEntry.getKey().getConstraintValidatorInitializationContext() == constraintValidatorInitializationContext ) {
				constraintValidatorFactory.releaseInstance( cacheEntry.getValue() );
				cacheEntries.remove();
			}
		}
	}
	// Releases every cached validator back to its factory and empties the cache.
	public void clear() {
		for ( Map.Entry<CacheKey, ConstraintValidator<?, ?>> entry : constraintValidatorCache.entrySet() ) {
			entry.getKey().getConstraintValidatorFactory().releaseInstance( entry.getValue() );
		}
		constraintValidatorCache.clear();
	}
	public ConstraintValidatorFactory getDefaultConstraintValidatorFactory() {
		return defaultConstraintValidatorFactory;
	}
	public HibernateConstraintValidatorInitializationContext getDefaultConstraintValidatorInitializationContext() {
		return defaultConstraintValidatorInitializationContext;
	}
	public int numberOfCachedConstraintValidatorInstances() {
		return constraintValidatorCache.size();
	}
	/**
	 * Runs the validator resolution algorithm.
	 *
	 * @param validatedValueType The type of the value to be validated (the type of the member/class the constraint was placed on).
	 *
	 * @return The class of a matching validator.
	 */
	private <A extends Annotation> ConstraintValidatorDescriptor<A> findMatchingValidatorDescriptor(ConstraintDescriptorImpl<A> descriptor, Type validatedValueType) {
		Map<Type, ConstraintValidatorDescriptor<A>> availableValidatorDescriptors = TypeHelper.getValidatorTypes(
				descriptor.getAnnotationType(),
				descriptor.getMatchingConstraintValidatorDescriptors()
		);

		List<Type> discoveredSuitableTypes = findSuitableValidatorTypes( validatedValueType, availableValidatorDescriptors.keySet() );
		resolveAssignableTypes( discoveredSuitableTypes );

		if ( discoveredSuitableTypes.size() == 0 ) {
			return null;
		}

		// ambiguity after reduction is a user configuration error
		if ( discoveredSuitableTypes.size() > 1 ) {
			throw LOG.getMoreThanOneValidatorFoundForTypeException( validatedValueType, discoveredSuitableTypes );
		}

		Type suitableType = discoveredSuitableTypes.get( 0 );
		return availableValidatorDescriptors.get( suitableType );
	}
	// Collects (without duplicates) all validator target types assignable from the validated type.
	private <A extends Annotation> List<Type> findSuitableValidatorTypes(Type type, Iterable<Type> availableValidatorTypes) {
		List<Type> determinedSuitableTypes = newArrayList();
		for ( Type validatorType : availableValidatorTypes ) {
			if ( TypeHelper.isAssignable( validatorType, type )
					&& !determinedSuitableTypes.contains( validatorType ) ) {
				determinedSuitableTypes.add( validatorType );
			}
		}
		return determinedSuitableTypes;
	}
	// Runs both the Hibernate-specific and the standard Bean Validation initialization steps.
	@SuppressWarnings("unchecked")
	private <A extends Annotation> void initializeValidator(
			ConstraintDescriptor<A> descriptor,
			ConstraintValidator<A, ?> constraintValidator,
			HibernateConstraintValidatorInitializationContext initializationContext) {
		try {
			if ( constraintValidator instanceof HibernateConstraintValidator ) {
				( (HibernateConstraintValidator<A, ?>) constraintValidator ).initialize( descriptor, initializationContext );
			}
			constraintValidator.initialize( descriptor.getAnnotation() );
		}
		catch (RuntimeException e) {
			if ( e instanceof ConstraintDeclarationException ) {
				throw e;
			}
			throw LOG.getUnableToInitializeConstraintValidatorException( constraintValidator.getClass(), e );
		}
	}
	/**
	 * Tries to reduce all assignable classes down to a single class.
	 *
	 * @param assignableTypes The set of all classes which are assignable to the class of the value to be validated and
	 * which are handled by at least one of the validators for the specified constraint.
	 */
	private void resolveAssignableTypes(List<Type> assignableTypes) {
		if ( assignableTypes.size() == 0 || assignableTypes.size() == 1 ) {
			return;
		}

		List<Type> typesToRemove = new ArrayList<>();
		// repeatedly drop types that are super-types of another candidate,
		// keeping only the most specific one(s)
		do {
			typesToRemove.clear();
			Type type = assignableTypes.get( 0 );
			for ( int i = 1; i < assignableTypes.size(); i++ ) {
				if ( TypeHelper.isAssignable( type, assignableTypes.get( i ) ) ) {
					typesToRemove.add( type );
				}
				else if ( TypeHelper.isAssignable( assignableTypes.get( i ), type ) ) {
					typesToRemove.add( assignableTypes.get( i ) );
				}
			}
			assignableTypes.removeAll( typesToRemove );
		} while ( typesToRemove.size() > 0 );
	}
	private static final class CacheKey {
		// These members are not final for optimization purposes
		private ConstraintAnnotationDescriptor<?> annotationDescriptor;
		private Type validatedType;
		private ConstraintValidatorFactory constraintValidatorFactory;
		private HibernateConstraintValidatorInitializationContext constraintValidatorInitializationContext;
		// hash code precomputed once in the constructor; keys are looked up very frequently
		private int hashCode;

		private CacheKey(ConstraintAnnotationDescriptor<?> annotationDescriptor, Type validatorType, ConstraintValidatorFactory constraintValidatorFactory,
				HibernateConstraintValidatorInitializationContext constraintValidatorInitializationContext) {
			this.annotationDescriptor = annotationDescriptor;
			this.validatedType = validatorType;
			this.constraintValidatorFactory = constraintValidatorFactory;
			this.constraintValidatorInitializationContext = constraintValidatorInitializationContext;
			this.hashCode = createHashCode();
		}

		public ConstraintValidatorFactory getConstraintValidatorFactory() {
			return constraintValidatorFactory;
		}

		public HibernateConstraintValidatorInitializationContext getConstraintValidatorInitializationContext() {
			return constraintValidatorInitializationContext;
		}

		@Override
		public boolean equals(Object o) {
			if ( this == o ) {
				return true;
			}

			// no need to check for the type here considering it's only used in a typed map
			if ( o == null ) {
				return false;
			}

			CacheKey other = (CacheKey) o;

			if ( !annotationDescriptor.equals( other.annotationDescriptor ) ) {
				return false;
			}
			if ( !validatedType.equals( other.validatedType ) ) {
				return false;
			}
			if ( !constraintValidatorFactory.equals( other.constraintValidatorFactory ) ) {
				return false;
			}
			if ( !constraintValidatorInitializationContext.equals( other.constraintValidatorInitializationContext ) ) {
				return false;
			}
			return true;
		}

		@Override
		public int hashCode() {
			return hashCode;
		}

		private int createHashCode() {
			int result = annotationDescriptor.hashCode();
			result = 31 * result + validatedType.hashCode();
			result = 31 * result + constraintValidatorFactory.hashCode();
			result = 31 * result + constraintValidatorInitializationContext.hashCode();
			return result;
		}
	}
}
Java | public class SimCookie
implements Cookie
{
// All attributes are set once in the constructor and never reassigned,
// so they are declared final to make the cookie immutable.
/** The name of the cookie. */
private final String name;
/** The value of the cookie. */
private final String value;
/** The path of the cookie. */
private final String path;
/** The age of the cookie. */
private final int age;
/** Whether the cookie is marked secure. */
private final boolean secure;
/**
 * Constructs a new simulated cookie with the given parameters.
 *
 * @param name the name of the cookie
 * @param value the value of the cookie
 * @param path the path of the cookie
 * @param age the age of the cookie
 * @param secure whether the cookie should be marked secure
 */
public SimCookie(String name, String value, String path, int age, boolean secure)
{
    this.name = name;
    this.value = value;
    this.path = path;
    this.age = age;
    this.secure = secure;
}
/**
 * Returns the value of this cookie.
 *
 * @return the value of this cookie
 */
@Override
public String getValue()
{
    return this.value;
}
/**
 * Returns the name of this cookie.
 *
 * @return the name of this cookie
 */
@Override
public String getName()
{
    return this.name;
}
/**
 * Returns the path of this cookie.
 *
 * @return the path of this cookie
 */
public String getPath()
{
    return this.path;
}
/**
 * Returns the age of this cookie.
 *
 * @return the age of this cookie
 */
public int getAge()
{
    return this.age;
}
/**
 * Returns {@code true} if this cookie is secure.
 *
 * @return {@code true} if this cookie is secure
 */
public boolean isSecure()
{
    return this.secure;
}
} |
Java | public class LA_SinkApp extends SensorApp implements ActiveComponent {
// Flag read externally to decide whether the simulation should halt;
// only ever set from the commented-out check() logic below.
public int stopIt = 0;
//to keep track of the total number of received packets
private int totalINpackets = 0;
/*To collect and display Total packets received by sink node in graph form. Created a
port that will output to a plotter*/
public static final String PACKETS_REC_EVENT = "Total Packets Received by Sink";
public static final String PLOTTER_PORT_ID = ".PacketsReceivedPlot";
public Port packetsPlotPort = addEventPort(PLOTTER_PORT_ID); //for total packets received.
//this counter will be used to keep track of how many packets that the BS would have actually
//received if it wasn't using Clusters. In other words every CH sends one packet which combines
//the data of all its nodes in the their respective cluster. So just to see by how much traffic
//near the sink is reduced we also graph this plot as well.
private int totalVirtualPackets = 0;
public static final String VIRTUAL_PACKETS_REC_EVENT = "Total Theoretical Packets Received";
public static final String PLOTTER_PORT_ID_2 = ".theo_PacketsReceivedPlot";
public Port packetsPlotPort2 = addEventPort(PLOTTER_PORT_ID_2); //for total theoretical packets received.
// Destination node id; stays -1 until a destination is known.
long iDstNid;
// Payloads extracted from received source packets, in arrival order.
Vector DataFromPkts;
// Aggregation count per received packet; parallel to DataFromPkts.
Vector CountOfAggPckt;
//packet RouteMinHopPckt, will send from sink has this type
//this packet compute min hop to sink from every node
public static final int SINK_DATA = 0 ;
//data that sense in a source and aggregated will send to sink with this type
public static final int SOURCE_DATA = 1 ;
//ack from node that recieve data packet has this type
public static final int ACK_DATA = 2 ;
// Per-node trace file; the path is assigned in _start(), and every logging
// site reopens/closes it in append mode.
File file;
int nn_;//NodeNum
Vector Neighbours;//13 esfand logfile
int NeighbourNum;//13 esfand logfile
//===============================================================================
/** Initializes counters and the per-packet bookkeeping vectors. */
public LA_SinkApp(){
iDstNid = -1;
DataFromPkts = new Vector();
CountOfAggPckt = new Vector();
Neighbours = new Vector();
totalINpackets=0;
totalVirtualPackets=0;
}
//===============================================================================
/**
 * Component start hook: fixes this node's trace-file path and schedules the
 * initial "SendData" timeout that kicks off route construction.
 */
protected void _start ()
{
file = new File("./out/LAnode/n"+ new Long(nid).toString()+".txt"); //13 esfand log file
rTimer = setTimeout("SendData", 1);
// setTimeout("PktsFile", 150);
// rTimer = setTimeout("check", 1000 + 61 * 20);
}
//===============================================================================
/* public void check(){
if(this.NeighbourNum == 0)
stopIt = 1;
}
public int shouldStop(){
return stopIt;
}
*///===============================================================================
/**
 * Component stop hook: logs the stop time, cancels any pending timeout and
 * powers down the CPU model.
 */
protected void _stop()
{
//13 esfand log file
try{
FileWriter fw=new FileWriter(file,true);
BufferedWriter bw=new BufferedWriter(fw);
bw.write("stop at time:"+ new Double(getTime()).toString());
bw.newLine();
bw.close();
fw.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
//END 13 esfand log file
if (rTimer != null)
cancelTimeout(rTimer);
this.setCPUMode(3); //turn off CPU when sim stops
//WriteRslts();
}
//===============================================================================
/**
 * Timer callback. "SendData" triggers route construction (only while the
 * sensor is alive); "PktsFile" dumps the running packet counters to disk and
 * immediately re-arms itself.
 */
protected synchronized void timeout(Object data_)
{
//13 esfand log file
try{
FileWriter fw=new FileWriter(file,true);
BufferedWriter bw=new BufferedWriter(fw);
bw.write("timeout at time:"+ new Double(getTime()).toString());
bw.newLine();
bw.close();
fw.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
//END 13 esfand log file
if (!sensorDEAD && data_.equals("SendData"))
this.SendData();
else if ( data_.equals("PktsFile")) {
try{
File file2 = new File("./out/LAnode/total.txt");
FileWriter fw2= new FileWriter(file2,true);
BufferedWriter bw2 = new BufferedWriter(fw2);
bw2.write(new Integer(totalINpackets).toString() + ",");
bw2.close(); fw2.close();
File file3= new File("./out/LAnode/Virtual.txt");
FileWriter fw3= new FileWriter(file3,true);
BufferedWriter bw3 = new BufferedWriter(fw3);
bw3.write(new Integer(totalVirtualPackets).toString()+ ",");
bw3.close(); fw3.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
// Re-arm so the counters are dumped continuously.
setTimeout("PktsFile", 1);
return;
}
}
//===============================================================================
/**
 * Receives a packet from the lower layer: SOURCE_DATA packets are logged and
 * dispatched to RcvSourcePacket(); everything is then forwarded to the
 * superclass handler.
 */
public synchronized void recvSensorPacket(Object data_)
{
//13 esfand log file
try{
FileWriter fw=new FileWriter(file,true);
BufferedWriter bw=new BufferedWriter(fw);
if ( data_ instanceof SensorPacket) {
SensorPacket spkt = (SensorPacket)data_ ;
if(spkt.getPktType() == SOURCE_DATA )
{
bw.write("recvSensorPacket-SOURCE_DATA at time:"+ new Double(getTime()).toString()+"from "+new Long(spkt.id) );
RcvSourcePacket(spkt);
}
}
else
bw.write("recvSensorPacket at time:"+ new Double(getTime()).toString());
// NOTE(review): the super call sits inside the logging try-block, so an
// IOException while logging would also skip superclass processing -- confirm
// whether that is intended.
super.recvSensorPacket(data_) ;
bw.newLine();
bw.close();
fw.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
//END 13 esfand log file
}
//===============================================================================
//MinimRouteConstruction
/**
 * Broadcasts the sink's "MinimRouteConstruction" packet (SINK_DATA) carrying
 * this node's coordinates, used by the other nodes to compute minimum hop
 * counts towards the sink.
 */
void SendData(){
//13 esfand log file
try{
FileWriter fw=new FileWriter(file,true);
BufferedWriter bw=new BufferedWriter(fw);
bw.write("SendData at time:"+ new Double(getTime()).toString());
bw.newLine();
bw.close();
fw.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
//END 13 esfand log file
double [] loc= new double[3];
loc[0]=getX(); loc[1]=getY(); loc[2] = getZ();
LA_SinkPacket pckt= new LA_SinkPacket(this.nid,"MinimRouteConstruction",8,loc);
downPort.doSending(new SensorAppWirelessAgentContract.Message(
SensorAppWirelessAgentContract.BROADCAST_SENSOR_PACKET,SINK_DATA,this.nid,8,pckt));
return;
}
//===============================================================================
//13 esfand
/**
 * Handles a SOURCE_DATA packet arriving at the sink: records its payload and
 * aggregation count, updates both packet counters (actual and theoretical),
 * exports them to the plotter ports when connected, persists the result via
 * WriteRslts() and finally acknowledges the sender.
 */
protected void RcvSourcePacket(SensorPacket spkt){
    //13 esfand log file
    try{
        FileWriter fw=new FileWriter(file,true);
        BufferedWriter bw=new BufferedWriter(fw);
        // BUGFIX: this trace line previously said "SendAckPacket at:" -- a
        // copy-paste from SendAckPacket() -- which made the per-node logs
        // misleading. It now names this method.
        bw.write("RcvSourcePacket at:"+ new Double(getTime()).toString());
        bw.newLine();
        bw.close();
        fw.close();
    }
    catch(IOException ioe){
        System.out.println("an IOE is happend:"+ioe);
    }
    double time=getTime();
    LA_SourcePacket msg = (LA_SourcePacket) spkt.getBody();
/* int index = Neighbours.indexOf(msg.ID);
if(index==-1){
System.out.println("error in index RcvSourcePacket in node "+nid);
addNeighbour(msg.ID);
}
*/
    DataFromPkts.add(msg.getData());
    CountOfAggPckt.add(new Integer(msg.getNumOfAggPckt()));
    System.out.println("packet rcvd in Sink"+ msg.getData());
    System.out.println("Num of agg pkt in it is"+ msg.getNumOfAggPckt());
    System.out.println("Base Source of this pkt is"+ msg.BaseSrc);
    for(int j=0; j<msg.PathNode.size() ; j++)
        System.out.print(msg.PathNode.elementAt(j)+",");
    System.out.println();
    //***********************
    // Step 1. Update the number of total packets that have been received. This means
    // only the packets received and processed from the CHs throughout the simulation.
    //***********************
    this.totalINpackets = this.totalINpackets + 1;
    if (packetsPlotPort.anyOutConnection()) {
        packetsPlotPort.exportEvent(PACKETS_REC_EVENT, new DoubleObj(this.totalINpackets), null);
    }
    //**********************
    // Step 2. Theoretical packets received-Since tree combined packets from all sensors in
    // its path into one... here we actually display the theoretical number of packets that
    // should have been received by the base station.
    //**********************
    this.totalVirtualPackets = this.totalVirtualPackets +msg.getNumOfAggPckt()+1 ;//added by one because of packet itself: agg+1=all packets
    if (packetsPlotPort2.anyOutConnection()) {
        packetsPlotPort2.exportEvent(VIRTUAL_PACKETS_REC_EVENT, new DoubleObj(totalVirtualPackets), null);
    }
    WriteRslts(msg.BaseSrc,msg.PathNode,msg.PathInfo,time);
    SendAckPacket(msg.id,msg.getLoc());
}
//===============================================================================
//13 esfand
/**
 * Sends a unicast ACK_DATA control packet back to the node the data packet
 * came from.
 *
 * @param Dst_id node id of the acknowledged sender
 * @param DstLoc location of the acknowledged sender
 */
protected void SendAckPacket(long Dst_id,double [] DstLoc){
//13 esfand log file
try{
FileWriter fw=new FileWriter(file,true);
BufferedWriter bw=new BufferedWriter(fw);
bw.write("SendAckPacket at:"+ new Double(getTime()).toString());
bw.newLine();
bw.close();
fw.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
// The ACK originates at the sink, so its own location field is zeroed.
double [] loc = new double[3];loc[0] = loc[1] = loc[2] = 0.0;
LA_ControlPacket pckt=new LA_ControlPacket(nid,"Ack Data Packet",8,new Double(1),1,0.1,loc);
downPort.doSending(new SensorAppWirelessAgentContract.Message(SensorAppWirelessAgentContract.UNICAST_SENSOR_PACKET,
Dst_id,this.nid,/*7 esfand*/DstLoc,8,ACK_DATA,eID,this.nid,pckt));
}
//===============================================================================
//13 esfand
/**
 * Records a newly discovered neighbour: logs it, bumps the counter and prints
 * the current neighbour list. Currently only referenced from commented-out
 * code in RcvSourcePacket().
 */
private void addNeighbour(Long NghbrID){
//13 esfand log file
try{
FileWriter fw=new FileWriter(file,true);
BufferedWriter bw=new BufferedWriter(fw);
bw.write("addNeighbour at:"+ new Double(getTime()).toString());
bw.write("NeighbourNum = "+new Integer(NeighbourNum+1).toString()+" Neighbour: "
+NghbrID.toString());
bw.newLine();
bw.close();
fw.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
//END 13 esfand log file
NeighbourNum++;
Neighbours.add(NghbrID);
//show Neighbours if we add a Neighbour
System.out.print("Neighbours of "+nid+": ");
for (int j=0;j<Neighbours.size() ; j++)
System.out.print(Neighbours.elementAt(j)+",");
}
//===============================================================================
/** Installs a routing-table entry for the given source/destination pair. */
protected void addRoute(long src_nid_, long dst_nid_, int timeout_){
int type = 0;
RTKey key = new RTKey(src_nid_, dst_nid_, timeout_);
RTEntry entry = new RTEntry(new drcl.data.BitSet(new int[]{0}));
setRoutePort.sendReceive(new RTConfig.Message(type, key, entry, timeout_));
}
//===============================================================================
/**
 * Appends the most recently received packet's data, aggregation count, path
 * and counters to the result files under ./out/LAnode/.
 *
 * @param BaseSrc id of the node that originated the packet
 * @param PathNode node ids along the packet's path
 * @param PathInfo per-hop information paired with PathNode
 * @param time simulation time the packet was received
 */
void WriteRslts(long BaseSrc,Vector PathNode,Vector PathInfo,double time){
//13 esfand log file
try{
FileWriter fw=new FileWriter(file,true);
BufferedWriter bw=new BufferedWriter(fw);
bw.write("WriteRslts at time:"+ new Double(getTime()).toString());
bw.newLine();
bw.close(); fw.close();
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
//END 13 esfand log file
try{
//IO
// NOTE(review): this local deliberately(?) shadows the instance field
// 'file' used by the trace logging above -- consider renaming.
File file= new File("./out/LAnode/rslt.txt");
FileWriter fw= new FileWriter(file,true);
BufferedWriter bw= new BufferedWriter(fw);
// The packet just appended by RcvSourcePacket() is the last vector element.
int i = DataFromPkts.size()-1;
//write rslts in a file
bw.write("time of rcving this pkt is:"+ new Double(time).toString()); bw.newLine();
bw.write("data rcvd in Sink is: "+DataFromPkts.elementAt(i).toString()); bw.newLine();
bw.write("Num of agg pkt in it is: "+ CountOfAggPckt.elementAt(i).toString()); bw.newLine();
bw.write("Base Source of this pkt is: "+ BaseSrc); bw.newLine();
bw.write("Path of this pkt is: ");
for(int j=0; j<PathNode.size() ; j++){
bw.write(PathNode.elementAt(j).toString()+", ");
}
bw.write("0."); bw.newLine();
bw.write("info of this Path is: ");
for(int k=0; k<PathInfo.size() ; k++){
bw.write(PathInfo.elementAt(k).toString()+", ");
}
bw.newLine(); bw.newLine(); bw.newLine();
bw.close(); fw.close();
//27 bahman
File file1= new File("./out/LAnode/pckts.txt");
FileWriter fw1= new FileWriter(file1,true);
BufferedWriter bw1 = new BufferedWriter(fw1);
bw1.write("with agg: "+ new Integer(totalINpackets).toString() + " ");
bw1.write("without agg: "+ new Integer(totalVirtualPackets).toString()+ " ");
bw1.write("at time:"+ new Double(time).toString());
bw1.newLine(); bw1.newLine();
bw1.close(); fw1.close();
//end 27 bahman
//31 farv
/* File file2 = new File("./out/LAnode/total.txt");
FileWriter fw2= new FileWriter(file2,true);
BufferedWriter bw2 = new BufferedWriter(fw2);
bw2.write(new Integer(totalINpackets).toString() + ",");
bw2.close(); fw2.close();
File file3= new File("./out/
LAnode/Virtual.txt");
FileWriter fw3= new FileWriter(file3,true);
BufferedWriter bw3 = new BufferedWriter(fw3);
bw3.write(new Integer(totalVirtualPackets).toString()+ ",");
bw3.close(); fw3.close();*/
//end 31 farv
}
catch(IOException ioe){
System.out.println("an IOE is happend:"+ioe);
}
return;
}
//===============================================================================
/** Setter used by simulation scripts to tell this sink how many nodes exist. */
public void setNodeNumber(int nn_){
this.nn_ = nn_;
}
//===============================================================================
//===============================================================================
/** @return total packets actually received (each aggregated packet counts once) */
public double getTotalPkts(){
return (double)totalINpackets;
}
/** @return theoretical packet count had no aggregation taken place */
public double getVirtualPkts(){
return (double)totalVirtualPackets;
}
//===============================================================================
} |
Java | public class ParallelSets {
public static final int MAJOR_VERSION = 2;
public static final int MINOR_VERSION = 1;
// Human-readable "major.minor" version string derived from the two constants.
public static final String VERSION = MAJOR_VERSION+"."+MINOR_VERSION;
public static final String PROGRAMNAME = "Parallel Sets";
public static final String WEBSITE = "http://eagereyes.org/parsets/";
/** If true, the program is run installed by a user, and needs to act like that. That includes accessing
 * the installed version of the database, showing a crash reporter dialog when the program crashes, etc.
 * If false, it's the development version using its local database.
 *
 * There are currently two properties that can be set to "true" on the commandline to switch
 * this to true: <tt>parsets.use_installed_db</tt> and <tt>parsets.installed</tt>. The former
 * will be deprecated eventually.
 */
protected static boolean installed = false;
// Evaluated once at class-load time from the two supported system properties.
static {
installed = System.getProperty("parsets.use_installed_db", "false").equalsIgnoreCase("true") ||
System.getProperty("parsets.installed", "false").equalsIgnoreCase("true");
}
/**
 * Entry point: with no arguments, starts the GUI on the event-dispatch
 * thread; with arguments, runs a headless batch conversion in development
 * mode (installed is forced to false).
 *
 * @param args files for batch conversion, or empty for GUI mode
 */
public static void main(String[] args) {
AbstractOS.determineOS();
if (args == null || args.length == 0)
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
new MainWindow();
}
});
else {
installed = false;
PSLogging.init(null, Level.ERROR);
BatchConvert.batchConvert(args);
}
}
/** @return whether the app runs as an end-user installation (vs. dev mode) */
public static boolean isInstalled() {
return installed;
}
} |
Java | public class ResultFileWriterTest {
/**
 * The problem used for testing; rebuilt before every test in setUp().
 */
private Problem problem;
/**
 * A feasible solution.
 */
private Solution solution1;
/**
 * Another feasible solution.
 */
private Solution solution2;
/**
 * A solution violating its constraints.
 */
private Solution solution3;
/**
 * Creates the problem used for testing along with three solutions: two
 * feasible (solution1, solution2) and one violating its constraint
 * (solution3).
 */
@Before
public void setUp() {
    problem = new AbstractProblem(3, 2, 1) {

        @Override
        public void evaluate(Solution solution) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Solution newSolution() {
            Solution solution = new Solution(3, 2, 1);
            solution.setVariable(0, new RealVariable(0.0, 1.0));
            solution.setVariable(1, new BinaryVariable(5));
            solution.setVariable(2, new Permutation(3));
            return solution;
        }

    };

    solution1 = problem.newSolution();
    ((RealVariable)solution1.getVariable(0)).setValue(0.0);
    ((BinaryVariable)solution1.getVariable(1)).set(2, true);
    ((Permutation)solution1.getVariable(2)).swap(0, 2);
    solution1.setObjectives(new double[] { 0.0, 1.0 });

    // BUGFIX: these mutations previously targeted solution1 (copy-paste
    // error), leaving solution2's variables at their defaults and silently
    // re-modifying solution1. They now configure solution2.
    solution2 = problem.newSolution();
    ((RealVariable)solution2.getVariable(0)).setValue(1.0);
    ((BinaryVariable)solution2.getVariable(1)).set(1, true);
    ((Permutation)solution2.getVariable(2)).swap(0, 1);
    solution2.setObjectives(new double[] { 1.0, 0.0 });

    // BUGFIX: likewise, these now configure solution3 instead of solution1.
    solution3 = problem.newSolution();
    ((RealVariable)solution3.getVariable(0)).setValue(0.5);
    ((BinaryVariable)solution3.getVariable(1)).set(1, true);
    ((Permutation)solution3.getVariable(2)).swap(1, 2);
    solution3.setObjectives(new double[] { 0.5, 0.5 });
    solution3.setConstraints(new double[] { -1.0 });
}
/**
 * Removes references to shared objects so they can be garbage collected.
 */
@After
public void tearDown() {
problem = null;
solution1 = null;
solution2 = null;
solution3 = null;
}
/**
 * Tests if special characters are escaped correctly when writing property
 * files.
 *
 * @throws IOException should not occur
 */
@Test
public void testSpecialCharactersInProperties() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
Properties properties = new Properties();
// Key and value both exercise every character class the escaping must handle.
properties.setProperty("\"'!@#$=:%^&*()\\\r\n//\t ", "\"'!@#$=:%^&*()\\\r\n//\t ");
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
// Round trip: what was written must come back unchanged.
Assert.assertEquals(properties, reader.next().getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
 * Tests if {@code null} properties are written correctly.
 *
 * @throws IOException should not occur
 */
@Test
public void testNullProperties() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
Properties properties = new Properties();
try {
writer = new ResultFileWriter(problem, file);
// Writing null properties must be read back as an empty property set.
writer.append(new ResultEntry(population, (TypedProperties)null));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
Assert.assertEquals(properties, reader.next().getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
 * Tests if empty properties are written correctly.
 *
 * @throws IOException should not occur
 */
@Test
public void testEmptyProperties() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
Properties properties = new Properties();
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
Assert.assertEquals(properties, reader.next().getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
 * Tests if the population and properties are written correctly.
 *
 * @throws IOException should not occur
 */
@Test
public void testNormal() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
population.add(solution1);
population.add(solution2);
Properties properties = new Properties();
properties.setProperty("foo", "bar");
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
ResultEntry entry = reader.next();
TestUtils.assertEquals(population, entry.getPopulation());
Assert.assertEquals(properties, entry.getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
 * Tests if the population and properties are written correctly when
 * writing decision variables is disabled.
 *
 * @throws IOException should not occur
 */
@Test
public void testNoVariables() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
population.add(solution1);
population.add(solution2);
Properties properties = new Properties();
properties.setProperty("foo", "bar");
try {
// Third constructor argument disables writing decision variables.
writer = new ResultFileWriter(problem, file, false);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
// Only objectives survive the round trip, so the expected population is
// rebuilt from objectives alone before comparing.
population.clear();
population.add(new Solution(solution1.getObjectives()));
population.add(new Solution(solution2.getObjectives()));
try {
reader = new ResultFileReader(problem, file);
ResultEntry entry = reader.next();
TestUtils.assertEquals(population, entry.getPopulation());
Assert.assertEquals(properties, entry.getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
 * Tests if constraint violating solutions are not written, and that
 * empty populations are written correctly.
 *
 * @throws IOException should not occur
 */
@Test
public void testConstrainedSolution() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
// solution3 violates its constraint, so the writer is expected to drop it.
population.add(solution3);
Properties properties = new Properties();
properties.setProperty("foo", "bar");
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, properties));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
ResultEntry entry = reader.next();
Assert.assertEquals(0, entry.getPopulation().size());
Assert.assertEquals(properties, entry.getProperties());
} finally {
if (reader != null) {
reader.close();
}
}
}
/**
 * Tests if result files with multiple entries are written correctly, and
 * that writing can be resumed correctly.
 *
 * @throws IOException should not occur
 */
@Test
public void testResume() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
population.add(solution1);
population.add(solution2);
Properties properties = new Properties();
properties.setProperty("foo", "bar");
try {
writer = new ResultFileWriter(problem, file);
Assert.assertEquals(0, writer.getNumberOfEntries());
writer.append(new ResultEntry(population, properties));
writer.append(new ResultEntry(population, properties));
Assert.assertEquals(2, writer.getNumberOfEntries());
} finally {
if (writer != null) {
writer.close();
}
}
try {
// Reopening the same file must pick up the two existing entries and
// append after them rather than truncating.
writer = new ResultFileWriter(problem, file);
Assert.assertEquals(2, writer.getNumberOfEntries());
writer.append(new ResultEntry(population, properties));
Assert.assertEquals(3, writer.getNumberOfEntries());
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
ResultEntry entry = null;
Assert.assertTrue(reader.hasNext());
entry = reader.next();
TestUtils.assertEquals(population, entry.getPopulation());
Assert.assertEquals(properties, entry.getProperties());
Assert.assertTrue(reader.hasNext());
entry = reader.next();
TestUtils.assertEquals(population, entry.getPopulation());
Assert.assertEquals(properties, entry.getProperties());
Assert.assertTrue(reader.hasNext());
entry = reader.next();
TestUtils.assertEquals(population, entry.getPopulation());
Assert.assertEquals(properties, entry.getProperties());
Assert.assertFalse(reader.hasNext());
} finally {
if (reader != null) {
reader.close();
}
}
}
// Verifies that a variable type the writer cannot serialize does not break
// the round trip: objectives and the supported variable must still survive.
@Test
public void testUnsupportedDecisionVariable() throws IOException {
File file = TestUtils.createTempFile();
// Anonymous Variable subclass with no serialization support.
final Variable variable = new Variable() {
private static final long serialVersionUID = -54413529004858950L;
@Override
public Variable copy() {
return this;
}
};
problem = new AbstractProblem(2, 2, 1) {
@Override
public void evaluate(Solution solution) {
throw new UnsupportedOperationException();
}
@Override
public Solution newSolution() {
Solution solution = new Solution(2, 2, 1);
solution.setVariable(0, new RealVariable(0.0, 1.0));
solution.setVariable(1, variable);
return solution;
}
};
ResultFileWriter writer = null;
ResultFileReader reader = null;
NondominatedPopulation population = new NondominatedPopulation();
Solution solution = problem.newSolution();
((RealVariable)solution.getVariable(0)).setValue(0.5);
solution.setObjectives(new double[] { 0.0, 1.0 });
population.add(solution);
try {
writer = new ResultFileWriter(problem, file);
writer.append(new ResultEntry(population, (Properties)null));
} finally {
if (writer != null) {
writer.close();
}
}
try {
reader = new ResultFileReader(problem, file);
ResultEntry entry = reader.next();
Assert.assertEquals(1, entry.getPopulation().size());
Assert.assertArrayEquals(solution.getObjectives(),
entry.getPopulation().get(0).getObjectives(), Settings.EPS);
// Only the supported variable is checked; the unsupported one is
// presumably written as a placeholder -- see testEncode below.
Assert.assertEquals(solution.getVariable(0),
entry.getPopulation().get(0).getVariable(0));
} finally {
if (reader != null) {
reader.close();
}
}
}
// Checks the textual encoding of each supported variable type, and that no
// encoding ever contains whitespace (which would corrupt the file format).
@Test
public void testEncode() throws IOException {
File file = TestUtils.createTempFile();
ResultFileWriter writer = null;
try {
writer = new ResultFileWriter(problem, file);
RealVariable rv = new RealVariable(0.5, 0.0, 1.0);
Assert.assertEquals("0.5", writer.encode(rv));
Assert.assertFalse(writer.encode(rv).matches(".*\\s.*"));
BinaryVariable bv = new BinaryVariable(5);
bv.set(2, true);
Assert.assertEquals("00100", writer.encode(bv));
Assert.assertFalse(writer.encode(bv).matches(".*\\s.*"));
Permutation p = new Permutation(5);
Assert.assertEquals("0,1,2,3,4", writer.encode(p));
Assert.assertFalse(writer.encode(p).matches(".*\\s.*"));
Grammar g = new Grammar(5);
//Assert.assertEquals("-", writer.encode(g));
Assert.assertFalse(writer.encode(g).matches(".*\\s.*"));
} finally {
if (writer != null) {
writer.close();
}
}
}
} |
Java | public class CreateUserHandler implements Handler<RoutingContext> {
// BUGFIX: the logger was created for GetServicesHandler.class (copy-paste),
// so this handler's messages appeared under the wrong log category.
public static final Logger LOG = LoggerFactory.getLogger(CreateUserHandler.class);

/** Application service used to create and fetch users; assigned once. */
private final UserApplication userApplication;

/**
 * @param userApplication application service this handler delegates to
 */
public CreateUserHandler(UserApplication userApplication) {
    this.userApplication = userApplication;
}

/**
 * Deserializes the request body into a {@code User}, creates it, reloads the
 * persisted entity by id, and replies with the created user as JSON (200).
 * Failures at either step are delegated to {@code Http.handleFailure}.
 */
@Override
public void handle(RoutingContext context) {
    var params = context.getBodyAsJson();
    var user = params.mapTo(User.class);
    LOG.info("Creating user {}", user.getName());
    userApplication.createUser(user)
        .onFailure(Http.handleFailure(context, "Could not create the user"))
        .compose(userId -> userApplication.getUserById(userId))
        .onFailure(Http.handleFailure(context, "Could not retrieve the created user"))
        .onSuccess(created -> {
            LOG.info("User created: {}", created);
            context.response()
                .putHeader(Http.CONTENT_TYPE, Http.APPLICATION_JSON)
                .setStatusCode(200)
                .end(created.toJson().toBuffer());
        });
}
} |
Java | public class TestUtils {
/**
 * Returns the first row of the given table as ContentValues, asserting that
 * at least one row exists.
 *
 * @param database the database to query
 * @param table the table whose first row is wanted
 * @return the first row converted to ContentValues
 */
public static ContentValues getFirstFrom(SupportSQLiteDatabase database, String table) {
Cursor cursor = database.query("SELECT * FROM " + table);
assertNotNull(cursor);
assertTrue(cursor.moveToNext());
ContentValues result = new ContentValues();
DatabaseUtils.cursorRowToContentValues(cursor, result);
cursor.close();
return result;
}
/**
 * Asserts that an epoch-millis column was converted to an ISO offset
 * date-time string. An expected value of -1 is the sentinel for "column
 * should be null". The field is removed from both maps so the remaining
 * values can later be compared with assertContent().
 */
public static void assertOffsetDateTime(String field, ContentValues expected, ContentValues actual) {
String actualRaw = actual.getAsString(field);
long expectedRaw = expected.getAsLong(field);
if (expectedRaw == -1) {
assertNull(actualRaw);
} else {
assertNotNull(actualRaw);
OffsetDateTime actualDate = OffsetDateTime.parse(actualRaw, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
// Conversion apparently uses the system default zone; comparison is done
// on instants so the offset itself does not matter.
OffsetDateTime expectedDate = OffsetDateTime.ofInstant(Instant.ofEpochMilli(expectedRaw), ZoneId.systemDefault());
assertEquals(expectedDate.toInstant(), actualDate.toInstant());
}
expected.remove(field);
actual.remove(field);
}
/**
 * Same contract as assertOffsetDateTime(), but for columns stored as ISO
 * instants rather than offset date-times. -1 again means "expected null";
 * the field is removed from both maps afterwards.
 */
public static void assertInstant(String field, ContentValues expected, ContentValues actual) {
String actualRaw = actual.getAsString(field);
long expectedRaw = expected.getAsLong(field);
if (expectedRaw == -1) {
assertNull(actualRaw);
} else {
assertNotNull(actualRaw);
Instant actualDate = Instant.parse(actualRaw);
Instant expectedDate = Instant.ofEpochMilli(expectedRaw);
assertEquals(expectedDate, actualDate);
}
expected.remove(field);
actual.remove(field);
}
/**
 * We assume the expected values have the correct type.
 *
 * @param expected The expected value.
 * @param actual The new values.
 */
@SuppressWarnings("IfStatementWithTooManyBranches")
public static void assertContent(ContentValues expected, ContentValues actual) {
assertEquals(expected.size(), actual.size());
// Dispatch on the expected value's runtime type so numeric columns are
// compared by value via the typed getters rather than by raw Object.
for (Map.Entry<String, Object> entry : expected.valueSet()) {
if (entry.getValue() instanceof Integer) {
assertEquals(entry.getValue(), actual.getAsInteger(entry.getKey()));
} else if (entry.getValue() instanceof Long) {
assertEquals(entry.getValue(), actual.getAsLong(entry.getKey()));
} else if (entry.getValue() instanceof Boolean) {
assertEquals(entry.getValue(), actual.getAsBoolean(entry.getKey()));
} else if (entry.getValue() instanceof Double) {
assertEquals(entry.getValue(), actual.getAsDouble(entry.getKey()));
} else if (entry.getValue() instanceof Float) {
assertEquals(entry.getValue(), actual.getAsFloat(entry.getKey()));
} else {
assertEquals(entry.getValue(), actual.get(entry.getKey()));
}
}
}
} |
Java | @SuppressWarnings("serial")
public class General extends Principal {
/**
 * Ad-hoc entry point: converts a sample rtl_sdr capture to a cfile.
 * The hard-coded path suggests this is developer scratch code.
 */
public static void main(String[] args) {
try {
binToCfile(new File("/root/hacking/gsm_dump/test"));
} catch (IOException ex) {
Logger.getLogger(General.class.getName()).log(Level.SEVERE, null, ex);
}
}
/**
 * Base regex for a SDCCH or SACCH frame, e.g.:
 * C1 1218124 1881368: 011010011101111.......10010000110101011
 * (payload must be exactly 114 bits)
 */
public static Pattern RGX_FRAME_CCCH
= Pattern.compile("([CPS][10]) ([0-9]*) ([0-9]*): ([10]{114})");
/**
 * Like {@link #RGX_FRAME_CCCH} but accepts a payload of any length,
 * i.e. it also matches malformed/truncated frames.
 */
public static Pattern RGX_MALFORMED_FRAME_CCCH
= Pattern.compile("([CPS][10]) ([0-9]*) ([0-9]*): ([10]*)");
/**
 * Base regex for a "cannot decode" error, e.g.:
 * gsmstack.c:301 cannot decode fnr=0x12965a (1218138) ts=2
 */
public static Pattern RGX_CANNOT_DEC
= Pattern.compile("gsmstack.c:[0-9]* cannot decode fnr=0x[0-9a-fA"
+ "-F]* \\(([0-9]*)\\) ts=[0-9]");
/**
 * Base regex for a "parity error", e.g.:
 * cch.c:419 error: sacch: parity error (-1 fn=1218138)
 */
public static Pattern RGX_PARITY
= Pattern.compile("cch.c:[0-9]* error: sacch: parity error \\(-1 fn=([0-9]*)\\)");
/**
 * Base regex for a "WRN" error, e.g.: WRN: errors=18 fn=1218189
 */
public static Pattern RGX_WRN_ERR
= Pattern.compile("(.*)(WRN: errors=[0-9]* fn=[0-9]*)$");
/**
 * Base regex for a "conv_decode" error, e.g.: sch.c:260 ERR: conv_decode 1
 */
public static Pattern RGX_CONVDEC_ERR
= Pattern.compile("(.*)(sch.c:[0-9]* ERR: conv_decode [0-9]*)$");
/**
 * Base regex for a decoded frame, e.g.:
 * 1218142 2: 03 42 0d 06 0d 00 6d .... d9 39 45 b9 c5 b1 55
 */
public static Pattern RGX_FRAME_DEC
= Pattern.compile("[0-9]* [0-9]: [0-9a-fA-F ]*");
/**
 * Convert a bin file (capture with rtl_sdr command) to a readable cfile for
 * airprobe (gnuradio). Each pair of unsigned bytes becomes a pair of floats
 * in [-0.9921875, 1.0] written little-endian to "&lt;input&gt;.cfile".
 * A trailing odd byte (no I/Q partner) is dropped, as before.
 *
 * @param binfile the binary capture file
 * @throws java.io.IOException I/O error
 */
public static void binToCfile(File binfile) throws IOException {
    // FIX: the input stream was never closed and the output stream could be
    // closed twice (once inside the loop, once after); both streams are now
    // released exactly once in a finally block. Unused locals removed.
    InputStream in = new BufferedInputStream(new FileInputStream(binfile));
    try {
        LittleEndianOutputStream dataOut =
                new LittleEndianOutputStream(new BufferedOutputStream(new FileOutputStream(binfile.getAbsoluteFile() + ".cfile")));
        try {
            int byte1;
            int byte2;
            while ((byte1 = in.read()) != -1) {
                if ((byte2 = in.read()) == -1) {
                    // odd trailing byte: cannot form an I/Q pair, drop it
                    break;
                }
                // Recenter 0..255 around zero and scale into [-1, 1).
                dataOut.writeFloat(((float) byte1 - 127) / 128);
                dataOut.writeFloat(((float) byte2 - 127) / 128);
            }
        } finally {
            dataOut.close();
        }
    } finally {
        in.close();
    }
}
/**
 * Determine if a file is a cfile.
 *
 * NOTE(review): the content check is still unimplemented (TODO below), so
 * this currently returns {@code true} for any readable file and
 * {@code false} only when the file cannot be opened -- unchanged behavior.
 *
 * @return true if the file is a cfile, false otherwise
 * @param file the file to test
 */
public static boolean isCfile(File file) {
    InputStream buffy;
    try {
        buffy = new BufferedInputStream(new FileInputStream(file));
    } catch (FileNotFoundException ex) {
        return false;
    }
    // FIX: the stream used to be opened and never closed (descriptor leak).
    try {
        // test only the first 20 bytes
        for (int i = 0; i < 20; i++) {
            // TODO : find the best way to make this test
        }
    } finally {
        try {
            buffy.close();
        } catch (IOException ignored) {
            // closing a stream we only opened for probing; nothing to do
        }
    }
    return true;
}
/**
 * Build the rtl_sdr command used to sniff a GSM tower. The capture is
 * written to "&lt;frequency&gt;_AIRPROBE_OUTPUT_BIN" inside {@code dir}.
 *
 * @param dir        the current working dir for the capture
 * @param frequency  tuning frequency, passed to -f
 * @param gain       tuner gain, passed to -g
 * @param samplerate sample rate, passed to -s
 * @return the configured (not yet started) ProcessBuilder
 */
public static ProcessBuilder rtlSdrSnif(String dir, String frequency, String gain, String samplerate) {
    // rtl_sdr's usage is "rtl_sdr [options] filename": the original placed the
    // output file before "-s", which only works where getopt permutes argv
    // (glibc); put every option before the filename for portability
    ProcessBuilder pb = new ProcessBuilder("rtl_sdr",
            "-f", frequency,
            "-g", gain,
            "-s", samplerate,
            frequency + "_AIRPROBE_OUTPUT_BIN");
    pb.directory(new File(dir));
    return pb;
}
/**
 * Read a text file line by line.
 *
 * @param file path of the file to read
 * @return the lines of the file, in order; empty (or partial) if an I/O
 *         error occurs — the error is reported on stdout, not thrown
 */
public static ArrayList<String> readFile(String file) {
    ArrayList<String> fichier = new ArrayList<String>();
    try {
        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
        try {
            String ligne;
            while ((ligne = br.readLine()) != null) {
                fichier.add(ligne);
            }
        } finally {
            // the original leaked the reader when readLine() failed
            br.close();
        }
    } catch (Exception e) {
        // user has to give this output if he gets problem from I/O
        // (the original printed the exception twice: e.toString() + e)
        System.out.println(e.toString());
    }
    return fichier;
}
/**
 * Read a text file line by line.
 *
 * @param file the file to read
 * @return the lines of the file, in order; empty (or partial) if an I/O
 *         error occurs — the error is reported on stdout, not thrown
 */
public static ArrayList<String> readFile(File file) {
    ArrayList<String> fichier = new ArrayList<String>();
    try {
        BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
        try {
            String ligne;
            while ((ligne = br.readLine()) != null) {
                fichier.add(ligne);
            }
        } finally {
            // the original leaked the reader when readLine() failed
            br.close();
        }
    } catch (Exception e) {
        // user has to give this output if he gets problem from I/O
        // (the original printed the exception twice: e.toString() + e)
        System.out.println(e.toString());
    }
    return fichier;
}
/**
 * Write a file, one array entry per line.
 *
 * @param array    the lines to write
 * @param fileName absolute path and output file name
 */
public static void writeFile(ArrayList<String> array, String fileName) {
    try {
        PrintWriter outputFile = new PrintWriter(new BufferedWriter(new FileWriter(fileName)));
        try {
            for (String line : array) {
                outputFile.println(line);
            }
        } finally {
            // the original leaked the writer if printing failed midway
            outputFile.close();
        }
    } catch (Exception e) {
        // user has to give this output if he gets problem from I/O
        System.out.println(e.toString());
    }
}
/**
 * Write a list of string arrays to a file.
 *
 * @param array    the rows to write
 * @param fileName absolute path and output file name
 * @param param    1 : write each row on one line, cells joined by " : ";
 *                 2 : write one cell per line (skipping null/empty cells),
 *                 rows separated by a dashed line
 */
public static void writeFileWithArray(ArrayList<String[]> array, String fileName, int param) {
    try {
        PrintWriter outputFile = new PrintWriter(new BufferedWriter(new FileWriter(fileName)));
        try {
            if (param == 1) {
                for (String[] row : array) {
                    for (int j = 0; j < row.length; j++) {
                        if (j == row.length - 1) {
                            outputFile.println(row[j]);
                        } else {
                            outputFile.print(row[j] + " : ");
                        }
                    }
                }
            } else if (param == 2) {
                for (String[] row : array) {
                    for (String cell : row) {
                        // don't write empty or null value
                        if (!(cell == null || cell.equals(""))) {
                            outputFile.println(cell);
                        }
                    }
                    outputFile.println("-------------------");
                }
            }
        } finally {
            // the original leaked the writer if printing failed midway
            outputFile.close();
        }
    } catch (Exception e) {
        // user has to give this output if he gets problem from I/O
        System.out.println(e.toString());
    }
}
/**
 * Detect if a cfile has already been processed, i.e. whether both the
 * broadcast output ("_&lt;BTSCONF&gt;") and a dedicated-channel output
 * ("_&lt;n&gt;S") already exist next to the cfile.
 *
 * Side effect: on success the static {@code timeslot} field is set to the
 * timeslot whose output file was found.
 * NOTE(review): the loop only checks timeslots 0-6; GSM timeslot 7 is never
 * tested — confirm this is intentional.
 *
 * @param cfile the cfile file
 * @return true if it has already been processed, false if not
 */
public static boolean alreadyDone(File cfile) {
    if (new File(cfile.getAbsolutePath() + "_" + Configuration.BTSCONF).exists()) {
        for (int i = 0; i < 7; i++) {
            if (new File(cfile.getAbsolutePath() + "_" + Integer.toString(i) + "S").exists()) {
                timeslot = Integer.toString(i);
                return true;
            }
        }
    }
    return false;
}
/**
 * Clean an airprobe output (S configuration): put every trailing parity
 * error on its own line, drop pure "conv_decode"/"WRN" error lines (or strip
 * them from the end of mixed lines), then re-join frame bit strings that the
 * decoder split across two lines.
 *
 * NOTE(review): the list is edited in place while being indexed; the
 * {@code liste++}/{@code liste--} adjustments after add/remove are
 * deliberate and order-sensitive — do not reorder these passes.
 *
 * @param aTraite the "file" to clean (into an ArrayList, one line per entry)
 * @return the same list instance, cleaned
 */
public static ArrayList<String> cleanAirprobeOutput(ArrayList<String> aTraite) {
    ArrayList<String> aTraiter = aTraite;
    for (int liste = 0; liste < aTraiter.size(); liste++) {
        String i = aTraiter.get(liste);
        // if :
        if (RGX_FRAME_DEC.matcher(i).matches()) {
            // If it's a decoded frame: do nothing
        }
        // We place PARITY ERROR on an unique line (if not)
        if (!(RGX_FRAME_CCCH.matcher(i).matches())) {
            // Pattern.toString() embeds RGX_PARITY as a sub-pattern; this is
            // recompiled on every line (works, but is the hot spot here)
            Pattern pat = Pattern.compile("(.*)(" + RGX_PARITY + ")");
            // if it's a malformed frame
            Matcher recup_err = pat.matcher(i);
            if (recup_err.find() && !(recup_err.group(1).equals(""))) {
                //int indice = aTraiter.indexOf(aTraiter.get(liste));
                // keep the prefix, then insert the error message as its own
                // next line (and skip over it with ++liste)
                aTraiter.set(liste, i.substring(0, i.length()
                        - recup_err.group(2).length()));
                aTraiter.add(++liste, recup_err.group(2));
            }
        }
        // Delete "conv_decode" error
        if (RGX_CONVDEC_ERR.matcher(i).matches()) {
            Matcher recup_err = RGX_CONVDEC_ERR.matcher(i);
            if (recup_err.find()) {
                if (i.length() - recup_err.group(2).length() == 0) { // if the line is just a conv_decode error
                    aTraiter.remove(liste);
                    liste--; // avoid to jump a line
                } else { // if the conv_decode error is not an unique line (something before)
                    aTraiter.set(liste, i.substring(0, i.length()
                            - recup_err.group(2).length()));
                }
            }
        }
        // Delete "WRN" error
        if (RGX_WRN_ERR.matcher(i).matches()) {
            Matcher recup_err = RGX_WRN_ERR.matcher(i);
            if (recup_err.find()) {
                if (i.length() - recup_err.group(2).length() == 0) {
                    aTraiter.remove(liste);
                    liste--; // avoid to jump a line
                } else {
                    aTraiter.set(liste, i.substring(0, i.length()
                            - recup_err.group(2).length()));
                }
            }
        }
    }
    // We correct this kind of problem :
    /*
     * C0 1228670 1897390: 01101001110111110010101100001
     * cch.c:419 error: sacch: parity error (-1 fn=1228671)
     * gsmstack.c:301 cannot decode fnr=0x12bf7f (1228671) ts=2
     * cch.c:419 error: sacch: parity error (-1 fn=1228699)
     * gsmstack.c:301 cannot decode fnr=0x12bf9b (1228699) ts=2
     * 1110010110111011110000011110001001011101100110000100000101000000111010010000110101111
     *
     */
    for (int liste = 0; liste < aTraiter.size(); liste++) {
        String i = aTraiter.get(liste);
        // frame length compared to frame number (will not be the same
        // if the fn is 111 or 12255 for example)
        int framelenght; // NOTE(review): declared but never used
        // a short line matching no known error/frame shape is assumed to be
        // the first half of a split frame
        if (i.length() < 133
                && !(RGX_WRN_ERR.matcher(i).matches()
                || RGX_CONVDEC_ERR.matcher(i).matches()
                || RGX_FRAME_DEC.matcher(i).matches()
                || RGX_PARITY.matcher(i).matches()
                || RGX_CANNOT_DEC.matcher(i).matches())) {
            System.out.println(i + " capté");
            // search at most 20 lines ahead for the matching second half
            for (int subList = liste; liste + 20 < aTraiter.size() && subList < liste + 20; subList++) {
                if (!(RGX_WRN_ERR.matcher(aTraiter.get(subList)).matches()
                        || RGX_CONVDEC_ERR.matcher(aTraiter.get(subList)).matches()
                        || RGX_FRAME_DEC.matcher(aTraiter.get(subList)).matches()
                        || RGX_PARITY.matcher(aTraiter.get(subList)).matches()
                        || RGX_CANNOT_DEC.matcher(aTraiter.get(subList)).matches())) {
                    if (RGX_FRAME_CCCH.matcher(i + aTraiter.get(subList)).matches()) {
                        // We concatenate frame correctly
                        aTraiter.set(liste, i + aTraiter.get(subList));
                        aTraiter.remove(subList);
                    }
                }
            }
        }
    }
    // TODO : place parity error (for encrypted frame) just after the frame
    return aTraiter;
}
/**
 * Get from a cfile the airprobe output for the B and S configurations: run
 * airprobe's go.sh to decode the broadcast channel, locate an Immediate
 * Assignment in it, then decode the dedicated channel it points to.
 *
 * Side effects: writes "&lt;cfile&gt;_&lt;BTSCONF&gt;" and
 * "&lt;cfile&gt;_&lt;ts&gt;S" next to the input file and fills the static
 * broadcastChannelTab / dedicatedChannelTab / timeslot fields.
 *
 * @param file the cfile to process
 * @throws Exception when no Immediate Assignment is found in the capture
 */
public static void getAirprobeOutput(File file) throws Exception {
    // check if principal output exist
    //File f = new File(fichier + "_0B"); TODO : integrate the detection but ask the user to confirm skipping the outputs
    // TODO : use the alreadyDone method (gui?)
    // stops the iteration once the tower's dedicated channel has been found
    boolean finish = false;
    //if(!(f.exists() && !f.isDirectory())) {
    // Extract the broadcast channel with airprobe's go.sh
    ProcessBuilder pb = new ProcessBuilder("sh", "go.sh", file.getAbsolutePath(), Configuration.DEC_RATE, Configuration.BTSCONF);
    pb.directory(new File(Configuration.gsmReceivePath + "/src/python/"));
    pb.redirectOutput(new File(file.getAbsolutePath() + "_" + Configuration.BTSCONF));
    // avoid infinite time out with big cfile
    pb.redirectErrorStream(true);
    Process p = pb.start();
    p.waitFor();
    p.destroy();
    p.destroyForcibly();
    // }
    // We get broadcast channel
    broadcastChannelTab = Broadcast.lignesToTab(readFile(file.getAbsolutePath() + "_" + Configuration.BTSCONF));
    // Potential Immediate Assignment index
    ArrayList<Integer> imAs = Broadcast.extractImAs(broadcastChannelTab);
    if (imAs.isEmpty()) {
        throw new Exception("Sorry, don't find any Immediate Assignment on this cfile.\n"
                + START_LINE + "Please choose an other file, or sniff again a GSM tower.\n");
    }
    // else : if there is Immediate Assignment
    for (int i = 0; i < imAs.size() && finish == false; i++) {
        if ("1".equals(Broadcast.extractTsConf(broadcastChannelTab.get(imAs.get(i))).get(1))) {
            // if an immediate assignment redirects to a dedicated channel
            // pb.redirectOutput(new File(fichier.getAbsolutePath() + "0B"));
            timeslot = Broadcast.extractTsConf(broadcastChannelTab.get(imAs.get(i))).get(0);
            // decode the dedicated channel on that timeslot (S configuration)
            pb = new ProcessBuilder("sh", "go.sh",
                    file.getAbsolutePath(),
                    Configuration.DEC_RATE,
                    timeslot + "S"
            );
            pb.directory(new File(Configuration.gsmReceivePath + "/src/python/"));
            pb.redirectOutput(new File(file.getAbsolutePath() + "_" + timeslot + "S"));
            pb.redirectErrorStream(true);
            p = pb.start();
            p.waitFor();
            p.destroy();
            p.destroyForcibly();
            finish = true;
        }
    }
    dedicatedChannelTab = Dedicated.lignesToTab(readFile(file.getAbsolutePath() + "_" + timeslot + "S"));
    // debug line TODO : delete
    System.out.println("fichier " + file.getAbsolutePath() + "_" + timeslot + "S" + " traité");
}
/**
 * XOR two burst arrays (ind 0 with ind 0, ind 1 with ind 1, ...) and tag the
 * result with the plaintext / encrypted frame numbers.
 *
 * Resulting layout after the final reordering: bursts interleaved with their
 * A5/1 frame numbers — [b0, fn0, b1, fn1, b2, fn2, b3, fn3], plus index 8 =
 * plaintext fn and index 9 = encrypted fn.
 * NOTE(review): assumes endBursts has at least 8 entries (bursts at 0-3 and
 * their A5/1 frame numbers at 4-7) — confirm against the callers.
 *
 * @param beginBursts the array that contains bursts (plaintext side)
 * @param endBursts the second array that contains bursts (and a5/1 fn) [encrypted]
 * @param fn the plaintext frame number
 * @param fnEnc the encrypted frame number
 * @return all bursts (4 at maximum) xored, or an empty array when 3 or more
 *         bursts could not be xored
 */
public static String[] xorBursts(String[] beginBursts, String[] endBursts, String fn, String fnEnc) {
    String[] xoredBursts = new String[10];
    xoredBursts[8] = fn;
    xoredBursts[9] = fnEnc;
    StringBuilder oneXoredBurst;
    // number of bursts that cannot be xored
    int imposs = 0;
    for (int j = 0; j < 4; j++) {
        if (isABurst(beginBursts[j]) && isABurst(endBursts[j])) {
            oneXoredBurst = new StringBuilder();
            for (int i = 0; i < 114; i++) {
                // '0'/'1' chars xor to int 0/1, appended as "0"/"1"
                oneXoredBurst.append(beginBursts[j].charAt(i) ^ endBursts[j].charAt(i));
            }
            xoredBursts[j] = oneXoredBurst.toString();
        } else {
            imposs++;
            xoredBursts[j] = "Incorrect bursts from capture, can't xor them.";
        }
    }
    // with 3+ unusable bursts the frame is worthless for the cracking step
    if(imposs >= 3)
        return new String[0];
    // add a5/1 frame number from encrypted frame to simplify crack steps
    for(int i = 4; i < 8 ;i++) {
        if(isInteger(endBursts[i])) {
            xoredBursts[i] = "a5/1 burst fn = " + endBursts[i];
        }
    }
    // we reorganize the array to match bursts with a5/1 fn
    // TODO ; make something more algorithmic
    String temp2 = xoredBursts[1];
    String temp3 = xoredBursts[2];
    String temp4 = xoredBursts[3];
    xoredBursts[1] = xoredBursts[4];
    xoredBursts[2] = temp2;
    xoredBursts[3] = xoredBursts[5];
    xoredBursts[4] = temp3;
    xoredBursts[5] = xoredBursts[6];
    xoredBursts[6] = temp4;
    return xoredBursts;
}
/**
 * XOR two burst arrays position-wise (ind 0 with ind 0, ind 1 with ind 1,
 * ...) and return only the xored bit strings, dropping every pair that
 * could not be xored.
 *
 * @param beginBursts the array that contains bursts
 * @param endBursts   the second array that contains bursts
 * @return the successfully xored bursts (4 at maximum)
 */
public static String[] xorBursts(String[] beginBursts, String[] endBursts) {
    String[] raw = new String[4];
    for (int idx = 0; idx < 4; idx++) {
        if (!isABurst(beginBursts[idx]) || !isABurst(endBursts[idx])) {
            raw[idx] = "This burst is not correct, cannot xor it.";
            continue;
        }
        StringBuilder xored = new StringBuilder();
        for (int bit = 0; bit < 114; bit++) {
            // '0'/'1' chars xor to int 0/1, appended as "0"/"1"
            xored.append(beginBursts[idx].charAt(bit) ^ endBursts[idx].charAt(bit));
        }
        raw[idx] = xored.toString();
    }
    // keep only real xored bursts (exactly 114 chars); the placeholder
    // message above is shorter, so the length test filters it out
    ArrayList<String> good = new ArrayList<String>();
    for (String burst : raw) {
        if (burst.length() == 114) {
            good.add(burst);
        }
    }
    return good.toArray(new String[good.size()]);
}
/**
 * Check if the String is a burst or not: a burst is exactly 114 binary
 * ('0'/'1') characters.
 *
 * @param toTest the string to test (must not be null)
 * @return true if the String seems to be a burst, false otherwise
 */
public static boolean isABurst(String toTest) {
    // idiomatic: return the condition directly instead of if/else true/false;
    // the length check first keeps the regex off obviously-wrong inputs
    return toTest.length() == 114 && toTest.matches("[01]*");
}
/**
 * Return a binary String from a hexadecimal String.
 * NOTE(review): BigInteger drops leading zero bits, so the result is NOT
 * zero-padded to a multiple of 4 bits (e.g. "0f" yields "1111") — confirm
 * callers do not rely on fixed-width output.
 *
 * @param s hexadecimal String
 * @return binary String
 */
public static String hexToBin(String s) {
    return new BigInteger(s, 16).toString(2);
}
/**
 * toString(ArrayList&lt;String[]&gt;): concatenate the cells of each array,
 * ending every array with a newline.
 *
 * @param liste the frames, one String[] per frame
 * @return frames into an unique String ("\n" separator)
 */
public static String toStringALtabStr(ArrayList<String[]> liste) {
    // StringBuilder avoids the O(n^2) cost of repeated String concatenation
    StringBuilder listeString = new StringBuilder();
    for (String[] frame : liste) {
        for (String cell : frame) {
            listeString.append(cell);
        }
        listeString.append("\n");
    }
    return listeString.toString();
}
/**
 * ArrayList of String array to ArrayList of String: each String[] becomes
 * one String (its cells concatenated).
 *
 * @param liste the rows to flatten
 * @return one concatenated String per input array, in order
 */
public static ArrayList<String> toArraylistString(ArrayList<String[]> liste) {
    ArrayList<String> listeString = new ArrayList<String>();
    for (String[] frame : liste) {
        // BUG FIX: the original reused a single StringBuilder for every row,
        // so each entry also contained all previous rows; use a fresh builder
        StringBuilder temp = new StringBuilder();
        for (String cell : frame) {
            temp.append(cell);
        }
        listeString.add(temp.toString());
    }
    return listeString;
}
/**
 * Check if a String is a number (parses as a decimal int).
 *
 * @param s the string to test; null is reported as non-numeric
 * @return true if the string is an integer, false otherwise
 */
public static boolean isInteger(String s) {
    if (s == null) {
        // parseInt(null) would throw; answer directly
        return false;
    }
    try {
        Integer.parseInt(s);
        return true;
    } catch (NumberFormatException e) {
        return false;
    }
}
} |
Java | public class Pet extends NamedEntity {
/** Holds value of property birthDate. */
private Date birthDate;
/** Holds value of property typeId. */
private int typeId;
/** Holds value of property visits. */
private List visits = new ArrayList();
/** Holds value of property owner. */
private Owner owner;
/** Getter for property birthDate.
* @return Value of property birthDate.
*/
public Date getBirthDate() {
return this.birthDate;
}
/** Setter for property birthDate.
* @param birthDate New value of property birthDate.
*/
public void setBirthDate(Date birthDate) {
this.birthDate = birthDate;
}
/** Getter for property typeId.
* @return Value of property typeId.
*/
public int getTypeId() {
return this.typeId;
}
/** Setter for property typeId.
* @param type New value of property typeId.
*/
public void setTypeId(int typeId) {
this.typeId = typeId;
}
/** Getter for property visits.
* @return Value of property visits.
*/
public List getVisits() {
return this.visits;
}
/** Setter for property visits.
* @param visits New value of property visits.
*/
public void setVisits(List visits) {
this.visits = visits;
}
/** Getter for property owner.
* @return Value of property owner.
*/
public Owner getOwner() {
return this.owner;
}
/** Setter for property owner.
* @param owner New value of property owner.
*/
public void setOwner(Owner owner) {
this.owner = owner;
}
/** Method to add a visit to the List of visits.
* @param visit New visit to be added to the List of visits
*/
public void addVisit(Visit visit) {
this.visits.add(visit);
}
/** Method to copy properties from another <code>Pet</code>.
* @param pet Properties source
*/
public void copyPropertiesFrom(Pet pet) {
super.copyPropertiesFrom(pet);
setBirthDate(pet.getBirthDate());
setTypeId(pet.getTypeId());
setVisits(pet.getVisits());
setOwner(pet.getOwner());
}
} |
Java | public class WebglClippingPresenter implements Presenter {
@Override
public void dispatch(HTMLDivElement container) {
container.appendChild(((Attachable) GWT.create(WebglClipping.class)).asWidget());
}
} |
Java | public class IgnoreCacheTests extends AndroidTestCase {
private static final String test_key = "test-asd";
RxSnappyClient rxSnappyClient;
@Override
protected void setUp() throws Exception {
super.setUp();
RxSnappy.init(getContext());
rxSnappyClient = new RxSnappyClient();
}
@SmallTest
public void testDataHandlingWithoutCacheSupport() {
String key = "asdkey";
String key1 = RxSnappyUtils.generateKey(key, 1);
String key2 = RxSnappyUtils.generateKey(key, 2);
DummyData dummyData = DataGenerator.generateNewDummyData();
DummyData dummyData1 = DataGenerator.generateNewDummyData();
rxSnappyClient.setObject(key1, dummyData, true).blockingFirst();
int cnt = rxSnappyClient.countKeys(key).blockingFirst();
assertTrue(cnt == 1);
rxSnappyClient.setObject(key1, dummyData1, true).blockingFirst();
DummyData s = rxSnappyClient.getObject(key1, DummyData.class).blockingFirst();
assertTrue(s.equals(dummyData1));
cnt = rxSnappyClient.countKeys(key).blockingFirst();
assertTrue(cnt == 1);
boolean exists = rxSnappyClient.exists(key1).blockingFirst();
boolean notExists = rxSnappyClient.exists(key2).blockingFirst();
assertTrue(exists);
assertFalse(notExists);
rxSnappyClient.setObject(key2, dummyData, true).blockingFirst();
cnt = rxSnappyClient.countKeys(key).blockingFirst();
assertTrue(cnt == 2);
s = rxSnappyClient.getObject(key2, DummyData.class).blockingFirst();
assertTrue(s.equals(dummyData));
List<DummyData> datas =
rxSnappyClient.findKeys(key).flatMap(strings -> Observable.fromArray(strings))
.flatMap(value -> rxSnappyClient.getObject(value, DummyData.class)).toList().blockingGet();
assertTrue(datas.size() == 2);
rxSnappyClient.deleteCache(key).blockingFirst();
cnt = rxSnappyClient.countKeys(key).blockingFirst();
assertTrue(cnt == 0);
}
@Override
protected void tearDown() throws Exception {
super.tearDown();
RxSnappy.closeDatabase();
RxSnappy.destroyDatabase();
}
} |
Java | public class SystemConfigurationNotFoundException extends RuntimeException {
private static final long serialVersionUID = -5940797324238159301L;
public SystemConfigurationNotFoundException(SystemConfigurationManagerImpl.SystemConfigurationType type) {
super(String.format("No System Configuration with descriptor '%s' could be not found.", type.getDescriptor()));
}
} |
Java | @Repository
public class RechargingRepositoryImpl implements RechargingRepository {
private static final String SQL_INSERT = "insert into"
+ " t_recharging(tradeId,platform,uid,region,goods,amount,creationTime,status,channel)"
+ " values(?,?,?,?,?,?,?,?,?)";
private static final String SQL_ZUCKS_INSERT = "insert into t_zucks(zid,os,point,uid) values(?,?,?,?)";
@Resource
private JdbcTemplate jdbcTemplate;
@Override
public void save(final Recharging recharging) {
int r = jdbcTemplate.update(SQL_INSERT, new PreparedStatementSetter() {
@Override
public void setValues(PreparedStatement ps) throws SQLException {
int i = 1;
ps.setString(i++, recharging.getTradeId());
ps.setString(i++, recharging.getPlatform());
ps.setString(i++, recharging.getUid());
ps.setString(i++, recharging.getRegion());
ps.setString(i++, recharging.getGoods());
ps.setInt(i++, recharging.getAmount());
ps.setLong(i++, recharging.getCreationTime());
ps.setString(i++, recharging.getStatus());
ps.setString(i++, recharging.getChannel());
}
});
if (r < 1) {
throw new RepositoryException(RepositoryException.SIGNAL_INSERTING, "no inserted");
}
}
@Override
public void save(final Zucks zucks) {
int r = jdbcTemplate.update(SQL_ZUCKS_INSERT, new PreparedStatementSetter() {
@Override
public void setValues(PreparedStatement ps) throws SQLException {
int i = 1;
ps.setString(i++, zucks.getZid());
ps.setString(i++, zucks.getOs().name());
ps.setInt(i++, zucks.getPoint());
ps.setString(i++, zucks.getUid());
}
});
if (r < 1) {
throw new RepositoryException(RepositoryException.SIGNAL_INSERTING, "no inserted");
}
}
} |
Java | public class CoursesWithRequisitesGraph {
public int solve(int numberOfCourses, List<Integer> preReqsForCourse, List<Integer> coursesWithRequisites) {
Graph<Integer> g = new Graph<Integer>(numberOfCourses);
for (int i = 0; i < preReqsForCourse.size(); i++)
g.addEdge(preReqsForCourse.get(i) - 1, coursesWithRequisites.get(i) - 1);
return dfs(g) ? 1 : 0;
}
private boolean dfs(Graph<Integer> graph) {
boolean visited[] = new boolean[graph.getNumberOfVertices()];
for (int vertice : graph.vertices()) {
if (!visited[vertice]) {
Stack<Integer> stack = new Stack<>();
Set<Integer> hist = new HashSet<>();
stack.push(vertice);
hist.add(vertice);
visited[vertice] = true;
while (!stack.isEmpty()) {
int currentVertice = stack.pop();
for (int candidate : graph.adj(currentVertice)) {
if (hist.contains(candidate)) {
return false;
}
if (!visited[candidate]) {
visited[candidate] = true;
stack.push(candidate);
}
}
}
}
}
return true;
}
public static void main(String[] args) {
CoursesWithRequisitesGraph solution = new CoursesWithRequisitesGraph();
System.out.println(solution.solve(3, Arrays.asList(1, 2), Arrays.asList(2, 3)));
System.out.println(solution.solve(3, Arrays.asList(1, 2), Arrays.asList(2, 1)));
}
} |
Java | public final class ResourceLoader {
/**
* Create an input stream to read from the resource.
* @param resource The resource to load.
* @return An input stream.
*/
public static InputStream createInputStream(
final String resource) {
final ClassLoader loader = ResourceLoader.class.getClassLoader();
final InputStream is = loader.getResourceAsStream(resource);
if (is == null) {
final String str = "Can't read resource: " + resource;
throw new EncogError(str);
}
return is;
}
/**
* Load the resource as a string.
* @param resource The resource to load.
* @return The resource as a string.
*/
public static String loadString(final String resource) {
InputStream is = null;
try {
is = ResourceLoader.createInputStream(resource);
final StringBuilder result = new StringBuilder();
final BufferedReader br = new BufferedReader(new InputStreamReader(
is));
String line;
while ((line = br.readLine()) != null) {
result.append(line);
result.append("\r\n");
}
return result.toString();
} catch (final IOException e) {
throw new EncogError(e);
} finally {
try {
if (is != null) {
is.close();
}
} catch (final IOException e) {
throw new EncogError(e);
}
}
}
/**
* Private constructor.
*/
private ResourceLoader() {
}
} |
Java | public class RequestTimeoutException extends DatastoreException {
public RequestTimeoutException(String message, Throwable cause) {
super(message, cause);
}
} |
Java | public class GitPatternRepository {
/**
* Name of the repository.
*/
@JsonProperty(value = "name", required = true)
private String name;
/**
* Collection of pattern of the repository.
*/
@JsonProperty(value = "pattern")
private List<String> pattern;
/**
* URI of the repository.
*/
@JsonProperty(value = "uri", required = true)
private String uri;
/**
* Label of the repository.
*/
@JsonProperty(value = "label")
private String label;
/**
* Searching path of the repository.
*/
@JsonProperty(value = "searchPaths")
private List<String> searchPaths;
/**
* Username of git repository basic auth.
*/
@JsonProperty(value = "username")
private String username;
/**
* Password of git repository basic auth.
*/
@JsonProperty(value = "password")
private String password;
/**
* Public sshKey of git repository.
*/
@JsonProperty(value = "hostKey")
private String hostKey;
/**
* SshKey algorithm of git repository.
*/
@JsonProperty(value = "hostKeyAlgorithm")
private String hostKeyAlgorithm;
/**
* Private sshKey algorithm of git repository.
*/
@JsonProperty(value = "privateKey")
private String privateKey;
/**
* Strict host key checking or not.
*/
@JsonProperty(value = "strictHostKeyChecking")
private Boolean strictHostKeyChecking;
/**
* Get name of the repository.
*
* @return the name value
*/
public String name() {
return this.name;
}
/**
* Set name of the repository.
*
* @param name the name value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withName(String name) {
this.name = name;
return this;
}
/**
* Get collection of pattern of the repository.
*
* @return the pattern value
*/
public List<String> pattern() {
return this.pattern;
}
/**
* Set collection of pattern of the repository.
*
* @param pattern the pattern value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withPattern(List<String> pattern) {
this.pattern = pattern;
return this;
}
/**
* Get uRI of the repository.
*
* @return the uri value
*/
public String uri() {
return this.uri;
}
/**
* Set uRI of the repository.
*
* @param uri the uri value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withUri(String uri) {
this.uri = uri;
return this;
}
/**
* Get label of the repository.
*
* @return the label value
*/
public String label() {
return this.label;
}
/**
* Set label of the repository.
*
* @param label the label value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withLabel(String label) {
this.label = label;
return this;
}
/**
* Get searching path of the repository.
*
* @return the searchPaths value
*/
public List<String> searchPaths() {
return this.searchPaths;
}
/**
* Set searching path of the repository.
*
* @param searchPaths the searchPaths value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withSearchPaths(List<String> searchPaths) {
this.searchPaths = searchPaths;
return this;
}
/**
* Get username of git repository basic auth.
*
* @return the username value
*/
public String username() {
return this.username;
}
/**
* Set username of git repository basic auth.
*
* @param username the username value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withUsername(String username) {
this.username = username;
return this;
}
/**
* Get password of git repository basic auth.
*
* @return the password value
*/
public String password() {
return this.password;
}
/**
* Set password of git repository basic auth.
*
* @param password the password value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withPassword(String password) {
this.password = password;
return this;
}
/**
* Get public sshKey of git repository.
*
* @return the hostKey value
*/
public String hostKey() {
return this.hostKey;
}
/**
* Set public sshKey of git repository.
*
* @param hostKey the hostKey value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withHostKey(String hostKey) {
this.hostKey = hostKey;
return this;
}
/**
* Get sshKey algorithm of git repository.
*
* @return the hostKeyAlgorithm value
*/
public String hostKeyAlgorithm() {
return this.hostKeyAlgorithm;
}
/**
* Set sshKey algorithm of git repository.
*
* @param hostKeyAlgorithm the hostKeyAlgorithm value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withHostKeyAlgorithm(String hostKeyAlgorithm) {
this.hostKeyAlgorithm = hostKeyAlgorithm;
return this;
}
/**
* Get private sshKey algorithm of git repository.
*
* @return the privateKey value
*/
public String privateKey() {
return this.privateKey;
}
/**
* Set private sshKey algorithm of git repository.
*
* @param privateKey the privateKey value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withPrivateKey(String privateKey) {
this.privateKey = privateKey;
return this;
}
/**
* Get strict host key checking or not.
*
* @return the strictHostKeyChecking value
*/
public Boolean strictHostKeyChecking() {
return this.strictHostKeyChecking;
}
/**
* Set strict host key checking or not.
*
* @param strictHostKeyChecking the strictHostKeyChecking value to set
* @return the GitPatternRepository object itself.
*/
public GitPatternRepository withStrictHostKeyChecking(Boolean strictHostKeyChecking) {
this.strictHostKeyChecking = strictHostKeyChecking;
return this;
}
} |
Java | @SupportedAnnotationTypes({
"javax.persistence.Entity",
"javax.persistence.Embeddable",
"javax.persistence.MappedSuperclass" })
@SupportedOptions({ "openjpa.log",
"openjpa.source",
"openjpa.naming",
"openjpa.header",
"openjpa.metamodel",
"openjpa.addGeneratedAnnotation"
})
public class AnnotationProcessor6 extends AbstractProcessor {
// Collects persistence metadata from annotated source elements; created in init().
private SourceAnnotationHandler handler;
// Derives canonical metamodel class names.
// NOTE(review): assigned outside this view (presumably by setNamingPolicy) — confirm.
private MetaDataFactory factory;
// Source level targeted by generated code; the "openjpa.source" option overrides it.
private int generatedSourceVersion = 6;
// Compile-time logger configured from the "openjpa.log" option.
private CompileTimeLogger logger;
// Header comment lines prepended to generated sources.
private List<String> header = new ArrayList<>();
// True only when the "openjpa.metamodel" option is enabled; gates all work.
private boolean active;
private static Localizer _loc = Localizer.forPackage(AnnotationProcessor6.class);
// Explicit source-version override; null means "latest supported".
private SourceVersion supportedSourceVersion;
// Raw value of the "openjpa.addGeneratedAnnotation" option.
private String addGeneratedOption;
// Resolved @Generated annotation class, when one should be emitted.
private Class<?> generatedAnnotation;
// Timestamp stamped into generated sources; fixed once at init().
private Date generationDate;
/**
 * Category of members as per JPA 2.0 type system; each constant carries the
 * fully-qualified metamodel attribute interface used in generated code.
 *
 */
private static enum TypeCategory {
    ATTRIBUTE("javax.persistence.metamodel.SingularAttribute"),
    COLLECTION("javax.persistence.metamodel.CollectionAttribute"),
    SET("javax.persistence.metamodel.SetAttribute"),
    LIST("javax.persistence.metamodel.ListAttribute"),
    MAP("javax.persistence.metamodel.MapAttribute");

    // fully-qualified metamodel interface name for this category
    private String type;

    private TypeCategory(String type) {
        this.type = type;
    }

    public String getMetaModelType() {
        return type;
    }
}
/**
 * Enumerates available java.util.* collection classes to categorize them
 * into corresponding JPA meta-model member types.
 */
private static List<String> CLASSNAMES_LIST = Arrays.asList(
        "java.util.List", "java.util.AbstractList",
        "java.util.AbstractSequentialList", "java.util.ArrayList",
        "java.util.Stack", "java.util.Vector",
        // BUG FIX: LinkedList is a List; it was previously categorized as a Set
        "java.util.LinkedList");
private static List<String> CLASSNAMES_SET = Arrays.asList(
        "java.util.Set", "java.util.AbstractSet", "java.util.EnumSet",
        "java.util.HashSet", "java.util.LinkedHashSet",
        "java.util.SortedSet", "java.util.TreeSet");
private static List<String> CLASSNAMES_MAP = Arrays.asList(
        "java.util.Map", "java.util.AbstractMap", "java.util.EnumMap",
        "java.util.HashMap", "java.util.Hashtable",
        "java.util.IdentityHashMap", "java.util.LinkedHashMap",
        "java.util.Properties", "java.util.SortedMap",
        "java.util.TreeMap");
private static List<String> CLASSNAMES_COLLECTION = Arrays.asList(
        "java.util.Collection", "java.util.AbstractCollection",
        "java.util.AbstractQueue", "java.util.Queue",
        "java.util.PriorityQueue");
/**
 * Maps a member's declared Java type to the JPA 2.0 metamodel category it
 * belongs to (singular attribute, collection, list, set or map), given the
 * fully-qualified name of the member's Java class.
 *
 * @param mirror               the member's type mirror
 * @param name                 fully-qualified name of the member's type
 * @param persistentCollection whether the member is a persistent collection
 * @return the matching category; ATTRIBUTE when no collection type matches
 */
private TypeCategory toMetaModelTypeCategory(TypeMirror mirror,
    String name, boolean persistentCollection) {
    // an array marked as a persistent collection is modeled as a list
    if (mirror.getKind() == TypeKind.ARRAY && persistentCollection) {
        return TypeCategory.LIST;
    } else if (CLASSNAMES_COLLECTION.contains(name)) {
        return TypeCategory.COLLECTION;
    } else if (CLASSNAMES_LIST.contains(name)) {
        return TypeCategory.LIST;
    } else if (CLASSNAMES_SET.contains(name)) {
        return TypeCategory.SET;
    } else if (CLASSNAMES_MAP.contains(name)) {
        return TypeCategory.MAP;
    } else {
        return TypeCategory.ATTRIBUTE;
    }
}
/**
 * @return the explicitly configured source version when one was supplied,
 *         otherwise the newest version this JDK supports.
 */
@Override
public SourceVersion getSupportedSourceVersion() {
    return supportedSourceVersion != null
            ? supportedSourceVersion
            : SourceVersion.latestSupported();
}
/**
 * Initialization: reads the processor options and prepares logging, naming
 * policy, header comments and the source-annotation handler. Does nothing
 * further unless the "openjpa.metamodel" option is set to true.
 */
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
    super.init(processingEnv);
    active = "true".equalsIgnoreCase(getOptionValue("openjpa.metamodel"));
    if (!active)
        return;
    final String supported = getOptionValue("openjpa.processor.supportedversion");
    if (supported != null) {
        supportedSourceVersion = SourceVersion.valueOf(supported);
    } else { // default to ensure we don't log a false warning for every compilation, see OPENJPA-2300
        supportedSourceVersion = SourceVersion.latestSupported();
    }
    processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, _loc.get("mmg-tool-banner").toString());
    logger = new CompileTimeLogger(processingEnv, getOptionValue("openjpa.log"));
    // order matters: source version, naming policy and header feed the
    // code generation performed later in process()
    setSourceVersion();
    setNamingPolicy();
    setHeader();
    handler = new SourceAnnotationHandler(processingEnv, logger);
    setAddGeneratedAnnotation();
    this.generationDate = new Date();
}
/**
 * The entry point for the java compiler: generates metamodel sources for
 * every root type of the round, unless disabled or the round is over.
 */
@Override
public boolean process(Set<? extends TypeElement> annos, RoundEnvironment roundEnv) {
    if (!active || roundEnv.processingOver()) {
        // claim the annotations without doing any work
        return true;
    }
    for (Element e : roundEnv.getRootElements()) {
        if (e instanceof TypeElement) {
            process((TypeElement) e);
        }
    }
    return true;
}
/**
* Generate meta-model source code for the given type.
*
* @return true if code is generated for the given element. false otherwise.
*/
private boolean process(TypeElement e) {
if (!handler.isAnnotatedAsEntity(e)) {
return false;
}
Elements eUtils = processingEnv.getElementUtils();
String originalClass = eUtils.getBinaryName((TypeElement) e).toString();
String originalSimpleClass = e.getSimpleName().toString();
String metaClass = factory.getMetaModelClassName(originalClass);
SourceCode source = new SourceCode(metaClass);
comment(source);
annotate(source, originalClass);
TypeElement supCls = handler.getPersistentSupertype(e);
if (supCls != null) {
String superName = factory.getMetaModelClassName(supCls.toString());
source.getTopLevelClass().setSuper(superName);
}
try {
PrintWriter writer = createSourceFile(originalClass, metaClass, e);
SourceCode.Class modelClass = source.getTopLevelClass();
Set<? extends Element> members = handler.getPersistentMembers(e);
for (Element m : members) {
boolean isPersistentCollection = m.getAnnotation(PersistentCollection.class) != null;
TypeMirror decl = handler.getDeclaredType(m);
String fieldName = handler.getPersistentMemberName(m);
String fieldType = handler.getDeclaredTypeName(decl, true, isPersistentCollection);
TypeCategory typeCategory =
toMetaModelTypeCategory(decl, fieldType, isPersistentCollection);
String metaModelType = typeCategory.getMetaModelType();
SourceCode.Field modelField = null;
switch (typeCategory) {
case ATTRIBUTE:
modelField = modelClass.addField(fieldName, metaModelType);
modelField.addParameter(originalSimpleClass)
.addParameter(fieldType);
break;
case COLLECTION:
case LIST:
case SET:
TypeMirror param = handler.getTypeParameter(m, decl, 0, true);
String elementType = handler.getDeclaredTypeName(param);
modelField = modelClass.addField(fieldName, metaModelType);
modelField.addParameter(originalSimpleClass)
.addParameter(elementType);
break;
case MAP:
TypeMirror key = handler.getTypeParameter(m, decl, 0, false);
TypeMirror value = handler.getTypeParameter(m, decl, 1, true);
String keyType = handler.getDeclaredTypeName(key);
String valueType = handler.getDeclaredTypeName(value);
modelField = modelClass.addField(fieldName, metaModelType);
modelField.addParameter(originalSimpleClass)
.addParameter(keyType)
.addParameter(valueType);
break;
}
modelField.makePublic().makeStatic().makeVolatile();
}
source.write(writer);
writer.flush();
writer.close();
return true;
} catch (Exception e1) {
logger.error(_loc.get("mmg-process-error", e.getQualifiedName()), e1);
return false;
}
}
private void annotate(SourceCode source, String originalClass) {
SourceCode.Class cls = source.getTopLevelClass();
cls.addAnnotation(StaticMetamodel.class.getName())
.addArgument("value", originalClass + ".class", false);
switch (this.addGeneratedOption) {
case "false":
return;
case "force":
cls.addAnnotation(javax.annotation.Generated.class.getName())
.addArgument("value", this.getClass().getName())
.addArgument("date", this.generationDate.toString());
break;
case "auto":
// fall through
default:
// only add the annotation if it is on the classpath for Java 6+.
if (generatedAnnotation != null && generatedSourceVersion >= 6) {
cls.addAnnotation(generatedAnnotation.getName())
.addArgument("value", this.getClass().getName())
.addArgument("date", this.generationDate.toString());
}
break;
}
}
private void comment(SourceCode source) {
if (header.size() != 0)
source.addComment(false, header.toArray(new String[header.size()]));
String defaultHeader = _loc.get("mmg-tool-sign").getMessage();
source.addComment(false, defaultHeader);
}
/**
* Parse annotation processor option <code>-Aopenjpa.source=n</code> to detect
* the source version for the generated classes.
* n must be a integer. Default or wrong specification returns 6.
*/
private void setSourceVersion() {
String version = getOptionValue("openjpa.source");
if (version != null) {
try {
generatedSourceVersion = Integer.parseInt(version);
} catch (NumberFormatException e) {
logger.warn(_loc.get("mmg-bad-source", version, 6));
generatedSourceVersion = 6;
}
} else {
generatedSourceVersion = 6;
}
}
private void setNamingPolicy() {
String policy = getOptionValue("openjpa.naming");
if (policy != null) {
try {
factory = (MetaDataFactory)Class.forName(policy).newInstance();
} catch (Throwable e) {
logger.warn(_loc.get("mmg-bad-naming", policy, e));
factory = new PersistenceMetaDataFactory();
}
} else {
factory = new PersistenceMetaDataFactory();
}
}
private void setHeader() {
String headerOption = getOptionValue("openjpa.header");
if (headerOption == null) {
return;
}
if ("ASL".equalsIgnoreCase(headerOption)) {
header.add(_loc.get("mmg-asl-header").getMessage());
} else {
try {
URL url = new URL(headerOption);
InputStream is = url.openStream();
Scanner s = new Scanner(is);
while (s.hasNextLine()) {
header.add(s.nextLine());
}
} catch (Throwable t) {
}
}
}
private void setAddGeneratedAnnotation() {
this.addGeneratedOption = getOptionValue("openjpa.addGeneratedAnnotation");
if (this.addGeneratedOption == null) {
this.addGeneratedOption = "auto";
}
// only add the annotation if it is on the classpath for Java 6+.
try {
this.generatedAnnotation = Class.forName("javax.annotation.Generated", false, null);
} catch (ClassNotFoundException generatedNotFoundEx) {
logger.trace(_loc.get("mmg-annotation-not-found"));
}
}
/**
* Creates a file where source code of the given metaClass will be written.
*
*/
private PrintWriter createSourceFile(String originalClass, String metaClass, TypeElement e)
throws IOException {
JavaFileObject javaFile = processingEnv.getFiler().createSourceFile(metaClass, e);
logger.info(_loc.get("mmg-process", javaFile.toUri().normalize()));
return new PrintWriter(javaFile.openWriter());
}
/**
* Get the value for the given keys, whoever matches first, in the current available options.
*/
private String getOptionValue(String... keys) {
Map<String,String> options = processingEnv.getOptions();
for (String key : keys) {
if (options.containsKey(key))
return options.get(key);
}
return null;
}
} |
Java | public class StaticObjects {
public static String emailPattern = "[a-zA-Z0-9._-]+@[a-zA-Z]+\\.+[a-z]+";
// public static String stripeKey = "pk_test_kBD5CZDk3MBZNRLeWqrfvhew";//In House Key Test
} |
Java | public class Matrix
implements Cloneable, Serializable {
/**
* The actual matrix */
protected weka.core.matrix.Matrix m_Matrix = null;
/**
* Constructs a matrix and initializes it with default values.
*
* @param nr the number of rows
* @param nc the number of columns
*/
public Matrix(int nr, int nc) {
m_Matrix = new weka.core.matrix.Matrix(nr, nc);
}
/**
* Constructs a matrix using a given array.
*
* @param array the values of the matrix
*/
public Matrix(double[][] array) throws Exception {
m_Matrix = new weka.core.matrix.Matrix(array);
}
/**
* Reads a matrix from a reader. The first line in the file should
* contain the number of rows and columns. Subsequent lines
* contain elements of the matrix.
*
* @param r the reader containing the matrix
* @throws Exception if an error occurs
*/
public Matrix(Reader r) throws Exception {
m_Matrix = new weka.core.matrix.Matrix(r);
}
/**
* Creates and returns a clone of this object.
*
* @return a clone of this instance.
* @throws Exception if an error occurs
*/
public Object clone() {
try {
return new Matrix(m_Matrix.getArrayCopy());
}
catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Writes out a matrix.
*
* @param w the output Writer
* @throws Exception if an error occurs
*/
public void write(Writer w) throws Exception {
m_Matrix.write(w);
}
/**
* returns the internal matrix
* @see #m_Matrix
*/
protected weka.core.matrix.Matrix getMatrix() {
return m_Matrix;
}
/**
* Returns the value of a cell in the matrix.
*
* @param rowIndex the row's index
* @param columnIndex the column's index
* @return the value of the cell of the matrix
*/
public final double getElement(int rowIndex, int columnIndex) {
return m_Matrix.get(rowIndex, columnIndex);
}
/**
* Add a value to an element.
*
* @param rowIndex the row's index.
* @param columnIndex the column's index.
* @param value the value to add.
*/
public final void addElement(int rowIndex, int columnIndex, double value) {
m_Matrix.set(
rowIndex, columnIndex, m_Matrix.get(rowIndex, columnIndex) + value);
}
/**
* Returns the number of rows in the matrix.
*
* @return the number of rows
*/
public final int numRows() {
return m_Matrix.getRowDimension();
}
/**
* Returns the number of columns in the matrix.
*
* @return the number of columns
*/
public final int numColumns() {
return m_Matrix.getColumnDimension();
}
/**
* Sets an element of the matrix to the given value.
*
* @param rowIndex the row's index
* @param columnIndex the column's index
* @param value the value
*/
public final void setElement(int rowIndex, int columnIndex, double value) {
m_Matrix.set(rowIndex, columnIndex, value);
}
/**
* Sets a row of the matrix to the given row. Performs a deep copy.
*
* @param index the row's index
* @param newRow an array of doubles
*/
public final void setRow(int index, double[] newRow) {
for (int i = 0; i < newRow.length; i++)
m_Matrix.set(index, i, newRow[i]);
}
/**
* Gets a row of the matrix and returns it as double array.
*
* @param index the row's index
* @return an array of doubles
*/
public double[] getRow(int index) {
double[] newRow = new double[this.numColumns()];
for (int i = 0; i < newRow.length; i++)
newRow[i] = getElement(index, i);
return newRow;
}
/**
* Gets a column of the matrix and returns it as a double array.
*
* @param index the column's index
* @return an array of doubles
*/
public double[] getColumn(int index) {
double[] newColumn = new double[this.numRows()];
for (int i = 0; i < newColumn.length; i++)
newColumn[i] = getElement(i, index);
return newColumn;
}
/**
* Sets a column of the matrix to the given column. Performs a deep copy.
*
* @param index the column's index
* @param newColumn an array of doubles
*/
public final void setColumn(int index, double[] newColumn) {
for (int i = 0; i < numRows(); i++)
m_Matrix.set(i, index, newColumn[i]);
}
/**
* Converts a matrix to a string
*
* @return the converted string
*/
public String toString() {
return m_Matrix.toString();
}
/**
* Returns the sum of this matrix with another.
*
* @return a matrix containing the sum.
*/
public final Matrix add(Matrix other) {
try {
return new Matrix(m_Matrix.plus(other.getMatrix()).getArrayCopy());
}
catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Returns the transpose of a matrix.
*
* @return the transposition of this instance.
*/
public final Matrix transpose() {
try {
return new Matrix(m_Matrix.transpose().getArrayCopy());
}
catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Returns true if the matrix is symmetric.
*
* @return boolean true if matrix is symmetric.
*/
public boolean isSymmetric() {
return m_Matrix.isSymmetric();
}
/**
* Returns the multiplication of two matrices
*
* @param b the multiplication matrix
* @return the product matrix
*/
public final Matrix multiply(Matrix b) {
try {
return new Matrix(getMatrix().times(b.getMatrix()).getArrayCopy());
}
catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Performs a (ridged) linear regression.
*
* @param y the dependent variable vector
* @param ridge the ridge parameter
* @return the coefficients
* @throws IllegalArgumentException if not successful
*/
public final double[] regression(Matrix y, double ridge) {
return getMatrix().regression(y.getMatrix(), ridge).getCoefficients();
}
/**
* Performs a weighted (ridged) linear regression.
*
* @param y the dependent variable vector
* @param w the array of data point weights
* @param ridge the ridge parameter
* @return the coefficients
* @throws IllegalArgumentException if the wrong number of weights were
* provided.
*/
public final double[] regression(Matrix y, double [] w, double ridge) {
return getMatrix().regression(y.getMatrix(), w, ridge).getCoefficients();
}
/**
* Returns the L part of the matrix.
* This does only make sense after LU decomposition.
*
* @return matrix with the L part of the matrix;
* @see #LUDecomposition()
*/
public Matrix getL() throws Exception {
int nr = numRows(); // num of rows
int nc = numColumns(); // num of columns
double[][] ld = new double[nr][nc];
for (int i = 0; i < nr; i++) {
for (int j = 0; (j < i) && (j < nc); j++) {
ld[i][j] = getElement(i, j);
}
if (i < nc) ld[i][i] = 1;
}
Matrix l = new Matrix(ld);
return l;
}
/**
* Returns the U part of the matrix.
* This does only make sense after LU decomposition.
*
* @return matrix with the U part of a matrix;
* @see #LUDecomposition()
*/
public Matrix getU() throws Exception {
int nr = numRows(); // num of rows
int nc = numColumns(); // num of columns
double[][] ud = new double[nr][nc];
for (int i = 0; i < nr; i++) {
for (int j = i; j < nc ; j++) {
ud[i][j] = getElement(i, j);
}
}
Matrix u = new Matrix(ud);
return u;
}
/**
* Performs a LUDecomposition on the matrix.
* It changes the matrix into its LU decomposition.
*
* @return the indices of the row permutation
*/
public int[] LUDecomposition() throws Exception {
// decompose
weka.core.matrix.LUDecomposition lu = m_Matrix.lu();
// singular? old class throws Exception!
if (!lu.isNonsingular())
throw new Exception("Matrix is singular");
weka.core.matrix.Matrix u = lu.getU();
weka.core.matrix.Matrix l = lu.getL();
// modify internal matrix
int nr = numRows();
int nc = numColumns();
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++) {
if (j < i)
setElement(i, j, l.get(i, j));
else
setElement(i, j, u.get(i, j));
}
}
u = null;
l = null;
return lu.getPivot();
}
/**
* Solve A*X = B using backward substitution.
* A is current object (this). Note that this matrix will be changed!
* B parameter bb.
* X returned in parameter bb.
*
* @param bb first vector B in above equation then X in same equation.
*/
public void solve(double[] bb) throws Exception {
// solve
weka.core.matrix.Matrix x = m_Matrix.solve(
new weka.core.matrix.Matrix(bb, bb.length));
// move X into bb
int nr = x.getRowDimension();
for (int i = 0; i < nr; i++)
bb[i] = x.get(i, 0);
}
/**
* Performs Eigenvalue Decomposition using Householder QR Factorization
*
* Matrix must be symmetrical.
* Eigenvectors are return in parameter V, as columns of the 2D array.
* (Real parts of) Eigenvalues are returned in parameter d.
*
* @param V double array in which the eigenvectors are returned
* @param d array in which the eigenvalues are returned
* @throws Exception if matrix is not symmetric
*/
public void eigenvalueDecomposition(double[][] V, double[] d)
throws Exception {
// old class only worked with symmetric matrices!
if (!this.isSymmetric())
throw new Exception("EigenvalueDecomposition: Matrix must be symmetric.");
// perform eigenvalue decomposition
weka.core.matrix.EigenvalueDecomposition eig = m_Matrix.eig();
weka.core.matrix.Matrix v = eig.getV();
double[] d2 = eig.getRealEigenvalues();
// transfer data
int nr = numRows();
int nc = numColumns();
for (int i = 0; i < nr; i++)
for (int j = 0; j < nc; j++)
V[i][j] = v.get(i, j);
for (int i = 0; i < d2.length; i++)
d[i] = d2[i];
}
/**
* Returns sqrt(a^2 + b^2) without under/overflow.
*
* @param a length of one side of rectangular triangle
* @param b length of other side of rectangular triangle
* @return lenght of third side
*/
protected static double hypot(double a, double b) {
return weka.core.matrix.Maths.hypot(a, b);
}
/**
* converts the Matrix into a single line Matlab string: matrix is enclosed
* by parentheses, rows are separated by semicolon and single cells by
* blanks, e.g., [1 2; 3 4].
* @return the matrix in Matlab single line format
*/
public String toMatlab() {
return getMatrix().toMatlab();
}
/**
* creates a matrix from the given Matlab string.
* @param matlab the matrix in matlab format
* @return the matrix represented by the given string
* @see #toMatlab()
*/
public static Matrix parseMatlab(String matlab) throws Exception {
return new Matrix(weka.core.matrix.Matrix.parseMatlab(matlab).getArray());
}
/**
* Main method for testing this class.
*/
public static void main(String[] ops) {
double[] first = {2.3, 1.2, 5};
double[] second = {5.2, 1.4, 9};
double[] response = {4, 7, 8};
double[] weights = {1, 2, 3};
try {
// test eigenvaluedecomposition
double[][] m = {{1, 2, 3}, {2, 5, 6},{3, 6, 9}};
Matrix M = new Matrix(m);
int n = M.numRows();
double[][] V = new double[n][n];
double[] d = new double[n];
double[] e = new double[n];
M.eigenvalueDecomposition(V, d);
Matrix v = new Matrix(V);
// M.testEigen(v, d, );
// end of test-eigenvaluedecomposition
Matrix a = new Matrix(2, 3);
Matrix b = new Matrix(3, 2);
System.out.println("Number of columns for a: " + a.numColumns());
System.out.println("Number of rows for a: " + a.numRows());
a.setRow(0, first);
a.setRow(1, second);
b.setColumn(0, first);
b.setColumn(1, second);
System.out.println("a:\n " + a);
System.out.println("b:\n " + b);
System.out.println("a (0, 0): " + a.getElement(0, 0));
System.out.println("a transposed:\n " + a.transpose());
System.out.println("a * b:\n " + a.multiply(b));
Matrix r = new Matrix(3, 1);
r.setColumn(0, response);
System.out.println("r:\n " + r);
System.out.println("Coefficients of regression of b on r: ");
double[] coefficients = b.regression(r, 1.0e-8);
for (int i = 0; i < coefficients.length; i++) {
System.out.print(coefficients[i] + " ");
}
System.out.println();
System.out.println("Weights: ");
for (int i = 0; i < weights.length; i++) {
System.out.print(weights[i] + " ");
}
System.out.println();
System.out.println("Coefficients of weighted regression of b on r: ");
coefficients = b.regression(r, weights, 1.0e-8);
for (int i = 0; i < coefficients.length; i++) {
System.out.print(coefficients[i] + " ");
}
System.out.println();
a.setElement(0, 0, 6);
System.out.println("a with (0, 0) set to 6:\n " + a);
a.write(new java.io.FileWriter("main.matrix"));
System.out.println("wrote matrix to \"main.matrix\"\n" + a);
a = new Matrix(new java.io.FileReader("main.matrix"));
System.out.println("read matrix from \"main.matrix\"\n" + a);
} catch (Exception e) {
e.printStackTrace();
}
}
} |
Java | @FixMethodOrder(MethodSorters.NAME_ASCENDING)
@RunWith(CryptoTokenTestRunner.class)
public class CmpConfirmMessageTest extends CmpTestCase {
private static final Logger log = Logger.getLogger(CrmfRequestTest.class);
private static final String user = "TestUser";
private static final X500Name userDN = new X500Name("CN=" + user + ", O=PrimeKey Solutions AB, C=SE");
private final X509Certificate cacert;
private final CA testx509ca;
private final CmpConfiguration cmpConfiguration;
private static final String cmpAlias = "CmpConfirmMessageTestConfAlias";
private final GlobalConfigurationSessionRemote globalConfigurationSession = EjbRemoteHelper.INSTANCE.getRemoteSession(GlobalConfigurationSessionRemote.class);
@ClassRule
public static CryptoTokenRule cryptoTokenRule = new CryptoTokenRule();
@BeforeClass
public static void beforeClass() {
CryptoProviderTools.installBCProvider();
}
public CmpConfirmMessageTest() throws Exception {
this.testx509ca = cryptoTokenRule.createX509Ca();
this.cacert = (X509Certificate) this.testx509ca.getCACertificate();
this.cmpConfiguration = (CmpConfiguration) this.globalConfigurationSession.getCachedConfiguration(CmpConfiguration.CMP_CONFIGURATION_ID);
}
@Override
@Before
public void setUp() throws Exception {
super.setUp();
//this.caSession.addCA(ADMIN, this.testx509ca);
log.debug("this.testx509ca.getSubjectDN(): " + this.testx509ca.getSubjectDN());
log.debug("caid: " + this.testx509ca.getCAId());
this.cmpConfiguration.addAlias(cmpAlias);
this.globalConfigurationSession.saveConfiguration(ADMIN, this.cmpConfiguration);
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();
cryptoTokenRule.cleanUp();
this.cmpConfiguration.removeAlias(cmpAlias);
this.globalConfigurationSession.saveConfiguration(ADMIN, this.cmpConfiguration);
}
@Override
public String getRoleName() {
return this.getClass().getSimpleName();
}
/**
* This test sends a CmpConfirmMessage and expects a successful CmpConfirmResponse message
* signed using the CA specified as recipient in the request.
* @throws Exception
*/
@Test
public void test01ConfRespSignedByRecepient() throws Exception {
log.trace(">test01ConfRespSignedByRecepient");
this.cmpConfiguration.setResponseProtection(cmpAlias, "signature");
this.cmpConfiguration.setCMPDefaultCA(cmpAlias, "");
this.globalConfigurationSession.saveConfiguration(ADMIN, this.cmpConfiguration);
byte[] nonce = CmpMessageHelper.createSenderNonce();
byte[] transid = CmpMessageHelper.createSenderNonce();
// Send a confirm message to the CA
String hash = "foo123";
PKIMessage confirm = genCertConfirm(userDN, this.cacert, nonce, transid, hash, 0);
assertNotNull(confirm);
ByteArrayOutputStream bao = new ByteArrayOutputStream();
DEROutputStream out = new DEROutputStream(bao);
out.writeObject(confirm);
byte[] ba = bao.toByteArray();
// Send request and receive response
byte[] resp = sendCmpHttp(ba, 200, cmpAlias);
checkCmpResponseGeneral(resp, this.testx509ca.getSubjectDN(), userDN, this.cacert, nonce, transid, true, null, PKCSObjectIdentifiers.sha1WithRSAEncryption.getId());
checkCmpPKIConfirmMessage(userDN, this.cacert, resp);
log.trace("<test01ConfRespSignedByRecepient");
}
/**
* This test sends a CmpConfirmMessage and expects a successful CmpConfirmResponse message
* signed using the CA set in cmp.defaultca
* @throws Exception
*/
@Test
public void test02ConfRespSignedByDefaultCA() throws Exception {
log.trace(">test02ConfRespSignedByDefaultCA");
this.cmpConfiguration.setResponseProtection(cmpAlias, "signature");
this.cmpConfiguration.setCMPDefaultCA(cmpAlias, this.testx509ca.getSubjectDN());
this.globalConfigurationSession.saveConfiguration(ADMIN, this.cmpConfiguration);
byte[] nonce = CmpMessageHelper.createSenderNonce();
byte[] transid = CmpMessageHelper.createSenderNonce();
// Send a confirm message to the CA
String hash = "foo123";
// the parameter 'null' is to generate a confirm request for a recipient that does not exist
PKIMessage confirm = genCertConfirm(userDN, null, nonce, transid, hash, 0);
assertNotNull(confirm);
ByteArrayOutputStream bao = new ByteArrayOutputStream();
DEROutputStream out = new DEROutputStream(bao);
out.writeObject(confirm);
byte[] ba = bao.toByteArray();
// Send request and receive response
byte[] resp = sendCmpHttp(ba, 200, cmpAlias);
checkCmpResponseGeneral(resp, this.testx509ca.getSubjectDN(), userDN, this.cacert, nonce, transid, true, null, PKCSObjectIdentifiers.sha1WithRSAEncryption.getId());
checkCmpPKIConfirmMessage(userDN, this.cacert, resp);
log.trace("<test02ConfRespSignedByDefaultCA");
}
/**
* This test sends a CmpConfirmMessage and expects a successful CmpConfirmResponse message
* protected with PBE using the global shared secret set as authentication module parameter
* in cmp.authenticationparameter.
* @throws Exception
*/
@Test
public void test03ConfRespPbeProtectedByGlobalSharedSecret() throws Exception {
log.trace(">test03ConfRespPbeProtected");
this.cmpConfiguration.setRAMode(cmpAlias, true);
this.cmpConfiguration.setResponseProtection(cmpAlias, "pbe");
this.cmpConfiguration.setCMPDefaultCA(cmpAlias, "");
this.cmpConfiguration.setAuthenticationModule(cmpAlias, CmpConfiguration.AUTHMODULE_HMAC);
this.cmpConfiguration.setAuthenticationParameters(cmpAlias, "password");
this.globalConfigurationSession.saveConfiguration(ADMIN, this.cmpConfiguration);
byte[] nonce = CmpMessageHelper.createSenderNonce();
byte[] transid = CmpMessageHelper.createSenderNonce();
// Send a confirm message to the CA
String hash = "foo123";
PKIMessage confirm = genCertConfirm(userDN, this.cacert, nonce, transid, hash, 0);
confirm = protectPKIMessage(confirm, false, "password", 567);
assertNotNull(confirm);
ByteArrayOutputStream bao = new ByteArrayOutputStream();
DEROutputStream out = new DEROutputStream(bao);
out.writeObject(confirm);
byte[] ba = bao.toByteArray();
// Send request and receive response
byte[] resp = sendCmpHttp(ba, 200, cmpAlias);
checkCmpResponseGeneral(resp, this.testx509ca.getSubjectDN(), userDN, this.cacert, nonce, transid, false, "password", PKCSObjectIdentifiers.sha1WithRSAEncryption.getId());
checkCmpPKIConfirmMessage(userDN, this.cacert, resp);
log.trace("<test03ConfRespPbeProtected");
}
/**
* This test sends a CmpConfirmMessage and expects a successful CmpConfirmResponse message
* protected with PBE using the global shared secret set as authentication module parameter
* in cmp.authenticationparameter.
* @throws Exception
*/
@Test
public void test04ConfRespPbeProtectedByCACmpSecret() throws Exception {
log.trace(">test03ConfRespPbeProtected");
this.cmpConfiguration.setRAMode(cmpAlias, true);
this.cmpConfiguration.setResponseProtection(cmpAlias, "pbe");
this.cmpConfiguration.setCMPDefaultCA(cmpAlias, this.testx509ca.getSubjectDN());
this.cmpConfiguration.setAuthenticationModule(cmpAlias, CmpConfiguration.AUTHMODULE_HMAC);
this.cmpConfiguration.setAuthenticationParameters(cmpAlias, "-");
this.globalConfigurationSession.saveConfiguration(ADMIN, this.cmpConfiguration);
byte[] nonce = CmpMessageHelper.createSenderNonce();
byte[] transid = CmpMessageHelper.createSenderNonce();
// Send a confirm message to the CA
String hash = "foo123";
PKIMessage confirm = genCertConfirm(userDN, this.cacert, nonce, transid, hash, 0);
confirm = protectPKIMessage(confirm, false, "foo123", 567);
assertNotNull(confirm);
ByteArrayOutputStream bao = new ByteArrayOutputStream();
DEROutputStream out = new DEROutputStream(bao);
out.writeObject(confirm);
byte[] ba = bao.toByteArray();
// Send request and receive response
byte[] resp = sendCmpHttp(ba, 200, cmpAlias);
checkCmpResponseGeneral(resp, this.testx509ca.getSubjectDN(), userDN, this.cacert, nonce, transid, false, "foo123", PKCSObjectIdentifiers.sha1WithRSAEncryption.getId());
checkCmpPKIConfirmMessage(userDN, this.cacert, resp);
log.trace("<test03ConfRespPbeProtected");
}
} |
Java | public class MediaCacheUtils {
public static void updateMediaCache(String title, String artist, String album, long id, Context ctx) {
ContentResolver musicResolver = ctx.getContentResolver();
Uri musicUri = android.provider.MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
ContentValues newValues = new ContentValues();
newValues.put(android.provider.MediaStore.Audio.Media.TITLE, title);
newValues.put(android.provider.MediaStore.Audio.Media.ARTIST, artist);
newValues.put(android.provider.MediaStore.Audio.Media.ALBUM, album);
int res = musicResolver.update(musicUri, newValues, android.provider.MediaStore.Audio.Media._ID + "=?", new String[]{String.valueOf(id)});
if (res > 0) {
// Toast.makeText(this, "Updated MediaStore cache", Toast.LENGTH_SHORT).show();
}
}
} |
Java | public class SafeCast
{
/**
* Convert a primitive {@code double }value to a primitive {@code int} value,
* throwing an exception if the value can not be exactly represented as an {@code int}.
* Wrapper method for toIntExact, converting an ArithmeticException to a PrismExeption.
*
* @param value {@code double} value
* @return the corresponding {@code int} value
* @throws PrismException if the value cannot be converted to {@code int}
*/
public static int toInt(double value) throws PrismException
{
try {
return toIntExact(value);
} catch (ArithmeticException e) {
throw new PrismException(e.getMessage());
}
}
/**
* Convert a primitive {@code double} to a primitive {@code int} value,
* throwing an exception if the value can not be exactly represented as an {@code int}.
*
* @param value {@code double} value
* @return the corresponding {@code int} value
* @throws ArithmeticException if the value cannot be converted to {@code int}
*/
public static int toIntExact(double value)
{
if (!Double.isFinite(value)) {
throw new ArithmeticException(value + " is non-finite, cannot be represented by int");
}
if ((int) value != value) {
throw new ArithmeticException(value + " cannot be losslessly converted to int");
}
return (int) value;
}
/**
* Convert a primitive double to a primitive long value
* throwing an exception if the value is a special value or not an {@code long}.
*
* @param value {@code double} value
* @return the equivalent {@code long} value
* @throws ArithmeticException if the value cannot be converted to {@code long}
*/
public static long toLongExact(double value)
{
if (!Double.isFinite(value)) {
throw new ArithmeticException(value + " is non-finite, cannot be represented by long");
}
if ((long) value != value) {
throw new ArithmeticException(value + " cannot be losslessly converted to long");
}
return (long) value;
}
} |
Java | public class ServerWebSocketContainer implements ServerContainer, Closeable {
    // Name of the exchange/session property holding the client connect timeout (seconds).
    public static final String TIMEOUT = "io.undertow.websocket.CONNECT_TIMEOUT";
    public static final int DEFAULT_WEB_SOCKET_TIMEOUT_SECONDS = 10;

    private final ClassIntrospecter classIntrospecter;
    // Client endpoint configs keyed by endpoint class; built lazily per class.
    private final Map<Class<?>, ConfiguredClientEndpoint> clientEndpoints = new CopyOnWriteMap<>();
    private final List<ConfiguredServerEndpoint> configuredServerEndpoints = new ArrayList<>();
    private final Set<Class<?>> annotatedEndpointClasses = new HashSet<>();

    /**
     * set of all deployed server endpoint paths. Due to the comparison function we can detect
     * overlaps
     */
    private final TreeSet<PathTemplate> seenPaths = new TreeSet<>();

    private final Supplier<XnioWorker> xnioWorker;
    private final ByteBufferPool bufferPool;
    // Whether endpoint callbacks are dispatched to the worker thread pool.
    private final boolean dispatchToWorker;
    private final InetSocketAddress clientBindAddress;
    private final WebSocketReconnectHandler webSocketReconnectHandler;

    // javax.websocket container-wide defaults; mutable via the setters, hence volatile.
    private volatile long defaultAsyncSendTimeout;
    private volatile long defaultMaxSessionIdleTimeout;
    private volatile int defaultMaxBinaryMessageBufferSize;
    private volatile int defaultMaxTextMessageBufferSize;
    private volatile boolean deploymentComplete = false;
    private final List<DeploymentException> deploymentExceptions = new ArrayList<>();

    private ServletContextImpl contextToAddFilter = null;

    // SSL providers discovered via ServiceLoader in the constructor; immutable afterwards.
    private final List<WebsocketClientSslProvider> clientSslProviders;
    private final List<PauseListener> pauseListeners = new ArrayList<>();
    private final List<Extension> installedExtensions;

    // Composite thread-setup action used to run endpoint code with deployment context applied.
    private final ThreadSetupHandler.Action<Void, Runnable> invokeEndpointTask;
    private volatile boolean closed = false;
    /**
     * Convenience constructor using this class's own class loader and no client bind
     * address or reconnect handler.
     * <p>
     * NOTE(review): the {@code clientMode} parameter is not used by this overload —
     * it delegates without passing it on. Presumably retained for binary
     * compatibility; confirm before relying on it.
     */
    public ServerWebSocketContainer(final ClassIntrospecter classIntrospecter, final Supplier<XnioWorker> xnioWorker, ByteBufferPool bufferPool, List<ThreadSetupHandler> threadSetupHandlers, boolean dispatchToWorker, boolean clientMode) {
        this(classIntrospecter, ServerWebSocketContainer.class.getClassLoader(), xnioWorker, bufferPool, threadSetupHandlers, dispatchToWorker, null, null);
    }

    /** Convenience constructor with no client bind address or reconnect handler. */
    public ServerWebSocketContainer(final ClassIntrospecter classIntrospecter, final ClassLoader classLoader, Supplier<XnioWorker> xnioWorker, ByteBufferPool bufferPool, List<ThreadSetupHandler> threadSetupHandlers, boolean dispatchToWorker) {
        this(classIntrospecter, classLoader, xnioWorker, bufferPool, threadSetupHandlers, dispatchToWorker, null, null);
    }

    /** Convenience constructor with no pre-installed WebSocket extensions. */
    public ServerWebSocketContainer(final ClassIntrospecter classIntrospecter, final ClassLoader classLoader, Supplier<XnioWorker> xnioWorker, ByteBufferPool bufferPool, List<ThreadSetupHandler> threadSetupHandlers, boolean dispatchToWorker, InetSocketAddress clientBindAddress, WebSocketReconnectHandler reconnectHandler) {
        this(classIntrospecter, classLoader, xnioWorker, bufferPool, threadSetupHandlers, dispatchToWorker, clientBindAddress, reconnectHandler, Collections.emptyList());
    }
/**
 * Full constructor. Discovers client SSL providers via {@link ServiceLoader} on the
 * supplied class loader and composes the thread-setup handlers into the single action
 * used by {@code invokeEndpointMethod} to run endpoint code.
 *
 * @param classIntrospecter   used to create instance factories for endpoints/configurators
 * @param classLoader         loader used for ServiceLoader discovery of SSL providers
 * @param xnioWorker          supplier of the XNIO worker used for client connections
 * @param bufferPool          buffer pool for websocket frames
 * @param threadSetupHandlers handlers wrapped (in order) around endpoint invocations
 * @param dispatchToWorker    whether endpoint methods are dispatched to a worker thread
 * @param clientBindAddress   local bind address for client connections, may be null
 * @param reconnectHandler    reconnect handler for client connections, may be null
 * @param installedExtensions extensions reported by {@link #getInstalledExtensions()}
 */
public ServerWebSocketContainer(final ClassIntrospecter classIntrospecter, final ClassLoader classLoader, Supplier<XnioWorker> xnioWorker, ByteBufferPool bufferPool, List<ThreadSetupHandler> threadSetupHandlers, boolean dispatchToWorker, InetSocketAddress clientBindAddress, WebSocketReconnectHandler reconnectHandler, List<Extension> installedExtensions) {
    this.classIntrospecter = classIntrospecter;
    this.bufferPool = bufferPool;
    this.xnioWorker = xnioWorker;
    this.dispatchToWorker = dispatchToWorker;
    this.clientBindAddress = clientBindAddress;
    // defensive copy: caller's list must not alias our internal state
    this.installedExtensions = new ArrayList<>(installedExtensions);
    List<WebsocketClientSslProvider> clientSslProviders = new ArrayList<>();
    for (WebsocketClientSslProvider provider : ServiceLoader.load(WebsocketClientSslProvider.class, classLoader)) {
        clientSslProviders.add(provider);
    }
    this.clientSslProviders = Collections.unmodifiableList(clientSslProviders);
    this.webSocketReconnectHandler = reconnectHandler;
    // base action simply runs the invocation; each ThreadSetupHandler wraps the previous one
    ThreadSetupHandler.Action<Void, Runnable> task = new ThreadSetupHandler.Action<Void, Runnable>() {
        @Override
        public Void call(HttpServerExchange exchange, Runnable context) throws Exception {
            context.run();
            return null;
        }
    };
    for(ThreadSetupHandler handler : threadSetupHandlers) {
        task = handler.create(task);
    }
    this.invokeEndpointTask = task;
}
/** {@inheritDoc} Container-wide default timeout for asynchronous sends. */
@Override
public long getDefaultAsyncSendTimeout() {
    return defaultAsyncSendTimeout;
}
/** {@inheritDoc} Volatile write; visible to all threads without further synchronization. */
@Override
public void setAsyncSendTimeout(long defaultAsyncSendTimeout) {
    this.defaultAsyncSendTimeout = defaultAsyncSendTimeout;
}
/**
 * Connects an annotated client endpoint instance using an explicit connection builder
 * (SSL and negotiation are taken from the builder, not resolved here).
 *
 * @throws ClosedChannelException if this container has been closed
 * @throws DeploymentException    if the instance's class is not a valid annotated client endpoint
 */
public Session connectToServer(final Object annotatedEndpointInstance, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
    if(closed) {
        throw new ClosedChannelException();
    }
    ConfiguredClientEndpoint config = getClientEndpoint(annotatedEndpointInstance.getClass(), false);
    if (config == null) {
        throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(annotatedEndpointInstance.getClass());
    }
    // wrap the user-supplied instance so the annotated-endpoint factory can drive it
    Endpoint instance = config.getFactory().createInstance(new ImmediateInstanceHandle<>(annotatedEndpointInstance));
    return connectToServerInternal(instance, config, connectionBuilder);
}
/**
 * Connects an annotated client endpoint instance to the given URI, resolving SSL from
 * the discovered {@link WebsocketClientSslProvider}s (first non-null wins) and falling
 * back to the JVM default {@link SSLContext}.
 *
 * @throws ClosedChannelException if this container has been closed
 * @throws DeploymentException    if the instance's class is not a valid annotated client endpoint
 */
@Override
public Session connectToServer(final Object annotatedEndpointInstance, final URI path) throws DeploymentException, IOException {
    if(closed) {
        throw new ClosedChannelException();
    }
    ConfiguredClientEndpoint config = getClientEndpoint(annotatedEndpointInstance.getClass(), false);
    if (config == null) {
        throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(annotatedEndpointInstance.getClass());
    }
    Endpoint instance = config.getFactory().createInstance(new ImmediateInstanceHandle<>(annotatedEndpointInstance));
    XnioSsl ssl = null;
    // first provider that yields an SSL implementation wins
    for (WebsocketClientSslProvider provider : clientSslProviders) {
        ssl = provider.getSsl(xnioWorker.get(), annotatedEndpointInstance, path);
        if (ssl != null) {
            break;
        }
    }
    if(ssl == null) {
        try {
            // fall back to the JVM default SSL context; if unavailable, proceed without SSL
            ssl = new UndertowXnioSsl(xnioWorker.get().getXnio(), OptionMap.EMPTY, SSLContext.getDefault());
        } catch (NoSuchAlgorithmException e) {
            //ignore
        }
    }
    return connectToServerInternal(instance, ssl, config, path);
}
/**
 * Connects a new instance of an annotated client endpoint class using an explicit
 * connection builder. The instance is created via the endpoint's instance factory.
 *
 * @throws ClosedChannelException if this container has been closed
 * @throws DeploymentException    if the class is not a valid annotated client endpoint
 * @throws RuntimeException       wrapping InstantiationException if the endpoint cannot be created
 */
public Session connectToServer(Class<?> aClass, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
    if(closed) {
        throw new ClosedChannelException();
    }
    // requiresCreation=true: the container must be able to instantiate this class itself
    ConfiguredClientEndpoint config = getClientEndpoint(aClass, true);
    if (config == null) {
        throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(aClass);
    }
    try {
        AnnotatedEndpointFactory factory = config.getFactory();
        InstanceHandle<?> instance = config.getInstanceFactory().createInstance();
        return connectToServerInternal(factory.createInstance(instance), config, connectionBuilder);
    } catch (InstantiationException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Connects a new instance of an annotated client endpoint class to the given URI.
 * SSL is resolved from the discovered providers, falling back to the JVM default
 * {@link SSLContext}.
 *
 * @throws ClosedChannelException if this container has been closed
 * @throws DeploymentException    if the class is not a valid annotated client endpoint
 * @throws RuntimeException       wrapping InstantiationException if the endpoint cannot be created
 */
@Override
public Session connectToServer(Class<?> aClass, URI uri) throws DeploymentException, IOException {
    if(closed) {
        throw new ClosedChannelException();
    }
    ConfiguredClientEndpoint config = getClientEndpoint(aClass, true);
    if (config == null) {
        throw JsrWebSocketMessages.MESSAGES.notAValidClientEndpointType(aClass);
    }
    try {
        AnnotatedEndpointFactory factory = config.getFactory();
        InstanceHandle<?> instance = config.getInstanceFactory().createInstance();
        XnioSsl ssl = null;
        // first provider that yields an SSL implementation wins
        for (WebsocketClientSslProvider provider : clientSslProviders) {
            ssl = provider.getSsl(xnioWorker.get(), aClass, uri);
            if (ssl != null) {
                break;
            }
        }
        if(ssl == null) {
            try {
                // fall back to the JVM default SSL context; if unavailable, proceed without SSL
                ssl = new UndertowXnioSsl(xnioWorker.get().getXnio(), OptionMap.EMPTY, SSLContext.getDefault());
            } catch (NoSuchAlgorithmException e) {
                //ignore
            }
        }
        return connectToServerInternal(factory.createInstance(instance), ssl, config, uri);
    } catch (InstantiationException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Connects a programmatic {@link Endpoint} instance to the given URI. Builds a
 * connection builder (SSL, bind address, negotiated subprotocols/extensions from the
 * config) and delegates to the builder-based overload.
 *
 * @param config client config; if null an empty default config is used
 * @throws ClosedChannelException if this container has been closed
 */
@Override
public Session connectToServer(final Endpoint endpointInstance, final ClientEndpointConfig config, final URI path) throws DeploymentException, IOException {
    if(closed) {
        throw new ClosedChannelException();
    }
    ClientEndpointConfig cec = config != null ? config : ClientEndpointConfig.Builder.create().build();
    XnioSsl ssl = null;
    // first provider that yields an SSL implementation wins
    for (WebsocketClientSslProvider provider : clientSslProviders) {
        ssl = provider.getSsl(xnioWorker.get(), endpointInstance, cec, path);
        if (ssl != null) {
            break;
        }
    }
    if(ssl == null) {
        try {
            // fall back to the JVM default SSL context; if unavailable, proceed without SSL
            ssl = new UndertowXnioSsl(xnioWorker.get().getXnio(), OptionMap.EMPTY, SSLContext.getDefault());
        } catch (NoSuchAlgorithmException e) {
            //ignore
        }
    }
    //in theory we should not be able to connect until the deployment is complete, but the definition of when a deployment is complete is a bit nebulous.
    WebSocketClientNegotiation clientNegotiation = new ClientNegotiation(cec.getPreferredSubprotocols(), toExtensionList(cec.getExtensions()), cec);
    WebSocketClient.ConnectionBuilder connectionBuilder = WebSocketClient.connectionBuilder(xnioWorker.get(), bufferPool, path)
            .setSsl(ssl)
            .setBindAddress(clientBindAddress)
            .setClientNegotiation(clientNegotiation);
    return connectToServer(endpointInstance, config, connectionBuilder);
}
/**
 * Connects a programmatic {@link Endpoint} instance using an explicit connection
 * builder: awaits the connect (with the TIMEOUT user-property or the container
 * default), validates negotiated extensions against the config, registers/looks up
 * the endpoint's client configuration, then opens the session.
 *
 * NOTE(review): {@code connectionBuilder.getClientNegotiation()} is dereferenced
 * without a null check here, unlike {@code connectToServerInternal} which guards it —
 * confirm all callers always set a negotiation on the builder.
 *
 * @throws ClosedChannelException if this container has been closed
 * @throws DeploymentException    if the upgrade handshake fails or a negotiated
 *                                extension was not present in the client handshake
 */
public Session connectToServer(final Endpoint endpointInstance, final ClientEndpointConfig config, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
    if(closed) {
        throw new ClosedChannelException();
    }
    ClientEndpointConfig cec = config != null ? config : ClientEndpointConfig.Builder.create().build();
    WebSocketClientNegotiation clientNegotiation = connectionBuilder.getClientNegotiation();
    IoFuture<WebSocketChannel> session = connectionBuilder
            .connect();
    Number timeout = (Number) cec.getUserProperties().get(TIMEOUT);
    if(session.await(timeout == null ? DEFAULT_WEB_SOCKET_TIMEOUT_SECONDS: timeout.intValue(), TimeUnit.SECONDS) == IoFuture.Status.WAITING) {
        //add a notifier to close the channel if the connection actually completes
        session.cancel();
        session.addNotifier(new IoFuture.HandlingNotifier<WebSocketChannel, Object>() {
            @Override
            public void handleDone(WebSocketChannel data, Object attachment) {
                IoUtils.safeClose(data);
            }
        }, null);
        throw JsrWebSocketMessages.MESSAGES.connectionTimedOut();
    }
    WebSocketChannel channel;
    try {
        channel = session.get();
    } catch (UpgradeFailedException e) {
        throw new DeploymentException(e.getMessage(), e);
    }
    EndpointSessionHandler sessionHandler = new EndpointSessionHandler(this);
    final List<Extension> extensions = new ArrayList<>();
    final Map<String, Extension> extMap = new HashMap<>();
    for (Extension ext : cec.getExtensions()) {
        extMap.put(ext.getName(), ext);
    }
    // every server-selected extension must have been offered in the client config
    for (WebSocketExtension e : clientNegotiation.getSelectedExtensions()) {
        Extension ext = extMap.get(e.getName());
        if (ext == null) {
            throw JsrWebSocketMessages.MESSAGES.extensionWasNotPresentInClientHandshake(e.getName(), clientNegotiation.getSupportedExtensions());
        }
        extensions.add(ExtensionImpl.create(e));
    }
    ConfiguredClientEndpoint configured = clientEndpoints.get(endpointInstance.getClass());
    Endpoint instance = endpointInstance;
    if(configured == null) {
        synchronized (clientEndpoints) {
            // make sure to create an instance of AnnotatedEndpoint if we have the annotation
            configured = getClientEndpoint(endpointInstance.getClass(), false);
            if(configured == null) {
                // if we don't, add an endpoint anyway to the list of clientEndpoints
                clientEndpoints.put(endpointInstance.getClass(), configured = new ConfiguredClientEndpoint());
            } else {
                // use the factory in configured to reach the endpoint
                instance = configured.getFactory().createInstance(new ImmediateInstanceHandle<>(endpointInstance));
            }
        }
    }
    EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, cec.getDecoders(), cec.getEncoders());
    UndertowSession undertowSession = new UndertowSession(channel, connectionBuilder.getUri(), Collections.<String, String>emptyMap(), Collections.<String, List<String>>emptyMap(), sessionHandler, null, new ImmediateInstanceHandle<>(endpointInstance), cec, connectionBuilder.getUri().getQuery(), encodingFactory.createEncoding(cec), configured, clientNegotiation.getSelectedSubProtocol(), extensions, connectionBuilder);
    instance.onOpen(undertowSession, cec);
    // only start delivering frames once onOpen has completed
    channel.resumeReceives();
    return undertowSession;
}
/**
 * Connects a new instance of a programmatic {@link Endpoint} subclass to the given URI.
 *
 * @throws ClosedChannelException if this container has been closed
 * @throws RuntimeException       wrapping reflection failures while creating the endpoint
 */
@Override
public Session connectToServer(final Class<? extends Endpoint> endpointClass, final ClientEndpointConfig cec, final URI path) throws DeploymentException, IOException {
    if(closed) {
        throw new ClosedChannelException();
    }
    try {
        Endpoint endpoint = classIntrospecter.createInstanceFactory(endpointClass).createInstance().getInstance();
        return connectToServer(endpoint, cec, path);
    } catch (InstantiationException | NoSuchMethodException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Performs a direct servlet-level websocket upgrade for the given endpoint config,
 * bypassing normal deployment: builds an endpoint configuration (instance factory,
 * encoders/decoders, optional annotated-endpoint factory), selects a matching
 * handshake version and upgrades the connection.
 *
 * @param request    the upgrade request
 * @param response   the response; receives 503 if the container is closed
 * @param sec        the server endpoint configuration to deploy
 * @param pathParams path template parameters extracted by the caller
 * @throws ServletException wrapping any failure during configuration or handshake
 */
public void doUpgrade(HttpServletRequest request,
                      HttpServletResponse response, final ServerEndpointConfig sec,
                      Map<String,String> pathParams)
        throws ServletException, IOException {
    ServerEndpointConfig.Configurator configurator = sec.getConfigurator();
    try {
        EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, sec.getDecoders(), sec.getEncoders());
        PathTemplate pt = PathTemplate.create(sec.getPath());
        InstanceFactory<?> instanceFactory = null;
        try {
            instanceFactory = classIntrospecter.createInstanceFactory(sec.getEndpointClass());
        } catch (Exception e) {
            //so it is possible that this is still valid if a custom configurator is in use
            if (configurator == null || configurator.getClass() == ServerEndpointConfig.Configurator.class) {
                throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
            } else {
                // placeholder factory: a custom configurator is expected to supply instances,
                // so only fail if something actually asks this factory for one
                instanceFactory = new InstanceFactory<Object>() {
                    @Override
                    public InstanceHandle<Object> createInstance() throws InstantiationException {
                        throw JsrWebSocketMessages.MESSAGES.endpointDoesNotHaveAppropriateConstructor(sec.getEndpointClass());
                    }
                };
            }
        }
        if (configurator == null) {
            configurator = DefaultContainerConfigurator.INSTANCE;
        }
        // rebuild the config so the (possibly defaulted) configurator is always set
        ServerEndpointConfig config = ServerEndpointConfig.Builder.create(sec.getEndpointClass(), sec.getPath())
                .decoders(sec.getDecoders())
                .encoders(sec.getEncoders())
                .subprotocols(sec.getSubprotocols())
                .extensions(sec.getExtensions())
                .configurator(configurator)
                .build();
        AnnotatedEndpointFactory annotatedEndpointFactory = null;
        // non-Endpoint classes are driven through the annotated-endpoint machinery
        if(!Endpoint.class.isAssignableFrom(sec.getEndpointClass())) {
            annotatedEndpointFactory = AnnotatedEndpointFactory.create(sec.getEndpointClass(), encodingFactory, pt.getParameterNames());
        }
        ConfiguredServerEndpoint confguredServerEndpoint;
        if(annotatedEndpointFactory == null) {
            confguredServerEndpoint = new ConfiguredServerEndpoint(config, instanceFactory, null, encodingFactory);
        } else {
            confguredServerEndpoint = new ConfiguredServerEndpoint(config, instanceFactory, null, encodingFactory, annotatedEndpointFactory, installedExtensions);
        }
        WebSocketHandshakeHolder hand;
        WebSocketDeploymentInfo info = (WebSocketDeploymentInfo)request.getServletContext().getAttribute(WebSocketDeploymentInfo.ATTRIBUTE_NAME);
        if (info == null || info.getExtensions() == null) {
            hand = ServerWebSocketContainer.handshakes(confguredServerEndpoint);
        } else {
            hand = ServerWebSocketContainer.handshakes(confguredServerEndpoint, info.getExtensions());
        }
        final ServletWebSocketHttpExchange facade = new ServletWebSocketHttpExchange(request, response, new HashSet<WebSocketChannel>());
        // pick the first handshake implementation whose protocol version matches the request
        Handshake handshaker = null;
        for (Handshake method : hand.handshakes) {
            if (method.matches(facade)) {
                handshaker = method;
                break;
            }
        }
        if (handshaker != null) {
            if(isClosed()) {
                response.sendError(StatusCodes.SERVICE_UNAVAILABLE);
                return;
            }
            facade.putAttachment(HandshakeUtil.PATH_PARAMS, pathParams);
            final Handshake selected = handshaker;
            facade.upgradeChannel(new HttpUpgradeListener() {
                @Override
                public void handleUpgrade(StreamConnection streamConnection, HttpServerExchange exchange) {
                    WebSocketChannel channel = selected.createChannel(facade, streamConnection, facade.getBufferPool());
                    new EndpointSessionHandler(ServerWebSocketContainer.this).onConnect(facade, channel);
                }
            });
            handshaker.handshake(facade);
            return;
        }
    } catch (Exception e) {
        throw new ServletException(e);
    }
}
/**
 * Builds a connection builder (SSL, bind address, negotiation derived from the
 * configured client endpoint) for the given URI and delegates to the builder-based
 * internal connect.
 */
private Session connectToServerInternal(final Endpoint endpointInstance, XnioSsl ssl, final ConfiguredClientEndpoint cec, final URI path) throws DeploymentException, IOException {
    //in theory we should not be able to connect until the deployment is complete, but the definition of when a deployment is complete is a bit nebulous.
    WebSocketClientNegotiation clientNegotiation = new ClientNegotiation(cec.getConfig().getPreferredSubprotocols(), toExtensionList(cec.getConfig().getExtensions()), cec.getConfig());
    WebSocketClient.ConnectionBuilder connectionBuilder = WebSocketClient.connectionBuilder(xnioWorker.get(), bufferPool, path)
            .setSsl(ssl)
            .setBindAddress(clientBindAddress)
            .setClientNegotiation(clientNegotiation);
    return connectToServerInternal(endpointInstance, cec, connectionBuilder);
}
/**
 * Core client connect path for an already-configured endpoint: awaits the connect
 * (TIMEOUT user-property or container default, in seconds), validates negotiated
 * extensions, creates the {@link UndertowSession}, fires {@code onOpen} and only then
 * resumes receives.
 *
 * @throws DeploymentException if the upgrade fails or the connect times out, or a
 *                             selected extension was not offered by the client config
 */
private Session connectToServerInternal(final Endpoint endpointInstance, final ConfiguredClientEndpoint cec, WebSocketClient.ConnectionBuilder connectionBuilder) throws DeploymentException, IOException {
    IoFuture<WebSocketChannel> session = connectionBuilder
            .connect();
    Number timeout = (Number) cec.getConfig().getUserProperties().get(TIMEOUT);
    IoFuture.Status result = session.await(timeout == null ? DEFAULT_WEB_SOCKET_TIMEOUT_SECONDS : timeout.intValue(), TimeUnit.SECONDS);
    if(result == IoFuture.Status.WAITING) {
        //add a notifier to close the channel if the connection actually completes
        session.cancel();
        session.addNotifier(new IoFuture.HandlingNotifier<WebSocketChannel, Object>() {
            @Override
            public void handleDone(WebSocketChannel data, Object attachment) {
                IoUtils.safeClose(data);
            }
        }, null);
        throw JsrWebSocketMessages.MESSAGES.connectionTimedOut();
    }
    WebSocketChannel channel;
    try {
        channel = session.get();
    } catch (UpgradeFailedException e) {
        throw new DeploymentException(e.getMessage(), e);
    }
    EndpointSessionHandler sessionHandler = new EndpointSessionHandler(this);
    final List<Extension> extensions = new ArrayList<>();
    final Map<String, Extension> extMap = new HashMap<>();
    for (Extension ext : cec.getConfig().getExtensions()) {
        extMap.put(ext.getName(), ext);
    }
    String subProtocol = null;
    // negotiation may legitimately be absent; only then are extensions/subprotocol skipped
    if(connectionBuilder.getClientNegotiation() != null) {
        for (WebSocketExtension e : connectionBuilder.getClientNegotiation().getSelectedExtensions()) {
            Extension ext = extMap.get(e.getName());
            if (ext == null) {
                throw JsrWebSocketMessages.MESSAGES.extensionWasNotPresentInClientHandshake(e.getName(), connectionBuilder.getClientNegotiation().getSupportedExtensions());
            }
            extensions.add(ExtensionImpl.create(e));
        }
        subProtocol = connectionBuilder.getClientNegotiation().getSelectedSubProtocol();
    }
    UndertowSession undertowSession = new UndertowSession(channel, connectionBuilder.getUri(), Collections.<String, String>emptyMap(), Collections.<String, List<String>>emptyMap(), sessionHandler, null, new ImmediateInstanceHandle<>(endpointInstance), cec.getConfig(), connectionBuilder.getUri().getQuery(), cec.getEncodingFactory().createEncoding(cec.getConfig()), cec, subProtocol, extensions, connectionBuilder);
    endpointInstance.onOpen(undertowSession, cec.getConfig());
    // only start delivering frames once onOpen has completed
    channel.resumeReceives();
    return undertowSession;
}
/** {@inheritDoc} Container-wide default session idle timeout (ms). */
@Override
public long getDefaultMaxSessionIdleTimeout() {
    return defaultMaxSessionIdleTimeout;
}
@Override
public void setDefaultMaxSessionIdleTimeout(final long timeout) {
    this.defaultMaxSessionIdleTimeout = timeout;
}
/** {@inheritDoc} Container-wide default binary message buffer size (bytes). */
@Override
public int getDefaultMaxBinaryMessageBufferSize() {
    return defaultMaxBinaryMessageBufferSize;
}
@Override
public void setDefaultMaxBinaryMessageBufferSize(int defaultMaxBinaryMessageBufferSize) {
    this.defaultMaxBinaryMessageBufferSize = defaultMaxBinaryMessageBufferSize;
}
/** {@inheritDoc} Container-wide default text message buffer size (chars). */
@Override
public int getDefaultMaxTextMessageBufferSize() {
    return defaultMaxTextMessageBufferSize;
}
@Override
public void setDefaultMaxTextMessageBufferSize(int defaultMaxTextMessageBufferSize) {
    this.defaultMaxTextMessageBufferSize = defaultMaxTextMessageBufferSize;
}
/** {@inheritDoc} Returns a defensive copy; mutations do not affect the container. */
@Override
public Set<Extension> getInstalledExtensions() {
    return new HashSet<>(installedExtensions);
}
/**
 * Runs a websocket endpoint invocation, optionally dispatching it to the supplied
 * executor.
 * <p>
 * Dispatching to a thread pool matters because endpoints frequently use blocking IO;
 * receives are suspended while an invocation is in flight so that endpoint methods are
 * never invoked concurrently for one session.
 *
 * @param executor   executor used when the container is configured to dispatch to a worker
 * @param invocation the endpoint task to run
 */
public void invokeEndpointMethod(final Executor executor, final Runnable invocation) {
    if (!dispatchToWorker) {
        // container configured for in-place invocation
        invokeEndpointMethod(invocation);
        return;
    }
    executor.execute(new Runnable() {
        @Override
        public void run() {
            invokeEndpointMethod(invocation);
        }
    });
}
/**
 * Directly invokes an endpoint method on the current thread, wrapped in the composed
 * thread-setup action so the correct context (class loader, security, etc. as provided
 * by the deployment's ThreadSetupHandlers) is in place.
 *
 * @param invocation The invocation
 * @throws RuntimeException wrapping any checked exception thrown by the task
 */
public void invokeEndpointMethod(final Runnable invocation) {
    try {
        invokeEndpointTask.call(null, invocation);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Registers an annotated endpoint class. Registration is rejected once deployment has
 * completed; duplicates are silently ignored. Deployment failures are both recorded
 * (for {@link #validateDeployment()}) and rethrown.
 *
 * @throws DeploymentException if deployment is complete or the class cannot be deployed
 */
@Override
public void addEndpoint(final Class<?> endpoint) throws DeploymentException {
    if (deploymentComplete) {
        throw JsrWebSocketMessages.MESSAGES.cannotAddEndpointAfterDeployment();
    }
    //work around a TCK7 problem
    //if the class has already been added we just ignore it
    if(annotatedEndpointClasses.contains(endpoint)) {
        return;
    }
    annotatedEndpointClasses.add(endpoint);
    try {
        addEndpointInternal(endpoint, true);
    } catch (DeploymentException e) {
        // remember the failure so validateDeployment() can report it later
        deploymentExceptions.add(e);
        throw e;
    }
}
/**
 * Deploys a class annotated with either {@code @ServerEndpoint} or
 * {@code @ClientEndpoint}. Server endpoints are checked for path collisions, given an
 * instance factory and configurator, and registered as configured server endpoints;
 * client endpoints are registered in the client endpoint map.
 *
 * @param endpoint         the annotated class
 * @param requiresCreation if true, failure to build an instance factory for a client
 *                         endpoint is fatal; if false a factory that throws on use is
 *                         substituted (the user will supply instances)
 * @throws DeploymentException if the class is unannotated, its path collides with an
 *                             existing endpoint, or required factories cannot be built
 */
private synchronized void addEndpointInternal(final Class<?> endpoint, boolean requiresCreation) throws DeploymentException {
    ServerEndpoint serverEndpoint = endpoint.getAnnotation(ServerEndpoint.class);
    ClientEndpoint clientEndpoint = endpoint.getAnnotation(ClientEndpoint.class);
    if (serverEndpoint != null) {
        JsrWebSocketLogger.ROOT_LOGGER.addingAnnotatedServerEndpoint(endpoint, serverEndpoint.value());
        final PathTemplate template = PathTemplate.create(serverEndpoint.value());
        if (seenPaths.contains(template)) {
            // find the equivalent template purely for the error message
            PathTemplate existing = null;
            for (PathTemplate p : seenPaths) {
                if (p.compareTo(template) == 0) {
                    existing = p;
                    break;
                }
            }
            throw JsrWebSocketMessages.MESSAGES.multipleEndpointsWithOverlappingPaths(template, existing);
        }
        seenPaths.add(template);
        Class<? extends ServerEndpointConfig.Configurator> configuratorClass = serverEndpoint.configurator();
        EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, serverEndpoint.decoders(), serverEndpoint.encoders());
        AnnotatedEndpointFactory annotatedEndpointFactory = AnnotatedEndpointFactory.create(endpoint, encodingFactory, template.getParameterNames());
        InstanceFactory<?> instanceFactory = null;
        try {
            instanceFactory = classIntrospecter.createInstanceFactory(endpoint);
        } catch (Exception e) {
            //so it is possible that this is still valid if a custom configurator is in use
            if(configuratorClass == ServerEndpointConfig.Configurator.class) {
                throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
            } else {
                // placeholder: the custom configurator is expected to supply instances
                instanceFactory = new InstanceFactory<Object>() {
                    @Override
                    public InstanceHandle<Object> createInstance() throws InstantiationException {
                        throw JsrWebSocketMessages.MESSAGES.endpointDoesNotHaveAppropriateConstructor(endpoint);
                    }
                };
            }
        }
        ServerEndpointConfig.Configurator configurator;
        if (configuratorClass != ServerEndpointConfig.Configurator.class) {
            try {
                configurator = classIntrospecter.createInstanceFactory(configuratorClass).createInstance().getInstance();
            } catch (InstantiationException | NoSuchMethodException e) {
                throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
            }
        } else {
            configurator = DefaultContainerConfigurator.INSTANCE;
        }
        ServerEndpointConfig config = ServerEndpointConfig.Builder.create(endpoint, serverEndpoint.value())
                .decoders(Arrays.asList(serverEndpoint.decoders()))
                .encoders(Arrays.asList(serverEndpoint.encoders()))
                .subprotocols(Arrays.asList(serverEndpoint.subprotocols()))
                .extensions(Collections.<Extension>emptyList())
                .configurator(configurator)
                .build();
        ConfiguredServerEndpoint confguredServerEndpoint = new ConfiguredServerEndpoint(config, instanceFactory, template, encodingFactory, annotatedEndpointFactory, installedExtensions);
        configuredServerEndpoints.add(confguredServerEndpoint);
        handleAddingFilterMapping();
    } else if (clientEndpoint != null) {
        JsrWebSocketLogger.ROOT_LOGGER.addingAnnotatedClientEndpoint(endpoint);
        EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, clientEndpoint.decoders(), clientEndpoint.encoders());
        InstanceFactory<?> instanceFactory;
        try {
            instanceFactory = classIntrospecter.createInstanceFactory(endpoint);
        } catch (Exception e) {
            try {
                instanceFactory = new ConstructorInstanceFactory<>(endpoint.getConstructor()); //this endpoint cannot be created by the container, the user will instantiate it
            } catch (NoSuchMethodException e1) {
                if(requiresCreation) {
                    throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
                } else {
                    // last resort: factory that fails only if actually used
                    instanceFactory = new InstanceFactory<Object>() {
                        @Override
                        public InstanceHandle<Object> createInstance() throws InstantiationException {
                            throw new InstantiationException();
                        }
                    };
                }
            }
        }
        AnnotatedEndpointFactory factory = AnnotatedEndpointFactory.create(endpoint, encodingFactory, Collections.<String>emptySet());
        ClientEndpointConfig.Configurator configurator = null;
        try {
            configurator = classIntrospecter.createInstanceFactory(clientEndpoint.configurator()).createInstance().getInstance();
        } catch (InstantiationException | NoSuchMethodException e) {
            throw JsrWebSocketMessages.MESSAGES.couldNotDeploy(e);
        }
        ClientEndpointConfig config = ClientEndpointConfig.Builder.create()
                .decoders(Arrays.asList(clientEndpoint.decoders()))
                .encoders(Arrays.asList(clientEndpoint.encoders()))
                .preferredSubprotocols(Arrays.asList(clientEndpoint.subprotocols()))
                .configurator(configurator)
                .build();
        ConfiguredClientEndpoint configuredClientEndpoint = new ConfiguredClientEndpoint(config, factory, encodingFactory, instanceFactory);
        clientEndpoints.put(endpoint, configuredClientEndpoint);
    } else {
        throw JsrWebSocketMessages.MESSAGES.classWasNotAnnotated(endpoint);
    }
}
/**
 * If a servlet context was registered as needing the websocket filter, adds the
 * catch-all filter mapping to it, invalidates its servlet path cache, and clears the
 * pending reference so this happens at most once.
 */
private void handleAddingFilterMapping() {
    if (contextToAddFilter != null) {
        contextToAddFilter.getDeployment().getDeploymentInfo().addFilterUrlMapping(Bootstrap.FILTER_NAME, "/*", DispatcherType.REQUEST);
        contextToAddFilter.getDeployment().getServletPaths().invalidate();
        contextToAddFilter = null;
    }
}
/**
 * Registers a programmatic server endpoint. Rejected once deployment has completed.
 * The endpoint's path must not collide with an already-registered path template.
 *
 * @throws DeploymentException if deployment is complete or paths overlap
 */
@Override
public void addEndpoint(final ServerEndpointConfig endpoint) throws DeploymentException {
    if (deploymentComplete) {
        throw JsrWebSocketMessages.MESSAGES.cannotAddEndpointAfterDeployment();
    }
    JsrWebSocketLogger.ROOT_LOGGER.addingProgramaticEndpoint(endpoint.getEndpointClass(), endpoint.getPath());
    final PathTemplate template = PathTemplate.create(endpoint.getPath());
    if (seenPaths.contains(template)) {
        // find the equivalent template purely for the error message
        PathTemplate existing = null;
        for (PathTemplate p : seenPaths) {
            if (p.compareTo(template) == 0) {
                existing = p;
                break;
            }
        }
        throw JsrWebSocketMessages.MESSAGES.multipleEndpointsWithOverlappingPaths(template, existing);
    }
    seenPaths.add(template);
    EncodingFactory encodingFactory = EncodingFactory.createFactory(classIntrospecter, endpoint.getDecoders(), endpoint.getEncoders());
    AnnotatedEndpointFactory annotatedEndpointFactory = null;
    if(!Endpoint.class.isAssignableFrom(endpoint.getEndpointClass())) {
        // We may want to check that the path in @ServerEndpoint matches the specified path, and throw if they are not equivalent
        annotatedEndpointFactory = AnnotatedEndpointFactory.create(endpoint.getEndpointClass(), encodingFactory, template.getParameterNames());
    }
    ConfiguredServerEndpoint confguredServerEndpoint = new ConfiguredServerEndpoint(endpoint, null, template, encodingFactory, annotatedEndpointFactory, endpoint.getExtensions());
    configuredServerEndpoints.add(confguredServerEndpoint);
    handleAddingFilterMapping();
}
/**
 * Resolves (and lazily registers) the client endpoint configuration for a type. Walks
 * the superclass chain for {@code @ClientEndpoint}; uses check-then-lock-then-recheck
 * on the endpoint map so concurrent callers register the type at most once.
 *
 * @param endpointType     the (possibly subclassed) annotated endpoint type
 * @param requiresCreation passed through to deployment; see addEndpointInternal
 * @return the configuration, or null if the type carries no {@code @ClientEndpoint}
 * @throws RuntimeException wrapping a DeploymentException from lazy registration
 */
private ConfiguredClientEndpoint getClientEndpoint(final Class<?> endpointType, boolean requiresCreation) {
    Class<?> type = endpointType;
    // find the nearest ancestor that carries @ClientEndpoint
    while (type != Object.class && type != null && !type.isAnnotationPresent(ClientEndpoint.class)) {
        type = type.getSuperclass();
    }
    if(type == Object.class || type == null) {
        return null;
    }
    // fast path: already registered
    ConfiguredClientEndpoint existing = clientEndpoints.get(type);
    if (existing != null) {
        return existing;
    }
    synchronized (this) {
        // recheck under the lock in case another thread registered it
        existing = clientEndpoints.get(type);
        if (existing != null) {
            return existing;
        }
        if (type.isAnnotationPresent(ClientEndpoint.class)) {
            try {
                addEndpointInternal(type, requiresCreation);
                return clientEndpoints.get(type);
            } catch (DeploymentException e) {
                throw new RuntimeException(e);
            }
        }
        return null;
    }
}
/**
 * Throws if any endpoint registration failed. All recorded deployment failures are
 * attached as suppressed exceptions so none are lost.
 *
 * @throws RuntimeException carrying every recorded DeploymentException as suppressed
 */
public void validateDeployment() {
    if (deploymentExceptions.isEmpty()) {
        return;
    }
    RuntimeException failure = JsrWebSocketMessages.MESSAGES.deploymentFailedDueToProgramaticErrors();
    for (DeploymentException suppressed : deploymentExceptions) {
        failure.addSuppressed(suppressed);
    }
    throw failure;
}
/**
 * Marks deployment as finished (no further endpoints may be added) and rethrows any
 * registration failures recorded so far.
 */
public void deploymentComplete() {
    deploymentComplete = true;
    validateDeployment();
}
/** @return the live list of configured server endpoints (not a copy). */
public List<ConfiguredServerEndpoint> getConfiguredServerEndpoints() {
    return configuredServerEndpoints;
}
/** @return the servlet context still awaiting the websocket filter mapping, or null. */
public ServletContextImpl getContextToAddFilter() {
    return contextToAddFilter;
}
/** Registers a servlet context whose filter mapping will be added on next endpoint registration. */
public void setContextToAddFilter(ServletContextImpl contextToAddFilter) {
    this.contextToAddFilter = contextToAddFilter;
}
/**
 * Closes the container: closes every open session, then waits up to {@code waitTime}
 * milliseconds (shared across all endpoints) for the sessions to finish closing.
 *
 * @param waitTime maximum total wait, in milliseconds
 */
public synchronized void close(int waitTime) {
    doClose();
    //wait for them to close
    final long deadline = System.currentTimeMillis() + waitTime;
    for (ConfiguredServerEndpoint endpoint : configuredServerEndpoints) {
        endpoint.awaitClose(deadline - System.currentTimeMillis());
    }
}
/** Closes the container with the default 10 second grace period. */
@Override
public synchronized void close() {
    close(10000);
}
/** @return the buffer pool used for websocket frames. */
public ByteBufferPool getBufferPool() {
    return bufferPool;
}
/** @return the XNIO worker, resolved from the supplier on each call. */
public XnioWorker getXnioWorker() {
    return xnioWorker.get();
}
/**
 * Converts javax.websocket {@link Extension}s (with their parameters) into Undertow
 * {@link WebSocketExtension}s.
 *
 * @param extensions the API-level extensions to convert
 * @return a new list of equivalent wire-level extensions, in the same order
 */
private static List<WebSocketExtension> toExtensionList(final List<Extension> extensions) {
    final List<WebSocketExtension> converted = new ArrayList<>();
    for (Extension extension : extensions) {
        final List<WebSocketExtension.Parameter> params = new ArrayList<>();
        for (Extension.Parameter parameter : extension.getParameters()) {
            params.add(new WebSocketExtension.Parameter(parameter.getName(), parameter.getValue()));
        }
        converted.add(new WebSocketExtension(extension.getName(), params));
    }
    return converted;
}
/**
 * Client-side handshake negotiation that bridges Undertow's negotiation callbacks to
 * the JSR-356 {@link ClientEndpointConfig.Configurator} before/after hooks.
 */
private static class ClientNegotiation extends WebSocketClientNegotiation {
    private final ClientEndpointConfig config;
    ClientNegotiation(List<String> supportedSubProtocols, List<WebSocketExtension> supportedExtensions, ClientEndpointConfig config) {
        super(supportedSubProtocols, supportedExtensions);
        this.config = config;
    }
    /**
     * Exposes the response headers to the configurator as a mutable, case-insensitive
     * copy (header names are case-insensitive per HTTP).
     */
    @Override
    public void afterRequest(final Map<String, List<String>> headers) {
        ClientEndpointConfig.Configurator configurator = config.getConfigurator();
        if (configurator != null) {
            final Map<String, List<String>> newHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
            for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
                // deep-copy the value lists so the configurator cannot mutate the originals
                ArrayList<String> arrayList = new ArrayList<>();
                arrayList.addAll(entry.getValue());
                newHeaders.put(entry.getKey(), arrayList);
            }
            configurator.afterResponse(new HandshakeResponse() {
                @Override
                public Map<String, List<String>> getHeaders() {
                    return newHeaders;
                }
            });
        }
    }
    /**
     * Lets the configurator rewrite the request headers, then copies non-empty entries
     * back into the original map.
     * NOTE(review): this copy uses a plain HashMap while afterRequest uses a
     * case-insensitive TreeMap — confirm whether the asymmetry is intentional.
     */
    @Override
    public void beforeRequest(Map<String, List<String>> headers) {
        ClientEndpointConfig.Configurator configurator = config.getConfigurator();
        if (configurator != null) {
            final Map<String, List<String>> newHeaders = new HashMap<>();
            for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
                ArrayList<String> arrayList = new ArrayList<>();
                arrayList.addAll(entry.getValue());
                newHeaders.put(entry.getKey(), arrayList);
            }
            configurator.beforeRequest(newHeaders);
            headers.clear(); //TODO: more efficient way
            for (Map.Entry<String, List<String>> entry : newHeaders.entrySet()) {
                if (!entry.getValue().isEmpty()) {
                    headers.put(entry.getKey(), entry.getValue());
                }
            }
        }
    }
}
/**
 * Pauses the container: marks it closed, asks every open session to close
 * (GOING_AWAY) on its own executor, and notifies the listener once every endpoint
 * reports that all its sessions have closed.
 *
 * @param listener listener to notify when the pause completes; may be null
 */
public synchronized void pause(PauseListener listener) {
    closed = true;
    if(configuredServerEndpoints.isEmpty()) {
        // Bug fix: listener may legitimately be null (the code below null-checks it);
        // previously this path dereferenced it unconditionally and threw NPE.
        if (listener != null) {
            listener.paused();
        }
        return;
    }
    if(listener != null) {
        pauseListeners.add(listener);
    }
    // ask each open session to close asynchronously on its own executor
    for (ConfiguredServerEndpoint endpoint : configuredServerEndpoints) {
        for (final Session session : endpoint.getOpenSessions()) {
            ((UndertowSession)session).getExecutor().execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        session.close(new CloseReason(CloseReason.CloseCodes.GOING_AWAY, ""));
                    } catch (Exception e) {
                        JsrWebSocketLogger.ROOT_LOGGER.couldNotCloseOnUndeploy(e);
                    }
                }
            });
        }
    }
    // countdown callback: fires the pause listeners once every endpoint has reported closed
    Runnable done = new Runnable() {
        int count = configuredServerEndpoints.size();
        @Override
        public synchronized void run() {
            List<PauseListener> copy = null;
            synchronized (ServerWebSocketContainer.this) {
                count--;
                if (count == 0) {
                    // snapshot the listeners so they are invoked outside the container lock
                    copy = new ArrayList<>(pauseListeners);
                    pauseListeners.clear();
                }
            }
            if(copy != null) {
                for (PauseListener p : copy) {
                    p.paused();
                }
            }
        }
    };
    for (ConfiguredServerEndpoint endpoint : configuredServerEndpoints) {
        endpoint.notifyClosed(done);
    }
}
/**
 * Marks the container closed and synchronously closes every open session with
 * GOING_AWAY, logging (not propagating) individual close failures.
 */
private void doClose() {
    closed = true;
    for (ConfiguredServerEndpoint endpoint : configuredServerEndpoints) {
        for (Session session : endpoint.getOpenSessions()) {
            try {
                session.close(new CloseReason(CloseReason.CloseCodes.GOING_AWAY, ""));
            } catch (Exception e) {
                JsrWebSocketLogger.ROOT_LOGGER.couldNotCloseOnUndeploy(e);
            }
        }
    }
}
/**
 * Builds the handshake implementations (Hybi 13/08/07, tried in that order) for an
 * endpoint, with no extension handshakes.
 */
static WebSocketHandshakeHolder handshakes(ConfiguredServerEndpoint config) {
    List<Handshake> handshakes = new ArrayList<>();
    handshakes.add(new JsrHybi13Handshake(config));
    handshakes.add(new JsrHybi08Handshake(config));
    handshakes.add(new JsrHybi07Handshake(config));
    return new WebSocketHandshakeHolder(handshakes, config);
}
/**
 * Builds the handshake implementations for an endpoint, registering each extension
 * handshake on every protocol version.
 */
static WebSocketHandshakeHolder handshakes(ConfiguredServerEndpoint config, List<ExtensionHandshake> extensions) {
    List<Handshake> handshakes = new ArrayList<>();
    Handshake jsrHybi13Handshake = new JsrHybi13Handshake(config);
    Handshake jsrHybi08Handshake = new JsrHybi08Handshake(config);
    Handshake jsrHybi07Handshake = new JsrHybi07Handshake(config);
    for (ExtensionHandshake extension : extensions) {
        jsrHybi13Handshake.addExtension(extension);
        jsrHybi08Handshake.addExtension(extension);
        jsrHybi07Handshake.addExtension(extension);
    }
    handshakes.add(jsrHybi13Handshake);
    handshakes.add(jsrHybi08Handshake);
    handshakes.add(jsrHybi07Handshake);
    return new WebSocketHandshakeHolder(handshakes, config);
}
/** Immutable pairing of an endpoint with the handshake implementations that serve it. */
static final class WebSocketHandshakeHolder {
    final List<Handshake> handshakes;
    final ConfiguredServerEndpoint endpoint;
    private WebSocketHandshakeHolder(List<Handshake> handshakes, ConfiguredServerEndpoint endpoint) {
        this.handshakes = handshakes;
        this.endpoint = endpoint;
    }
}
/**
 * Resumes a paused container: reopens it and notifies (then discards) any listeners
 * that were still waiting for the pause to complete.
 */
public synchronized void resume() {
    closed = false;
    for(PauseListener p : pauseListeners) {
        p.resumed();
    }
    pauseListeners.clear();
}
/** @return the reconnect handler for client connections, or null if none was configured. */
public WebSocketReconnectHandler getWebSocketReconnectHandler() {
    return webSocketReconnectHandler;
}
/** @return true if the container has been closed or paused. */
public boolean isClosed() {
    return closed;
}
/** Callback notified when a pause completes and when the container is resumed. */
public interface PauseListener {
    /** Invoked once all sessions have closed and the container is fully paused. */
    void paused();
    /** Invoked when the container is resumed before the pause completed. */
    void resumed();
}
/** @return true if endpoint methods are dispatched to a worker thread. */
public boolean isDispatchToWorker() {
    return dispatchToWorker;
}
} |
Java | public class CBRMode extends BaseMediaBitrateConfig {
/**
*
* @param bufSize
* @param bitrate 固定码率值
*/
public CBRMode(int bufSize, int bitrate){
if(bufSize<=0||bitrate<=0){
throw new IllegalArgumentException("bufSize or bitrate value error!");
}
this.bufSize=bufSize;
this.bitrate=bitrate;
this.mode= MODE.CBR;
}
} |
Java | public class AndQueryNode extends BooleanQueryNode {
/**
 * @param clauses
 * - the query nodes to be and'ed
 * @throws IllegalArgumentException if {@code clauses} is null or empty
 */
public AndQueryNode(List<QueryNode> clauses) {
    // Validate before handing the list to the superclass: the original called
    // super(clauses) first, so a null/empty list could fail inside the superclass
    // before the intended IllegalArgumentException was ever thrown.
    super(requireNonEmpty(clauses));
}

/** Returns {@code clauses} unchanged, rejecting null or empty lists. */
private static List<QueryNode> requireNonEmpty(List<QueryNode> clauses) {
    if ((clauses == null) || (clauses.size() == 0)) {
        throw new IllegalArgumentException(
            "AND query must have at least one clause");
    }
    return clauses;
}
/** Renders this node as a pseudo-XML {@code <boolean operation='and'>} element. */
@Override
public String toString() {
    List<QueryNode> children = getChildren();
    if (children == null || children.isEmpty()) {
        return "<boolean operation='and'/>";
    }
    StringBuilder xml = new StringBuilder("<boolean operation='and'>");
    for (QueryNode child : children) {
        xml.append("\n").append(child.toString());
    }
    return xml.append("\n</boolean>").toString();
}
/** Joins the children with {@code " AND "}, parenthesizing unless this node is the root or grouped. */
@Override
public CharSequence toQueryString(EscapeQuerySyntax escapeSyntaxParser) {
    List<QueryNode> children = getChildren();
    if (children == null || children.isEmpty()) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    boolean first = true;
    for (QueryNode child : children) {
        if (!first) {
            joined.append(" AND ");
        }
        first = false;
        joined.append(child.toQueryString(escapeSyntaxParser));
    }
    // The root node, or a direct child of an explicit group, needs no parenthesis.
    if ((getParent() != null && getParent() instanceof GroupQueryNode) || isRoot()) {
        return joined.toString();
    }
    return "( " + joined.toString() + " )";
}
} |
Java | @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
class ImageBlurOperator extends ImageProcessor<ImageData> {
/**
 * @param imageDataField name of the stream field holding the ImageData to process
 */
ImageBlurOperator(String imageDataField) {
    super(imageDataField);
}
/** Returns the blurred form of the input image produced by {@code imageData.getBlurred(uqi)}. */
@Override
protected ImageData processImage(UQI uqi, ImageData imageData) {
    return imageData.getBlurred(uqi);
}
} |
Java | public class XSKDataStructureHDBTableModel extends XSKDataStructureModel {
    // Declared table category -- TODO confirm allowed values against the hdbtable parser
    private String tableType;
    // Optional human-readable description of the table
    private String description;
    // Column definitions, in declaration order; never null
    private List<XSKDataStructureHDBTableColumnModel> columns = new ArrayList<XSKDataStructureHDBTableColumnModel>();
    // Table constraints; never null -- initialized to an empty constraints model
    private XSKDataStructureHDBTableConstraintsModel constraints = new XSKDataStructureHDBTableConstraintsModel();
    /**
     * Getter for the columns.
     *
     * @return the columns
     */
    public List<XSKDataStructureHDBTableColumnModel> getColumns() {
        return columns;
    }
    /**
     * Gets the constraints.
     *
     * @return the constraints
     */
    public XSKDataStructureHDBTableConstraintsModel getConstraints() {
        return constraints;
    }
    /**
     * @return the tableType
     */
    public String getTableType() {
        return tableType;
    }
    /**
     * @param tableType the tableType to set
     */
    public void setTableType(String tableType) {
        this.tableType = tableType;
    }
    /**
     * @return the description
     */
    public String getDescription() {
        return description;
    }
    /**
     * @param description the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }
} |
Java | public class JsonSchemaReference implements JsonDocumentationOwner, JsonModelObject {
private JsonSchema schema;
private String schemaPath;
private JsonDocumentation schemaPathDocumentation;
private List<JsonContextualValue> schemaPathEquivalentItems = new ArrayList<>();
private List<JsonContextualValue> schemaPathExampleItems = new ArrayList<>();
/**
* Default constructor.
*/
public JsonSchemaReference() {}
/**
* Constructor that initializes this reference as a by-value schema.
*
* @param schemaDef the JSON schema definition to assign
*/
public JsonSchemaReference(JsonSchema schemaDef) {
this.schema = schemaDef;
}
/**
* Constructor that initializes this reference as a by-reference schema assignments.
*
* @param schemaPath the schema path to assign
*/
public JsonSchemaReference(String schemaPath) {
this.schemaPath = schemaPath;
}
/**
* Returns the schema definition for by-value schema assignments.
*
* @return JsonSchema
*/
public JsonSchema getSchema() {
return schema;
}
/**
 * Assigns the schema definition for by-value schema assignments.
 *
 * <p>Mutually exclusive with {@code setSchemaPath(String)}: assigning a
 * by-value schema clears any previously assigned schema path.
 *
 * @param schemaDef the JSON schema definition to assign
 */
public void setSchema(JsonSchema schemaDef) {
    this.schema = schemaDef;
    this.schemaPath = null;
}
/**
* Returns the schema path for by-reference schema assignments.
*
* @return String
*/
public String getSchemaPath() {
return schemaPath;
}
/**
 * Assigns the schema path for by-reference schema assignments.
 *
 * <p>Mutually exclusive with {@code setSchema(JsonSchema)}: assigning a
 * schema path clears any previously assigned by-value schema.
 *
 * @param schemaRef the schema path to assign
 */
public void setSchemaPath(String schemaRef) {
    this.schemaPath = schemaRef;
    this.schema = null;
}
/**
* Returns the documentation for the schema path. If this schema reference contains a by-value schema (not a path
* reference), this documentation item will be ignored during marshalling.
*
* @return JsonDocumentation
*/
public JsonDocumentation getDocumentation() {
return schemaPathDocumentation;
}
/**
* Assigns the documentation for the schema path. If this schema reference contains a by-value schema (not a path
* reference), this documentation item will be ignored during marshalling.
*
* @param schemaPathDocumentation the schema path documentation to assign
*/
public void setDocumentation(JsonDocumentation schemaPathDocumentation) {
this.schemaPathDocumentation = schemaPathDocumentation;
}
/**
* Returns the list of equivalent item definitions for the schema path. If this schema reference contains a by-value
* schema (not a path reference), these equivalent items will be ignored during marshalling.
*
* @return List<JsonContextualValue>
*/
public List<JsonContextualValue> getEquivalentItems() {
return schemaPathEquivalentItems;
}
/**
* Returns the list of EXAMPLE value definitions for the schema path. If this schema reference contains a by-value
* schema (not a path reference), these EXAMPLE items will be ignored during marshalling.
*
* @return List<JsonContextualValue>
*/
public List<JsonContextualValue> getExampleItems() {
return schemaPathExampleItems;
}
/**
 * @see org.opentravel.schemacompiler.codegen.json.model.JsonModelObject#toJson()
 */
public JsonObject toJson() {
    JsonObject schemaRef;
    if (schema != null) {
        // By-value reference: render the embedded schema directly.
        schemaRef = schema.toJson();
    } else {
        // By-reference: emit a "$ref" pointer object, decorated first via
        // JsonSchemaCodegenUtils.createOtmAnnotations (documentation/equivalents/examples).
        schemaRef = new JsonObject();
        JsonSchemaCodegenUtils.createOtmAnnotations( schemaRef, this );
        schemaRef.addProperty( "$ref", schemaPath );
    }
    return schemaRef;
}
} |
Java | public class Action implements IChoice
{
// Optional label attribute of this action element (may be null).
private String label;
// Parsed child elements, in document order.
private IChoice[] iac;
// Pull parser supplied at construction; retained after parsing completes.
XmlPullParser parser = null;
/**
 * Parses a BulletML action element from the given pull parser.
 *
 * <p>Reads tokens until the matching action end tag (or end of document) is
 * reached, collecting each recognized child element as an IChoice.
 *
 * @param parser pull parser positioned at the action start tag
 * @throws IOException on read failure
 * @throws XmlPullParserException on malformed XML
 */
public Action(XmlPullParser parser) throws IOException, XmlPullParserException
{
    this.parser = parser;
    boolean done = false;
    label = parser.getAttributeValue(null, Bulletml.ATTR_LABEL);
    Vector<IChoice> choices = new Vector<IChoice>();
    do
    {
        int type = parser.nextToken();
        if (type == XmlPullParser.END_TAG)
        {
            String endTag = parser.getName();
            if (endTag != null)
            {
                // Stop once the enclosing action end tag is seen.
                done = processEndTag(endTag, Bulletml.TAG_ACTION);
            }
        }
        else
        if (type == XmlPullParser.START_TAG)
        {
            String startTag = parser.getName();
            if (startTag != null)
            {
                processStartTag(parser, startTag, choices);
            }
        }
        else
        if (type == XmlPullParser.END_DOCUMENT)
        {
            // Defensive: stop on premature end of document.
            done = true;
        }
    }
    while (!done);
    // Snapshot the collected children into the content array.
    int size = choices.size();
    iac = new IChoice[size];
    choices.copyInto(iac);
}
/**
 * Process Start tag.
 *
 * <p>Instantiates the BulletML element matching {@code tag} and appends it to
 * {@code choices}; unrecognized tags are silently ignored.
 *
 * @param parser pull parser positioned at the start tag
 * @param tag element name just read
 * @param choices accumulator for parsed child elements
 * @throws IOException
 * @throws XmlPullParserException
 */
private void processStartTag(XmlPullParser parser, String tag, Vector<IChoice> choices) throws IOException,
    XmlPullParserException
{
    if (tag.equals(Bulletml.TAG_REPEAT))
    {
        choices.addElement(new Repeat(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_FIRE))
    {
        choices.addElement(new Fire(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_FIRE_REF))
    {
        choices.addElement(new FireRef(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_CHANGE_SPEED))
    {
        choices.addElement(new ChangeSpeed(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_CHANGE_DIRECTION))
    {
        choices.addElement(new ChangeDirection(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_ACCEL))
    {
        choices.addElement(new Accel(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_WAIT))
    {
        choices.addElement(new Wait(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_VANISH))
    {
        choices.addElement(new Vanish());
    }
    else
    if (tag.equals(Bulletml.TAG_ACTION))
    {
        // Nested action: recurse into the constructor.
        choices.addElement(new Action(parser));
    }
    else
    if (tag.equals(Bulletml.TAG_ACTION_REF))
    {
        choices.addElement(new ActionRef(parser));
    }
}
/**
 * Process end tag.
 *
 * @param tag the tag just read
 * @param endTag the tag that terminates processing
 * @return true when {@code tag} equals {@code endTag}, i.e. end processing
 */
private boolean processEndTag(String tag, String endTag)
{
    return tag.equals(endTag);
}
/** @return the optional label attribute of this action (may be null) */
public final String getLabel()
{
    return label;
}
/** @return the parsed child elements of this action, in document order */
public final IChoice[] getContent()
{
    return iac;
}
} |
Java | @Service
public class CarDrivingService {
private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
// The car under control; all drive commands are applied to this instance.
private final Car car;
// Sink that receives a status update after every state change.
private final Subscriber<CarStatus> carStatusSubscriber;
// Subscriber currently consuming driver input; replaced on each putBehindWheel call.
private BaseSubscriber<DriveCommand> driver;
// Subscription to the current driver input, kept so it can be cancelled on driver swap.
private Subscription activeSubscription;
/**
 * Creates and initializes the Car driving service.
 * @param car the car this car driving service controls.
 * @param carStatusSubscriber the subscriber to which new car status will need to be send.
 */
public CarDrivingService(final Car car, final Subscriber<CarStatus> carStatusSubscriber) {
    LOGGER.info("Creating and initializing Car Driving Service...");
    this.car = car;
    this.carStatusSubscriber = carStatusSubscriber;
}
/** @return the current status of the controlled car, as reported by the car itself. */
public CarStatus getCarStatus() {
    return car.getStatus();
}
/**
 * Puts a (new) driver behind the steering wheel of the car, one that will process the provided driver input. The new driver will now be
 * in control of the car. The possible previous driver will be signaled that its services are no longer required (read: the subscription
 * to the previous driver input will be disposed of).
 *
 * @param newDriverInput the driver input for the new driver.
 */
public void putBehindWheel(final Publisher<DriveCommand> newDriverInput) {
    LOGGER.info("Putting a new driver ({}) behind the wheel of the car.", newDriverInput.getClass().getSimpleName());
    // Bring the car to a stop (and broadcast that state) before switching control.
    car.stop();
    publishCarStatus();
    // Dismiss the previous driver, if any, by cancelling its command subscription.
    if (activeSubscription != null) {
        activeSubscription.cancel();
    }
    driver = buildDriver();
    newDriverInput.subscribe(driver);
}
/** Applies a single drive command to the car and publishes the resulting status. */
private void processDriveCommand(final DriveCommand driveCommand) {
    driveCommand.applyCommandOnCar(car);
    publishCarStatus();
}
/**
 * Builds the subscriber representing the driver: it records the subscription so a
 * later driver swap can cancel it, applies each incoming command to the car, and
 * stops the car when the stream terminates for any reason.
 */
private BaseSubscriber<DriveCommand> buildDriver() {
    return new BaseSubscriber<DriveCommand>() {
        @Override
        protected void hookOnSubscribe(final Subscription s) {
            super.hookOnSubscribe(s);
            // Remember the subscription so putBehindWheel can cancel it later.
            activeSubscription = s;
        }
        @Override
        protected void hookOnNext(final DriveCommand driveCommand) {
            super.hookOnNext(driveCommand);
            processDriveCommand(driveCommand);
        }
        @Override
        protected void hookFinally(final SignalType type) {
            super.hookFinally(type);
            // Safety net: without a driver the car must not keep moving.
            // NOTE(review): this logs at ERROR even for normal completion/cancellation -- confirm intended.
            LOGGER.error("Car driver received termination event of type '{}', so stopping car immediately", type);
            car.stop();
        }
    };
}
/** Pushes the car's current status to the configured status subscriber. */
private void publishCarStatus() {
    carStatusSubscriber.onNext(getCarStatus());
}
} |
Java | @CoreMethod(names = "execute", isModuleFunction = true, needsSelf = false, required = 1, argumentsAsArray = true)
public abstract static class ExecuteNode extends CoreMethodArrayArgumentsNode {
    // Lazily created foreign-access node; @Child so Truffle can adopt/replace it.
    @Child private ForeignObjectAccessNode node;
    public ExecuteNode(RubyContext context, SourceSection sourceSection) {
        super(context, sourceSection);
    }
    @Specialization
    public Object executeForeign(VirtualFrame frame, TruffleObject receiver, Object[] arguments) {
        // Create the access node on first use, sized to the argument count seen here;
        // the deopt invalidates compiled code that assumed the field was still null.
        // NOTE(review): the node is not rebuilt if a later call uses a different
        // argument count -- confirm callers always pass the same arity.
        if (node == null) {
            CompilerDirectives.transferToInterpreterAndInvalidate();
            this.node = ForeignObjectAccessNode.getAccess(Execute.create(Receiver.create(), arguments.length));
        }
        return node.executeForeign(frame, receiver, arguments);
    }
} |
Java | @RunWith(Parameterized.class)
public static class TestParDoPayloadTranslation {
// Shared pipeline used only for constructing test values; abandoned-node enforcement off.
public static TestPipeline p = TestPipeline.create().enableAbandonedNodeEnforcement(false);
// A singleton and a multimap side input used to exercise side-input translation.
private static PCollectionView<Long> singletonSideInput =
    p.apply("GenerateSingleton", GenerateSequence.from(0L).to(1L)).apply(View.asSingleton());
private static PCollectionView<Map<Long, Iterable<String>>> multimapSideInput =
    p.apply("CreateMultimap", Create.of(KV.of(1L, "foo"), KV.of(1L, "bar"), KV.of(2L, "spam")))
        .setCoder(KvCoder.of(VarLongCoder.of(), StringUtf8Coder.of()))
        .apply(View.asMultimap());
// Empty keyed main input; its key/window coders drive timer coder expectations below.
private static PCollection<KV<Long, String>> mainInput =
    p.apply(
        "CreateMainInput", Create.empty(KvCoder.of(VarLongCoder.of(), StringUtf8Coder.of())));
/**
 * ParDo configurations under test: plain multi-output, with side inputs,
 * with additional outputs (with and without side inputs), a splittable DoFn,
 * and a stateful/timer DoFn.
 */
@Parameters(name = "{index}: {0}")
public static Iterable<ParDo.MultiOutput<?, ?>> data() {
    return ImmutableList.of(
        ParDo.of(new DropElementsFn()).withOutputTags(new TupleTag<>(), TupleTagList.empty()),
        ParDo.of(new DropElementsFn())
            .withOutputTags(new TupleTag<>(), TupleTagList.empty())
            .withSideInputs(singletonSideInput, multimapSideInput),
        ParDo.of(new DropElementsFn())
            .withOutputTags(
                new TupleTag<>(),
                TupleTagList.of(new TupleTag<byte[]>() {}).and(new TupleTag<Integer>() {}))
            .withSideInputs(singletonSideInput, multimapSideInput),
        ParDo.of(new DropElementsFn())
            .withOutputTags(
                new TupleTag<>(),
                TupleTagList.of(new TupleTag<byte[]>() {}).and(new TupleTag<Integer>() {})),
        ParDo.of(new SplittableDropElementsFn())
            .withOutputTags(new TupleTag<>(), TupleTagList.empty()),
        ParDo.of(new StateTimerDropElementsFn())
            .withOutputTags(new TupleTag<>(), TupleTagList.empty()));
}
// Injected by the Parameterized runner from data().
@Parameter(0)
public ParDo.MultiOutput<KV<Long, String>, Void> parDo;
/**
 * Translates the ParDo to a ParDoPayload and verifies the DoFn, main output
 * tag, side inputs, finalization flag and stateful-processing requirement
 * survive the translation.
 */
@Test
public void testToProto() throws Exception {
    SdkComponents components = SdkComponents.create();
    components.registerEnvironment(Environments.createDockerEnvironment("java"));
    ParDoPayload payload =
        ParDoTranslation.translateParDo(
            parDo,
            PCollection.createPrimitiveOutputInternal(
                p,
                WindowingStrategy.globalDefault(),
                IsBounded.BOUNDED,
                KvCoder.of(VarLongCoder.of(), StringUtf8Coder.of())),
            DoFnSchemaInformation.create(),
            p,
            components);
    assertThat(ParDoTranslation.getDoFn(payload), equalTo(parDo.getFn()));
    assertThat(ParDoTranslation.getMainOutputTag(payload), equalTo(parDo.getMainOutputTag()));
    // Every side input used by the ParDo must be present in the payload.
    for (PCollectionView<?> view : parDo.getSideInputs().values()) {
        payload.getSideInputsOrThrow(view.getTagInternal().getId());
    }
    assertFalse(payload.getRequestsFinalization());
    // Only the stateful/timer DoFn should register the stateful-processing requirement.
    assertEquals(
        parDo.getFn() instanceof StateTimerDropElementsFn,
        components.requirements().contains(ParDoTranslation.REQUIRES_STATEFUL_PROCESSING_URN));
}
/**
 * Round-trips the applied ParDo through its PTransform proto: encodes it,
 * decodes the payload, and verifies side-input views, the main input name,
 * and the timer coder components rehydrate to their original form.
 */
@Test
public void toTransformProto() throws Exception {
    Map<TupleTag<?>, PCollection<?>> inputs = new HashMap<>();
    inputs.put(new TupleTag<KV<Long, String>>("mainInputName") {}, mainInput);
    inputs.putAll(PValues.fullyExpand(parDo.getAdditionalInputs()));
    PCollectionTuple output = mainInput.apply(parDo);
    SdkComponents sdkComponents = SdkComponents.create();
    sdkComponents.registerEnvironment(Environments.createDockerEnvironment("java"));
    // Encode
    RunnerApi.PTransform protoTransform =
        PTransformTranslation.toProto(
            AppliedPTransform.<PCollection<KV<Long, String>>, PCollection<Void>, MultiOutput>of(
                "foo", inputs, PValues.expandOutput(output), parDo, p),
            sdkComponents);
    RunnerApi.Components components = sdkComponents.toComponents();
    RehydratedComponents rehydratedComponents = RehydratedComponents.forComponents(components);
    // Decode
    ParDoPayload parDoPayload = ParDoPayload.parseFrom(protoTransform.getSpec().getPayload());
    // Each side-input view must rehydrate with matching tag, fns, strategy and coder.
    for (PCollectionView<?> view : parDo.getSideInputs().values()) {
        SideInput sideInput = parDoPayload.getSideInputsOrThrow(view.getTagInternal().getId());
        PCollectionView<?> restoredView =
            PCollectionViewTranslation.viewFromProto(
                sideInput,
                view.getTagInternal().getId(),
                view.getPCollection(),
                protoTransform,
                rehydratedComponents);
        assertThat(restoredView.getTagInternal(), equalTo(view.getTagInternal()));
        assertThat(restoredView.getViewFn(), instanceOf(view.getViewFn().getClass()));
        assertThat(
            restoredView.getWindowMappingFn(), instanceOf(view.getWindowMappingFn().getClass()));
        assertThat(
            restoredView.getWindowingStrategyInternal(),
            equalTo(view.getWindowingStrategyInternal().fixDefaults()));
        assertThat(restoredView.getCoderInternal(), equalTo(view.getCoderInternal()));
    }
    String mainInputId = sdkComponents.registerPCollection(mainInput);
    assertThat(
        ParDoTranslation.getMainInput(protoTransform, components),
        equalTo(components.getPcollectionsOrThrow(mainInputId)));
    assertThat(ParDoTranslation.getMainInputName(protoTransform), equalTo("mainInputName"));
    // Ensure the correct timer coder components are used from the main input PCollection's key
    // and window coders.
    for (RunnerApi.TimerFamilySpec timerFamilySpec :
        parDoPayload.getTimerFamilySpecsMap().values()) {
        Coder<?> timerCoder =
            CoderTranslation.fromProto(
                components.getCodersOrThrow(timerFamilySpec.getTimerFamilyCoderId()),
                rehydratedComponents,
                TranslationContext.DEFAULT);
        assertEquals(
            org.apache.beam.runners.core.construction.Timer.Coder.of(
                VarLongCoder.of(), GlobalWindow.Coder.INSTANCE),
            timerCoder);
    }
}
} |
Java | @RunWith(Parameterized.class)
public static class TestStateAndTimerTranslation {
    /** State specs under test: value, bag, set and map state. */
    @Parameters(name = "{index}: {0}")
    public static Iterable<StateSpec<?>> stateSpecs() {
        return ImmutableList.of(
            StateSpecs.value(VarIntCoder.of()),
            StateSpecs.bag(VarIntCoder.of()),
            StateSpecs.set(VarIntCoder.of()),
            StateSpecs.map(StringUtf8Coder.of(), VarIntCoder.of()));
    }
    // Injected by the Parameterized runner from stateSpecs().
    @Parameter public StateSpec<?> stateSpec;
    /** Round-trips a StateSpec through its proto form and checks equality. */
    @Test
    public void testStateSpecToFromProto() throws Exception {
        // Encode
        SdkComponents sdkComponents = SdkComponents.create();
        sdkComponents.registerEnvironment(Environments.createDockerEnvironment("java"));
        RunnerApi.StateSpec stateSpecProto =
            ParDoTranslation.translateStateSpec(stateSpec, sdkComponents);
        // Decode
        RehydratedComponents rehydratedComponents =
            RehydratedComponents.forComponents(sdkComponents.toComponents());
        StateSpec<?> deserializedStateSpec =
            ParDoTranslation.fromProto(stateSpecProto, rehydratedComponents);
        assertThat(stateSpec, equalTo(deserializedStateSpec));
    }
} |
Java | public class SafeVectorHelpers {
    /**
     * Atomically reads the last element of {@code list}.
     *
     * <p>Locks the vector so the size read and element read happen under one lock:
     * Vector's methods are individually synchronized, but this two-step
     * check-then-act sequence is not atomic without the explicit block.
     *
     * <p>Throws ArrayIndexOutOfBoundsException when the vector is empty.
     */
    public static Object getLast(Vector list) {
        synchronized (list) {
            int lastIndex = list.size() - 1;
            return list.get(lastIndex);
        }
    }
    /**
     * Atomically removes the last element of {@code list} (same locking rationale
     * as {@link #getLast}).
     *
     * <p>Throws ArrayIndexOutOfBoundsException when the vector is empty.
     */
    public static void deleteLast(Vector list) {
        synchronized (list) {
            int lastIndex = list.size() - 1;
            list.remove(lastIndex);
        }
    }
} |
Java | public class BeanRegistrationWriterOptions {
    /**
     * Constant for the default options.
     */
    public static final BeanRegistrationWriterOptions DEFAULTS = builder().build();
    // Optional factory producing a writer for a given bean name/definition; may be null.
    private final BiFunction<String, BeanDefinition, BeanRegistrationWriter> writerFactory;
    private BeanRegistrationWriterOptions(Builder builder) {
        this.writerFactory = builder.writerFactory;
    }
    /**
     * Return a {@link BeanRegistrationWriter} for the specified bean definition.
     * @param beanName the name of the bean
     * @param beanDefinition the definition of the bean
     * @return a {@link BeanRegistrationWriter} for the specified bean definition, or
     * {@code null} if none could be provided
     */
    public BeanRegistrationWriter getWriterFor(String beanName, BeanDefinition beanDefinition) {
        return (this.writerFactory != null) ? this.writerFactory.apply(beanName, beanDefinition) : null;
    }
    /**
     * Create a new options {@link Builder}
     * @return a builder with default settings
     */
    public static Builder builder() {
        return new Builder();
    }
    /** Builder for {@link BeanRegistrationWriterOptions}. */
    public static class Builder {
        private Builder() {
        }
        private BiFunction<String, BeanDefinition, BeanRegistrationWriter> writerFactory;
        /** Use the given factory to create writers; returns this builder for chaining. */
        public Builder withWriterFactory(BiFunction<String, BeanDefinition, BeanRegistrationWriter> writerFactory) {
            this.writerFactory = writerFactory;
            return this;
        }
        /** Build an immutable options instance from the current builder state. */
        public BeanRegistrationWriterOptions build() {
            return new BeanRegistrationWriterOptions(this);
        }
    }
} |
Java | public class PRBasicRemoveIndexDUnitTest extends PartitionedRegionDUnitTestCase
{
/**
 * Constructor
 * @param name
 */
public PRBasicRemoveIndexDUnitTest (String name) {
    super(name);
}
// Helper that builds the serializable runnables executed in each VM.
PRQueryDUnitHelper PRQHelp = new PRQueryDUnitHelper("");
/**
 * Name of the partitioned region for the test.
 */
final String name = "PartionedPortfolios";
// Key range of portfolio entries to load into the region.
final int start = 0;
final int end = 1003;
/**
 * Reduncancy level for the pr.
 */
final int redundancy = 0;
/**
 * Remove index test to remove all the indexes in a given partitioned region
 *
 * @throws Exception
 * if the test fails
 */
public void testPRBasicIndexRemove() throws Exception
{
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    VM vm3 = host.getVM(3);
    // Fixed copy-pasted log message that referenced testPRBasicIndexCreate.
    getLogWriter().info(
        "PRBasicRemoveIndexDUnitTest.testPRBasicIndexRemove test now starts ....");
    // Create the partitioned region in all four VMs.
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    final PortfolioData[] portfolio = PRQHelp.createPortfolioData(start, end);
    // Putting the data into the PR's created
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
        start, end));
    // create all the indexes.
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
        "PrIndexOnPKID", "p.pkid",null, "p"));
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
        "PrIndexOnStatus", "p.status",null, "p"));
    vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
        "PrIndexOnId", "p.ID",null, "p"));
    //remove indexes (false => remove all indexes, not a single named one)
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForRemoveIndex(name, false));
    // Fixed wrong method name and "sucessfully" typo in the end-of-test log message.
    getLogWriter().info(
        "PRBasicRemoveIndexDUnitTest.testPRBasicIndexRemove test now ends successfully");
}
/**
 * Test removing single index on a pr.
 */
public void testPRBasicRemoveParticularIndex() throws Exception {
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    VM vm3 = host.getVM(3);
    // Fixed copy-pasted log message that referenced testPRBasicIndexCreate.
    getLogWriter().info(
        "PRBasicRemoveIndexDUnitTest.testPRBasicRemoveParticularIndex test now starts ....");
    // Create the partitioned region in all four VMs.
    vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
        redundancy));
    final PortfolioData[] portfolio = PRQHelp.createPortfolioData(start, end);
    // Putting the data into the PR's created
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
        start, end));
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
        "PrIndexOnPKID", "p.pkid",null, "p"));
    vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
        "PrIndexOnStatus", "p.status",null, "p"));
    vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
        "PrIndexOnId", "p.ID",null, "p"));
    // remove indexes (true => remove a single named index)
    vm1.invoke(PRQHelp.getCacheSerializableRunnableForRemoveIndex(name, true));
}
} |
Java | public class Remark {
// Offset from interview start at which this remark was made.
private final Duration time;
// The remark text itself.
private final String message;
/**
 * Constructs a {@code Remark} with the given message at given time.
 *
 * @param time The time when the remark was made.
 * @param message The remark message.
 */
public Remark(Duration time, String message) {
    this.time = time;
    this.message = message;
}
/**
 * Retrieves the time of the interview when this {@code Remark}
 * was created.
 *
 * @return The Duration since interview start when this {@code Remark} was created.
 */
public Duration getTime() {
    return time;
}
/**
 * Formats the time in a readable format.
 *
 * <p>Minutes are total minutes, so durations of an hour or more render as
 * e.g. {@code 75:30} rather than rolling over into hours.
 *
 * @return the formatted time string in minutes and seconds.
 */
public String getTimeString() {
    return String.format("%d:%02d", time.toMinutes(), time.toSecondsPart());
}
/** @return the remark message text. */
public String getMessage() {
    return message;
}
/** The string form is just the message; the time is rendered separately via getTimeString(). */
@Override
public String toString() {
    return this.message;
}
/**
 * Two remarks are equal when both their time and message match.
 */
@Override
public boolean equals(Object other) {
    if (other == this) {
        return true;
    } else if (other instanceof Remark) {
        Remark otherRemark = ((Remark) other);
        return time.equals(otherRemark.getTime())
            && message.equals(otherRemark.getMessage());
    } else {
        return false;
    }
}

/**
 * Overridden alongside {@link #equals(Object)} to honor the hashCode contract:
 * the original class overrode equals only, which breaks hash-based collections
 * (equal remarks could land in different buckets).
 */
@Override
public int hashCode() {
    return 31 * time.hashCode() + message.hashCode();
}
} |
Java | public class OIndexRemoteOneValue extends OIndexRemote<OIdentifiable> {
    // Server-side lookup of the single record id stored under a key.
    protected static final String QUERY_GET = "select rid from index:`%s` where key = ?";
    public OIndexRemoteOneValue(final String iName, final String iWrappedType, final String algorithm, final ORID iRid,
        final OIndexDefinition iIndexDefinition, final ODocument iConfiguration, final Set<String> clustersToIndex, String database) {
        super(iName, iWrappedType, algorithm, iRid, iIndexDefinition, iConfiguration, clustersToIndex, database);
    }
    /** Returns the record id stored under {@code iKey}, or null when the key is absent. */
    public OIdentifiable get(final Object iKey) {
        try (final OResultSet result = getDatabase().indexQuery(getName(), String.format(QUERY_GET, name), iKey)) {
            if (result != null && result.hasNext())
                return ((OIdentifiable) result.next().getProperty("rid"));
            return null;
        }
    }
    /**
     * Materializes all index entries into an in-memory map and iterates that.
     * Entry order is not preserved (Collectors.toMap backs this with a plain map),
     * consistent with supportsOrderedIterations() returning false.
     */
    public Iterator<Entry<Object, OIdentifiable>> iterator() {
        try (final OResultSet result = getDatabase().indexQuery(getName(), String.format(QUERY_ENTRIES, name))) {
            final Map<Object, OIdentifiable> map = result.stream()
                .collect(Collectors.toMap((res) -> res.getProperty("key"), (res) -> res.getProperty("rid")));
            return map.entrySet().iterator();
        }
    }
    /** One value per key -- this index type is always unique. */
    @Override
    public boolean isUnique() {
        return true;
    }
    @Override
    public boolean supportsOrderedIterations() {
        return false;
    }
} |
Java | public class ClassDiagramSolutionTest {
    // Output .dot location; intentionally left on disk after the run for inspection.
    File dotFile = new File("../dataFiles/3B_ClassDiagram/1- AllClasses"+File.separator+"classDiagram.dot");
    /**
     * If a dot file exists from a previous test, delete it.
     *
     * We do not clean this file up *after* running the test because you might want to inspect it /
     * analyse it with Excel etc.
     *
     */
    @Before
    public void cleanUpAndPrepare(){
        if(dotFile.exists()){
            dotFile.delete();
        }
        // Ensure the directory used for generated outputs exists.
        File outputDir = new File("outputs");
        if(!outputDir.exists()){
            outputDir.mkdir();
        }
    }
    /** Generates the class diagram for the compiled assertj-core classes and writes it to dotFile. */
    @Test
    public void ClassDiagramTest() throws IOException {
        ClassDiagramSolution testSubject = new ClassDiagramSolution("../assertj-core/target/classes/", false);
        testSubject.writeDot(dotFile);
    }
} |
Java | public class FileReader {
FileInputStream fs;
InputStreamReader isr;
BufferedReader br;
/**
 * Reads the file at {@code filePath} as UTF-8 and returns its contents with
 * every line terminated by {@code "\n"}.
 *
 * @param filePath path of the file to read
 * @return the file contents; empty string if an I/O error occurs mid-read
 * @throws ResourceNotFoundException if no file exists at the given path
 */
public String read(String filePath){
    File questionFile = new File(filePath);
    // Bug fix: new File(...) never returns null, so the original null check was
    // dead code -- a missing file escaped as a NullPointerException from the
    // unconditional fs.close() in the finally block. Check existence instead,
    // which is what the error message always implied.
    if (!questionFile.exists()) {
        throw new ResourceNotFoundException("There is NO Such File! Please, Check Your File Path");
    }
    StringBuilder result = new StringBuilder();
    try {
        fs = new FileInputStream(questionFile);
        isr = new InputStreamReader(fs, "utf-8");
        br = new BufferedReader(isr);
        String temp = br.readLine();
        while (temp != null) {
            result.append(temp).append("\n");
            temp = br.readLine();
        }
    } catch (IOException e) {
        // Covers FileNotFoundException and UnsupportedEncodingException as well.
        e.printStackTrace();
    } finally {
        // Bug fix: close only what was actually opened (the original closed
        // unconditionally and could NPE here), innermost wrapper first.
        try {
            if (br != null) {
                br.close();
            }
            if (isr != null) {
                isr.close();
            }
            if (fs != null) {
                fs.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return result.toString();
}
} |
Java | public class KeepScreenOnFragment extends Fragment {
    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Keep the screen on via the host activity's window flag.
        getActivity().getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    }
    @Override
    public void onResume() {
        super.onResume();
        // Listen for low-battery broadcasts only while the fragment is in the foreground.
        getActivity()
            .registerReceiver(broadcastReceiver, new IntentFilter(Intent.ACTION_BATTERY_LOW));
    }
    @Override
    public void onPause() {
        super.onPause();
        getActivity().unregisterReceiver(broadcastReceiver);
    }
    // Drops the keep-screen-on flag when the device reports low battery, letting
    // the screen time out normally again.
    private final BroadcastReceiver broadcastReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            if (Intent.ACTION_BATTERY_LOW.equals(intent.getAction())) {
                getActivity().getWindow()
                    .clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
            }
        }
    };
} |
Java | public class CircularBuffer<E> {
// Hard cap on the capacity a caller may request.
private static final int MAX_ALLOWED_SIZE = 10000;
// Fixed capacity chosen at construction time.
private final int size;
// Backing store; grows up to 'size' elements and is then overwritten in place.
private List<E> bufferList;
// Index of the oldest element.
private int startIndex;
// Index of the most recently written element; -1 when the buffer is empty.
private int endIndex;
/**
 * Create a circular buffer with the given size.
 *
 * @param size - fixed size of the buffer
 * @throws IllegalArgumentException if size is non-positive or exceeds the allowed maximum
 */
public CircularBuffer(int size) {
    if (size <= 0) {
        throw new IllegalArgumentException(
            "Requested size of circular buffer (" + size + ") is invalid");
    }
    if (size > MAX_ALLOWED_SIZE) {
        throw new IllegalArgumentException(
            "Requested size of circular buffer (" + size + ") is greater than the " +
            "allowed max size " + MAX_ALLOWED_SIZE);
    }
    this.size = size;
    this.bufferList = new ArrayList<E>(getSize());
    // Empty-buffer state: start at 0, end at -1 (nothing written yet).
    this.startIndex = 0;
    this.endIndex = -1;
}
/**
 * Create a circular buffer with the maximum allowed size.
 */
public CircularBuffer() {
    this(MAX_ALLOWED_SIZE);
}
/**
 * Append elements while preserving the circular nature of the buffer.
 *
 * @param element - element to be appended
 * @throws IllegalArgumentException if element is null
 */
public synchronized void append(E element) {
    if (element == null) {
        throw new IllegalArgumentException(
            "Circular buffer doesn't support null values to be added to buffer");
    }
    // Advance start/end with wrap-around before writing. Once the buffer is full,
    // startIndex trails one slot behind the write position so the oldest element
    // is the next one overwritten.
    if (startIndex == getSize() - 1) {
        startIndex = 0;
    } else if (endIndex == getSize() - 1) {
        // The write position wraps back to slot 0.
        endIndex = -1;
        startIndex = 1;
    } else if (startIndex != 0) {
        // start index is not in beginning of the buffer
        startIndex++;
    }
    endIndex++;
    if (getSize() == bufferList.size()) {
        // if the buffer capacity has been reached, replace the existing elements,
        // set method replaces the element in the given index
        bufferList.set(endIndex, element);
    } else {
        // if the buffer capacity has not been reached add elements to the list,
        // add method lets the array list grow, and appends elements to the end of list
        bufferList.add(endIndex, element);
    }
}
/**
 * Retrieve the given amount of elements from the circular buffer. This is a forgiving
 * operation, if the amount asked is greater than the size of the buffer it will return all the
 * available elements.
 *
 * <p>Elements are returned starting from {@code startIndex}, i.e. oldest first.
 * A non-positive {@code amount} yields an empty list.
 *
 * @param amount - no of elements to return
 * @return - a list of elements
 */
public synchronized List<E> get(int amount) {
    if (bufferList.isEmpty()) {
        // if the container is empty return an empty list
        return new ArrayList<E>();
    }
    if (amount <= 0) {
        // if a negative amount is requested send an empty list
        return new ArrayList<E>();
    }
    int amountOfElementsInContainer = bufferList.size();
    int amountToRetrieve = amount;
    List<E> result = new ArrayList<E>(amountOfElementsInContainer);
    for (int i = startIndex; amountOfElementsInContainer > 0 && amountToRetrieve > 0;
        i++, amountToRetrieve--, amountOfElementsInContainer--) {
        // Use the size of the internal container to retrieve elements.
        // Here starting from the start index we insert elements to the result.
        // if the requested amount is added, we stop adding more elements to the result or
        // if all the elements in the internal container is added, we stop adding more elements
        // to the result.
        result.add(bufferList.get(i % this.size));
    }
    return result;
}
/**
 * This method is added for backward compatibility.
 *
 * @param amount - amount of elements to return from the buffer
 * @return - an object array of amount number of elements in the buffer
 */
public synchronized Object[] getObjects(int amount) {
    // Collection.toArray() already yields a freshly allocated Object[].
    return get(amount).toArray();
}
/**
 * Clear the circular buffer and reset the indices.
 */
public synchronized void clear() {
    bufferList.clear();
    // Restore the empty-buffer sentinel state.
    startIndex = 0;
    endIndex = -1;
}
/** @return current number of elements stored, at most the fixed capacity. */
public int getAmount() {
    return bufferList.size();
}
/**
 * Return the capacity of the circular buffer. This is set during buffer initialization.
 *
 * @return - capacity of the buffer
 */
public int getSize() {
    return size;
}
/** @return index of the most recently written slot (-1 when empty); note this is endIndex, not the start. */
public int getHead() {
    return endIndex;
}
} |
Java | public abstract class DialogCentered extends DialogFragment {
FrameLayout container;
/** Applies the frameless, centered dialog theme before any view is created. */
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setStyle(DialogFragment.STYLE_NO_FRAME, R.style.DialogCentered);
}
/**
 * Builds the dialog content: a FrameLayout with the centered-dialog background
 * into which the subclass layout (contentView()) is inflated.
 */
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup parent, Bundle
    savedInstanceState) {
    // Bug fix: the original assigned the new FrameLayout to its 'container'
    // parameter, which shadowed the 'container' field -- the field was never
    // initialized. The parameter is renamed so the field is assigned.
    container = new FrameLayout(inflater.getContext());
    container.setBackgroundResource(R.drawable.light_center_dialog_background);
    container.removeAllViews();
    return inflater.inflate(contentView(), container, true);
}
/** @return layout resource id to inflate as the dialog's content. */
abstract protected int contentView();
/** Shows this dialog using its simple class name as the fragment tag. */
public void show(FragmentManager manager) {
    FragmentTransaction tx = manager.beginTransaction();
    super.show(tx, getClass().getSimpleName());
}
/**
 * Configures the dialog window: wrap-content size, dimmed background at half
 * strength, and the centered-dialog window animation.
 */
@NonNull
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
    Dialog dialog = super.onCreateDialog(savedInstanceState);
    // Outside-touch dismissal follows the fragment's cancelable flag.
    dialog.setCanceledOnTouchOutside(isCancelable());
    Window window = dialog.getWindow();
    // NOTE(review): setFlags passes FLAG_FULLSCREEN with FLAG_FORCE_NOT_FULLSCREEN
    // as the mask; the mask does not cover the flag, so this may be a no-op -- confirm intent.
    window.setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams
        .FLAG_FORCE_NOT_FULLSCREEN);
    window.setLayout(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
    window.addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
    window.setWindowAnimations(windowAnimationStyle());
    window.getAttributes().dimAmount = 0.5f;
    return dialog;
}
int windowAnimationStyle() {
return R.style.DialogCentered_Animation;
}
} |
Java | @Provider
public class RestExceptionMapper implements ExceptionMapper<Exception> {
private static final int HTTP_STATUS_I_AM_A_TEAPOT = 418;
@Inject
@ThisLogger
private AppLogger log;
@Override
public Response toResponse(Exception e) {
Response result = null;
if (e instanceof BaseException) {
result = handleBaseException((BaseException) e);
} else if (e instanceof BaseExceptionWrapper) {
BaseExceptionWrapper<?> wrappedException = (BaseExceptionWrapper) e;
if (wrappedException.getException() != null) {
this.log.info("Wrapped exception. Trying to match the correct mapper...");
result = this.handleWrappedException(wrappedException.getException());
} else if (e.getCause() instanceof BaseException) {
this.log.info("Wrapped BaseException. Trying to match the correct mapper...");
result = this.handleWrappedException((BaseException) e.getCause());
} else {
this.log.error("Unknown error in cause: ", e);
this.log.writeLogToError();
}
} else {
this.log.error("Unknown error: ", e);
this.log.writeLogToError();
}
return result != null ? result : this.handleException(e);
}
private Response handleException(Exception e) {
TechnicalFault dto = new TechnicalFault();
addCommonInfo(dto, e, CoffeeFaultType.OPERATION_FAILED);
Response.Status statusCode = Response.Status.INTERNAL_SERVER_ERROR;
if (e instanceof InternalServerErrorException) {
statusCode = Response.Status.BAD_REQUEST;
}
Response.ResponseBuilder responseBuilder = Response.status(statusCode);
return responseBuilder.entity(dto).build();
}
private Response handleWrappedException(BaseException exception) {
if (exception == null) {
this.log.warn("Failed to map the wrapped exception. Wrapper exception don't have content.");
return null;
} else {
return handleBaseException(exception);
}
}
private Response handleBaseException(BaseException e) {
log.error("Known error: ", e);
log.writeLogToError();
if (e instanceof BONotFoundException) {
BONotFound dto = new BONotFound();
addCommonInfo(dto, e);
return Response.status(HTTP_STATUS_I_AM_A_TEAPOT).entity(dto).build();
} else if (e instanceof InvalidRequestException) {
InvalidRequestException ire = (InvalidRequestException) e;
InvalidRequestFault dto = new InvalidRequestFault();
addCommonInfo(dto, e);
addValidationErrors(dto, ire.getErrors());
return Response.status(Response.Status.BAD_REQUEST).entity(dto).build();
} else if (e instanceof AccessDeniedException) {
BusinessFault dto = new BusinessFault();
addCommonInfo(dto, e, e.getFaultTypeEnum());
return Response.status(Response.Status.UNAUTHORIZED).entity(dto).build();
} else {
// BaseException
TechnicalFault dto = new TechnicalFault();
addCommonInfo(dto, e);
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(dto).build();
}
}
private void addCommonInfo(BaseExceptionResultType dto, BaseException e) {
this.addCommonInfo(dto, e, e.getFaultTypeEnum());
}
private void addCommonInfo(BaseExceptionResultType dto, Exception e, Enum<?> faultType) {
if (e instanceof JAXBException) {
if (e != null) {
Throwable t = ((JAXBException) e).getLinkedException();
dto.setMessage(t != null ? t.getLocalizedMessage() : e.getLocalizedMessage());
}
} else {
dto.setMessage(e.getLocalizedMessage());
}
dto.setClassName(e.getClass().getName());
dto.setException(e.getLocalizedMessage());
dto.setFuncCode(FunctionCodeType.ERROR);
dto.setFaultType(faultType.name());
}
private void addValidationErrors(InvalidRequestFault dto, List<XMLValidationError> errors) {
if (errors != null) {
for (XMLValidationError error : errors) {
ValidationType valType = new ValidationType();
valType.setError(error.getError());
dto.getError().add(valType);
}
}
}
} |
Java | @Provider
@PreMatching
public class OptionsFilter implements ContainerRequestFilter {
/**
* Look for OPTIONS requests and abort the request with an OK response and the allowed methods and headers.
*
* @param requestContext
* the {@link ContainerRequestContext}
* @throws IOException
*/
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
if (requestContext.getMethod().equals("OPTIONS")) {
requestContext.abortWith(
Response.ok()
.header("Access-Control-Allow-Origin", "*")
.header("Access-Control-Allow-Methods", "GET, PUT, POST, OPTIONS, DELETE")
.header("Access-Control-Allow-Headers", "Content-Type, Authorization, Accept, " +
"Accept-Language, Authorization")
.build());
}
}
} |
Java | public class ValidatorParamsExt extends ValidatorParams
{
protected RevocationParametersExt revocationSettings;
/**
* Default constructor: proxies are allowed, no initial update listeners,
* default revocation settings.
*/
public ValidatorParamsExt()
{
this(new RevocationParametersExt(), ValidatorParams.DEFAULT_PROXY_SUPPORT,
new ArrayList<StoreUpdateListener>());
}
/**
* Allows for setting all parameters except the list of initial listeners
* @param revocationSettings desired revocation settings
* @param allowProxy whether to allow proxies
*/
public ValidatorParamsExt(RevocationParametersExt revocationSettings,
ProxySupport allowProxy)
{
this(revocationSettings, allowProxy, new ArrayList<StoreUpdateListener>());
}
/**
* Full version, allows for setting all parameters.
* @param revocationSettings desired revocation settings
* @param allowProxy whether to allow proxies
* @param initialListeners initial trust store update listeners
*/
public ValidatorParamsExt(RevocationParametersExt revocationSettings,
ProxySupport allowProxy,
Collection<? extends StoreUpdateListener> initialListeners)
{
super(revocationSettings, allowProxy, initialListeners);
setRevocationSettings(revocationSettings);
}
/**
* @return revocation checking settings
*/
@Override
public RevocationParametersExt getRevocationSettings()
{
return revocationSettings;
}
/**
* @param revocationSettings revocation checking settings
*/
public void setRevocationSettings(RevocationParametersExt revocationSettings)
{
this.revocationSettings = revocationSettings;
}
/**
* Do not use this method - it will always throw an exception. Use the one
* with extended parameters.
* @param revocationSettings revocation checking settings
*
*/
@Override
public void setRevocationSettings(RevocationParameters revocationSettings)
{
throw new IllegalArgumentException("This class can be configured " +
"only using " + RevocationParametersExt.class);
}
} |
Java | public class MainNode extends DSMainNode {
public MainNode() {
}
@Override
protected void declareDefaults() {
super.declareDefaults();
// Change the following URL to your README
declareDefault("Docs",
DSString.valueOf(Constants.WEATHER_DOC_URL))
.setTransient(true)
.setReadOnly(true);
declareDefault(Constants.CREATE_TRACKER, makeCreateTracerAction());
}
private DSAction makeCreateTracerAction() {
DSAction act = new DSAction() {
@Override
public ActionResult invoke(DSInfo info, ActionInvocation invocation) {
((MainNode) info.getParent()).addNewCity(invocation.getParameters());
return null;
}
};
act.addParameter(Constants.CITY, DSValueType.STRING, null);
act.addParameter(Constants.UNITS, DSValueType.ENUM, null).setEnumRange(Constants.UNITS_IMPERIAL, Constants.UNITS_METRIC);
return act;
}
private void addNewCity(DSMap parameters) {
String city = parameters.getString(Constants.CITY);
String units = parameters.getString(Constants.UNITS);
if (city != null) {
put(parameters.getString(Constants.CITY), new WeatherCityNode(city, units));
}
}
} |
Java | public class UnionGlob extends Glob {
private final Glob lval;
private final Glob rval;
UnionGlob(Glob lval, Glob rval) {
this.lval = Preconditions.checkNotNull(lval);
this.rval = Preconditions.checkNotNull(rval);
}
@Override
public PathMatcher relativeTo(Path base) {
PathMatcher leftMatcher = lval.relativeTo(base);
PathMatcher rightMatcher = rval.relativeTo(base);
return new PathMatcher() {
@Override
public boolean matches(Path path) {
return leftMatcher.matches(path) || rightMatcher.matches(path);
}
@Override
public String toString() {
return UnionGlob.this.toString();
}
};
}
@Override
public ImmutableSet<String> roots() {
return computeRootsFromIncludes(getIncludes());
}
@Override
protected Iterable<String> getIncludes() {
return Iterables.concat(lval.getIncludes(), rval.getIncludes());
}
@Override
public String toString() {
return lval + " + " + rval;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UnionGlob unionGlob = (UnionGlob) o;
return Objects.equals(lval, unionGlob.lval)
&& Objects.equals(rval, unionGlob.rval);
}
@Override
public int hashCode() {
return Objects.hash(lval, rval);
}
} |
Java | @ApiModel(description = "A single entry credit block from the factom blockchain.")
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2019-07-03T22:54:40.268681300Z[UTC]")
public class ECBlockLong {
public static final String SERIALIZED_NAME_HASH = "hash";
@SerializedName(SERIALIZED_NAME_HASH)
private String hash;
public static final String SERIALIZED_NAME_DBLOCK = "dblock";
@SerializedName(SERIALIZED_NAME_DBLOCK)
private ECBlockLongDblock dblock = null;
public static final String SERIALIZED_NAME_ENTRIES = "entries";
@SerializedName(SERIALIZED_NAME_ENTRIES)
private String entries;
public ECBlockLong hash(String hash) {
this.hash = hash;
return this;
}
/**
* The SHA256 Hash of this entry credit block.
* @return hash
**/
@ApiModelProperty(value = "The SHA256 Hash of this entry credit block.")
public String getHash() {
return hash;
}
public void setHash(String hash) {
this.hash = hash;
}
public ECBlockLong dblock(ECBlockLongDblock dblock) {
this.dblock = dblock;
return this;
}
/**
* Get dblock
* @return dblock
**/
@ApiModelProperty(value = "")
public ECBlockLongDblock getDblock() {
return dblock;
}
public void setDblock(ECBlockLongDblock dblock) {
this.dblock = dblock;
}
public ECBlockLong entries(String entries) {
this.entries = entries;
return this;
}
/**
* The entries contained in this admin block.
* @return entries
**/
@ApiModelProperty(value = "The entries contained in this admin block.")
public String getEntries() {
return entries;
}
public void setEntries(String entries) {
this.entries = entries;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ECBlockLong ecBlockLong = (ECBlockLong) o;
return Objects.equals(this.hash, ecBlockLong.hash) &&
Objects.equals(this.dblock, ecBlockLong.dblock) &&
Objects.equals(this.entries, ecBlockLong.entries);
}
@Override
public int hashCode() {
return Objects.hash(hash, dblock, entries);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class ECBlockLong {\n");
sb.append(" hash: ").append(toIndentedString(hash)).append("\n");
sb.append(" dblock: ").append(toIndentedString(dblock)).append("\n");
sb.append(" entries: ").append(toIndentedString(entries)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
} |
Java | @XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "wall", propOrder = {
"hasWallOpening"
})
public class Wall implements Cloneable
{
protected List<WallOpening> hasWallOpening;
@XmlAttribute(name = "id")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlID
@XmlSchemaType(name = "ID")
protected String id;
@XmlAttribute(name = "length")
protected String length;
@XmlAttribute(name = "height")
protected String height;
@XmlAttribute(name = "class")
@JsonProperty("class")
protected String clazz;
@XmlAttribute(name = "ref")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
protected Object ref;
/**
* Creates a new {@code Wall} instance.
*
*/
public Wall() {
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
super();
}
/**
* Creates a new {@code Wall} instance by deeply copying a given {@code Wall} instance.
*
*
* @param o
* The instance to copy.
* @throws NullPointerException
* if {@code o} is {@code null}.
*/
public Wall(final Wall o) {
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
super();
if (o == null) {
throw new NullPointerException("Cannot create a copy of 'Wall' from 'null'.");
}
// 'HasWallOpening' collection.
if (o.hasWallOpening!= null) {
copyHasWallOpening(o.getHasWallOpening(), this.getHasWallOpening());
}
// CBuiltinLeafInfo: java.lang.String
this.id = ((o.id == null)?null:o.getId());
// CBuiltinLeafInfo: java.lang.String
this.length = ((o.length == null)?null:o.getLength());
// CBuiltinLeafInfo: java.lang.String
this.height = ((o.height == null)?null:o.getHeight());
// CBuiltinLeafInfo: java.lang.String
this.clazz = ((o.clazz == null)?null:o.getClazz());
// CBuiltinLeafInfo: java.lang.Object
this.ref = ((o.ref == null)?null:copyOf(o.getRef()));
}
/**
* Gets the value of the hasWallOpening property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the hasWallOpening property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getHasWallOpening().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link WallOpening }
*
*
*/
public List<WallOpening> getHasWallOpening() {
if (hasWallOpening == null) {
hasWallOpening = new ArrayList<WallOpening>();
}
return this.hasWallOpening;
}
/**
* Gets the value of the id property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setId(String value) {
this.id = value;
}
/**
* Gets the value of the length property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getLength() {
return length;
}
/**
* Sets the value of the length property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setLength(String value) {
this.length = value;
}
/**
* Gets the value of the height property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getHeight() {
return height;
}
/**
* Sets the value of the height property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setHeight(String value) {
this.height = value;
}
/**
* Gets the value of the clazz property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getClazz() {
return clazz;
}
/**
* Sets the value of the clazz property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setClazz(String value) {
this.clazz = value;
}
/**
* Gets the value of the ref property.
*
* @return
* possible object is
* {@link Object }
*
*/
public Object getRef() {
return ref;
}
/**
* Sets the value of the ref property.
*
* @param value
* allowed object is
* {@link Object }
*
*/
public void setRef(Object value) {
this.ref = value;
}
/**
* Copies all values of property {@code HasWallOpening} deeply.
*
* @param source
* The source to copy from.
* @param target
* The target to copy {@code source} to.
* @throws NullPointerException
* if {@code target} is {@code null}.
*/
private static void copyHasWallOpening(final List<WallOpening> source, final List<WallOpening> target) {
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
if ((source!= null)&&(!source.isEmpty())) {
for (final Iterator<?> it = source.iterator(); it.hasNext(); ) {
final Object next = it.next();
if (next instanceof WallOpening) {
// CClassInfo: it.polito.elite.dog.core.library.jaxb.WallOpening
target.add(((WallOpening) next).clone());
continue;
}
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw new AssertionError((("Unexpected instance '"+ next)+"' for property 'HasWallOpening' of class 'it.polito.elite.dog.core.library.jaxb.Wall'."));
}
}
}
/**
* Creates and returns a deep copy of a given object.
*
* @param o
* The instance to copy or {@code null}.
* @return A deep copy of {@code o} or {@code null} if {@code o} is
* {@code null}.
*/
@SuppressWarnings("rawtypes")
private static Object copyOf(final Object o) {
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
try {
if (o!= null) {
if (o.getClass().isPrimitive()) {
return o;
}
if (o.getClass().isArray()) {
return copyOfArray(o);
}
// Immutable types.
if (o instanceof Boolean) {
return o;
}
if (o instanceof Byte) {
return o;
}
if (o instanceof Character) {
return o;
}
if (o instanceof Double) {
return o;
}
if (o instanceof Enum) {
return o;
}
if (o instanceof Float) {
return o;
}
if (o instanceof Integer) {
return o;
}
if (o instanceof Long) {
return o;
}
if (o instanceof Short) {
return o;
}
if (o instanceof String) {
return o;
}
if (o instanceof BigDecimal) {
return o;
}
if (o instanceof BigInteger) {
return o;
}
if (o instanceof UUID) {
return o;
}
if (o instanceof QName) {
return o;
}
if (o instanceof Duration) {
return o;
}
if (o instanceof Currency) {
return o;
}
// String based types.
if (o instanceof File) {
return new File(o.toString());
}
if (o instanceof URI) {
return new URI(o.toString());
}
if (o instanceof URL) {
return new URL(o.toString());
}
if (o instanceof MimeType) {
return new MimeType(o.toString());
}
// Cloneable types.
if (o instanceof XMLGregorianCalendar) {
return ((XMLGregorianCalendar) o).clone();
}
if (o instanceof Date) {
return ((Date) o).clone();
}
if (o instanceof Calendar) {
return ((Calendar) o).clone();
}
if (o instanceof TimeZone) {
return ((TimeZone) o).clone();
}
if (o instanceof Locale) {
return ((Locale) o).clone();
}
if (o instanceof Element) {
return ((Element)((Element) o).cloneNode(true));
}
if (o instanceof JAXBElement) {
return copyOf(((JAXBElement) o));
}
try {
return o.getClass().getMethod("clone", ((Class[]) null)).invoke(o, ((Object[]) null));
} catch (NoSuchMethodException e) {
if (o instanceof Serializable) {
return copyOf(((Serializable) o));
}
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
} catch (IllegalAccessException e) {
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
} catch (InvocationTargetException e) {
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
} catch (SecurityException e) {
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
} catch (IllegalArgumentException e) {
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
} catch (ExceptionInInitializerError e) {
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
}
}
return null;
} catch (MalformedURLException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
} catch (MimeTypeParseException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
} catch (URISyntaxException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ o)+"'.")).initCause(e));
}
}
/**
* Creates and returns a deep copy of a given array.
*
* @param array
* The array to copy or {@code null}.
* @return
* A deep copy of {@code array} or {@code null} if {@code array} is {@code null}.
*/
private static Object copyOfArray(final Object array) {
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
if (array!= null) {
if (array.getClass() == boolean[].class) {
return ((((boolean[]) array) == null)?null:Arrays.copyOf(((boolean[]) array), ((boolean[]) array).length));
}
if (array.getClass() == byte[].class) {
return ((((byte[]) array) == null)?null:Arrays.copyOf(((byte[]) array), ((byte[]) array).length));
}
if (array.getClass() == char[].class) {
return ((((char[]) array) == null)?null:Arrays.copyOf(((char[]) array), ((char[]) array).length));
}
if (array.getClass() == double[].class) {
return ((((double[]) array) == null)?null:Arrays.copyOf(((double[]) array), ((double[]) array).length));
}
if (array.getClass() == float[].class) {
return ((((float[]) array) == null)?null:Arrays.copyOf(((float[]) array), ((float[]) array).length));
}
if (array.getClass() == int[].class) {
return ((((int[]) array) == null)?null:Arrays.copyOf(((int[]) array), ((int[]) array).length));
}
if (array.getClass() == long[].class) {
return ((((long[]) array) == null)?null:Arrays.copyOf(((long[]) array), ((long[]) array).length));
}
if (array.getClass() == short[].class) {
return ((((short[]) array) == null)?null:Arrays.copyOf(((short[]) array), ((short[]) array).length));
}
final int len = Array.getLength(array);
final Object copy = Array.newInstance(array.getClass().getComponentType(), len);
for (int i = (len- 1); (i >= 0); i--) {
Array.set(copy, i, copyOf(Array.get(array, i)));
}
return copy;
}
return null;
}
/**
* Creates and returns a deep copy of a given {@code JAXBElement} instance.
*
* @param element
* The instance to copy or {@code null}.
* @return
* A deep copy of {@code element} or {@code null} if {@code element} is {@code null}.
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
private static JAXBElement copyOf(final JAXBElement element) {
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
if (element!= null) {
final JAXBElement copy = new JAXBElement(element.getName(), element.getDeclaredType(), element.getScope(), element.getValue());
copy.setNil(element.isNil());
copy.setValue(copyOf(copy.getValue()));
return copy;
}
return null;
}
/**
* Creates and returns a deep copy of a given {@code Serializable}.
*
* @param serializable
* The instance to copy or {@code null}.
* @return
* A deep copy of {@code serializable} or {@code null} if {@code serializable} is {@code null}.
*/
private static Serializable copyOf(final Serializable serializable) {
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
if (serializable!= null) {
try {
final ByteArrayOutputStream byteArrayOutput = new ByteArrayOutputStream();
final ObjectOutputStream out = new ObjectOutputStream(byteArrayOutput);
out.writeObject(serializable);
out.close();
final ByteArrayInputStream byteArrayInput = new ByteArrayInputStream(byteArrayOutput.toByteArray());
final ObjectInputStream in = new ObjectInputStream(byteArrayInput);
final Serializable copy = ((Serializable) in.readObject());
in.close();
return copy;
} catch (SecurityException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ serializable)+"'.")).initCause(e));
} catch (ClassNotFoundException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ serializable)+"'.")).initCause(e));
} catch (InvalidClassException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ serializable)+"'.")).initCause(e));
} catch (NotSerializableException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ serializable)+"'.")).initCause(e));
} catch (StreamCorruptedException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ serializable)+"'.")).initCause(e));
} catch (OptionalDataException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ serializable)+"'.")).initCause(e));
} catch (IOException e) {
throw((AssertionError) new AssertionError((("Unexpected instance during copying object '"+ serializable)+"'.")).initCause(e));
}
}
return null;
}
/**
* Creates and returns a deep copy of this object.
*
*
* @return
* A deep copy of this object.
*/
@Override
public Wall clone() {
try {
{
// CC-XJC Version 2.0.1 Build 2012-03-02T12:09:12+0000
final Wall clone = ((Wall) super.clone());
// 'HasWallOpening' collection.
if (this.hasWallOpening!= null) {
clone.hasWallOpening = null;
copyHasWallOpening(this.getHasWallOpening(), clone.getHasWallOpening());
}
// CBuiltinLeafInfo: java.lang.String
clone.id = ((this.id == null)?null:this.getId());
// CBuiltinLeafInfo: java.lang.String
clone.length = ((this.length == null)?null:this.getLength());
// CBuiltinLeafInfo: java.lang.String
clone.height = ((this.height == null)?null:this.getHeight());
// CBuiltinLeafInfo: java.lang.String
clone.clazz = ((this.clazz == null)?null:this.getClazz());
// CBuiltinLeafInfo: java.lang.Object
clone.ref = ((this.ref == null)?null:copyOf(this.getRef()));
return clone;
}
} catch (CloneNotSupportedException e) {
// Please report this at https://apps.sourceforge.net/mantisbt/ccxjc/
throw new AssertionError(e);
}
}
} |
Java | public class MinusAssignStatement extends AssignNodeImpl {
public MinusAssignStatement(@NonNull AssignableValue left, @NonNull RuntimeValue minusOp,
LineNumber line) throws Exception {
super(left, minusOp, line);
}
public MinusAssignStatement(@NonNull ExpressionContext f,
@NonNull AssignableValue left, RuntimeValue value,
LineNumber line) throws Exception {
super(f, left, OperatorTypes.MINUS, value, line);
}
@Override
public AssignNode compileTimeConstantTransform(CompileTimeContext c)
throws Exception {
return new MinusAssignStatement(mLeftNode, mOperator.compileTimeExpressionFold(c), mLine);
}
} |
Java | @Plugin(type = Ops.Geometric.VertexInterpolator.class)
public class DefaultVertexInterpolator extends AbstractVertexInterpolator
implements Ops.Geometric.VertexInterpolator
{
@Parameter(type = ItemIO.INPUT)
double isolevel;
@Override
public void run() {
output = new double[3];
if (Math.abs(isolevel - p1Value) < 0.00001) {
for (int i = 0; i < 3; i++) {
output[i] = p1[i];
}
} else if (Math.abs(isolevel - p2Value) < 0.00001) {
for (int i = 0; i < 3; i++) {
output[i] = p2[i];
}
} else if (Math.abs(p1Value - p2Value) < 0.00001) {
for (int i = 0; i < 3; i++) {
output[i] = p1[i];
}
} else {
double mu = (isolevel - p1Value) / (p2Value - p1Value);
output[0] = p1[0] + mu * (p2[0] - p1[0]);
output[1] = p1[1] + mu * (p2[1] - p1[1]);
output[2] = p1[2] + mu * (p2[2] - p1[2]);
}
}
@Override
public void setIsoLevel(double d) {
isolevel = d;
}
} |
Java | public abstract class Entity implements Serializable {
private static final long serialVersionUID = 8466257860808346236L;
private int id;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
} |
Java | public class ServiceRequestRepo {
DatabaseReference serviceRequestRef;
String path;
Map<String, ServiceRequest> serviceRequestMap = new HashMap<String, ServiceRequest>();
public ServiceRequestRepo(String path) {
serviceRequestRef = Db.coRef(path);
this.path = path;
init();
}
private void init() {
serviceRequestRef.addChildEventListener(new ChildEventListener() {
@Override
public void onChildAdded(DataSnapshot dataSnapshot, String prevChildKey) {
ServiceRequest serviceRequest = dataSnapshot.getValue(ServiceRequest.class);
serviceRequestMap.put(path+"/"+dataSnapshot.getKey(), serviceRequest);
// TODO send an email to admin?
System.err.println(serviceRequest);
}
@Override
public void onChildChanged(DataSnapshot dataSnapshot, String prevChildKey) {
ServiceRequest serviceRequest = dataSnapshot.getValue(ServiceRequest.class);
serviceRequestMap.put(path+"/"+dataSnapshot.getKey(), serviceRequest);
}
@Override
public void onChildRemoved(DataSnapshot dataSnapshot) {
serviceRequestMap.remove(path+"/"+dataSnapshot.getKey());
}
@Override
public void onChildMoved(DataSnapshot dataSnapshot, String prevChildKey) {
}
@Override
public void onCancelled(DatabaseError databaseError) {
throw new RuntimeException(databaseError.getMessage());
}
});
}
} |
Java | public class Light_Philips_Hue_GO2_Tool {
private static int myId=0;
String brokerIpAddress1 = "192.168.50.179";
/**
 * Creates a new tool instance and bumps the class-wide instance counter.
 *
 * Fix: myId is static, so the original's "this.myId = this.myId + 1" only
 * obscured that this is a shared counter, not a per-instance id.
 * NOTE(review): the increment is not atomic — confirm instances are only
 * constructed from one thread, or switch the counter to AtomicInteger.
 */
public Light_Philips_Hue_GO2_Tool() {
    myId = myId + 1;
}
/**
 * Decides whether the light needs to be switched, without performing the
 * actual publish.
 *
 * The entity caches the last state reported over the MQTT subscription. The
 * state can still be null right after startup (no status message received
 * yet); in that case a status request is published so that a later call can
 * decide, and DEVICE_NULL is returned.
 *
 * @param mySwitchToState      desired state: "ON" or "OFF"
 * @param philipsHueGo2Entity1 entity mirroring the device's last known state
 * @return DEVICE_NULL when the entity or its state is unknown,
 *         NoNeedToChange when the device already is in the desired state,
 *         NeedToChange when a publish is required,
 *         SOMETHING_WRONG for any unexpected state value
 */
public EnumDeviceTrancLogicResult mySwitchTransactionLogic(String mySwitchToState, PhilipsHueGo2Entity philipsHueGo2Entity1) {
    if (philipsHueGo2Entity1 == null) {
        System.err.println("entity is null");
        return EnumDeviceTrancLogicResult.DEVICE_NULL;
    }
    // Last state reported by the device via the subscription; null until the
    // first status message arrives.
    final String switchStateTmp = philipsHueGo2Entity1.getState();
    if (switchStateTmp == null) {
        // Ask the broker to notify all subscribers (including ours) of the
        // current status; a later invocation will then see a non-null state.
        System.out.println("mySwitchTransaction"+"hue go 2 light null state, try to get hue go 2 light status");
        this.sendGetToNotifySubscriberToGetStatus(philipsHueGo2Entity1);
        return EnumDeviceTrancLogicResult.DEVICE_NULL;
    }
    if ("ON".equals(switchStateTmp) || "OFF".equals(switchStateTmp)) {
        if (switchStateTmp.equals(mySwitchToState)) {
            // Already in the requested state — nothing to do.
            System.err.println("mySwitchTransaction"+"hue go 2 light same state");
            return EnumDeviceTrancLogicResult.NoNeedToChange;
        }
        // Current state differs from the requested one: a publish is needed.
        System.err.println("mySwitchTransaction"+"hue go 2 light different state, changing");
        return EnumDeviceTrancLogicResult.NeedToChange;
    }
    // State string is neither "ON" nor "OFF" — unexpected.
    System.err.println(this.getClass().getName() +":mySwitchTransactionLogic"+" something wrong");
    return EnumDeviceTrancLogicResult.SOMETHING_WRONG;
}
/**
 * Runs the full switch transaction: evaluates the device's current state and,
 * only when a change is actually required, publishes the new state to the broker.
 *
 * Result codes:
 *   0  = default (should not normally be returned),
 *  -1  = failure for any reason,
 *   1  = no change needed — the device is already in the requested state,
 *   2  = state changed successfully.
 *
 * @param mySwitchToState      target state, "ON" or "OFF"
 * @param philipsHueGo2Entity1 the device entity to operate on
 * @return one of the result codes above
 */
public int mySwitchTransaction(String mySwitchToState, PhilipsHueGo2Entity philipsHueGo2Entity1) {
	EnumDeviceTrancLogicResult logicResult = mySwitchTransactionLogic(mySwitchToState, philipsHueGo2Entity1);
	if (logicResult == null) {
		System.out.println(this.getClass().getName().toString()+"/mySwitchTransaction"+ "something is wrong");
		return -1;
	}
	switch (logicResult) {
		case SOMETHING_WRONG:
			System.out.println(this.getClass().getName().toString()+"/mySwitchTransaction"+ "something is wrong when this method calling mySwitchTransactionLogic");
			return -1;
		case DEVICE_NULL:
			return -1;
		case NoNeedToChange:
			return 1;
		case NeedToChange: {
			// Publish the new state; publish() reports success with 1.
			int publishResult = publish(brokerIpAddress1, philipsHueGo2Entity1.getTopicUrl_set() , mySwitchToState);
			return (publishResult == 1) ? 2 : -1;
		}
		default:
			// Unreachable with the current enum values; preserves the original
			// fall-through result of 0.
			return 0;
	}
}
/**
 * Builds the JSON payload ({@code {"state": "..."}}) used to switch the device.
 *
 * Only the values "ON", "OFF" and "" are recognized; any other input leaves the
 * map empty and yields "{}". Returns {@code null} when Jackson fails to
 * serialize (the error is logged, preserving the legacy behavior).
 *
 * @param mySwitchState "ON", "OFF" or "" (empty string is used for state queries)
 * @return the JSON string, or null on serialization failure
 */
public String establishPublishJson(String mySwitchState) {
	LinkedHashMap<String,Object> payload = new LinkedHashMap<>();
	// The three accepted values all map straight through to the "state" key,
	// so a single membership check replaces the old per-value branches.
	if (mySwitchState.contentEquals("ON")
			|| mySwitchState.contentEquals("OFF")
			|| mySwitchState.contentEquals("")) {
		payload.put("state", mySwitchState);
	}
	ObjectMapper mapper = new ObjectMapper();
	try {
		return mapper.writeValueAsString(payload);
	} catch (JsonProcessingException e) {
		// Legacy contract: log the failure and return null rather than throw.
		e.printStackTrace();
		return null;
	}
}
/**
 * Publishes the switch-state JSON payload to the given topic, delegating the
 * broker connection handling to the {@code MyPublishTool} utility class.
 *
 * @param brokerIpAddress   broker host; port 1883 is assumed
 * @param topicUrlToPublish MQTT topic to publish on
 * @param mySwitchState     "ON", "OFF" or "" (see establishPublishJson)
 * @return 1 on success (result of MyPublishTool.myPulibsh)
 */
public int publish(String brokerIpAddress, String topicUrlToPublish , String mySwitchState) {
	String clientId = "JavaSample_publisher1";
	final InetSocketAddress brokerAddress = new InetSocketAddress(brokerIpAddress, 1883);
	// Reuse the shared JSON builder instead of duplicating the map/Jackson
	// boilerplate here; the produced payload is identical.
	String jsonPayload = establishPublishJson(mySwitchState);
	ArrayList<String> jsonMessages = new ArrayList<String>();
	jsonMessages.add(jsonPayload);
	return MyPublishTool.myPulibsh(brokerAddress, clientId, topicUrlToPublish, jsonMessages);
}
/**
 * Publishes the switch-state JSON to the given topic using the HiveMQ MQTT 5
 * async client against the default broker ({@code brokerIpAddress1}:1883).
 *
 * NOTE(review): the connect result is never inspected — after the 1000 ms
 * timeout the future completes (possibly exceptionally) and publishing
 * proceeds regardless. This method always returns 0; callers that need a
 * success signal should use the three-argument overload.
 *
 * @param topicUrlToPublish MQTT topic to publish on
 * @param mySwitchState     "ON", "OFF" or ""
 * @return always 0
 */
public int publish(String topicUrlToPublish , String mySwitchState) {
	String clientId = "JavaSample_publisher1";
	//------------------------------- create mqtt client --------------------------------------
	final InetSocketAddress brokerAddress = new InetSocketAddress(brokerIpAddress1, 1883);
	Mqtt5AsyncClient client1 = Mqtt5Client.builder().serverAddress(brokerAddress).identifier(clientId).buildAsync();
	//------------------------------- client connect --------------------------------------
	System.out.println("try connect");
	// Unlike Paho's waitForCompletion(-1) (wait forever), orTimeout gives the
	// connect at most 1000 ms before completing the future exceptionally.
	CompletableFuture<Mqtt5ConnAck> connectResult = client1.connect().orTimeout(1000, TimeUnit.MILLISECONDS);
	System.out.println("try connecting");
	// Busy-wait (with 500 ms naps) until the connect future settles.
	while (!connectResult.isDone()) {
		System.out.println(this.myId + " waitttt too much");
		try {
			Thread.sleep(500);
		} catch (InterruptedException e) {
			e.printStackTrace();
			// Restore the interrupt flag so callers can observe it.
			Thread.currentThread().interrupt();
		}
	}
	System.out.println("mypublisher:" + this.myId + ",connected");
	//------------------------------- client publish --------------------------------------
	// ref: https://www.zigbee2mqtt.io/devices/BASICZBR3.html
	// The original for(int i=0;i<=0;i++) loop executed exactly once — unrolled.
	// JSON building is delegated to the shared helper (identical payload).
	String jsonPayload = establishPublishJson(mySwitchState);
	com.hivemq.client.mqtt.mqtt5.message.publish.Mqtt5PublishBuilder.Send<CompletableFuture<Mqtt5PublishResult>> publishBuilder1 = client1.publishWith();
	com.hivemq.client.mqtt.mqtt5.message.publish.Mqtt5PublishBuilder.Send.Complete<CompletableFuture<Mqtt5PublishResult>> c1 = publishBuilder1.topic(topicUrlToPublish);
	c1.qos(MqttQos.AT_LEAST_ONCE);
	c1.payload(jsonPayload.getBytes());
	// send(): completes when the built Mqtt5Publish has been sent.
	c1.send();
	System.out.println("hello:"+jsonPayload);
	client1.disconnect();
	try {
		Thread.sleep(500);
	} catch (InterruptedException e) {
		e.printStackTrace();
		Thread.currentThread().interrupt();
	}
	System.out.println("mypublisher:" + this.myId + ",disconnected");
	return 0;
}
/*
 * Legacy hard-coded variant, kept for reference:
 *
 * public int sendGetToNotifySubscriberToGetStatus() {
 *     this.publish("zigbee2mqtt/0x0017880109e5d363" + "/get", "");
 *     return 1;
 * }
 */
/**
 * Sends a "get" request (e.g. on topic "zigbee2mqtt/&lt;device&gt;/get") so the
 * broker re-announces the device's current state to every subscriber.
 *
 * @return
 */
/**
 * Publishes an empty {@code {"state": ""}} message to the device's ".../get"
 * topic so the broker notifies every subscriber (including our own
 * subscription) of the current state.
 *
 * @param philipsHueGo2Entity1 device whose get-topic is queried
 * @return 1 on success (result of the underlying publish)
 */
public int sendGetToNotifySubscriberToGetStatus(PhilipsHueGo2Entity philipsHueGo2Entity1) {
	// Same wire behavior as before: publish(broker, topic, "") builds the
	// identical {"state": ""} payload with the same client id and port, so the
	// duplicated JSON/connection boilerplate is gone.
	return publish(brokerIpAddress1, philipsHueGo2Entity1.getTopicUrl_get(), "");
}
//-------------------------------------------------------------------------------
/**
 * Lightweight holder for a received MQTT JSON payload, with on-demand parsing
 * into an insertion-ordered map via Jackson.
 */
private class MyMessageTmp{
	// Raw JSON string of the most recent message; null until set.
	String myJsonContent = null;
	public MyMessageTmp() {
	}
	public void setMyJsonContent(String myJsonContent) {
		this.myJsonContent = myJsonContent;
	}
	public String getMyJsonContent() {
		return this.myJsonContent;
	}
	/**
	 * Parses the stored JSON into a {@link LinkedHashMap} preserving key order.
	 * Fixed the raw {@code Map} return type to {@code Map<String, Object>}.
	 *
	 * @return the parsed key/value pairs, or null when parsing fails
	 *         (the error is logged, matching the legacy behavior)
	 */
	public Map<String, Object> getMyJsonContentMap() {
		ObjectMapper mapper = new ObjectMapper();
		TypeReference<LinkedHashMap<String,Object>> mapType = new TypeReference<LinkedHashMap<String,Object>>() {};
		try {
			return mapper.readValue(myJsonContent, mapType);
		} catch (JsonMappingException e) {
			e.printStackTrace();
		} catch (JsonProcessingException e) {
			e.printStackTrace();
		}
		return null;
	}
}
}
Java | public final class TargetingProtoUtils {
/** Moves targeting values to the alternatives. */
public static AssetsDirectoryTargeting toAlternativeTargeting(
AssetsDirectoryTargeting targeting) {
AssetsDirectoryTargeting.Builder alternativeTargeting = AssetsDirectoryTargeting.newBuilder();
if (targeting.hasTextureCompressionFormat()) {
alternativeTargeting
.getTextureCompressionFormatBuilder()
.addAllAlternatives(targeting.getTextureCompressionFormat().getValueList());
}
if (targeting.hasAbi()) {
alternativeTargeting.getAbiBuilder().addAllAlternatives(targeting.getAbi().getValueList());
}
if (targeting.hasLanguage()) {
alternativeTargeting
.getLanguageBuilder()
.addAllAlternatives(targeting.getLanguage().getValueList());
}
if (targeting.hasDeviceTier()) {
alternativeTargeting
.getDeviceTierBuilder()
.addAllAlternatives(targeting.getDeviceTier().getValueList());
}
return alternativeTargeting.build();
}
/** Extracts multi-ABI values from the targeting. */
public static ImmutableSet<MultiAbi> multiAbiValues(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getMultiAbiTargeting().getValueList());
}
/** Extracts multi-ABI alternatives from the targeting. */
public static ImmutableSet<MultiAbi> multiAbiAlternatives(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getMultiAbiTargeting().getAlternativesList());
}
/** Extracts targeted multi-ABI universe (values and alternatives) from the targeting. */
public static ImmutableSet<MultiAbi> multiAbiUniverse(ApkTargeting targeting) {
return ImmutableSet.<MultiAbi>builder()
.addAll(multiAbiValues(targeting))
.addAll(multiAbiAlternatives(targeting))
.build();
}
/** Extracts ABI values from the targeting. */
public static ImmutableSet<Abi> abiValues(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getAbiTargeting().getValueList());
}
/** Extracts ABI alternatives from the targeting. */
public static ImmutableSet<Abi> abiAlternatives(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getAbiTargeting().getAlternativesList());
}
/** Extracts targeted ABI universe (values and alternatives) from the targeting. */
public static ImmutableSet<Abi> abiUniverse(ApkTargeting targeting) {
return ImmutableSet.<Abi>builder()
.addAll(abiValues(targeting))
.addAll(abiAlternatives(targeting))
.build();
}
/** Extracts screen density values from the targeting. */
public static ImmutableSet<ScreenDensity> densityValues(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getScreenDensityTargeting().getValueList());
}
/** Extracts screen density alternatives from the targeting. */
public static ImmutableSet<ScreenDensity> densityAlternatives(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getScreenDensityTargeting().getAlternativesList());
}
/** Extracts targeted screen density universe (values and alternatives) from the targeting. */
public static ImmutableSet<ScreenDensity> densityUniverse(ApkTargeting targeting) {
return ImmutableSet.<ScreenDensity>builder()
.addAll(densityValues(targeting))
.addAll(densityAlternatives(targeting))
.build();
}
/** Extracts language values from the targeting. */
public static ImmutableSet<String> languageValues(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getLanguageTargeting().getValueList());
}
/** Extracts language alternatives from the targeting. */
public static ImmutableSet<String> languageAlternatives(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getLanguageTargeting().getAlternativesList());
}
/** Extracts targeted language universe (values and alternatives) from the targeting. */
public static ImmutableSet<String> languageUniverse(ApkTargeting targeting) {
return ImmutableSet.<String>builder()
.addAll(languageValues(targeting))
.addAll(languageAlternatives(targeting))
.build();
}
/** Extracts Texture Compression Format values from the targeting. */
public static ImmutableSet<TextureCompressionFormat> textureCompressionFormatValues(
ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getTextureCompressionFormatTargeting().getValueList());
}
/** Extracts Texture Compression Format alternatives from the targeting. */
public static ImmutableSet<TextureCompressionFormat> textureCompressionFormatAlternatives(
ApkTargeting targeting) {
return ImmutableSet.copyOf(
targeting.getTextureCompressionFormatTargeting().getAlternativesList());
}
/**
* Extracts targeted Texture Compression Format universe (values and alternatives) from the
* targeting.
*/
public static ImmutableSet<TextureCompressionFormat> textureCompressionFormatUniverse(
ApkTargeting targeting) {
return ImmutableSet.<TextureCompressionFormat>builder()
.addAll(textureCompressionFormatValues(targeting))
.addAll(textureCompressionFormatAlternatives(targeting))
.build();
}
/** Extracts Texture Compression Format values from the targeting. */
public static ImmutableSet<String> deviceTierValues(ApkTargeting targeting) {
return ImmutableSet.copyOf(targeting.getDeviceTierTargeting().getValueList());
}
/** Extracts targeted Device Tier universe (values and alternatives) from the targeting. */
public static ImmutableSet<String> deviceTierUniverse(ApkTargeting targeting) {
return ImmutableSet.<String>builder()
.addAll(targeting.getDeviceTierTargeting().getValueList())
.addAll(targeting.getDeviceTierTargeting().getAlternativesList())
.build();
}
public static SdkVersion sdkVersionFrom(int from) {
return SdkVersion.newBuilder().setMin(Int32Value.newBuilder().setValue(from)).build();
}
public static SdkVersionTargeting sdkVersionTargeting(
SdkVersion sdkVersion, ImmutableSet<SdkVersion> alternatives) {
return SdkVersionTargeting.newBuilder()
.addValue(sdkVersion)
.addAllAlternatives(alternatives)
.build();
}
public static SdkVersionTargeting sdkVersionTargeting(SdkVersion sdkVersion) {
return SdkVersionTargeting.newBuilder().addValue(sdkVersion).build();
}
public static VariantTargeting variantTargeting(SdkVersionTargeting sdkVersionTargeting) {
return VariantTargeting.newBuilder().setSdkVersionTargeting(sdkVersionTargeting).build();
}
public static VariantTargeting lPlusVariantTargeting() {
return variantTargeting(sdkVersionTargeting(sdkVersionFrom(ANDROID_L_API_VERSION)));
}
public static Optional<Integer> getScreenDensityDpi(
ScreenDensityTargeting screenDensityTargeting) {
if (screenDensityTargeting.getValueList().isEmpty()) {
return Optional.empty();
}
ScreenDensity densityTargeting =
screenDensityTargeting.getValueList().stream()
// For now we only support one value in ScreenDensityTargeting.
.collect(onlyElement());
return Optional.of(ResourcesUtils.convertToDpi(densityTargeting));
}
} |
Java | public class BaseUnionDatatype extends BaseDatatype implements UnionDatatype {
@SuppressWarnings("deprecation")
protected Set<Datatype> members;
@SuppressWarnings("deprecation")
BaseUnionDatatype(ATermAppl name, Datatype[] members) {
super(name);
this.members = SetUtils.create(members);
}
@SuppressWarnings("deprecation")
BaseUnionDatatype(ATermAppl name, Set<Datatype> members) {
super(name);
this.members = members;
}
@SuppressWarnings("deprecation")
BaseUnionDatatype(Datatype[] members) {
super(null);
this.members = SetUtils.create(members);
}
@SuppressWarnings("deprecation")
BaseUnionDatatype(Set<Datatype> members) {
super(null);
this.members = members;
}
@SuppressWarnings("deprecation")
public Set<Datatype> getMembers() {
return Collections.unmodifiableSet(members);
}
@SuppressWarnings("deprecation")
public int size() {
int size = 0;
for (Datatype dt : members) {
size += dt.size();
if (size < 0)
return Integer.MAX_VALUE;
}
return size;
}
@SuppressWarnings("deprecation")
public boolean contains(Object value) {
for (Datatype dt : members) {
if (dt.contains(value))
return true;
}
return false;
}
@SuppressWarnings("deprecation")
public boolean contains(Object value, AtomicDatatype datatype) {
// Datatype valDatatype = (Datatype) datatype;
for (Datatype dt : members) {
if (dt instanceof AtomicDatatype) {
if (!datatype.getPrimitiveType().equals(
((AtomicDatatype) dt).getPrimitiveType()))
continue;
}
if (dt.contains(value, datatype))
return true;
}
return false;
}
@SuppressWarnings("deprecation")
public Object getValue(String value, String datatypeURI) {
Object obj = null;
for (Datatype dt : members) {
obj = dt.getValue(value, datatypeURI);
if (obj != null)
break;
}
return obj;
}
@SuppressWarnings("deprecation")
public Datatype singleton(Object value) {
Datatype datatype = null;
for (Datatype dt : members) {
if (dt.contains(value)) {
datatype = dt.singleton(value);
if (datatype != null)
break;
}
}
return datatype;
}
@SuppressWarnings("deprecation")
public ATermAppl getValue(int n) {
for (Datatype dt : members) {
int dtSize = dt.size();
if (dtSize == ValueSpace.INFINITE || n < dtSize)
return dt.getValue(n);
else
n -= dt.size();
}
throw new InternalReasonerException("No values for this datatype");
}
public String toString() {
return "UnionDatatype " + members;
}
} |
Java | @ExtendWith( JGivenExtension.class )
public class ScenarioTest<GIVEN, WHEN, THEN> extends ScenarioTestBase<GIVEN, WHEN, THEN> {
private Scenario<GIVEN, WHEN, THEN> scenario = createScenario();
@Override
public Scenario<GIVEN, WHEN, THEN> getScenario() {
return scenario;
}
} |
Subsets and Splits