column        dtype          values / lengths
------        -----          ----------------
lang          stringclasses  2 values
license       stringclasses  13 values
stderr        stringlengths  0 to 343
commit        stringlengths  40 to 40
returncode    int64          0 to 128
repos         stringlengths  6 to 87.7k
new_contents  stringlengths  0 to 6.23M
new_file      stringlengths  3 to 311
old_contents  stringlengths  0 to 6.23M
message       stringlengths  6 to 9.1k
old_file      stringlengths  3 to 311
subject       stringlengths  0 to 4k
git_diff      stringlengths  0 to 6.31M

Each record below follows this field order. Short fields are labeled with their column name; the long fields (repos, new_contents, old_contents, git_diff) are left unlabeled, and fields that are empty in a row (stderr, in all three records) are omitted.
lang: Java
license: mpl-2.0
commit: d939197836f7164ca1748794c9d9b6f1c51c8cb7
returncode: 0
powsybl/powsybl-core,powsybl/powsybl-core,powsybl/powsybl-core
/** * Copyright (c) 2020, RTE (http://www.rte-france.com) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.powsybl.matpower.model; import com.google.common.collect.Sets; import us.hebi.matlab.mat.format.Mat5; import us.hebi.matlab.mat.types.MatFile; import us.hebi.matlab.mat.types.Matrix; import us.hebi.matlab.mat.types.Sources; import us.hebi.matlab.mat.types.Struct; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.Objects; import java.util.Set; /** * @author Christian Biasuzzi <[email protected]> */ public final class MatpowerReader { public static final String MATPOWER_STRUCT_NAME = "mpc"; public static final String MATPOWER_SUPPORTED_VERSION = "2"; private MatpowerReader() { } public static MatpowerModel read(Path file, String caseName) throws IOException { try (InputStream stream = Files.newInputStream(file)) { return read(stream, caseName); } } public static MatpowerModel read(InputStream iStream, String caseName) throws IOException { Objects.requireNonNull(iStream); MatpowerModel model = null; try (MatFile mat = Mat5.newReader(Sources.wrapInputStream(iStream)).setEntryFilter(entry -> entry.getName().equals(MATPOWER_STRUCT_NAME)).readMat()) { if (mat.getNumEntries() == 0) { throw new IllegalStateException("no MATPOWER data: expected structure named '" + MATPOWER_STRUCT_NAME + "' not found."); } Struct mpcStruct = mat.getStruct(MATPOWER_STRUCT_NAME); Set<String> mpcNames = Sets.newHashSet("version", "baseMVA", "bus", "gen", "branch"); if (!mpcStruct.getFieldNames().containsAll(mpcNames)) { throw new IllegalStateException("expected MATPOWER variables not found: " + mpcNames); } String version = mpcStruct.get("version").toString().replace("'", ""); if (!version.equals(MATPOWER_SUPPORTED_VERSION)) { throw new IllegalStateException("unsupported MATPOWER version: " + version); } double baseMVA = mpcStruct.getMatrix("baseMVA").getDouble(0); Matrix buses = mpcStruct.getMatrix("bus"); Matrix generators = mpcStruct.getMatrix("gen"); Matrix branches = mpcStruct.getMatrix("branch"); model = new MatpowerModel(caseName); model.setVersion(version); model.setBaseMva(baseMVA); for (int row = 0; row < buses.getDimensions()[0]; row++) { MBus bus = new MBus(); for (int col = 0; col < buses.getDimensions()[1]; col++) { bus.setNumber(buses.getInt(row, 0)); bus.setType(MBus.Type.fromInt(buses.getInt(row, 1))); bus.setRealPowerDemand(buses.getDouble(row, 2)); bus.setReactivePowerDemand(buses.getDouble(row, 3)); bus.setShuntConductance(buses.getDouble(row, 4)); bus.setShuntSusceptance(buses.getDouble(row, 5)); bus.setAreaNumber(buses.getInt(row, 6)); bus.setVoltageMagnitude(buses.getDouble(row, 7)); bus.setVoltageAngle(buses.getDouble(row, 8)); bus.setBaseVoltage(buses.getDouble(row, 9)); bus.setLossZone(buses.getInt(row, 10)); bus.setMaximumVoltageMagnitude(buses.getDouble(row, 11)); bus.setMinimumVoltageMagnitude(buses.getDouble(row, 12)); } model.getBuses().add(bus); } for (int row = 0; row < generators.getDimensions()[0]; row++) { MGen gen = new MGen(); for (int col = 0; col < generators.getDimensions()[1]; col++) { gen.setNumber(generators.getInt(row, 0)); gen.setRealPowerOutput(generators.getDouble(row, 1)); gen.setReactivePowerOutput(generators.getDouble(row, 2)); gen.setMaximumReactivePowerOutput(generators.getDouble(row, 3)); 
gen.setMinimumReactivePowerOutput(generators.getDouble(row, 4)); gen.setVoltageMagnitudeSetpoint(generators.getDouble(row, 5)); gen.setTotalMbase(generators.getDouble(row, 6)); gen.setStatus(generators.getInt(row, 7)); gen.setMaximumRealPowerOutput(generators.getDouble(row, 8)); gen.setMinimumRealPowerOutput(generators.getDouble(row, 9)); gen.setPc1(generators.getDouble(row, 10)); gen.setPc2(generators.getDouble(row, 11)); gen.setQc1Min(generators.getDouble(row, 12)); gen.setQc1Max(generators.getDouble(row, 13)); gen.setQc2Min(generators.getDouble(row, 14)); gen.setQc2Max(generators.getDouble(row, 15)); gen.setRampAgc(generators.getDouble(row, 16)); gen.setRampTenMinutes(generators.getDouble(row, 17)); gen.setRampThirtyMinutes(generators.getDouble(row, 18)); gen.setRampQ(generators.getDouble(row, 19)); gen.setApf(generators.getDouble(row, 20)); } model.getGenerators().add(gen); } for (int row = 0; row < branches.getDimensions()[0]; row++) { MBranch branch = new MBranch(); for (int col = 0; col < branches.getDimensions()[1]; col++) { branch.setFrom(branches.getInt(row, 0)); branch.setTo(branches.getInt(row, 1)); branch.setR(branches.getDouble(row, 2)); branch.setX(branches.getDouble(row, 3)); branch.setB(branches.getDouble(row, 4)); branch.setRateA(branches.getDouble(row, 5)); branch.setRateB(branches.getDouble(row, 6)); branch.setRateC(branches.getDouble(row, 7)); branch.setRatio(branches.getDouble(row, 8)); branch.setPhaseShiftAngle(branches.getDouble(row, 9)); branch.setStatus(branches.getInt(row, 10)); branch.setAngMin(branches.getDouble(row, 11)); branch.setAngMax(branches.getDouble(row, 12)); } model.getBranches().add(branch); } } return model; } }
new_file: matpower/matpower-model/src/main/java/com/powsybl/matpower/model/MatpowerReader.java
/** * Copyright (c) 2020, RTE (http://www.rte-france.com) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.powsybl.matpower.model; import com.google.common.collect.Sets; import us.hebi.matlab.mat.format.Mat5; import us.hebi.matlab.mat.types.MatFile; import us.hebi.matlab.mat.types.Matrix; import us.hebi.matlab.mat.types.Sources; import us.hebi.matlab.mat.types.Struct; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.Objects; import java.util.Set; /** * @author Christian Biasuzzi <[email protected]> */ public final class MatpowerReader { public static final String MATPOWER_STRUCT_NAME = "mpc"; public static final String MATPOWER_SUPPORTED_VERSION = "2"; private MatpowerReader() { } public static MatpowerModel read(Path file, String caseName) throws IOException { return read(Files.newInputStream(file), caseName); } public static MatpowerModel read(InputStream iStream, String caseName) throws IOException { Objects.requireNonNull(iStream); MatpowerModel model = null; try (MatFile mat = Mat5.newReader(Sources.wrapInputStream(iStream)).setEntryFilter(entry -> entry.getName().equals(MATPOWER_STRUCT_NAME)).readMat()) { if (mat.getNumEntries() == 0) { throw new IllegalStateException("no MATPOWER data: expected structure named '" + MATPOWER_STRUCT_NAME + "' not found."); } Struct mpcStruct = mat.getStruct(MATPOWER_STRUCT_NAME); Set<String> mpcNames = Sets.newHashSet("version", "baseMVA", "bus", "gen", "branch"); if (!mpcStruct.getFieldNames().containsAll(mpcNames)) { throw new IllegalStateException("expected MATPOWER variables not found: " + mpcNames); } String version = mpcStruct.get("version").toString().replace("'", ""); if (!version.equals(MATPOWER_SUPPORTED_VERSION)) { throw new IllegalStateException("unsupported MATPOWER version: " + version); } double baseMVA = mpcStruct.getMatrix("baseMVA").getDouble(0); Matrix buses = mpcStruct.getMatrix("bus"); Matrix generators = mpcStruct.getMatrix("gen"); Matrix branches = mpcStruct.getMatrix("branch"); model = new MatpowerModel(caseName); model.setVersion(version); model.setBaseMva(baseMVA); for (int row = 0; row < buses.getDimensions()[0]; row++) { MBus bus = new MBus(); for (int col = 0; col < buses.getDimensions()[1]; col++) { bus.setNumber(buses.getInt(row, 0)); bus.setType(MBus.Type.fromInt(buses.getInt(row, 1))); bus.setRealPowerDemand(buses.getDouble(row, 2)); bus.setReactivePowerDemand(buses.getDouble(row, 3)); bus.setShuntConductance(buses.getDouble(row, 4)); bus.setShuntSusceptance(buses.getDouble(row, 5)); bus.setAreaNumber(buses.getInt(row, 6)); bus.setVoltageMagnitude(buses.getDouble(row, 7)); bus.setVoltageAngle(buses.getDouble(row, 8)); bus.setBaseVoltage(buses.getDouble(row, 9)); bus.setLossZone(buses.getInt(row, 10)); bus.setMaximumVoltageMagnitude(buses.getDouble(row, 11)); bus.setMinimumVoltageMagnitude(buses.getDouble(row, 12)); } model.getBuses().add(bus); } for (int row = 0; row < generators.getDimensions()[0]; row++) { MGen gen = new MGen(); for (int col = 0; col < generators.getDimensions()[1]; col++) { gen.setNumber(generators.getInt(row, 0)); gen.setRealPowerOutput(generators.getDouble(row, 1)); gen.setReactivePowerOutput(generators.getDouble(row, 2)); gen.setMaximumReactivePowerOutput(generators.getDouble(row, 3)); gen.setMinimumReactivePowerOutput(generators.getDouble(row, 4)); 
gen.setVoltageMagnitudeSetpoint(generators.getDouble(row, 5)); gen.setTotalMbase(generators.getDouble(row, 6)); gen.setStatus(generators.getInt(row, 7)); gen.setMaximumRealPowerOutput(generators.getDouble(row, 8)); gen.setMinimumRealPowerOutput(generators.getDouble(row, 9)); gen.setPc1(generators.getDouble(row, 10)); gen.setPc2(generators.getDouble(row, 11)); gen.setQc1Min(generators.getDouble(row, 12)); gen.setQc1Max(generators.getDouble(row, 13)); gen.setQc2Min(generators.getDouble(row, 14)); gen.setQc2Max(generators.getDouble(row, 15)); gen.setRampAgc(generators.getDouble(row, 16)); gen.setRampTenMinutes(generators.getDouble(row, 17)); gen.setRampThirtyMinutes(generators.getDouble(row, 18)); gen.setRampQ(generators.getDouble(row, 19)); gen.setApf(generators.getDouble(row, 20)); } model.getGenerators().add(gen); } for (int row = 0; row < branches.getDimensions()[0]; row++) { MBranch branch = new MBranch(); for (int col = 0; col < branches.getDimensions()[1]; col++) { branch.setFrom(branches.getInt(row, 0)); branch.setTo(branches.getInt(row, 1)); branch.setR(branches.getDouble(row, 2)); branch.setX(branches.getDouble(row, 3)); branch.setB(branches.getDouble(row, 4)); branch.setRateA(branches.getDouble(row, 5)); branch.setRateB(branches.getDouble(row, 6)); branch.setRateC(branches.getDouble(row, 7)); branch.setRatio(branches.getDouble(row, 8)); branch.setPhaseShiftAngle(branches.getDouble(row, 9)); branch.setStatus(branches.getInt(row, 10)); branch.setAngMin(branches.getDouble(row, 11)); branch.setAngMax(branches.getDouble(row, 12)); } model.getBranches().add(branch); } } return model; } }
message: Fix a memory leak in Matpower reader (#1407) Signed-off-by: Mathieu BAGUE <[email protected]>
old_file: matpower/matpower-model/src/main/java/com/powsybl/matpower/model/MatpowerReader.java
subject: Fix a memory leak in Matpower reader (#1407)
<ide><path>atpower/matpower-model/src/main/java/com/powsybl/matpower/model/MatpowerReader.java <ide> } <ide> <ide> public static MatpowerModel read(Path file, String caseName) throws IOException { <del> return read(Files.newInputStream(file), caseName); <add> try (InputStream stream = Files.newInputStream(file)) { <add> return read(stream, caseName); <add> } <ide> } <ide> <ide> public static MatpowerModel read(InputStream iStream, String caseName) throws IOException {
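The change in this record is the classic try-with-resources fix: the Path-based overload used to hand Files.newInputStream(file) straight to the stream-based overload, so the underlying file handle leaked whenever reading threw or the wrapped source was never closed. A minimal standalone sketch of the before/after pattern (class and method names here are illustrative, not part of the record):

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

public final class LeakFixSketch {
    // Leaky: if parse() throws, the stream opened here is never closed.
    static String readLeaky(Path file) throws IOException {
        return parse(Files.newInputStream(file));
    }

    // Fixed: try-with-resources closes the stream on success and on failure.
    static String readFixed(Path file) throws IOException {
        try (InputStream stream = Files.newInputStream(file)) {
            return parse(stream);
        }
    }

    static String parse(InputStream in) throws IOException {
        return new String(in.readAllBytes());
    }
}
```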
lang: Java
license: apache-2.0
commit: bdfe89711c17d13412aef0a4ffd9ea9fbe5cd792
returncode: 0
calvinjia/tachyon,apc999/alluxio,yuluo-ding/alluxio,Reidddddd/alluxio,bf8086/alluxio,PasaLab/tachyon,Alluxio/alluxio,jswudi/alluxio,yuluo-ding/alluxio,madanadit/alluxio,Alluxio/alluxio,riversand963/alluxio,madanadit/alluxio,Reidddddd/alluxio,calvinjia/tachyon,jswudi/alluxio,madanadit/alluxio,madanadit/alluxio,madanadit/alluxio,wwjiang007/alluxio,ShailShah/alluxio,Reidddddd/alluxio,bf8086/alluxio,maobaolong/alluxio,Alluxio/alluxio,uronce-cc/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,madanadit/alluxio,Reidddddd/mo-alluxio,maobaolong/alluxio,riversand963/alluxio,jsimsa/alluxio,ChangerYoung/alluxio,wwjiang007/alluxio,bf8086/alluxio,apc999/alluxio,wwjiang007/alluxio,maobaolong/alluxio,Reidddddd/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,Alluxio/alluxio,riversand963/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,PasaLab/tachyon,WilliamZapata/alluxio,aaudiber/alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,maobaolong/alluxio,maboelhassan/alluxio,riversand963/alluxio,yuluo-ding/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,maboelhassan/alluxio,Alluxio/alluxio,PasaLab/tachyon,WilliamZapata/alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,jswudi/alluxio,jsimsa/alluxio,Alluxio/alluxio,calvinjia/tachyon,yuluo-ding/alluxio,apc999/alluxio,riversand963/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,jswudi/alluxio,ShailShah/alluxio,PasaLab/tachyon,maboelhassan/alluxio,Reidddddd/mo-alluxio,aaudiber/alluxio,jswudi/alluxio,aaudiber/alluxio,ChangerYoung/alluxio,PasaLab/tachyon,maobaolong/alluxio,bf8086/alluxio,ShailShah/alluxio,uronce-cc/alluxio,jswudi/alluxio,aaudiber/alluxio,uronce-cc/alluxio,WilliamZapata/alluxio,PasaLab/tachyon,aaudiber/alluxio,jsimsa/alluxio,yuluo-ding/alluxio,Reidddddd/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,Reidddddd/mo-alluxio,madanadit/alluxio,jsimsa/alluxio,maboelhassan/alluxio,Alluxio/alluxio,ShailShah/alluxio,uronce-cc/alluxio,ChangerYoung/alluxio,wwjiang007/alluxio,bf8086/alluxio,maobaolong/alluxio,Reidddddd/alluxio,PasaLab/tachyon,apc999/alluxio,aaudiber/alluxio,ShailShah/alluxio,bf8086/alluxio,uronce-cc/alluxio,WilliamZapata/alluxio,wwjiang007/alluxio,Alluxio/alluxio,WilliamZapata/alluxio,calvinjia/tachyon,uronce-cc/alluxio,bf8086/alluxio,calvinjia/tachyon,yuluo-ding/alluxio,apc999/alluxio,madanadit/alluxio,Reidddddd/mo-alluxio,jsimsa/alluxio,Alluxio/alluxio,apc999/alluxio,Reidddddd/mo-alluxio,Reidddddd/mo-alluxio,maobaolong/alluxio,maboelhassan/alluxio,ShailShah/alluxio,aaudiber/alluxio,maboelhassan/alluxio,calvinjia/tachyon,jsimsa/alluxio,maobaolong/alluxio,riversand963/alluxio,WilliamZapata/alluxio,calvinjia/tachyon,wwjiang007/alluxio,apc999/alluxio,maboelhassan/alluxio
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the “License”). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.underfs.s3; import alluxio.Constants; import alluxio.util.io.PathUtils; import com.google.common.base.Preconditions; import org.jets3t.service.S3Service; import org.jets3t.service.model.S3Object; import org.jets3t.service.model.StorageObject; import org.jets3t.service.utils.Mimetypes; import org.jets3t.service.utils.MultipartUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.security.DigestOutputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.List; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import javax.annotation.concurrent.NotThreadSafe; /** * A stream for writing a file into S3. The data will be persisted to a temporary directory on the * local disk and copied as a complete file when the {@link #close()} method is called. */ @NotThreadSafe public class S3OutputStream extends OutputStream { private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE); /** Bucket name of the Alluxio S3 bucket. */ private final String mBucketName; /** Key of the file when it is uploaded to S3. */ private final String mKey; /** The local file that will be uploaded when the stream is closed. */ private final File mFile; /** The JetS3t client for S3 operations. */ private final S3Service mClient; /** The output stream to a local file where the file will be buffered until closed. */ private OutputStream mLocalOutputStream; /** The MD5 hash of the file. */ private MessageDigest mHash; /** Flag to indicate this stream has been closed, to ensure close is only done once. */ private AtomicBoolean mClosed = new AtomicBoolean(false); /** * A {@link MultipartUtils} to upload the file to S3 using Multipart Uploads. Multipart Uploads * involves uploading an object's data in parts instead of all at once, which can work around S3's * limit of 5GB on a single Object PUT operation. * * It is recommended (http://docs.aws.amazon.com/AmazonS3/latest/dev/UploadingObjects.html) * to upload file larger than 100MB using Multipart Uploads, we use 512MB here * since it is close to the size of a typical Alluxio file block. */ private static final MultipartUtils MULTIPART_UTIL = new MultipartUtils(Constants.MB * 512); /** * Constructs a new stream for writing a file. 
* * @param bucketName the name of the bucket * @param key the key of the file * @param client the JetS3t client * @throws IOException when a non-Alluxio related error occurs */ public S3OutputStream(String bucketName, String key, S3Service client) throws IOException { Preconditions.checkArgument(bucketName != null && !bucketName.isEmpty(), "Bucket name must " + "not be null or empty."); mBucketName = bucketName; mKey = key; mClient = client; mFile = new File(PathUtils.concatPath("/tmp", UUID.randomUUID())); try { mHash = MessageDigest.getInstance("MD5"); mLocalOutputStream = new BufferedOutputStream(new DigestOutputStream(new FileOutputStream(mFile), mHash)); } catch (NoSuchAlgorithmException e) { LOG.warn("Algorithm not available for MD5 hash.", e); mHash = null; mLocalOutputStream = new BufferedOutputStream(new FileOutputStream(mFile)); } } @Override public void write(int b) throws IOException { mLocalOutputStream.write(b); } @Override public void write(byte[] b) throws IOException { mLocalOutputStream.write(b, 0, b.length); } @Override public void write(byte[] b, int off, int len) throws IOException { mLocalOutputStream.write(b, off, len); } @Override public void flush() throws IOException { mLocalOutputStream.flush(); } @Override public void close() throws IOException { if (mClosed.getAndSet(true)) { return; } mLocalOutputStream.close(); try { S3Object obj = new S3Object(mKey); obj.setBucketName(mBucketName); obj.setDataInputFile(mFile); obj.setContentLength(mFile.length()); obj.setContentEncoding(Mimetypes.MIMETYPE_BINARY_OCTET_STREAM); if (mHash != null) { obj.setMd5Hash(mHash.digest()); } else { LOG.warn("MD5 was not computed for: {}", mKey); } if (MULTIPART_UTIL.isFileLargerThanMaxPartSize(mFile)) { // Big object will be split into parts and uploaded to S3 in parallel. List<StorageObject> objectsToUploadAsMultipart = new ArrayList<>(); objectsToUploadAsMultipart.add(obj); MULTIPART_UTIL.uploadObjects(mBucketName, mClient, objectsToUploadAsMultipart, null); } else { // Avoid uploading file with Multipart if it's not necessary to save the // extra overhead. mClient.putObject(mBucketName, obj); } if (!mFile.delete()) { LOG.error("Failed to delete temporary file @ {}", mFile.getPath()); } } catch (Exception e) { LOG.error("Failed to upload {}. Temporary file @ {}", mKey, mFile.getPath()); throw new IOException(e); } } }
new_file: underfs/s3/src/main/java/alluxio/underfs/s3/S3OutputStream.java
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the “License”). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.underfs.s3; import alluxio.Constants; import alluxio.util.io.PathUtils; import com.google.common.base.Preconditions; import org.jets3t.service.S3Service; import org.jets3t.service.ServiceException; import org.jets3t.service.model.S3Object; import org.jets3t.service.utils.Mimetypes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.security.DigestOutputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import javax.annotation.concurrent.NotThreadSafe; /** * A stream for writing a file into S3. The data will be persisted to a temporary directory on the * local disk and copied as a complete file when the {@link #close()} method is called. */ @NotThreadSafe public class S3OutputStream extends OutputStream { private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE); /** Bucket name of the Alluxio S3 bucket. */ private final String mBucketName; /** Key of the file when it is uploaded to S3. */ private final String mKey; /** The local file that will be uploaded when the stream is closed. */ private final File mFile; /** The JetS3t client for S3 operations. */ private final S3Service mClient; /** The output stream to a local file where the file will be buffered until closed. */ private OutputStream mLocalOutputStream; /** The MD5 hash of the file. */ private MessageDigest mHash; /** Flag to indicate this stream has been closed, to ensure close is only done once. */ private AtomicBoolean mClosed = new AtomicBoolean(false); /** * Constructs a new stream for writing a file. 
* * @param bucketName the name of the bucket * @param key the key of the file * @param client the JetS3t client * @throws IOException when a non-Alluxio related error occurs */ public S3OutputStream(String bucketName, String key, S3Service client) throws IOException { Preconditions.checkArgument(bucketName != null && !bucketName.isEmpty(), "Bucket name must " + "not be null or empty."); mBucketName = bucketName; mKey = key; mClient = client; mFile = new File(PathUtils.concatPath("/tmp", UUID.randomUUID())); try { mHash = MessageDigest.getInstance("MD5"); mLocalOutputStream = new BufferedOutputStream(new DigestOutputStream(new FileOutputStream(mFile), mHash)); } catch (NoSuchAlgorithmException e) { LOG.warn("Algorithm not available for MD5 hash.", e); mHash = null; mLocalOutputStream = new BufferedOutputStream(new FileOutputStream(mFile)); } } @Override public void write(int b) throws IOException { mLocalOutputStream.write(b); } @Override public void write(byte[] b) throws IOException { mLocalOutputStream.write(b, 0, b.length); } @Override public void write(byte[] b, int off, int len) throws IOException { mLocalOutputStream.write(b, off, len); } @Override public void flush() throws IOException { mLocalOutputStream.flush(); } @Override public void close() throws IOException { if (mClosed.getAndSet(true)) { return; } mLocalOutputStream.close(); try { S3Object obj = new S3Object(mKey); obj.setBucketName(mBucketName); obj.setDataInputFile(mFile); obj.setContentLength(mFile.length()); obj.setContentEncoding(Mimetypes.MIMETYPE_BINARY_OCTET_STREAM); if (mHash != null) { obj.setMd5Hash(mHash.digest()); } else { LOG.warn("MD5 was not computed for: {}", mKey); } mClient.putObject(mBucketName, obj); if (!mFile.delete()) { LOG.error("Failed to delete temporary file @ {}", mFile.getPath()); } } catch (ServiceException e) { LOG.error("Failed to upload {}. Temporary file @ {}", mKey, mFile.getPath()); throw new IOException(e); } } }
message: [ALLUXIO-1897] use Multipart to upload large file to S3
old_file: underfs/s3/src/main/java/alluxio/underfs/s3/S3OutputStream.java
subject: [ALLUXIO-1897] use Multipart to upload large file to S3
<ide><path>nderfs/s3/src/main/java/alluxio/underfs/s3/S3OutputStream.java <ide> <ide> import com.google.common.base.Preconditions; <ide> import org.jets3t.service.S3Service; <del>import org.jets3t.service.ServiceException; <ide> import org.jets3t.service.model.S3Object; <add>import org.jets3t.service.model.StorageObject; <ide> import org.jets3t.service.utils.Mimetypes; <add>import org.jets3t.service.utils.MultipartUtils; <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <ide> <ide> import java.security.DigestOutputStream; <ide> import java.security.MessageDigest; <ide> import java.security.NoSuchAlgorithmException; <add>import java.util.ArrayList; <add>import java.util.List; <ide> import java.util.UUID; <ide> import java.util.concurrent.atomic.AtomicBoolean; <ide> <ide> <ide> /** Flag to indicate this stream has been closed, to ensure close is only done once. */ <ide> private AtomicBoolean mClosed = new AtomicBoolean(false); <add> <add> /** <add> * A {@link MultipartUtils} to upload the file to S3 using Multipart Uploads. Multipart Uploads <add> * involves uploading an object's data in parts instead of all at once, which can work around S3's <add> * limit of 5GB on a single Object PUT operation. <add> * <add> * It is recommended (http://docs.aws.amazon.com/AmazonS3/latest/dev/UploadingObjects.html) <add> * to upload file larger than 100MB using Multipart Uploads, we use 512MB here <add> * since it is close to the size of a typical Alluxio file block. <add> */ <add> private static final MultipartUtils MULTIPART_UTIL = new MultipartUtils(Constants.MB * 512); <ide> <ide> /** <ide> * Constructs a new stream for writing a file. <ide> } else { <ide> LOG.warn("MD5 was not computed for: {}", mKey); <ide> } <del> mClient.putObject(mBucketName, obj); <add> if (MULTIPART_UTIL.isFileLargerThanMaxPartSize(mFile)) { <add> // Big object will be split into parts and uploaded to S3 in parallel. <add> List<StorageObject> objectsToUploadAsMultipart = new ArrayList<>(); <add> objectsToUploadAsMultipart.add(obj); <add> MULTIPART_UTIL.uploadObjects(mBucketName, mClient, objectsToUploadAsMultipart, null); <add> } else { <add> // Avoid uploading file with Multipart if it's not necessary to save the <add> // extra overhead. <add> mClient.putObject(mBucketName, obj); <add> } <ide> if (!mFile.delete()) { <ide> LOG.error("Failed to delete temporary file @ {}", mFile.getPath()); <ide> } <del> } catch (ServiceException e) { <add> } catch (Exception e) { <ide> LOG.error("Failed to upload {}. Temporary file @ {}", mKey, mFile.getPath()); <ide> throw new IOException(e); <ide> }
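This record's diff adds a size-based dispatch in close(): a file above the 512 MB part threshold goes through JetS3t's MultipartUtils, which (per the record's own comment) works around S3's 5 GB limit on a single Object PUT, while smaller files keep the cheaper single putObject call. A condensed sketch of just that branch, in a hypothetical class, reusing only the JetS3t calls that appear in the record (MD5 setup and error handling omitted):

```java
import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.jets3t.service.S3Service;
import org.jets3t.service.model.S3Object;
import org.jets3t.service.model.StorageObject;
import org.jets3t.service.utils.MultipartUtils;

final class UploadDispatchSketch {
    // 512 MB max part size, matching the record's MultipartUtils(Constants.MB * 512).
    private static final MultipartUtils MULTIPART_UTIL =
            new MultipartUtils(512L * 1024 * 1024);

    static void upload(S3Service client, String bucket, S3Object obj, File file)
            throws Exception {
        if (MULTIPART_UTIL.isFileLargerThanMaxPartSize(file)) {
            // Large object: split into parts and upload them in parallel.
            List<StorageObject> parts = new ArrayList<>();
            parts.add(obj);
            MULTIPART_UTIL.uploadObjects(bucket, client, parts, null);
        } else {
            // Small object: a single PUT avoids the multipart overhead.
            client.putObject(bucket, obj);
        }
    }
}
```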
lang: Java
license: mit
commit: 3e3b3c6e7eb5e2fbc8ac74c2173f304c3bfb1084
returncode: 0
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
package com.elmakers.mine.bukkit.batch; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Set; import com.elmakers.mine.bukkit.api.block.ModifyType; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.Entity; import org.bukkit.entity.EntityType; import org.bukkit.entity.ExperienceOrb; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.potion.PotionEffectType; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.api.magic.Mage; import com.elmakers.mine.bukkit.api.wand.Wand; import com.elmakers.mine.bukkit.block.AutomatonLevel; import com.elmakers.mine.bukkit.block.MaterialAndData; import com.elmakers.mine.bukkit.spell.BlockSpell; import com.elmakers.mine.bukkit.utility.CompatibilityUtils; import com.elmakers.mine.bukkit.utility.Target; public class SimulateBatch extends SpellBatch { private static BlockFace[] NEIGHBOR_FACES = { BlockFace.NORTH, BlockFace.NORTH_EAST, BlockFace.EAST, BlockFace.SOUTH_EAST, BlockFace.SOUTH, BlockFace.SOUTH_WEST, BlockFace.WEST, BlockFace.NORTH_WEST }; private static BlockFace[] DIAGONAL_FACES = { BlockFace.SOUTH_EAST, BlockFace.NORTH_EAST, BlockFace.SOUTH_WEST, BlockFace.NORTH_WEST }; private static BlockFace[] MAIN_FACES = { BlockFace.EAST, BlockFace.NORTH, BlockFace.SOUTH, BlockFace.WEST }; private static BlockFace[] POWER_FACES = { BlockFace.EAST, BlockFace.WEST, BlockFace.SOUTH, BlockFace.NORTH, BlockFace.DOWN, BlockFace.UP }; private enum SimulationState { INITIALIZING, SCANNING, UPDATING, TARGETING, HEART_UPDATE, REGISTER, DELAY, CLEANUP, FINISHED }; public enum TargetMode { STABILIZE, WANDER, GLIDE, HUNT, FLEE, DIRECTED }; public enum TargetType { PLAYER, MAGE, MOB, AUTOMATON, ANY }; public static Material POWER_MATERIAL = Material.REDSTONE_BLOCK; public static boolean DEBUG = false; private Mage mage; private Block heartBlock; private Block heartTargetBlock; private TargetMode targetMode = TargetMode.STABILIZE; private TargetType targetType = TargetType.PLAYER; private String automataName; private AutomatonLevel level; private String dropItem; private Collection<String> dropItems; private int dropXp; private boolean reverseTargetDistanceScore = false; private boolean concurrent = false; private int commandMoveRangeSquared = 9; private int huntMaxRange = 128; private int castRange = 32; private int huntMinRange = 4; private int birthRangeSquared = 0; private int liveRangeSquared = 0; private float fovWeight = 100; private double huntFov = Math.PI * 1.8; private boolean commandReload; private int delay; private long delayTimeout; private World world; private MaterialAndData birthMaterial; private Material deathMaterial; private boolean isAutomata; private int radius; private int x; private int y; private int z; private int r; private int yRadius; private int updatingIndex; private ArrayList<Boolean> liveCounts = new ArrayList<>(); private ArrayList<Boolean> birthCounts = new ArrayList<>(); private ArrayList<Boolean> diagonalLiveCounts = new ArrayList<>(); private ArrayList<Boolean> diagonalBirthCounts = new ArrayList<>(); private SimulationState state; private Location center; private ModifyType modifyType = ModifyType.NO_PHYSICS; private double reflectChance; private int maxBlocks = 0; private List<Block> deadBlocks = new ArrayList<>(); private 
List<Block> bornBlocks = new ArrayList<>(); private List<Target> potentialHeartBlocks = new LinkedList<>(); public SimulateBatch(BlockSpell spell, Location center, int radius, int yRadius, MaterialAndData birth, Material death, Set<Integer> liveCounts, Set<Integer> birthCounts, String automataName) { super(spell); this.mage = spell.getMage(); this.yRadius = yRadius; this.radius = radius; this.center = center.clone(); this.birthMaterial = birth; this.deathMaterial = death; mapIntegers(liveCounts, this.liveCounts); mapIntegers(birthCounts, this.birthCounts); this.world = center.getWorld(); this.automataName = automataName; this.isAutomata = automataName != null; if (isAutomata) { this.heartBlock = center.getBlock(); } state = SimulationState.INITIALIZING; undoList.setModifyType(modifyType); } @Override public int size() { return radius * radius * radius * 8; } @Override public int remaining() { if (r >= radius) return 0; return (radius - r) * (radius - r) * (radius - r) * 8; } protected void checkForPotentialHeart(Block block, int distanceSquared) { if (isAutomata) { if (distanceSquared <= commandMoveRangeSquared) { // commandMoveRangeSquared is kind of too big, but it doesn't matter all that much // we still look at targets that end up with a score of 0, it just affects the sort ordering. potentialHeartBlocks.add(new Target(center, block, huntMinRange, huntMaxRange, huntFov, fovWeight, reverseTargetDistanceScore)); } } } protected void die() { String message = spell.getMessage("death_broadcast").replace("$name", automataName); if (message.length() > 0) { controller.sendToMages(message, center); } // Kill power block if (heartBlock != null) { if (commandReload) { controller.unregisterAutomata(heartBlock); } registerForUndo(heartBlock); heartBlock.setType(Material.AIR); } // Drop item if (dropItem != null && dropItem.length() > 0) { Wand magicItem = controller.createWand(dropItem); if (magicItem != null) { center.getWorld().dropItemNaturally(center, magicItem.getItem()); } } if (dropItems != null && dropItems.size() > 0) { for (String dropItemName : dropItems) { ItemStack drop = controller.createItem(dropItemName); if (drop != null) { center.getWorld().dropItemNaturally(center, drop); } } } // Drop Xp if (dropXp > 0) { Entity entity = center.getWorld().spawnEntity(center, EntityType.EXPERIENCE_ORB); if (entity != null && entity instanceof ExperienceOrb) { ExperienceOrb orb = (ExperienceOrb)entity; orb.setExperience(dropXp); } } if (level != null) { level.onDeath(mage, birthMaterial); } } @SuppressWarnings("deprecation") protected void killBlock(Block block) { if (concurrent) { registerForUndo(block); if (modifyType == ModifyType.FAST) { CompatibilityUtils.setBlockFast(block, deathMaterial, 0); } else { block.setTypeIdAndData(deathMaterial.getId(), (byte)0, false); } if (reflectChance > 0) { com.elmakers.mine.bukkit.block.UndoList.getRegistry().unregisterReflective(block); } } else { deadBlocks.add(block); } } protected void birthBlock(Block block) { if (concurrent) { registerForUndo(block); birthMaterial.modify(block, modifyType); if (reflectChance > 0) { com.elmakers.mine.bukkit.block.UndoList.getRegistry().registerReflective(block, reflectChance); undoList.setUndoReflective(true); } } else { bornBlocks.add(block); } } protected boolean simulateBlock(int dx, int dy, int dz) { int x = center.getBlockX() + dx; int y = center.getBlockY() + dy; int z = center.getBlockZ() + dz; Block block = world.getBlockAt(x, y, z); if (!block.getChunk().isLoaded()) { return false; } Material blockMaterial = 
block.getType(); if (birthMaterial.is(block)) { int distanceSquared = liveRangeSquared > 0 || isAutomata ? (int)Math.ceil(block.getLocation().distanceSquared(heartBlock.getLocation())) : 0; if (liveRangeSquared <= 0 || distanceSquared <= liveRangeSquared) { if (diagonalLiveCounts.size() > 0) { int faceNeighborCount = getFaceNeighborCount(block, birthMaterial, isAutomata); int diagonalNeighborCount = getDiagonalNeighborCount(block, birthMaterial, isAutomata); if (faceNeighborCount >= liveCounts.size() || !liveCounts.get(faceNeighborCount) || diagonalNeighborCount >= diagonalLiveCounts.size() || !diagonalLiveCounts.get(diagonalNeighborCount)) { killBlock(block); } else { checkForPotentialHeart(block, distanceSquared); } } else { int neighborCount = getNeighborCount(block, birthMaterial, isAutomata); if (neighborCount >= liveCounts.size() || !liveCounts.get(neighborCount)) { killBlock(block); } else { checkForPotentialHeart(block, distanceSquared); } } } else { killBlock(block); } } else if (blockMaterial == deathMaterial) { int distanceSquared = birthRangeSquared > 0 || isAutomata ? (int)Math.ceil(block.getLocation().distanceSquared(heartBlock.getLocation())) : 0; if (birthRangeSquared <= 0 || distanceSquared <= birthRangeSquared) { if (diagonalBirthCounts.size() > 0) { int faceNeighborCount = getFaceNeighborCount(block, birthMaterial, isAutomata); int diagonalNeighborCount = getDiagonalNeighborCount(block, birthMaterial, isAutomata); if (faceNeighborCount < birthCounts.size() && birthCounts.get(faceNeighborCount) && diagonalNeighborCount < diagonalBirthCounts.size() && diagonalBirthCounts.get(diagonalNeighborCount)) { birthBlock(block); checkForPotentialHeart(block, distanceSquared); } } else { int neighborCount = getNeighborCount(block, birthMaterial, isAutomata); if (neighborCount < birthCounts.size() && birthCounts.get(neighborCount)) { birthBlock(block); checkForPotentialHeart(block, distanceSquared); } } } } return true; } protected boolean simulateBlocks(int x, int y, int z) { boolean success = true; if (y != 0) { success = success && simulateBlock(x, -y, z); if (x != 0) success = success && simulateBlock(-x, -y, z); if (z != 0) success = success && simulateBlock(x, -y, -z); if (x != 0 && z != 0) success = success && simulateBlock(-x, -y, -z); } success = success && simulateBlock(x, y, z); if (x != 0) success = success && simulateBlock(-x, y, z); if (z != 0) success = success && simulateBlock(x, y, -z); if (z != 0 && x != 0) success = success && simulateBlock(-x, y, -z); return success; } @Override public int process(int maxBlocks) { int processedBlocks = 0; if (state == SimulationState.INITIALIZING) { // Reset state x = 0; y = 0; z = 0; r = 0; updatingIndex = 0; // Process the casting first, and only if specially configured to do so. if (isAutomata) { // Look for a target target(); // We are going to rely on the block toggling to kick this back to life when the chunk // reloads, so for now just bail and hope the timing works out. if (heartBlock == null || !heartBlock.getChunk().isLoaded()) { finish(); return processedBlocks; } // Check for death since activation (e.g. 
during delay period) if (heartBlock.getType() != POWER_MATERIAL) { if (DEBUG) { controller.getLogger().info("DIED, no Heart at : " + heartBlock); } die(); finish(); return processedBlocks; } // Reset potential new locations potentialHeartBlocks.clear(); } processedBlocks++; state = SimulationState.SCANNING; } while (state == SimulationState.SCANNING && processedBlocks <= maxBlocks) { // Make the heart a normal block so the sim will process it registerForUndo(heartBlock); birthMaterial.modify(heartBlock); if (!simulateBlocks(x, y, z)) { // TODO: Is this the right thing to do? finish(); return processedBlocks; } y++; if (y > yRadius) { y = 0; if (x < radius) { x++; } else { z--; if (z < 0) { r++; z = r; x = 0; } } } if (r > radius) { state = SimulationState.UPDATING; } } while (state == SimulationState.UPDATING && processedBlocks <= maxBlocks) { int deadIndex = updatingIndex; if (deadIndex >= 0 && deadIndex < deadBlocks.size()) { Block killBlock = deadBlocks.get(deadIndex); if (!killBlock.getChunk().isLoaded()) { killBlock.getChunk().load(); return processedBlocks; } if (birthMaterial.is(killBlock)) { registerForUndo(killBlock); killBlock.setType(deathMaterial); } else { // If this block was destroyed while we were processing, // avoid spawning a random birth block. // This tries to make it so automata don't "cheat" when // getting destroyed. A bit hacky though, I'm not about // to re-simulate... if (bornBlocks.size() > 0) { bornBlocks.remove(bornBlocks.size() - 1); } } processedBlocks++; } int bornIndex = updatingIndex - deadBlocks.size(); if (bornIndex >= 0 && bornIndex < bornBlocks.size()) { Block birthBlock = bornBlocks.get(bornIndex); if (!birthBlock.getChunk().isLoaded()) { birthBlock.getChunk().load(); return processedBlocks; } registerForUndo(birthBlock); birthMaterial.modify(birthBlock, modifyType); } updatingIndex++; if (updatingIndex >= deadBlocks.size() + bornBlocks.size()) { state = SimulationState.TARGETING; // Wait at least a tick return maxBlocks; } } // Each of the following states will end in this tick if (state == SimulationState.TARGETING) { if (isAutomata && potentialHeartBlocks.size() > 0) { switch (targetMode) { case HUNT: Collections.sort(potentialHeartBlocks); break; case FLEE: Collections.sort(potentialHeartBlocks); break; default: Collections.shuffle(potentialHeartBlocks); break; } // Find a valid block for the command heartTargetBlock = null; Block backupBlock = null; while (heartTargetBlock == null && potentialHeartBlocks.size() > 0) { Block block = potentialHeartBlocks.remove(0).getBlock(); if (block != null && birthMaterial.is(block)) { heartTargetBlock = block; } } // If we didn't find any powerable blocks, but we did find at least one valid sim block // just use that one. 
if (heartTargetBlock == null) heartTargetBlock = backupBlock; // Search for a power block if (heartTargetBlock == null && DEBUG) { controller.getLogger().info("Could not find a valid command block location"); } } if (DEBUG) { if (heartTargetBlock != null) { controller.getLogger().info("MOVED: " + heartTargetBlock.getLocation().toVector().subtract(center.toVector())); } } state = SimulationState.HEART_UPDATE; } if (state == SimulationState.HEART_UPDATE) { if (isAutomata) { if (heartTargetBlock != null) { if (!heartTargetBlock.getChunk().isLoaded()) { heartTargetBlock.getChunk().load(); return processedBlocks; } if (reflectChance > 0) { com.elmakers.mine.bukkit.block.UndoList.getRegistry().unregisterReflective(heartTargetBlock); } registerForUndo(heartTargetBlock); heartTargetBlock.setType(POWER_MATERIAL); heartBlock = heartTargetBlock; Location newLocation = heartTargetBlock.getLocation(); newLocation.setPitch(center.getPitch()); newLocation.setYaw(center.getYaw()); center = newLocation; mage.setLocation(newLocation); } else { die(); } } state = SimulationState.REGISTER; } if (state == SimulationState.REGISTER) { if (commandReload) { String automataName = this.automataName; if (automataName == null || automataName.length() <= 1) { automataName = controller.getMessages().get("automata.default_name"); } controller.registerAutomata(heartTargetBlock, automataName, "automata.awaken"); } delayTimeout = System.currentTimeMillis() + delay; state = delay > 0 ? SimulationState.DELAY : SimulationState.CLEANUP; } if (state == SimulationState.DELAY) { processedBlocks++; if (heartBlock != null && heartBlock.getType() != POWER_MATERIAL) { if (DEBUG) { controller.getLogger().info("DIED, no Heart at : " + heartBlock); } die(); finish(); } else { if (System.currentTimeMillis() > delayTimeout) { state = SimulationState.CLEANUP; } } return processedBlocks; } if (state == SimulationState.CLEANUP) { if (this.maxBlocks <= 0) { state = SimulationState.FINISHED; } else { boolean undid = false; while (processedBlocks <= maxBlocks && undoList.size() > this.maxBlocks) { if (undoList.undoNext(false) == null) break; undid = true; } // make sure we didn't undo the heart if (undid && heartBlock != null) { registerForUndo(heartBlock); heartBlock.setType(POWER_MATERIAL); } if (undoList.size() <= this.maxBlocks) { state = SimulationState.FINISHED; } } } if (state == SimulationState.FINISHED) { if (isAutomata) { state = SimulationState.INITIALIZING; } else { finish(); } } return processedBlocks; } public void setDrop(String dropName, int dropXp, Collection<String> drops) { this.dropItem = dropName; this.dropXp = dropXp; this.dropItems = drops; } public void setLevel(AutomatonLevel level) { this.level = level; this.commandMoveRangeSquared = level.getMoveRangeSquared(commandMoveRangeSquared); this.dropXp = level.getDropXp(dropXp); this.liveRangeSquared = level.getLiveRangeSquared(liveRangeSquared); this.birthRangeSquared = level.getBirthRangeSquared(birthRangeSquared); this.radius = level.getRadius(radius); this.yRadius = level.getYRadius(yRadius); } public void setBirthRange(int range) { birthRangeSquared = range * range; } public void setLiveRange(int range) { liveRangeSquared = range * range; } public void setMaxHuntRange(int range) { huntMaxRange = range; } public void setCastRange(int range) { castRange = range; } public void setMinHuntRange(int range) { huntMinRange = range; } public void setTargetType(TargetType targetType) { this.targetType = targetType; } public void target() { switch (targetMode) { case FLEE: case 
HUNT: case DIRECTED: Target bestTarget = null; reverseTargetDistanceScore = true; if (targetType == TargetType.ANY || targetType == TargetType.MOB) { List<Entity> entities = CompatibilityUtils.getNearbyEntities(center, huntMaxRange, huntMaxRange, huntMaxRange); for (Entity entity : entities) { // We'll get the players from the Mages list if (entity instanceof Player || !(entity instanceof LivingEntity) || entity.isDead()) continue; if (!entity.getLocation().getWorld().equals(center.getWorld())) continue; LivingEntity li = (LivingEntity)entity; if (li.hasPotionEffect(PotionEffectType.INVISIBILITY)) continue; Target newScore = new Target(center, entity, huntMinRange, huntMaxRange, huntFov, 100, false); int score = newScore.getScore(); if (bestTarget == null || score > bestTarget.getScore()) { bestTarget = newScore; } } } if (targetType == TargetType.MAGE || targetType == TargetType.AUTOMATON || targetType == TargetType.ANY || targetType == TargetType.PLAYER) { Collection<Mage> mages = controller.getMages(); for (Mage mage : mages) { if (mage == this.mage) continue; if (targetType == TargetType.AUTOMATON && mage.getPlayer() != null) continue; if (targetType == TargetType.PLAYER && mage.getPlayer() == null) continue; if (mage.isDead() || !mage.isOnline() || !mage.hasLocation() || mage.isSuperProtected()) continue; if (!mage.getLocation().getWorld().equals(center.getWorld())) continue; LivingEntity li = mage.getLivingEntity(); if (li != null && li.hasPotionEffect(PotionEffectType.INVISIBILITY)) continue; Target newScore = new Target(center, mage, huntMinRange, huntMaxRange, huntFov, 100, false); int score = newScore.getScore(); if (bestTarget == null || score > bestTarget.getScore()) { bestTarget = newScore; } } } if (bestTarget != null) { String targetDescription = bestTarget.getEntity() == null ? "NONE" : ((bestTarget instanceof Player) ? ((Player)bestTarget.getEntity()).getName() : bestTarget.getEntity().getType().name()); if (DEBUG) { controller.getLogger().info(" *Tracking " + targetDescription + " score: " + bestTarget.getScore() + " location: " + center + " -> " + bestTarget.getLocation() + " move " + commandMoveRangeSquared); } Vector direction = null; if (targetMode == TargetMode.DIRECTED) { direction = bestTarget.getLocation().getDirection(); if (DEBUG) { controller.getLogger().info(" *Directed: " + direction); } } else { Location targetLocation = bestTarget.getLocation(); direction = targetLocation.toVector().subtract(center.toVector()); } if (direction != null) { center.setDirection(direction); mage.setLocation(center); } // Check for obstruction // TODO Think about this more.. /* Block block = spell.getInteractBlock(); if (block.getType() != Material.AIR && block.getType() != POWER_MATERIAL && !!birthMaterial.is(block)) { // TODO: Use location.setDirection in 1.7+ center = CompatibilityUtils.setDirection(center, new Vector(0, 1, 0)); } */ if (level != null && center.distanceSquared(bestTarget.getLocation()) < castRange * castRange) { level.onTick(mage, birthMaterial); } // After ticking, re-position for movement. This way spells still fire towards the target. 
if (targetMode == TargetMode.FLEE) { direction = direction.multiply(-1); // Don't Flee upward if (direction.getY() > 0) { direction.setY(-direction.getY()); } } } break; case GLIDE: reverseTargetDistanceScore = true; break; default: reverseTargetDistanceScore = false; } } public void setMoveRange(int commandRadius, boolean reload) { commandReload = reload; commandMoveRangeSquared = commandRadius * commandRadius; } protected int getNeighborCount(Block block, MaterialAndData liveMaterial, boolean includeCommands) { return getDiagonalNeighborCount(block, liveMaterial, includeCommands) + getFaceNeighborCount(block, liveMaterial, includeCommands); } protected int getFaceNeighborCount(Block block, MaterialAndData liveMaterial, boolean includeCommands) { int liveCount = 0; BlockFace[] faces = yRadius > 0 ? POWER_FACES : MAIN_FACES; for (BlockFace face : faces) { if (liveMaterial.is(block.getRelative(face))) { liveCount++; } } return liveCount; } protected int getDiagonalNeighborCount(Block block, MaterialAndData liveMaterial, boolean includeCommands) { int liveCount = 0; for (BlockFace face : DIAGONAL_FACES) { if (liveMaterial.is(block.getRelative(face))) { liveCount++; } } if (yRadius > 0) { Block upBlock = block.getRelative(BlockFace.UP); for (BlockFace face : NEIGHBOR_FACES) { if (liveMaterial.is(upBlock.getRelative(face))) { liveCount++; } } Block downBlock = block.getRelative(BlockFace.DOWN); for (BlockFace face : NEIGHBOR_FACES) { if (liveMaterial.is(downBlock.getRelative(face))) { liveCount++; } } } return liveCount; } public void setConcurrent(boolean concurrent) { this.concurrent = concurrent; } @Override public void finish() { if (isAutomata && !mage.isPlayer()) { controller.forgetMage(mage); } state = SimulationState.FINISHED; super.finish(); } protected void mapIntegers(Collection<Integer> flags, List<Boolean> flagMap) { for (Integer flag : flags) { while (flagMap.size() <= flag) { flagMap.add(false); } flagMap.set(flag, true); } } public void setDiagonalLiveRules(Collection<Integer> rules) { mapIntegers(rules, this.diagonalLiveCounts); } public void setDiagonalBirthRules(Collection<Integer> rules) { mapIntegers(rules, this.diagonalBirthCounts); } public void setReflectChange(double reflectChance) { this.reflectChance = reflectChance; } public void setDelay(int delay) { this.delay = delay; } public void setTargetMode(TargetMode mode) { this.targetMode = mode; } public void setMaxBlocks(int maxBlocks) { this.maxBlocks = maxBlocks; } }
new_file: Magic/src/main/java/com/elmakers/mine/bukkit/batch/SimulateBatch.java
package com.elmakers.mine.bukkit.batch; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Set; import com.elmakers.mine.bukkit.api.block.ModifyType; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.Entity; import org.bukkit.entity.EntityType; import org.bukkit.entity.ExperienceOrb; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.potion.PotionEffectType; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.api.magic.Mage; import com.elmakers.mine.bukkit.api.wand.Wand; import com.elmakers.mine.bukkit.block.AutomatonLevel; import com.elmakers.mine.bukkit.block.MaterialAndData; import com.elmakers.mine.bukkit.spell.BlockSpell; import com.elmakers.mine.bukkit.utility.CompatibilityUtils; import com.elmakers.mine.bukkit.utility.Target; public class SimulateBatch extends SpellBatch { private static BlockFace[] NEIGHBOR_FACES = { BlockFace.NORTH, BlockFace.NORTH_EAST, BlockFace.EAST, BlockFace.SOUTH_EAST, BlockFace.SOUTH, BlockFace.SOUTH_WEST, BlockFace.WEST, BlockFace.NORTH_WEST }; private static BlockFace[] DIAGONAL_FACES = { BlockFace.SOUTH_EAST, BlockFace.NORTH_EAST, BlockFace.SOUTH_WEST, BlockFace.NORTH_WEST }; private static BlockFace[] MAIN_FACES = { BlockFace.EAST, BlockFace.NORTH, BlockFace.SOUTH, BlockFace.WEST }; private static BlockFace[] POWER_FACES = { BlockFace.EAST, BlockFace.WEST, BlockFace.SOUTH, BlockFace.NORTH, BlockFace.DOWN, BlockFace.UP }; private enum SimulationState { INITIALIZING, SCANNING, UPDATING, TARGETING, HEART_UPDATE, REGISTER, DELAY, CLEANUP, FINISHED }; public enum TargetMode { STABILIZE, WANDER, GLIDE, HUNT, FLEE, DIRECTED }; public enum TargetType { PLAYER, MAGE, MOB, AUTOMATON, ANY }; public static Material POWER_MATERIAL = Material.REDSTONE_BLOCK; public static boolean DEBUG = false; private Mage mage; private Block heartBlock; private Block heartTargetBlock; private TargetMode targetMode = TargetMode.STABILIZE; private TargetType targetType = TargetType.PLAYER; private String automataName; private AutomatonLevel level; private String dropItem; private Collection<String> dropItems; private int dropXp; private boolean reverseTargetDistanceScore = false; private boolean concurrent = false; private int commandMoveRangeSquared = 9; private int huntMaxRange = 128; private int castRange = 32; private int huntMinRange = 4; private int birthRangeSquared = 0; private int liveRangeSquared = 0; private float fovWeight = 100; private double huntFov = Math.PI * 1.8; private boolean commandReload; private int delay; private long delayTimeout; private World world; private MaterialAndData birthMaterial; private Material deathMaterial; private boolean isAutomata; private int radius; private int x; private int y; private int z; private int r; private int yRadius; private int updatingIndex; private ArrayList<Boolean> liveCounts = new ArrayList<>(); private ArrayList<Boolean> birthCounts = new ArrayList<>(); private ArrayList<Boolean> diagonalLiveCounts = new ArrayList<>(); private ArrayList<Boolean> diagonalBirthCounts = new ArrayList<>(); private SimulationState state; private Location center; private ModifyType modifyType = ModifyType.NO_PHYSICS; private double reflectChance; private int maxBlocks = 0; private List<Block> deadBlocks = new ArrayList<>(); private 
List<Block> bornBlocks = new ArrayList<>(); private List<Target> potentialHeartBlocks = new LinkedList<>(); public SimulateBatch(BlockSpell spell, Location center, int radius, int yRadius, MaterialAndData birth, Material death, Set<Integer> liveCounts, Set<Integer> birthCounts, String automataName) { super(spell); this.mage = spell.getMage(); this.yRadius = yRadius; this.radius = radius; this.center = center.clone(); this.birthMaterial = birth; this.deathMaterial = death; mapIntegers(liveCounts, this.liveCounts); mapIntegers(birthCounts, this.birthCounts); this.world = center.getWorld(); this.automataName = automataName; this.isAutomata = automataName != null; if (isAutomata) { this.heartBlock = center.getBlock(); } state = SimulationState.INITIALIZING; undoList.setModifyType(modifyType); } @Override public int size() { return radius * radius * radius * 8; } @Override public int remaining() { if (r >= radius) return 0; return (radius - r) * (radius - r) * (radius - r) * 8; } protected void checkForPotentialHeart(Block block, int distanceSquared) { if (isAutomata) { if (distanceSquared <= commandMoveRangeSquared) { // commandMoveRangeSquared is kind of too big, but it doesn't matter all that much // we still look at targets that end up with a score of 0, it just affects the sort ordering. potentialHeartBlocks.add(new Target(center, block, huntMinRange, huntMaxRange, huntFov, fovWeight, reverseTargetDistanceScore)); } } } protected void die() { String message = spell.getMessage("death_broadcast").replace("$name", automataName); if (message.length() > 0) { controller.sendToMages(message, center); } // Kill power block if (heartBlock != null) { if (commandReload) { controller.unregisterAutomata(heartBlock); } registerForUndo(heartBlock); heartBlock.setType(Material.AIR); } // Drop item if (dropItem != null && dropItem.length() > 0) { Wand magicItem = controller.createWand(dropItem); if (magicItem != null) { center.getWorld().dropItemNaturally(center, magicItem.getItem()); } } if (dropItems != null && dropItems.size() > 0) { for (String dropItemName : dropItems) { ItemStack drop = controller.createItem(dropItemName); if (drop != null) { center.getWorld().dropItemNaturally(center, drop); } } } // Drop Xp if (dropXp > 0) { Entity entity = center.getWorld().spawnEntity(center, EntityType.EXPERIENCE_ORB); if (entity != null && entity instanceof ExperienceOrb) { ExperienceOrb orb = (ExperienceOrb)entity; orb.setExperience(dropXp); } } if (level != null) { level.onDeath(mage, birthMaterial); } } @SuppressWarnings("deprecation") protected void killBlock(Block block) { if (concurrent) { registerForUndo(block); if (modifyType == ModifyType.FAST) { CompatibilityUtils.setBlockFast(block, deathMaterial, 0); } else { block.setTypeIdAndData(deathMaterial.getId(), (byte)0, false); } if (reflectChance > 0) { com.elmakers.mine.bukkit.block.UndoList.getRegistry().unregisterReflective(block); } } else { deadBlocks.add(block); } } protected void birthBlock(Block block) { if (concurrent) { registerForUndo(block); birthMaterial.modify(block, modifyType); if (reflectChance > 0) { com.elmakers.mine.bukkit.block.UndoList.getRegistry().registerReflective(block, reflectChance); undoList.setUndoReflective(true); } } else { bornBlocks.add(block); } } protected boolean simulateBlock(int dx, int dy, int dz) { int x = center.getBlockX() + dx; int y = center.getBlockY() + dy; int z = center.getBlockZ() + dz; Block block = world.getBlockAt(x, y, z); if (!block.getChunk().isLoaded()) { return false; } Material blockMaterial = 
block.getType(); if (birthMaterial.is(block)) { int distanceSquared = liveRangeSquared > 0 || isAutomata ? (int)Math.ceil(block.getLocation().distanceSquared(heartBlock.getLocation())) : 0; if (liveRangeSquared <= 0 || distanceSquared <= liveRangeSquared) { if (diagonalLiveCounts.size() > 0) { int faceNeighborCount = getFaceNeighborCount(block, birthMaterial, isAutomata); int diagonalNeighborCount = getDiagonalNeighborCount(block, birthMaterial, isAutomata); if (faceNeighborCount >= liveCounts.size() || !liveCounts.get(faceNeighborCount) || diagonalNeighborCount >= diagonalLiveCounts.size() || !diagonalLiveCounts.get(diagonalNeighborCount)) { killBlock(block); } else { checkForPotentialHeart(block, distanceSquared); } } else { int neighborCount = getNeighborCount(block, birthMaterial, isAutomata); if (neighborCount >= liveCounts.size() || !liveCounts.get(neighborCount)) { killBlock(block); } else { checkForPotentialHeart(block, distanceSquared); } } } else { killBlock(block); } } else if (blockMaterial == deathMaterial) { int distanceSquared = birthRangeSquared > 0 || isAutomata ? (int)Math.ceil(block.getLocation().distanceSquared(heartBlock.getLocation())) : 0; if (birthRangeSquared <= 0 || distanceSquared <= birthRangeSquared) { if (diagonalBirthCounts.size() > 0) { int faceNeighborCount = getFaceNeighborCount(block, birthMaterial, isAutomata); int diagonalNeighborCount = getDiagonalNeighborCount(block, birthMaterial, isAutomata); if (faceNeighborCount < birthCounts.size() && birthCounts.get(faceNeighborCount) && diagonalNeighborCount < diagonalBirthCounts.size() && diagonalBirthCounts.get(diagonalNeighborCount)) { birthBlock(block); checkForPotentialHeart(block, distanceSquared); } } else { int neighborCount = getNeighborCount(block, birthMaterial, isAutomata); if (neighborCount < birthCounts.size() && birthCounts.get(neighborCount)) { birthBlock(block); checkForPotentialHeart(block, distanceSquared); } } } } return true; } protected boolean simulateBlocks(int x, int y, int z) { boolean success = true; if (y != 0) { success = success && simulateBlock(x, -y, z); if (x != 0) success = success && simulateBlock(-x, -y, z); if (z != 0) success = success && simulateBlock(x, -y, -z); if (x != 0 && z != 0) success = success && simulateBlock(-x, -y, -z); } success = success && simulateBlock(x, y, z); if (x != 0) success = success && simulateBlock(-x, y, z); if (z != 0) success = success && simulateBlock(x, y, -z); if (z != 0 && x != 0) success = success && simulateBlock(-x, y, -z); return success; } @Override public int process(int maxBlocks) { int processedBlocks = 0; if (state == SimulationState.INITIALIZING) { // Reset state x = 0; y = 0; z = 0; r = 0; updatingIndex = 0; // Process the casting first, and only if specially configured to do so. if (isAutomata) { // Look for a target target(); // We are going to rely on the block toggling to kick this back to life when the chunk // reloads, so for now just bail and hope the timing works out. if (heartBlock == null || !heartBlock.getChunk().isLoaded()) { finish(); return processedBlocks; } // Check for death since activation (e.g. 
during delay period) if (heartBlock.getType() != POWER_MATERIAL) { if (DEBUG) { controller.getLogger().info("DIED, no Heart at : " + heartBlock); } die(); finish(); return processedBlocks; } // Reset potential new locations potentialHeartBlocks.clear(); } processedBlocks++; state = SimulationState.SCANNING; } while (state == SimulationState.SCANNING && processedBlocks <= maxBlocks) { // Make the heart a normal block so the sim will process it registerForUndo(heartBlock); birthMaterial.modify(heartBlock); if (!simulateBlocks(x, y, z)) { // TODO: Is this the right thing to do? finish(); return processedBlocks; } y++; if (y > yRadius) { y = 0; if (x < radius) { x++; } else { z--; if (z < 0) { r++; z = r; x = 0; } } } if (r > radius) { state = SimulationState.UPDATING; } } while (state == SimulationState.UPDATING && processedBlocks <= maxBlocks) { int deadIndex = updatingIndex; if (deadIndex >= 0 && deadIndex < deadBlocks.size()) { Block killBlock = deadBlocks.get(deadIndex); if (!killBlock.getChunk().isLoaded()) { killBlock.getChunk().load(); return processedBlocks; } if (birthMaterial.is(killBlock)) { registerForUndo(killBlock); killBlock.setType(deathMaterial); } else { // If this block was destroyed while we were processing, // avoid spawning a random birth block. // This tries to make it so automata don't "cheat" when // getting destroyed. A bit hacky though, I'm not about // to re-simulate... if (bornBlocks.size() > 0) { bornBlocks.remove(bornBlocks.size() - 1); } } processedBlocks++; } int bornIndex = updatingIndex - deadBlocks.size(); if (bornIndex >= 0 && bornIndex < bornBlocks.size()) { Block birthBlock = bornBlocks.get(bornIndex); if (!birthBlock.getChunk().isLoaded()) { birthBlock.getChunk().load(); return processedBlocks; } registerForUndo(birthBlock); birthMaterial.modify(birthBlock, modifyType); } updatingIndex++; if (updatingIndex >= deadBlocks.size() + bornBlocks.size()) { state = SimulationState.TARGETING; // Wait at least a tick return maxBlocks; } } // Each of the following states will end in this tick if (state == SimulationState.TARGETING) { if (isAutomata && potentialHeartBlocks.size() > 0) { switch (targetMode) { case HUNT: Collections.sort(potentialHeartBlocks); break; case FLEE: Collections.sort(potentialHeartBlocks); break; default: Collections.shuffle(potentialHeartBlocks); break; } // Find a valid block for the command heartTargetBlock = null; Block backupBlock = null; while (heartTargetBlock == null && potentialHeartBlocks.size() > 0) { Block block = potentialHeartBlocks.remove(0).getBlock(); if (block != null && birthMaterial.is(block)) { heartTargetBlock = block; } } // If we didn't find any powerable blocks, but we did find at least one valid sim block // just use that one. 
if (heartTargetBlock == null) heartTargetBlock = backupBlock; // Search for a power block if (heartTargetBlock == null && DEBUG) { controller.getLogger().info("Could not find a valid command block location"); } } if (DEBUG) { if (heartTargetBlock != null) { controller.getLogger().info("MOVED: " + heartTargetBlock.getLocation().toVector().subtract(center.toVector())); } } state = SimulationState.HEART_UPDATE; return processedBlocks; } if (state == SimulationState.HEART_UPDATE) { if (isAutomata) { if (heartTargetBlock != null) { if (!heartTargetBlock.getChunk().isLoaded()) { heartTargetBlock.getChunk().load(); return processedBlocks; } if (reflectChance > 0) { com.elmakers.mine.bukkit.block.UndoList.getRegistry().unregisterReflective(heartTargetBlock); } registerForUndo(heartTargetBlock); heartTargetBlock.setType(POWER_MATERIAL); heartBlock = heartTargetBlock; Location newLocation = heartTargetBlock.getLocation(); newLocation.setPitch(center.getPitch()); newLocation.setYaw(center.getYaw()); center = newLocation; mage.setLocation(newLocation); } else { die(); } } state = SimulationState.REGISTER; return processedBlocks; } if (state == SimulationState.REGISTER) { if (commandReload) { String automataName = this.automataName; if (automataName == null || automataName.length() <= 1) { automataName = controller.getMessages().get("automata.default_name"); } controller.registerAutomata(heartTargetBlock, automataName, "automata.awaken"); } delayTimeout = System.currentTimeMillis() + delay; state = delay > 0 ? SimulationState.DELAY : SimulationState.CLEANUP; return processedBlocks; } if (state == SimulationState.DELAY) { processedBlocks++; if (heartBlock != null && heartBlock.getType() != POWER_MATERIAL) { if (DEBUG) { controller.getLogger().info("DIED, no Heart at : " + heartBlock); } die(); finish(); } else { if (System.currentTimeMillis() > delayTimeout) { state = SimulationState.CLEANUP; } } return processedBlocks; } if (state == SimulationState.CLEANUP) { boolean undid = false; while (processedBlocks <= maxBlocks && undoList.size() > this.maxBlocks) { if (undoList.undoNext(false) == null) break; undid = true; } // make sure we didn't undo the heart if (undid && heartBlock != null) { registerForUndo(heartBlock); heartBlock.setType(POWER_MATERIAL); } if (undoList.size() <= this.maxBlocks) { state = SimulationState.FINISHED; } } if (state == SimulationState.FINISHED) { if (isAutomata) { state = SimulationState.INITIALIZING; } else { finish(); } } return processedBlocks; } public void setDrop(String dropName, int dropXp, Collection<String> drops) { this.dropItem = dropName; this.dropXp = dropXp; this.dropItems = drops; } public void setLevel(AutomatonLevel level) { this.level = level; this.commandMoveRangeSquared = level.getMoveRangeSquared(commandMoveRangeSquared); this.dropXp = level.getDropXp(dropXp); this.liveRangeSquared = level.getLiveRangeSquared(liveRangeSquared); this.birthRangeSquared = level.getBirthRangeSquared(birthRangeSquared); this.radius = level.getRadius(radius); this.yRadius = level.getYRadius(yRadius); } public void setBirthRange(int range) { birthRangeSquared = range * range; } public void setLiveRange(int range) { liveRangeSquared = range * range; } public void setMaxHuntRange(int range) { huntMaxRange = range; } public void setCastRange(int range) { castRange = range; } public void setMinHuntRange(int range) { huntMinRange = range; } public void setTargetType(TargetType targetType) { this.targetType = targetType; } public void target() { switch (targetMode) { case FLEE: case 
HUNT: case DIRECTED: Target bestTarget = null; reverseTargetDistanceScore = true; if (targetType == TargetType.ANY || targetType == TargetType.MOB) { List<Entity> entities = CompatibilityUtils.getNearbyEntities(center, huntMaxRange, huntMaxRange, huntMaxRange); for (Entity entity : entities) { // We'll get the players from the Mages list if (entity instanceof Player || !(entity instanceof LivingEntity) || entity.isDead()) continue; if (!entity.getLocation().getWorld().equals(center.getWorld())) continue; LivingEntity li = (LivingEntity)entity; if (li.hasPotionEffect(PotionEffectType.INVISIBILITY)) continue; Target newScore = new Target(center, entity, huntMinRange, huntMaxRange, huntFov, 100, false); int score = newScore.getScore(); if (bestTarget == null || score > bestTarget.getScore()) { bestTarget = newScore; } } } if (targetType == TargetType.MAGE || targetType == TargetType.AUTOMATON || targetType == TargetType.ANY || targetType == TargetType.PLAYER) { Collection<Mage> mages = controller.getMages(); for (Mage mage : mages) { if (mage == this.mage) continue; if (targetType == TargetType.AUTOMATON && mage.getPlayer() != null) continue; if (targetType == TargetType.PLAYER && mage.getPlayer() == null) continue; if (mage.isDead() || !mage.isOnline() || !mage.hasLocation() || mage.isSuperProtected()) continue; if (!mage.getLocation().getWorld().equals(center.getWorld())) continue; LivingEntity li = mage.getLivingEntity(); if (li != null && li.hasPotionEffect(PotionEffectType.INVISIBILITY)) continue; Target newScore = new Target(center, mage, huntMinRange, huntMaxRange, huntFov, 100, false); int score = newScore.getScore(); if (bestTarget == null || score > bestTarget.getScore()) { bestTarget = newScore; } } } if (bestTarget != null) { String targetDescription = bestTarget.getEntity() == null ? "NONE" : ((bestTarget instanceof Player) ? ((Player)bestTarget.getEntity()).getName() : bestTarget.getEntity().getType().name()); if (DEBUG) { controller.getLogger().info(" *Tracking " + targetDescription + " score: " + bestTarget.getScore() + " location: " + center + " -> " + bestTarget.getLocation() + " move " + commandMoveRangeSquared); } Vector direction = null; if (targetMode == TargetMode.DIRECTED) { direction = bestTarget.getLocation().getDirection(); if (DEBUG) { controller.getLogger().info(" *Directed: " + direction); } } else { Location targetLocation = bestTarget.getLocation(); direction = targetLocation.toVector().subtract(center.toVector()); } if (direction != null) { center.setDirection(direction); mage.setLocation(center); } // Check for obstruction // TODO Think about this more.. /* Block block = spell.getInteractBlock(); if (block.getType() != Material.AIR && block.getType() != POWER_MATERIAL && !!birthMaterial.is(block)) { // TODO: Use location.setDirection in 1.7+ center = CompatibilityUtils.setDirection(center, new Vector(0, 1, 0)); } */ if (level != null && center.distanceSquared(bestTarget.getLocation()) < castRange * castRange) { level.onTick(mage, birthMaterial); } // After ticking, re-position for movement. This way spells still fire towards the target. 
if (targetMode == TargetMode.FLEE) { direction = direction.multiply(-1); // Don't Flee upward if (direction.getY() > 0) { direction.setY(-direction.getY()); } } } break; case GLIDE: reverseTargetDistanceScore = true; break; default: reverseTargetDistanceScore = false; } } public void setMoveRange(int commandRadius, boolean reload) { commandReload = reload; commandMoveRangeSquared = commandRadius * commandRadius; } protected int getNeighborCount(Block block, MaterialAndData liveMaterial, boolean includeCommands) { return getDiagonalNeighborCount(block, liveMaterial, includeCommands) + getFaceNeighborCount(block, liveMaterial, includeCommands); } protected int getFaceNeighborCount(Block block, MaterialAndData liveMaterial, boolean includeCommands) { int liveCount = 0; BlockFace[] faces = yRadius > 0 ? POWER_FACES : MAIN_FACES; for (BlockFace face : faces) { if (liveMaterial.is(block.getRelative(face))) { liveCount++; } } return liveCount; } protected int getDiagonalNeighborCount(Block block, MaterialAndData liveMaterial, boolean includeCommands) { int liveCount = 0; for (BlockFace face : DIAGONAL_FACES) { if (liveMaterial.is(block.getRelative(face))) { liveCount++; } } if (yRadius > 0) { Block upBlock = block.getRelative(BlockFace.UP); for (BlockFace face : NEIGHBOR_FACES) { if (liveMaterial.is(upBlock.getRelative(face))) { liveCount++; } } Block downBlock = block.getRelative(BlockFace.DOWN); for (BlockFace face : NEIGHBOR_FACES) { if (liveMaterial.is(downBlock.getRelative(face))) { liveCount++; } } } return liveCount; } public void setConcurrent(boolean concurrent) { this.concurrent = concurrent; } @Override public void finish() { if (isAutomata && !mage.isPlayer()) { controller.forgetMage(mage); } state = SimulationState.FINISHED; super.finish(); } protected void mapIntegers(Collection<Integer> flags, List<Boolean> flagMap) { for (Integer flag : flags) { while (flagMap.size() <= flag) { flagMap.add(false); } flagMap.set(flag, true); } } public void setDiagonalLiveRules(Collection<Integer> rules) { mapIntegers(rules, this.diagonalLiveCounts); } public void setDiagonalBirthRules(Collection<Integer> rules) { mapIntegers(rules, this.diagonalBirthCounts); } public void setReflectChange(double reflectChance) { this.reflectChance = reflectChance; } public void setDelay(int delay) { this.delay = delay; } public void setTargetMode(TargetMode mode) { this.targetMode = mode; } public void setMaxBlocks(int maxBlocks) { this.maxBlocks = maxBlocks; } }
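For reference, the kill/birth decisions in simulateBlock above reduce to a Conway-style rule-table lookup: mapIntegers expands rule integers such as live={2,3} and birth={3} into boolean tables indexed by neighbor count, and a cell survives or is born only if its count is flagged. A minimal, self-contained sketch of that lookup (class and method names here are hypothetical, not part of the plugin):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

// Hypothetical stand-alone sketch of the rule tables built by mapIntegers.
public class RuleTableSketch {
    private final List<Boolean> liveCounts = new ArrayList<Boolean>();
    private final List<Boolean> birthCounts = new ArrayList<Boolean>();

    public RuleTableSketch(Collection<Integer> live, Collection<Integer> birth) {
        map(live, liveCounts);
        map(birth, birthCounts);
    }

    // Same idea as SimulateBatch.mapIntegers: grow the table with false,
    // then flag each rule count as true.
    private static void map(Collection<Integer> flags, List<Boolean> table) {
        for (Integer flag : flags) {
            while (table.size() <= flag) {
                table.add(false);
            }
            table.set(flag, true);
        }
    }

    // A live cell survives only if its neighbor count is flagged;
    // this is the inverse of the kill test in simulateBlock.
    public boolean survives(int neighbors) {
        return neighbors < liveCounts.size() && liveCounts.get(neighbors);
    }

    // A dead cell is born only if its neighbor count is flagged.
    public boolean born(int neighbors) {
        return neighbors < birthCounts.size() && birthCounts.get(neighbors);
    }

    public static void main(String[] args) {
        RuleTableSketch conway = new RuleTableSketch(Arrays.asList(2, 3), Arrays.asList(3));
        System.out.println(conway.survives(2)); // true
        System.out.println(conway.survives(4)); // false
        System.out.println(conway.born(3));     // true
    }
}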
Fix Hunters getting stuck
Magic/src/main/java/com/elmakers/mine/bukkit/batch/SimulateBatch.java
<ide><path>Magic/src/main/java/com/elmakers/mine/bukkit/batch/SimulateBatch.java <ide> } <ide> } <ide> state = SimulationState.HEART_UPDATE; <del> return processedBlocks; <ide> } <ide> <ide> if (state == SimulationState.HEART_UPDATE) { <ide> } <ide> } <ide> state = SimulationState.REGISTER; <del> return processedBlocks; <ide> } <ide> <ide> if (state == SimulationState.REGISTER) { <ide> } <ide> delayTimeout = System.currentTimeMillis() + delay; <ide> state = delay > 0 ? SimulationState.DELAY : SimulationState.CLEANUP; <del> return processedBlocks; <ide> } <ide> <ide> if (state == SimulationState.DELAY) { <ide> } <ide> <ide> if (state == SimulationState.CLEANUP) { <del> boolean undid = false; <del> while (processedBlocks <= maxBlocks && undoList.size() > this.maxBlocks) { <del> if (undoList.undoNext(false) == null) break; <del> undid = true; <del> } <del> // make sure we didn't undo the heart <del> if (undid && heartBlock != null) { <del> registerForUndo(heartBlock); <del> heartBlock.setType(POWER_MATERIAL); <del> } <del> if (undoList.size() <= this.maxBlocks) { <add> if (this.maxBlocks <= 0) { <ide> state = SimulationState.FINISHED; <add> } else { <add> boolean undid = false; <add> while (processedBlocks <= maxBlocks && undoList.size() > this.maxBlocks) { <add> if (undoList.undoNext(false) == null) break; <add> undid = true; <add> } <add> // make sure we didn't undo the heart <add> if (undid && heartBlock != null) { <add> registerForUndo(heartBlock); <add> heartBlock.setType(POWER_MATERIAL); <add> } <add> if (undoList.size() <= this.maxBlocks) { <add> state = SimulationState.FINISHED; <add> } <ide> } <ide> } <ide>
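The diff above makes two changes behind the "Fix Hunters getting stuck" message: it deletes the early return statements after the transitions into HEART_UPDATE, REGISTER, and DELAY/CLEANUP so those lightweight states complete within a single process() call, and it treats a non-positive maxBlocks cap in CLEANUP as "nothing to trim" rather than letting the undo loop run against a zero threshold. A minimal sketch (hypothetical names, modelling each process() call as one loop iteration) of why the early returns cost an extra tick per state:

// Hypothetical model: each loop iteration stands in for one process() tick.
public class StateFallThroughSketch {
    enum State { TARGETING, HEART_UPDATE, REGISTER, DONE }

    // Early-return style: one transition per tick, three ticks to finish.
    static int ticksWithEarlyReturn() {
        State state = State.TARGETING;
        int ticks = 0;
        while (state != State.DONE) {
            ticks++;
            if (state == State.TARGETING) { state = State.HEART_UPDATE; continue; }
            if (state == State.HEART_UPDATE) { state = State.REGISTER; continue; }
            if (state == State.REGISTER) { state = State.DONE; }
        }
        return ticks;
    }

    // Fall-through style: all three transitions complete in the same tick.
    static int ticksWithFallThrough() {
        State state = State.TARGETING;
        int ticks = 0;
        while (state != State.DONE) {
            ticks++;
            if (state == State.TARGETING) { state = State.HEART_UPDATE; }
            if (state == State.HEART_UPDATE) { state = State.REGISTER; }
            if (state == State.REGISTER) { state = State.DONE; }
        }
        return ticks;
    }

    public static void main(String[] args) {
        System.out.println(ticksWithEarlyReturn()); // 3
        System.out.println(ticksWithFallThrough()); // 1
    }
}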
Java
apache-2.0
1c2171fe090fe15e997d50d93a8c6688b6f0f55a
0
dreedyman/Rio,dreedyman/Rio,dreedyman/Rio
/* * Copyright to the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.rioproject.jsb; import net.jini.admin.Administrable; import net.jini.id.Uuid; import net.jini.io.MarshalledInstance; import org.rioproject.admin.ServiceBeanControl; import org.rioproject.core.jsb.DiscardManager; import org.rioproject.core.jsb.ServiceBeanManager; import org.rioproject.core.jsb.ServiceBeanManagerException; import org.rioproject.core.jsb.ServiceElementChangeListener; import org.rioproject.deploy.ServiceBeanInstance; import org.rioproject.deploy.ServiceProvisionListener; import org.rioproject.opstring.OperationalStringException; import org.rioproject.opstring.OperationalStringManager; import org.rioproject.opstring.ServiceBeanConfig; import org.rioproject.opstring.ServiceElement; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.management.NotificationBroadcasterSupport; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.List; /** * Implement ServiceBeanManager support * * @see org.rioproject.core.jsb.ServiceBeanManager * * @author Dennis Reedy */ @SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes") public class JSBManager implements ServiceBeanManager { private static final String COMPONENT="org.rioproject.jsb"; private static Logger logger = LoggerFactory.getLogger(COMPONENT); private DiscardManager discardManager; private OperationalStringManager opStringManager; private ServiceElement sElem; /** ServiceID */ private Uuid serviceID; /** Cybernode Uuid */ private Uuid cybernodeUuid; /** The marshalledInstance */ private MarshalledInstance marshalledInstance; /** The host address the service bean was instantiated on */ private String hostAddress; /** List of listeners */ private final List<ServiceElementChangeListener> listenerList = new ArrayList<ServiceElementChangeListener>(); /** Flag to indicate we are in the process of updating the * ServiceElement. 
This will avoid unnecessary ServiceBeanConfig updates * if DiscoveryManagement attributes have changed */ boolean updating = false; private final NotificationBroadcasterSupport notificationBroadcasterSupport = new NotificationBroadcasterSupport(); /** * Create a JSBManager * * @param sElem The ServiceElement * @param hostAddress The host address the service bean was instantiated on * @param cybernodeUuid The Uuuid of the Cybernode * * @throws IllegalArgumentException if the sElem or hostAddress parameters * are null */ public JSBManager(final ServiceElement sElem, final String hostAddress, final Uuid cybernodeUuid) { this(sElem, null, hostAddress, cybernodeUuid); } /** * Create a JSBManager * * @param sElem The ServiceElement * @param opStringManager The OperationalStringManager * @param hostAddress The host address the service bean was instantiated on * @param cybernodeUuid The Uuid of the Cybernode * * @throws IllegalArgumentException if the sElem or hostAddress parameters are null */ public JSBManager(final ServiceElement sElem, final OperationalStringManager opStringManager, final String hostAddress, final Uuid cybernodeUuid) { super(); if(sElem==null) throw new IllegalArgumentException("sElem is null"); if(hostAddress==null) throw new IllegalArgumentException("hostAddress is null"); if(cybernodeUuid==null) throw new IllegalArgumentException("cybernodeUuid is null"); this.sElem = sElem; this.opStringManager = opStringManager; this.hostAddress = hostAddress; this.cybernodeUuid = cybernodeUuid; } /** * Set the ServiceElement for the ServiceBean * * @param newElem The ServiceElement for the ServiceBean */ public void setServiceElement(final ServiceElement newElem) { if(newElem==null) throw new IllegalArgumentException("sElem is null"); ServiceElement preElem = sElem; this.sElem = newElem; try { updating = true; stateChange(preElem, sElem); } finally { updating = false; } } /** * Set the ServiceID for the ServiceBean * * @param serviceID The Service Identifier for the ServiceBean */ public void setServiceID(final Uuid serviceID) { if(serviceID==null) throw new IllegalArgumentException("serviceID is null"); this.serviceID = serviceID; } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getServiceID */ public Uuid getServiceID() { return(serviceID); } /** * Set the Object that can be used to communicate to the ServiceBean * * @param mi The MarshalledInstance containing the proxy that can be used * to communicate to the ServiceBean */ public void setMarshalledInstance(final MarshalledInstance mi) { this.marshalledInstance = mi; } /** * Set the DiscardManager for the ServiceBean * * @param discardManager The DiscardManager for the ServiceBean */ public void setDiscardManager(final DiscardManager discardManager) { this.discardManager = discardManager; } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getDiscardManager */ public DiscardManager getDiscardManager() { return(discardManager); } /** * Set the OperationalStringManager * * @param opStringManager The OperationalStringManager */ public void setOperationalStringManager(final OperationalStringManager opStringManager) { this.opStringManager = opStringManager; } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getOperationalStringManager */ public OperationalStringManager getOperationalStringManager() { return(opStringManager); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#update */ public void update(final ServiceBeanConfig sbConfig) throws ServiceBeanManagerException { if(sbConfig==null) throw new 
IllegalArgumentException("ServiceBeanConfig is null"); if(sElem==null) { logger.warn("No ServiceElement to update ServiceBeanConfig"); return; } if(updating) { if(logger.isTraceEnabled()) logger.trace("Updating ServiceElement, ServiceBeanConfig update ignored"); return; } ServiceElement preElem = ServiceElementUtil.copyServiceElement(sElem); Long instanceID = sElem.getServiceBeanConfig().getInstanceID(); sElem.setServiceBeanConfig(sbConfig); if(instanceID!=null) sElem = ServiceElementUtil.prepareInstanceID(sElem, false, instanceID); else logger.warn("No instanceID for [{}] to update", sElem.getName()); if(opStringManager==null) { logger.warn("No OperationalStringManager to update ServiceBeanConfig"); return; } try { opStringManager.update(getServiceBeanInstance()); } catch (OperationalStringException e) { throw new ServiceBeanManagerException("Unable to update ServiceBeanConfig", e); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, " + "unable to update ServiceBeanConfig", e); } stateChange(preElem, sElem); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#increment * * @throws ServiceBeanManagerException if the increment fails for any reason */ public void increment() throws ServiceBeanManagerException { increment(null); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#increment */ public void increment(final ServiceProvisionListener listener) throws ServiceBeanManagerException { if(opStringManager==null) { throw new ServiceBeanManagerException("No OperationalStringManager to increment service"); } try { opStringManager.increment(sElem, false, listener); } catch (OperationalStringException e) { throw new ServiceBeanManagerException("Unable to increment", e); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, unable to increment", e); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#decrement */ public void decrement(final boolean destroy) throws ServiceBeanManagerException { if(opStringManager==null) { throw new ServiceBeanManagerException("No OperationalStringManager to decrement service"); } try { opStringManager.decrement(getServiceBeanInstance(), false, destroy); } catch (OperationalStringException e) { e.printStackTrace(); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, unable to decrement", e); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#relocate */ public void relocate(final ServiceProvisionListener listener, final Uuid uuid) throws ServiceBeanManagerException { if(opStringManager==null) { throw new ServiceBeanManagerException("No OperationalStringManager to relocate service"); } if(sElem.getProvisionType()!= ServiceElement.ProvisionType.DYNAMIC) throw new ServiceBeanManagerException("Relocation only available for DYNAMIC services"); try { opStringManager.relocate(getServiceBeanInstance(), listener, uuid); } catch (OperationalStringException e) { throw new ServiceBeanManagerException("Unable to relocate ServiceBeanConfig", e); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, unable to relocate", e); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getServiceBeanInstance */ public ServiceBeanInstance getServiceBeanInstance() { return(new ServiceBeanInstance(serviceID, marshalledInstance, sElem.getServiceBeanConfig(), hostAddress, cybernodeUuid)); } /** * @see 
org.rioproject.core.jsb.ServiceBeanManager#getServiceBeanControl() */ public ServiceBeanControl getServiceBeanControl() throws ServiceBeanManagerException { if(marshalledInstance==null) throw new ServiceBeanManagerException("Unable to obtain ServiceBeanControl, there is no marshalled proxy instance"); ServiceBeanControl serviceBeanControl; try { Object proxy = marshalledInstance.get(false); if(proxy instanceof Administrable) { Object adminObject = ((Administrable)proxy).getAdmin(); if(adminObject instanceof ServiceBeanControl) { serviceBeanControl = (ServiceBeanControl)proxy; } else { throw new ServiceBeanManagerException(String.format("Service is not an instanceof %s", ServiceBeanControl.class.getName())); } } else { throw new ServiceBeanManagerException(String.format("%s is derivable from %s, however, the service proxy does not implement %s", ServiceBeanControl.class.getName(), Administrable.class.getName(), Administrable.class.getName())); } } catch (Exception e) { throw new ServiceBeanManagerException("Unable to obtain ServiceBeanControl", e); } return serviceBeanControl; } /** * Notify all registered ServiceElementChangeListener instances. * * @param preElem The old ServiceElement * @param postElem An updated ServiceElement */ void stateChange(final ServiceElement preElem, final ServiceElement postElem) { notifyListeners(preElem, postElem); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#addListener */ public void addListener(final ServiceElementChangeListener l) { if(l == null) { throw new IllegalArgumentException("can't add null listener"); } synchronized(listenerList) { listenerList.add(l); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#removeListener */ public void removeListener(ServiceElementChangeListener l) { if(l==null) return; synchronized(listenerList) { if(listenerList.contains(l)) listenerList.remove(l); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getNotificationBroadcasterSupport() */ public NotificationBroadcasterSupport getNotificationBroadcasterSupport() { return notificationBroadcasterSupport; } /* * Notify all registered listeners of the ServiceElement change */ private synchronized void notifyListeners(final ServiceElement preElem, final ServiceElement postElem) { ServiceElementChangeListener[] listeners; synchronized(listenerList) { listeners = listenerList.toArray(new ServiceElementChangeListener[listenerList.size()]); } for (ServiceElementChangeListener listener : listeners) listener.changed(preElem, postElem); } }
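notifyListeners above uses the classic listener-snapshot idiom: the list is copied into an array while holding its lock, and the callbacks run outside the lock, so a slow or re-entrant listener can neither deadlock the manager nor trigger a ConcurrentModificationException. A minimal standalone sketch of the idiom (Runnable stands in for ServiceElementChangeListener):

import java.util.ArrayList;
import java.util.List;

// Minimal sketch of the copy-then-notify pattern in JSBManager.notifyListeners.
public class ListenerSnapshotSketch {
    private final List<Runnable> listeners = new ArrayList<Runnable>();

    public void addListener(Runnable l) {
        synchronized (listeners) {
            listeners.add(l);
        }
    }

    public void fire() {
        Runnable[] snapshot;
        synchronized (listeners) {
            // Copy under the lock...
            snapshot = listeners.toArray(new Runnable[listeners.size()]);
        }
        // ...notify outside it, so callbacks cannot block other registrations.
        for (Runnable l : snapshot) {
            l.run();
        }
    }

    public static void main(String[] args) {
        ListenerSnapshotSketch s = new ListenerSnapshotSketch();
        s.addListener(new Runnable() {
            public void run() { System.out.println("changed"); }
        });
        s.fire();
    }
}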
rio-lib/src/main/java/org/rioproject/jsb/JSBManager.java
/* * Copyright to the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.rioproject.jsb; import net.jini.admin.Administrable; import net.jini.id.Uuid; import net.jini.io.MarshalledInstance; import org.rioproject.admin.ServiceBeanControl; import org.rioproject.core.jsb.DiscardManager; import org.rioproject.core.jsb.ServiceBeanManager; import org.rioproject.core.jsb.ServiceBeanManagerException; import org.rioproject.core.jsb.ServiceElementChangeListener; import org.rioproject.deploy.ServiceBeanInstance; import org.rioproject.deploy.ServiceProvisionListener; import org.rioproject.opstring.OperationalStringException; import org.rioproject.opstring.OperationalStringManager; import org.rioproject.opstring.ServiceBeanConfig; import org.rioproject.opstring.ServiceElement; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.management.NotificationBroadcasterSupport; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.List; /** * Implement ServiceBeanManager support * * @see org.rioproject.core.jsb.ServiceBeanManager * * @author Dennis Reedy */ @SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes") public class JSBManager implements ServiceBeanManager { private static final String COMPONENT="org.rioproject.jsb"; private static Logger logger = LoggerFactory.getLogger(COMPONENT); private DiscardManager discardManager; private OperationalStringManager opStringManager; private ServiceElement sElem; /** ServiceID */ private Uuid serviceID; /** Cybernode Uuid */ private Uuid cybernodeUuid; /** The marshalledInstance */ private MarshalledInstance marshalledInstance; /** The host address the service bean was instantiated on */ private String hostAddress; /** List of listeners */ private final List<ServiceElementChangeListener> listenerList = new ArrayList<ServiceElementChangeListener>(); /** Flag to indicate we are in the process of updating the * ServiceElement. 
This will avoid unnecessary ServiceBeanConfig updates * if DiscoveryManagement attributes have changed */ boolean updating = false; private final NotificationBroadcasterSupport notificationBroadcasterSupport = new NotificationBroadcasterSupport(); /** * Create a JSBManager * * @param sElem The ServiceElement * @param hostAddress The host address the service bean was instantiated on * @param cybernodeUuid The Uuuid of the Cybernode * * @throws IllegalArgumentException if the sElem or hostAddress parameters * are null */ public JSBManager(final ServiceElement sElem, final String hostAddress, final Uuid cybernodeUuid) { this(sElem, null, hostAddress, cybernodeUuid); } /** * Create a JSBManager * * @param sElem The ServiceElement * @param opStringManager The OperationalStringManager * @param hostAddress The host address the service bean was instantiated on * @param cybernodeUuid The Uuuid of the Cybernode * * @throws IllegalArgumentException if the sElem or hostAddress parameters are null */ public JSBManager(final ServiceElement sElem, final OperationalStringManager opStringManager, final String hostAddress, final Uuid cybernodeUuid) { super(); if(sElem==null) throw new IllegalArgumentException("sElem is null"); if(hostAddress==null) throw new IllegalArgumentException("hostAddress is null"); if(cybernodeUuid==null) throw new IllegalArgumentException("cybernodeUuid is null"); this.sElem = sElem; this.opStringManager = opStringManager; this.hostAddress = hostAddress; this.cybernodeUuid = cybernodeUuid; } /** * Set the ServiceElement for the ServiceBean * * @param newElem The ServiceElement for the ServiceBean */ public void setServiceElement(final ServiceElement newElem) { if(newElem==null) throw new IllegalArgumentException("sElem is null"); ServiceElement preElem = sElem; this.sElem = newElem; try { updating = true; stateChange(preElem, sElem); } finally { updating = false; } } /** * Set the ServiceID for the ServiceBean * * @param serviceID The Service Identifier for the ServiceBean */ public void setServiceID(final Uuid serviceID) { if(serviceID==null) throw new IllegalArgumentException("serviceID is null"); this.serviceID = serviceID; } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getServiceID */ public Uuid getServiceID() { return(serviceID); } /** * Set the Object that can be used to communicate to the ServiceBean * * @param mi The MarshalledInstance containing the proxy that can be used * to communicate to the ServiceBean */ public void setMarshalledInstance(final MarshalledInstance mi) { this.marshalledInstance = mi; } /** * Set the DiscardManager for the ServiceBean * * @param discardManager The DiscardManager for the ServiceBean */ public void setDiscardManager(final DiscardManager discardManager) { this.discardManager = discardManager; } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getDiscardManager */ public DiscardManager getDiscardManager() { return(discardManager); } /** * Set the OperationalStringManager * * @param opStringManager The OperationalStringManager */ public void setOperationalStringManager(final OperationalStringManager opStringManager) { this.opStringManager = opStringManager; } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getOperationalStringManager */ public OperationalStringManager getOperationalStringManager() { return(opStringManager); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#update */ public void update(final ServiceBeanConfig sbConfig) throws ServiceBeanManagerException { if(sbConfig==null) throw new 
IllegalArgumentException("ServiceBeanConfig is null"); if(sElem==null) { logger.warn("No ServiceElement to update ServiceBeanConfig"); return; } if(updating) { if(logger.isTraceEnabled()) logger.trace("Updating ServiceElement, ServiceBeanConfig update ignored"); return; } ServiceElement preElem = ServiceElementUtil.copyServiceElement(sElem); Long instanceID = sElem.getServiceBeanConfig().getInstanceID(); sElem.setServiceBeanConfig(sbConfig); if(instanceID!=null) sElem = ServiceElementUtil.prepareInstanceID(sElem, false, instanceID); else logger.warn("No instanceID for [{}] to update", sElem.getName()); if(opStringManager==null) { logger.warn("No OperationalStringManager to update ServiceBeanConfig"); return; } try { opStringManager.update(getServiceBeanInstance()); } catch (OperationalStringException e) { throw new ServiceBeanManagerException("Unable to update ServiceBeanConfig", e); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, " + "unable to update ServiceBeanConfig", e); } stateChange(preElem, sElem); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#increment * * @throws ServiceBeanManagerException if the increment fails for any reason */ public void increment() throws ServiceBeanManagerException { increment(null); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#increment */ public void increment(final ServiceProvisionListener listener) throws ServiceBeanManagerException { if(opStringManager==null) { throw new ServiceBeanManagerException("No OperationalStringManager to increment service"); } try { opStringManager.increment(sElem, false, listener); } catch (OperationalStringException e) { throw new ServiceBeanManagerException("Unable to increment", e); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, unable to increment", e); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#decrement */ public void decrement(final boolean destroy) throws ServiceBeanManagerException { if(opStringManager==null) { throw new ServiceBeanManagerException("No OperationalStringManager to decrement service"); } try { opStringManager.decrement(getServiceBeanInstance(), false, destroy); } catch (OperationalStringException e) { e.printStackTrace(); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, unable to decrement", e); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#relocate */ public void relocate(final ServiceProvisionListener listener, final Uuid uuid) throws ServiceBeanManagerException { if(opStringManager==null) { throw new ServiceBeanManagerException("No OperationalStringManager to relocate service"); } if(sElem.getProvisionType()!= ServiceElement.ProvisionType.DYNAMIC) throw new ServiceBeanManagerException("Relocation only available for DYNAMIC services"); try { opStringManager.relocate(getServiceBeanInstance(), listener, uuid); } catch (OperationalStringException e) { throw new ServiceBeanManagerException("Unable to relocate ServiceBeanConfig", e); } catch (RemoteException e) { throw new ServiceBeanManagerException("Problem communicating to OperationalStringManager, unable to relocate", e); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getServiceBeanInstance */ public ServiceBeanInstance getServiceBeanInstance() { return(new ServiceBeanInstance(serviceID, marshalledInstance, sElem.getServiceBeanConfig(), hostAddress, cybernodeUuid)); } /** * @see 
org.rioproject.core.jsb.ServiceBeanManager#getServiceBeanControl() */ public ServiceBeanControl getServiceBeanControl() throws ServiceBeanManagerException { if(marshalledInstance==null) throw new ServiceBeanManagerException("Unable to obtain ServiceBeanControl, there is no marshalled proxy instance"); ServiceBeanControl serviceBeanControl; try { Object proxy = marshalledInstance.get(false); if(proxy instanceof Administrable) { Object adminObject = ((Administrable)proxy).getAdmin(); if(adminObject instanceof ServiceBeanControl) { serviceBeanControl = (ServiceBeanControl)proxy; } else { throw new ServiceBeanManagerException(String.format("Service is not an instanceof %s", ServiceBeanControl.class.getName())); } } else { throw new ServiceBeanManagerException(String.format("%s is derivable from %s, however, the service proxy does not implement %s", ServiceBeanControl.class.getName(), Administrable.class.getName(), Administrable.class.getName())); } } catch (Exception e) { throw new ServiceBeanManagerException("Unable to obtain ServiceBeanControl", e); } return serviceBeanControl; } /** * Notify all registered ServiceElementChangeListener instances. * * @param preElem The old ServiceElement * @param postElem An updated ServiceElement */ void stateChange(final ServiceElement preElem, final ServiceElement postElem) { notifyListeners(preElem, postElem); } /** * @see org.rioproject.core.jsb.ServiceBeanManager#addListener */ public void addListener(final ServiceElementChangeListener l) { if(l == null) { throw new IllegalArgumentException("can't add null listener"); } synchronized(listenerList) { listenerList.add(l); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#removeListener */ public void removeListener(ServiceElementChangeListener l) { if(l==null) return; synchronized(listenerList) { if(listenerList.contains(l)) listenerList.remove(l); } } /** * @see org.rioproject.core.jsb.ServiceBeanManager#getNotificationBroadcasterSupport() */ public NotificationBroadcasterSupport getNotificationBroadcasterSupport() { return notificationBroadcasterSupport; } /* * Notify all registered listeners of the ServiceElement change */ private synchronized void notifyListeners(final ServiceElement preElem, final ServiceElement postElem) { ServiceElementChangeListener[] listeners; synchronized(listenerList) { listeners = listenerList.toArray(new ServiceElementChangeListener[listenerList.size()]); } for (ServiceElementChangeListener listener : listeners) listener.changed(preElem, postElem); } }
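One detail worth noting in getServiceBeanControl, present in both versions above: after checking adminObject instanceof ServiceBeanControl, the code casts and returns proxy rather than adminObject. In the usual Jini Administrable pattern it is the admin object returned by getAdmin() that carries the control interface, as in this sketch (AdminView is a hypothetical stand-in for ServiceBeanControl, and the cast shown here is an assumption about the intent, not a confirmed patch):

import net.jini.admin.Administrable;

// Sketch of the conventional admin-object lookup; AdminView is hypothetical.
public class AdminLookupSketch {
    interface AdminView { }

    static AdminView lookupAdmin(Object proxy) {
        if (!(proxy instanceof Administrable)) {
            throw new IllegalArgumentException("proxy is not Administrable");
        }
        try {
            Object admin = ((Administrable) proxy).getAdmin();
            if (admin instanceof AdminView) {
                return (AdminView) admin; // cast the admin object, not the proxy
            }
            throw new IllegalStateException("admin object is not an AdminView");
        } catch (java.rmi.RemoteException e) {
            throw new IllegalStateException("unable to reach admin object", e);
        }
    }
}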
Javadoc fix
rio-lib/src/main/java/org/rioproject/jsb/JSBManager.java
<ide><path>rio-lib/src/main/java/org/rioproject/jsb/JSBManager.java <ide> * @param sElem The ServiceElement <ide> * @param opStringManager The OperationalStringManager <ide> * @param hostAddress The host address the service bean was instantiated on <del> * @param cybernodeUuid The Uuuid of the Cybernode <add> * @param cybernodeUuid The Uuid of the Cybernode <ide> * <ide> * @throws IllegalArgumentException if the sElem or hostAddress parameters are null <ide> */
Java
apache-2.0
bac90c67b41145d9d916175cd0a2aec8585a7ed2
0
CommuteStream/cs-android-sdk,CommuteStream/cs-android-sdk,CommuteStream/cs-android-sdk,CommuteStream/android-sdk
package com.commutestream.ads; import java.util.Date; import java.util.Timer; import org.json.JSONException; import org.json.JSONObject; import com.commutestream.ads.http.JsonHttpResponseHandler; import com.commutestream.ads.http.RequestParams; import android.app.Application; import android.location.Location; import android.util.Log; //This application extension is where we store things that //should persist for the life of the application. i.e. The //CommuteStream class may get destroyed and re-instantiated //if it's within a view that gets destroyed and re-created public class CommuteStream extends Application { private static boolean initialized = false; private static String ad_unit_uuid; private static String banner_height; private static String banner_width; private static String sdk_name = "com.commutestreamsdk"; private static String sdk_ver = "0.1.1"; private static String app_name; private static String app_ver; private static String lat; private static String lon; private static String acc; private static String fix_time; private static Location currentBestLocation; private static String api_url = "https://api.commutestream.com:3000/"; private static String agency_interest = ""; private static String aid_sha; private static String aid_md5; private static Boolean testing = false; private static RequestParams http_params = new RequestParams(); private static Date lastServerRequestTime = new Date(); private static Date lastParameterChange = new Date(); private static Timer parameterCheckTimer = new Timer(); private static ParameterUpdateCheckTimer parameterCheckTimerTask = new ParameterUpdateCheckTimer( CommuteStream.lastServerRequestTime) { @Override public void run() { Log.v("CS_SDK", "TIMER FIRED"); if (CommuteStream.isInitialized() && (CommuteStream.lastParameterChange.getTime() > CommuteStream.lastServerRequestTime .getTime())) { Log.v("CS_SDK", "Updating the server."); CommuteStream.http_params.put("skip_fetch", "true"); RestClient.get("banner", CommuteStream.http_params, new JsonHttpResponseHandler() { @Override public void onSuccess(JSONObject response) { CommuteStream.reportSuccessfulGet(); // CommuteStream.lastServerRequestTime // = // CommuteStream.lastParameterChange; try { if (response.has("error")) { String error = response .getString("error"); Log.e("CS_SDK", "Error from banner server: " + error); } } catch (JSONException e) { // TODO Auto-generated catch // block e.printStackTrace(); } } @Override public void onFailure(Throwable e, JSONObject errorResponse) { Log.v("CS_SDK", "UPDATE FAILED"); } }); } } }; public static void init() { Log.v("CS_SDK", "init()"); // Every few seconds we should check to see if the parameters have been // updated since the last request to the server. 
If so we should send // the new parameters to ensure the server has the latest user info try { CommuteStream.parameterCheckTimer.scheduleAtFixedRate(CommuteStream.parameterCheckTimerTask, 20000, 20000); Log.v("CS_SDK", "Timer (Re)started"); } catch(Exception e) { Log.v("CS_SDK", "Already Initialized"); } } public static String getApp_name() { return CommuteStream.app_name; } public static void setApp_name(String app_name) { CommuteStream.app_name = app_name; CommuteStream.http_params.put("app_name", app_name); } public static String getApp_ver() { return CommuteStream.app_ver; } public static void setApp_ver(String app_ver) { CommuteStream.app_ver = app_ver; http_params.put("app_ver", app_ver); } public static String getSdk_name() { return CommuteStream.sdk_name; } public static void setSdk_name(String sdk_name) { CommuteStream.sdk_name = sdk_name; } public static String getSdk_ver() { return CommuteStream.sdk_ver; } public static void setSdk_ver(String sdk_ver) { CommuteStream.sdk_ver = sdk_ver; CommuteStream.http_params.put("sdk_ver", sdk_ver); } public static String getApi_url() { return CommuteStream.api_url; } public static void setApi_url(String api_url) { CommuteStream.api_url = api_url; } public static void setTheme(String theme) { http_params.put("theme", theme); } public static String getAid_sha() { return CommuteStream.aid_sha; } public static void setAid_sha(String aid_sha) { CommuteStream.aid_sha = aid_sha; CommuteStream.http_params.put("aid_sha", aid_sha); } public static String getAid_md5() { return CommuteStream.aid_md5; } public static void setAid_md5(String aid_md5) { CommuteStream.aid_md5 = aid_md5; CommuteStream.http_params.put("aid_md5", aid_md5); } public static String getBanner_height() { return CommuteStream.banner_height; } public static void setBanner_height(String banner_height) { CommuteStream.banner_height = banner_height; CommuteStream.http_params.put("banner_height", banner_height); } public static String getBanner_width() { return CommuteStream.banner_width; } public static void setBanner_width(String banner_width) { CommuteStream.banner_width = banner_width; CommuteStream.http_params.put("banner_width", banner_width); } public static boolean isInitialized() { return CommuteStream.initialized; } public static void setInitialized(boolean initialized) { CommuteStream.initialized = initialized; } public static String getAd_unit_uuid() { return CommuteStream.ad_unit_uuid; } public static void setAd_unit_uuid(String ad_unit_uuid) { CommuteStream.ad_unit_uuid = ad_unit_uuid; CommuteStream.http_params.put("ad_unit_uuid", ad_unit_uuid); } public static void setSkip_fetch(String skip_fetch) { CommuteStream.http_params.put("skip_fetch", skip_fetch); } public static RequestParams getHttp_params() { return CommuteStream.http_params; } // App Interface // This should be called by the app whenever tracking times for a given // route are displayed to a user public static void trackingDisplayed(String agency_id, String route_id, String stop_id) { setAgency_interest("TRACKING_DISPLAYED", agency_id, route_id, stop_id); } public static void alertDisplayed(String agency_id, String route_id, String stop_id) { setAgency_interest("ALERT_DISPLAYED", agency_id, route_id, stop_id); } public static void mapDisplayed(String agency_id, String route_id, String stop_id) { setAgency_interest("MAP_DISPLAYED", agency_id, route_id, stop_id); } public static void favoriteAdded(String agency_id, String route_id, String stop_id) { setAgency_interest("FAVORITE_ADDED", agency_id, route_id, 
stop_id); } public static void tripPlanningPointA(String agency_id, String route_id, String stop_id) { setAgency_interest("TRIP_PLANNING_POINT_A", agency_id, route_id, stop_id); } public static void tripPlanningPointB(String agency_id, String route_id, String stop_id) { setAgency_interest("TRIP_PLANNING_POINT_B", agency_id, route_id, stop_id); } public static void setLocation(Location location) { // We check that the new location is a better one before sending it if (isBetterLocation(location, CommuteStream.currentBestLocation)) { CommuteStream.currentBestLocation = location; CommuteStream.lat = Double.toString(location.getLatitude()); CommuteStream.lon = Double.toString(location.getLongitude()); CommuteStream.acc = Double.toString(location.getAccuracy()); CommuteStream.fix_time = Long.toString(location.getTime()); CommuteStream.http_params.put("lat", CommuteStream.lat); CommuteStream.http_params.put("lon", CommuteStream.lon); CommuteStream.http_params.put("acc", CommuteStream.acc); CommuteStream.http_params.put("fix_time", CommuteStream.fix_time); CommuteStream.parameterChange(); } } public static Boolean getTesting() { return CommuteStream.testing; } public static void setTesting() { CommuteStream.testing = true; CommuteStream.http_params.put("testing", "true"); Log.v("CS_SDK", "Testing Mode Set"); } private static void setAgency_interest(String type, String agency_id, String route_id, String stop_id) { // ad a comma if needed if (CommuteStream.agency_interest.length() > 0) { CommuteStream.agency_interest += ','; } CommuteStream.agency_interest += type + "," + agency_id + "," + route_id + "," + stop_id; CommuteStream.http_params.put("agency_interest", CommuteStream.agency_interest); CommuteStream.parameterChange(); } public static void reportSuccessfulGet() { CommuteStream.lastServerRequestTime = new Date(); // clear parameters that should only be sent once CommuteStream.http_params.remove("lat"); CommuteStream.http_params.remove("lon"); CommuteStream.http_params.remove("acc"); CommuteStream.http_params.remove("fix_time"); CommuteStream.http_params.remove("agency_interest"); CommuteStream.agency_interest = ""; } public static void parameterChange() { CommuteStream.lastParameterChange = new Date(); } public static void parametersSent() { CommuteStream.lastParameterChange = new Date(); } // Location helper stuff private static final int TWO_MINUTES = 1000 * 60 * 2; /** * Determines whether one Location reading is better than the current * Location fix * * @param location * The new Location that you want to evaluate * @param currentBestLocation * The current Location fix, to which you want to compare the new * one */ protected static boolean isBetterLocation(Location location, Location currentBestLocation) { if (currentBestLocation == null) { // A new location is always better than no location return true; } // Check whether the new location fix is newer or older long timeDelta = location.getTime() - currentBestLocation.getTime(); boolean isSignificantlyNewer = timeDelta > TWO_MINUTES; boolean isSignificantlyOlder = timeDelta < -TWO_MINUTES; boolean isNewer = timeDelta > 0; // If it's been more than two minutes since the current location, use // the new location // because the user has likely moved if (isSignificantlyNewer) { return true; // If the new location is more than two minutes older, it must be // worse } else if (isSignificantlyOlder) { return false; } // Check whether the new location fix is more or less accurate int accuracyDelta = (int) (location.getAccuracy() - 
currentBestLocation .getAccuracy()); boolean isLessAccurate = accuracyDelta > 0; boolean isMoreAccurate = accuracyDelta < 0; boolean isSignificantlyLessAccurate = accuracyDelta > 200; // Check if the old and new location are from the same provider boolean isFromSameProvider = isSameProvider(location.getProvider(), currentBestLocation.getProvider()); // Determine location quality using a combination of timeliness and // accuracy if (isMoreAccurate) { return true; } else if (isNewer && !isLessAccurate) { return true; } else if (isNewer && !isSignificantlyLessAccurate && isFromSameProvider) { return true; } return false; } /** Checks whether two providers are the same */ private static boolean isSameProvider(String provider1, String provider2) { if (provider1 == null) { return provider2 == null; } return provider1.equals(provider2); } }
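isBetterLocation above follows a widely used Android location-filtering recipe: accept significantly newer fixes, reject significantly older ones, then trade recency against accuracy with a 200 m "significantly less accurate" threshold and a same-provider check. A plain-Java restatement that runs off-device (the Fix record below is a stand-in for android.location.Location; it drops the original's int cast on the accuracy delta, which does not change the comparisons here):

// Hypothetical off-device restatement of the isBetterLocation heuristic.
public class BetterLocationSketch {
    static final long TWO_MINUTES = 1000L * 60 * 2;

    record Fix(long timeMillis, float accuracyMeters, String provider) { }

    static boolean isBetter(Fix candidate, Fix current) {
        if (current == null) return true;                  // anything beats nothing
        long timeDelta = candidate.timeMillis() - current.timeMillis();
        if (timeDelta > TWO_MINUTES) return true;          // much newer: user likely moved
        if (timeDelta < -TWO_MINUTES) return false;        // much older: discard
        float accuracyDelta = candidate.accuracyMeters() - current.accuracyMeters();
        boolean newer = timeDelta > 0;
        if (accuracyDelta < 0) return true;                // strictly more accurate
        if (newer && accuracyDelta == 0) return true;      // newer and no less accurate
        boolean sameProvider = candidate.provider() == null
                ? current.provider() == null
                : candidate.provider().equals(current.provider());
        return newer && accuracyDelta <= 200 && sameProvider;
    }

    public static void main(String[] args) {
        Fix old = new Fix(0, 50f, "gps");
        Fix fresh = new Fix(30_000, 80f, "gps");  // newer, slightly less accurate
        System.out.println(isBetter(fresh, old)); // true: same provider, within 200 m
    }
}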
src/com/commutestream/ads/CommuteStream.java
package com.commutestream.ads; import java.util.Date; import java.util.Timer; import org.json.JSONException; import org.json.JSONObject; import com.commutestream.ads.http.JsonHttpResponseHandler; import com.commutestream.ads.http.RequestParams; import android.app.Application; import android.location.Location; import android.util.Log; //This application extension is where we store things that //should persist for the life of the application. i.e. The //CommuteStream class may get destroyed and re-instantiated //if it's within a view that gets destroyed and re-created public class CommuteStream extends Application { private static boolean initialized = false; private static String ad_unit_uuid; private static String banner_height; private static String banner_width; private static String sdk_name = "com.commutestreamsdk"; private static String sdk_ver = "0.1.0"; private static String app_name; private static String app_ver; private static String lat; private static String lon; private static String acc; private static String fix_time; private static Location currentBestLocation; private static String api_url = "https://api.commutestream.com:3000/"; private static String agency_interest = ""; private static String aid_sha; private static String aid_md5; private static Boolean testing = false; private static RequestParams http_params = new RequestParams(); private static Date lastServerRequestTime = new Date(); private static Date lastParameterChange = new Date(); private static Timer parameterCheckTimer = new Timer(); public static void init() { Log.v("CS_SDK", "init()"); // Every few seconds we should check to see if the parameters have been // updated since the last request to the server. If so we should send // the new parameters to ensure the server has the latest user info CommuteStream.parameterCheckTimer.scheduleAtFixedRate( new ParameterUpdateCheckTimer( CommuteStream.lastServerRequestTime) { @Override public void run() { Log.v("CS_SDK", "TIMER FIRED"); if (CommuteStream.isInitialized() && (CommuteStream.lastParameterChange.getTime() > CommuteStream.lastServerRequestTime .getTime())) { Log.v("CS_SDK", "Updating the server."); CommuteStream.http_params.put("skip_fetch", "true"); RestClient.get("banner", CommuteStream.http_params, new JsonHttpResponseHandler() { @Override public void onSuccess( JSONObject response) { CommuteStream.reportSuccessfulGet(); // CommuteStream.lastServerRequestTime // = // CommuteStream.lastParameterChange; try { if (response.has("error")) { String error = response .getString("error"); Log.e("CS_SDK", "Error from banner server: " + error); } } catch (JSONException e) { // TODO Auto-generated catch // block e.printStackTrace(); } } @Override public void onFailure(Throwable e, JSONObject errorResponse) { Log.v("CS_SDK", "UPDATE FAILED"); } }); } } }, 20000, 20000); } public static String getApp_name() { return CommuteStream.app_name; } public static void setApp_name(String app_name) { CommuteStream.app_name = app_name; CommuteStream.http_params.put("app_name", app_name); } public static String getApp_ver() { return CommuteStream.app_ver; } public static void setApp_ver(String app_ver) { CommuteStream.app_ver = app_ver; http_params.put("app_ver", app_ver); } public static String getSdk_name() { return CommuteStream.sdk_name; } public static void setSdk_name(String sdk_name) { CommuteStream.sdk_name = sdk_name; } public static String getSdk_ver() { return CommuteStream.sdk_ver; } public static void setSdk_ver(String sdk_ver) { CommuteStream.sdk_ver = sdk_ver; 
CommuteStream.http_params.put("sdk_ver", sdk_ver); } public static String getApi_url() { return CommuteStream.api_url; } public static void setApi_url(String api_url) { CommuteStream.api_url = api_url; } public static String getAid_sha() { return CommuteStream.aid_sha; } public static void setAid_sha(String aid_sha) { CommuteStream.aid_sha = aid_sha; CommuteStream.http_params.put("aid_sha", aid_sha); } public static String getAid_md5() { return CommuteStream.aid_md5; } public static void setAid_md5(String aid_md5) { CommuteStream.aid_md5 = aid_md5; CommuteStream.http_params.put("aid_md5", aid_md5); } public static String getBanner_height() { return CommuteStream.banner_height; } public static void setBanner_height(String banner_height) { CommuteStream.banner_height = banner_height; CommuteStream.http_params.put("banner_height", banner_height); } public static String getBanner_width() { return CommuteStream.banner_width; } public static void setBanner_width(String banner_width) { CommuteStream.banner_width = banner_width; CommuteStream.http_params.put("banner_width", banner_width); } public static boolean isInitialized() { return CommuteStream.initialized; } public static void setInitialized(boolean initialized) { CommuteStream.initialized = initialized; } public static String getAd_unit_uuid() { return CommuteStream.ad_unit_uuid; } public static void setAd_unit_uuid(String ad_unit_uuid) { CommuteStream.ad_unit_uuid = ad_unit_uuid; CommuteStream.http_params.put("ad_unit_uuid", ad_unit_uuid); } public static void setSkip_fetch(String skip_fetch) { CommuteStream.http_params.put("skip_fetch", skip_fetch); } public static RequestParams getHttp_params() { return CommuteStream.http_params; } // App Interface // This should be called by the app whenever tracking times for a given // route are displayed to a user public static void trackingDisplayed(String agency_id, String route_id, String stop_id) { setAgency_interest("TRACKING_DISPLAYED", agency_id, route_id, stop_id); } public static void alertDisplayed(String agency_id, String route_id, String stop_id) { setAgency_interest("ALERT_DISPLAYED", agency_id, route_id, stop_id); } public static void mapDisplayed(String agency_id, String route_id, String stop_id) { setAgency_interest("MAP_DISPLAYED", agency_id, route_id, stop_id); } public static void favoriteAdded(String agency_id, String route_id, String stop_id) { setAgency_interest("FAVORITE_ADDED", agency_id, route_id, stop_id); } public static void tripPlanningPointA(String agency_id, String route_id, String stop_id) { setAgency_interest("TRIP_PLANNING_POINT_A", agency_id, route_id, stop_id); } public static void tripPlanningPointB(String agency_id, String route_id, String stop_id) { setAgency_interest("TRIP_PLANNING_POINT_B", agency_id, route_id, stop_id); } public static void setLocation(Location location) { //We check that the new location is a better one before sending it if(isBetterLocation(location, CommuteStream.currentBestLocation)){ CommuteStream.currentBestLocation = location; CommuteStream.lat = Double.toString(location.getLatitude()); CommuteStream.lon = Double.toString(location.getLongitude()); CommuteStream.acc = Double.toString(location.getAccuracy()); CommuteStream.fix_time = Long.toString(location.getTime()); CommuteStream.http_params.put("lat", CommuteStream.lat); CommuteStream.http_params.put("lon", CommuteStream.lon); CommuteStream.http_params.put("acc", CommuteStream.acc); CommuteStream.http_params.put("fix_time", CommuteStream.fix_time); CommuteStream.parameterChange(); } } 
public static Boolean getTesting() { return CommuteStream.testing; } public static void setTesting() { CommuteStream.testing = true; CommuteStream.http_params.put("testing", "true"); Log.v("CS_SDK", "Testing Mode Set"); } private static void setAgency_interest(String type, String agency_id, String route_id, String stop_id) { // ad a comma if needed if (CommuteStream.agency_interest.length() > 0) { CommuteStream.agency_interest += ','; } CommuteStream.agency_interest += type + "," + agency_id + "," + route_id + "," + stop_id; CommuteStream.http_params.put("agency_interest", CommuteStream.agency_interest); CommuteStream.parameterChange(); } public static void reportSuccessfulGet() { CommuteStream.lastServerRequestTime = new Date(); // clear parameters that should only be sent once CommuteStream.http_params.remove("lat"); CommuteStream.http_params.remove("lon"); CommuteStream.http_params.remove("acc"); CommuteStream.http_params.remove("fix_time"); CommuteStream.http_params.remove("agency_interest"); CommuteStream.agency_interest = ""; } public static void parameterChange() { CommuteStream.lastParameterChange = new Date(); } public static void parametersSent() { CommuteStream.lastParameterChange = new Date(); } // Location helper stuff private static final int TWO_MINUTES = 1000 * 60 * 2; /** * Determines whether one Location reading is better than the current * Location fix * * @param location * The new Location that you want to evaluate * @param currentBestLocation * The current Location fix, to which you want to compare the new * one */ protected static boolean isBetterLocation(Location location, Location currentBestLocation) { if (currentBestLocation == null) { // A new location is always better than no location return true; } // Check whether the new location fix is newer or older long timeDelta = location.getTime() - currentBestLocation.getTime(); boolean isSignificantlyNewer = timeDelta > TWO_MINUTES; boolean isSignificantlyOlder = timeDelta < -TWO_MINUTES; boolean isNewer = timeDelta > 0; // If it's been more than two minutes since the current location, use // the new location // because the user has likely moved if (isSignificantlyNewer) { return true; // If the new location is more than two minutes older, it must be // worse } else if (isSignificantlyOlder) { return false; } // Check whether the new location fix is more or less accurate int accuracyDelta = (int) (location.getAccuracy() - currentBestLocation .getAccuracy()); boolean isLessAccurate = accuracyDelta > 0; boolean isMoreAccurate = accuracyDelta < 0; boolean isSignificantlyLessAccurate = accuracyDelta > 200; // Check if the old and new location are from the same provider boolean isFromSameProvider = isSameProvider(location.getProvider(), currentBestLocation.getProvider()); // Determine location quality using a combination of timeliness and // accuracy if (isMoreAccurate) { return true; } else if (isNewer && !isLessAccurate) { return true; } else if (isNewer && !isSignificantlyLessAccurate && isFromSameProvider) { return true; } return false; } /** Checks whether two providers are the same */ private static boolean isSameProvider(String provider1, String provider2) { if (provider1 == null) { return provider2 == null; } return provider1.equals(provider2); } }
v0.1.1 added check for timer
src/com/commutestream/ads/CommuteStream.java
v0.1.1 added check for timer
<ide><path>src/com/commutestream/ads/CommuteStream.java <ide> private static String banner_height; <ide> private static String banner_width; <ide> private static String sdk_name = "com.commutestreamsdk"; <del> private static String sdk_ver = "0.1.0"; <add> private static String sdk_ver = "0.1.1"; <ide> private static String app_name; <ide> private static String app_ver; <ide> private static String lat; <ide> private static String lon; <ide> private static String acc; <ide> private static String fix_time; <del> <add> <ide> private static Location currentBestLocation; <ide> <ide> private static String api_url = "https://api.commutestream.com:3000/"; <ide> private static Date lastParameterChange = new Date(); <ide> <ide> private static Timer parameterCheckTimer = new Timer(); <add> private static ParameterUpdateCheckTimer parameterCheckTimerTask = new ParameterUpdateCheckTimer( <add> CommuteStream.lastServerRequestTime) { <add> @Override <add> public void run() { <add> Log.v("CS_SDK", "TIMER FIRED"); <add> if (CommuteStream.isInitialized() <add> && (CommuteStream.lastParameterChange.getTime() > CommuteStream.lastServerRequestTime <add> .getTime())) { <add> Log.v("CS_SDK", "Updating the server."); <add> <add> CommuteStream.http_params.put("skip_fetch", "true"); <add> <add> RestClient.get("banner", CommuteStream.http_params, <add> new JsonHttpResponseHandler() { <add> @Override <add> public void onSuccess(JSONObject response) { <add> CommuteStream.reportSuccessfulGet(); <add> // CommuteStream.lastServerRequestTime <add> // = <add> // CommuteStream.lastParameterChange; <add> try { <add> if (response.has("error")) { <add> String error = response <add> .getString("error"); <add> Log.e("CS_SDK", <add> "Error from banner server: " <add> + error); <add> } <add> } catch (JSONException e) { <add> // TODO Auto-generated catch <add> // block <add> e.printStackTrace(); <add> } <add> } <add> <add> @Override <add> public void onFailure(Throwable e, <add> JSONObject errorResponse) { <add> Log.v("CS_SDK", "UPDATE FAILED"); <add> } <add> }); <add> } <add> } <add> }; <ide> <ide> public static void init() { <ide> Log.v("CS_SDK", "init()"); <ide> // Every few seconds we should check to see if the parameters have been <ide> // updated since the last request to the server. If so we should send <ide> // the new parameters to ensure the server has the latest user info <del> CommuteStream.parameterCheckTimer.scheduleAtFixedRate( <del> new ParameterUpdateCheckTimer( <del> CommuteStream.lastServerRequestTime) { <del> @Override <del> public void run() { <del> Log.v("CS_SDK", "TIMER FIRED"); <del> if (CommuteStream.isInitialized() <del> && (CommuteStream.lastParameterChange.getTime() > CommuteStream.lastServerRequestTime <del> .getTime())) { <del> Log.v("CS_SDK", "Updating the server."); <del> <del> CommuteStream.http_params.put("skip_fetch", "true"); <del> <del> RestClient.get("banner", CommuteStream.http_params, <del> new JsonHttpResponseHandler() { <del> @Override <del> public void onSuccess( <del> JSONObject response) { <del> CommuteStream.reportSuccessfulGet(); <del> // CommuteStream.lastServerRequestTime <del> // = <del> // CommuteStream.lastParameterChange; <del> try { <del> if (response.has("error")) { <del> String error = response <del> .getString("error"); <del> Log.e("CS_SDK", <del> "Error from banner server: " <del> + error); <del> } <del> } catch (JSONException e) { <del> // TODO Auto-generated catch <del> // block <del> e.printStackTrace(); <del> } <del> } <del> <del> @Override <del> public void onFailure(Throwable e, <del> JSONObject errorResponse) { <del> Log.v("CS_SDK", "UPDATE FAILED"); <del> } <del> }); <del> } <del> } <del> }, 20000, 20000); <add> try { <add> CommuteStream.parameterCheckTimer.scheduleAtFixedRate(CommuteStream.parameterCheckTimerTask, 20000, 20000); <add> Log.v("CS_SDK", "Timer (Re)started"); <add> } catch(Exception e) { <add> Log.v("CS_SDK", "Already Initialized"); <add> } <add> <ide> } <ide> <ide> public static String getApp_name() { <ide> CommuteStream.api_url = api_url; <ide> } <ide> <add> <add> public static void setTheme(String theme) { <add> http_params.put("theme", theme); <add> } <add> <ide> public static String getAid_sha() { <ide> return CommuteStream.aid_sha; <ide> } <ide> } <ide> <ide> public static void setLocation(Location location) { <del> //We check that the new location is a better one before sending it <del> if(isBetterLocation(location, CommuteStream.currentBestLocation)){ <add> // We check that the new location is a better one before sending it <add> if (isBetterLocation(location, CommuteStream.currentBestLocation)) { <ide> CommuteStream.currentBestLocation = location; <ide> CommuteStream.lat = Double.toString(location.getLatitude()); <ide> CommuteStream.lon = Double.toString(location.getLongitude());
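The refactoring in the diff above (moving the anonymous TimerTask into a static field and wrapping scheduleAtFixedRate in try/catch) guards against a documented property of java.util.Timer: scheduling a TimerTask that is already scheduled, or scheduling on a cancelled timer, throws IllegalStateException, so a second call to init() would previously have crashed. A standalone demonstration of that behaviour (illustration only, not SDK code):

import java.util.Timer;
import java.util.TimerTask;

public class TimerRescheduleDemo {
    public static void main(String[] args) {
        Timer timer = new Timer(true); // daemon thread so the JVM can exit
        TimerTask task = new TimerTask() {
            @Override
            public void run() {
                System.out.println("tick");
            }
        };
        timer.scheduleAtFixedRate(task, 1000, 1000);
        try {
            // Scheduling the same task object again, as a repeated init() would do.
            timer.scheduleAtFixedRate(task, 1000, 1000);
        } catch (IllegalStateException e) {
            // This is the failure the v0.1.1 guard absorbs.
            System.out.println("Rescheduling rejected: " + e.getMessage());
        }
        timer.cancel();
    }
}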
Java
mit
d3e14b6c2e88851c36ea38309d1cd6e1a780307a
0
bitcoin-solutions/multibit-hd,akonring/multibit-hd-modified,akonring/multibit-hd-modified,akonring/multibit-hd-modified,bitcoin-solutions/multibit-hd,bitcoin-solutions/multibit-hd
package org.multibit.hd.ui.views.wizards.use_trezor; import com.google.common.base.Optional; import net.miginfocom.swing.MigLayout; import org.multibit.hd.ui.events.view.ViewEvents; import org.multibit.hd.ui.languages.MessageKey; import org.multibit.hd.ui.views.components.Panels; import org.multibit.hd.ui.views.components.panels.PanelDecorator; import org.multibit.hd.ui.views.fonts.AwesomeIcon; import org.multibit.hd.ui.views.wizards.AbstractWizard; import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView; import org.multibit.hd.ui.views.wizards.WizardButton; import javax.swing.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; /** * <p>Wizard to provide the following to UI:</p> * <ul> * <li>Select which of the trezor related tools to run</li> * </ul> * * @since 0.0.1 */ public class UseTrezorSelectPanelView extends AbstractWizardPanelView<UseTrezorWizardModel, UseTrezorState> implements ActionListener { // Model private UseTrezorState currentSelection; /** * @param wizard The wizard managing the states * @param panelName The panel name to filter events from components */ public UseTrezorSelectPanelView(AbstractWizard<UseTrezorWizardModel> wizard, String panelName) { super(wizard, panelName, MessageKey.USE_TREZOR_TITLE, AwesomeIcon.SHIELD); } @Override public void newPanelModel() { currentSelection = UseTrezorState.BUY_TREZOR; setPanelModel(currentSelection); } @Override public void initialiseContent(JPanel contentPanel) { contentPanel.setLayout( new MigLayout( Panels.migXYLayout(), "[]", // Column constraints "[]" // Row constraints )); contentPanel.add( Panels.newUseTrezorSelector( this, UseTrezorState.BUY_TREZOR.name(), UseTrezorState.VERIFY_TREZOR.name(), UseTrezorState.REQUEST_WIPE_TREZOR.name() ), "span 2, wrap"); } @Override protected void initialiseButtons(AbstractWizard<UseTrezorWizardModel> wizard) { PanelDecorator.addExitCancelNext(this, wizard); } @Override public void fireInitialStateViewEvents() { ViewEvents.fireWizardButtonEnabledEvent(getPanelName(), WizardButton.NEXT,true); } @Override public boolean beforeShow() { ViewEvents.fireWizardButtonEnabledEvent(getPanelName(), WizardButton.NEXT,true); return true; } @Override public void updateFromComponentModels(Optional componentModel) { // Next has been clicked setPanelModel(currentSelection); // Bind this to the wizard model getWizardModel().setCurrentSelection(currentSelection); } /** * <p>Handle the "select tool" action event</p> * * @param e The action event */ @Override public void actionPerformed(ActionEvent e) { JRadioButton source = (JRadioButton) e.getSource(); currentSelection = UseTrezorState.valueOf(source.getActionCommand()); } @Override public boolean beforeHide(boolean isExitCancel) { if (!isExitCancel) { // Ensure the wizard model correctly reflects the contents of the components updateFromComponentModels(Optional.absent()); } // Must be OK to proceed return true; } }
mbhd-swing/src/main/java/org/multibit/hd/ui/views/wizards/use_trezor/UseTrezorSelectPanelView.java
package org.multibit.hd.ui.views.wizards.use_trezor; import com.google.common.base.Optional; import net.miginfocom.swing.MigLayout; import org.multibit.hd.ui.events.view.ViewEvents; import org.multibit.hd.ui.languages.MessageKey; import org.multibit.hd.ui.views.components.Panels; import org.multibit.hd.ui.views.components.panels.PanelDecorator; import org.multibit.hd.ui.views.fonts.AwesomeIcon; import org.multibit.hd.ui.views.wizards.AbstractWizard; import org.multibit.hd.ui.views.wizards.AbstractWizardPanelView; import org.multibit.hd.ui.views.wizards.WizardButton; import javax.swing.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; /** * <p>Wizard to provide the following to UI:</p> * <ul> * <li>Select which of the trezor related tools to run</li> * </ul> * * @since 0.0.1 */ public class UseTrezorSelectPanelView extends AbstractWizardPanelView<UseTrezorWizardModel, UseTrezorState> implements ActionListener { // Model private UseTrezorState currentSelection; /** * @param wizard The wizard managing the states * @param panelName The panel name to filter events from components */ public UseTrezorSelectPanelView(AbstractWizard<UseTrezorWizardModel> wizard, String panelName) { super(wizard, panelName, MessageKey.USE_TREZOR_TITLE, AwesomeIcon.SHIELD); } @Override public void newPanelModel() { currentSelection = UseTrezorState.BUY_TREZOR; setPanelModel(currentSelection); } @Override public void initialiseContent(JPanel contentPanel) { contentPanel.setLayout( new MigLayout( Panels.migXYLayout(), "[]", // Column constraints "[]" // Row constraints )); contentPanel.add( Panels.newUseTrezorSelector( this, UseTrezorState.BUY_TREZOR.name(), UseTrezorState.VERIFY_TREZOR.name(), UseTrezorState.REQUEST_WIPE_TREZOR.name() ), "span 2, wrap"); } @Override protected void initialiseButtons(AbstractWizard<UseTrezorWizardModel> wizard) { PanelDecorator.addExitCancelNext(this, wizard); } @Override public void fireInitialStateViewEvents() { ViewEvents.fireWizardButtonEnabledEvent(getPanelName(), WizardButton.NEXT,true); } @Override public boolean beforeShow() { return true; } @Override public void updateFromComponentModels(Optional componentModel) { // Next has been clicked setPanelModel(currentSelection); // Bind this to the wizard model getWizardModel().setCurrentSelection(currentSelection); } /** * <p>Handle the "select tool" action event</p> * * @param e The action event */ @Override public void actionPerformed(ActionEvent e) { JRadioButton source = (JRadioButton) e.getSource(); currentSelection = UseTrezorState.valueOf(source.getActionCommand()); } @Override public boolean beforeHide(boolean isExitCancel) { if (!isExitCancel) { // Ensure the wizard model correctly reflects the contents of the components updateFromComponentModels(Optional.absent()); } // Must be OK to proceed return true; } }
#552 Enabled next button
mbhd-swing/src/main/java/org/multibit/hd/ui/views/wizards/use_trezor/UseTrezorSelectPanelView.java
#552 Enabled next button
<ide><path>mbhd-swing/src/main/java/org/multibit/hd/ui/views/wizards/use_trezor/UseTrezorSelectPanelView.java <ide> <ide> @Override <ide> public boolean beforeShow() { <add> ViewEvents.fireWizardButtonEnabledEvent(getPanelName(), WizardButton.NEXT,true); <ide> <ide> return true; <ide> }
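The one-line fix in this record repeats the Next-button enabling inside beforeShow(). A plausible reading (hypothetical; the wizard framework internals are not part of this record) is that fireInitialStateViewEvents() runs once when the wizard is built, while beforeShow() runs on every visit to the panel, so a button enabled only initially can end up disabled when the user returns. A toy model of that lifecycle, with the reset-on-hide step as an explicit assumption:

import java.util.ArrayList;
import java.util.List;

public class WizardLifecycleSketch {

    static class SelectPanel {
        final List<String> log = new ArrayList<>();
        boolean nextEnabled;

        void fireInitialStateViewEvents() { // runs once, at wizard construction
            nextEnabled = true;
            log.add("initial: Next enabled");
        }

        void beforeShow() { // runs on every visit; the #552 fix enables Next here too
            nextEnabled = true;
            log.add("beforeShow: Next enabled");
        }

        void beforeHide() { // assumption: the framework resets buttons between panels
            nextEnabled = false;
            log.add("beforeHide: Next disabled");
        }
    }

    public static void main(String[] args) {
        SelectPanel panel = new SelectPanel();
        panel.fireInitialStateViewEvents();
        panel.beforeShow();  // first visit
        panel.beforeHide();  // user navigates away...
        panel.beforeShow();  // ...and back: still enabled thanks to the fix
        System.out.println(panel.log + ", nextEnabled=" + panel.nextEnabled);
    }
}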
Java
mit
bfd1ccd2abd350291caf89b70fc3afc1adec10b4
0
sake/bouncycastle-java
package org.bouncycastle.x509; import org.bouncycastle.jce.X509LDAPCertStoreParameters; import org.bouncycastle.util.Selector; import org.bouncycastle.util.Store; import java.security.InvalidAlgorithmParameterException; import java.security.cert.CertSelector; import java.security.cert.CertStore; import java.security.cert.CollectionCertStoreParameters; import java.security.cert.LDAPCertStoreParameters; import java.security.cert.PKIXParameters; import java.security.cert.TrustAnchor; import java.security.cert.X509CertSelector; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; /** * This class extends the PKIXParameters with a validity model parameter. */ public class ExtendedPKIXParameters extends PKIXParameters { private List stores; private Selector selector; private boolean additionalLocationsEnabled; private List additionalStores; private Set trustedACIssuers; private Set necessaryACAttributes; private Set prohibitedACAttributes; private Set attrCertCheckers; /** * Creates an instance of <code>PKIXParameters</code> with the specified * <code>Set</code> of most-trusted CAs. Each element of the set is a * {@link TrustAnchor TrustAnchor}. <p/> Note that the <code>Set</code> * is copied to protect against subsequent modifications. * * @param trustAnchors a <code>Set</code> of <code>TrustAnchor</code>s * @throws InvalidAlgorithmParameterException if the specified * <code>Set</code> is empty. * @throws NullPointerException if the specified <code>Set</code> is * <code>null</code> * @throws ClassCastException if any of the elements in the <code>Set</code> * is not of type <code>java.security.cert.TrustAnchor</code> */ public ExtendedPKIXParameters(Set trustAnchors) throws InvalidAlgorithmParameterException { super(trustAnchors); stores = new ArrayList(); additionalStores = new ArrayList(); trustedACIssuers = new HashSet(); necessaryACAttributes = new HashSet(); prohibitedACAttributes = new HashSet(); attrCertCheckers = new HashSet(); } /** * Returns an instance with the parameters of a given * <code>PKIXParameters</code> object. * * @param pkixParams The given <code>PKIXParameters</code> * @return an extended PKIX params object */ public static ExtendedPKIXParameters getInstance(PKIXParameters pkixParams) { ExtendedPKIXParameters params; try { params = new ExtendedPKIXParameters(pkixParams.getTrustAnchors()); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } params.setParams(pkixParams); return params; } /** * Method to support <code>clone()</code> under J2ME. * <code>super.clone()</code> does not exist and fields are not copied. * * @param params Parameters to set. If this are * <code>ExtendedPKIXParameters</code> they are copied to. 
*/ protected void setParams(PKIXParameters params) { setDate(params.getDate()); setCertPathCheckers(params.getCertPathCheckers()); setCertStores(params.getCertStores()); setAnyPolicyInhibited(params.isAnyPolicyInhibited()); setExplicitPolicyRequired(params.isExplicitPolicyRequired()); setPolicyMappingInhibited(params.isPolicyMappingInhibited()); setRevocationEnabled(params.isRevocationEnabled()); setInitialPolicies(params.getInitialPolicies()); setPolicyQualifiersRejected(params.getPolicyQualifiersRejected()); setSigProvider(params.getSigProvider()); setTargetCertConstraints(params.getTargetCertConstraints()); try { setTrustAnchors(params.getTrustAnchors()); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } if (params instanceof ExtendedPKIXParameters) { ExtendedPKIXParameters _params = (ExtendedPKIXParameters) params; validityModel = _params.validityModel; useDeltas = _params.useDeltas; additionalLocationsEnabled = _params.additionalLocationsEnabled; selector = _params.selector == null ? null : (Selector) _params.selector.clone(); stores = new ArrayList(_params.stores); additionalStores = new ArrayList(_params.additionalStores); trustedACIssuers = new HashSet(_params.trustedACIssuers); prohibitedACAttributes = new HashSet(_params.prohibitedACAttributes); necessaryACAttributes = new HashSet(_params.necessaryACAttributes); attrCertCheckers = new HashSet(_params.attrCertCheckers); } } /** * This is the default PKIX validity model. Actually there are two variants * of this: The PKIX model and the modified PKIX model. The PKIX model * verifies that all involved certificates must have been valid at the * current time. The modified PKIX model verifies that all involved * certificates were valid at the signing time. Both are indirectly choosen * with the {@link PKIXParameters#setDate(java.util.Date)} method, so this * methods sets the Date when <em>all</em> certificates must have been * valid. */ public static final int PKIX_VALIDITY_MODEL = 0; /** * This model uses the following validity model. Each certificate must have * been valid at the moment where is was used. That means the end * certificate must have been valid at the time the signature was done. The * CA certificate which signed the end certificate must have been valid, * when the end certificate was signed. The CA (or Root CA) certificate must * have been valid, when the CA certificate was signed and so on. So the * {@link PKIXParameters#setDate(java.util.Date)} method sets the time, when * the <em>end certificate</em> must have been valid. <p/> It is used e.g. * in the German signature law. */ public static final int CHAIN_VALIDITY_MODEL = 1; private int validityModel = PKIX_VALIDITY_MODEL; private boolean useDeltas = true; /** * Defaults to <code>false</code>. * * @return Returns if delta CRLs should be used. */ public boolean isUseDeltasEnabled() { return useDeltas; } /** * Sets if delta CRLs should be used for checking the revocation status. * * @param useDeltas <code>true</code> if delta CRLs should be used. */ public void setUseDeltasEnabled(boolean useDeltas) { this.useDeltas = useDeltas; } /** * @return Returns the validity model. * @see #CHAIN_VALIDITY_MODEL * @see #PKIX_VALIDITY_MODEL */ public int getValidityModel() { return validityModel; } /** * Adds a Java CertStore to this extended PKIX parameters. 
If the store uses * initialization parameters of type * <code>CollectionCertStoreParameters</code> or <code></code> the * corresponding Bouncy Castle {@link Store} type is created additionally to * it. */ public void addCertStore(CertStore store) { super.addCertStore(store); if (store.getCertStoreParameters() instanceof CollectionCertStoreParameters) { Collection coll = ((CollectionCertStoreParameters) store .getCertStoreParameters()).getCollection(); X509CollectionStoreParameters params = new X509CollectionStoreParameters( coll); try { stores.add(X509Store.getInstance("CERTIFICATE/COLLECTION", params, "BC")); stores.add(X509Store .getInstance("CRL/COLLECTION", params, "BC")); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } } if (store.getCertStoreParameters() instanceof LDAPCertStoreParameters || store.getCertStoreParameters() instanceof X509LDAPCertStoreParameters) { X509LDAPCertStoreParameters params; if (store.getCertStoreParameters() instanceof X509LDAPCertStoreParameters) { params = (X509LDAPCertStoreParameters) store .getCertStoreParameters(); } else { int port = ((LDAPCertStoreParameters) store .getCertStoreParameters()).getPort(); String server = ((LDAPCertStoreParameters) store .getCertStoreParameters()).getServerName(); params = new X509LDAPCertStoreParameters.Builder("ldap://" + server + ":" + port, null).build(); } try { stores.add(X509Store.getInstance("CERTIFICATE/LDAP", params, "BC")); stores.add(X509Store.getInstance("CRL/LDAP", params, "BC")); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } } } /** * Sets the Java CertStore to this extended PKIX parameters. If the stores * use initialisation parameters of type * <code>CollectionCertStoreParameters</code> or <code></code> the * corresponding Bouncy Castle {@link Store} types are created additionally * to it. * * @throws ClassCastException if an element of <code>stores</code> is not * a <code>CertStore</code>. */ public void setCertStores(List stores) { if (stores != null) { Iterator it = stores.iterator(); while (it.hasNext()) { addCertStore((CertStore) it.next()); } } } /** * Sets the Bouncy Castle Stores for finding CRLs, certificates, attribute * certificates or cross certificates. * <p> * The <code>List</code> is cloned. * * @param stores A list of stores to use. * @see #getStores * @throws ClassCastException if an element of <code>stores</code> is not * a {@link Store}. */ public void setStores(List stores) { if (stores == null) { this.stores = new ArrayList(); } else { for (Iterator i = stores.iterator(); i.hasNext();) { if (!(i.next() instanceof Store)) { throw new ClassCastException( "All elements of list must be " + "of type org.bouncycastle.util.Store."); } } this.stores = new ArrayList(stores); } } /** * Adds a Bouncy Castle {@link Store} to find CRLs, certificates, attribute * certificates or cross certificates. * <p> * This method should be used to add local stores, like collection based * X.509 stores, if available. Local stores should be considered first, * before trying to use additional (remote) locations, because they do not * need possible additional network traffic. * <p> * If <code>store</code> is <code>null</code> it is ignored. * * @param store The store to add. * @see #getStores */ public void addStore(Store store) { if (stores != null) { stores.add(store); } } /** * Adds a additional Bouncy Castle {@link Store} to find CRLs, certificates, * attribute certificates or cross certificates. 
* <p> * You should not use this method. This method is used for adding additional * X.509 stores, which are used to add (remote) locations, e.g. LDAP, found * during X.509 object processing, e.g. in certificates or CRLs. This method * is used in PKIX certification path processing. * <p> * If <code>store</code> is <code>null</code> it is ignored. * * @param store The store to add. * @see #getStores() */ public void addAddionalStore(Store store) { if (store != null) { additionalStores.add(store); } } /** * Returns an immutable <code>List</code> of additional Bouncy Castle * <code>Store</code>s used for finding CRLs, certificates, attribute * certificates or cross certificates. * * @return an immutable <code>List</code> of additional Bouncy Castle * <code>Store</code>s. Never <code>null</code>. * * @see #addAddionalStore(Store) */ public List getAddionalStores() { return Collections.unmodifiableList(additionalStores); } /** * Returns an immutable <code>List</code> of Bouncy Castle * <code>Store</code>s used for finding CRLs, certificates, attribute * certificates or cross certificates. * * @return an immutable <code>List</code> of Bouncy Castle * <code>Store</code>s. Never <code>null</code>. * * @see #setStores(List) */ public List getStores() { return Collections.unmodifiableList(new ArrayList(stores)); } /** * @param validityModel The validity model to set. * @see #CHAIN_VALIDITY_MODEL * @see #PKIX_VALIDITY_MODEL */ public void setValidityModel(int validityModel) { this.validityModel = validityModel; } public Object clone() { ExtendedPKIXParameters params; try { params = new ExtendedPKIXParameters(getTrustAnchors()); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } params.setParams(this); return params; } /** * Returns if additional {@link X509Store}s for locations like LDAP found * in certificates or CRLs should be used. * * @return Returns <code>true</code> if additional stores are used. */ public boolean isAdditionalLocationsEnabled() { return additionalLocationsEnabled; } /** * Sets if additional {@link X509Store}s for locations like LDAP found in * certificates or CRLs should be used. * * @param enabled <code>true</code> if additional stores are used. */ public void setAdditionalLocationsEnabled(boolean enabled) { additionalLocationsEnabled = enabled; } /** * Returns the required constraints on the target certificate or attribute * certificate. The constraints are returned as an instance of * <code>Selector</code>. If <code>null</code>, no constraints are * defined. * * <p> * The target certificate in a PKIX path may be a certificate or an * attribute certificate. * <p> * Note that the <code>Selector</code> returned is cloned to protect * against subsequent modifications. * * @return a <code>Selector</code> specifying the constraints on the * target certificate or attribute certificate (or <code>null</code>) * @see #setTargetConstraints * @see X509CertStoreSelector * @see X509AttributeCertStoreSelector */ public Selector getTargetConstraints() { if (selector != null) { return (Selector) selector.clone(); } else { return null; } } /** * Sets the required constraints on the target certificate or attribute * certificate. The constraints are specified as an instance of * <code>Selector</code>. If <code>null</code>, no constraints are * defined. * <p> * The target certificate in a PKIX path may be a certificate or an * attribute certificate. * <p> * Note that the <code>Selector</code> specified is cloned to protect * against subsequent modifications. 
* * @param selector a <code>Selector</code> specifying the constraints on * the target certificate or attribute certificate (or * <code>null</code>) * @see #getTargetConstraints * @see X509CertStoreSelector * @see X509AttributeCertStoreSelector */ public void setTargetConstraints(Selector selector) { if (selector != null) { this.selector = (Selector) selector.clone(); } else { this.selector = null; } } /** * Sets the required constraints on the target certificate. The constraints * are specified as an instance of <code>X509CertSelector</code>. If * <code>null</code>, no constraints are defined. * * <p> * This method wraps the given <code>X509CertSelector</code> into a * <code>X509CertStoreSelector</code>. * <p> * Note that the <code>X509CertSelector</code> specified is cloned to * protect against subsequent modifications. * * @param selector a <code>X509CertSelector</code> specifying the * constraints on the target certificate (or <code>null</code>) * @see #getTargetCertConstraints * @see X509CertStoreSelector */ public void setTargetCertConstraints(CertSelector selector) { super.setTargetCertConstraints(selector); if (selector != null) { this.selector = X509CertStoreSelector .getInstance((X509CertSelector) selector); } else { this.selector = null; } } /** * Returns the trusted attribute certificate issuers. If attribute * certificates is verified the trusted AC issuers must be set. * <p> * The returned <code>Set</code> consists of <code>TrustAnchor</code>s. * <p> * The returned <code>Set</code> is immutable. Never <code>null</code> * * @return Returns an immutable set of the trusted AC issuers. */ public Set getTrustedACIssuers() { return Collections.unmodifiableSet(trustedACIssuers); } /** * Sets the trusted attribute certificate issuers. If attribute certificates * is verified the trusted AC issuers must be set. * <p> * The <code>trustedACIssuers</code> must be a <code>Set</code> of * <code>TrustAnchor</code> * <p> * The given set is cloned. * * @param trustedACIssuers The trusted AC issuers to set. Is never * <code>null</code>. * @throws ClassCastException if an element of <code>stores</code> is not * a <code>TrustAnchor</code>. */ public void setTrustedACIssuers(Set trustedACIssuers) { if (trustedACIssuers == null) { trustedACIssuers.clear(); return; } for (Iterator it = trustedACIssuers.iterator(); it.hasNext();) { if (!(it.next() instanceof TrustAnchor)) { throw new ClassCastException("All elements of set must be " + "of type " + TrustAnchor.class.getName() + "."); } } this.trustedACIssuers.clear(); this.trustedACIssuers.addAll(trustedACIssuers); } /** * Returns the neccessary attributes which must be contained in an attribute * certificate. * <p> * The returned <code>Set</code> is immutable and contains * <code>String</code>s with the OIDs. * * @return Returns the necessary AC attributes. */ public Set getNecessaryACAttributes() { return Collections.unmodifiableSet(necessaryACAttributes); } /** * Sets the neccessary which must be contained in an attribute certificate. * <p> * The <code>Set</code> must contain <code>String</code>s with the * OIDs. * <p> * The set is cloned. * * @param necessaryACAttributes The necessary AC attributes to set. * @throws ClassCastException if an element of * <code>necessaryACAttributes</code> is not a * <code>String</code>. 
*/ public void setNecessaryACAttributes(Set necessaryACAttributes) { if (necessaryACAttributes == null) { this.necessaryACAttributes.clear(); return; } for (Iterator it = necessaryACAttributes.iterator(); it.hasNext();) { if (!(it.next() instanceof String)) { throw new ClassCastException("All elements of set must be " + "of type String."); } } this.necessaryACAttributes.clear(); this.necessaryACAttributes.addAll(necessaryACAttributes); } /** * Returns the attribute certificates which are not allowed. * <p> * The returned <code>Set</code> is immutable and contains * <code>String</code>s with the OIDs. * * @return Returns the prohibited AC attributes. Is never <code>null</code>. */ public Set getProhibitedACAttributes() { return prohibitedACAttributes; } /** * Sets the attribute certificates which are not allowed. * <p> * The <code>Set</code> must contain <code>String</code>s with the * OIDs. * <p> * The set is cloned. * * @param prohibitedACAttributes The prohibited AC attributes to set. * @throws ClassCastException if an element of * <code>prohibitedACAttributes</code> is not a * <code>String</code>. */ public void setProhibitedACAttributes(Set prohibitedACAttributes) { if (prohibitedACAttributes == null) { this.prohibitedACAttributes.clear(); return; } for (Iterator it = prohibitedACAttributes.iterator(); it.hasNext();) { if (!(it.next() instanceof String)) { throw new ClassCastException("All elements of set must be " + "of type String."); } } this.prohibitedACAttributes.clear(); this.prohibitedACAttributes.addAll(prohibitedACAttributes); } /** * Returns the attribute certificate checker. The returned set contains * {@link PKIXAttrCertChecker}s and is immutable. * * @return Returns the attribute certificate checker. Is never * <code>null</code>. */ public Set getAttrCertCheckers() { return Collections.unmodifiableSet(attrCertCheckers); } /** * Sets the attribute certificate checkers. * <p> * All elements in the <code>Set</code> must a {@link PKIXAttrCertChecker}. * <p> * The given set is cloned. * * @param attrCertCheckers The attribute certificate checkers to set. Is * never <code>null</code>. * @throws ClassCastException if an element of <code>attrCertCheckers</code> * is not a <code>PKIXAttrCertChecker</code>. */ public void setAttrCertCheckers(Set attrCertCheckers) { if (attrCertCheckers == null) { this.attrCertCheckers.clear(); return; } for (Iterator it = attrCertCheckers.iterator(); it.hasNext();) { if (!(it.next() instanceof PKIXAttrCertChecker)) { throw new ClassCastException("All elements of set must be " + "of type " + PKIXAttrCertChecker.class.getName() + "."); } } this.attrCertCheckers.clear(); this.attrCertCheckers.addAll(attrCertCheckers); } }
src/org/bouncycastle/x509/ExtendedPKIXParameters.java
package org.bouncycastle.x509; import org.bouncycastle.jce.X509LDAPCertStoreParameters; import org.bouncycastle.util.Selector; import org.bouncycastle.util.Store; import java.security.InvalidAlgorithmParameterException; import java.security.cert.CertSelector; import java.security.cert.CertStore; import java.security.cert.CollectionCertStoreParameters; import java.security.cert.LDAPCertStoreParameters; import java.security.cert.PKIXParameters; import java.security.cert.TrustAnchor; import java.security.cert.X509CertSelector; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; /** * This class extends the PKIXParameters with a validity model parameter. */ public class ExtendedPKIXParameters extends PKIXParameters { private List stores; private Selector selector; private boolean additionalLocationsEnabled; private List additionalStores; private Set trustedACIssuers; private Set necessaryACAttributes; private Set prohibitedACAttributes; private Set attrCertCheckers; /** * Creates an instance of <code>PKIXParameters</code> with the specified * <code>Set</code> of most-trusted CAs. Each element of the set is a * {@link TrustAnchor TrustAnchor}. <p/> Note that the <code>Set</code> * is copied to protect against subsequent modifications. * * @param trustAnchors a <code>Set</code> of <code>TrustAnchor</code>s * @throws InvalidAlgorithmParameterException if the specified * <code>Set</code> is empty. * @throws NullPointerException if the specified <code>Set</code> is * <code>null</code> * @throws ClassCastException if any of the elements in the <code>Set</code> * is not of type <code>java.security.cert.TrustAnchor</code> */ public ExtendedPKIXParameters(Set trustAnchors) throws InvalidAlgorithmParameterException { super(trustAnchors); stores = new ArrayList(); additionalStores = new ArrayList(); trustedACIssuers = new HashSet(); necessaryACAttributes = new HashSet(); prohibitedACAttributes = new HashSet(); attrCertCheckers = new HashSet(); } /** * Returns an instance with the parameters of a given * <code>PKIXParameters</code> object. * * @param pkixParams The given <code>PKIXParameters</code> * @return an extended PKIX params object */ public static ExtendedPKIXParameters getInstance(PKIXParameters pkixParams) { ExtendedPKIXParameters params; try { params = new ExtendedPKIXParameters(pkixParams.getTrustAnchors()); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } params.setParams(pkixParams); return params; } /** * Method to support <code>clone()</code> under J2ME. * <code>super.clone()</code> does not exist and fields are not copied. * * @param params Parameters to set. If this are * <code>ExtendedPKIXParameters</code> they are copied to. 
*/ protected void setParams(PKIXParameters params) { setDate(params.getDate()); setCertPathCheckers(params.getCertPathCheckers()); setCertStores(params.getCertStores()); setAnyPolicyInhibited(params.isAnyPolicyInhibited()); setExplicitPolicyRequired(params.isExplicitPolicyRequired()); setPolicyMappingInhibited(params.isPolicyMappingInhibited()); setRevocationEnabled(params.isRevocationEnabled()); setInitialPolicies(params.getInitialPolicies()); setPolicyQualifiersRejected(params.getPolicyQualifiersRejected()); setSigProvider(params.getSigProvider()); setTargetCertConstraints(params.getTargetCertConstraints()); try { setTrustAnchors(params.getTrustAnchors()); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } if (params instanceof ExtendedPKIXParameters) { ExtendedPKIXParameters _params = (ExtendedPKIXParameters) params; validityModel = _params.validityModel; useDeltas = _params.useDeltas; additionalLocationsEnabled = _params.additionalLocationsEnabled; selector = _params.selector == null ? null : (Selector) _params.selector.clone(); stores = new ArrayList(_params.stores); additionalStores = new ArrayList(_params.additionalStores); trustedACIssuers = new HashSet(_params.trustedACIssuers); prohibitedACAttributes = new HashSet(_params.prohibitedACAttributes); necessaryACAttributes = new HashSet(_params.necessaryACAttributes); attrCertCheckers = new HashSet(_params.attrCertCheckers); } } /** * This is the default PKIX validity model. Actually there are two variants * of this: The PKIX model and the modified PKIX model. The PKIX model * verifies that all involved certificates must have been valid at the * current time. The modified PKIX model verifies that all involved * certificates were valid at the signing time. Both are indirectly choosen * with the {@link PKIXParameters#setDate(java.util.Date)} method, so this * methods sets the Date when <em>all</em> certificates must have been * valid. */ public static final int PKIX_VALIDITY_MODEL = 0; /** * This model uses the following validity model. Each certificate must have * been valid at the moment where is was used. That means the end * certificate must have been valid at the time the signature was done. The * CA certificate which signed the end certificate must have been valid, * when the end certificate was signed. The CA (or Root CA) certificate must * have been valid, when the CA certificate was signed and so on. So the * {@link PKIXParameters#setDate(java.util.Date)} method sets the time, when * the <em>end certificate</em> must have been valid. <p/> It is used e.g. * in the German signature law. */ public static final int CHAIN_VALIDITY_MODEL = 1; private int validityModel = PKIX_VALIDITY_MODEL; private boolean useDeltas; /** * Defaults to <code>false</code>. * * @return Returns if delta CRLs should be used. */ public boolean isUseDeltasEnabled() { return useDeltas; } /** * Sets if delta CRLs should be used for checking the revocation status. * * @param useDeltas <code>true</code> if delta CRLs should be used. */ public void setUseDeltasEnabled(boolean useDeltas) { this.useDeltas = useDeltas; } /** * @return Returns the validity model. * @see #CHAIN_VALIDITY_MODEL * @see #PKIX_VALIDITY_MODEL */ public int getValidityModel() { return validityModel; } /** * Adds a Java CertStore to this extended PKIX parameters. 
If the store uses * initialisation parameters of type * <code>CollectionCertStoreParameters</code> or <code></code> the * corresponding Bouncy Castle {@link Store} type is created additionally to * it. */ public void addCertStore(CertStore store) { super.addCertStore(store); if (store.getCertStoreParameters() instanceof CollectionCertStoreParameters) { Collection coll = ((CollectionCertStoreParameters) store .getCertStoreParameters()).getCollection(); X509CollectionStoreParameters params = new X509CollectionStoreParameters( coll); try { stores.add(X509Store.getInstance("CERTIFICATE/COLLECTION", params, "BC")); stores.add(X509Store .getInstance("CRL/COLLECTION", params, "BC")); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } } if (store.getCertStoreParameters() instanceof LDAPCertStoreParameters || store.getCertStoreParameters() instanceof X509LDAPCertStoreParameters) { X509LDAPCertStoreParameters params; if (store.getCertStoreParameters() instanceof X509LDAPCertStoreParameters) { params = (X509LDAPCertStoreParameters) store .getCertStoreParameters(); } else { int port = ((LDAPCertStoreParameters) store .getCertStoreParameters()).getPort(); String server = ((LDAPCertStoreParameters) store .getCertStoreParameters()).getServerName(); params = new X509LDAPCertStoreParameters.Builder("ldap://" + server + ":" + port, null).build(); } try { stores.add(X509Store.getInstance("CERTIFICATE/LDAP", params, "BC")); stores.add(X509Store.getInstance("CRL/LDAP", params, "BC")); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } } } /** * Sets the Java CertStore to this extended PKIX parameters. If the stores * use initialisation parameters of type * <code>CollectionCertStoreParameters</code> or <code></code> the * corresponding Bouncy Castle {@link Store} types are created additionally * to it. * * @throws ClassCastException if an element of <code>stores</code> is not * a <code>CertStore</code>. */ public void setCertStores(List stores) { if (stores != null) { Iterator it = stores.iterator(); while (it.hasNext()) { addCertStore((CertStore) it.next()); } } } /** * Sets the Bouncy Castle Stores for finding CRLs, certificates, attribute * certificates or cross certificates. * <p> * The <code>List</code> is cloned. * * @param stores A list of stores to use. * @see #getStores * @throws ClassCastException if an element of <code>stores</code> is not * a {@link Store}. */ public void setStores(List stores) { if (stores == null) { this.stores = new ArrayList(); } else { for (Iterator i = stores.iterator(); i.hasNext();) { if (!(i.next() instanceof Store)) { throw new ClassCastException( "All elements of list must be " + "of type org.bouncycastle.util.Store."); } } this.stores = new ArrayList(stores); } } /** * Adds a Bouncy Castle {@link Store} to find CRLs, certificates, attribute * certificates or cross certificates. * <p> * This method should be used to add local stores, like collection based * X.509 stores, if available. Local stores should be considered first, * before trying to use additional (remote) locations, because they do not * need possible additional network traffic. * <p> * If <code>store</code> is <code>null</code> it is ignored. * * @param store The store to add. * @see #getStores */ public void addStore(Store store) { if (stores != null) { stores.add(store); } } /** * Adds a additional Bouncy Castle {@link Store} to find CRLs, certificates, * attribute certificates or cross certificates. 
* <p> * You should not use this method. This method is used for adding additional * X.509 stores, which are used to add (remote) locations, e.g. LDAP, found * during X.509 object processing, e.g. in certificates or CRLs. This method * is used in PKIX certification path processing. * <p> * If <code>store</code> is <code>null</code> it is ignored. * * @param store The store to add. * @see #getStores() */ public void addAddionalStore(Store store) { if (store != null) { additionalStores.add(store); } } /** * Returns an immutable <code>List</code> of additional Bouncy Castle * <code>Store</code>s used for finding CRLs, certificates, attribute * certificates or cross certificates. * * @return an immutable <code>List</code> of additional Bouncy Castle * <code>Store</code>s. Never <code>null</code>. * * @see #addAddionalStore(Store) */ public List getAddionalStores() { return Collections.unmodifiableList(additionalStores); } /** * Returns an immutable <code>List</code> of Bouncy Castle * <code>Store</code>s used for finding CRLs, certificates, attribute * certificates or cross certificates. * * @return an immutable <code>List</code> of Bouncy Castle * <code>Store</code>s. Never <code>null</code>. * * @see #setStores(List) */ public List getStores() { return Collections.unmodifiableList(new ArrayList(stores)); } /** * @param validityModel The validity model to set. * @see #CHAIN_VALIDITY_MODEL * @see #PKIX_VALIDITY_MODEL */ public void setValidityModel(int validityModel) { this.validityModel = validityModel; } public Object clone() { ExtendedPKIXParameters params; try { params = new ExtendedPKIXParameters(getTrustAnchors()); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } params.setParams(this); return params; } /** * Returns if additional {@link X509Store}s for locations like LDAP found * in certificates or CRLs should be used. * * @return Returns <code>true</code> if additional stores are used. */ public boolean isAdditionalLocationsEnabled() { return additionalLocationsEnabled; } /** * Sets if additional {@link X509Store}s for locations like LDAP found in * certificates or CRLs should be used. * * @param enabled <code>true</code> if additional stores are used. */ public void setAdditionalLocationsEnabled(boolean enabled) { additionalLocationsEnabled = enabled; } /** * Returns the required constraints on the target certificate or attribute * certificate. The constraints are returned as an instance of * <code>Selector</code>. If <code>null</code>, no constraints are * defined. * * <p> * The target certificate in a PKIX path may be a certificate or an * attribute certificate. * <p> * Note that the <code>Selector</code> returned is cloned to protect * against subsequent modifications. * * @return a <code>Selector</code> specifying the constraints on the * target certificate or attribute certificate (or <code>null</code>) * @see #setTargetConstraints * @see X509CertStoreSelector * @see X509AttributeCertStoreSelector */ public Selector getTargetConstraints() { if (selector != null) { return (Selector) selector.clone(); } else { return null; } } /** * Sets the required constraints on the target certificate or attribute * certificate. The constraints are specified as an instance of * <code>Selector</code>. If <code>null</code>, no constraints are * defined. * <p> * The target certificate in a PKIX path may be a certificate or an * attribute certificate. * <p> * Note that the <code>Selector</code> specified is cloned to protect * against subsequent modifications. 
* * @param selector a <code>Selector</code> specifying the constraints on * the target certificate or attribute certificate (or * <code>null</code>) * @see #getTargetConstraints * @see X509CertStoreSelector * @see X509AttributeCertStoreSelector */ public void setTargetConstraints(Selector selector) { if (selector != null) { this.selector = (Selector) selector.clone(); } else { this.selector = null; } } /** * Sets the required constraints on the target certificate. The constraints * are specified as an instance of <code>X509CertSelector</code>. If * <code>null</code>, no constraints are defined. * * <p> * This method wraps the given <code>X509CertSelector</code> into a * <code>X509CertStoreSelector</code>. * <p> * Note that the <code>X509CertSelector</code> specified is cloned to * protect against subsequent modifications. * * @param selector a <code>X509CertSelector</code> specifying the * constraints on the target certificate (or <code>null</code>) * @see #getTargetCertConstraints * @see X509CertStoreSelector */ public void setTargetCertConstraints(CertSelector selector) { super.setTargetCertConstraints(selector); if (selector != null) { this.selector = X509CertStoreSelector .getInstance((X509CertSelector) selector); } else { this.selector = null; } } /** * Returns the trusted attribute certificate issuers. If attribute * certificates is verified the trusted AC issuers must be set. * <p> * The returned <code>Set</code> consists of <code>TrustAnchor</code>s. * <p> * The returned <code>Set</code> is immutable. Never <code>null</code> * * @return Returns an immutable set of the trusted AC issuers. */ public Set getTrustedACIssuers() { return Collections.unmodifiableSet(trustedACIssuers); } /** * Sets the trusted attribute certificate issuers. If attribute certificates * is verified the trusted AC issuers must be set. * <p> * The <code>trustedACIssuers</code> must be a <code>Set</code> of * <code>TrustAnchor</code> * <p> * The given set is cloned. * * @param trustedACIssuers The trusted AC issuers to set. Is never * <code>null</code>. * @throws ClassCastException if an element of <code>stores</code> is not * a <code>TrustAnchor</code>. */ public void setTrustedACIssuers(Set trustedACIssuers) { if (trustedACIssuers == null) { trustedACIssuers.clear(); return; } for (Iterator it = trustedACIssuers.iterator(); it.hasNext();) { if (!(it.next() instanceof TrustAnchor)) { throw new ClassCastException("All elements of set must be " + "of type " + TrustAnchor.class.getName() + "."); } } this.trustedACIssuers.clear(); this.trustedACIssuers.addAll(trustedACIssuers); } /** * Returns the neccessary attributes which must be contained in an attribute * certificate. * <p> * The returned <code>Set</code> is immutable and contains * <code>String</code>s with the OIDs. * * @return Returns the necessary AC attributes. */ public Set getNecessaryACAttributes() { return Collections.unmodifiableSet(necessaryACAttributes); } /** * Sets the neccessary which must be contained in an attribute certificate. * <p> * The <code>Set</code> must contain <code>String</code>s with the * OIDs. * <p> * The set is cloned. * * @param necessaryACAttributes The necessary AC attributes to set. * @throws ClassCastException if an element of * <code>necessaryACAttributes</code> is not a * <code>String</code>. 
*/ public void setNecessaryACAttributes(Set necessaryACAttributes) { if (necessaryACAttributes == null) { this.necessaryACAttributes.clear(); return; } for (Iterator it = necessaryACAttributes.iterator(); it.hasNext();) { if (!(it.next() instanceof String)) { throw new ClassCastException("All elements of set must be " + "of type String."); } } this.necessaryACAttributes.clear(); this.necessaryACAttributes.addAll(necessaryACAttributes); } /** * Returns the attribute certificates which are not allowed. * <p> * The returned <code>Set</code> is immutable and contains * <code>String</code>s with the OIDs. * * @return Returns the prohibited AC attributes. Is never <code>null</code>. */ public Set getProhibitedACAttributes() { return prohibitedACAttributes; } /** * Sets the attribute certificates which are not allowed. * <p> * The <code>Set</code> must contain <code>String</code>s with the * OIDs. * <p> * The set is cloned. * * @param prohibitedACAttributes The prohibited AC attributes to set. * @throws ClassCastException if an element of * <code>prohibitedACAttributes</code> is not a * <code>String</code>. */ public void setProhibitedACAttributes(Set prohibitedACAttributes) { if (prohibitedACAttributes == null) { this.prohibitedACAttributes.clear(); return; } for (Iterator it = prohibitedACAttributes.iterator(); it.hasNext();) { if (!(it.next() instanceof String)) { throw new ClassCastException("All elements of set must be " + "of type String."); } } this.prohibitedACAttributes.clear(); this.prohibitedACAttributes.addAll(prohibitedACAttributes); } /** * Returns the attribute certificate checker. The returned set contains * {@link PKIXAttrCertChecker}s and is immutable. * * @return Returns the attribute certificate checker. Is never * <code>null</code>. */ public Set getAttrCertCheckers() { return Collections.unmodifiableSet(attrCertCheckers); } /** * Sets the attribute certificate checkers. * <p> * All elements in the <code>Set</code> must a {@link PKIXAttrCertChecker}. * <p> * The given set is cloned. * * @param attrCertCheckers The attribute certificate checkers to set. Is * never <code>null</code>. * @throws ClassCastException if an element of <code>attrCertCheckers</code> * is not a <code>PKIXAttrCertChecker</code>. */ public void setAttrCertCheckers(Set attrCertCheckers) { if (attrCertCheckers == null) { this.attrCertCheckers.clear(); return; } for (Iterator it = attrCertCheckers.iterator(); it.hasNext();) { if (!(it.next() instanceof PKIXAttrCertChecker)) { throw new ClassCastException("All elements of set must be " + "of type " + PKIXAttrCertChecker.class.getName() + "."); } } this.attrCertCheckers.clear(); this.attrCertCheckers.addAll(attrCertCheckers); } }
update to enable delta CRL processing by default.
src/org/bouncycastle/x509/ExtendedPKIXParameters.java
update to enable delta CRL processing by default.
<ide><path>src/org/bouncycastle/x509/ExtendedPKIXParameters.java <ide> <ide> private int validityModel = PKIX_VALIDITY_MODEL; <ide> <del> private boolean useDeltas; <add> private boolean useDeltas = true; <ide> <ide> /** <ide> * Defaults to <code>false</code>. <ide> * <ide> * @return Returns if delta CRLs should be used. <ide> <ide> /** <ide> * Adds a Java CertStore to this extended PKIX parameters. If the store uses <del> * initialisation parameters of type <add> * initialization parameters of type <ide> * <code>CollectionCertStoreParameters</code> or <code></code> the <ide> * corresponding Bouncy Castle {@link Store} type is created additionally to <ide> * it.
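Note that the Javadoc in this record ("Defaults to <code>false</code>.") was not updated by the commit and no longer matches the new default of the useDeltas field. Since the diff only flips that default, a short sketch of how these parameters are typically wired up may help; the trust anchors are assumed to come from elsewhere, and only methods visible in ExtendedPKIXParameters above are used:

import java.security.cert.PKIXParameters;
import java.security.cert.TrustAnchor;
import java.util.Set;

import org.bouncycastle.x509.ExtendedPKIXParameters;

public class PkixParamsSketch {

    // anchors: a non-empty set of TrustAnchor built from CA certificates,
    // assumed to be available to the caller.
    static ExtendedPKIXParameters configure(Set<TrustAnchor> anchors) throws Exception {
        PKIXParameters base = new PKIXParameters(anchors);
        ExtendedPKIXParameters params = ExtendedPKIXParameters.getInstance(base);

        // After this commit, delta CRL processing is on by default:
        System.out.println("delta CRLs enabled: " + params.isUseDeltasEnabled()); // true

        // It can still be switched off explicitly:
        params.setUseDeltasEnabled(false);

        // Optionally use the shell/chain model (e.g. German signature law)
        // instead of the default PKIX validity model.
        params.setValidityModel(ExtendedPKIXParameters.CHAIN_VALIDITY_MODEL);
        return params;
    }
}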
Java
apache-2.0
error: pathspec 'src/test/java/com/whippy/poker/state/beans/TableTest.java' did not match any file(s) known to git
889c7435037a6304d74c06dfe9cc9cb05c114609
1
WhippyCleric/WhipPoker,WhippyCleric/WhipPoker,WhippyCleric/WhipPoker,WhippyCleric/WhipPoker
//J- package com.whippy.poker.state.beans; import static org.junit.Assert.fail; import org.junit.Test; public class TableTest { //~ ---------------------------------------------------------------------------------------------------------------- //~ Methods //~ ---------------------------------------------------------------------------------------------------------------- @Test public void testInvalidSeat() { Table table = new Table(10); try { table.getSeat(10); fail("Seat should not exist"); } catch (IllegalArgumentException e) { //expected } } } //J+
src/test/java/com/whippy/poker/state/beans/TableTest.java
Add invalid seat test to Table
src/test/java/com/whippy/poker/state/beans/TableTest.java
Add invalid seat test to Table
<ide><path>src/test/java/com/whippy/poker/state/beans/TableTest.java <add>//J- <add>package com.whippy.poker.state.beans; <add> <add>import static org.junit.Assert.fail; <add>import org.junit.Test; <add> <add> <add>public class TableTest { <add> <add> //~ ---------------------------------------------------------------------------------------------------------------- <add> //~ Methods <add> //~ ---------------------------------------------------------------------------------------------------------------- <add> <add> @Test <add> public void testInvalidSeat() { <add> Table table = new Table(10); <add> try { <add> table.getSeat(10); <add> fail("Seat should not exist"); <add> } catch (IllegalArgumentException e) { <add> //expected <add> } <add> } <add> <add>} <add>//J+
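The Table class under test is not included in this record (the recorded stderr also shows the test file was new to the repository), so the following is a hypothetical minimal implementation that would satisfy testInvalidSeat: a Table(10) exposes seats 0 through 9 and rejects any other index with IllegalArgumentException.

public class Table {

    private final Seat[] seats;

    public Table(int capacity) {
        seats = new Seat[capacity];
        for (int i = 0; i < capacity; i++) {
            seats[i] = new Seat(i);
        }
    }

    public Seat getSeat(int index) {
        if (index < 0 || index >= seats.length) {
            throw new IllegalArgumentException("No seat with index " + index);
        }
        return seats[index];
    }

    // Placeholder Seat type for this sketch.
    public static class Seat {
        private final int number;

        public Seat(int number) {
            this.number = number;
        }

        public int getNumber() {
            return number;
        }
    }
}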
Java
lgpl-2.1
01f9a886d357fc9110b3ad433ba667972c891ecc
0
xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.test.ui; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriBuilder; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpMethod; import org.apache.commons.httpclient.UsernamePasswordCredentials; import org.apache.commons.httpclient.auth.AuthScope; import org.apache.commons.httpclient.methods.DeleteMethod; import org.apache.commons.httpclient.methods.EntityEnclosingMethod; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.methods.InputStreamRequestEntity; import org.apache.commons.httpclient.methods.PostMethod; import org.apache.commons.httpclient.methods.PutMethod; import org.apache.commons.httpclient.methods.RequestEntity; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.junit.Assert; import org.openqa.selenium.By; import org.openqa.selenium.Cookie; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.ui.ExpectedCondition; import org.xwiki.component.manager.ComponentManager; import org.xwiki.model.EntityType; import org.xwiki.model.internal.reference.RelativeStringEntityReferenceResolver; import org.xwiki.model.reference.EntityReference; import org.xwiki.model.reference.EntityReferenceResolver; import org.xwiki.model.reference.EntityReferenceSerializer; import org.xwiki.rest.model.jaxb.ObjectFactory; import org.xwiki.rest.model.jaxb.Page; import org.xwiki.rest.model.jaxb.Xwiki; import org.xwiki.rest.resources.attachments.AttachmentResource; import org.xwiki.rest.resources.objects.ObjectPropertyResource; import org.xwiki.rest.resources.objects.ObjectResource; import org.xwiki.rest.resources.objects.ObjectsResource; import org.xwiki.rest.resources.pages.PageResource; import org.xwiki.test.integration.XWikiExecutor; import org.xwiki.test.ui.po.ViewPage; import org.xwiki.test.ui.po.editor.ClassEditPage; import 
org.xwiki.test.ui.po.editor.ObjectEditPage; /** * Helper methods for testing, not related to a specific Page Object. Also made available to tests classes. * * @version $Id$ * @since 3.2M3 */ public class TestUtils { /** * @since 5.0M2 */ public static final UsernamePasswordCredentials ADMIN_CREDENTIALS = new UsernamePasswordCredentials("Admin", "admin"); /** * @since 5.1M1 */ public static final UsernamePasswordCredentials SUPER_ADMIN_CREDENTIALS = new UsernamePasswordCredentials( "superadmin", "pass"); /** * @since 5.0M2 * @deprecated since 7.3M1, use {@link #getBaseURL()} instead */ @Deprecated public static final String BASE_URL = XWikiExecutor.URL + ":" + XWikiExecutor.DEFAULT_PORT + "/xwiki/"; /** * @since 5.0M2 * @deprecated since 7.3M1, use {@link #getBaseBinURL()} instead */ @Deprecated public static final String BASE_BIN_URL = BASE_URL + "bin/"; /** * @since 5.0M2 * @deprecated since 7.3M1, use {@link #getBaseRestURL()} instead */ @Deprecated public static final String BASE_REST_URL = BASE_URL + "rest/"; /** * @since 7.3M1 */ private static final EntityReferenceResolver<String> RELATIVE_RESOLVER = new RelativeStringEntityReferenceResolver(); /** * @since 7.3M1 */ private static final int[] STATUS_OKNOTFOUND = new int[] {Status.OK.getStatusCode(), Status.NOT_FOUND.getStatusCode()}; /** * @since 7.3M1 */ private static final int[] STATUS_OK = new int[] {Status.OK.getStatusCode()}; private static PersistentTestContext context; private static ComponentManager componentManager; private static EntityReferenceResolver<String> referenceResolver; private static EntityReferenceSerializer<String> referenceSerializer; /** * Used to convert Java object into its REST XML representation. */ private static Marshaller marshaller; /** * Used to convert REST request XML result into its Java representation. */ private static Unmarshaller unmarshaller; /** * Used to create REST Java resources. */ private static ObjectFactory objectFactory; { { try { // Initialize REST related tools JAXBContext context = JAXBContext.newInstance("org.xwiki.rest.model.jaxb" + ":org.xwiki.extension.repository.xwiki.model.jaxb"); marshaller = context.createMarshaller(); unmarshaller = context.createUnmarshaller(); objectFactory = new ObjectFactory(); } catch (JAXBException e) { throw new RuntimeException(e); } } } /** Cached secret token. TODO cache for each user. */ private String secretToken = null; private HttpClient httpClient; /** * @since 7.3M1 */ private XWikiExecutor executor; /** * @since 7.3M1 */ private String currentWiki = "xwiki"; private RestTestUtils rest = new RestTestUtils(); public TestUtils() { this.httpClient = new HttpClient(); this.httpClient.getState().setCredentials(AuthScope.ANY, SUPER_ADMIN_CREDENTIALS); this.httpClient.getParams().setAuthenticationPreemptive(true); } /** * @since 7.3M1 */ public XWikiExecutor getExecutor() { return this.executor; } /** * @since 7.3M1 */ public void setExecutor(XWikiExecutor executor) { this.executor = executor; } /** Used so that AllTests can set the persistent test context. 
*/ public static void setContext(PersistentTestContext context) { TestUtils.context = context; } public static void initializeComponent(ComponentManager componentManager) throws Exception { TestUtils.componentManager = componentManager; TestUtils.referenceResolver = TestUtils.componentManager.getInstance(EntityReferenceResolver.TYPE_STRING); TestUtils.referenceSerializer = TestUtils.componentManager.getInstance(EntityReferenceSerializer.TYPE_STRING); } protected XWikiWebDriver getDriver() { return context.getDriver(); } public Session getSession() { return this.new Session(getDriver().manage().getCookies(), getSecretToken()); } public void setSession(Session session) { WebDriver.Options options = getDriver().manage(); options.deleteAllCookies(); if (session != null) { for (Cookie cookie : session.getCookies()) { options.addCookie(cookie); } } if (session != null && !StringUtils.isEmpty(session.getSecretToken())) { this.secretToken = session.getSecretToken(); } else { recacheSecretToken(); } } /** * @since 7.0RC1 */ public void setDefaultCredentials(String username, String password) { this.httpClient.getState().setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password)); } /** * @since 7.0RC1 */ public void setDefaultCredentials(UsernamePasswordCredentials defaultCredentials) { this.httpClient.getState().setCredentials(AuthScope.ANY, defaultCredentials); } public UsernamePasswordCredentials getDefaultCredentials() { return (UsernamePasswordCredentials) this.httpClient.getState().getCredentials(AuthScope.ANY); } public void loginAsSuperAdmin() { login(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword()); } public void loginAsSuperAdminAndGotoPage(String pageURL) { loginAndGotoPage(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword(), pageURL); } public void loginAsAdmin() { login(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword()); } public void loginAsAdminAndGotoPage(String pageURL) { loginAndGotoPage(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword(), pageURL); } public void login(String username, String password) { loginAndGotoPage(username, password, null); } public void loginAndGotoPage(String username, String password, String pageURL) { if (!username.equals(getLoggedInUserName())) { // Log in and direct to a non existent page so that it loads very fast and we don't incur the time cost of // going to the home page for example. // Also recache the CSRF token getDriver().get(getURLToLoginAndGotoPage(username, password, getURL("XWiki", "Register", "register"))); recacheSecretTokenWhenOnRegisterPage(); if (pageURL != null) { // Go to the page asked getDriver().get(pageURL); } else { getDriver().get(getURLToNonExistentPage()); } setDefaultCredentials(username, password); } } /** * Consider using setSession(null) because it will drop the cookies which is faster than invoking a logout action. */ public String getURLToLogout() { return getURL("XWiki", "XWikiLogin", "logout"); } public String getURLToLoginAsAdmin() { return getURLToLoginAs(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword()); } public String getURLToLoginAsSuperAdmin() { return getURLToLoginAs(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword()); } public String getURLToLoginAs(final String username, final String password) { return getURLToLoginAndGotoPage(username, password, null); } /** * @param pageURL the URL of the page to go to after logging in. 
* @return the URL that performs the login and then goes to the requested page. */ public String getURLToLoginAsAdminAndGotoPage(final String pageURL) { return getURLToLoginAndGotoPage(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword(), pageURL); } /** * @param pageURL the URL of the page to go to after logging in. * @return the URL that performs the login and then goes to the requested page. */ public String getURLToLoginAsSuperAdminAndGotoPage(final String pageURL) { return getURLToLoginAndGotoPage(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword(), pageURL); } /** * @param username the name of the user to log in as. * @param password the password for the user to log in. * @param pageURL the URL of the page to go to after logging in. * @return the URL that performs the login and then goes to the requested page. */ public String getURLToLoginAndGotoPage(final String username, final String password, final String pageURL) { Map<String, String> parameters = new HashMap<String, String>() { { put("j_username", username); put("j_password", password); if (pageURL != null && pageURL.length() > 0) { put("xredirect", pageURL); } } }; return getURL("XWiki", "XWikiLogin", "loginsubmit", parameters); } /** * @return URL to a non-existent page that loads very fast (we are using plain mode so that we don't even have to * display the skin ;)) */ public String getURLToNonExistentPage() { return getURL("NonExistentSpace", "NonExistentPage", "view", "xpage=plain"); } /** * Waits until the browser is on the page identified by the given URL, ignoring any query string. */ public void assertOnPage(final String pageURL) { final String pageURI = pageURL.replaceAll("\\?.*", ""); getDriver().waitUntilCondition(new ExpectedCondition<Boolean>() { @Override public Boolean apply(WebDriver driver) { return getDriver().getCurrentUrl().contains(pageURI); } }); } public String getLoggedInUserName() { By userAvatar = By.xpath("//div[@id='xwikimainmenu']//li[contains(@class, 'navbar-avatar')]/a"); if (!getDriver().hasElementWithoutWaiting(userAvatar)) { // Guest return null; } WebElement element = getDriver().findElementWithoutWaiting(userAvatar); String href = element.getAttribute("href"); String loggedInUserName = href.substring(href.lastIndexOf("/") + 1); // Return return loggedInUserName; } public void createUserAndLogin(final String username, final String password, Object... properties) { createUserAndLoginWithRedirect(username, password, getURLToNonExistentPage(), properties); } public void createUserAndLoginWithRedirect(final String username, final String password, String url, Object... properties) { createUser(username, password, getURLToLoginAndGotoPage(username, password, url), properties); setDefaultCredentials(username, password); } public void createUser(final String username, final String password, String redirectURL, Object...
properties) { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("register", "1"); parameters.put("xwikiname", username); parameters.put("register_password", password); parameters.put("register2_password", password); parameters.put("register_email", ""); parameters.put("xredirect", redirectURL); parameters.put("form_token", getSecretToken()); getDriver().get(getURL("XWiki", "Register", "register", parameters)); recacheSecretToken(); if (properties.length > 0) { updateObject("XWiki", username, "XWiki.XWikiUsers", 0, properties); } } public ViewPage gotoPage(String space, String page) { gotoPage(space, page, "view"); return new ViewPage(); } /** * @since 7.2M2 */ public ViewPage gotoPage(EntityReference reference) { gotoPage(reference, "view"); return new ViewPage(); } public void gotoPage(String space, String page, String action) { gotoPage(space, page, action, ""); } /** * @since 7.2M2 */ public void gotoPage(EntityReference reference, String action) { gotoPage(reference, action, ""); } /** * @since 3.5M1 */ public void gotoPage(String space, String page, String action, Object... queryParameters) { gotoPage(space, page, action, toQueryString(queryParameters)); } public void gotoPage(String space, String page, String action, Map<String, ?> queryParameters) { gotoPage(Collections.singletonList(space), page, action, queryParameters); } /** * @since 7.2M2 */ public void gotoPage(List<String> spaces, String page, String action, Map<String, ?> queryParameters) { gotoPage(spaces, page, action, toQueryString(queryParameters)); } /** * @since 7.2M2 */ public void gotoPage(EntityReference reference, String action, Map<String, ?> queryParameters) { gotoPage(reference, action, toQueryString(queryParameters)); } public void gotoPage(String space, String page, String action, String queryString) { gotoPage(Collections.singletonList(space), page, action, queryString); } /** * @since 7.2M2 */ public void gotoPage(List<String> spaces, String page, String action, String queryString) { gotoPage(getURL(spaces, page, action, queryString)); } /** * @since 7.2M2 */ public void gotoPage(EntityReference reference, String action, String queryString) { gotoPage(getURL(reference, action, queryString)); // Update current wiki EntityReference wikiReference = reference.extractReference(EntityType.WIKI); if (wikiReference != null) { this.currentWiki = wikiReference.getName(); } } public void gotoPage(String url) { // Only navigate if the current URL is different from the one to go to, in order to improve performance.
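// Note that this is a plain string comparison: an equivalent URL written with a different encoding or parameter order still triggers a navigation.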
if (!getDriver().getCurrentUrl().equals(url)) { getDriver().get(url); } } public String getURLToDeletePage(String space, String page) { return getURL(space, page, "delete", "confirm=1"); } /** * @since 7.2M2 */ public String getURLToDeletePage(EntityReference reference) { return getURL(reference, "delete", "confirm=1"); } /** * @param space the name of the space to delete * @return the URL that can be used to delete the specified space * @since 4.5 */ public String getURLToDeleteSpace(String space) { return getURL(space, "WebHome", "deletespace", "confirm=1"); } public ViewPage createPage(String space, String page, String content, String title) { return createPage(Collections.singletonList(space), page, content, title); } /** * @since 7.2M2 */ public ViewPage createPage(EntityReference reference, String content, String title) { return createPage(reference, content, title, null); } /** * @since 7.2M2 */ public ViewPage createPage(List<String> spaces, String page, String content, String title) { return createPage(spaces, page, content, title, null); } public ViewPage createPage(String space, String page, String content, String title, String syntaxId) { return createPage(Collections.singletonList(space), page, content, title, syntaxId); } /** * @since 7.2M2 */ public ViewPage createPage(EntityReference reference, String content, String title, String syntaxId) { return createPage(reference, content, title, syntaxId, null); } /** * @since 7.2M2 */ public ViewPage createPage(List<String> spaces, String page, String content, String title, String syntaxId) { return createPage(spaces, page, content, title, syntaxId, null); } public ViewPage createPage(String space, String page, String content, String title, String syntaxId, String parentFullPageName) { return createPage(Collections.singletonList(space), page, content, title, syntaxId, parentFullPageName); } /** * @since 7.2M2 */ public ViewPage createPage(List<String> spaces, String page, String content, String title, String syntaxId, String parentFullPageName) { Map<String, String> queryMap = new HashMap<String, String>(); if (content != null) { queryMap.put("content", content); } if (title != null) { queryMap.put("title", title); } if (syntaxId != null) { queryMap.put("syntaxId", syntaxId); } if (parentFullPageName != null) { queryMap.put("parent", parentFullPageName); } gotoPage(spaces, page, "save", queryMap); return new ViewPage(); } /** * @since 7.2M2 */ public ViewPage createPage(EntityReference reference, String content, String title, String syntaxId, String parentFullPageName) { Map<String, String> queryMap = new HashMap<>(); if (content != null) { queryMap.put("content", content); } if (title != null) { queryMap.put("title", title); } if (syntaxId != null) { queryMap.put("syntaxId", syntaxId); } if (parentFullPageName != null) { queryMap.put("parent", parentFullPageName); } gotoPage(reference, "save", queryMap); return new ViewPage(); } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String syntaxId, String parentFullPageName, String attachmentName, InputStream attachmentData) throws Exception { return createPageWithAttachment(space, page, content, title, syntaxId, parentFullPageName, attachmentName, attachmentData, null); } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String syntaxId, String parentFullPageName, String attachmentName, InputStream attachmentData, UsernamePasswordCredentials
credentials) throws Exception { return createPageWithAttachment(Collections.singletonList(space), page, content, title, syntaxId, parentFullPageName, attachmentName, attachmentData, credentials); } /** * @since 7.2M2 */ public ViewPage createPageWithAttachment(List<String> spaces, String page, String content, String title, String syntaxId, String parentFullPageName, String attachmentName, InputStream attachmentData, UsernamePasswordCredentials credentials) throws Exception { ViewPage vp = createPage(spaces, page, content, title, syntaxId, parentFullPageName); attachFile(spaces, page, attachmentName, attachmentData, false, credentials); return vp; } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String attachmentName, InputStream attachmentData) throws Exception { return createPageWithAttachment(space, page, content, title, null, null, attachmentName, attachmentData); } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String attachmentName, InputStream attachmentData, UsernamePasswordCredentials credentials) throws Exception { ViewPage vp = createPage(space, page, content, title); attachFile(space, page, attachmentName, attachmentData, false, credentials); return vp; } public void deletePage(String space, String page) { getDriver().get(getURLToDeletePage(space, page)); } /** * @since 7.2M2 */ public void deletePage(EntityReference reference) { getDriver().get(getURLToDeletePage(reference)); } /** * @since 7.2M2 */ public EntityReference resolveDocumentReference(String referenceAsString) { return referenceResolver.resolve(referenceAsString, EntityType.DOCUMENT); } /** * @since 7.2M3 */ public EntityReference resolveSpaceReference(String referenceAsString) { return referenceResolver.resolve(referenceAsString, EntityType.SPACE); } /** * @since 7.2RC1 */ public String serializeReference(EntityReference reference) { return referenceSerializer.serialize(reference); } /** * Accesses the URL to delete the specified space. * * @param space the name of the space to delete * @since 4.5 */ public void deleteSpace(String space) { getDriver().get(getURLToDeleteSpace(space)); } public boolean pageExists(String space, String page) { return pageExists(Collections.singletonList(space), page); } /** * @since 7.2M2 */ public boolean pageExists(List<String> spaces, String page) { boolean exists; try { executeGet(getURL(spaces, page, "view", null), Status.OK.getStatusCode()); exists = true; } catch (Exception e) { exists = false; } return exists; } /** * Get the URL to view a page. * * @param space the space in which the page resides. * @param page the name of the page. */ public String getURL(String space, String page) { return getURL(space, page, "view"); } /** * Get the URL of an action on a page. * * @param space the space in which the page resides. * @param page the name of the page. * @param action the action to do on the page. */ public String getURL(String space, String page, String action) { return getURL(space, page, action, ""); } /** * Get the URL of an action on a page with a specified query string. * * @param space the space in which the page resides. * @param page the name of the page. * @param action the action to do on the page. * @param queryString the query string to pass in the URL. 
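* For example {@code getURL("Main", "WebHome", "view", "xpage=plain")} builds the URL of the plain view of Main.WebHome.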
*/ public String getURL(String space, String page, String action, String queryString) { return getURL(action, new String[] {space, page}, queryString); } /** * @since 7.2M2 */ public String getURL(List<String> spaces, String page, String action, String queryString) { List<String> path = new ArrayList<>(spaces); path.add(page); return getURL(action, path.toArray(new String[] {}), queryString); } /** * @since 7.2M2 */ public String getURL(EntityReference reference, String action, String queryString) { return getURL(action, extractListFromReference(reference).toArray(new String[] {}), queryString); } /** * @since 7.2M2 */ public String getURLFragment(EntityReference reference) { return StringUtils.join(extractListFromReference(reference), "/"); } private List<String> extractListFromReference(EntityReference reference) { List<String> path = new ArrayList<>(); // Add the spaces EntityReference spaceReference = reference.extractReference(EntityType.SPACE); EntityReference wikiReference = reference.extractReference(EntityType.WIKI); for (EntityReference singleReference : spaceReference.removeParent(wikiReference).getReversedReferenceChain()) { path.add(singleReference.getName()); } if (reference.getType() == EntityType.DOCUMENT) { path.add(reference.getName()); } return path; } /** * @since 7.3M1 */ public String getCurrentWiki() { return this.currentWiki; } /** * @since 7.3M1 */ public String getBaseURL() { return XWikiExecutor.URL + ":" + (this.executor != null ? this.executor.getPort() : XWikiExecutor.DEFAULT_PORT) + "/xwiki/"; } /** * @since 7.3M1 */ public String getBaseBinURL() { return getBaseURL() + "bin/"; } /** * @since 7.2M1 */ public String getURL(String action, String[] path, String queryString) { StringBuilder builder = new StringBuilder(getBaseBinURL()); if (!StringUtils.isEmpty(action)) { builder.append(action).append('/'); } List<String> escapedPath = new ArrayList<>(); for (String element : path) { escapedPath.add(escapeURL(element)); } builder.append(StringUtils.join(escapedPath, '/')); boolean needToAddSecretToken = !Arrays.asList("view", "register", "download").contains(action); if (needToAddSecretToken || !StringUtils.isEmpty(queryString)) { builder.append('?'); } if (needToAddSecretToken) { addQueryStringEntry(builder, "form_token", getSecretToken()); builder.append('&'); } if (!StringUtils.isEmpty(queryString)) { builder.append(queryString); } return builder.toString(); } /** * Get the URL of an action on a page with specified parameters. If you need to pass multiple parameters with the * same key, this function will not work. * * @param space the space in which the page resides. * @param page the name of the page. * @param action the action to do on the page. * @param queryParameters the parameters to pass in the URL, these will be automatically URL encoded. 
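* For example a {@code ("xpage", "plain")} entry is serialized as {@code xpage=plain} in the query string.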
*/ public String getURL(String space, String page, String action, Map<String, ?> queryParameters) { return getURL(space, page, action, toQueryString(queryParameters)); } /** * @param space the name of the space that contains the page with the specified attachment * @param page the name of the page that holds the attachment * @param attachment the attachment name * @param action the action to perform on the attachment * @param queryString the URL query string * @return the URL that performs the specified action on the specified attachment */ public String getAttachmentURL(String space, String page, String attachment, String action, String queryString) { return getURL(action, new String[] {space, page, attachment}, queryString); } /** * @param space the name of the space that contains the page with the specified attachment * @param page the name of the page that holds the attachment * @param attachment the attachment name * @param action the action to perform on the attachment * @return the URL that performs the specified action on the specified attachment */ public String getAttachmentURL(String space, String page, String attachment, String action) { return getAttachmentURL(space, page, attachment, action, ""); } /** * @param space the name of the space that contains the page with the specified attachment * @param page the name of the page that holds the attachment * @param attachment the attachment name * @return the URL to download the specified attachment */ public String getAttachmentURL(String space, String page, String attachment) { return getAttachmentURL(space, page, attachment, "download"); } /** * (Re)-cache the secret token used for CSRF protection. A user with edit rights on Main.WebHome must be logged in. * This method must be called before {@link #getSecretToken()} is called and after each re-login. * * @see #getSecretToken() */ public void recacheSecretToken() { // Save the current URL to be able to get back after we cache the secret token. We're not using the browser's // Back button because if the current page is the result of a POST request then by going back we are re-sending // the POST data which can have unexpected results. Moreover, some browsers pop up a modal confirmation box // which blocks the test. String previousURL = getDriver().getCurrentUrl(); // Go to the registration page because the registration form uses secret token. gotoPage(getCurrentWiki(), "Register", "register"); recacheSecretTokenWhenOnRegisterPage(); // Return to the previous page. getDriver().get(previousURL); } private void recacheSecretTokenWhenOnRegisterPage() { try { WebElement tokenInput = getDriver().findElement(By.xpath("//input[@name='form_token']")); this.secretToken = tokenInput.getAttribute("value"); } catch (NoSuchElementException exception) { // Something is really wrong if this happens. System.out.println("Warning: Failed to cache anti-CSRF secret token, some tests might fail!"); exception.printStackTrace(); } } /** * Get the secret token used for CSRF protection. Remember to call {@link #recacheSecretToken()} first. * * @return anti-CSRF secret token, or empty string if the token was not cached * @see #recacheSecretToken() */ public String getSecretToken() { if (this.secretToken == null) { System.out.println("Warning: No cached anti-CSRF token found. " + "Make sure to call recacheSecretToken() before getSecretToken(), otherwise this test might fail."); return ""; } return this.secretToken; } /** * This class represents all cookies stored in the browser. 
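It also carries the cached anti-CSRF secret token, so restoring a session does not force the token to be recached.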
Use with getSession() and setSession() */ public class Session { private final Set<Cookie> cookies; private final String secretToken; private Session(final Set<Cookie> cookies, final String secretToken) { this.cookies = Collections.unmodifiableSet(new HashSet<Cookie>() { { addAll(cookies); } }); this.secretToken = secretToken; } private Set<Cookie> getCookies() { return this.cookies; } private String getSecretToken() { return this.secretToken; } } public boolean isInWYSIWYGEditMode() { return getDriver().findElements(By.xpath("//div[@id='editcolumn' and contains(@class, 'editor-wysiwyg')]")) .size() > 0; } public boolean isInWikiEditMode() { return getDriver().findElements(By.xpath("//div[@id='editcolumn' and contains(@class, 'editor-wiki')]")).size() > 0; } public boolean isInViewMode() { return !getDriver().hasElementWithoutWaiting(By.id("editMeta")); } public boolean isInSourceViewMode() { return getDriver().findElements(By.xpath("//textarea[@class = 'wiki-code']")).size() > 0; } public boolean isInInlineEditMode() { String currentURL = getDriver().getCurrentUrl(); // Keep checking the deprecated inline action for backward compatibility. return currentURL.contains("editor=inline") || currentURL.contains("/inline/"); } public boolean isInRightsEditMode() { return getDriver().getCurrentUrl().contains("editor=rights"); } public boolean isInObjectEditMode() { return getDriver().getCurrentUrl().contains("editor=object"); } public boolean isInClassEditMode() { return getDriver().getCurrentUrl().contains("editor=class"); } public boolean isInDeleteMode() { return getDriver().getCurrentUrl().contains("/delete/"); } public boolean isInRenameMode() { return getDriver().getCurrentUrl().contains("xpage=rename"); } public boolean isInCreateMode() { return getDriver().getCurrentUrl().contains("/create/"); } public boolean isInAdminMode() { return getDriver().getCurrentUrl().contains("/admin/"); } /** * Forces the current user to be the Guest user by clearing all cookies. */ public void forceGuestUser() { setSession(null); } public void addObject(String space, String page, String className, Object... properties) { gotoPage(space, page, "objectadd", toQueryParameters(className, null, properties)); } /** * @since 7.2RC1 */ public void addObject(EntityReference reference, String className, Object... properties) { gotoPage(reference, "objectadd", toQueryParameters(className, null, properties)); } public void addObject(String space, String page, String className, Map<String, ?> properties) { gotoPage(space, page, "objectadd", toQueryParameters(className, null, properties)); } public void deleteObject(String space, String page, String className, int objectNumber) { StringBuilder queryString = new StringBuilder(); queryString.append("classname="); queryString.append(escapeURL(className)); queryString.append('&'); queryString.append("classid="); queryString.append(objectNumber); gotoPage(space, page, "objectremove", queryString.toString()); } public void updateObject(String space, String page, String className, int objectNumber, Map<String, ?> properties) { gotoPage(space, page, "save", toQueryParameters(className, objectNumber, properties)); } public void updateObject(String space, String page, String className, int objectNumber, Object... properties) { // TODO: would be even quicker using REST Map<String, Object> queryParameters = (Map<String, Object>) toQueryParameters(className, objectNumber, properties); // Append the updateOrCreate objectPolicy since we always want this in our tests.
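// (with updateOrCreate, a missing object is created on save instead of the save being rejected)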
queryParameters.put("objectPolicy", "updateOrCreate"); gotoPage(space, page, "save", queryParameters); } public ClassEditPage addClassProperty(String space, String page, String propertyName, String propertyType) { gotoPage(space, page, "propadd", "propname", propertyName, "proptype", propertyType); return new ClassEditPage(); } /** * @since 3.5M1 */ public String toQueryString(Object... queryParameters) { return toQueryString(toQueryParameters(queryParameters)); } /** * @since 3.5M1 */ public String toQueryString(Map<String, ?> queryParameters) { StringBuilder builder = new StringBuilder(); for (Map.Entry<String, ?> entry : queryParameters.entrySet()) { addQueryStringEntry(builder, entry.getKey(), entry.getValue()); builder.append('&'); } return builder.toString(); } /** * @sice 3.2M1 */ public void addQueryStringEntry(StringBuilder builder, String key, Object value) { if (value != null) { if (value instanceof Iterable) { for (Object element : (Iterable<?>) value) { addQueryStringEntry(builder, key, element.toString()); builder.append('&'); } } else { addQueryStringEntry(builder, key, value.toString()); } } else { addQueryStringEntry(builder, key, (String) null); } } /** * @sice 3.2M1 */ public void addQueryStringEntry(StringBuilder builder, String key, String value) { builder.append(escapeURL(key)); if (value != null) { builder.append('='); builder.append(escapeURL(value)); } } /** * @since 3.5M1 */ public Map<String, ?> toQueryParameters(Object... properties) { return toQueryParameters(null, null, properties); } public Map<String, ?> toQueryParameters(String className, Integer objectNumber, Object... properties) { Map<String, Object> queryParameters = new HashMap<String, Object>(); queryParameters.put("classname", className); for (int i = 0; i < properties.length; i += 2) { int nextIndex = i + 1; queryParameters.put(toQueryParameterKey(className, objectNumber, (String) properties[i]), nextIndex < properties.length ? 
properties[nextIndex] : null); } return queryParameters; } public Map<String, ?> toQueryParameters(String className, Integer objectNumber, Map<String, ?> properties) { Map<String, Object> queryParameters = new HashMap<String, Object>(); if (className != null) { queryParameters.put("classname", className); } for (Map.Entry<String, ?> entry : properties.entrySet()) { queryParameters.put(toQueryParameterKey(className, objectNumber, entry.getKey()), entry.getValue()); } return queryParameters; } public String toQueryParameterKey(String className, Integer objectNumber, String key) { if (className == null) { return key; } else { StringBuilder keyBuilder = new StringBuilder(className); keyBuilder.append('_'); if (objectNumber != null) { keyBuilder.append(objectNumber); keyBuilder.append('_'); } keyBuilder.append(key); return keyBuilder.toString(); } } public ObjectEditPage editObjects(String space, String page) { gotoPage(space, page, "edit", "editor=object"); return new ObjectEditPage(); } public ClassEditPage editClass(String space, String page) { gotoPage(space, page, "edit", "editor=class"); return new ClassEditPage(); } public String getVersion() throws Exception { Xwiki xwiki = rest().getResource("", null); return xwiki.getVersion(); } public String getMavenVersion() throws Exception { String version = getVersion(); int index = version.indexOf('-'); if (index > 0) { version = version.substring(0, index) + "-SNAPSHOT"; } return version; } public void attachFile(String space, String page, String name, File file, boolean failIfExists) throws Exception { InputStream is = new FileInputStream(file); try { attachFile(space, page, name, is, failIfExists); } finally { is.close(); } } /** * @since 5.1M2 */ public void attachFile(String space, String page, String name, InputStream is, boolean failIfExists, UsernamePasswordCredentials credentials) throws Exception { attachFile(Collections.singletonList(space), page, name, is, failIfExists, credentials); } /** * @since 7.2M2 */ public void attachFile(List<String> spaces, String page, String name, InputStream is, boolean failIfExists, UsernamePasswordCredentials credentials) throws Exception { UsernamePasswordCredentials currentCredentials = getDefaultCredentials(); try { if (credentials != null) { setDefaultCredentials(credentials); } attachFile(spaces, page, name, is, failIfExists); } finally { setDefaultCredentials(currentCredentials); } } public void attachFile(String space, String page, String name, InputStream is, boolean failIfExists) throws Exception { attachFile(Collections.singletonList(space), page, name, is, failIfExists); } /** * @since 7.2M2 */ public void attachFile(List<String> spaces, String page, String name, InputStream is, boolean failIfExists) throws Exception { // make sure the target page exists if (!pageExists(spaces, page)) { createPage(spaces, page, null, null); } StringBuilder url = new StringBuilder(BASE_REST_URL); url.append("wikis/xwiki"); for (String space : spaces) { url.append("/spaces/").append(escapeURL(space)); } url.append("/pages/"); url.append(escapeURL(page)); url.append("/attachments/"); url.append(escapeURL(name)); if (failIfExists) { executePut(url.toString(), is, MediaType.APPLICATION_OCTET_STREAM, Status.CREATED.getStatusCode()); } else { executePut(url.toString(), is, MediaType.APPLICATION_OCTET_STREAM, Status.CREATED.getStatusCode(), Status.ACCEPTED.getStatusCode()); } } // FIXME: improve that with a REST API to directly import a XAR public void importXar(File file) throws Exception { // attach file
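// (the XAR is uploaded as an attachment of the XWiki.Import page so that the import action below can install it)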
attachFile("XWiki", "Import", file.getName(), file, false); // import file executeGet( getBaseBinURL() + "import/XWiki/Import?historyStrategy=add&importAsBackup=true&ajax&action=import&name=" + escapeURL(file.getName()), Status.OK.getStatusCode()); } /** * Delete the latest version from the history of a page, using the {@code /deleteversions/} action. * * @param space the space name of the page * @param page the name of the page * @since 7.0M2 */ public void deleteLatestVersion(String space, String page) { deleteVersion(space, page, "latest"); } /** * Delete a specific version from the history of a page, using the {@code /deleteversions/} action. * * @param space the space name of the page * @param page the name of the page * @param version the version to delete * @since 7.0M2 */ public void deleteVersion(String space, String page, String version) { deleteVersions(space, page, version, version); } /** * Delete an interval of versions from the history of a page, using the {@code /deleteversions/} action. * * @param space the space name of the page * @param page the name of the page * @param v1 the starting version to delete * @param v2 the ending version to delete * @since 7.0M2 */ public void deleteVersions(String space, String page, String v1, String v2) { gotoPage(space, page, "deleteversions", "rev1", v1, "rev2", v2, "confirm", "1"); } /** * Roll back a page to the previous version, using the {@code /rollback/} action. * * @param space the space name of the page * @param page the name of the page * @since 7.0M2 */ public void rollbackToPreviousVersion(String space, String page) { rollBackTo(space, page, "previous"); } /** * Roll back a page to the specified version, using the {@code /rollback/} action. * * @param space the space name of the page * @param page the name of the page * @param version the version to rollback to * @since 7.0M2 */ public void rollBackTo(String space, String page, String version) { gotoPage(space, page, "rollback", "rev", version, "confirm", "1"); } /** * Set the hierarchy mode used in the wiki * * @param mode the mode to use ("reference" or "parentchild") * @since 7.2M2 */ public void setHierarchyMode(String mode) { setPropertyInXWikiPreferences("core.hierarchyMode", "String", mode); } /** * Add and set a property into XWiki.XWikiPreferences. Create XWiki.XWikiPreferences if it does not exist. * * @param propertyName name of the property to set * @param propertyType the type of the property to add * @param value value to set to the property * @since 7.2M2 */ public void setPropertyInXWikiPreferences(String propertyName, String propertyType, Object value) { addClassProperty("XWiki", "XWikiPreferences", propertyName, propertyType); gotoPage("XWiki", "XWikiPreferences", "edit", "editor", "object"); ObjectEditPage objectEditPage = new ObjectEditPage(); if (objectEditPage.hasObject("XWiki.XWikiPreferences")) { updateObject("XWiki", "XWikiPreferences", "XWiki.XWikiPreferences", 0, propertyName, value); } else { addObject("XWiki", "XWikiPreferences", "XWiki.XWikiPreferences", propertyName, value); } } /** * @since 7.3M1 */ public static void assertStatuses(int actualCode, int... expectedCodes) { if (!ArrayUtils.contains(expectedCodes, actualCode)) { Assert.fail("Unexpected code [" + actualCode + "], was expecting one of [" + Arrays.asList(expectedCodes) + "]"); } } /** * @since 7.3M1 */ public static <M extends HttpMethod> M assertStatusCodes(M method, int... 
expectedCodes) { if (expectedCodes.length > 0) { assertStatuses(method.getStatusCode(), expectedCodes); method.releaseConnection(); } return method; } // HTTP /** * Encodes a given string so that it may be used as a URL component. Compatible with JavaScript's decodeURIComponent, * though more strict than encodeURIComponent: all characters except [a-zA-Z0-9], '.', '-', '*', '_' are converted * to hexadecimal, and spaces are substituted by '+'. * * @param s the string to encode * @return the encoded string */ public String escapeURL(String s) { try { return URLEncoder.encode(s, "UTF-8"); } catch (UnsupportedEncodingException e) { // should not happen throw new RuntimeException(e); } } public InputStream getInputStream(String path, Map<String, ?> queryParams) throws Exception { return getInputStream(getBaseURL(), path, queryParams); } public String getString(String path, Map<String, ?> queryParams) throws Exception { try (InputStream inputStream = getInputStream(getBaseURL(), path, queryParams)) { return IOUtils.toString(inputStream); } } public InputStream getInputStream(String prefix, String path, Map<String, ?> queryParams, Object... elements) throws Exception { String cleanPrefix = prefix.endsWith("/") ? prefix.substring(0, prefix.length() - 1) : prefix; if (path.startsWith(cleanPrefix)) { cleanPrefix = ""; } UriBuilder builder = UriBuilder.fromUri(cleanPrefix).path(path.startsWith("/") ? path.substring(1) : path); if (queryParams != null) { for (Map.Entry<String, ?> entry : queryParams.entrySet()) { if (entry.getValue() instanceof Object[]) { builder.queryParam(entry.getKey(), (Object[]) entry.getValue()); } else { builder.queryParam(entry.getKey(), entry.getValue()); } } } String url = builder.build(elements).toString(); return executeGet(url, Status.OK.getStatusCode()).getResponseBodyAsStream(); } protected GetMethod executeGet(String uri) throws Exception { GetMethod getMethod = new GetMethod(uri); this.httpClient.executeMethod(getMethod); return getMethod; } protected GetMethod executeGet(String uri, int... expectedCodes) throws Exception { return assertStatusCodes(executeGet(uri), expectedCodes); } /** * @since 7.3M1 */ protected PostMethod executePost(String uri, InputStream content, String mediaType) throws Exception { PostMethod postMethod = new PostMethod(uri); RequestEntity entity = new InputStreamRequestEntity(content, mediaType); postMethod.setRequestEntity(entity); this.httpClient.executeMethod(postMethod); return postMethod; } protected PostMethod executePost(String uri, InputStream content, String mediaType, int... expectedCodes) throws Exception { return assertStatusCodes(executePost(uri, content, mediaType), expectedCodes); } /** * @since 7.3M1 */ protected DeleteMethod executeDelete(String uri) throws Exception { DeleteMethod deleteMethod = new DeleteMethod(uri); this.httpClient.executeMethod(deleteMethod); return deleteMethod; } /** * @since 7.3M1 */ protected DeleteMethod executeDelete(String uri, int... expectedCodes) throws Exception { return assertStatusCodes(executeDelete(uri), expectedCodes); } /** * @since 7.3M1 */ protected PutMethod executePut(String uri, InputStream content, String mediaType) throws Exception { PutMethod putMethod = new PutMethod(uri); RequestEntity entity = new InputStreamRequestEntity(content, mediaType); putMethod.setRequestEntity(entity); this.httpClient.executeMethod(putMethod); return putMethod; } protected PutMethod executePut(String uri, InputStream content, String mediaType, int...
expectedCodes) throws Exception { return assertStatusCodes(executePut(uri, content, mediaType), expectedCodes); } // REST public RestTestUtils rest() { return this.rest; } /** * @since 7.3M1 */ public class RestTestUtils { public final Boolean ELEMENTS_ENCODED = new Boolean(true); public String getBaseURL() { return TestUtils.this.getBaseURL() + "rest"; } private String toSpaceElement(String spaceReference) { StringBuilder builder = new StringBuilder(); for (EntityReference reference : RELATIVE_RESOLVER.resolve(spaceReference, EntityType.SPACE) .getReversedReferenceChain()) { if (builder.length() > 0) { builder.append("/spaces/"); } builder.append(reference.getName()); } return builder.toString(); } protected Object[] toElements(Page page) { List<Object> elements = new ArrayList<>(); // Add wiki if (page.getWiki() != null) { elements.add(page.getWiki()); } else { elements.add(getCurrentWiki()); } // Add spaces elements.add(toSpaceElement(page.getSpace())); // Add name elements.add(page.getName()); return elements.toArray(); } protected Object[] toElements(org.xwiki.rest.model.jaxb.Object obj, boolean onlyDocument) { List<Object> elements = new ArrayList<>(); // Add wiki if (obj.getWiki() != null) { elements.add(obj.getWiki()); } else { elements.add(getCurrentWiki()); } // Add spaces elements.add(toSpaceElement(obj.getSpace())); // Add name elements.add(obj.getPageName()); if (!onlyDocument) { // Add class elements.add(obj.getClassName()); // Add number elements.add(obj.getNumber()); } return elements.toArray(); } public Object[] toElements(EntityReference reference) { List<EntityReference> references = reference.getReversedReferenceChain(); List<Object> elements = new ArrayList<>(references.size() + 2); // Indicate that elements are already encoded elements.add(ELEMENTS_ENCODED); // Add current wiki if the reference does not contain any if (reference.extractReference(EntityType.WIKI) == null) { elements.add(escapeURL(getCurrentWiki())); } // Add reference for (EntityReference ref : references) { if (ref.getType() == EntityType.SPACE) { // The URI builder does not support multiple elements like space reference so we hack it by doing // the opposite of what is done when reading the URL (generate a value looking like // "space1/spaces/space2") Object value = elements.get(elements.size() - 1); StringBuilder builder; if (value instanceof StringBuilder) { builder = (StringBuilder) value; builder.append("/spaces/"); } else { builder = new StringBuilder(); elements.add(builder); } builder.append(escapeURL(ref.getName())); } else { elements.add(escapeURL(ref.getName())); } } return elements.toArray(); } /** * Add or update. */ public EntityEnclosingMethod save(Page page, int... expectedCodes) throws Exception { return TestUtils.assertStatusCodes(executePut(PageResource.class, page, toElements(page)), expectedCodes); } /** * Add a new object. */ public EntityEnclosingMethod add(org.xwiki.rest.model.jaxb.Object obj, int... expectedCodes) throws Exception { return TestUtils.assertStatusCodes(executePost(ObjectsResource.class, obj, toElements(obj, true)), expectedCodes); } /** * Fail if the object does not exist. */ public EntityEnclosingMethod update(org.xwiki.rest.model.jaxb.Object obj, int... expectedCodes) throws Exception { return TestUtils.assertStatusCodes(executePut(ObjectResource.class, obj, toElements(obj, false)), expectedCodes); } public DeleteMethod delete(EntityReference reference, int...
expectedCodes) throws Exception { switch (reference.getType()) { case DOCUMENT: return TestUtils.assertStatusCodes(executeDelete(PageResource.class, toElements(reference)), expectedCodes); case ATTACHMENT: return TestUtils.assertStatusCodes(executeDelete(AttachmentResource.class, toElements(reference)), expectedCodes); case OBJECT: return TestUtils.assertStatusCodes(executeDelete(ObjectResource.class, toElements(reference)), expectedCodes); case OBJECT_PROPERTY: return TestUtils.assertStatusCodes( executeDelete(ObjectPropertyResource.class, toElements(reference)), expectedCodes); default: throw new Exception("Unsupported type [" + reference.getType() + "]"); } } public InputStream getInputStream(String resourceUri, Map<String, ?> queryParams, Object... elements) throws Exception { return TestUtils.this.getInputStream(getBaseURL(), resourceUri, queryParams, elements); } public InputStream postRESTInputStream(Object resourceUri, Object restObject, Object... elements) throws Exception { return postInputStream(resourceUri, restObject, Collections.<String, Object[]>emptyMap(), elements); } public InputStream postInputStream(Object resourceUri, Object restObject, Map<String, Object[]> queryParams, Object... elements) throws Exception { return executePost(resourceUri, restObject, queryParams, elements).getResponseBodyAsStream(); } protected InputStream toResourceInputStream(Object restObject) throws JAXBException { InputStream resourceStream; if (restObject instanceof InputStream) { resourceStream = (InputStream) restObject; } else { ByteArrayOutputStream stream = new ByteArrayOutputStream(); marshaller.marshal(restObject, stream); resourceStream = new ByteArrayInputStream(stream.toByteArray()); } return resourceStream; } public PostMethod executePost(Object resourceUri, Object restObject, Object... elements) throws Exception { return executePost(resourceUri, restObject, Collections.<String, Object[]>emptyMap(), elements); } public PostMethod executePost(Object resourceUri, Object restObject, Map<String, Object[]> queryParams, Object... elements) throws Exception { // Build URI String uri = createUri(resourceUri, queryParams, elements).toString(); try (InputStream resourceStream = toResourceInputStream(restObject)) { return TestUtils.this.executePost(uri, resourceStream, MediaType.APPLICATION_XML, Status.OK.getStatusCode()); } } public PutMethod executePut(Object resourceUri, Object restObject, Object... elements) throws Exception { return executePut(resourceUri, restObject, Collections.<String, Object[]>emptyMap(), elements); } public PutMethod executePut(Object resourceUri, Object restObject, Map<String, Object[]> queryParams, Object... elements) throws Exception { // Build URI String uri = createUri(resourceUri, queryParams, elements).toString(); try (InputStream resourceStream = toResourceInputStream(restObject)) { return TestUtils.this.executePut(uri, resourceStream, MediaType.APPLICATION_XML, Status.OK.getStatusCode()); } } public DeleteMethod executeDelete(Object resourceUri, Object... elements) throws Exception { return executeDelete(resourceUri, Collections.<String, Object[]>emptyMap(), elements); } public DeleteMethod executeDelete(Object resourceUri, Map<String, Object[]> queryParams, Object... elements) throws Exception { // Build URI String uri = createUri(resourceUri, queryParams, elements).toString(); return TestUtils.this.executeDelete(uri); } public URI createUri(Object resourceUri, Map<String, Object[]> queryParams, Object...
elements) { // Create URI builder UriBuilder builder = getUriBuilder(resourceUri, queryParams); // Build URI URI uri; if (elements.length > 0 && elements[0] == ELEMENTS_ENCODED) { uri = builder.buildFromEncoded(Arrays.copyOfRange(elements, 1, elements.length)); } else { uri = builder.build(elements); } return uri; } public UriBuilder getUriBuilder(Object resourceUri, Map<String, Object[]> queryParams) { // Create URI builder UriBuilder builder; if (resourceUri instanceof Class) { builder = getUriBuilder((Class) resourceUri); } else { String stringResourceUri = (String) resourceUri; builder = UriBuilder.fromUri(getBaseURL().substring(0, getBaseURL().length() - 1)).path( !stringResourceUri.isEmpty() && stringResourceUri.charAt(0) == '/' ? stringResourceUri .substring(1) : stringResourceUri); } // Add query parameters if (queryParams != null) { for (Map.Entry<String, Object[]> entry : queryParams.entrySet()) { builder.queryParam(entry.getKey(), entry.getValue()); } } return builder; } protected UriBuilder getUriBuilder(Class<?> resource) { return UriBuilder.fromUri(getBaseURL()).path(resource); } public byte[] getBuffer(String resourceUri, Map<String, Object[]> queryParams, Object... elements) throws Exception { InputStream is = getInputStream(resourceUri, queryParams, elements); byte[] buffer; try { buffer = IOUtils.toByteArray(is); } finally { is.close(); } return buffer; } public <T> T getResource(String resourceUri, Map<String, Object[]> queryParams, Object... elements) throws Exception { T resource; try (InputStream is = getInputStream(resourceUri, queryParams, elements)) { resource = (T) unmarshaller.unmarshal(is); } return resource; } } }
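// Illustrative usage sketch (added for documentation, not part of the original class; the space and page
// names below are made up for the example):
//
//   TestUtils util = new TestUtils();
//   util.loginAsSuperAdmin();
//   ViewPage page = util.createPage("Sandbox", "SomePage", "some content", "Some Title");
//   util.attachFile("Sandbox", "SomePage", "data.txt",
//       new ByteArrayInputStream("hello".getBytes()), false);
//   util.rest().delete(util.resolveDocumentReference("Sandbox.SomePage"));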
xwiki-platform-core/xwiki-platform-test/src/main/java/org/xwiki/test/ui/TestUtils.java
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.test.ui; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriBuilder; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpMethod; import org.apache.commons.httpclient.UsernamePasswordCredentials; import org.apache.commons.httpclient.auth.AuthScope; import org.apache.commons.httpclient.methods.DeleteMethod; import org.apache.commons.httpclient.methods.EntityEnclosingMethod; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.methods.InputStreamRequestEntity; import org.apache.commons.httpclient.methods.PostMethod; import org.apache.commons.httpclient.methods.PutMethod; import org.apache.commons.httpclient.methods.RequestEntity; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.junit.Assert; import org.openqa.selenium.By; import org.openqa.selenium.Cookie; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.ui.ExpectedCondition; import org.xwiki.component.manager.ComponentManager; import org.xwiki.model.EntityType; import org.xwiki.model.internal.reference.RelativeStringEntityReferenceResolver; import org.xwiki.model.reference.EntityReference; import org.xwiki.model.reference.EntityReferenceResolver; import org.xwiki.model.reference.EntityReferenceSerializer; import org.xwiki.rest.model.jaxb.ObjectFactory; import org.xwiki.rest.model.jaxb.Page; import org.xwiki.rest.model.jaxb.Xwiki; import org.xwiki.rest.resources.attachments.AttachmentResource; import org.xwiki.rest.resources.objects.ObjectPropertyResource; import org.xwiki.rest.resources.objects.ObjectResource; import org.xwiki.rest.resources.objects.ObjectsResource; import org.xwiki.rest.resources.pages.PageResource; import org.xwiki.test.integration.XWikiExecutor; import org.xwiki.test.ui.po.ViewPage; import org.xwiki.test.ui.po.editor.ClassEditPage; import 
org.xwiki.test.ui.po.editor.ObjectEditPage; /** * Helper methods for testing, not related to a specific Page Object. Also made available to tests classes. * * @version $Id$ * @since 3.2M3 */ public class TestUtils { /** * @since 5.0M2 */ public static final UsernamePasswordCredentials ADMIN_CREDENTIALS = new UsernamePasswordCredentials("Admin", "admin"); /** * @since 5.1M1 */ public static final UsernamePasswordCredentials SUPER_ADMIN_CREDENTIALS = new UsernamePasswordCredentials( "superadmin", "pass"); /** * @since 5.0M2 * @deprecated since 7.3M1, use {@link #getBaseURL()} instead */ @Deprecated public static final String BASE_URL = XWikiExecutor.URL + ":" + XWikiExecutor.DEFAULT_PORT + "/xwiki/"; /** * @since 5.0M2 * @deprecated since 7.3M1, use {@link #getBaseBinURL()} instead */ @Deprecated public static final String BASE_BIN_URL = BASE_URL + "bin/"; /** * @since 5.0M2 * @deprecated since 7.3M1, use {@link #getBaseRestURL()} instead */ @Deprecated public static final String BASE_REST_URL = BASE_URL + "rest/"; /** * @since 7.3M1 */ private static final EntityReferenceResolver<String> RELATIVE_RESOLVER = new RelativeStringEntityReferenceResolver(); /** * @since 7.3M1 */ private static final int[] STATUS_OKNOTFOUND = new int[] {Status.OK.getStatusCode(), Status.NOT_FOUND.getStatusCode()}; /** * @since 7.3M1 */ private static final int[] STATUS_OK = new int[] {Status.OK.getStatusCode()}; private static PersistentTestContext context; private static ComponentManager componentManager; private static EntityReferenceResolver<String> referenceResolver; private static EntityReferenceSerializer<String> referenceSerializer; /** * Used to convert Java object into its REST XML representation. */ private static Marshaller marshaller; /** * Used to convert REST request XML result into its Java representation. */ private static Unmarshaller unmarshaller; /** * Used to create REST Java resources. */ private static ObjectFactory objectFactory; { { try { // Initialize REST related tools JAXBContext context = JAXBContext.newInstance("org.xwiki.rest.model.jaxb" + ":org.xwiki.extension.repository.xwiki.model.jaxb"); marshaller = context.createMarshaller(); unmarshaller = context.createUnmarshaller(); objectFactory = new ObjectFactory(); } catch (JAXBException e) { throw new RuntimeException(e); } } } /** Cached secret token. TODO cache for each user. */ private String secretToken = null; private HttpClient httpClient; /** * @since 7.3M1 */ private XWikiExecutor executor; /** * @since 7.3M1 */ private String currentWiki = "xwiki"; private RestTestUtils rest = new RestTestUtils(); public TestUtils() { this.httpClient = new HttpClient(); this.httpClient.getState().setCredentials(AuthScope.ANY, SUPER_ADMIN_CREDENTIALS); this.httpClient.getParams().setAuthenticationPreemptive(true); } /** * @since 7.3M1 */ public XWikiExecutor getExecutor() { return this.executor; } /** * @since 7.3M1 */ public void setExecutor(XWikiExecutor executor) { this.executor = executor; } /** Used so that AllTests can set the persistent test context. 
*/ public static void setContext(PersistentTestContext context) { TestUtils.context = context; } public static void initializeComponent(ComponentManager componentManager) throws Exception { TestUtils.componentManager = componentManager; TestUtils.referenceResolver = TestUtils.componentManager.getInstance(EntityReferenceResolver.TYPE_STRING); TestUtils.referenceSerializer = TestUtils.componentManager.getInstance(EntityReferenceSerializer.TYPE_STRING); } protected XWikiWebDriver getDriver() { return context.getDriver(); } public Session getSession() { return this.new Session(getDriver().manage().getCookies(), getSecretToken()); } public void setSession(Session session) { WebDriver.Options options = getDriver().manage(); options.deleteAllCookies(); if (session != null) { for (Cookie cookie : session.getCookies()) { options.addCookie(cookie); } } if (session != null && !StringUtils.isEmpty(session.getSecretToken())) { this.secretToken = session.getSecretToken(); } else { recacheSecretToken(); } } /** * @since 7.0RC1 */ public void setDefaultCredentials(String username, String password) { this.httpClient.getState().setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password)); } /** * @since 7.0RC1 */ public void setDefaultCredentials(UsernamePasswordCredentials defaultCredentials) { this.httpClient.getState().setCredentials(AuthScope.ANY, defaultCredentials); } public UsernamePasswordCredentials getDefaultCredentials() { return (UsernamePasswordCredentials) this.httpClient.getState().getCredentials(AuthScope.ANY); } public void loginAsSuperAdmin() { login(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword()); } public void loginAsSuperAdminAndGotoPage(String pageURL) { loginAndGotoPage(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword(), pageURL); } public void loginAsAdmin() { login(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword()); } public void loginAsAdminAndGotoPage(String pageURL) { loginAndGotoPage(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword(), pageURL); } public void login(String username, String password) { loginAndGotoPage(username, password, null); } public void loginAndGotoPage(String username, String password, String pageURL) { if (!username.equals(getLoggedInUserName())) { // Log in and direct to a non existent page so that it loads very fast and we don't incur the time cost of // going to the home page for example. // Also recache the CSRF token getDriver().get(getURLToLoginAndGotoPage(username, password, getURL("XWiki", "Register", "register"))); recacheSecretTokenWhenOnRegisterPage(); if (pageURL != null) { // Go to the page asked getDriver().get(pageURL); } else { getDriver().get(getURLToNonExistentPage()); } setDefaultCredentials(username, password); } } /** * Consider using setSession(null) because it will drop the cookies which is faster than invoking a logout action. */ public String getURLToLogout() { return getURL("XWiki", "XWikiLogin", "logout"); } public String getURLToLoginAsAdmin() { return getURLToLoginAs(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword()); } public String getURLToLoginAsSuperAdmin() { return getURLToLoginAs(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword()); } public String getURLToLoginAs(final String username, final String password) { return getURLToLoginAndGotoPage(username, password, null); } /** * @param pageURL the URL of the page to go to after logging in. 
* @return URL to accomplish login and goto. */ public String getURLToLoginAsAdminAndGotoPage(final String pageURL) { return getURLToLoginAndGotoPage(ADMIN_CREDENTIALS.getUserName(), ADMIN_CREDENTIALS.getPassword(), pageURL); } /** * @param pageURL the URL of the page to go to after logging in. * @return URL to accomplish login and goto. */ public String getURLToLoginAsSuperAdminAndGotoPage(final String pageURL) { return getURLToLoginAndGotoPage(SUPER_ADMIN_CREDENTIALS.getUserName(), SUPER_ADMIN_CREDENTIALS.getPassword(), pageURL); } /** * @param username the name of the user to log in as. * @param password the password for the user to log in. * @param pageURL the URL of the page to go to after logging in. * @return URL to accomplish login and goto. */ public String getURLToLoginAndGotoPage(final String username, final String password, final String pageURL) { Map<String, String> parameters = new HashMap<String, String>() { { put("j_username", username); put("j_password", password); if (pageURL != null && pageURL.length() > 0) { put("xredirect", pageURL); } } }; return getURL("XWiki", "XWikiLogin", "loginsubmit", parameters); } /** * @return URL to a non-existent page that loads very fast (we are using plain mode so that we don't even have to * display the skin ;)) */ public String getURLToNonExistentPage() { return getURL("NonExistentSpace", "NonExistentPage", "view", "xpage=plain"); } /** * After successful completion of this function, you are guaranteed to be on the page passed in pageURL (any * query string is ignored when comparing URLs). */ public void assertOnPage(final String pageURL) { final String pageURI = pageURL.replaceAll("\\?.*", ""); getDriver().waitUntilCondition(new ExpectedCondition<Boolean>() { @Override public Boolean apply(WebDriver driver) { return getDriver().getCurrentUrl().contains(pageURI); } }); } public String getLoggedInUserName() { By userAvatar = By.xpath("//div[@id='xwikimainmenu']//li[contains(@class, 'navbar-avatar')]/a"); if (!getDriver().hasElementWithoutWaiting(userAvatar)) { // Guest return null; } WebElement element = getDriver().findElementWithoutWaiting(userAvatar); String href = element.getAttribute("href"); String loggedInUserName = href.substring(href.lastIndexOf("/") + 1); // Return return loggedInUserName; } public void createUserAndLogin(final String username, final String password, Object... properties) { createUserAndLoginWithRedirect(username, password, getURLToNonExistentPage(), properties); } public void createUserAndLoginWithRedirect(final String username, final String password, String url, Object... properties) { createUser(username, password, getURLToLoginAndGotoPage(username, password, url), properties); setDefaultCredentials(username, password); } public void createUser(final String username, final String password, String redirectURL, Object...
properties) { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("register", "1"); parameters.put("xwikiname", username); parameters.put("register_password", password); parameters.put("register2_password", password); parameters.put("register_email", ""); parameters.put("xredirect", redirectURL); parameters.put("form_token", getSecretToken()); getDriver().get(getURL("XWiki", "Register", "register", parameters)); recacheSecretToken(); if (properties.length > 0) { updateObject("XWiki", username, "XWiki.XWikiUsers", 0, properties); } } public ViewPage gotoPage(String space, String page) { gotoPage(space, page, "view"); return new ViewPage(); } /** * @since 7.2M2 */ public ViewPage gotoPage(EntityReference reference) { gotoPage(reference, "view"); return new ViewPage(); } public void gotoPage(String space, String page, String action) { gotoPage(space, page, action, ""); } /** * @since 7.2M2 */ public void gotoPage(EntityReference reference, String action) { gotoPage(reference, action, ""); } /** * @since 3.5M1 */ public void gotoPage(String space, String page, String action, Object... queryParameters) { gotoPage(space, page, action, toQueryString(queryParameters)); } public void gotoPage(String space, String page, String action, Map<String, ?> queryParameters) { gotoPage(Collections.singletonList(space), page, action, queryParameters); } /** * @since 7.2M2 */ public void gotoPage(List<String> spaces, String page, String action, Map<String, ?> queryParameters) { gotoPage(spaces, page, action, toQueryString(queryParameters)); } /** * @since 7.2M2 */ public void gotoPage(EntityReference reference, String action, Map<String, ?> queryParameters) { gotoPage(reference, action, toQueryString(queryParameters)); } public void gotoPage(String space, String page, String action, String queryString) { gotoPage(Collections.singletonList(space), page, action, queryString); } /** * @since 7.2M2 */ public void gotoPage(List<String> spaces, String page, String action, String queryString) { gotoPage(getURL(spaces, page, action, queryString)); } /** * @since 7.2M2 */ public void gotoPage(EntityReference reference, String action, String queryString) { gotoPage(getURL(reference, action, queryString)); // Update current wiki EntityReference wikiReference = reference.extractReference(EntityType.WIKI); if (wikiReference != null) { this.currentWiki = wikiReference.getName(); } } public void gotoPage(String url) { // Only navigate if the current URL is different from the one to go to, in order to improve performances. 
if (!getDriver().getCurrentUrl().equals(url)) { getDriver().get(url); } } public String getURLToDeletePage(String space, String page) { return getURL(space, page, "delete", "confirm=1"); } /** * @since 7.2M2 */ public String getURLToDeletePage(EntityReference reference) { return getURL(reference, "delete", "confirm=1"); } /** * @param space the name of the space to delete * @return the URL that can be used to delete the specified space * @since 4.5 */ public String getURLToDeleteSpace(String space) { return getURL(space, "WebHome", "deletespace", "confirm=1"); } public ViewPage createPage(String space, String page, String content, String title) { return createPage(Collections.singletonList(space), page, content, title); } /** * @since 7.2M2 */ public ViewPage createPage(EntityReference reference, String content, String title) { return createPage(reference, content, title, null); } /** * @since 7.2M2 */ public ViewPage createPage(List<String> spaces, String page, String content, String title) { return createPage(spaces, page, content, title, null); } public ViewPage createPage(String space, String page, String content, String title, String syntaxId) { return createPage(Collections.singletonList(space), page, content, title, syntaxId); } /** * @since 7.2M2 */ public ViewPage createPage(EntityReference reference, String content, String title, String syntaxId) { return createPage(reference, content, title, syntaxId, null); } /** * @since 7.2M2 */ public ViewPage createPage(List<String> spaces, String page, String content, String title, String syntaxId) { return createPage(spaces, page, content, title, syntaxId, null); } public ViewPage createPage(String space, String page, String content, String title, String syntaxId, String parentFullPageName) { return createPage(Collections.singletonList(space), page, content, title, syntaxId, parentFullPageName); } /** * @since 7.2M2 */ public ViewPage createPage(List<String> spaces, String page, String content, String title, String syntaxId, String parentFullPageName) { Map<String, String> queryMap = new HashMap<String, String>(); if (content != null) { queryMap.put("content", content); } if (title != null) { queryMap.put("title", title); } if (syntaxId != null) { queryMap.put("syntaxId", syntaxId); } if (parentFullPageName != null) { queryMap.put("parent", parentFullPageName); } gotoPage(spaces, page, "save", queryMap); return new ViewPage(); } /** * @since 7.2M2 */ public ViewPage createPage(EntityReference reference, String content, String title, String syntaxId, String parentFullPageName) { Map<String, String> queryMap = new HashMap<>(); if (content != null) { queryMap.put("content", content); } if (title != null) { queryMap.put("title", title); } if (syntaxId != null) { queryMap.put("syntaxId", syntaxId); } if (parentFullPageName != null) { queryMap.put("parent", parentFullPageName); } gotoPage(reference, "save", queryMap); return new ViewPage(); } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String syntaxId, String parentFullPageName, String attachmentName, InputStream attachmentData) throws Exception { return createPageWithAttachment(space, page, content, title, syntaxId, parentFullPageName, attachmentName, attachmentData, null); } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String syntaxId, String parentFullPageName, String attachmentName, InputStream attachmentData, UsernamePasswordCredentials
credentials) throws Exception { return createPageWithAttachment(Collections.singletonList(space), page, content, title, syntaxId, parentFullPageName, attachmentName, attachmentData, credentials); } /** * @since 7.2M2 */ public ViewPage createPageWithAttachment(List<String> spaces, String page, String content, String title, String syntaxId, String parentFullPageName, String attachmentName, InputStream attachmentData, UsernamePasswordCredentials credentials) throws Exception { ViewPage vp = createPage(spaces, page, content, title, syntaxId, parentFullPageName); attachFile(spaces, page, attachmentName, attachmentData, false, credentials); return vp; } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String attachmentName, InputStream attachmentData) throws Exception { return createPageWithAttachment(space, page, content, title, null, null, attachmentName, attachmentData); } /** * @since 5.1M2 */ public ViewPage createPageWithAttachment(String space, String page, String content, String title, String attachmentName, InputStream attachmentData, UsernamePasswordCredentials credentials) throws Exception { ViewPage vp = createPage(space, page, content, title); attachFile(space, page, attachmentName, attachmentData, false, credentials); return vp; } public void deletePage(String space, String page) { getDriver().get(getURLToDeletePage(space, page)); } /** * @since 7.2M2 */ public void deletePage(EntityReference reference) { getDriver().get(getURLToDeletePage(reference)); } /** * @since 7.2M2 */ public EntityReference resolveDocumentReference(String referenceAsString) { return referenceResolver.resolve(referenceAsString, EntityType.DOCUMENT); } /** * @since 7.2M3 */ public EntityReference resolveSpaceReference(String referenceAsString) { return referenceResolver.resolve(referenceAsString, EntityType.SPACE); } /** * @since 7.2RC1 */ public String serializeReference(EntityReference reference) { return referenceSerializer.serialize(reference); } /** * Accesses the URL to delete the specified space. * * @param space the name of the space to delete * @since 4.5 */ public void deleteSpace(String space) { getDriver().get(getURLToDeleteSpace(space)); } public boolean pageExists(String space, String page) { return pageExists(Collections.singletonList(space), page); } /** * @since 7.2M2 */ public boolean pageExists(List<String> spaces, String page) { boolean exists; try { executeGet(getURL(spaces, page, "view", null), Status.OK.getStatusCode()); exists = true; } catch (Exception e) { exists = false; } return exists; } /** * Get the URL to view a page. * * @param space the space in which the page resides. * @param page the name of the page. */ public String getURL(String space, String page) { return getURL(space, page, "view"); } /** * Get the URL of an action on a page. * * @param space the space in which the page resides. * @param page the name of the page. * @param action the action to do on the page. */ public String getURL(String space, String page, String action) { return getURL(space, page, action, ""); } /** * Get the URL of an action on a page with a specified query string. * * @param space the space in which the page resides. * @param page the name of the page. * @param action the action to do on the page. * @param queryString the query string to pass in the URL. 
*/ public String getURL(String space, String page, String action, String queryString) { return getURL(action, new String[] {space, page}, queryString); } /** * @since 7.2M2 */ public String getURL(List<String> spaces, String page, String action, String queryString) { List<String> path = new ArrayList<>(spaces); path.add(page); return getURL(action, path.toArray(new String[] {}), queryString); } /** * @since 7.2M2 */ public String getURL(EntityReference reference, String action, String queryString) { return getURL(action, extractListFromReference(reference).toArray(new String[] {}), queryString); } /** * @since 7.2M2 */ public String getURLFragment(EntityReference reference) { return StringUtils.join(extractListFromReference(reference), "/"); } private List<String> extractListFromReference(EntityReference reference) { List<String> path = new ArrayList<>(); // Add the spaces EntityReference spaceReference = reference.extractReference(EntityType.SPACE); EntityReference wikiReference = reference.extractReference(EntityType.WIKI); for (EntityReference singleReference : spaceReference.removeParent(wikiReference).getReversedReferenceChain()) { path.add(singleReference.getName()); } if (reference.getType() == EntityType.DOCUMENT) { path.add(reference.getName()); } return path; } /** * @since 7.3M1 */ public String getCurrentWiki() { return this.currentWiki; } /** * @since 7.3M1 */ public String getBaseURL() { return XWikiExecutor.URL + ":" + (this.executor != null ? this.executor.getPort() : XWikiExecutor.DEFAULT_PORT) + "/xwiki/"; } /** * @since 7.3M1 */ public String getBaseBinURL() { return getBaseURL() + "bin/"; } /** * @since 7.2M1 */ public String getURL(String action, String[] path, String queryString) { StringBuilder builder = new StringBuilder(getBaseBinURL()); if (!StringUtils.isEmpty(action)) { builder.append(action).append('/'); } List<String> escapedPath = new ArrayList<>(); for (String element : path) { escapedPath.add(escapeURL(element)); } builder.append(StringUtils.join(escapedPath, '/')); boolean needToAddSecretToken = !Arrays.asList("view", "register", "download").contains(action); if (needToAddSecretToken || !StringUtils.isEmpty(queryString)) { builder.append('?'); } if (needToAddSecretToken) { addQueryStringEntry(builder, "form_token", getSecretToken()); builder.append('&'); } if (!StringUtils.isEmpty(queryString)) { builder.append(queryString); } return builder.toString(); } /** * Get the URL of an action on a page with specified parameters. If you need to pass multiple parameters with the * same key, this function will not work. * * @param space the space in which the page resides. * @param page the name of the page. * @param action the action to do on the page. * @param queryParameters the parameters to pass in the URL, these will be automatically URL encoded. 
*/ public String getURL(String space, String page, String action, Map<String, ?> queryParameters) { return getURL(space, page, action, toQueryString(queryParameters)); } /** * @param space the name of the space that contains the page with the specified attachment * @param page the name of the page that holds the attachment * @param attachment the attachment name * @param action the action to perform on the attachment * @param queryString the URL query string * @return the URL that performs the specified action on the specified attachment */ public String getAttachmentURL(String space, String page, String attachment, String action, String queryString) { return getURL(action, new String[] {space, page, attachment}, queryString); } /** * @param space the name of the space that contains the page with the specified attachment * @param page the name of the page that holds the attachment * @param attachment the attachment name * @param action the action to perform on the attachment * @return the URL that performs the specified action on the specified attachment */ public String getAttachmentURL(String space, String page, String attachment, String action) { return getAttachmentURL(space, page, attachment, action, ""); } /** * @param space the name of the space that contains the page with the specified attachment * @param page the name of the page that holds the attachment * @param attachment the attachment name * @return the URL to download the specified attachment */ public String getAttachmentURL(String space, String page, String attachment) { return getAttachmentURL(space, page, attachment, "download"); } /** * (Re)-cache the secret token used for CSRF protection. A user with edit rights on Main.WebHome must be logged in. * This method must be called before {@link #getSecretToken()} is called and after each re-login. * * @see #getSecretToken() */ public void recacheSecretToken() { // Save the current URL to be able to get back after we cache the secret token. We're not using the browser's // Back button because if the current page is the result of a POST request then by going back we are re-sending // the POST data which can have unexpected results. Moreover, some browsers pop up a modal confirmation box // which blocks the test. String previousURL = getDriver().getCurrentUrl(); // Go to the registration page because the registration form uses secret token. gotoPage(getCurrentWiki(), "Register", "register"); recacheSecretTokenWhenOnRegisterPage(); // Return to the previous page. getDriver().get(previousURL); } private void recacheSecretTokenWhenOnRegisterPage() { try { WebElement tokenInput = getDriver().findElement(By.xpath("//input[@name='form_token']")); this.secretToken = tokenInput.getAttribute("value"); } catch (NoSuchElementException exception) { // Something is really wrong if this happens. System.out.println("Warning: Failed to cache anti-CSRF secret token, some tests might fail!"); exception.printStackTrace(); } } /** * Get the secret token used for CSRF protection. Remember to call {@link #recacheSecretToken()} first. * * @return anti-CSRF secret token, or empty string if the token was not cached * @see #recacheSecretToken() */ public String getSecretToken() { if (this.secretToken == null) { System.out.println("Warning: No cached anti-CSRF token found. " + "Make sure to call recacheSecretToken() before getSecretToken(), otherwise this test might fail."); return ""; } return this.secretToken; } /** * This class represents all cookies stored in the browser. 
Use with getSession() and setSession(). */ public class Session { private final Set<Cookie> cookies; private final String secretToken; private Session(final Set<Cookie> cookies, final String secretToken) { this.cookies = Collections.unmodifiableSet(new HashSet<Cookie>() { { addAll(cookies); } }); this.secretToken = secretToken; } private Set<Cookie> getCookies() { return this.cookies; } private String getSecretToken() { return this.secretToken; } } public boolean isInWYSIWYGEditMode() { return getDriver().findElements(By.xpath("//div[@id='editcolumn' and contains(@class, 'editor-wysiwyg')]")) .size() > 0; } public boolean isInWikiEditMode() { return getDriver().findElements(By.xpath("//div[@id='editcolumn' and contains(@class, 'editor-wiki')]")).size() > 0; } public boolean isInViewMode() { return !getDriver().hasElementWithoutWaiting(By.id("editMeta")); } public boolean isInSourceViewMode() { return getDriver().findElements(By.xpath("//textarea[@class = 'wiki-code']")).size() > 0; } public boolean isInInlineEditMode() { String currentURL = getDriver().getCurrentUrl(); // Keep checking the deprecated inline action for backward compatibility. return currentURL.contains("editor=inline") || currentURL.contains("/inline/"); } public boolean isInRightsEditMode() { return getDriver().getCurrentUrl().contains("editor=rights"); } public boolean isInObjectEditMode() { return getDriver().getCurrentUrl().contains("editor=object"); } public boolean isInClassEditMode() { return getDriver().getCurrentUrl().contains("editor=class"); } public boolean isInDeleteMode() { return getDriver().getCurrentUrl().contains("/delete/"); } public boolean isInRenameMode() { return getDriver().getCurrentUrl().contains("xpage=rename"); } public boolean isInCreateMode() { return getDriver().getCurrentUrl().contains("/create/"); } public boolean isInAdminMode() { return getDriver().getCurrentUrl().contains("/admin/"); } /** * Forces the current user to be the Guest user by clearing all cookies. */ public void forceGuestUser() { setSession(null); } public void addObject(String space, String page, String className, Object... properties) { gotoPage(space, page, "objectadd", toQueryParameters(className, null, properties)); } /** * @since 7.2RC1 */ public void addObject(EntityReference reference, String className, Object... properties) { gotoPage(reference, "objectadd", toQueryParameters(className, null, properties)); } public void addObject(String space, String page, String className, Map<String, ?> properties) { gotoPage(space, page, "objectadd", toQueryParameters(className, null, properties)); } public void deleteObject(String space, String page, String className, int objectNumber) { StringBuilder queryString = new StringBuilder(); queryString.append("classname="); queryString.append(escapeURL(className)); queryString.append('&'); queryString.append("classid="); queryString.append(objectNumber); gotoPage(space, page, "objectremove", queryString.toString()); } public void updateObject(String space, String page, String className, int objectNumber, Map<String, ?> properties) { gotoPage(space, page, "save", toQueryParameters(className, objectNumber, properties)); } public void updateObject(String space, String page, String className, int objectNumber, Object... properties) { // TODO: would be even quicker using REST Map<String, Object> queryParameters = (Map<String, Object>) toQueryParameters(className, objectNumber, properties); // Append the updateOrCreate objectPolicy since we always want this in our tests.
queryParameters.put("objectPolicy", "updateOrCreate"); gotoPage(space, page, "save", queryParameters); } public ClassEditPage addClassProperty(String space, String page, String propertyName, String propertyType) { gotoPage(space, page, "propadd", "propname", propertyName, "proptype", propertyType); return new ClassEditPage(); } /** * @since 3.5M1 */ public String toQueryString(Object... queryParameters) { return toQueryString(toQueryParameters(queryParameters)); } /** * @since 3.5M1 */ public String toQueryString(Map<String, ?> queryParameters) { StringBuilder builder = new StringBuilder(); for (Map.Entry<String, ?> entry : queryParameters.entrySet()) { addQueryStringEntry(builder, entry.getKey(), entry.getValue()); builder.append('&'); } return builder.toString(); } /** * @sice 3.2M1 */ public void addQueryStringEntry(StringBuilder builder, String key, Object value) { if (value != null) { if (value instanceof Iterable) { for (Object element : (Iterable<?>) value) { addQueryStringEntry(builder, key, element.toString()); builder.append('&'); } } else { addQueryStringEntry(builder, key, value.toString()); } } else { addQueryStringEntry(builder, key, (String) null); } } /** * @sice 3.2M1 */ public void addQueryStringEntry(StringBuilder builder, String key, String value) { builder.append(escapeURL(key)); if (value != null) { builder.append('='); builder.append(escapeURL(value)); } } /** * @since 3.5M1 */ public Map<String, ?> toQueryParameters(Object... properties) { return toQueryParameters(null, null, properties); } public Map<String, ?> toQueryParameters(String className, Integer objectNumber, Object... properties) { Map<String, Object> queryParameters = new HashMap<String, Object>(); queryParameters.put("classname", className); for (int i = 0; i < properties.length; i += 2) { int nextIndex = i + 1; queryParameters.put(toQueryParameterKey(className, objectNumber, (String) properties[i]), nextIndex < properties.length ? 
properties[nextIndex] : null); } return queryParameters; } public Map<String, ?> toQueryParameters(String className, Integer objectNumber, Map<String, ?> properties) { Map<String, Object> queryParameters = new HashMap<String, Object>(); if (className != null) { queryParameters.put("classname", className); } for (Map.Entry<String, ?> entry : properties.entrySet()) { queryParameters.put(toQueryParameterKey(className, objectNumber, entry.getKey()), entry.getValue()); } return queryParameters; } public String toQueryParameterKey(String className, Integer objectNumber, String key) { if (className == null) { return key; } else { StringBuilder keyBuilder = new StringBuilder(className); keyBuilder.append('_'); if (objectNumber != null) { keyBuilder.append(objectNumber); keyBuilder.append('_'); } keyBuilder.append(key); return keyBuilder.toString(); } } public ObjectEditPage editObjects(String space, String page) { gotoPage(space, page, "edit", "editor=object"); return new ObjectEditPage(); } public ClassEditPage editClass(String space, String page) { gotoPage(space, page, "edit", "editor=class"); return new ClassEditPage(); } public String getVersion() throws Exception { Xwiki xwiki = rest().getResource("", null); return xwiki.getVersion(); } public String getMavenVersion() throws Exception { String version = getVersion(); int index = version.indexOf('-'); if (index > 0) { version = version.substring(0, index) + "-SNAPSHOT"; } return version; } public void attachFile(String space, String page, String name, File file, boolean failIfExists) throws Exception { InputStream is = new FileInputStream(file); try { attachFile(space, page, name, is, failIfExists); } finally { is.close(); } } /** * @since 5.1M2 */ public void attachFile(String space, String page, String name, InputStream is, boolean failIfExists, UsernamePasswordCredentials credentials) throws Exception { attachFile(Collections.singletonList(space), page, name, is, failIfExists, credentials); } /** * @since 7.2M2 */ public void attachFile(List<String> spaces, String page, String name, InputStream is, boolean failIfExists, UsernamePasswordCredentials credentials) throws Exception { UsernamePasswordCredentials currentCredentials = getDefaultCredentials(); try { if (credentials != null) { setDefaultCredentials(credentials); } attachFile(spaces, page, name, is, failIfExists); } finally { setDefaultCredentials(currentCredentials); } } public void attachFile(String space, String page, String name, InputStream is, boolean failIfExists) throws Exception { attachFile(Collections.singletonList(space), page, name, is, failIfExists); } /** * @since 7.2M2 */ public void attachFile(List<String> spaces, String page, String name, InputStream is, boolean failIfExists) throws Exception { // make sure xwiki.Import exists if (!pageExists(spaces, page)) { createPage(spaces, page, null, null); } StringBuilder url = new StringBuilder(BASE_REST_URL); url.append("wikis/xwiki"); for (String space : spaces) { url.append("/spaces/").append(escapeURL(space)); } url.append("/pages/"); url.append(escapeURL(page)); url.append("/attachments/"); url.append(escapeURL(name)); if (failIfExists) { executePut(url.toString(), is, MediaType.APPLICATION_OCTET_STREAM, Status.CREATED.getStatusCode()); } else { executePut(url.toString(), is, MediaType.APPLICATION_OCTET_STREAM, Status.CREATED.getStatusCode(), Status.ACCEPTED.getStatusCode()); } } // FIXME: improve that with a REST API to directly import a XAR public void importXar(File file) throws Exception { // attach file 
attachFile("XWiki", "Import", file.getName(), file, false); // import file executeGet( getBaseBinURL() + "import/XWiki/Import?historyStrategy=add&importAsBackup=true&ajax&action=import&name=" + escapeURL(file.getName()), Status.OK.getStatusCode()); } /** * Delete the latest version from the history of a page, using the {@code /deleteversions/} action. * * @param space the space name of the page * @param page the name of the page * @since 7.0M2 */ public void deleteLatestVersion(String space, String page) { deleteVersion(space, page, "latest"); } /** * Delete a specific version from the history of a page, using the {@code /deleteversions/} action. * * @param space the space name of the page * @param page the name of the page * @param version the version to delete * @since 7.0M2 */ public void deleteVersion(String space, String page, String version) { deleteVersions(space, page, version, version); } /** * Delete an interval of versions from the history of a page, using the {@code /deleteversions/} action. * * @param space the space name of the page * @param page the name of the page * @param v1 the starting version to delete * @param v2 the ending version to delete * @since 7.0M2 */ public void deleteVersions(String space, String page, String v1, String v2) { gotoPage(space, page, "deleteversions", "rev1", v1, "rev2", v2, "confirm", "1"); } /** * Roll back a page to the previous version, using the {@code /rollback/} action. * * @param space the space name of the page * @param page the name of the page * @since 7.0M2 */ public void rollbackToPreviousVersion(String space, String page) { rollBackTo(space, page, "previous"); } /** * Roll back a page to the specified version, using the {@code /rollback/} action. * * @param space the space name of the page * @param page the name of the page * @param version the version to rollback to * @since 7.0M2 */ public void rollBackTo(String space, String page, String version) { gotoPage(space, page, "rollback", "rev", version, "confirm", "1"); } /** * Set the hierarchy mode used in the wiki * * @param mode the mode to use ("reference" or "parentchild") * @since 7.2M2 */ public void setHierarchyMode(String mode) { setPropertyInXWikiPreferences("core.hierarchyMode", "String", mode); } /** * Add and set a property into XWiki.XWikiPreferences. Create XWiki.XWikiPreferences if it does not exist. * * @param propertyName name of the property to set * @param propertyType the type of the property to add * @param value value to set to the property * @since 7.2M2 */ public void setPropertyInXWikiPreferences(String propertyName, String propertyType, Object value) { addClassProperty("XWiki", "XWikiPreferences", propertyName, propertyType); gotoPage("XWiki", "XWikiPreferences", "edit", "editor", "object"); ObjectEditPage objectEditPage = new ObjectEditPage(); if (objectEditPage.hasObject("XWiki.XWikiPreferences")) { updateObject("XWiki", "XWikiPreferences", "XWiki.XWikiPreferences", 0, propertyName, value); } else { addObject("XWiki", "XWikiPreferences", "XWiki.XWikiPreferences", propertyName, value); } } /** * @since 7.3M1 */ public static void assertStatuses(int actualCode, int... expectedCodes) { if (!ArrayUtils.contains(expectedCodes, actualCode)) { Assert.fail("Unexpected code [" + actualCode + "], was expecting one of [" + Arrays.asList(expectedCodes) + "]"); } } /** * @since 7.3M1 */ public static <M extends HttpMethod> M assertStatusCodes(M method, int... 
expectedCodes) { if (expectedCodes.length > 0) { assertStatuses(method.getStatusCode(), expectedCodes); method.releaseConnection(); } return method; } // HTTP /** * Encodes a given string so that it may be used as a URL component. Compatible with JavaScript decodeURIComponent, * though more strict than encodeURIComponent: all characters except [a-zA-Z0-9], '.', '-', '*', '_' are converted * to hexadecimal, and spaces are substituted by '+'. * * @param s the string to encode */ public String escapeURL(String s) { try { return URLEncoder.encode(s, "UTF-8"); } catch (UnsupportedEncodingException e) { // should not happen throw new RuntimeException(e); } } public InputStream getInputStream(String path, Map<String, ?> queryParams) throws Exception { return getInputStream(getBaseURL(), path, queryParams); } public String getString(String path, Map<String, ?> queryParams) throws Exception { try (InputStream inputStream = getInputStream(getBaseURL(), path, queryParams)) { return IOUtils.toString(inputStream); } } public InputStream getInputStream(String prefix, String path, Map<String, ?> queryParams, Object... elements) throws Exception { String cleanPrefix = prefix.endsWith("/") ? prefix.substring(0, prefix.length() - 1) : prefix; if (path.startsWith(cleanPrefix)) { cleanPrefix = ""; } UriBuilder builder = UriBuilder.fromUri(cleanPrefix).path(path.startsWith("/") ? path.substring(1) : path); if (queryParams != null) { for (Map.Entry<String, ?> entry : queryParams.entrySet()) { if (entry.getValue() instanceof Object[]) { builder.queryParam(entry.getKey(), (Object[]) entry.getValue()); } else { builder.queryParam(entry.getKey(), entry.getValue()); } } } String url = builder.build(elements).toString(); return executeGet(url, Status.OK.getStatusCode()).getResponseBodyAsStream(); } protected GetMethod executeGet(String uri) throws Exception { GetMethod getMethod = new GetMethod(uri); this.httpClient.executeMethod(getMethod); return getMethod; } protected GetMethod executeGet(String uri, int... expectedCodes) throws Exception { return assertStatusCodes(executeGet(uri), expectedCodes); } /** * @since 7.3M1 */ protected PostMethod executePost(String uri, InputStream content, String mediaType) throws Exception { PostMethod postMethod = new PostMethod(uri); RequestEntity entity = new InputStreamRequestEntity(content, mediaType); postMethod.setRequestEntity(entity); this.httpClient.executeMethod(postMethod); return postMethod; } protected PostMethod executePost(String uri, InputStream content, String mediaType, int... expectedCodes) throws Exception { return assertStatusCodes(executePost(uri, content, mediaType), expectedCodes); } /** * @since 7.3M1 */ protected DeleteMethod executeDelete(String uri) throws Exception { DeleteMethod deleteMethod = new DeleteMethod(uri); this.httpClient.executeMethod(deleteMethod); return deleteMethod; } /** * @since 7.3M1 */ protected DeleteMethod executeDelete(String uri, int... expectedCodes) throws Exception { return assertStatusCodes(executeDelete(uri), expectedCodes); } /** * @since 7.3M1 */ protected PutMethod executePut(String uri, InputStream content, String mediaType) throws Exception { PutMethod putMethod = new PutMethod(uri); RequestEntity entity = new InputStreamRequestEntity(content, mediaType); putMethod.setRequestEntity(entity); this.httpClient.executeMethod(putMethod); return putMethod; } protected PutMethod executePut(String uri, InputStream content, String mediaType, int...
expectedCodes) throws Exception { return assertStatusCodes(executePut(uri, content, mediaType), expectedCodes); } // REST public RestTestUtils rest() { return this.rest; } /** * @since 7.3M1 */ public class RestTestUtils { public final Boolean ELEMENTS_ENCODED = new Boolean(true); /** * @since 7.3M1 */ public String getBaseURL() { return TestUtils.this.getBaseURL() + "rest"; } private String toSpaceElement(String spaceReference) { StringBuilder builder = new StringBuilder(); for (EntityReference reference : RELATIVE_RESOLVER.resolve(spaceReference, EntityType.SPACE) .getReversedReferenceChain()) { if (builder.length() > 0) { builder.append("/spaces/"); } builder.append(reference.getName()); } return builder.toString(); } protected Object[] toElements(Page page) { List<Object> elements = new ArrayList<>(); // Add wiki if (page.getWiki() != null) { elements.add(page.getWiki()); } else { elements.add(getCurrentWiki()); } // Add spaces elements.add(toSpaceElement(page.getSpace())); // Add name elements.add(page.getName()); return elements.toArray(); } protected Object[] toElements(org.xwiki.rest.model.jaxb.Object obj, boolean onlyDocument) { List<Object> elements = new ArrayList<>(); // Add wiki if (obj.getWiki() != null) { elements.add(obj.getWiki()); } else { elements.add(getCurrentWiki()); } // Add spaces elements.add(toSpaceElement(obj.getSpace())); // Add name elements.add(obj.getPageName()); if (!onlyDocument) { // Add class elements.add(obj.getClassName()); // Add number elements.add(obj.getNumber()); } return elements.toArray(); } public Object[] toElements(EntityReference reference) { List<EntityReference> references = reference.getReversedReferenceChain(); List<Object> elements = new ArrayList<>(references.size() + 2); // Indicate that elements are already encoded elements.add(ELEMENTS_ENCODED); // Add current wiki if the reference does not contain any if (reference.extractReference(EntityType.WIKI) == null) { elements.add(escapeURL(getCurrentWiki())); } // Add reference for (EntityReference ref : references) { if (ref.getType() == EntityType.SPACE) { // The URI builder does not support multiple elements like a space reference, so we hack it by doing // the opposite of what is done when reading the URL (generate a value looking like // "space1/spaces/space2") Object value = elements.get(elements.size() - 1); StringBuilder builder; if (value instanceof StringBuilder) { builder = (StringBuilder) value; builder.append("/spaces/"); } else { builder = new StringBuilder(); elements.add(builder); } builder.append(escapeURL(ref.getName())); } else { elements.add(escapeURL(ref.getName())); } } return elements.toArray(); } /** * Add or update. */ public EntityEnclosingMethod save(Page page, int... expectedCodes) throws Exception { return TestUtils.assertStatusCodes(executePut(PageResource.class, page, toElements(page)), expectedCodes); } /** * Add a new object. */ public EntityEnclosingMethod add(org.xwiki.rest.model.jaxb.Object obj, int... expectedCodes) throws Exception { return TestUtils.assertStatusCodes(executePost(ObjectsResource.class, obj, toElements(obj, true)), expectedCodes); } /** * Fail if the object does not exist. */ public EntityEnclosingMethod update(org.xwiki.rest.model.jaxb.Object obj, int... expectedCodes) throws Exception { return TestUtils.assertStatusCodes(executePut(ObjectResource.class, obj, toElements(obj, false)), expectedCodes); } public DeleteMethod delete(EntityReference reference, int...
expectedCodes) throws Exception { switch (reference.getType()) { case DOCUMENT: return TestUtils.assertStatusCodes(executeDelete(PageResource.class, toElements(reference)), expectedCodes); case ATTACHMENT: return TestUtils.assertStatusCodes(executeDelete(AttachmentResource.class, toElements(reference)), expectedCodes); case OBJECT: return TestUtils.assertStatusCodes(executeDelete(ObjectResource.class, toElements(reference)), expectedCodes); case OBJECT_PROPERTY: return TestUtils.assertStatusCodes( executeDelete(ObjectPropertyResource.class, toElements(reference)), expectedCodes); default: throw new Exception("Unsupported type [" + reference.getType() + "]"); } } public InputStream getInputStream(String resourceUri, Map<String, ?> queryParams, Object... elements) throws Exception { return TestUtils.this.getInputStream(getBaseURL(), resourceUri, queryParams, elements); } public InputStream postRESTInputStream(Object resourceUri, Object restObject, Object... elements) throws Exception { return postInputStream(resourceUri, restObject, Collections.<String, Object[]>emptyMap(), elements); } public InputStream postInputStream(Object resourceUri, Object restObject, Map<String, Object[]> queryParams, Object... elements) throws Exception { return executePost(resourceUri, restObject, queryParams, elements).getResponseBodyAsStream(); } protected InputStream toResourceInputStream(Object restObject) throws JAXBException { InputStream resourceStream; if (restObject instanceof InputStream) { resourceStream = (InputStream) restObject; } else { ByteArrayOutputStream stream = new ByteArrayOutputStream(); marshaller.marshal(restObject, stream); resourceStream = new ByteArrayInputStream(stream.toByteArray()); } return resourceStream; } public PostMethod executePost(Object resourceUri, Object restObject, Object... elements) throws Exception { return executePost(resourceUri, restObject, Collections.<String, Object[]>emptyMap(), elements); } public PostMethod executePost(Object resourceUri, Object restObject, Map<String, Object[]> queryParams, Object... elements) throws Exception { // Build URI String uri = createUri(resourceUri, queryParams, elements).toString(); try (InputStream resourceStream = toResourceInputStream(restObject)) { return TestUtils.this.executePost(uri, resourceStream, MediaType.APPLICATION_XML, Status.OK.getStatusCode()); } } public PutMethod executePut(Object resourceUri, Object restObject, Object... elements) throws Exception { return executePut(resourceUri, restObject, Collections.<String, Object[]>emptyMap(), elements); } public PutMethod executePut(Object resourceUri, Object restObject, Map<String, Object[]> queryParams, Object... elements) throws Exception { // Build URI String uri = createUri(resourceUri, queryParams, elements).toString(); try (InputStream resourceStream = toResourceInputStream(restObject)) { return TestUtils.this.executePut(uri, resourceStream, MediaType.APPLICATION_XML, Status.OK.getStatusCode()); } } public DeleteMethod executeDelete(Object resourceUri, Object... elements) throws Exception { return executeDelete(resourceUri, Collections.<String, Object[]>emptyMap(), elements); } public DeleteMethod executeDelete(Object resourceUri, Map<String, Object[]> queryParams, Object... elements) throws Exception { // Build URI String uri = createUri(resourceUri, queryParams, elements).toString(); return TestUtils.this.executeDelete(uri); } public URI createUri(Object resourceUri, Map<String, Object[]> queryParams, Object...
elements) { // Create URI builder UriBuilder builder = getUriBuilder(resourceUri, queryParams); // Build URI URI uri; if (elements.length > 0 && elements[0] == ELEMENTS_ENCODED) { uri = builder.buildFromEncoded(Arrays.copyOfRange(elements, 1, elements.length)); } else { uri = builder.build(elements); } return uri; } public UriBuilder getUriBuilder(Object resourceUri, Map<String, Object[]> queryParams) { // Create URI builder UriBuilder builder; if (resourceUri instanceof Class) { builder = getUriBuilder((Class) resourceUri); } else { String stringResourceUri = (String) resourceUri; builder = UriBuilder.fromUri(getBaseURL().substring(0, getBaseURL().length() - 1)).path( !stringResourceUri.isEmpty() && stringResourceUri.charAt(0) == '/' ? stringResourceUri .substring(1) : stringResourceUri); } // Add query parameters if (queryParams != null) { for (Map.Entry<String, Object[]> entry : queryParams.entrySet()) { builder.queryParam(entry.getKey(), entry.getValue()); } } return builder; } protected UriBuilder getUriBuilder(Class<?> resource) { return UriBuilder.fromUri(getBaseURL()).path(resource); } public byte[] getBuffer(String resourceUri, Map<String, Object[]> queryParams, Object... elements) throws Exception { InputStream is = getInputStream(resourceUri, queryParams, elements); byte[] buffer; try { buffer = IOUtils.toByteArray(is); } finally { is.close(); } return buffer; } public <T> T getResource(String resourceUri, Map<String, Object[]> queryParams, Object... elements) throws Exception { T resource; try (InputStream is = getInputStream(resourceUri, queryParams, elements)) { resource = (T) unmarshaller.unmarshal(is); } return resource; } } }
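Since the helper class above is driven entirely through its public API, a short usage sketch may be clearer than prose. The snippet below is illustrative only and not part of the committed file: the test class and method names are hypothetical, every TestUtils call is taken verbatim from the code above, and it assumes the persistent test context has already been wired up by the test runner (AllTests) before the helper is used.

import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.xwiki.test.ui.TestUtils;

public class TestUtilsUsageSketch
{
    public void createPageWithAttachmentSketch(TestUtils util) throws Exception
    {
        // Log in as Admin; this also recaches the anti-CSRF secret token.
        util.loginAsAdmin();

        // Save a page directly through the /save/ action (no editor round trip).
        util.createPage("Sandbox", "UtilsDemo", "Some content.", "Utils Demo");

        // Upload an attachment through the REST API; failIfExists=false tolerates overwrites.
        InputStream data = new ByteArrayInputStream("hello".getBytes("UTF-8"));
        util.attachFile("Sandbox", "UtilsDemo", "note.txt", data, false);

        // Clean up by triggering the /delete/ action with confirm=1.
        util.deletePage("Sandbox", "UtilsDemo");
    }
}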
[misc] Remove redundant @since
xwiki-platform-core/xwiki-platform-test/src/main/java/org/xwiki/test/ui/TestUtils.java
[misc] Remove redundant @since
<ide><path>wiki-platform-core/xwiki-platform-test/src/main/java/org/xwiki/test/ui/TestUtils.java <ide> { <ide> public final Boolean ELEMENTS_ENCODED = new Boolean(true); <ide> <del> /** <del> * @since 7.3M1 <del> */ <ide> public String getBaseURL() <ide> { <ide> return TestUtils.this.getBaseURL() + "rest";
Java
apache-2.0
ca385ad45ef46ea3f021986121f2d04846d2e7a5
0
xfournet/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,xfournet/intellij-community,fitermay/intellij-community,apixandru/intellij-community,semonte/intellij-community,FHannes/intellij-community,signed/intellij-community,suncycheng/intellij-community,signed/intellij-community,signed/intellij-community,semonte/intellij-community,suncycheng/intellij-community,allotria/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,xfournet/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,apixandru/intellij-community,allotria/intellij-community,FHannes/intellij-community,apixandru/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,youdonghai/intellij-community,da1z/intellij-community,da1z/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,signed/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,signed/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,semonte/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,fitermay/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,da1z/intellij-community,asedunov/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,signed/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,allotria/intellij-community,fitermay/intellij-community,semonte/intellij-community,semonte/intellij-community,fitermay/intellij-community,ibinti/intellij-community,apixandru/intellij-community,semonte/intellij-community,asedunov/intellij-community,semonte/intellij-community,semonte/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,signed/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,fitermay/intellij-community,ibinti/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,ibinti/intellij-community,da1z/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,signed/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,da1z/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/i
ntellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,allotria/intellij-community,apixandru/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,signed/intellij-community,asedunov/intellij-community,da1z/intellij-community,signed/intellij-community,semonte/intellij-community,ibinti/intellij-community,asedunov/intellij-community,allotria/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,signed/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,da1z/intellij-community,FHannes/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,da1z/intellij-community,youdonghai/intellij-community,da1z/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,semonte/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ibinti/intellij-community,FHannes/intellij-community,apixandru/intellij-community,allotria/intellij-community,fitermay/intellij-community,signed/intellij-community,apixandru/intellij-community,semonte/intellij-community,da1z/intellij-community,semonte/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,allotria/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ibinti/intellij-community,ibinti/intellij-community,xfournet/intellij-community,youdonghai/intellij-community
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.gradle.model.data; import java.io.Serializable; import java.util.List; /** * @author Vladislav.Soroka * @since 1/31/14 */ public class ScalaCompileOptionsData implements Serializable { private static final long serialVersionUID = 1L; private boolean useCompileDaemon; private String daemonServer; private boolean failOnError; private boolean deprecation; private boolean unchecked; private String debugLevel; private boolean optimize; private String encoding; private String force; private String targetCompatibility; private List<String> additionalParameters; private boolean listFiles; private String loggingLevel; private List<String> loggingPhases; private boolean fork; private ScalaForkOptions forkOptions; private boolean useAnt; /** * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed */ public boolean isUseCompileDaemon() { return useCompileDaemon; } public void setUseCompileDaemon(boolean useCompileDaemon) { this.useCompileDaemon = useCompileDaemon; } /** * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed */ public String getDaemonServer() { return daemonServer; } public void setDaemonServer(String daemonServer) { this.daemonServer = daemonServer; } public boolean isFailOnError() { return failOnError; } public void setFailOnError(boolean failOnError) { this.failOnError = failOnError; } public boolean isDeprecation() { return deprecation; } public void setDeprecation(boolean deprecation) { this.deprecation = deprecation; } public boolean isUnchecked() { return unchecked; } public void setUnchecked(boolean unchecked) { this.unchecked = unchecked; } public String getDebugLevel() { return debugLevel; } public void setDebugLevel(String debugLevel) { this.debugLevel = debugLevel; } public boolean isOptimize() { return optimize; } public void setOptimize(boolean optimize) { this.optimize = optimize; } public String getEncoding() { return encoding; } public void setEncoding(String encoding) { this.encoding = encoding; } public String getForce() { return force; } public void setForce(String force) { this.force = force; } public String getTargetCompatibility() { return targetCompatibility; } public void setTargetCompatibility(String targetCompatibility) { this.targetCompatibility = targetCompatibility; } public List<String> getAdditionalParameters() { return additionalParameters; } public void setAdditionalParameters(List<String> additionalParameters) { this.additionalParameters = additionalParameters; } public boolean isListFiles() { return listFiles; } public void setListFiles(boolean listFiles) { this.listFiles = listFiles; } public String getLoggingLevel() { return loggingLevel; } public void setLoggingLevel(String loggingLevel) { this.loggingLevel = loggingLevel; } public List<String> getLoggingPhases() { return loggingPhases; } public void setLoggingPhases(List<String> loggingPhases) { 
this.loggingPhases = loggingPhases; } /** * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed */ public boolean isFork() { return fork; } public void setFork(boolean fork) { this.fork = fork; } /** * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed */ public boolean isUseAnt() { return useAnt; } public void setUseAnt(boolean useAnt) { this.useAnt = useAnt; } public ScalaForkOptions getForkOptions() { return forkOptions; } public void setForkOptions(ScalaForkOptions forkOptions) { this.forkOptions = forkOptions; } public static class ScalaForkOptions implements Serializable { private static final long serialVersionUID = 1L; private String memoryInitialSize; private String memoryMaximumSize; private List<String> jvmArgs; public String getMemoryInitialSize() { return memoryInitialSize; } public void setMemoryInitialSize(String memoryInitialSize) { this.memoryInitialSize = memoryInitialSize; } public String getMemoryMaximumSize() { return memoryMaximumSize; } public void setMemoryMaximumSize(String memoryMaximumSize) { this.memoryMaximumSize = memoryMaximumSize; } public List<String> getJvmArgs() { return jvmArgs; } public void setJvmArgs(List<String> jvmArgs) { this.jvmArgs = jvmArgs; } } }
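As a quick illustration of the data holder above (not part of the commit), here is how importer code might populate it; the sketch class name and option values are made up, and the Ant-related setters that this change deprecates are deliberately left untouched.

import java.util.Arrays;

import org.jetbrains.plugins.gradle.model.data.ScalaCompileOptionsData;

public class ScalaCompileOptionsDataSketch {
  public static ScalaCompileOptionsData sample() {
    ScalaCompileOptionsData data = new ScalaCompileOptionsData();
    // Plain compiler flags carried over from the Gradle model.
    data.setFailOnError(true);
    data.setEncoding("UTF-8");
    data.setAdditionalParameters(Arrays.asList("-feature", "-deprecation"));

    // Fork options are modeled as a nested serializable bean.
    ScalaCompileOptionsData.ScalaForkOptions fork = new ScalaCompileOptionsData.ScalaForkOptions();
    fork.setMemoryMaximumSize("1g");
    fork.setJvmArgs(Arrays.asList("-XX:+UseG1GC"));
    data.setForkOptions(fork);
    return data;
  }
}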
plugins/gradle/src/org/jetbrains/plugins/gradle/model/data/ScalaCompileOptionsData.java
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.gradle.model.data;

import java.io.Serializable;
import java.util.List;

/**
 * @author Vladislav.Soroka
 * @since 1/31/14
 */
public class ScalaCompileOptionsData implements Serializable {
  private static final long serialVersionUID = 1L;

  private boolean useCompileDaemon;
  private String daemonServer;
  private boolean failOnError;
  private boolean deprecation;
  private boolean unchecked;
  private String debugLevel;
  private boolean optimize;
  private String encoding;
  private String force;
  private String targetCompatibility;
  private List<String> additionalParameters;
  private boolean listFiles;
  private String loggingLevel;
  private List<String> loggingPhases;
  private boolean fork;
  private ScalaForkOptions forkOptions;
  private boolean useAnt;

  public boolean isUseCompileDaemon() {
    return useCompileDaemon;
  }

  public void setUseCompileDaemon(boolean useCompileDaemon) {
    this.useCompileDaemon = useCompileDaemon;
  }

  public String getDaemonServer() {
    return daemonServer;
  }

  public void setDaemonServer(String daemonServer) {
    this.daemonServer = daemonServer;
  }

  public boolean isFailOnError() {
    return failOnError;
  }

  public void setFailOnError(boolean failOnError) {
    this.failOnError = failOnError;
  }

  public boolean isDeprecation() {
    return deprecation;
  }

  public void setDeprecation(boolean deprecation) {
    this.deprecation = deprecation;
  }

  public boolean isUnchecked() {
    return unchecked;
  }

  public void setUnchecked(boolean unchecked) {
    this.unchecked = unchecked;
  }

  public String getDebugLevel() {
    return debugLevel;
  }

  public void setDebugLevel(String debugLevel) {
    this.debugLevel = debugLevel;
  }

  public boolean isOptimize() {
    return optimize;
  }

  public void setOptimize(boolean optimize) {
    this.optimize = optimize;
  }

  public String getEncoding() {
    return encoding;
  }

  public void setEncoding(String encoding) {
    this.encoding = encoding;
  }

  public String getForce() {
    return force;
  }

  public void setForce(String force) {
    this.force = force;
  }

  public String getTargetCompatibility() {
    return targetCompatibility;
  }

  public void setTargetCompatibility(String targetCompatibility) {
    this.targetCompatibility = targetCompatibility;
  }

  public List<String> getAdditionalParameters() {
    return additionalParameters;
  }

  public void setAdditionalParameters(List<String> additionalParameters) {
    this.additionalParameters = additionalParameters;
  }

  public boolean isListFiles() {
    return listFiles;
  }

  public void setListFiles(boolean listFiles) {
    this.listFiles = listFiles;
  }

  public String getLoggingLevel() {
    return loggingLevel;
  }

  public void setLoggingLevel(String loggingLevel) {
    this.loggingLevel = loggingLevel;
  }

  public List<String> getLoggingPhases() {
    return loggingPhases;
  }

  public void setLoggingPhases(List<String> loggingPhases) {
    this.loggingPhases = loggingPhases;
  }

  public boolean isFork() {
    return fork;
  }

  public void setFork(boolean fork) {
    this.fork = fork;
  }

  public boolean isUseAnt() {
    return useAnt;
  }

  public void setUseAnt(boolean useAnt) {
    this.useAnt = useAnt;
  }

  public ScalaForkOptions getForkOptions() {
    return forkOptions;
  }

  public void setForkOptions(ScalaForkOptions forkOptions) {
    this.forkOptions = forkOptions;
  }

  public static class ScalaForkOptions implements Serializable {
    private static final long serialVersionUID = 1L;

    private String memoryInitialSize;
    private String memoryMaximumSize;
    private List<String> jvmArgs;

    public String getMemoryInitialSize() {
      return memoryInitialSize;
    }

    public void setMemoryInitialSize(String memoryInitialSize) {
      this.memoryInitialSize = memoryInitialSize;
    }

    public String getMemoryMaximumSize() {
      return memoryMaximumSize;
    }

    public void setMemoryMaximumSize(String memoryMaximumSize) {
      this.memoryMaximumSize = memoryMaximumSize;
    }

    public List<String> getJvmArgs() {
      return jvmArgs;
    }

    public void setJvmArgs(List<String> jvmArgs) {
      this.jvmArgs = jvmArgs;
    }
  }
}
IDEA-160175 Error importing Scala project from Gradle 3.0
plugins/gradle/src/org/jetbrains/plugins/gradle/model/data/ScalaCompileOptionsData.java
IDEA-160175 Error importing Scala project from Gradle 3.0
<ide><path>lugins/gradle/src/org/jetbrains/plugins/gradle/model/data/ScalaCompileOptionsData.java <ide> private ScalaForkOptions forkOptions; <ide> private boolean useAnt; <ide> <add> /** <add> * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed <add> */ <ide> public boolean isUseCompileDaemon() { <ide> return useCompileDaemon; <ide> } <ide> this.useCompileDaemon = useCompileDaemon; <ide> } <ide> <add> /** <add> * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed <add> */ <ide> public String getDaemonServer() { <ide> return daemonServer; <ide> } <ide> this.loggingPhases = loggingPhases; <ide> } <ide> <add> /** <add> * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed <add> */ <ide> public boolean isFork() { <ide> return fork; <ide> } <ide> this.fork = fork; <ide> } <ide> <add> /** <add> * @deprecated see https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed <add> */ <ide> public boolean isUseAnt() { <ide> return useAnt; <ide> }
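The diff above only documents the removal: each Ant-based accessor (useCompileDaemon, daemonServer, fork, useAnt) gets a @deprecated javadoc pointing at the Gradle 3.0 release notes. A consumer of this model still has to decide what to do when an imported project sets those options. Below is a minimal, hypothetical sketch (the checker class is an assumption for illustration, not part of the IDEA codebase) that warns instead of failing the import:

// Hypothetical consumer-side guard, not part of the IDEA codebase: warns when
// an imported ScalaCompileOptionsData still carries Ant-based settings that
// Gradle 3.0 removed, instead of letting the import fail on them.
public final class AntScalaOptionsChecker {

    private AntScalaOptionsChecker() {
    }

    public static void warnOnRemovedOptions(ScalaCompileOptionsData options) {
        if (options.isUseAnt() || options.isUseCompileDaemon() || options.isFork()) {
            System.err.println("Ant-based Scala compiler options are ignored since Gradle 3.0, see "
                + "https://docs.gradle.org/3.0/release-notes#ant-based-scala-compiler-has-been-removed");
        }
    }
}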
Java
apache-2.0
3ad379f0007281a4d292cac72ddc66e6ff6f8b39
0
StQuote/VisEditor,piotr-j/VisEditor,piotr-j/VisEditor,kotcrab/vis-editor,kotcrab/vis-editor,kotcrab/VisEditor
/*
 * Copyright 2014-2015 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.kotcrab.vis.ui.widget.tabbedpane;

import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.utils.Disposable;

/**
 * Base class for tabs used in TabbedPane. A tab can be savable, meaning that it can be saved and will display a
 * 'do you want to save changes' warning dialog before closing. A tab can also be closeable by the user, meaning that
 * the user can close it manually from the tabbed pane (using the 'X' button or by pressing the mouse wheel on the tab).
 * @author Kotcrab
 */
public abstract class Tab implements Disposable {
    private boolean activeTab;
    private TabbedPane pane;

    private boolean closeableByUser = true;
    private boolean savable = false;
    private boolean dirty = false;

    public Tab () {
    }

    /** @param savable if true tab can be saved and marked as dirty */
    public Tab (boolean savable) {
        this.savable = savable;
    }

    /**
     * @param savable if true tab can be saved and marked as dirty
     * @param closeableByUser if true tab can be closed by user from tabbed pane
     */
    public Tab (boolean savable, boolean closeableByUser) {
        this.savable = savable;
        this.closeableByUser = closeableByUser;
    }

    /** @return tab title used by tabbed pane */
    public abstract String getTabTitle ();

    /** @return table that contains this tab view, will be passed to tabbed pane listener */
    public abstract Table getContentTable ();

    /** Called by pane when this tab becomes shown. Classes overriding this should call super.onShow() */
    public void onShow () {
        activeTab = true;
    }

    /** Called by pane when this tab becomes hidden. Classes overriding this should call super.onHide() */
    public void onHide () {
        activeTab = false;
    }

    /** @return true if this tab is currently active */
    public boolean isActiveTab () {
        return activeTab;
    }

    /** @return pane that this tab belongs to, or null */
    public TabbedPane getPane () {
        return pane;
    }

    /** Should be called by TabbedPane only, when tab is added to pane */
    public void setPane (TabbedPane pane) {
        this.pane = pane;
    }

    public boolean isSavable () {
        return savable;
    }

    public boolean isCloseableByUser () {
        return closeableByUser;
    }

    public boolean isDirty () {
        return dirty;
    }

    public void setDirty (boolean dirty) {
        checkSavable();

        boolean update = (dirty != this.dirty);

        if (update) {
            this.dirty = dirty;
            if (pane != null) getPane().updateTabTitle(this);
        }
    }

    /** Marks this tab as dirty */
    public void dirty () {
        setDirty(true);
    }

    /**
     * Called when this tab should save its own state. After saving, setDirty(false) must be called manually to remove
     * the dirty state.
     * @return true when save succeeded, false otherwise
     */
    public boolean save () {
        checkSavable();
        return false;
    }

    private void checkSavable () {
        if (isSavable() == false) throw new IllegalStateException("Tab " + getTabTitle() + " is not savable!");
    }

    /** Removes this tab from pane (if any) */
    public void removeFromTabPane () {
        if (pane != null) pane.remove(this);
    }

    /** Called when tab is being removed from scene */
    @Override
    public void dispose () {
    }
}
UI/src/com/kotcrab/vis/ui/widget/tabbedpane/Tab.java
/*
 * Copyright 2014-2015 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.kotcrab.vis.ui.widget.tabbedpane;

import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.utils.Disposable;

/**
 * Base class for tabs used in TabbedPane. A tab can be savable, meaning that it can be saved and will display a
 * 'do you want to save changes' warning dialog before closing. A tab can also be closeable by the user, meaning that
 * the user can close it manually from the tabbed pane (using the 'X' button or by pressing the mouse wheel on the tab).
 * @author Kotcrab
 */
public abstract class Tab implements Disposable {
    private boolean activeTab;
    private TabbedPane pane;

    private boolean closeableByUser = true;
    private boolean savable = false;
    private boolean dirty = false;

    public Tab () {
    }

    /** @param savable if true tab can be saved and marked as dirty */
    public Tab (boolean savable) {
        this.savable = savable;
    }

    /**
     * @param savable if true tab can be saved and marked as dirty
     * @param closeableByUser if true tab can be closed by user from tabbed pane
     */
    public Tab (boolean savable, boolean closeableByUser) {
        this.savable = savable;
        this.closeableByUser = closeableByUser;
    }

    /** @return tab title used by tabbed pane */
    public abstract String getTabTitle ();

    /** @return table that contains this tab view, will be passed to tabbed pane listener */
    public abstract Table getContentTable ();

    /** Called by pane when this tab becomes shown. Classes overriding this should call super.onShow() */
    public void onShow () {
        activeTab = true;
    }

    /** Called by pane when this tab becomes hidden. Classes overriding this should call super.onHide() */
    public void onHide () {
        activeTab = false;
    }

    /** @return true if this tab is currently active */
    public boolean isActiveTab () {
        return activeTab;
    }

    /** @return pane that this tab belongs to, or null */
    public TabbedPane getPane () {
        return pane;
    }

    /** Should be called by TabbedPane only, when tab is added to pane */
    public void setPane (TabbedPane pane) {
        this.pane = pane;
    }

    public boolean isSavable () {
        return savable;
    }

    public boolean isCloseableByUser () {
        return closeableByUser;
    }

    public boolean isDirty () {
        return dirty;
    }

    public void setDirty (boolean dirty) {
        checkSavable();

        boolean update = (dirty != this.dirty);

        if (update) {
            this.dirty = dirty;
            if (pane != null) getPane().updateTabTitle(this);
        }
    }

    /** Marks this tab as dirty */
    public void dirty () {
        setDirty(true);
    }

    /**
     * Called when this tab should save its own state. After saving, setDirty(false) must be called manually to remove
     * the dirty state.
     * @return true when save succeeded, false otherwise
     */
    public boolean save () {
        checkSavable();
        return false;
    }

    private void checkSavable () {
        if (savable == false) throw new IllegalStateException("Tab " + getTabTitle() + " is not savable!");
    }

    /** Removes this tab from pane (if any) */
    public void removeFromTabPane () {
        if (pane != null) pane.remove(this);
    }

    /** Called when tab is being removed from scene */
    @Override
    public void dispose () {
    }
}
Use getter
UI/src/com/kotcrab/vis/ui/widget/tabbedpane/Tab.java
Use getter
<ide><path>I/src/com/kotcrab/vis/ui/widget/tabbedpane/Tab.java <ide> } <ide> <ide> private void checkSavable () { <del> if (savable == false) throw new IllegalStateException("Tab " + getTabTitle() + " is not savable!"); <add> if (isSavable() == false) throw new IllegalStateException("Tab " + getTabTitle() + " is not savable!"); <ide> } <ide> <ide> /** Removes this tab from pane (if any) */
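The change itself is a single line: checkSavable() now reads savability through isSavable() rather than the savable field. The practical benefit is that subclasses can override the getter and checkSavable() will honor the override. A minimal sketch of that design choice, with illustrative class names that are not part of the VisUI API:

// Sketch of why checkSavable() should go through the getter: a subclass can
// change the answer by overriding isSavable(), which a direct field read
// (savable == false) would silently ignore. Names here are illustrative only.
public class GetterOverrideDemo {
    static class BaseTab {
        private final boolean savable;

        BaseTab (boolean savable) {
            this.savable = savable;
        }

        public boolean isSavable () {
            return savable;
        }

        void checkSavable () {
            if (isSavable() == false) throw new IllegalStateException("not savable!");
        }
    }

    static class AlwaysSavableTab extends BaseTab {
        AlwaysSavableTab () {
            super(false); // the field says "not savable"...
        }

        @Override
        public boolean isSavable () {
            return true; // ...but the getter says otherwise, and checkSavable() agrees
        }
    }

    public static void main (String[] args) {
        new AlwaysSavableTab().checkSavable(); // passes only because of the getter
        System.out.println("checkSavable() passed via the overridden getter");
    }
}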
Java
epl-1.0
635701e854839001f01b655377169c45b33be812
0
rrimmana/birt-1,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,sguan-actuate/birt,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt
/*********************************************************************** * Copyright (c) 2004 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation ***********************************************************************/ package org.eclipse.birt.report.engine.layout.pdf; import java.text.Bidi; import java.util.HashSet; import org.eclipse.birt.report.engine.content.Dimension; import org.eclipse.birt.report.engine.content.IContent; import org.eclipse.birt.report.engine.content.IStyle; import org.eclipse.birt.report.engine.content.ITextContent; import org.eclipse.birt.report.engine.css.dom.AreaStyle; import org.eclipse.birt.report.engine.css.dom.ComputedStyle; import org.eclipse.birt.report.engine.css.engine.StyleConstants; import org.eclipse.birt.report.engine.executor.IReportItemExecutor; import org.eclipse.birt.report.engine.layout.ITextLayoutManager; import org.eclipse.birt.report.engine.layout.PDFConstants; import org.eclipse.birt.report.engine.layout.area.IArea; import org.eclipse.birt.report.engine.layout.area.impl.AbstractArea; import org.eclipse.birt.report.engine.layout.area.impl.AreaFactory; import org.eclipse.birt.report.engine.layout.area.impl.ContainerArea; import org.eclipse.birt.report.engine.layout.pdf.font.FontInfo; import org.eclipse.birt.report.engine.layout.pdf.hyphen.DefaultHyphenationManager; import org.eclipse.birt.report.engine.layout.pdf.hyphen.DefaultWordRecognizer; import org.eclipse.birt.report.engine.layout.pdf.hyphen.Hyphenation; import org.eclipse.birt.report.engine.layout.pdf.hyphen.IHyphenationManager; import org.eclipse.birt.report.engine.layout.pdf.hyphen.IWordRecognizer; import org.eclipse.birt.report.engine.layout.pdf.hyphen.Word; import org.eclipse.birt.report.engine.layout.pdf.text.Chunk; import org.eclipse.birt.report.engine.layout.pdf.text.ChunkGenerator; import org.eclipse.birt.report.engine.layout.pdf.util.PropertyUtil; /** * * This layout mananger implements formatting and locating of text chunk. * <p> * A text chunk can contain hard line break(such as "\n", "\n\r"). This layout * manager splits a text content to many text chunk due to different actual * font, soft line break etc. */ public class PDFTextLM extends PDFLeafItemLM implements ITextLayoutManager { private PDFLineAreaLM lineLM; /** * Checks if the compositor needs to pause. 
*/ private boolean pause = false; private Compositor comp = null; private ITextContent textContent = null; public PDFTextLM( PDFLayoutEngineContext context, PDFStackingLM parent, IContent content, IReportItemExecutor executor ) { super( context, parent, content, executor ); lineLM = (PDFLineAreaLM) parent; ITextContent textContent = (ITextContent) content; String text = textContent.getText( ); if ( text != null && text.length( ) != 0 ) { transform( textContent ); this.textContent = textContent; comp = new Compositor(); } } protected boolean layoutChildren( ) { if ( null == textContent ) return false; pause = false; return comp.compose( ); } public void addSpaceHolder( IArea con ) { lineLM.addArea( con ); } public boolean needPause( ) { return this.pause; } public void addTextLine( IArea textLine ) { lineLM.addArea( textLine ); } public void newLine( ) { if ( lineLM.endLine( ) ) pause = false; else pause = true; } public int getFreeSpace( ) { int freeSpace = lineLM.getMaxAvaWidth( ) - lineLM.getCurrentIP( ); return freeSpace; } public void setBaseLevel( int baseLevel ) { lineLM.setBaseLevel( baseLevel ); } public void transform( ITextContent textContent ) { String transformType = textContent.getComputedStyle( ) .getTextTransform( ); if ( transformType.equalsIgnoreCase( "uppercase" ) ) //$NON-NLS-1$ { textContent.setText( textContent.getText( ).toUpperCase( ) ); } else if ( transformType.equalsIgnoreCase( "lowercase" ) ) //$NON-NLS-1$ { textContent.setText( textContent.getText( ).toLowerCase( ) ); } else if ( transformType.equalsIgnoreCase( "capitalize" ) ) //$NON-NLS-1$ { textContent.setText( capitalize( textContent.getText( ) ) ); } } private String capitalize( String text ) { HashSet splitChar = new HashSet( ); splitChar.add( new Character( ' ' ) ); splitChar.add( new Character( (char) 0x0A ) ); char[] array = text.toCharArray( ); int index = 0; while ( index < array.length ) { Character c = new Character( text.charAt( index ) ); while ( splitChar.contains( c ) ) { index++; if ( index == array.length ) return new String( array ); c = new Character( text.charAt( index ) ); } array[index] = Character.toUpperCase( array[index] ); while ( !splitChar.contains( c ) ) { index++; if ( index == array.length ) break; c = new Character( text.charAt( index ) ); } } return new String( array ); } private class Compositor { private ChunkGenerator cg = null; private Chunk chunk = null; private ITextContent content; private boolean isInline; private boolean isNew = true; private int leftSpaceHolder = 0; private int rightSpaceHolder = 0; /** * The vestige is the word which can not be added into last line, * or the remain clip after hyphenation. * vestigeIndex saves the position of the vestige relative to * the text in chunk. */ private int vestigeIndex = -1; private int vestigeLength = 0; private int currentPos = 0; private int areaStartPos = 0; private int letterSpacing = 0; private int wordSpacing = 0; private int maxLineSpace = 0; private IWordRecognizer wr = null; /** * The flag to indicate whether the current TextArea needs to be added * into the line by force. */ private boolean addByForce = false; /** * The flag to indicate whether we need to split off the first character next time. 
*/ private boolean nothingSplitted = false; private int leftMargin; private int leftBorder; private int leftPadding; private int rightMargin; private int rightBorder; private int rightPadding; private int topBorder; private int topPadding; private int bottomBorder; private int bottomPadding; public Compositor() { this.content = textContent; cg = new ChunkGenerator(content); this.isInline = PropertyUtil.isInlineElement(content); this.maxLineSpace = lineLM.getMaxAvaWidth( ); IStyle style = content.getComputedStyle(); letterSpacing = getDimensionValue(style .getProperty(StyleConstants.STYLE_LETTER_SPACING)); wordSpacing = getDimensionValue(style .getProperty(StyleConstants.STYLE_WORD_SPACING)); IStyle boxStyle = new AreaStyle((ComputedStyle)style); validateBoxProperty(boxStyle, maxLineSpace, context.getMaxHeight( )); leftMargin = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_MARGIN_LEFT)); leftBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_LEFT_WIDTH)); leftPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_LEFT)); rightMargin = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_MARGIN_RIGHT)); rightBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_RIGHT_WIDTH)); rightPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_RIGHT)); topBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_TOP_WIDTH)); topPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_TOP)); bottomBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_BOTTOM_WIDTH)); bottomPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_BOTTOM)); } public boolean compose() { boolean hasMore; while ( (hasMore = hasMore()) && ! PDFTextLM.this.needPause() ) { handleNext(); } return hasMore; } private boolean hasMore() { if (cg.hasMore()) return true; else if ( null == chunk ) return false; else if (currentPos < chunk.getText().length()) return true; else { if (isInline) { ContainerArea con = (ContainerArea)createInlineContainer(content, false, true); con.setWidth(rightBorder+rightPadding); if (null == chunk.getFontInfo()) { IStyle style = content.getComputedStyle(); con.setHeight( getDimensionValue(style.getProperty(StyleConstants.STYLE_FONT_SIZE)) + topBorder + topPadding + bottomBorder + bottomPadding); }else { con.setHeight( (int)(chunk.getFontInfo().getWordHeight()*PDFConstants.LAYOUT_TO_PDF_RATIO) + topBorder + topPadding + bottomBorder + bottomPadding); } PDFTextLM.this.addSpaceHolder(con); } return false; } } private void handleNext() { int freeSpace = PDFTextLM.this.getFreeSpace(); // current chunk is over, get the next one. 
if ( isNew || currentPos == chunk.getText().length() ) { if (cg.hasMore()) { chunk = cg.getNext(); if (chunk == Chunk.HARD_LINE_BREAK) { currentPos = chunk.getText().length(); PDFTextLM.this.newLine(); vestigeIndex = -1; return; } currentPos = 0; this.wr = new DefaultWordRecognizer(chunk.getText()); } else { return; } } if (isNew) { isNew = false; if (isInline) { AbstractArea con = (AbstractArea)createInlineContainer(content, true, false); con.setWidth(leftBorder+leftPadding); con.setHeight( (int)(chunk.getFontInfo().getWordHeight()*PDFConstants.LAYOUT_TO_PDF_RATIO) + topBorder + topPadding + bottomBorder + bottomPadding); PDFTextLM.this.addSpaceHolder(con); leftSpaceHolder = leftMargin + leftBorder + leftPadding; freeSpace -= leftSpaceHolder; } } String str = null; Word currentWord = null; if (-1 == vestigeIndex) { currentWord = wr.getNextWord(); // The first word of the chunk is empty, so it means this chunk is a blank one. if (null == currentWord) { Dimension d = new Dimension( 0, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); IArea builtArea = buildArea("", content, //$NON-NLS-1$ chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); return; } str = currentWord.getValue(); areaStartPos = chunk.getOffset() + currentWord.getStart(); } else // This is a vestige. { str = chunk.getText().substring(vestigeIndex, vestigeIndex+vestigeLength); areaStartPos = chunk.getOffset() + vestigeIndex; } int prevAreaWidth = 0; int areaWidth = (int)(chunk.getFontInfo().getWordWidth( chunk.getText().substring(currentPos, currentPos+str.length())) * PDFConstants.LAYOUT_TO_PDF_RATIO) + letterSpacing * str.length() + wordSpacing; // holds space for inline text to draw the right border, padding etc. if (isInline) { if (isAtLast(chunk.getOffset() + currentPos + str.length())) { rightSpaceHolder = rightMargin + rightBorder + rightPadding; freeSpace -= rightSpaceHolder; } } while ( freeSpace >= areaWidth ) { currentPos += str.length(); currentWord = wr.getNextWord(); if (null == currentWord) { str = null; break; } str = currentWord.getValue(); prevAreaWidth = areaWidth; areaWidth += (int)(chunk.getFontInfo().getWordWidth( chunk.getText().substring(currentPos, currentPos+str.length())) * PDFConstants.LAYOUT_TO_PDF_RATIO) + letterSpacing * str.length() + wordSpacing; // holds space for inline text to draw the border, padding etc. if (isAtLast(chunk.getOffset() + currentPos + str.length())) { rightSpaceHolder = rightMargin + rightBorder + rightPadding; freeSpace -= rightSpaceHolder; } } //the chunk ends, build the TextArea. int length = chunk.getText().length(); if (currentPos == length ) { Dimension d = new Dimension( areaWidth, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); String originalText = chunk.getText().substring(areaStartPos - chunk.getOffset(), chunk.getText().length()); IArea builtArea = buildArea(getReverseText(originalText), content, chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); vestigeIndex = -1; vestigeLength = 0; return; } if( maxLineSpace < chunk.getFontInfo().getWordWidth(str)* PDFConstants.LAYOUT_TO_PDF_RATIO + letterSpacing * str.length()+ wordSpacing ) { if ( 0 == str.length() ) { vestigeIndex = -1; vestigeLength = 0; return; } // does hyphenation. 
IHyphenationManager hm = new DefaultHyphenationManager(); Hyphenation hyph = hm.getHyphenation(str); int endHyphenIndex = hyphen( 0, freeSpace-prevAreaWidth, hyph, chunk.getFontInfo() ); // forces to add the first character if the hyphen index is 0 for the second time. if (endHyphenIndex == 0) { if (nothingSplitted) { str = hyph.getHyphenText( 0, endHyphenIndex + 1 ); addByForce = true; nothingSplitted = false; } else { nothingSplitted = true; vestigeIndex = currentPos; vestigeLength = (null == currentWord) ? vestigeLength : currentWord.getLength(); return; } } else { str = hyph.getHyphenText( 0, endHyphenIndex ); } //int startHyphenIndex = (null == currentWord) ? vestigeIndex : currentWord.getStart(); currentPos += str.length(); vestigeIndex = currentPos; vestigeLength = (null == currentWord) ? vestigeLength - str.length() : currentWord.getLength() - str.length(); Dimension d = null; if ( addByForce ) { d = new Dimension( freeSpace, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); addByForce = false; } else { d = new Dimension( prevAreaWidth + (int)(chunk.getFontInfo().getWordWidth(str) * PDFConstants.LAYOUT_TO_PDF_RATIO) +letterSpacing*str.length(), (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); } String originalText = chunk.getText().substring(areaStartPos - chunk.getOffset(), vestigeIndex); IArea builtArea = buildArea(getReverseText(originalText), content, chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); PDFTextLM.this.newLine(); return; } else { // builds the text area and ends current line. Dimension d = new Dimension( prevAreaWidth, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO)); String originalText = chunk.getText().substring(areaStartPos - chunk.getOffset(), currentPos); IArea builtArea = buildArea(getReverseText(originalText), content, chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); PDFTextLM.this.newLine(); vestigeIndex = (null == currentWord) ? -1 : currentWord.getStart(); vestigeLength = (null == currentWord) ? 0 : currentWord.getLength(); return; } } /** * build areas by specified properties for text content. the return area * should be an container area which contains a text chunk or only a text * chunk. * <p> * <ul> * <li>For inline text, the return value should be a container area. The * container area inherit border and margin from text content. The position * of the container area in its container is decided by the margin value of * text content. * <li>For block text, the return value should be a text chunk. The * position of text chunk in its container is decided by the padding value * of text content. * </ul> * * @param content the TextContent which the TextArea shares the style with. * @param startOffset the start offset of the text in the TextArea relative to content. * @param endOffset the end offset of the text in the TextArea relative to content. * @param fi the FontInfo of the text in the TextArea. * * @return the built TextArea. */ private IArea buildArea(String text, ITextContent content, FontInfo fi, Dimension dimension) { if ( isInline ) { return createInlineTextArea( text, content, fi, dimension ); } else { return createBlockTextArea( text, content, fi, dimension ); } } /** * Gets the hyphenation index * * @param startIndex the start index * @param width the width of the free space * @param hyphenation the hyphenation * @param fi the FontInfo object of the text to be hyphened. 
* @return the hyphenation index */ private int hyphen(int startIndex, int width, Hyphenation hyphenation, FontInfo fi) { assert (startIndex >= 0); if (startIndex > hyphenation.length() - 1) { return -1; } int last = 0; int current = 0; for (int i = startIndex + 1; i < hyphenation.length(); i++) { last = current; String pre = hyphenation.getHyphenText(startIndex, i); current = (int)(fi.getWordWidth(pre) * PDFConstants.LAYOUT_TO_PDF_RATIO) +letterSpacing * pre.length(); if (width > last && width <= current) { return i-1; } } return hyphenation.length() - 1; } /** * Gets the reverse text if the run direction is RtL, * If the run direction is LtR, the text keeps the same. * @param text the original text. * @return the reverse text. */ private String getReverseText(String text) { if (chunk.getRunDirection() == Bidi.DIRECTION_LEFT_TO_RIGHT) { return text; } else { return flip(text); } } /** * Reverse text * @param text * @return */ private String flip(String text) { char[] indexChars = text.toCharArray(); int start = 0; int end = indexChars.length; int mid = (start + end) / 2; --end; for (; start < mid; ++start, --end) { char temp = indexChars[start]; indexChars[start] = indexChars[end]; indexChars[end] = temp; } return new String(indexChars); } private boolean isAtLast(int index) { return index >= content.getText().length(); } /** * create inline text area by text content * @param content the text content * @param text the text string * @param contentDimension the content dimension * @param isFirst if this area is the first area of the content * @param isLast if this area is the last area of the content * @return */ private IArea createInlineTextArea(String text, ITextContent content, FontInfo fi, Dimension contentDimension) { ContainerArea con = (ContainerArea)createInlineContainer(content, false, false); int textHeight = contentDimension.getHeight(); int textWidth = contentDimension.getWidth(); con.setWidth(textWidth); con.setHeight( textHeight + topPadding + topBorder + bottomPadding + bottomBorder ); AbstractArea textArea = (AbstractArea) AreaFactory.createTextArea( content, text, fi ); con.addChild( textArea ); textArea.setHeight( textHeight ); textArea.setWidth( textWidth ); textArea.setPosition( 0, topPadding + topBorder ); return con; } } }
engine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/pdf/PDFTextLM.java
/*********************************************************************** * Copyright (c) 2004 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation ***********************************************************************/ package org.eclipse.birt.report.engine.layout.pdf; import java.text.Bidi; import java.util.HashSet; import org.eclipse.birt.report.engine.content.Dimension; import org.eclipse.birt.report.engine.content.IContent; import org.eclipse.birt.report.engine.content.IStyle; import org.eclipse.birt.report.engine.content.ITextContent; import org.eclipse.birt.report.engine.css.dom.AreaStyle; import org.eclipse.birt.report.engine.css.dom.ComputedStyle; import org.eclipse.birt.report.engine.css.engine.StyleConstants; import org.eclipse.birt.report.engine.executor.IReportItemExecutor; import org.eclipse.birt.report.engine.layout.ITextLayoutManager; import org.eclipse.birt.report.engine.layout.PDFConstants; import org.eclipse.birt.report.engine.layout.area.IArea; import org.eclipse.birt.report.engine.layout.area.impl.AbstractArea; import org.eclipse.birt.report.engine.layout.area.impl.AreaFactory; import org.eclipse.birt.report.engine.layout.area.impl.ContainerArea; import org.eclipse.birt.report.engine.layout.pdf.font.FontInfo; import org.eclipse.birt.report.engine.layout.pdf.hyphen.DefaultHyphenationManager; import org.eclipse.birt.report.engine.layout.pdf.hyphen.DefaultWordRecognizer; import org.eclipse.birt.report.engine.layout.pdf.hyphen.Hyphenation; import org.eclipse.birt.report.engine.layout.pdf.hyphen.IHyphenationManager; import org.eclipse.birt.report.engine.layout.pdf.hyphen.IWordRecognizer; import org.eclipse.birt.report.engine.layout.pdf.hyphen.Word; import org.eclipse.birt.report.engine.layout.pdf.text.Chunk; import org.eclipse.birt.report.engine.layout.pdf.text.ChunkGenerator; import org.eclipse.birt.report.engine.layout.pdf.util.PropertyUtil; /** * * This layout mananger implements formatting and locating of text chunk. * <p> * A text chunk can contain hard line break(such as "\n", "\n\r"). This layout * manager splits a text content to many text chunk due to different actual * font, soft line break etc. */ public class PDFTextLM extends PDFLeafItemLM implements ITextLayoutManager { private PDFLineAreaLM lineLM; /** * Checks if the compositor needs to pause. 
*/ private boolean pause = false; private Compositor comp = null; private ITextContent textContent = null; public PDFTextLM( PDFLayoutEngineContext context, PDFStackingLM parent, IContent content, IReportItemExecutor executor ) { super( context, parent, content, executor ); lineLM = (PDFLineAreaLM) parent; ITextContent textContent = (ITextContent) content; String text = textContent.getText( ); if ( text != null && text.length( ) != 0 ) { transform( textContent ); this.textContent = textContent; comp = new Compositor(); } } protected boolean layoutChildren( ) { if ( null == textContent ) return false; pause = false; return comp.compose( ); } public void addSpaceHolder( IArea con ) { lineLM.addArea( con ); } public boolean needPause( ) { return this.pause; } public void addTextLine( IArea textLine ) { lineLM.addArea( textLine ); } public void newLine( ) { if ( lineLM.endLine( ) ) pause = false; else pause = true; } public int getFreeSpace( ) { int freeSpace = lineLM.getMaxAvaWidth( ) - lineLM.getCurrentIP( ); return freeSpace; } public void setBaseLevel( int baseLevel ) { lineLM.setBaseLevel( baseLevel ); } public void transform( ITextContent textContent ) { String transformType = textContent.getComputedStyle( ) .getTextTransform( ); if ( transformType.equalsIgnoreCase( "uppercase" ) ) //$NON-NLS-1$ { textContent.setText( textContent.getText( ).toUpperCase( ) ); } else if ( transformType.equalsIgnoreCase( "lowercase" ) ) //$NON-NLS-1$ { textContent.setText( textContent.getText( ).toLowerCase( ) ); } else if ( transformType.equalsIgnoreCase( "capitalize" ) ) //$NON-NLS-1$ { textContent.setText( capitalize( textContent.getText( ) ) ); } } private String capitalize( String text ) { HashSet splitChar = new HashSet( ); splitChar.add( new Character( ' ' ) ); splitChar.add( new Character( (char) 0x0A ) ); char[] array = text.toCharArray( ); int index = 0; while ( index < array.length ) { Character c = new Character( text.charAt( index ) ); while ( splitChar.contains( c ) ) { index++; if ( index == array.length ) return new String( array ); c = new Character( text.charAt( index ) ); } array[index] = Character.toUpperCase( array[index] ); while ( !splitChar.contains( c ) ) { index++; if ( index == array.length ) break; c = new Character( text.charAt( index ) ); } } return new String( array ); } private class Compositor { private ChunkGenerator cg = null; private Chunk chunk = null; private ITextContent content; private boolean isInline; private boolean isNew = true; private int leftSpaceHolder = 0; private int rightSpaceHolder = 0; /** * The vestige is the word which can not be added into last line, * or the remain clip after hyphenation. * vestigeIndex saves the position of the vestige relative to * the text in chunk. */ private int vestigeIndex = -1; private int vestigeLength = 0; private int currentPos = 0; private int areaStartPos = 0; private int letterSpacing = 0; private int wordSpacing = 0; private int maxLineSpace = 0; private IWordRecognizer wr = null; /** * The flag to indicate whether the current TextArea needs to be added * into the line by force. */ private boolean addByForce = false; /** * The flag to indicate whether we need to split off the first character next time. 
*/ private boolean nothingSplitted = false; private int leftMargin; private int leftBorder; private int leftPadding; private int rightMargin; private int rightBorder; private int rightPadding; private int topBorder; private int topPadding; private int bottomBorder; private int bottomPadding; public Compositor() { this.content = textContent; cg = new ChunkGenerator(content); this.isInline = PropertyUtil.isInlineElement(content); this.maxLineSpace = lineLM.getMaxAvaWidth( ); IStyle style = content.getComputedStyle(); letterSpacing = getDimensionValue(style .getProperty(StyleConstants.STYLE_LETTER_SPACING)); wordSpacing = getDimensionValue(style .getProperty(StyleConstants.STYLE_WORD_SPACING)); IStyle boxStyle = new AreaStyle((ComputedStyle)style); validateBoxProperty(boxStyle, maxLineSpace, context.getMaxHeight( )); leftMargin = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_MARGIN_LEFT)); leftBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_LEFT_WIDTH)); leftPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_LEFT)); rightMargin = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_MARGIN_RIGHT)); rightBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_RIGHT_WIDTH)); rightPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_RIGHT)); topBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_TOP_WIDTH)); topPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_TOP)); bottomBorder = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_BORDER_BOTTOM_WIDTH)); bottomPadding = getDimensionValue(boxStyle.getProperty(StyleConstants.STYLE_PADDING_BOTTOM)); } public boolean compose() { boolean hasMore; while ( (hasMore = hasMore()) && ! PDFTextLM.this.needPause() ) { handleNext(); } return hasMore; } private boolean hasMore() { if (cg.hasMore()) return true; else if ( null == chunk ) return false; else if (currentPos < chunk.getText().length()) return true; else { if (isInline) { ContainerArea con = (ContainerArea)createInlineContainer(content, false, true); con.setWidth(rightBorder+rightPadding); if (null == chunk.getFontInfo()) { IStyle style = content.getComputedStyle(); con.setHeight( getDimensionValue(style.getProperty(StyleConstants.STYLE_FONT_SIZE)) + topBorder + topPadding + bottomBorder + bottomPadding); }else { con.setHeight( (int)(chunk.getFontInfo().getWordHeight()*PDFConstants.LAYOUT_TO_PDF_RATIO) + topBorder + topPadding + bottomBorder + bottomPadding); } PDFTextLM.this.addSpaceHolder(con); } return false; } } private void handleNext() { int freeSpace = PDFTextLM.this.getFreeSpace(); // current chunk is over, get the next one. 
if ( isNew || currentPos == chunk.getText().length() ) { if (cg.hasMore()) { chunk = cg.getNext(); if (chunk == Chunk.HARD_LINE_BREAK) { currentPos = chunk.getText().length(); PDFTextLM.this.newLine(); return; } currentPos = 0; this.wr = new DefaultWordRecognizer(chunk.getText()); } else { return; } } if (isNew) { isNew = false; if (isInline) { AbstractArea con = (AbstractArea)createInlineContainer(content, true, false); con.setWidth(leftBorder+leftPadding); con.setHeight( (int)(chunk.getFontInfo().getWordHeight()*PDFConstants.LAYOUT_TO_PDF_RATIO) + topBorder + topPadding + bottomBorder + bottomPadding); PDFTextLM.this.addSpaceHolder(con); leftSpaceHolder = leftMargin + leftBorder + leftPadding; freeSpace -= leftSpaceHolder; } } String str = null; Word currentWord = null; if (-1 == vestigeIndex) { currentWord = wr.getNextWord(); // The first word of the chunk is empty, so it means this chunk is a blank one. if (null == currentWord) { Dimension d = new Dimension( 0, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); IArea builtArea = buildArea("", content, //$NON-NLS-1$ chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); return; } str = currentWord.getValue(); areaStartPos = chunk.getOffset() + currentWord.getStart(); } else // This is a vestige. { str = chunk.getText().substring(vestigeIndex, vestigeIndex+vestigeLength); areaStartPos = chunk.getOffset() + vestigeIndex; } int prevAreaWidth = 0; int areaWidth = (int)(chunk.getFontInfo().getWordWidth( chunk.getText().substring(currentPos, currentPos+str.length())) * PDFConstants.LAYOUT_TO_PDF_RATIO) + letterSpacing * str.length() + wordSpacing; // holds space for inline text to draw the right border, padding etc. if (isInline) { if (isAtLast(chunk.getOffset() + currentPos + str.length())) { rightSpaceHolder = rightMargin + rightBorder + rightPadding; freeSpace -= rightSpaceHolder; } } while ( freeSpace >= areaWidth ) { currentPos += str.length(); currentWord = wr.getNextWord(); if (null == currentWord) { str = null; break; } str = currentWord.getValue(); prevAreaWidth = areaWidth; areaWidth += (int)(chunk.getFontInfo().getWordWidth( chunk.getText().substring(currentPos, currentPos+str.length())) * PDFConstants.LAYOUT_TO_PDF_RATIO) + letterSpacing * str.length() + wordSpacing; // holds space for inline text to draw the border, padding etc. if (isAtLast(chunk.getOffset() + currentPos + str.length())) { rightSpaceHolder = rightMargin + rightBorder + rightPadding; freeSpace -= rightSpaceHolder; } } //the chunk ends, build the TextArea. int length = chunk.getText().length(); if (currentPos == length ) { Dimension d = new Dimension( areaWidth, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); String originalText = chunk.getText().substring(areaStartPos - chunk.getOffset(), chunk.getText().length()); IArea builtArea = buildArea(getReverseText(originalText), content, chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); vestigeIndex = -1; vestigeLength = 0; return; } if( maxLineSpace < chunk.getFontInfo().getWordWidth(str)* PDFConstants.LAYOUT_TO_PDF_RATIO + letterSpacing * str.length()+ wordSpacing ) { if ( 0 == str.length() ) { vestigeIndex = -1; vestigeLength = 0; return; } // does hyphenation. IHyphenationManager hm = new DefaultHyphenationManager(); Hyphenation hyph = hm.getHyphenation(str); int endHyphenIndex = hyphen( 0, freeSpace-prevAreaWidth, hyph, chunk.getFontInfo() ); // forces to add the first character if the hyphen index is 0 for the second time. 
if (endHyphenIndex == 0) { if (nothingSplitted) { str = hyph.getHyphenText( 0, endHyphenIndex + 1 ); addByForce = true; nothingSplitted = false; } else { nothingSplitted = true; vestigeIndex = currentPos; vestigeLength = (null == currentWord) ? vestigeLength : currentWord.getLength(); return; } } else { str = hyph.getHyphenText( 0, endHyphenIndex ); } //int startHyphenIndex = (null == currentWord) ? vestigeIndex : currentWord.getStart(); currentPos += str.length(); vestigeIndex = currentPos; vestigeLength = (null == currentWord) ? vestigeLength - str.length() : currentWord.getLength() - str.length(); Dimension d = null; if ( addByForce ) { d = new Dimension( freeSpace, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); addByForce = false; } else { d = new Dimension( prevAreaWidth + (int)(chunk.getFontInfo().getWordWidth(str) * PDFConstants.LAYOUT_TO_PDF_RATIO) +letterSpacing*str.length(), (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO )); } String originalText = chunk.getText().substring(areaStartPos - chunk.getOffset(), vestigeIndex); IArea builtArea = buildArea(getReverseText(originalText), content, chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); PDFTextLM.this.newLine(); return; } else { // builds the text area and ends current line. Dimension d = new Dimension( prevAreaWidth, (int)(chunk.getFontInfo().getWordHeight() * PDFConstants.LAYOUT_TO_PDF_RATIO)); String originalText = chunk.getText().substring(areaStartPos - chunk.getOffset(), currentPos); IArea builtArea = buildArea(getReverseText(originalText), content, chunk.getFontInfo(), d); PDFTextLM.this.addTextLine(builtArea); PDFTextLM.this.newLine(); vestigeIndex = (null == currentWord) ? -1 : currentWord.getStart(); vestigeLength = (null == currentWord) ? 0 : currentWord.getLength(); return; } } /** * build areas by specified properties for text content. the return area * should be an container area which contains a text chunk or only a text * chunk. * <p> * <ul> * <li>For inline text, the return value should be a container area. The * container area inherit border and margin from text content. The position * of the container area in its container is decided by the margin value of * text content. * <li>For block text, the return value should be a text chunk. The * position of text chunk in its container is decided by the padding value * of text content. * </ul> * * @param content the TextContent which the TextArea shares the style with. * @param startOffset the start offset of the text in the TextArea relative to content. * @param endOffset the end offset of the text in the TextArea relative to content. * @param fi the FontInfo of the text in the TextArea. * * @return the built TextArea. */ private IArea buildArea(String text, ITextContent content, FontInfo fi, Dimension dimension) { if ( isInline ) { return createInlineTextArea( text, content, fi, dimension ); } else { return createBlockTextArea( text, content, fi, dimension ); } } /** * Gets the hyphenation index * * @param startIndex the start index * @param width the width of the free space * @param hyphenation the hyphenation * @param fi the FontInfo object of the text to be hyphened. 
* @return the hyphenation index */ private int hyphen(int startIndex, int width, Hyphenation hyphenation, FontInfo fi) { assert (startIndex >= 0); if (startIndex > hyphenation.length() - 1) { return -1; } int last = 0; int current = 0; for (int i = startIndex + 1; i < hyphenation.length(); i++) { last = current; String pre = hyphenation.getHyphenText(startIndex, i); current = (int)(fi.getWordWidth(pre) * PDFConstants.LAYOUT_TO_PDF_RATIO) +letterSpacing * pre.length(); if (width > last && width <= current) { return i-1; } } return hyphenation.length() - 1; } /** * Gets the reverse text if the run direction is RtL, * If the run direction is LtR, the text keeps the same. * @param text the original text. * @return the reverse text. */ private String getReverseText(String text) { if (chunk.getRunDirection() == Bidi.DIRECTION_LEFT_TO_RIGHT) { return text; } else { return flip(text); } } /** * Reverse text * @param text * @return */ private String flip(String text) { char[] indexChars = text.toCharArray(); int start = 0; int end = indexChars.length; int mid = (start + end) / 2; --end; for (; start < mid; ++start, --end) { char temp = indexChars[start]; indexChars[start] = indexChars[end]; indexChars[end] = temp; } return new String(indexChars); } private boolean isAtLast(int index) { return index >= content.getText().length(); } /** * create inline text area by text content * @param content the text content * @param text the text string * @param contentDimension the content dimension * @param isFirst if this area is the first area of the content * @param isLast if this area is the last area of the content * @return */ private IArea createInlineTextArea(String text, ITextContent content, FontInfo fi, Dimension contentDimension) { ContainerArea con = (ContainerArea)createInlineContainer(content, false, false); int textHeight = contentDimension.getHeight(); int textWidth = contentDimension.getWidth(); con.setWidth(textWidth); con.setHeight( textHeight + topPadding + topBorder + bottomPadding + bottomBorder ); AbstractArea textArea = (AbstractArea) AreaFactory.createTextArea( content, text, fi ); con.addChild( textArea ); textArea.setHeight( textHeight ); textArea.setWidth( textWidth ); textArea.setPosition( 0, topPadding + topBorder ); return con; } } }
Fix Bugzilla Bug 156658--Error occurs when previewing a text with a style set word-spacing 20cm in PDF
engine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/pdf/PDFTextLM.java
Fix Bugzilla Bug 156658--Error occurs when previewing a text with a style set word-spacing 20cm in PDF
<ide><path>ngine/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/pdf/PDFTextLM.java <ide> private Compositor comp = null; <ide> <ide> private ITextContent textContent = null; <del> <add> <ide> public PDFTextLM( PDFLayoutEngineContext context, PDFStackingLM parent, <ide> IContent content, IReportItemExecutor executor ) <ide> { <ide> { <ide> currentPos = chunk.getText().length(); <ide> PDFTextLM.this.newLine(); <add> vestigeIndex = -1; <ide> return; <ide> } <ide> currentPos = 0;
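Most of the width bookkeeping in this layout manager funnels into the hyphen() method, which walks candidate prefixes of a word, comparing the previous and current rendered widths until the available width falls between them. A self-contained sketch of that scan, assuming a precomputed array of prefix widths in place of FontInfo and letter spacing (the array-based signature is a simplification for the demo, not the engine's API):

// Stand-alone sketch of the scan in PDFTextLM.hyphen(). prefixWidths[i]
// plays the role of getWordWidth(hyphenText(0, i)) plus letter spacing; the
// method returns the last hyphenation index whose prefix the free space can
// still hold, mirroring the original boundary test (width > last && width <= current).
public final class HyphenScan {

    private HyphenScan() {
    }

    public static int lastFittingIndex(int[] prefixWidths, int freeSpace) {
        int last = 0;
        int current = 0;
        for (int i = 1; i < prefixWidths.length; i++) {
            last = current;
            current = prefixWidths[i];
            if (freeSpace > last && freeSpace <= current) {
                return i - 1; // the free space runs out inside fragment i, keep i - 1
            }
        }
        return prefixWidths.length - 1; // every prefix fits
    }

    public static void main(String[] args) {
        int[] widths = {0, 200, 550, 900}; // widths of "", "hy-", "hyphen-", "hyphenation" in layout units
        System.out.println(lastFittingIndex(widths, 600)); // prints 2
    }
}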
JavaScript
mit
a5d89ee98150adb89637d997cf4a91ef0d926b5a
0
makc/three.js.fork,Samsy/three.js,greggman/three.js,Liuer/three.js,kaisalmen/three.js,stanford-gfx/three.js,QingchaoHu/three.js,fraguada/three.js,kaisalmen/three.js,Samsy/three.js,fraguada/three.js,TristanVALCKE/three.js,TristanVALCKE/three.js,looeee/three.js,jpweeks/three.js,gero3/three.js,stanford-gfx/three.js,Samsy/three.js,sttz/three.js,Samsy/three.js,aardgoose/three.js,zhoushijie163/three.js,aardgoose/three.js,fyoudine/three.js,06wj/three.js,looeee/three.js,makc/three.js.fork,TristanVALCKE/three.js,06wj/three.js,zhoushijie163/three.js,gero3/three.js,SpinVR/three.js,TristanVALCKE/three.js,fraguada/three.js,QingchaoHu/three.js,TristanVALCKE/three.js,greggman/three.js,fraguada/three.js,Samsy/three.js,Itee/three.js,WestLangley/three.js,sttz/three.js,fyoudine/three.js,mrdoob/three.js,Samsy/three.js,fraguada/three.js,donmccurdy/three.js,TristanVALCKE/three.js,jpweeks/three.js,fraguada/three.js,mrdoob/three.js,WestLangley/three.js,SpinVR/three.js,donmccurdy/three.js,Liuer/three.js,Itee/three.js
import { Quaternion } from './Quaternion.js'; import { Vector3 } from './Vector3.js'; import { Matrix4 } from './Matrix4.js'; import { _Math } from './Math.js'; /** * @author mrdoob / http://mrdoob.com/ * @author WestLangley / http://github.com/WestLangley * @author bhouston / http://clara.io */ function Euler( x, y, z, order ) { this._x = x || 0; this._y = y || 0; this._z = z || 0; this._order = order || Euler.DefaultOrder; } Euler.RotationOrders = [ 'XYZ', 'YZX', 'ZXY', 'XZY', 'YXZ', 'ZYX' ]; Euler.DefaultOrder = 'XYZ'; Object.defineProperties( Euler.prototype, { x: { get: function () { return this._x; }, set: function ( value ) { this._x = value; this._onChangeCallback(); } }, y: { get: function () { return this._y; }, set: function ( value ) { this._y = value; this._onChangeCallback(); } }, z: { get: function () { return this._z; }, set: function ( value ) { this._z = value; this._onChangeCallback(); } }, order: { get: function () { return this._order; }, set: function ( value ) { this._order = value; this._onChangeCallback(); } } } ); Object.assign( Euler.prototype, { isEuler: true, set: function ( x, y, z, order ) { this._x = x; this._y = y; this._z = z; this._order = order || this._order; this._onChangeCallback(); return this; }, clone: function () { return new this.constructor( this._x, this._y, this._z, this._order ); }, copy: function ( euler ) { this._x = euler._x; this._y = euler._y; this._z = euler._z; this._order = euler._order; this._onChangeCallback(); return this; }, setFromRotationMatrix: function ( m, order, update ) { var clamp = _Math.clamp; // assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled) var te = m.elements; var m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ]; var m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ]; var m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ]; order = order || this._order; if ( order === 'XYZ' ) { this._y = Math.asin( clamp( m13, - 1, 1 ) ); if ( Math.abs( m13 ) < 1 ) { this._x = Math.atan2( - m23, m33 ); this._z = Math.atan2( - m12, m11 ); } else { this._x = Math.atan2( m32, m22 ); this._z = 0; } } else if ( order === 'YXZ' ) { this._x = Math.asin( - clamp( m23, - 1, 1 ) ); if ( Math.abs( m23 ) < 1 ) { this._y = Math.atan2( m13, m33 ); this._z = Math.atan2( m21, m22 ); } else { this._y = Math.atan2( - m31, m11 ); this._z = 0; } } else if ( order === 'ZXY' ) { this._x = Math.asin( clamp( m32, - 1, 1 ) ); if ( Math.abs( m32 ) < 1 ) { this._y = Math.atan2( - m31, m33 ); this._z = Math.atan2( - m12, m22 ); } else { this._y = 0; this._z = Math.atan2( m21, m11 ); } } else if ( order === 'ZYX' ) { this._y = Math.asin( - clamp( m31, - 1, 1 ) ); if ( Math.abs( m31 ) < 1 ) { this._x = Math.atan2( m32, m33 ); this._z = Math.atan2( m21, m11 ); } else { this._x = 0; this._z = Math.atan2( - m12, m22 ); } } else if ( order === 'YZX' ) { this._z = Math.asin( clamp( m21, - 1, 1 ) ); if ( Math.abs( m21 ) < 1 ) { this._x = Math.atan2( - m23, m22 ); this._y = Math.atan2( - m31, m11 ); } else { this._x = 0; this._y = Math.atan2( m13, m33 ); } } else if ( order === 'XZY' ) { this._z = Math.asin( - clamp( m12, - 1, 1 ) ); if ( Math.abs( m12 ) < 1 ) { this._x = Math.atan2( m32, m22 ); this._y = Math.atan2( m13, m11 ); } else { this._x = Math.atan2( - m23, m33 ); this._y = 0; } } else { console.warn( 'THREE.Euler: .setFromRotationMatrix() given unsupported order: ' + order ); } this._order = order; if ( update !== false ) this._onChangeCallback(); return this; }, setFromQuaternion: function () { var matrix = new Matrix4(); return function setFromQuaternion( 
q, order, update ) { matrix.makeRotationFromQuaternion( q ); return this.setFromRotationMatrix( matrix, order, update ); }; }(), setFromVector3: function ( v, order ) { return this.set( v.x, v.y, v.z, order || this._order ); }, reorder: function () { // WARNING: this discards revolution information -bhouston var q = new Quaternion(); return function reorder( newOrder ) { q.setFromEuler( this ); return this.setFromQuaternion( q, newOrder ); }; }(), equals: function ( euler ) { return ( euler._x === this._x ) && ( euler._y === this._y ) && ( euler._z === this._z ) && ( euler._order === this._order ); }, fromArray: function ( array ) { this._x = array[ 0 ]; this._y = array[ 1 ]; this._z = array[ 2 ]; if ( array[ 3 ] !== undefined ) this._order = array[ 3 ]; this._onChangeCallback(); return this; }, toArray: function ( array, offset ) { if ( array === undefined ) array = []; if ( offset === undefined ) offset = 0; array[ offset ] = this._x; array[ offset + 1 ] = this._y; array[ offset + 2 ] = this._z; array[ offset + 3 ] = this._order; return array; }, toVector3: function ( optionalResult ) { if ( optionalResult ) { return optionalResult.set( this._x, this._y, this._z ); } else { return new Vector3( this._x, this._y, this._z ); } }, _onChange: function ( callback ) { this._onChangeCallback = callback; return this; }, _onChangeCallback: function () {} } ); export { Euler };
src/math/Euler.js
import { Quaternion } from './Quaternion.js'; import { Vector3 } from './Vector3.js'; import { Matrix4 } from './Matrix4.js'; import { _Math } from './Math.js'; /** * @author mrdoob / http://mrdoob.com/ * @author WestLangley / http://github.com/WestLangley * @author bhouston / http://clara.io */ function Euler( x, y, z, order ) { this._x = x || 0; this._y = y || 0; this._z = z || 0; this._order = order || Euler.DefaultOrder; } Euler.RotationOrders = [ 'XYZ', 'YZX', 'ZXY', 'XZY', 'YXZ', 'ZYX' ]; Euler.DefaultOrder = 'XYZ'; Object.defineProperties( Euler.prototype, { x: { get: function () { return this._x; }, set: function ( value ) { this._x = value; this._onChangeCallback(); }, }, y: { get: function () { return this._y; }, set: function ( value ) { this._y = value; this._onChangeCallback(); }, }, z: { get: function () { return this._z; }, set: function ( value ) { this._z = value; this._onChangeCallback(); }, }, order: { get: function () { return this._order; }, set: function ( value ) { this._order = value; this._onChangeCallback(); }, }, } ); Object.assign( Euler.prototype, { isEuler: true, set: function ( x, y, z, order ) { this._x = x; this._y = y; this._z = z; this._order = order || this._order; this._onChangeCallback(); return this; }, clone: function () { return new this.constructor( this._x, this._y, this._z, this._order ); }, copy: function ( euler ) { this._x = euler._x; this._y = euler._y; this._z = euler._z; this._order = euler._order; this._onChangeCallback(); return this; }, setFromRotationMatrix: function ( m, order, update ) { var clamp = _Math.clamp; // assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled) var te = m.elements; var m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ]; var m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ]; var m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ]; order = order || this._order; if ( order === 'XYZ' ) { this._y = Math.asin( clamp( m13, - 1, 1 ) ); if ( Math.abs( m13 ) < 1 ) { this._x = Math.atan2( - m23, m33 ); this._z = Math.atan2( - m12, m11 ); } else { this._x = Math.atan2( m32, m22 ); this._z = 0; } } else if ( order === 'YXZ' ) { this._x = Math.asin( - clamp( m23, - 1, 1 ) ); if ( Math.abs( m23 ) < 1 ) { this._y = Math.atan2( m13, m33 ); this._z = Math.atan2( m21, m22 ); } else { this._y = Math.atan2( - m31, m11 ); this._z = 0; } } else if ( order === 'ZXY' ) { this._x = Math.asin( clamp( m32, - 1, 1 ) ); if ( Math.abs( m32 ) < 1 ) { this._y = Math.atan2( - m31, m33 ); this._z = Math.atan2( - m12, m22 ); } else { this._y = 0; this._z = Math.atan2( m21, m11 ); } } else if ( order === 'ZYX' ) { this._y = Math.asin( - clamp( m31, - 1, 1 ) ); if ( Math.abs( m31 ) < 1 ) { this._x = Math.atan2( m32, m33 ); this._z = Math.atan2( m21, m11 ); } else { this._x = 0; this._z = Math.atan2( - m12, m22 ); } } else if ( order === 'YZX' ) { this._z = Math.asin( clamp( m21, - 1, 1 ) ); if ( Math.abs( m21 ) < 1 ) { this._x = Math.atan2( - m23, m22 ); this._y = Math.atan2( - m31, m11 ); } else { this._x = 0; this._y = Math.atan2( m13, m33 ); } } else if ( order === 'XZY' ) { this._z = Math.asin( - clamp( m12, - 1, 1 ) ); if ( Math.abs( m12 ) < 1 ) { this._x = Math.atan2( m32, m22 ); this._y = Math.atan2( m13, m11 ); } else { this._x = Math.atan2( - m23, m33 ); this._y = 0; } } else { console.warn( 'THREE.Euler: .setFromRotationMatrix() given unsupported order: ' + order ); } this._order = order; if ( update !== false ) this._onChangeCallback(); return this; }, setFromQuaternion: ( function () { var matrix = new Matrix4(); return function 
setFromQuaternion( q, order, update ) { matrix.makeRotationFromQuaternion( q ); return this.setFromRotationMatrix( matrix, order, update ); }; } )(), setFromVector3: function ( v, order ) { return this.set( v.x, v.y, v.z, order || this._order ); }, reorder: ( function () { // WARNING: this discards revolution information -bhouston var q = new Quaternion(); return function reorder( newOrder ) { q.setFromEuler( this ); return this.setFromQuaternion( q, newOrder ); }; } )(), equals: function ( euler ) { return ( euler._x === this._x && euler._y === this._y && euler._z === this._z && euler._order === this._order ); }, fromArray: function ( array ) { this._x = array[ 0 ]; this._y = array[ 1 ]; this._z = array[ 2 ]; if ( array[ 3 ] !== undefined ) this._order = array[ 3 ]; this._onChangeCallback(); return this; }, toArray: function ( array, offset ) { if ( array === undefined ) array = []; if ( offset === undefined ) offset = 0; array[ offset ] = this._x; array[ offset + 1 ] = this._y; array[ offset + 2 ] = this._z; array[ offset + 3 ] = this._order; return array; }, toVector3: function ( optionalResult ) { if ( optionalResult ) { return optionalResult.set( this._x, this._y, this._z ); } else { return new Vector3( this._x, this._y, this._z ); } }, _onChange: function ( callback ) { this._onChangeCallback = callback; return this; }, _onChangeCallback: function () {}, } ); export { Euler };
mrdoob style
src/math/Euler.js
mrdoob style
<ide><path>src/math/Euler.js
<ide> Euler.DefaultOrder = 'XYZ';
<ide>
<ide> Object.defineProperties( Euler.prototype, {
<add>
<ide> x: {
<add>
<ide> get: function () {
<ide>
<ide> return this._x;
<ide> this._x = value;
<ide> this._onChangeCallback();
<ide>
<del> },
<add> }
<add>
<ide> },
<ide>
<ide> y: {
<add>
<ide> get: function () {
<ide>
<ide> return this._y;
<ide> this._y = value;
<ide> this._onChangeCallback();
<ide>
<del> },
<add> }
<add>
<ide> },
<ide>
<ide> z: {
<add>
<ide> get: function () {
<ide>
<ide> return this._z;
<ide> this._z = value;
<ide> this._onChangeCallback();
<ide>
<del> },
<add> }
<add>
<ide> },
<ide>
<ide> order: {
<add>
<ide> get: function () {
<ide>
<ide> return this._order;
<ide> this._order = value;
<ide> this._onChangeCallback();
<ide>
<del> },
<del> },
<add> }
<add>
<add> }
<add>
<ide> } );
<ide>
<ide> Object.assign( Euler.prototype, {
<add>
<ide> isEuler: true,
<ide>
<ide> set: function ( x, y, z, order ) {
<ide> // assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled)
<ide>
<ide> var te = m.elements;
<del> var m11 = te[ 0 ],
<del> m12 = te[ 4 ],
<del> m13 = te[ 8 ];
<del> var m21 = te[ 1 ],
<del> m22 = te[ 5 ],
<del> m23 = te[ 9 ];
<del> var m31 = te[ 2 ],
<del> m32 = te[ 6 ],
<del> m33 = te[ 10 ];
<add> var m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ];
<add> var m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ];
<add> var m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ];
<ide>
<ide> order = order || this._order;
<ide>
<ide>
<ide> } else {
<ide>
<del> console.warn(
<del> 'THREE.Euler: .setFromRotationMatrix() given unsupported order: ' +
<del> order
<del> );
<add> console.warn( 'THREE.Euler: .setFromRotationMatrix() given unsupported order: ' + order );
<ide>
<ide> }
<ide>
<ide>
<ide> },
<ide>
<del> setFromQuaternion: ( function () {
<add> setFromQuaternion: function () {
<ide>
<ide> var matrix = new Matrix4();
<ide>
<ide>
<ide> };
<ide>
<del> } )(),
<add> }(),
<ide>
<ide> setFromVector3: function ( v, order ) {
<ide>
<ide>
<ide> },
<ide>
<del> reorder: ( function () {
<add> reorder: function () {
<ide>
<ide> // WARNING: this discards revolution information -bhouston
<ide>
<ide>
<ide> };
<ide>
<del> } )(),
<add> }(),
<ide>
<ide> equals: function ( euler ) {
<ide>
<del> return (
<del> euler._x === this._x &&
<del> euler._y === this._y &&
<del> euler._z === this._z &&
<del> euler._order === this._order
<del> );
<add> return ( euler._x === this._x ) && ( euler._y === this._y ) && ( euler._z === this._z ) && ( euler._order === this._order );
<ide>
<ide> },
<ide>
<ide>
<ide> },
<ide>
<del> _onChangeCallback: function () {},
<add> _onChangeCallback: function () {}
<add>
<ide> } );
<ide>
<add>
<ide> export { Euler };
JavaScript
mit
7c715a7ae682f13417ca2595eab44297e81ad167
0
maximz/recal,maximz/recal,maximz/recal,maximz/recal,maximz/recal
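The diff above is purely stylistic: it reverts prettier-style output (trailing commas, wrapped var declarations, ( function () { ... } )() IIFE wrappers) back to mrdoob's house style without changing behavior. As a quick sanity check of the Euler API that the file defines, a minimal usage sketch follows; the import paths and the numeric values are assumptions made for illustration only:

import { Euler } from './src/math/Euler.js'; // path assumed for this sketch
import { Quaternion } from './src/math/Quaternion.js'; // path assumed for this sketch

// Build a rotation and observe mutations via the _onChange hook.
var e = new Euler( Math.PI / 2, 0, 0, 'XYZ' );
e._onChange( function () {

	console.log( 'euler changed:', e.toArray() ); // [ x, y, z, order ]

} );

e.y = Math.PI / 4; // the property setter fires the callback above

// reorder() round-trips through a quaternion, so revolution
// information beyond one turn is discarded (see the warning in the source).
e.reorder( 'ZYX' );

// setFromQuaternion() inverts Quaternion#setFromEuler up to angle wrapping;
// strict equals() may still be false due to floating-point rounding.
var q = new Quaternion().setFromEuler( e );
var e2 = new Euler().setFromQuaternion( q, e.order );
console.log( e.equals( e2 ) );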
function Cal_init() {
    if (CAL_INIT) return;
    CAL_INIT = true;
    var height = '410';//window.innerHeight * 0.6;
    Cal_options.height = height;
    Cal_options.header = false;
    Cal_options.columnFormat = {
        month: 'ddd', // Mon
        week: 'ddd', // Mon
        day: 'dddd M/d' // Monday 9/7
    }
    $('#calendarui').fullCalendar(Cal_options);
    EventsMan_addUpdateListener(function(){
        Cal_reload();
    });
}
function Cal_reload() {
    //try{
    var eventIDs = EventsMan_getEnrolledEvents();
    Cal_eventSource.events = [];
    $.each(eventIDs, function(index){
        eventDict = EventsMan_getEventByID(this);
        Cal_eventSource.events.push({
            id: eventDict.event_id,
            title: CourseMan_getCourseByID(eventDict.course_id).course_listings,
            start: moment.unix(eventDict.event_start).tz(MAIN_TIMEZONE).toISOString(),
            end: moment.unix(eventDict.event_end).tz(MAIN_TIMEZONE).toISOString(),
            backgroundColor: '#123456', // SECTION_COLOR_MAP[eventDict.section_id]['color'],
            borderColor: '#123456' // SECTION_COLOR_MAP[eventDict.section_id]['color']
        });
    });
    var start = moment.unix(CUR_SEM.start_date);
    $('#calendarui').fullCalendar('gotoDate', start.year(), start.month(), start.date());
    $("#calendarui").fullCalendar("refetchEvents");
    //}
    //catch(err){
    //}
}
nice/static/js/profile/calendar.js
function Cal_init() {
    if (CAL_INIT) return;
    CAL_INIT = true;
    var height = '410';//window.innerHeight * 0.6;
    Cal_options.height = height;
    Cal_options.header = false;
    Cal_options.columnFormat = {
        month: 'ddd', // Mon
        week: 'ddd', // Mon
        day: 'dddd M/d' // Monday 9/7
    }
    $('#calendarui').fullCalendar(Cal_options);
    EventsMan_addUpdateListener(function(){
        Cal_reload();
    });
}
function Cal_reload() {
    //try{
    var eventIDs = EventsMan_getEnrolledEvents();
    Cal_eventSource.events = [];
    $.each(eventIDs, function(index){
        eventDict = EventsMan_getEventByID(this);
        Cal_eventSource.events.push({
            id: eventDict.event_id,
            title: eventDict.event_title,
            start: moment.unix(eventDict.event_start).tz(MAIN_TIMEZONE).toISOString(),
            end: moment.unix(eventDict.event_end).tz(MAIN_TIMEZONE).toISOString(),
            backgroundColor: '#123456', // SECTION_COLOR_MAP[eventDict.section_id]['color'],
            borderColor: '#123456' // SECTION_COLOR_MAP[eventDict.section_id]['color']
        });
    });
    var start = moment.unix(CUR_SEM.start_date);
    $('#calendarui').fullCalendar('gotoDate', start.year(), start.month(), start.date());
    $("#calendarui").fullCalendar("refetchEvents");
    //}
    //catch(err){
    //}
}
now displaying course name in profile page
nice/static/js/profile/calendar.js
now displaying course name in profile page
<ide><path>nice/static/js/profile/calendar.js
<ide> eventDict = EventsMan_getEventByID(this);
<ide> Cal_eventSource.events.push({
<ide> id: eventDict.event_id,
<del> title: eventDict.event_title,
<add> title: CourseMan_getCourseByID(eventDict.course_id).course_listings,
<ide> start: moment.unix(eventDict.event_start).tz(MAIN_TIMEZONE).toISOString(),
<ide> end: moment.unix(eventDict.event_end).tz(MAIN_TIMEZONE).toISOString(),
<ide> backgroundColor: '#123456', // SECTION_COLOR_MAP[eventDict.section_id]['color'],
JavaScript
bsd-3-clause
326131201f8b2efe6b995778004432926ae4f76d
0
lloydbenson/subtext
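The one-line fix above replaces the event's own title with the listings of its parent course. A hedged sketch of the same lookup pattern follows; the Cal_eventTitle helper name and the fallback branch are illustrative assumptions, not part of the committed code:

// Hypothetical helper wrapping the title lookup used in Cal_reload() above.
function Cal_eventTitle(eventDict) {
    var course = CourseMan_getCourseByID(eventDict.course_id);
    if (course && course.course_listings) {
        return course.course_listings;
    }
    return eventDict.event_title; // pre-commit behavior, kept as a fallback (assumption)
}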
// Load modules var Fs = require('fs'); var Http = require('http'); var Path = require('path'); var Stream = require('stream'); var Zlib = require('zlib'); var FormData = require('form-data'); var Hoek = require('hoek'); var Lab = require('lab'); var Subtext = require('..'); var Wreck = require('wreck'); // Declare internals var internals = {}; // Test shortcuts var lab = exports.lab = Lab.script(); var describe = lab.describe; var it = lab.it; var expect = Lab.expect; describe('parse()', function () { it('returns a raw body', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = {}; Subtext.parse(request, null, { parse: false, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json'); expect(Buffer.isBuffer(parsed.payload)).to.be.true; expect(parsed.payload.toString()).to.equal(payload); done(); }); }); it('returns a parsed body', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = {}; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json'); expect(parsed.payload).to.deep.equal(JSON.parse(payload)); done(); }); }); it('returns a parsed body (json-derived media type)', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'application/json-patch+json' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json-patch+json'); expect(parsed.payload).to.deep.equal(JSON.parse(payload)); done(); }); }); it('errors on invalid content type header', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'steve' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Invalid content-type header'); done(); }); }); it('errors on unsupported content type', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'james/bond' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Unsupported Media Type'); done(); }); }); it('errors when content-length header greater than maxBytes', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-length': '50' }; Subtext.parse(request, null, { parse: false, output: 'data', maxBytes: 10 }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Payload content length greater than maximum allowed: 10'); done(); }); }); it('peeks at the unparsed stream of a parsed body', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = {}; var raw = ''; var tap = new Stream.Transform(); tap._transform = function (chunk, encoding, callback) { raw += chunk.toString(); this.push(chunk, encoding); callback(); }; Subtext.parse(request, tap, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; 
expect(parsed.payload).to.deep.equal(JSON.parse(payload)); expect(raw).to.equal(payload); done(); }); }); it('saves file', function (done) { var request = Wreck.toReadableStream('payload'); request.headers = {}; Subtext.parse(request, null, { parse: false, output: 'file' }, function (err, parsed) { expect(err).to.not.exist; var receivedContents = Fs.readFileSync(parsed.payload.path); Fs.unlinkSync(parsed.payload.path); expect(receivedContents.toString()).to.equal('payload'); done(); }); }); it('saves a file after content decoding', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var sourceContents = Fs.readFileSync(path); var stats = Fs.statSync(path); Zlib.gzip(sourceContents, function (err, compressed) { var request = Wreck.toReadableStream(compressed); request.headers = { 'content-encoding': 'gzip' }; Subtext.parse(request, null, { parse: true, output: 'file' }, function (err, parsed) { expect(err).to.not.exist; var receivedContents = Fs.readFileSync(parsed.payload.path); Fs.unlinkSync(parsed.payload.path); expect(receivedContents).to.deep.equal(sourceContents); expect(parsed.payload.bytes).to.equal(stats.size); done(); }); }); }); it('saves a file ignoring content decoding when parse is false', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var sourceContents = Fs.readFileSync(path); Zlib.gzip(sourceContents, function (err, compressed) { var request = Wreck.toReadableStream(compressed); request.headers = { 'content-encoding': 'gzip' }; Subtext.parse(request, null, { parse: false, output: 'file' }, function (err, parsed) { expect(err).to.not.exist; var receivedContents = Fs.readFileSync(parsed.payload.path); Fs.unlinkSync(parsed.payload.path); expect(receivedContents).to.deep.equal(compressed); done(); }); }); }); it('errors on invalid upload directory (parse false)', function (done) { var request = Wreck.toReadableStream('payload'); request.headers = {}; Subtext.parse(request, null, { parse: false, output: 'file', uploads: '/a/b/c/no/such/folder' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.contain('ENOENT'); done(); }); }); it('errors on invalid upload directory (parse true)', function (done) { var request = Wreck.toReadableStream('payload'); request.headers = {}; Subtext.parse(request, null, { parse: true, output: 'file', uploads: '/a/b/c/no/such/folder' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.contain('ENOENT'); done(); }); }); it('processes application/octet-stream', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'application/octet-stream' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/octet-stream'); expect(Buffer.isBuffer(parsed.payload)).to.be.true; expect(parsed.payload.toString()).to.equal(payload); done(); }); }); it('overrides content-type', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data', override: 'application/json' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json'); expect(parsed.payload).to.deep.equal(JSON.parse(payload)); done(); }); }); it('returns a parsed text payload', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var 
request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('text/plain'); expect(parsed.payload).to.deep.equal(payload); done(); }); }); it('parses an allowed content-type', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data', allow: 'text/plain' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('text/plain'); expect(parsed.payload).to.deep.equal(payload); done(); }); }); it('parses an allowed content-type (array)', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data', allow: ['text/plain'] }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('text/plain'); expect(parsed.payload).to.deep.equal(payload); done(); }); }); it('errors on an unallowed content-type', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data', allow: 'application/json' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Unsupported Media Type'); done(); }); }); it('errors on an unallowed content-type (array)', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data', allow: ['application/json'] }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Unsupported Media Type'); done(); }); }); it('parses form encoded payload', function (done) { var payload = 'x=abc'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'application/x-www-form-urlencoded' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/x-www-form-urlencoded'); expect(parsed.payload.x).to.equal('abc'); done(); }); }); it('parses form encoded payload (array keys)', function (done) { var payload = 'x[y]=1&x[z]=2'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'application/x-www-form-urlencoded' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/x-www-form-urlencoded'); expect(parsed.payload).to.deep.equal({ x: { y: '1', z: '2' } }); done(); }); }); it('errors on malformed zipped payload', function (done) { var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-encoding': 'gzip' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Invalid compressed payload'); done(); }); }); it('errors on malformed zipped payload (parse gunzip only)', function (done) { var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; var request = Wreck.toReadableStream(payload); request.headers =
{ 'content-encoding': 'gzip' }; Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Invalid compressed payload'); done(); }); }); it('parses a gzipped payload', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; Zlib.gzip(payload, function (err, compressed) { var request = Wreck.toReadableStream(compressed); request.headers = { 'content-encoding': 'gzip' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.payload.toString()).to.equal(payload); done(); }); }); }); describe('unzip', function () { it('does not return an error when the payload has the correct gzip header and gzipped payload', function (done) { var payload = '{"hi":"hello"}'; Zlib.gzip(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'gzip' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); it('does not return an error when the payload has the correct deflate header and deflated payload', function (done) { var payload = '{"hi":"hello"}'; Zlib.deflate(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'deflate' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); it('does not return an error when the payload has the correct gzip header and gzipped payload (gunzip only)', function (done) { var payload = '{"hi":"hello"}'; Zlib.gzip(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler, payload: { parse: 'gunzip' } } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'gzip' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); it('does not return an error when the payload has the correct deflate header and deflated payload (gunzip only)', function (done) { var payload = '{"hi":"hello"}'; Zlib.deflate(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler, payload: { parse: 'gunzip' } } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'deflate' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); }); describe('multi-part', function () { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="x"\r\n' + '\r\n' + 'First\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="x"\r\n' + '\r\n' + 'Second\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="x"\r\n' + '\r\n' + 'Third\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="field1"\r\n' + '\r\n' + 'Joe Blow\r\nalmost tricked you!\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="field1"\r\n' + '\r\n' + 'Repeated name segment\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; 
name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + '... contents of file1.txt ...\r\r\n' + '--AaB03x--\r\n'; var echo = function (request, reply) { var result = {}; var keys = Object.keys(request.payload); for (var i = 0, il = keys.length; i < il; ++i) { var key = keys[i]; var value = request.payload[key]; result[key] = value._readableState ? true : value; } reply(result); }; it('errors on missing boundary in content-type header', function (done) { var invalidHandler = function (request) { expect(request).to.not.exist; // Must not be called }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/invalid', config: { handler: invalidHandler } }); server.inject({ method: 'POST', url: '/invalid', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data' } }, function (res) { expect(res.result).to.exist; expect(res.result.statusCode).to.equal(400); done(); }); }); it('errors on empty separator in content-type header', function (done) { var invalidHandler = function (request) { expect(request).to.not.exist; // Must not be called }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/invalid', config: { handler: invalidHandler } }); server.inject({ method: 'POST', url: '/invalid', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=' } }, function (res) { expect(res.result).to.exist; expect(res.result.statusCode).to.equal(400); done(); }); }); it('returns parsed multipart data', function (done) { var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: echo } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(Object.keys(res.result).length).to.equal(3); expect(res.result.field1).to.exist; expect(res.result.field1.length).to.equal(2); expect(res.result.field1[1]).to.equal('Repeated name segment'); expect(res.result.pics).to.exist; done(); }); }); it('parses file without content-type', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + '\r\n' + '... contents of file1.txt ...\r\r\n' + '--AaB03x--\r\n'; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: function (request, reply) { reply(request.payload.pics); } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result.toString()).to.equal('... 
contents of file1.txt ...\r'); done(); }); }); it('parses empty file', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + '\r\n' + '--AaB03x--\r\n'; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: function (request, reply) { reply(request.payload); } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.deep.equal({ pics: {} }); done(); }); }); it('errors on missing upload folder', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'something to fail with\r\n' + '--AaB03x--\r\n'; var server = new Hapi.Server({ payload: { uploads: '/a/b/c/d/e/f/g/not' } }); server.route({ method: 'POST', path: '/echo', config: { handler: function (request, reply) { reply(request.payload); }, payload: { output: 'file' } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.statusCode).to.equal(500); done(); }); }); it('errors while processing a parsed data stream in multiple form', function (done) { var payload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n'; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/', handler: function () { } }); server.ext('onPreResponse', function (request, reply) { expect(request.response.isBoom).to.equal(true); expect(request.response.output.statusCode).to.equal(400); expect(request.response.message).to.equal('Invalid multipart payload format'); done(); }); server.start(function () { var options = { hostname: '127.0.0.1', port: server.info.port, path: '/', method: 'POST', headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }; var req = Http.request(options, function (res) { }); req.write(payload); setTimeout(function () { req.destroy(); }, 100); req.on('error', function () { }); }); }); it('parses multiple files as streams', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="files"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'one\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="files"; filename="file2.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'two\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="files"; filename="file3.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'three\r\n' + '--AaB03x--\r\n'; var handler = function (request, reply) { expect(request.payload.files[0].hapi).to.deep.equal({ filename: 'file1.txt', headers: { 'content-disposition': 'form-data; name="files"; filename="file1.txt"', 'content-type': 'text/plain' } }); expect(request.payload.files[1].hapi).to.deep.equal({ filename: 'file2.txt', headers: { 'content-disposition': 'form-data; name="files"; filename="file2.txt"', 'content-type': 'text/plain' } }); expect(request.payload.files[2].hapi).to.deep.equal({ filename: 'file3.txt', headers: { 'content-disposition': 'form-data; name="files"; filename="file3.txt"', 'content-type': 'text/plain' } }); Wreck.read(request.payload.files[1], null, function (err, payload2) { 
Wreck.read(request.payload.files[0], null, function (err, payload1) { Wreck.read(request.payload.files[2], null, function (err, payload3) { reply([payload1, payload2, payload3].join('-')); }); }); }); } var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: handler, payload: { output: 'stream' } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.equal('one-two-three'); done(); }); }); it('parses a file as file', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var handler = function (request, reply) { expect(request.headers['content-type']).to.contain('multipart/form-data'); expect(request.payload.my_file.bytes).to.equal(stats.size); var sourceContents = Fs.readFileSync(path); var receivedContents = Fs.readFileSync(request.payload['my_file'].path); Fs.unlinkSync(request.payload['my_file'].path); expect(sourceContents).to.deep.equal(receivedContents); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'file' } } }); server.start(function () { var form = new FormData(); form.append('my_file', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('parses multiple files as files', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var handler = function (request, reply) { expect(request.payload.file1.bytes).to.equal(stats.size); expect(request.payload.file2.bytes).to.equal(stats.size); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'file' } } }); server.start(function () { var form = new FormData(); form.append('file1', Fs.createReadStream(path)); form.append('file2', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('parses multiple files while waiting for last file to be written', { parallel: false }, function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var orig = Fs.createWriteStream; Fs.createWriteStream = function () { // Make the first file write happen faster by bypassing the disk Fs.createWriteStream = orig; var stream = new Stream.Writable(); stream._write = function (chunk, encoding, callback) { callback(); }; stream.once('finish', function () { stream.emit('close'); }); return stream; }; var handler = function (request, reply) { expect(request.payload.file1.bytes).to.equal(stats.size); expect(request.payload.file2.bytes).to.equal(stats.size); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'file' } } }); server.start(function () { var form = new FormData(); form.append('file1', Fs.createReadStream(path)); form.append('file2', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('parses a file as data', function (done) { var path = Path.join(__dirname, '../package.json'); var handler = function (request, reply) { var fileContents = Fs.readFileSync(path); 
expect(request.payload.my_file.name).to.equal('hapi'); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'data' } } }); server.start(function () { var form = new FormData(); form.append('my_file', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('returns fields when multipart is set to stream mode', function (done) { var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: echo, payload: { output: 'stream' } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(Object.keys(res.result).length).to.equal(3); expect(res.result.field1).to.exist; expect(res.result.field1.length).to.equal(2); expect(res.result.field1[1]).to.equal('Repeated name segment'); expect(res.result.pics).to.exist; done(); }); }); it('parses a file correctly on stream mode', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var fileStream = Fs.createReadStream(path); var fileContents = Fs.readFileSync(path); var fileHandler = function (request) { expect(request.headers['content-type']).to.contain('multipart/form-data'); expect(request.payload['my_file'].hapi).to.deep.equal({ filename: 'image.jpg', headers: { 'content-disposition': 'form-data; name="my_file"; filename="image.jpg"', 'content-type': 'image/jpeg' } }); Wreck.read(request.payload['my_file'], null, function (err, buffer) { expect(err).to.not.exist; expect(fileContents.length).to.equal(buffer.length); expect(fileContents.toString('binary') === buffer.toString('binary')).to.equal(true); done(); }); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: fileHandler, payload: { output: 'stream' } } }); server.start(function () { var form = new FormData(); form.append('my_file', fileStream); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('peeks at parsed multipart data', function (done) { var data = null; var ext = function (request, reply) { var chunks = []; request.on('peek', function (chunk) { chunks.push(chunk); }); request.once('finish', function () { data = Buffer.concat(chunks); }); reply(); }; var handler = function (request, reply) { reply(data); }; var server = new Hapi.Server(); server.ext('onRequest', ext); server.route({ method: 'POST', path: '/', config: { handler: handler } }); server.inject({ method: 'POST', url: '/', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.equal(multipartPayload); done(); }); }); it('parses field names with arrays', function (done) { var payload = '--AaB03x\r\n' + 'Content-Disposition: form-data; name="a[b]"\r\n' + '\r\n' + '3\r\n' + '--AaB03x\r\n' + 'Content-Disposition: form-data; name="a[c]"\r\n' + '\r\n' + '4\r\n' + '--AaB03x--\r\n'; var handler = function (request, reply) { reply(request.payload.a.b + request.payload.a.c); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', handler: handler }); server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-Type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.equal('34'); 
done(); }); }); it('parses field names with arrays and file', function (done) { var payload = '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + 'Content-Disposition: form-data; name="a[b]"\r\n' + '\r\n' + '3\r\n' + '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + 'Content-Disposition: form-data; name="a[c]"\r\n' + '\r\n' + '4\r\n' + '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + 'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n' + 'Content-Type: plain/text\r\n' + '\r\n' + 'and\r\n' + '----WebKitFormBoundaryE19zNvXGzXaLvS5C--\r\n'; var handler = function (request, reply) { reply(request.payload.a.b + request.payload.file + request.payload.a.c); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', handler: handler }); server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-Type': 'multipart/form-data; boundary=--WebKitFormBoundaryE19zNvXGzXaLvS5C' } }, function (res) { expect(res.result).to.equal('3and4'); done(); }); }); }); describe('timeout', function () { it('returns client error message when client request taking too long', function (done) { var server = new Hapi.Server(0, { timeout: { client: 50 } }); server.route({ method: 'POST', path: '/fast', config: { handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/fast', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(408); expect(timer.elapsed()).to.be.at.least(45); done(); }); req.on('error', function (err) { }); // Will error out, so don't allow error to escape test req.write('{}\n'); var now = Date.now(); setTimeout(function () { req.end(); }, 100); }); }); it('returns client error message when client request taking too long (route override)', function (done) { var server = new Hapi.Server(0, { timeout: { client: false } }); server.route({ method: 'POST', path: '/fast', config: { payload: { timeout: 50 }, handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/fast', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(408); expect(timer.elapsed()).to.be.at.least(45); done(); }); req.on('error', function (err) { }); // Will error out, so don't allow error to escape test req.write('{}\n'); var now = Date.now(); setTimeout(function () { req.end(); }, 100); }); }); it('does not return a client error message when client request is fast', function (done) { var server = new Hapi.Server(0, { timeout: { client: 50 } }); server.route({ method: 'POST', path: '/fast', config: { handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var options = { hostname: '127.0.0.1', port: server.info.port, path: '/fast', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(200); done(); }); req.end(); }); }); it('does not return a client error message when response is taking a long time to send', function (done) { var streamHandler = function (request, reply) { var TestStream = function () { Stream.Readable.call(this); }; Hoek.inherits(TestStream, Stream.Readable); TestStream.prototype._read = function (size) { var self = this; if (this.isDone) { return; } this.isDone = true; setTimeout(function () { self.push('Hello'); }, 60); setTimeout(function () {
self.push(null); }, 70); }; reply(new TestStream()); }; var server = new Hapi.Server(0, { timeout: { client: 50 } }); server.route({ method: 'GET', path: '/', config: { handler: streamHandler } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/', method: 'GET' }; var req = Http.request(options, function (res) { expect(timer.elapsed()).to.be.at.least(50); expect(res.statusCode).to.equal(200); done(); }); req.once('error', function (err) { done(); }); req.end(); }); }); it('does not return an error with timeout disabled', function (done) { var server = new Hapi.Server(0, { timeout: { client: false } }); server.route({ method: 'POST', path: '/', config: { handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(200); expect(timer.elapsed()).to.be.at.least(90); done(); }); setTimeout(function () { req.end(); }, 100); }); }); }); });
test/index.js
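The tests above drive Subtext.parse(stream, tap, options, next) directly rather than through a server. A minimal standalone sketch distilled from them follows, assuming the module is installed as 'subtext' (the tests themselves use require('..')); every call shown also appears in the tests:

var Subtext = require('subtext'); // module name assumed for this sketch
var Wreck = require('wreck');

// Wrap a payload in a readable stream and attach the headers Subtext inspects.
var request = Wreck.toReadableStream('{"hello":"world"}');
request.headers = { 'content-type': 'application/json' };

Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) {

    if (err) {
        throw err;
    }

    console.log(parsed.mime);    // 'application/json'
    console.log(parsed.payload); // { hello: 'world' }
});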
// Load modules var Fs = require('fs'); var Http = require('http'); var Path = require('path'); var Stream = require('stream'); var Zlib = require('zlib'); var FormData = require('form-data'); var Hoek = require('hoek'); var Lab = require('lab'); var Subtext = require('..'); var Wreck = require('wreck'); // Declare internals var internals = {}; // Test shortcuts var lab = exports.lab = Lab.script(); var describe = lab.describe; var it = lab.it; var expect = Lab.expect; describe('parse()', function () { it('returns a raw body', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = {}; Subtext.parse(request, null, { parse: false, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json'); expect(Buffer.isBuffer(parsed.payload)).to.be.true; expect(parsed.payload.toString()).to.equal(payload); done(); }); }); it('returns a parsed body', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = {}; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json'); expect(parsed.payload).to.deep.equal(JSON.parse(payload)); done(); }); }); it('returns a parsed body (json-derived media type)', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'application/json-patch+json' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json-patch+json'); expect(parsed.payload).to.deep.equal(JSON.parse(payload)); done(); }); }); it('errors on invalid content type header', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'steve' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Invalid content-type header'); done(); }); }); it('errors on unsupported content type', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'james/bond' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Unsupported Media Type'); done(); }); }); it('errors when content-length header greater than maxBytes', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-length': '50' }; Subtext.parse(request, null, { parse: false, output: 'data', maxBytes: 10 }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.equal('Payload content length greater than maximum allowed: 10'); done(); }); }); it('peeks at the unparsed stream of a parsed body', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = {}; var raw = ''; var tap = new Stream.Transform(); tap._transform = function (chunk, encoding, callback) { raw += chunk.toString(); this.push(chunk, encoding); callback(); }; Subtext.parse(request, tap, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; 
expect(parsed.payload).to.deep.equal(JSON.parse(payload)); expect(raw).to.equal(payload); done(); }); }); it('saves file', function (done) { var request = Wreck.toReadableStream('payload'); request.headers = {}; Subtext.parse(request, null, { parse: false, output: 'file' }, function (err, parsed) { expect(err).to.not.exist; var receivedContents = Fs.readFileSync(parsed.payload.path); Fs.unlinkSync(parsed.payload.path); expect(receivedContents.toString()).to.equal('payload'); done(); }); }); it('saves a file after content decoding', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var sourceContents = Fs.readFileSync(path); var stats = Fs.statSync(path); Zlib.gzip(sourceContents, function (err, compressed) { var request = Wreck.toReadableStream(compressed); request.headers = { 'content-encoding': 'gzip' }; Subtext.parse(request, null, { parse: true, output: 'file' }, function (err, parsed) { expect(err).to.not.exist; var receivedContents = Fs.readFileSync(parsed.payload.path); Fs.unlinkSync(parsed.payload.path); expect(receivedContents).to.deep.equal(sourceContents); expect(parsed.payload.bytes).to.equal(stats.size); done(); }); }); }); it('saves a file ignoring content decoding when parse is false', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var sourceContents = Fs.readFileSync(path); Zlib.gzip(sourceContents, function (err, compressed) { var request = Wreck.toReadableStream(compressed); request.headers = { 'content-encoding': 'gzip' }; Subtext.parse(request, null, { parse: false, output: 'file' }, function (err, parsed) { expect(err).to.not.exist; var receivedContents = Fs.readFileSync(parsed.payload.path); Fs.unlinkSync(parsed.payload.path); expect(receivedContents).to.deep.equal(compressed); done(); }); }); }); it('errors on invalid upload directory (parse false)', function (done) { var request = Wreck.toReadableStream('payload'); request.headers = {}; Subtext.parse(request, null, { parse: false, output: 'file', uploads: '/a/b/c/no/such/folder' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.contain('ENOENT'); done(); }); }); it('errors on invalid upload directory (parse true)', function (done) { var request = Wreck.toReadableStream('payload'); request.headers = {}; Subtext.parse(request, null, { parse: true, output: 'file', uploads: '/a/b/c/no/such/folder' }, function (err, parsed) { expect(err).to.exist; expect(err.message).to.contain('ENOENT'); done(); }); }); it('processes application/octet-stream', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'application/octet-stream' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/octet-stream'); expect(Buffer.isBuffer(parsed.payload)).to.be.true; expect(parsed.payload.toString()).to.equal(payload); done(); }); }); it('overrides content-type', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data', override: 'application/json' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('application/json'); expect(parsed.payload).to.deep.equal(JSON.parse(payload)); done(); }); }); it('returns a parsed text payload', function (done) { var payload = '{"x":"1","y":"2","z":"3"}'; var 
request = Wreck.toReadableStream(payload); request.headers = { 'content-type': 'text/plain' }; Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { expect(err).to.not.exist; expect(parsed.mime).to.equal('text/plain'); expect(parsed.payload).to.deep.equal(payload); done(); }); }); describe('parse mode', function () { it('returns 200 on text mime type when allowed', function (done) { var textHandler = function (request, reply) { reply(request.payload + '+456'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/textOnly', config: { handler: textHandler, payload: { allow: 'text/plain' } } }); server.inject({ method: 'POST', url: '/textOnly', payload: 'testing123', headers: { 'content-type': 'text/plain' } }, function (res) { expect(res.statusCode).to.equal(200); expect(res.result).to.equal('testing123+456'); done(); }); }); it('returns 415 on non text mime type when disallowed', function (done) { var textHandler = function (request, reply) { reply(request.payload + '+456'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/textOnly', config: { handler: textHandler, payload: { allow: 'text/plain' } } }); server.inject({ method: 'POST', url: '/textOnly', payload: 'testing123', headers: { 'content-type': 'application/octet-stream' } }, function (res) { expect(res.statusCode).to.equal(415); done(); }); }); it('returns 200 on text mime type when allowed (array)', function (done) { var textHandler = function (request, reply) { reply(request.payload + '+456'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/textOnlyArray', config: { handler: textHandler, payload: { allow: ['text/plain'] } } }); server.inject({ method: 'POST', url: '/textOnlyArray', payload: 'testing123', headers: { 'content-type': 'text/plain' } }, function (res) { expect(res.statusCode).to.equal(200); expect(res.result).to.equal('testing123+456'); done(); }); }); it('returns 415 on non text mime type when disallowed (array)', function (done) { var textHandler = function (request, reply) { reply(request.payload + '+456'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/textOnlyArray', config: { handler: textHandler, payload: { allow: ['text/plain'] } } }); server.inject({ method: 'POST', url: '/textOnlyArray', payload: 'testing123', headers: { 'content-type': 'application/octet-stream' } }, function (res) { expect(res.statusCode).to.equal(415); done(); }); }); it('parses application/x-www-form-urlencoded', function (done) { var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', handler: function (request, reply) { reply('got ' + request.payload.x); } }); server.inject({ method: 'POST', url: '/', payload: 'x=abc', headers: { 'content-type': 'application/x-www-form-urlencoded' } }, function (res) { expect(res.statusCode).to.equal(200); expect(res.result).to.equal('got abc'); done(); }); }); it('parses application/x-www-form-urlencoded with arrays', function (done) { var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', handler: function (request, reply) { reply(request.payload.x.y + request.payload.x.z); } }); server.inject({ method: 'POST', url: '/', payload: 'x[y]=1&x[z]=2', headers: { 'content-type': 'application/x-www-form-urlencoded' } }, function (res) { expect(res.statusCode).to.equal(200); expect(res.result).to.equal('12'); done(); }); }); }); describe('unzip', function () { it('errors on malformed payload', function (done) { var payload = 
'7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; var handler = function () { throw new Error('never called'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler } }); server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-encoding': 'gzip' } }, function (res) { expect(res.result).to.exist; expect(res.result.statusCode).to.equal(400); done(); }); }); it('errors on malformed payload (gunzip only)', function (done) { var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; var handler = function () { throw new Error('never called'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler, payload: { parse: 'gunzip' } } }); server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-encoding': 'gzip' } }, function (res) { expect(res.result).to.exist; expect(res.result.statusCode).to.equal(400); done(); }); }); it('does not return an error when the payload has the correct gzip header and gzipped payload', function (done) { var payload = '{"hi":"hello"}'; Zlib.gzip(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'gzip' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); it('does not return an error when the payload has the correct deflate header and deflated payload', function (done) { var payload = '{"hi":"hello"}'; Zlib.deflate(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'deflate' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); it('does not return an error when the payload has the correct gzip header and gzipped payload (gunzip only)', function (done) { var payload = '{"hi":"hello"}'; Zlib.gzip(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler, payload: { parse: 'gunzip' } } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'gzip' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); it('does not return an error when the payload has the correct deflate header and deflated payload (gunzip only)', function (done) { var payload = '{"hi":"hello"}'; Zlib.deflate(payload, function (err, result) { var handler = function (request, reply) { reply('Success'); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', config: { handler: handler, payload: { parse: 'gunzip' } } }); server.inject({ method: 'POST', url: '/', payload: result, headers: { 'content-encoding': 'deflate' } }, function (res) { expect(res.statusCode).to.equal(200); done(); }); }); }); }); describe('multi-part', function () { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="x"\r\n' + '\r\n' + 'First\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="x"\r\n' + '\r\n' + 'Second\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="x"\r\n' + '\r\n' + 
'Third\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="field1"\r\n' + '\r\n' + 'Joe Blow\r\nalmost tricked you!\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="field1"\r\n' + '\r\n' + 'Repeated name segment\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + '... contents of file1.txt ...\r\r\n' + '--AaB03x--\r\n'; var echo = function (request, reply) { var result = {}; var keys = Object.keys(request.payload); for (var i = 0, il = keys.length; i < il; ++i) { var key = keys[i]; var value = request.payload[key]; result[key] = value._readableState ? true : value; } reply(result); }; it('errors on missing boundary in content-type header', function (done) { var invalidHandler = function (request) { expect(request).to.not.exist; // Must not be called }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/invalid', config: { handler: invalidHandler } }); server.inject({ method: 'POST', url: '/invalid', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data' } }, function (res) { expect(res.result).to.exist; expect(res.result.statusCode).to.equal(400); done(); }); }); it('errors on empty separator in content-type header', function (done) { var invalidHandler = function (request) { expect(request).to.not.exist; // Must not be called }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/invalid', config: { handler: invalidHandler } }); server.inject({ method: 'POST', url: '/invalid', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=' } }, function (res) { expect(res.result).to.exist; expect(res.result.statusCode).to.equal(400); done(); }); }); it('returns parsed multipart data', function (done) { var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: echo } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(Object.keys(res.result).length).to.equal(3); expect(res.result.field1).to.exist; expect(res.result.field1.length).to.equal(2); expect(res.result.field1[1]).to.equal('Repeated name segment'); expect(res.result.pics).to.exist; done(); }); }); it('parses file without content-type', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + '\r\n' + '... contents of file1.txt ...\r\r\n' + '--AaB03x--\r\n'; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: function (request, reply) { reply(request.payload.pics); } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result.toString()).to.equal('... 
contents of file1.txt ...\r'); done(); }); }); it('parses empty file', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + '\r\n' + '--AaB03x--\r\n'; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: function (request, reply) { reply(request.payload); } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.deep.equal({ pics: {} }); done(); }); }); it('errors on missing upload folder', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'something to fail with\r\n' + '--AaB03x--\r\n'; var server = new Hapi.Server({ payload: { uploads: '/a/b/c/d/e/f/g/not' } }); server.route({ method: 'POST', path: '/echo', config: { handler: function (request, reply) { reply(request.payload); }, payload: { output: 'file' } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.statusCode).to.equal(500); done(); }); }); it('errors while processing a parsed data stream in multiple form', function (done) { var payload = '--AaB03x\r\n' + 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n'; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/', handler: function () { } }); server.ext('onPreResponse', function (request, reply) { expect(request.response.isBoom).to.equal(true); expect(request.response.output.statusCode).to.equal(400); expect(request.response.message).to.equal('Invalid multipart payload format'); done(); }); server.start(function () { var options = { hostname: '127.0.0.1', port: server.info.port, path: '/', method: 'POST', headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }; var req = Http.request(options, function (res) { }); req.write(payload); setTimeout(function () { req.destroy(); }, 100); req.on('error', function () { }); }); }); it('parses multiple files as streams', function (done) { var multipartPayload = '--AaB03x\r\n' + 'content-disposition: form-data; name="files"; filename="file1.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'one\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="files"; filename="file2.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'two\r\n' + '--AaB03x\r\n' + 'content-disposition: form-data; name="files"; filename="file3.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'three\r\n' + '--AaB03x--\r\n'; var handler = function (request, reply) { expect(request.payload.files[0].hapi).to.deep.equal({ filename: 'file1.txt', headers: { 'content-disposition': 'form-data; name="files"; filename="file1.txt"', 'content-type': 'text/plain' } }); expect(request.payload.files[1].hapi).to.deep.equal({ filename: 'file2.txt', headers: { 'content-disposition': 'form-data; name="files"; filename="file2.txt"', 'content-type': 'text/plain' } }); expect(request.payload.files[2].hapi).to.deep.equal({ filename: 'file3.txt', headers: { 'content-disposition': 'form-data; name="files"; filename="file3.txt"', 'content-type': 'text/plain' } }); Wreck.read(request.payload.files[1], null, function (err, payload2) { 
Wreck.read(request.payload.files[0], null, function (err, payload1) { Wreck.read(request.payload.files[2], null, function (err, payload3) { reply([payload1, payload2, payload3].join('-')); }); }); }); } var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: handler, payload: { output: 'stream' } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.equal('one-two-three'); done(); }); }); it('parses a file as file', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var handler = function (request, reply) { expect(request.headers['content-type']).to.contain('multipart/form-data'); expect(request.payload.my_file.bytes).to.equal(stats.size); var sourceContents = Fs.readFileSync(path); var receivedContents = Fs.readFileSync(request.payload['my_file'].path); Fs.unlinkSync(request.payload['my_file'].path); expect(sourceContents).to.deep.equal(receivedContents); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'file' } } }); server.start(function () { var form = new FormData(); form.append('my_file', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('parses multiple files as files', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var handler = function (request, reply) { expect(request.payload.file1.bytes).to.equal(stats.size); expect(request.payload.file2.bytes).to.equal(stats.size); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'file' } } }); server.start(function () { var form = new FormData(); form.append('file1', Fs.createReadStream(path)); form.append('file2', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('parses multiple files while waiting for last file to be written', { parallel: false }, function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var orig = Fs.createWriteStream; Fs.createWriteStream = function () { // Make the first file write happen faster by bypassing the disk Fs.createWriteStream = orig; var stream = new Stream.Writable(); stream._write = function (chunk, encoding, callback) { callback(); }; stream.once('finish', function () { stream.emit('close'); }); return stream; }; var handler = function (request, reply) { expect(request.payload.file1.bytes).to.equal(stats.size); expect(request.payload.file2.bytes).to.equal(stats.size); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'file' } } }); server.start(function () { var form = new FormData(); form.append('file1', Fs.createReadStream(path)); form.append('file2', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('parses a file as data', function (done) { var path = Path.join(__dirname, '../package.json'); var handler = function (request, reply) { var fileContents = Fs.readFileSync(path); 
expect(request.payload.my_file.name).to.equal('hapi'); done(); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: handler, payload: { output: 'data' } } }); server.start(function () { var form = new FormData(); form.append('my_file', Fs.createReadStream(path)); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('returns fields when multipart is set to stream mode', function (done) { var server = new Hapi.Server(); server.route({ method: 'POST', path: '/echo', config: { handler: echo, payload: { output: 'stream' } } }); server.inject({ method: 'POST', url: '/echo', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(Object.keys(res.result).length).to.equal(3); expect(res.result.field1).to.exist; expect(res.result.field1.length).to.equal(2); expect(res.result.field1[1]).to.equal('Repeated name segment'); expect(res.result.pics).to.exist; done(); }); }); it('parses a file correctly on stream mode', function (done) { var path = Path.join(__dirname, './file/image.jpg'); var stats = Fs.statSync(path); var fileStream = Fs.createReadStream(path); var fileContents = Fs.readFileSync(path); var fileHandler = function (request) { expect(request.headers['content-type']).to.contain('multipart/form-data'); expect(request.payload['my_file'].hapi).to.deep.equal({ filename: 'image.jpg', headers: { 'content-disposition': 'form-data; name="my_file"; filename="image.jpg"', 'content-type': 'image/jpeg' } }); Wreck.read(request.payload['my_file'], null, function (err, buffer) { expect(err).to.not.exist; expect(fileContents.length).to.equal(buffer.length); expect(fileContents.toString('binary') === buffer.toString('binary')).to.equal(true); done(); }); }; var server = new Hapi.Server(0); server.route({ method: 'POST', path: '/file', config: { handler: fileHandler, payload: { output: 'stream' } } }); server.start(function () { var form = new FormData(); form.append('my_file', fileStream); Wreck.post(server.info.uri + '/file', { payload: form, headers: form.getHeaders() }, function (err, res, payload) { }); }); }); it('peeks at parsed multipart data', function (done) { var data = null; var ext = function (request, reply) { var chunks = []; request.on('peek', function (chunk) { chunks.push(chunk); }); request.once('finish', function () { data = Buffer.concat(chunks); }); reply(); }; var handler = function (request, reply) { reply(data); }; var server = new Hapi.Server(); server.ext('onRequest', ext); server.route({ method: 'POST', path: '/', config: { handler: handler } }); server.inject({ method: 'POST', url: '/', payload: multipartPayload, headers: { 'content-type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.equal(multipartPayload); done(); }); }); it('parses field names with arrays', function (done) { var payload = '--AaB03x\r\n' + 'Content-Disposition: form-data; name="a[b]"\r\n' + '\r\n' + '3\r\n' + '--AaB03x\r\n' + 'Content-Disposition: form-data; name="a[c]"\r\n' + '\r\n' + '4\r\n' + '--AaB03x--\r\n'; var handler = function (request, reply) { reply(request.payload.a.b + request.payload.a.c); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', handler: handler }); server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-Type': 'multipart/form-data; boundary=AaB03x' } }, function (res) { expect(res.result).to.equal('34'); 
done(); }); }); it('parses field names with arrays and file', function (done) { var payload = '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + 'Content-Disposition: form-data; name="a[b]"\r\n' + '\r\n' + '3\r\n' + '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + 'Content-Disposition: form-data; name="a[c]"\r\n' + '\r\n' + '4\r\n' + '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + 'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n' + 'Content-Type: plain/text\r\n' + '\r\n' + 'and\r\n' + '----WebKitFormBoundaryE19zNvXGzXaLvS5C--\r\n'; var handler = function (request, reply) { reply(request.payload.a.b + request.payload.file + request.payload.a.c); }; var server = new Hapi.Server(); server.route({ method: 'POST', path: '/', handler: handler }); server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-Type': 'multipart/form-data; boundary=--WebKitFormBoundaryE19zNvXGzXaLvS5C' } }, function (res) { expect(res.result).to.equal('3and4'); done(); }); }); }); describe('timeout', function () { it('returns client error message when client request taking too long', function (done) { var server = new Hapi.Server(0, { timeout: { client: 50 } }); server.route({ method: 'POST', path: '/fast', config: { handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/fast', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(408); expect(timer.elapsed()).to.be.at.least(45); done(); }); req.on('error', function (err) { }); // Will error out, so don't allow error to escape test req.write('{}\n'); var now = Date.now(); setTimeout(function () { req.end(); }, 100); }); }); it('returns client error message when client request taking too long (route override', function (done) { var server = new Hapi.Server(0, { timeout: { client: false } }); server.route({ method: 'POST', path: '/fast', config: { payload: { timeout: 50 }, handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/fast', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(408); expect(timer.elapsed()).to.be.at.least(45); done(); }); req.on('error', function (err) { }); // Will error out, so don't allow error to escape test req.write('{}\n'); var now = Date.now(); setTimeout(function () { req.end(); }, 100); }); }); it('does not return a client error message when client request is fast', function (done) { var server = new Hapi.Server(0, { timeout: { client: 50 } }); server.route({ method: 'POST', path: '/fast', config: { handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var options = { hostname: '127.0.0.1', port: server.info.port, path: '/fast', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(200); done(); }); req.end(); }); }); it('does not return a client error message when response is taking a long time to send', function (done) { var streamHandler = function (request, reply) { var TestStream = function () { Stream.Readable.call(this); }; Hoek.inherits(TestStream, Stream.Readable); TestStream.prototype._read = function (size) { var self = this; if (this.isDone) { return; } this.isDone = true; setTimeout(function () { self.push('Hello'); }, 60); setTimeout(function () { 
self.push(null); }, 70); }; reply(new TestStream()); }; var server = new Hapi.Server(0, { timeout: { client: 50 } }); server.route({ method: 'GET', path: '/', config: { handler: streamHandler } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/', method: 'GET' }; var req = Http.request(options, function (res) { expect(timer.elapsed()).to.be.at.least(50); expect(res.statusCode).to.equal(200); done(); }); req.once('error', function (err) { done(); }); req.end(); }); }); it('does not return an error with timeout disabled', function (done) { var server = new Hapi.Server(0, { timeout: { client: false } }); server.route({ method: 'POST', path: '/', config: { handler: function (request, reply) { reply('fast'); } } }); server.start(function () { var timer = new Hoek.Bench(); var options = { hostname: '127.0.0.1', port: server.info.port, path: '/', method: 'POST' }; var req = Http.request(options, function (res) { expect(res.statusCode).to.equal(200); expect(timer.elapsed()).to.be.at.least(90); done(); }); setTimeout(function () { req.end(); }, 100); }); }); }); });
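The hand-built payload strings in the tests above all follow the multipart/form-data wire format: each part opens with the boundary prefixed by two dashes, then its part headers, a blank line, the body, and a trailing CRLF; the final boundary carries two extra dashes. A minimal sketch of a helper that assembles such a payload for simple text fields (the buildMultipart name and its field layout are illustrative only, not part of the test suite):

// Minimal sketch: assemble a multipart/form-data body for text fields.
// File parts would additionally carry filename and content-type headers
// per part, as in the tests above.
var buildMultipart = function (boundary, fields) {

    var body = '';
    Object.keys(fields).forEach(function (name) {

        body += '--' + boundary + '\r\n' +
            'content-disposition: form-data; name="' + name + '"\r\n' +
            '\r\n' +
            fields[name] + '\r\n';
    });

    return body + '--' + boundary + '--\r\n';    // Closing boundary
};

// Produces a payload with the same shape as the ones injected above:
var payload = buildMultipart('AaB03x', { field1: 'Joe Blow' });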
More tests
test/index.js
More tests
<ide><path>est/index.js <ide> }); <ide> }); <ide> <del> describe('parse mode', function () { <del> <del> it('returns 200 on text mime type when allowed', function (done) { <del> <del> var textHandler = function (request, reply) { <del> <del> reply(request.payload + '+456'); <del> }; <del> <del> var server = new Hapi.Server(); <del> server.route({ method: 'POST', path: '/textOnly', config: { handler: textHandler, payload: { allow: 'text/plain' } } }); <del> <del> server.inject({ method: 'POST', url: '/textOnly', payload: 'testing123', headers: { 'content-type': 'text/plain' } }, function (res) { <del> <del> expect(res.statusCode).to.equal(200); <del> expect(res.result).to.equal('testing123+456'); <del> done(); <del> }); <del> }); <del> <del> it('returns 415 on non text mime type when disallowed', function (done) { <del> <del> var textHandler = function (request, reply) { <del> <del> reply(request.payload + '+456'); <del> }; <del> <del> var server = new Hapi.Server(); <del> server.route({ method: 'POST', path: '/textOnly', config: { handler: textHandler, payload: { allow: 'text/plain' } } }); <del> <del> server.inject({ method: 'POST', url: '/textOnly', payload: 'testing123', headers: { 'content-type': 'application/octet-stream' } }, function (res) { <del> <del> expect(res.statusCode).to.equal(415); <del> done(); <del> }); <del> }); <del> <del> it('returns 200 on text mime type when allowed (array)', function (done) { <del> <del> var textHandler = function (request, reply) { <del> <del> reply(request.payload + '+456'); <del> }; <del> <del> var server = new Hapi.Server(); <del> server.route({ method: 'POST', path: '/textOnlyArray', config: { handler: textHandler, payload: { allow: ['text/plain'] } } }); <del> <del> server.inject({ method: 'POST', url: '/textOnlyArray', payload: 'testing123', headers: { 'content-type': 'text/plain' } }, function (res) { <del> <del> expect(res.statusCode).to.equal(200); <del> expect(res.result).to.equal('testing123+456'); <del> done(); <del> }); <del> }); <del> <del> it('returns 415 on non text mime type when disallowed (array)', function (done) { <del> <del> var textHandler = function (request, reply) { <del> <del> reply(request.payload + '+456'); <del> }; <del> <del> var server = new Hapi.Server(); <del> server.route({ method: 'POST', path: '/textOnlyArray', config: { handler: textHandler, payload: { allow: ['text/plain'] } } }); <del> <del> server.inject({ method: 'POST', url: '/textOnlyArray', payload: 'testing123', headers: { 'content-type': 'application/octet-stream' } }, function (res) { <del> <del> expect(res.statusCode).to.equal(415); <del> done(); <del> }); <del> }); <del> <del> it('parses application/x-www-form-urlencoded', function (done) { <del> <del> var server = new Hapi.Server(); <del> <del> server.route({ <del> method: 'POST', <del> path: '/', <del> handler: function (request, reply) { <del> <del> reply('got ' + request.payload.x); <del> } <del> }); <del> <del> server.inject({ method: 'POST', url: '/', payload: 'x=abc', headers: { 'content-type': 'application/x-www-form-urlencoded' } }, function (res) { <del> <del> expect(res.statusCode).to.equal(200); <del> expect(res.result).to.equal('got abc'); <del> done(); <del> }); <del> }); <del> <del> it('parses application/x-www-form-urlencoded with arrays', function (done) { <del> <del> var server = new Hapi.Server(); <del> <del> server.route({ <del> method: 'POST', <del> path: '/', <del> handler: function (request, reply) { <del> <del> reply(request.payload.x.y + request.payload.x.z); <del> } <del> 
}); <del> <del> server.inject({ method: 'POST', url: '/', payload: 'x[y]=1&x[z]=2', headers: { 'content-type': 'application/x-www-form-urlencoded' } }, function (res) { <del> <del> expect(res.statusCode).to.equal(200); <del> expect(res.result).to.equal('12'); <add> it('parses an allowed content-type', function (done) { <add> <add> var payload = '{"x":"1","y":"2","z":"3"}'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-type': 'text/plain' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data', allow: 'text/plain' }, function (err, parsed) { <add> <add> expect(err).to.not.exist; <add> expect(parsed.mime).to.equal('text/plain'); <add> expect(parsed.payload).to.deep.equal(payload); <add> done(); <add> }); <add> }); <add> <add> it('parses an allowed content-type (array)', function (done) { <add> <add> var payload = '{"x":"1","y":"2","z":"3"}'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-type': 'text/plain' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data', allow: ['text/plain'] }, function (err, parsed) { <add> <add> expect(err).to.not.exist; <add> expect(parsed.mime).to.equal('text/plain'); <add> expect(parsed.payload).to.deep.equal(payload); <add> done(); <add> }); <add> }); <add> <add> it('errors on an unallowed content-type', function (done) { <add> <add> var payload = '{"x":"1","y":"2","z":"3"}'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-type': 'text/plain' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data', allow: 'application/json' }, function (err, parsed) { <add> <add> expect(err).to.exist; <add> expect(err.message).to.equal('Unsupported Media Type'); <add> done(); <add> }); <add> }); <add> <add> it('errors on an unallowed content-type (array)', function (done) { <add> <add> var payload = '{"x":"1","y":"2","z":"3"}'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-type': 'text/plain' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data', allow: ['application/json'] }, function (err, parsed) { <add> <add> expect(err).to.exist; <add> expect(err.message).to.equal('Unsupported Media Type'); <add> done(); <add> }); <add> }); <add> <add> it('parses form encoded payload', function (done) { <add> <add> var payload = 'x=abc'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-type': 'application/x-www-form-urlencoded' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { <add> <add> expect(err).to.not.exist; <add> expect(parsed.mime).to.equal('application/x-www-form-urlencoded'); <add> expect(parsed.payload.x).to.equal('abc'); <add> done(); <add> }); <add> }); <add> <add> it('parses form encoded payload (array keys)', function (done) { <add> <add> var payload = 'x[y]=1&x[z]=2'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-type': 'application/x-www-form-urlencoded' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { <add> <add> expect(err).to.not.exist; <add> expect(parsed.mime).to.equal('application/x-www-form-urlencoded'); <add> expect(parsed.payload).to.deep.equal({ x: { y: '1', z: '2' } }); <add> done(); <add> }); <add> }); <add> <add> it('errors on malformed zipped payload', function (done) {
<add> <add> var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-encoding': 'gzip' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { <add> <add> expect(err).to.exist; <add> expect(err.message).to.equal('Invalid compressed payload'); <add> done(); <add> }); <add> }); <add> <add> it('errors on malformed zipped payload (parse gunzip only)', function (done) { <add> <add> var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; <add> var request = Wreck.toReadableStream(payload); <add> request.headers = { <add> 'content-encoding': 'gzip' <add> }; <add> <add> Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, function (err, parsed) { <add> <add> expect(err).to.exist; <add> expect(err.message).to.equal('Invalid compressed payload'); <add> done(); <add> }); <add> }); <add> <add> it('parses a gzipped payload', function (done) { <add> <add> var payload = '{"x":"1","y":"2","z":"3"}'; <add> Zlib.gzip(payload, function (err, compressed) { <add> <add> var request = Wreck.toReadableStream(compressed); <add> request.headers = { <add> 'content-encoding': 'gzip' <add> }; <add> <add> Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { <add> <add> expect(err).to.not.exist; <add> expect(parsed.payload.toString()).to.equal(payload); <ide> done(); <ide> }); <ide> }); <ide> }); <ide> <ide> describe('unzip', function () { <del> <del> it('errors on malformed payload', function (done) { <del> <del> var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; <del> <del> var handler = function () { <del> <del> throw new Error('never called'); <del> }; <del> <del> var server = new Hapi.Server(); <del> server.route({ method: 'POST', path: '/', config: { handler: handler } }); <del> <del> server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-encoding': 'gzip' } }, function (res) { <del> <del> expect(res.result).to.exist; <del> expect(res.result.statusCode).to.equal(400); <del> done(); <del> }); <del> }); <del> <del> it('errors on malformed payload (gunzip only)', function (done) { <del> <del> var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; <del> <del> var handler = function () { <del> <del> throw new Error('never called'); <del> }; <del> <del> var server = new Hapi.Server(); <del> server.route({ method: 'POST', path: '/', config: { handler: handler, payload: { parse: 'gunzip' } } }); <del> <del> server.inject({ method: 'POST', url: '/', payload: payload, headers: { 'content-encoding': 'gzip' } }, function (res) { <del> <del> expect(res.result).to.exist; <del> expect(res.result.statusCode).to.equal(400); <del> done(); <del> }); <del> }); <ide> <ide> it('does not return an error when the payload has the correct gzip header and gzipped payload', function (done) { <ide>
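The diff above is the substance of the change: rather than standing up a Hapi server and asserting on injected responses, the rewritten tests hand a readable stream directly to Subtext.parse and assert on the callback result. A condensed sketch of that new call shape, using only the calls visible in the added lines (Wreck.toReadableStream stands in for an HTTP request, and err carries a descriptive message such as 'Unsupported Media Type' on failure):

// Sketch of the direct-parse pattern the added tests use: a readable
// stream plus a headers object substitutes for a real HTTP request.
var payload = '{"x":"1","y":"2","z":"3"}';
var request = Wreck.toReadableStream(payload);
request.headers = { 'content-type': 'text/plain' };

Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) {

    // On success err is null, parsed.mime holds the detected content-type,
    // and parsed.payload holds the decoded body.
    console.log(parsed.mime, parsed.payload);
});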
Java
epl-1.0
2598db6abe4b00540491da89f401ea59ccda57b1
0
jtrfp/terminal-recall,jtrfp/terminal-recall,jtrfp/terminal-recall
/******************************************************************************* * This file is part of TERMINAL RECALL * Copyright (c) 2012-2016 Chuck Ritola * Part of the jTRFP.org project * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Public License v3.0 * which accompanies this distribution, and is available at * http://www.gnu.org/licenses/gpl.html * * Contributors: * chuck - initial API and implementation ******************************************************************************/ package org.jtrfp.trcl.obj; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.math3.exception.MathArithmeticException; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; import org.jtrfp.trcl.ObjectDefinitionWindow; import org.jtrfp.trcl.PrimitiveList; import org.jtrfp.trcl.SpacePartitioningGrid; import org.jtrfp.trcl.Submitter; import org.jtrfp.trcl.WeakPropertyChangeSupport; import org.jtrfp.trcl.World; import org.jtrfp.trcl.beh.Behavior; import org.jtrfp.trcl.beh.BehaviorNotFoundException; import org.jtrfp.trcl.beh.CollisionBehavior; import org.jtrfp.trcl.coll.CollectionActionDispatcher; import org.jtrfp.trcl.coll.PropertyListenable; import org.jtrfp.trcl.core.NotReadyException; import org.jtrfp.trcl.core.TR; import org.jtrfp.trcl.core.TRFuture; import org.jtrfp.trcl.gpu.GPU; import org.jtrfp.trcl.gpu.Model; import org.jtrfp.trcl.gpu.RenderList; import org.jtrfp.trcl.gpu.Renderer; import org.jtrfp.trcl.math.Mat4x4; import org.jtrfp.trcl.math.Vect3D; import org.jtrfp.trcl.mem.VEC4Address; public class WorldObject implements PositionedRenderable, PropertyListenable, Rotatable { public static final String HEADING ="heading"; public static final String TOP ="top"; public static final String ACTIVE ="active"; private double[] heading = new double[] { 0, 0, 1 }, oldHeading= new double[] {Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY}; private double[] top = new double[] { 0, 1, 0 }, oldTop = new double[] {Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY}; protected volatile double[] position = new double[3], positionAfterLoop = new double[3], oldPosition = new double[]{Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY}; private boolean loopedBefore = false; protected double[] modelOffset= new double[3]; private final double[]positionWithOffset = new double[3]; private boolean needToRecalcMatrix=true; private final TR tr; private boolean visible = true; private TRFuture<Model>model; private int[] triangleObjectDefinitions; private int[] transparentTriangleObjectDefinitions; protected Integer matrixID; private volatile WeakReference<SpacePartitioningGrid> containingGrid; private ArrayList<Behavior> inactiveBehaviors = new ArrayList<Behavior>(); private ArrayList<CollisionBehavior>collisionBehaviors = new ArrayList<CollisionBehavior>(); private ArrayList<Behavior> tickBehaviors = new ArrayList<Behavior>(); private boolean active = true; private byte renderFlags=0; private boolean immuneToOpaqueDepthTest = false; private boolean objectDefsInitialized = false; protected final double[] aX = new double[3]; protected final double[] aY = new double[3]; protected final double[] aZ = new double[3]; protected final double[] rotTransM = new double[16]; 
protected final double[] camM = new double[16]; protected final double[] rMd = new double[16]; protected final double[] tMd = new double[16]; protected double[] cMd = new double[16]; private boolean respondToTick = true; private double scale = 1.; private final ReentrantLock lock = new ReentrantLock(); private CollectionActionDispatcher<VEC4Address> opaqueObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); private CollectionActionDispatcher<VEC4Address> transparentObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); protected final WeakPropertyChangeSupport pcs = new WeakPropertyChangeSupport(new PropertyChangeSupport(this)); public enum RenderFlags{ IgnoreCamera((byte)0x1); private final byte mask; private RenderFlags(byte mask){ this.mask=mask; } public byte getMask() { return mask; } }; public WorldObject(TR tr) { this.tr = tr; // Matrix constants setup rMd[15] = 1; tMd[0] = 1; tMd[5] = 1; tMd[10] = 1; tMd[15] = 1; } public WorldObject(TR tr, Model m) { this(tr); setModel(m); }// end constructor void proposeCollision(WorldObject other) { for (int i = 0; i < collisionBehaviors.size(); i++) { collisionBehaviors.get(i).proposeCollision(other); }// end for(collisionBehaviors) }// end proposeCollision(...) public boolean isCollideable(){ return !collisionBehaviors.isEmpty(); } public <T extends Behavior> T addBehavior(T ob) { if (ob.isEnabled()) { if (ob instanceof CollisionBehavior) collisionBehaviors.add((CollisionBehavior) ob); tickBehaviors.add(ob); } else { inactiveBehaviors.add(ob); } ob.setParent(this); return ob; } public <T extends Behavior> T removeBehavior(T beh) { if (beh.isEnabled()) { if (beh instanceof CollisionBehavior) collisionBehaviors.remove((CollisionBehavior) beh); tickBehaviors.remove(beh); } else inactiveBehaviors.remove(beh); return beh; }//end removeBehavior() protected boolean recalcMatrixWithEachFrame(){ return false; } public <T> T probeForBehavior(Class<T> bC) { if (bC.isAssignableFrom(CollisionBehavior.class)) { for (int i = 0; i < collisionBehaviors.size(); i++) { if (bC.isAssignableFrom(collisionBehaviors.get(i).getClass())) { return (T) collisionBehaviors.get(i); } }// end if(instanceof) }// emd if(isAssignableFrom(CollisionBehavior.class)) for (int i = 0; i < inactiveBehaviors.size(); i++) { if (bC.isAssignableFrom(inactiveBehaviors.get(i).getClass())) { return (T) inactiveBehaviors.get(i); } }// end if(instanceof) for (int i = 0; i < tickBehaviors.size(); i++) { if (bC.isAssignableFrom(tickBehaviors.get(i).getClass())) { return (T) tickBehaviors.get(i); } }// end if(instanceof) throw new BehaviorNotFoundException("Cannot find behavior of type " + bC.getName() + " in behavior sandwich owned by " + this.toString()); }// end probeForBehavior public <T> void probeForBehaviors(Submitter<T> sub, Class<T> type) { final ArrayList<T> result = new ArrayList<T>(); synchronized(collisionBehaviors){ if (type.isAssignableFrom(CollisionBehavior.class)) { for (int i = 0; i < collisionBehaviors.size(); i++) { if (type.isAssignableFrom(collisionBehaviors.get(i).getClass())) { result.add((T) collisionBehaviors.get(i)); } }// end if(instanceof) }// end isAssignableFrom(CollisionBehavior.class) }synchronized(inactiveBehaviors){ for (int i = 0; i < inactiveBehaviors.size(); i++) { if (type.isAssignableFrom(inactiveBehaviors.get(i).getClass())) result.add((T) inactiveBehaviors.get(i)); }// end if(instanceof) }synchronized(tickBehaviors){ for (int i = 0; i < 
tickBehaviors.size(); i++) { if (type.isAssignableFrom(tickBehaviors.get(i).getClass())) result.add((T) tickBehaviors.get(i)); }// end for (tickBehaviors) }//end sync(tickBehaviors) sub.submit(result); }// end probeForBehaviors(...) public void tick(long time) { if(!respondToTick)return; synchronized(tickBehaviors){ for (int i = 0; i < tickBehaviors.size() && isActive(); i++) tickBehaviors.get(i).proposeTick(time); }//end sync(tickBehaviors) }// end tick(...) private final int [] emptyIntArray = new int[0]; public void setModel(Model m) { if (m == null) throw new RuntimeException("Passed model cannot be null."); final TRFuture<Model> thisModelFuture = this.model; if(thisModelFuture != null) releaseCurrentModel(); try{this.model = m.finalizeModel();}catch(Exception e){throw new RuntimeException(e);} }// end setModel(...) private void releaseCurrentModel(){ if(transparentTriangleObjectDefinitions!=null) for(int def:transparentTriangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); if(triangleObjectDefinitions!=null) for(int def:triangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); RenderList.RENDER_LIST_EXECUTOR.submit(new Runnable(){ @Override public void run() { getOpaqueObjectDefinitionAddressesInVEC4() .clear(); getTransparentObjectDefinitionAddressesInVEC4().clear(); }}); transparentTriangleObjectDefinitions = null; triangleObjectDefinitions = null; this.model = null; objectDefsInitialized = false; }//end releaseCurrentModel() public /*synchronized*/ void setDirection(ObjectDirection dir) { lock.lock(); try{ if (dir.getHeading().getNorm() == 0 || dir.getTop().getNorm() == 0) { System.err .println("Warning: Rejecting zero-norm for object direction. " + dir); new Exception().printStackTrace(); return; } setHeading(dir.getHeading()); setTop(dir.getTop()); }finally {lock.unlock();} }//end setDirection(...) @Override public String toString() { final String modelDebugName; if(model!=null)modelDebugName=getModel().getDebugName(); else modelDebugName="[null model]"; return "WorldObject Model=" + modelDebugName + " pos=" + this.getPosition() + " class=" + getClass().getName()+" hash="+hashCode(); } public final void initializeObjectDefinitions() throws NotReadyException { if(objectDefsInitialized) return; if (model == null) throw new NullPointerException( "Model is null. Did you forget to set it? 
Object in question is: \n"+this.toString()); final Model model = getModelRealtime(); tr.getThreadManager().submitToThreadPool(new Callable<Void>(){ @Override public Void call() throws Exception { tr.getThreadManager().submitToGPUMemAccess(new Callable<Void>(){ @Override public Void call() throws Exception { processPrimitiveList(model.getTriangleList(), getTriangleObjectDefinitions(), getOpaqueObjectDefinitionAddressesInVEC4()); processPrimitiveList(model.getTransparentTriangleList(), getTransparentTriangleObjectDefinitions(), getTransparentObjectDefinitionAddressesInVEC4()); return null; }}).get(); updateAllRenderFlagStates(); objectDefsInitialized = true; return null; }}); }// end initializeObjectDefinitions() private void processPrimitiveList(PrimitiveList<?> primitiveList, int[] objectDefinitions, final CollectionActionDispatcher<VEC4Address> objectDefinitionAddressesInVEC4) { if (primitiveList == null) return; // Nothing to do, no primitives here final int gpuVerticesPerElement = primitiveList.getGPUVerticesPerElement(); final int elementsPerBlock = GPU.GPU_VERTICES_PER_BLOCK / gpuVerticesPerElement; int gpuVerticesRemaining = primitiveList.getNumElements()*gpuVerticesPerElement; // For each of the allocated-but-not-yet-initialized object definitions. final ObjectDefinitionWindow odw = tr.gpu.get().objectDefinitionWindow.get(); int odCounter=0; final int memoryWindowIndicesPerElement = primitiveList.getNumMemoryWindowIndicesPerElement(); final Integer matrixID = getMatrixID(); //Cache to hold new addresses for submission in bulk final ArrayList<VEC4Address> addressesToAdd = new ArrayList<VEC4Address>(); for (final int index : objectDefinitions) { final int vertexOffsetVec4s=new VEC4Address(primitiveList.getMemoryWindow().getPhysicalAddressInBytes(odCounter*elementsPerBlock*memoryWindowIndicesPerElement)).intValue(); final int matrixOffsetVec4s=new VEC4Address(tr.gpu.get().matrixWindow.get() .getPhysicalAddressInBytes(matrixID)).intValue(); odw.matrixOffset.set(index,matrixOffsetVec4s); odw.vertexOffset.set(index,vertexOffsetVec4s); odw.modelScale.set(index, (byte) primitiveList.getPackedScale()); if (gpuVerticesRemaining >= GPU.GPU_VERTICES_PER_BLOCK) { odw.numVertices.set(index, (byte) GPU.GPU_VERTICES_PER_BLOCK); } else if (gpuVerticesRemaining > 0) { odw.numVertices.set(index, (byte) (gpuVerticesRemaining)); } else { throw new RuntimeException("Ran out of vec4s."); } gpuVerticesRemaining -= GPU.GPU_VERTICES_PER_BLOCK; addressesToAdd.add(new VEC4Address(odw.getPhysicalAddressInBytes(index))); odCounter++; }// end for(ObjectDefinition) RenderList.RENDER_LIST_EXECUTOR.submit(new Runnable(){ @Override public void run() { objectDefinitionAddressesInVEC4.addAll(addressesToAdd); }}); }// end processPrimitiveList(...) 
protected void updateAllRenderFlagStates(){ final Model model = getModel(); if(model == null) return; updateRenderFlagStatesPL(model.getTriangleList(),getTriangleObjectDefinitions()); updateRenderFlagStatesPL(model.getTransparentTriangleList(),getTransparentTriangleObjectDefinitions()); } protected void updateRenderFlagStatesPL(PrimitiveList<?> pl, int [] objectDefinitionIndices){ final ObjectDefinitionWindow odw = tr.gpu.get().objectDefinitionWindow.get(); for(int index : objectDefinitionIndices) odw.mode.set(index, (byte)(pl.getPrimitiveRenderMode() | (renderFlags << 4)&0xF0)); }//end updateRenderFlagStatesPL public /*synchronized*/ final void updateStateToGPU(Renderer renderer) throws NotReadyException { if(!lock.tryLock()) throw new NotReadyException(); try{ initializeObjectDefinitions(); System.arraycopy(position, 0, positionAfterLoop, 0, 3); attemptLoop(renderer); if(needToRecalcMatrix){ needToRecalcMatrix=recalcMatrixWithEachFrame(); recalculateTransRotMBuffer(); } if(model!=null)getModel().proposeAnimationUpdate(); }finally{lock.unlock();} }//end updateStateToGPU() public boolean supportsLoop(){ return true; } protected void attemptLoop(Renderer renderer){ if (supportsLoop()) { boolean change = false; final Vector3D camPos = renderer.getCamera().getCameraPosition(); final double [] delta = new double[]{ positionAfterLoop[0] - camPos.getX(), positionAfterLoop[1] - camPos.getY(), positionAfterLoop[2] - camPos.getZ()}; if (delta[0] > TR.mapWidth / 2.) { positionAfterLoop[0] -= TR.mapWidth; change = true; needToRecalcMatrix=true; } else if (delta[0] < -TR.mapWidth / 2.) { positionAfterLoop[0] += TR.mapWidth; change = true; needToRecalcMatrix=true; } if (delta[1] > TR.mapWidth / 2.) { positionAfterLoop[1] -= TR.mapWidth; change = true; needToRecalcMatrix=true; } else if (delta[1] < -TR.mapWidth / 2.) { positionAfterLoop[1] += TR.mapWidth; change = true; needToRecalcMatrix=true; } if (delta[2] > TR.mapWidth / 2.) { positionAfterLoop[2] -= TR.mapWidth; change = true; needToRecalcMatrix=true; } else if (delta[2] < -TR.mapWidth / 2.) { positionAfterLoop[2] += TR.mapWidth; change = true; needToRecalcMatrix=true; } if(change){ needToRecalcMatrix = true; loopedBefore = true; }else{ if(loopedBefore) needToRecalcMatrix = true; loopedBefore = false; } }//end if(LOOP) }//end attemptLoop() protected void recalculateTransRotMBuffer() { try { Vect3D.normalize(heading, aZ); Vect3D.normalize(top,aY); Vect3D.cross(top, aZ, aX); recalculateRotBuffer(); if (translate()) { recalculateTransBuffer(); Mat4x4.mul(tMd, rMd, rotTransM); } else { System.arraycopy(rMd, 0, rotTransM, 0, 16); } tr.gpu.get().matrixWindow.get().setTransposed(rotTransM, getMatrixID(), scratchMatrixArray);//New version } catch (MathArithmeticException e) {e.printStackTrace(); }// Don't crash. 
}// end recalculateTransRotMBuffer() protected void recalculateRotBuffer(){ //Scale Vect3D.scalarMultiply(aX, getScale(), aX); Vect3D.scalarMultiply(aY, getScale(), aY); Vect3D.scalarMultiply(aZ, getScale(), aZ); rMd[0] = aX[0]; rMd[1] = aY[0]; rMd[2] = aZ[0]; rMd[4] = aX[1]; rMd[5] = aY[1]; rMd[6] = aZ[1]; rMd[8] = aX[2]; rMd[9] = aY[2]; rMd[10] = aZ[2]; }//end recalculateRotBuffer protected void recalculateTransBuffer(){ if(isVisible() && isActive()){ tMd[3] = positionAfterLoop[0]+modelOffset[0]; tMd[7] = positionAfterLoop[1]+modelOffset[1]; tMd[11]= positionAfterLoop[2]+modelOffset[2]; }else{ tMd[3] = Double.POSITIVE_INFINITY; tMd[7] = Double.POSITIVE_INFINITY; tMd[11]= Double.POSITIVE_INFINITY; }//end (!visible) }//end recalculateTransBuffer() protected final double [] scratchMatrixArray = new double[16]; protected boolean translate() { return true; } /** * @return the visible */ public boolean isVisible() { return visible; } /** * @param visible * the visible to set */ public void setVisible(boolean visible) { if(this.visible==visible) return; needToRecalcMatrix=true; if(!this.visible && visible){ this.visible = true; }else this.visible = visible; }//end setvisible() /** * @return the position */ public final double[] getPosition() { return position; } /** * @param position * the position to set */ public WorldObject setPosition(double[] position) { this.position[0]=position[0]; this.position[1]=position[1]; this.position[2]=position[2]; notifyPositionChange(); return this; }// end setPosition() public /*synchronized*/ WorldObject notifyPositionChange(){ lock.lock(); try{ if(position[0]==Double.NaN) throw new RuntimeException("Invalid position."); pcs.firePropertyChange(POSITION, oldPosition, position); needToRecalcMatrix=true; updateOldPosition(); }finally{lock.unlock();} return this; }//end notifyPositionChange() private void updateOldPosition(){ System.arraycopy(position, 0, oldPosition, 0, 3); } /** * @return the heading */ public final Vector3D getLookAt() { return new Vector3D(heading); } /** * @param heading * the heading to set */ public /*synchronized*/ void setHeading(Vector3D nHeading) { lock.lock(); try{ System.arraycopy(heading, 0, oldHeading, 0, 3); heading[0] = nHeading.getX(); heading[1] = nHeading.getY(); heading[2] = nHeading.getZ(); pcs.firePropertyChange(HEADING, oldHeading, nHeading); needToRecalcMatrix=true; }finally{lock.unlock();} } public Vector3D getHeading() { assert !(top[0]==0 && top[1]==0 && top[2]==0); return new Vector3D(heading); } /** * @return the top */ public final Vector3D getTop() { assert !(top[0]==0 && top[1]==0 && top[2]==0); return new Vector3D(top); } /** * @param top * the top to set */ public /*synchronized*/ void setTop(Vector3D nTop) { lock.lock(); try{ System.arraycopy(top, 0, oldTop, 0, 3); top[0] = nTop.getX(); top[1] = nTop.getY(); top[2] = nTop.getZ(); pcs.firePropertyChange(TOP, oldTop, nTop); needToRecalcMatrix=true; }finally{lock.unlock();} }//end setTop(...) 
public final CollectionActionDispatcher<VEC4Address> getOpaqueObjectDefinitionAddresses(){ return opaqueObjectDefinitionAddressesInVEC4; } public final CollectionActionDispatcher<VEC4Address> getTransparentObjectDefinitionAddresses(){ return transparentObjectDefinitionAddressesInVEC4; } /** * @return the tr */ public TR getTr() { return tr; } public /*synchronized*/ void destroy() { lock.lock(); try{ final SpacePartitioningGrid grid = getContainingGrid(); if(grid !=null){ try{World.relevanceExecutor.submit(new Runnable(){ @Override public void run() { grid.remove(WorldObject.this); }}).get();}catch(Exception e){throw new RuntimeException(e);} }//end if(NEW MODE and have grid) setContainingGrid(null); // Send it to the land of wind and ghosts. setActive(false); notifyPositionChange(); }finally{lock.unlock();} }//end destroy() @Override public void setContainingGrid(SpacePartitioningGrid grid) { containingGrid = new WeakReference<SpacePartitioningGrid>(grid); notifyPositionChange(); } public SpacePartitioningGrid<PositionedRenderable> getContainingGrid() { try{return containingGrid.get();} catch(NullPointerException e){return null;} } public Model getModel() { try{return model.get();} catch(NullPointerException e){return null;} catch(Exception e){throw new RuntimeException(e);} } public Model getModelRealtime() throws NotReadyException{ return model.getRealtime(); } /** * @return the active */ public boolean isActive() { return active; } /** * @param active * the active to set */ public void setActive(boolean active) { final boolean oldState = this.active; if(this.active!=active) needToRecalcMatrix=true; if(!this.active && active && isVisible()){ this.active=true; } this.active = active; pcs.firePropertyChange(ACTIVE,oldState,active); }//end setActive(...) public /*synchronized*/ void movePositionBy(Vector3D delta) { lock.lock(); try{ position[0] += delta.getX(); position[1] += delta.getY(); position[2] += delta.getZ(); notifyPositionChange(); }finally{lock.unlock();} }//end movePositionBy(...) public /*synchronized*/ void setPosition(double x, double y, double z) { lock.lock(); try{ position[0] = x; position[1] = y; position[2] = z; notifyPositionChange();} finally{lock.unlock();} } public double[] getHeadingArray() { return heading; } public double[] getTopArray() { return top; } public void enableBehavior(Behavior behavior) { if (!inactiveBehaviors.contains(behavior)) { throw new RuntimeException( "Tried to enabled an unregistered behavior."); } if (behavior instanceof CollisionBehavior) { if (!collisionBehaviors.contains(behavior) && behavior instanceof CollisionBehavior) { collisionBehaviors.add((CollisionBehavior) behavior); } } if (!tickBehaviors.contains(behavior)) { tickBehaviors.add(behavior); } }// end enableBehavior(...) public void disableBehavior(Behavior behavior) { if (!inactiveBehaviors.contains(behavior)) synchronized(inactiveBehaviors){ inactiveBehaviors.add(behavior); } if (behavior instanceof CollisionBehavior) synchronized(collisionBehaviors){ collisionBehaviors.remove(behavior); } synchronized(tickBehaviors){ tickBehaviors.remove(behavior); } }//end disableBehavior(...) 
/** * @return the renderFlags */ public int getRenderFlags() { return renderFlags; } /** * @param renderFlags the renderFlags to set */ public void setRenderFlags(byte renderFlags) { this.renderFlags = renderFlags; updateAllRenderFlagStates(); } /** * @return the respondToTick */ public boolean isRespondToTick() { return respondToTick; } /** * @param respondToTick the respondToTick to set */ public void setRespondToTick(boolean respondToTick) { this.respondToTick = respondToTick; } @Override public void finalize() throws Throwable{ if(matrixID!=null) tr.gpu.get().matrixWindow.get().freeLater(matrixID); if(transparentTriangleObjectDefinitions!=null) for(int def:transparentTriangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); if(triangleObjectDefinitions!=null) for(int def:triangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); super.finalize(); }//end finalize() /** * @param modelOffset the modelOffset to set */ public void setModelOffset(double x, double y, double z) { modelOffset[0]=x; modelOffset[1]=y; modelOffset[2]=z; } public double[] getPositionWithOffset() { positionWithOffset[0]=position[0]+modelOffset[0]; positionWithOffset[1]=position[1]+modelOffset[1]; positionWithOffset[2]=position[2]+modelOffset[2]; return positionWithOffset; } public boolean isImmuneToOpaqueDepthTest() { return immuneToOpaqueDepthTest; } /** * @param immuneToDepthTest the immuneToDepthTest to set */ public WorldObject setImmuneToOpaqueDepthTest(boolean immuneToDepthTest) { this.immuneToOpaqueDepthTest = immuneToDepthTest; return this; } /** * @param arg0 * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.beans.PropertyChangeListener) */ public void addPropertyChangeListener(PropertyChangeListener arg0) { pcs.addPropertyChangeListener(arg0); } /** * @param propertyName * @param listener * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener) */ public void addPropertyChangeListener(String propertyName, PropertyChangeListener listener) { pcs.addPropertyChangeListener(propertyName, listener); } /** * @return * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners() */ public PropertyChangeListener[] getPropertyChangeListeners() { return pcs.getPropertyChangeListeners(); } /** * @param propertyName * @return * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners(java.lang.String) */ public PropertyChangeListener[] getPropertyChangeListeners( String propertyName) { return pcs.getPropertyChangeListeners(propertyName); } /** * @param propertyName * @return * @see java.beans.PropertyChangeSupport#hasListeners(java.lang.String) */ public boolean hasListeners(String propertyName) { return pcs.hasListeners(propertyName); } /** * @param arg0 * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.beans.PropertyChangeListener) */ public void removePropertyChangeListener(PropertyChangeListener arg0) { pcs.removePropertyChangeListener(arg0); } /** * @param propertyName * @param listener * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener) */ public void removePropertyChangeListener(String propertyName, PropertyChangeListener listener) { pcs.removePropertyChangeListener(propertyName, listener); } public boolean hasBehavior(Class<? 
extends Behavior> behaviorClass) { try{probeForBehavior(behaviorClass);} catch(BehaviorNotFoundException e){return false;} return true; } protected int[] getTriangleObjectDefinitions() { return triangleObjectDefinitions = getObjectDefinitions(triangleObjectDefinitions, getModel().getTriangleList()); } protected int[] getTransparentTriangleObjectDefinitions() { return transparentTriangleObjectDefinitions = getObjectDefinitions(transparentTriangleObjectDefinitions, getModel().getTransparentTriangleList()); } protected int[] getObjectDefinitions(int [] originalObjectDefs, PrimitiveList pList){ if(originalObjectDefs == null){ int numObjDefs, sizeInVerts; if (pList == null) originalObjectDefs = emptyIntArray; else { sizeInVerts = pList .getTotalSizeInGPUVertices(); numObjDefs = sizeInVerts / GPU.GPU_VERTICES_PER_BLOCK; if (sizeInVerts % GPU.GPU_VERTICES_PER_BLOCK != 0) numObjDefs++; originalObjectDefs = new int[numObjDefs]; for (int i = 0; i < numObjDefs; i++) { originalObjectDefs[i] = tr.gpu.get() .objectDefinitionWindow.get().create(); }//end for(numObjDefs) }//end if(!null) }//end if(null) return originalObjectDefs; }//end getObjectDefinitions(...) protected CollectionActionDispatcher<VEC4Address> getOpaqueObjectDefinitionAddressesInVEC4() { if(opaqueObjectDefinitionAddressesInVEC4==null) opaqueObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); return opaqueObjectDefinitionAddressesInVEC4; } protected CollectionActionDispatcher<VEC4Address> getTransparentObjectDefinitionAddressesInVEC4() { if(transparentObjectDefinitionAddressesInVEC4==null) transparentObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); return transparentObjectDefinitionAddressesInVEC4; } protected Integer getMatrixID() { if(matrixID == null) matrixID = tr.gpu.get().matrixWindow.get().create(); return matrixID; } public void setMatrixID(Integer matrixID) { this.matrixID = matrixID; } protected double getScale() { return scale; } protected void setScale(double scale) { this.scale = scale; } public void setRenderFlag(RenderFlags flag){ setRenderFlags((byte)(getRenderFlags() | flag.getMask())); } public void unsetRenderFlag(RenderFlags flag){ setRenderFlags((byte)(getRenderFlags() & ~flag.getMask())); } public boolean getRenderFlag(RenderFlags flag){ return ((getRenderFlags()&0xFF) & flag.getMask()) != 0; } }// end WorldObject
src/main/java/org/jtrfp/trcl/obj/WorldObject.java
/******************************************************************************* * This file is part of TERMINAL RECALL * Copyright (c) 2012-2016 Chuck Ritola * Part of the jTRFP.org project * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Public License v3.0 * which accompanies this distribution, and is available at * http://www.gnu.org/licenses/gpl.html * * Contributors: * chuck - initial API and implementation ******************************************************************************/ package org.jtrfp.trcl.obj; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.concurrent.Callable; import org.apache.commons.math3.exception.MathArithmeticException; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; import org.jtrfp.trcl.ObjectDefinitionWindow; import org.jtrfp.trcl.PrimitiveList; import org.jtrfp.trcl.SpacePartitioningGrid; import org.jtrfp.trcl.Submitter; import org.jtrfp.trcl.WeakPropertyChangeSupport; import org.jtrfp.trcl.World; import org.jtrfp.trcl.beh.Behavior; import org.jtrfp.trcl.beh.BehaviorNotFoundException; import org.jtrfp.trcl.beh.CollisionBehavior; import org.jtrfp.trcl.coll.CollectionActionDispatcher; import org.jtrfp.trcl.coll.PropertyListenable; import org.jtrfp.trcl.core.NotReadyException; import org.jtrfp.trcl.core.TR; import org.jtrfp.trcl.core.TRFuture; import org.jtrfp.trcl.gpu.GPU; import org.jtrfp.trcl.gpu.Model; import org.jtrfp.trcl.gpu.RenderList; import org.jtrfp.trcl.gpu.Renderer; import org.jtrfp.trcl.math.Mat4x4; import org.jtrfp.trcl.math.Vect3D; import org.jtrfp.trcl.mem.VEC4Address; public class WorldObject implements PositionedRenderable, PropertyListenable, Rotatable { public static final String HEADING ="heading"; public static final String TOP ="top"; public static final String ACTIVE ="active"; private double[] heading = new double[] { 0, 0, 1 }, oldHeading= new double[] {Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY}; private double[] top = new double[] { 0, 1, 0 }, oldTop = new double[] {Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY}; protected volatile double[] position = new double[3], positionAfterLoop = new double[3], oldPosition = new double[]{Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY,Double.NEGATIVE_INFINITY}; private boolean loopedBefore = false; protected double[] modelOffset= new double[3]; private final double[]positionWithOffset = new double[3]; private boolean needToRecalcMatrix=true; private final TR tr; private boolean visible = true; private TRFuture<Model>model; private int[] triangleObjectDefinitions; private int[] transparentTriangleObjectDefinitions; protected Integer matrixID; private volatile WeakReference<SpacePartitioningGrid> containingGrid; private ArrayList<Behavior> inactiveBehaviors = new ArrayList<Behavior>(); private ArrayList<CollisionBehavior>collisionBehaviors = new ArrayList<CollisionBehavior>(); private ArrayList<Behavior> tickBehaviors = new ArrayList<Behavior>(); private boolean active = true; private byte renderFlags=0; private boolean immuneToOpaqueDepthTest = false; private boolean objectDefsInitialized = false; protected final double[] aX = new double[3]; protected final double[] aY = new double[3]; protected final double[] aZ = new double[3]; protected final double[] rotTransM = new double[16]; protected final double[] camM = new double[16]; 
protected final double[] rMd = new double[16]; protected final double[] tMd = new double[16]; protected double[] cMd = new double[16]; private boolean respondToTick = true; private double scale = 1.; private CollectionActionDispatcher<VEC4Address> opaqueObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); private CollectionActionDispatcher<VEC4Address> transparentObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); protected final WeakPropertyChangeSupport pcs = new WeakPropertyChangeSupport(new PropertyChangeSupport(this)); public enum RenderFlags{ IgnoreCamera((byte)0x1); private final byte mask; private RenderFlags(byte mask){ this.mask=mask; } public byte getMask() { return mask; } }; public WorldObject(TR tr) { this.tr = tr; // Matrix constants setup rMd[15] = 1; tMd[0] = 1; tMd[5] = 1; tMd[10] = 1; tMd[15] = 1; } public WorldObject(TR tr, Model m) { this(tr); setModel(m); }// end constructor void proposeCollision(WorldObject other) { for (int i = 0; i < collisionBehaviors.size(); i++) { collisionBehaviors.get(i).proposeCollision(other); }// end for(collisionBehaviors) }// end proposeCollision(...) public boolean isCollideable(){ return !collisionBehaviors.isEmpty(); } public <T extends Behavior> T addBehavior(T ob) { if (ob.isEnabled()) { if (ob instanceof CollisionBehavior) collisionBehaviors.add((CollisionBehavior) ob); tickBehaviors.add(ob); } else { inactiveBehaviors.add(ob); } ob.setParent(this); return ob; } public <T extends Behavior> T removeBehavior(T beh) { if (beh.isEnabled()) { if (beh instanceof CollisionBehavior) collisionBehaviors.remove((CollisionBehavior) beh); tickBehaviors.remove(beh); } else inactiveBehaviors.remove(beh); return beh; }//end removeBehavior() protected boolean recalcMatrixWithEachFrame(){ return false; } public <T> T probeForBehavior(Class<T> bC) { if (bC.isAssignableFrom(CollisionBehavior.class)) { for (int i = 0; i < collisionBehaviors.size(); i++) { if (bC.isAssignableFrom(collisionBehaviors.get(i).getClass())) { return (T) collisionBehaviors.get(i); } }// end if(instanceof) }// emd if(isAssignableFrom(CollisionBehavior.class)) for (int i = 0; i < inactiveBehaviors.size(); i++) { if (bC.isAssignableFrom(inactiveBehaviors.get(i).getClass())) { return (T) inactiveBehaviors.get(i); } }// end if(instanceof) for (int i = 0; i < tickBehaviors.size(); i++) { if (bC.isAssignableFrom(tickBehaviors.get(i).getClass())) { return (T) tickBehaviors.get(i); } }// end if(instanceof) throw new BehaviorNotFoundException("Cannot find behavior of type " + bC.getName() + " in behavior sandwich owned by " + this.toString()); }// end probeForBehavior public <T> void probeForBehaviors(Submitter<T> sub, Class<T> type) { final ArrayList<T> result = new ArrayList<T>(); synchronized(collisionBehaviors){ if (type.isAssignableFrom(CollisionBehavior.class)) { for (int i = 0; i < collisionBehaviors.size(); i++) { if (type.isAssignableFrom(collisionBehaviors.get(i).getClass())) { result.add((T) collisionBehaviors.get(i)); } }// end if(instanceof) }// end isAssignableFrom(CollisionBehavior.class) }synchronized(inactiveBehaviors){ for (int i = 0; i < inactiveBehaviors.size(); i++) { if (type.isAssignableFrom(inactiveBehaviors.get(i).getClass())) result.add((T) inactiveBehaviors.get(i)); }// end if(instanceof) }synchronized(tickBehaviors){ for (int i = 0; i < tickBehaviors.size(); i++) { if (type.isAssignableFrom(tickBehaviors.get(i).getClass())) result.add((T) 
tickBehaviors.get(i)); }// end for (tickBehaviors) }//end sync(tickBehaviors) sub.submit(result); }// end probeForBehaviors(...) public void tick(long time) { if(!respondToTick)return; synchronized(tickBehaviors){ for (int i = 0; i < tickBehaviors.size() && isActive(); i++) tickBehaviors.get(i).proposeTick(time); }//end sync(tickBehaviors) }// end tick(...) private final int [] emptyIntArray = new int[0]; public void setModel(Model m) { if (m == null) throw new RuntimeException("Passed model cannot be null."); final TRFuture<Model> thisModelFuture = this.model; if(thisModelFuture != null) releaseCurrentModel(); try{this.model = m.finalizeModel();}catch(Exception e){throw new RuntimeException(e);} }// end setModel(...) private void releaseCurrentModel(){ if(transparentTriangleObjectDefinitions!=null) for(int def:transparentTriangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); if(triangleObjectDefinitions!=null) for(int def:triangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); RenderList.RENDER_LIST_EXECUTOR.submit(new Runnable(){ @Override public void run() { getOpaqueObjectDefinitionAddressesInVEC4() .clear(); getTransparentObjectDefinitionAddressesInVEC4().clear(); }}); transparentTriangleObjectDefinitions = null; triangleObjectDefinitions = null; this.model = null; objectDefsInitialized = false; }//end releaseCurrentModel() public synchronized void setDirection(ObjectDirection dir) { if (dir.getHeading().getNorm() == 0 || dir.getTop().getNorm() == 0) { System.err .println("Warning: Rejecting zero-norm for object direction. " + dir); new Exception().printStackTrace(); return; } setHeading(dir.getHeading()); setTop(dir.getTop()); } @Override public String toString() { final String modelDebugName; if(model!=null)modelDebugName=getModel().getDebugName(); else modelDebugName="[null model]"; return "WorldObject Model=" + modelDebugName + " pos=" + this.getPosition() + " class=" + getClass().getName()+" hash="+hashCode(); } public final void initializeObjectDefinitions() throws NotReadyException { if(objectDefsInitialized) return; if (model == null) throw new NullPointerException( "Model is null. Did you forget to set it? Object in question is: \n"+this.toString()); final Model model = getModelRealtime(); tr.getThreadManager().submitToThreadPool(new Callable<Void>(){ @Override public Void call() throws Exception { tr.getThreadManager().submitToGPUMemAccess(new Callable<Void>(){ @Override public Void call() throws Exception { processPrimitiveList(model.getTriangleList(), getTriangleObjectDefinitions(), getOpaqueObjectDefinitionAddressesInVEC4()); processPrimitiveList(model.getTransparentTriangleList(), getTransparentTriangleObjectDefinitions(), getTransparentObjectDefinitionAddressesInVEC4()); return null; }}).get(); updateAllRenderFlagStates(); objectDefsInitialized = true; return null; }}); }// end initializeObjectDefinitions() private void processPrimitiveList(PrimitiveList<?> primitiveList, int[] objectDefinitions, final CollectionActionDispatcher<VEC4Address> objectDefinitionAddressesInVEC4) { if (primitiveList == null) return; // Nothing to do, no primitives here final int gpuVerticesPerElement = primitiveList.getGPUVerticesPerElement(); final int elementsPerBlock = GPU.GPU_VERTICES_PER_BLOCK / gpuVerticesPerElement; int gpuVerticesRemaining = primitiveList.getNumElements()*gpuVerticesPerElement; // For each of the allocated-but-not-yet-initialized object definitions. 
final ObjectDefinitionWindow odw = tr.gpu.get().objectDefinitionWindow.get(); int odCounter=0; final int memoryWindowIndicesPerElement = primitiveList.getNumMemoryWindowIndicesPerElement(); final Integer matrixID = getMatrixID(); //Cache to hold new addresses for submission in bulk final ArrayList<VEC4Address> addressesToAdd = new ArrayList<VEC4Address>(); for (final int index : objectDefinitions) { final int vertexOffsetVec4s=new VEC4Address(primitiveList.getMemoryWindow().getPhysicalAddressInBytes(odCounter*elementsPerBlock*memoryWindowIndicesPerElement)).intValue(); final int matrixOffsetVec4s=new VEC4Address(tr.gpu.get().matrixWindow.get() .getPhysicalAddressInBytes(matrixID)).intValue(); odw.matrixOffset.set(index,matrixOffsetVec4s); odw.vertexOffset.set(index,vertexOffsetVec4s); odw.modelScale.set(index, (byte) primitiveList.getPackedScale()); if (gpuVerticesRemaining >= GPU.GPU_VERTICES_PER_BLOCK) { odw.numVertices.set(index, (byte) GPU.GPU_VERTICES_PER_BLOCK); } else if (gpuVerticesRemaining > 0) { odw.numVertices.set(index, (byte) (gpuVerticesRemaining)); } else { throw new RuntimeException("Ran out of vec4s."); } gpuVerticesRemaining -= GPU.GPU_VERTICES_PER_BLOCK; addressesToAdd.add(new VEC4Address(odw.getPhysicalAddressInBytes(index))); odCounter++; }// end for(ObjectDefinition) RenderList.RENDER_LIST_EXECUTOR.submit(new Runnable(){ @Override public void run() { objectDefinitionAddressesInVEC4.addAll(addressesToAdd); }}); }// end processPrimitiveList(...) protected void updateAllRenderFlagStates(){ final Model model = getModel(); if(model == null) return; updateRenderFlagStatesPL(model.getTriangleList(),getTriangleObjectDefinitions()); updateRenderFlagStatesPL(model.getTransparentTriangleList(),getTransparentTriangleObjectDefinitions()); } protected void updateRenderFlagStatesPL(PrimitiveList<?> pl, int [] objectDefinitionIndices){ final ObjectDefinitionWindow odw = tr.gpu.get().objectDefinitionWindow.get(); for(int index : objectDefinitionIndices) odw.mode.set(index, (byte)(pl.getPrimitiveRenderMode() | (renderFlags << 4)&0xF0)); }//end updateRenderFlagStatesPL public synchronized final void updateStateToGPU(Renderer renderer) throws NotReadyException { initializeObjectDefinitions(); System.arraycopy(position, 0, positionAfterLoop, 0, 3); attemptLoop(renderer); if(needToRecalcMatrix){ needToRecalcMatrix=recalcMatrixWithEachFrame(); recalculateTransRotMBuffer(); } if(model!=null)getModel().proposeAnimationUpdate(); }//end updateStateToGPU() public boolean supportsLoop(){ return true; } protected void attemptLoop(Renderer renderer){ if (supportsLoop()) { boolean change = false; final Vector3D camPos = renderer.getCamera().getCameraPosition(); final double [] delta = new double[]{ positionAfterLoop[0] - camPos.getX(), positionAfterLoop[1] - camPos.getY(), positionAfterLoop[2] - camPos.getZ()}; if (delta[0] > TR.mapWidth / 2.) { positionAfterLoop[0] -= TR.mapWidth; change = true; needToRecalcMatrix=true; } else if (delta[0] < -TR.mapWidth / 2.) { positionAfterLoop[0] += TR.mapWidth; change = true; needToRecalcMatrix=true; } if (delta[1] > TR.mapWidth / 2.) { positionAfterLoop[1] -= TR.mapWidth; change = true; needToRecalcMatrix=true; } else if (delta[1] < -TR.mapWidth / 2.) { positionAfterLoop[1] += TR.mapWidth; change = true; needToRecalcMatrix=true; } if (delta[2] > TR.mapWidth / 2.) { positionAfterLoop[2] -= TR.mapWidth; change = true; needToRecalcMatrix=true; } else if (delta[2] < -TR.mapWidth / 2.) 
{ positionAfterLoop[2] += TR.mapWidth; change = true; needToRecalcMatrix=true; } if(change){ needToRecalcMatrix = true; loopedBefore = true; }else{ if(loopedBefore) needToRecalcMatrix = true; loopedBefore = false; } }//end if(LOOP) }//end attemptLoop() protected void recalculateTransRotMBuffer() { try { Vect3D.normalize(heading, aZ); Vect3D.normalize(top,aY); Vect3D.cross(top, aZ, aX); recalculateRotBuffer(); if (translate()) { recalculateTransBuffer(); Mat4x4.mul(tMd, rMd, rotTransM); } else { System.arraycopy(rMd, 0, rotTransM, 0, 16); } tr.gpu.get().matrixWindow.get().setTransposed(rotTransM, getMatrixID(), scratchMatrixArray);//New version } catch (MathArithmeticException e) {e.printStackTrace(); }// Don't crash. }// end recalculateTransRotMBuffer() protected void recalculateRotBuffer(){ //Scale Vect3D.scalarMultiply(aX, getScale(), aX); Vect3D.scalarMultiply(aY, getScale(), aY); Vect3D.scalarMultiply(aZ, getScale(), aZ); rMd[0] = aX[0]; rMd[1] = aY[0]; rMd[2] = aZ[0]; rMd[4] = aX[1]; rMd[5] = aY[1]; rMd[6] = aZ[1]; rMd[8] = aX[2]; rMd[9] = aY[2]; rMd[10] = aZ[2]; }//end recalculateRotBuffer protected void recalculateTransBuffer(){ if(isVisible() && isActive()){ tMd[3] = positionAfterLoop[0]+modelOffset[0]; tMd[7] = positionAfterLoop[1]+modelOffset[1]; tMd[11]= positionAfterLoop[2]+modelOffset[2]; }else{ tMd[3] = Double.POSITIVE_INFINITY; tMd[7] = Double.POSITIVE_INFINITY; tMd[11]= Double.POSITIVE_INFINITY; }//end (!visible) }//end recalculateTransBuffer() protected final double [] scratchMatrixArray = new double[16]; protected boolean translate() { return true; } /** * @return the visible */ public boolean isVisible() { return visible; } /** * @param visible * the visible to set */ public void setVisible(boolean visible) { if(this.visible==visible) return; needToRecalcMatrix=true; if(!this.visible && visible){ this.visible = true; }else this.visible = visible; }//end setVisible() /** * @return the position */ public final double[] getPosition() { return position; } /** * @param position * the position to set */ public WorldObject setPosition(double[] position) { this.position[0]=position[0]; this.position[1]=position[1]; this.position[2]=position[2]; notifyPositionChange(); return this; }// end setPosition() public synchronized WorldObject notifyPositionChange(){ if(Double.isNaN(position[0])) throw new RuntimeException("Invalid position."); pcs.firePropertyChange(POSITION, oldPosition, position); needToRecalcMatrix=true; updateOldPosition(); return this; }//end notifyPositionChange() private void updateOldPosition(){ System.arraycopy(position, 0, oldPosition, 0, 3); } /** * @return the heading */ public final Vector3D getLookAt() { return new Vector3D(heading); } /** * @param heading * the heading to set */ public synchronized void setHeading(Vector3D nHeading) { System.arraycopy(heading, 0, oldHeading, 0, 3); heading[0] = nHeading.getX(); heading[1] = nHeading.getY(); heading[2] = nHeading.getZ(); pcs.firePropertyChange(HEADING, oldHeading, nHeading); needToRecalcMatrix=true; } public Vector3D getHeading() { assert !(heading[0]==0 && heading[1]==0 && heading[2]==0); return new Vector3D(heading); } /** * @return the top */ public final Vector3D getTop() { assert !(top[0]==0 && top[1]==0 && top[2]==0); return new Vector3D(top); } /** * @param top * the top to set */ public synchronized void setTop(Vector3D nTop) { System.arraycopy(top, 0, oldTop, 0, 3); top[0] = nTop.getX(); top[1] = nTop.getY(); top[2] = nTop.getZ(); pcs.firePropertyChange(TOP, oldTop, nTop); needToRecalcMatrix=true; } public final
CollectionActionDispatcher<VEC4Address> getOpaqueObjectDefinitionAddresses(){ return opaqueObjectDefinitionAddressesInVEC4; } public final CollectionActionDispatcher<VEC4Address> getTransparentObjectDefinitionAddresses(){ return transparentObjectDefinitionAddressesInVEC4; } /** * @return the tr */ public TR getTr() { return tr; } public synchronized void destroy() { final SpacePartitioningGrid grid = getContainingGrid(); if(grid !=null){ try{World.relevanceExecutor.submit(new Runnable(){ @Override public void run() { grid.remove(WorldObject.this); }}).get();}catch(Exception e){throw new RuntimeException(e);} }//end if(NEW MODE and have grid) setContainingGrid(null); // Send it to the land of wind and ghosts. setActive(false); notifyPositionChange(); }//end destroy() @Override public void setContainingGrid(SpacePartitioningGrid grid) { containingGrid = new WeakReference<SpacePartitioningGrid>(grid); notifyPositionChange(); } public SpacePartitioningGrid<PositionedRenderable> getContainingGrid() { try{return containingGrid.get();} catch(NullPointerException e){return null;} } public Model getModel() { try{return model.get();} catch(NullPointerException e){return null;} catch(Exception e){throw new RuntimeException(e);} } public Model getModelRealtime() throws NotReadyException{ return model.getRealtime(); } /** * @return the active */ public boolean isActive() { return active; } /** * @param active * the active to set */ public void setActive(boolean active) { final boolean oldState = this.active; if(this.active!=active) needToRecalcMatrix=true; if(!this.active && active && isVisible()){ this.active=true; } this.active = active; pcs.firePropertyChange(ACTIVE,oldState,active); }//end setActive(...) public synchronized void movePositionBy(Vector3D delta) { position[0] += delta.getX(); position[1] += delta.getY(); position[2] += delta.getZ(); notifyPositionChange(); } public synchronized void setPosition(double x, double y, double z) { position[0] = x; position[1] = y; position[2] = z; notifyPositionChange(); } public double[] getHeadingArray() { return heading; } public double[] getTopArray() { return top; } public void enableBehavior(Behavior behavior) { if (!inactiveBehaviors.contains(behavior)) { throw new RuntimeException( "Tried to enable an unregistered behavior."); } if (behavior instanceof CollisionBehavior) { if (!collisionBehaviors.contains(behavior)) { collisionBehaviors.add((CollisionBehavior) behavior); } } if (!tickBehaviors.contains(behavior)) { tickBehaviors.add(behavior); } }// end enableBehavior(...) public void disableBehavior(Behavior behavior) { if (!inactiveBehaviors.contains(behavior)) synchronized(inactiveBehaviors){ inactiveBehaviors.add(behavior); } if (behavior instanceof CollisionBehavior) synchronized(collisionBehaviors){ collisionBehaviors.remove(behavior); } synchronized(tickBehaviors){ tickBehaviors.remove(behavior); } }//end disableBehavior(...)
/** * @return the renderFlags */ public int getRenderFlags() { return renderFlags; } /** * @param renderFlags the renderFlags to set */ public void setRenderFlags(byte renderFlags) { this.renderFlags = renderFlags; updateAllRenderFlagStates(); } /** * @return the respondToTick */ public boolean isRespondToTick() { return respondToTick; } /** * @param respondToTick the respondToTick to set */ public void setRespondToTick(boolean respondToTick) { this.respondToTick = respondToTick; } @Override public void finalize() throws Throwable{ if(matrixID!=null) tr.gpu.get().matrixWindow.get().freeLater(matrixID); if(transparentTriangleObjectDefinitions!=null) for(int def:transparentTriangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); if(triangleObjectDefinitions!=null) for(int def:triangleObjectDefinitions) tr.gpu.get().objectDefinitionWindow.get().freeLater(def); super.finalize(); }//end finalize() /** * @param modelOffset the modelOffset to set */ public void setModelOffset(double x, double y, double z) { modelOffset[0]=x; modelOffset[1]=y; modelOffset[2]=z; } public double[] getPositionWithOffset() { positionWithOffset[0]=position[0]+modelOffset[0]; positionWithOffset[1]=position[1]+modelOffset[1]; positionWithOffset[2]=position[2]+modelOffset[2]; return positionWithOffset; } public boolean isImmuneToOpaqueDepthTest() { return immuneToOpaqueDepthTest; } /** * @param immuneToDepthTest the immuneToDepthTest to set */ public WorldObject setImmuneToOpaqueDepthTest(boolean immuneToDepthTest) { this.immuneToOpaqueDepthTest = immuneToDepthTest; return this; } /** * @param arg0 * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.beans.PropertyChangeListener) */ public void addPropertyChangeListener(PropertyChangeListener arg0) { pcs.addPropertyChangeListener(arg0); } /** * @param propertyName * @param listener * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener) */ public void addPropertyChangeListener(String propertyName, PropertyChangeListener listener) { pcs.addPropertyChangeListener(propertyName, listener); } /** * @return * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners() */ public PropertyChangeListener[] getPropertyChangeListeners() { return pcs.getPropertyChangeListeners(); } /** * @param propertyName * @return * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners(java.lang.String) */ public PropertyChangeListener[] getPropertyChangeListeners( String propertyName) { return pcs.getPropertyChangeListeners(propertyName); } /** * @param propertyName * @return * @see java.beans.PropertyChangeSupport#hasListeners(java.lang.String) */ public boolean hasListeners(String propertyName) { return pcs.hasListeners(propertyName); } /** * @param arg0 * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.beans.PropertyChangeListener) */ public void removePropertyChangeListener(PropertyChangeListener arg0) { pcs.removePropertyChangeListener(arg0); } /** * @param propertyName * @param listener * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener) */ public void removePropertyChangeListener(String propertyName, PropertyChangeListener listener) { pcs.removePropertyChangeListener(propertyName, listener); } public boolean hasBehavior(Class<? 
extends Behavior> behaviorClass) { try{probeForBehavior(behaviorClass);} catch(BehaviorNotFoundException e){return false;} return true; } protected int[] getTriangleObjectDefinitions() { return triangleObjectDefinitions = getObjectDefinitions(triangleObjectDefinitions, getModel().getTriangleList()); } protected int[] getTransparentTriangleObjectDefinitions() { return transparentTriangleObjectDefinitions = getObjectDefinitions(transparentTriangleObjectDefinitions, getModel().getTransparentTriangleList()); } protected int[] getObjectDefinitions(int [] originalObjectDefs, PrimitiveList pList){ if(originalObjectDefs == null){ int numObjDefs, sizeInVerts; if (pList == null) originalObjectDefs = emptyIntArray; else { sizeInVerts = pList .getTotalSizeInGPUVertices(); numObjDefs = sizeInVerts / GPU.GPU_VERTICES_PER_BLOCK; if (sizeInVerts % GPU.GPU_VERTICES_PER_BLOCK != 0) numObjDefs++; originalObjectDefs = new int[numObjDefs]; for (int i = 0; i < numObjDefs; i++) { originalObjectDefs[i] = tr.gpu.get() .objectDefinitionWindow.get().create(); }//end for(numObjDefs) }//end if(!null) }//end if(null) return originalObjectDefs; }//end getObjectDefinitions(...) protected CollectionActionDispatcher<VEC4Address> getOpaqueObjectDefinitionAddressesInVEC4() { if(opaqueObjectDefinitionAddressesInVEC4==null) opaqueObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); return opaqueObjectDefinitionAddressesInVEC4; } protected CollectionActionDispatcher<VEC4Address> getTransparentObjectDefinitionAddressesInVEC4() { if(transparentObjectDefinitionAddressesInVEC4==null) transparentObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); return transparentObjectDefinitionAddressesInVEC4; } protected Integer getMatrixID() { if(matrixID == null) matrixID = tr.gpu.get().matrixWindow.get().create(); return matrixID; } public void setMatrixID(Integer matrixID) { this.matrixID = matrixID; } protected double getScale() { return scale; } protected void setScale(double scale) { this.scale = scale; } public void setRenderFlag(RenderFlags flag){ setRenderFlags((byte)(getRenderFlags() | flag.getMask())); } public void unsetRenderFlag(RenderFlags flag){ setRenderFlags((byte)(getRenderFlags() & ~flag.getMask())); } public boolean getRenderFlag(RenderFlags flag){ return ((getRenderFlags()&0xFF) & flag.getMask()) != 0; } }// end WorldObject
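The attemptLoop() logic in the file above implements a toroidal (wrap-around) world: per axis, whenever the object drifts more than half a map width away from the camera, it is shifted by one full map width so it re-enters the camera's half-map neighborhood. A minimal, self-contained sketch of that per-axis test follows; the map size constant is illustrative, not the real value of TR.mapWidth.

// Per-axis wrap test, as in attemptLoop(): keep a coordinate within
// +/- MAP_WIDTH/2 of the camera by shifting it a whole map width.
public final class WrapDemo {
    static final double MAP_WIDTH = 1 << 20; // illustrative, not TR.mapWidth's real value

    static double wrap(double position, double cameraPosition) {
        double delta = position - cameraPosition;
        if (delta > MAP_WIDTH / 2.)
            position -= MAP_WIDTH;  // too far ahead: pull back one map length
        else if (delta < -MAP_WIDTH / 2.)
            position += MAP_WIDTH;  // too far behind: push forward one map length
        return position;
    }

    public static void main(String[] args) {
        System.out.println(wrap(0.75 * MAP_WIDTH, 0)); // -> -0.25 * MAP_WIDTH
        System.out.println(wrap(-0.6 * MAP_WIDTH, 0)); // ->  0.4 * MAP_WIDTH
    }
}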
✔Poss. deadlock with display() thread requesting WorldObject mutex.
src/main/java/org/jtrfp/trcl/obj/WorldObject.java
✔Poss. deadlock with display() thread requesting WorldObject mutex.
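The deadlock this commit guards against is the classic lock-ordering cycle: the display() thread blocks on a WorldObject monitor while the thread holding that monitor waits on a resource the display() thread already owns. A toy reproduction of that cycle, with invented thread roles and a stand-in second resource (run it and it hangs, by design):

// Toy lock-ordering deadlock: each thread takes the two locks in
// opposite order and blocks forever waiting for the other.
public final class DeadlockDemo {
    static final Object worldObjectMutex = new Object(); // stands in for a WorldObject monitor
    static final Object renderResource   = new Object(); // stands in for something display() holds

    public static void main(String[] args) {
        Thread gameLogic = new Thread(() -> {
            synchronized (worldObjectMutex) {
                sleep(100);                       // widen the race window
                synchronized (renderResource) { } // blocks: display() holds this
            }
        }, "game-logic");
        Thread display = new Thread(() -> {
            synchronized (renderResource) {
                sleep(100);
                synchronized (worldObjectMutex) { } // blocks: game-logic holds this
            }
        }, "display");
        gameLogic.start();
        display.start();
    }

    static void sleep(long ms) {
        try { Thread.sleep(ms); } catch (InterruptedException ignored) { }
    }
}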
<ide><path>rc/main/java/org/jtrfp/trcl/obj/WorldObject.java <ide> import java.lang.ref.WeakReference; <ide> import java.util.ArrayList; <ide> import java.util.concurrent.Callable; <add>import java.util.concurrent.locks.ReentrantLock; <ide> <ide> import org.apache.commons.math3.exception.MathArithmeticException; <ide> import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; <ide> protected double[] cMd = new double[16]; <ide> private boolean respondToTick = true; <ide> private double scale = 1.; <add> private final ReentrantLock lock = new ReentrantLock(); <ide> <ide> private CollectionActionDispatcher<VEC4Address> opaqueObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); <ide> private CollectionActionDispatcher<VEC4Address> transparentObjectDefinitionAddressesInVEC4 = new CollectionActionDispatcher<VEC4Address>(new ArrayList<VEC4Address>()); <ide> objectDefsInitialized = false; <ide> }//end releaseCurrentModel() <ide> <del> public synchronized void setDirection(ObjectDirection dir) { <del> if (dir.getHeading().getNorm() == 0 || dir.getTop().getNorm() == 0) { <add> public /*synchronized*/ void setDirection(ObjectDirection dir) { <add> lock.lock(); <add> try{ <add> if (dir.getHeading().getNorm() == 0 || dir.getTop().getNorm() == 0) { <ide> System.err <ide> .println("Warning: Rejecting zero-norm for object direction. " <ide> + dir); <ide> new Exception().printStackTrace(); <ide> return; <del> } <del> setHeading(dir.getHeading()); <del> setTop(dir.getTop()); <del> } <add> } <add> setHeading(dir.getHeading()); <add> setTop(dir.getTop()); <add> }finally {lock.unlock();} <add> }//end setDirection(...) <ide> <ide> @Override <ide> public String toString() { <ide> odw.mode.set(index, (byte)(pl.getPrimitiveRenderMode() | (renderFlags << 4)&0xF0)); <ide> }//end updateRenderFlagStatesPL <ide> <del> public synchronized final void updateStateToGPU(Renderer renderer) throws NotReadyException { <del> initializeObjectDefinitions(); <del> System.arraycopy(position, 0, positionAfterLoop, 0, 3); <del> attemptLoop(renderer); <del> if(needToRecalcMatrix){ <del> needToRecalcMatrix=recalcMatrixWithEachFrame(); <del> recalculateTransRotMBuffer(); <del> } <del> if(model!=null)getModel().proposeAnimationUpdate(); <add> public /*synchronized*/ final void updateStateToGPU(Renderer renderer) throws NotReadyException { <add> if(!lock.tryLock()) <add> throw new NotReadyException(); <add> try{ <add> initializeObjectDefinitions(); <add> System.arraycopy(position, 0, positionAfterLoop, 0, 3); <add> attemptLoop(renderer); <add> if(needToRecalcMatrix){ <add> needToRecalcMatrix=recalcMatrixWithEachFrame(); <add> recalculateTransRotMBuffer(); <add> } <add> if(model!=null)getModel().proposeAnimationUpdate(); <add> }finally{lock.unlock();} <ide> }//end updateStateToGPU() <ide> <ide> public boolean supportsLoop(){ <ide> return this; <ide> }// end setPosition() <ide> <del> public synchronized WorldObject notifyPositionChange(){ <del> if(position[0]==Double.NaN) <del> throw new RuntimeException("Invalid position."); <del> pcs.firePropertyChange(POSITION, oldPosition, position); <del> needToRecalcMatrix=true; <del> updateOldPosition(); <add> public /*synchronized*/ WorldObject notifyPositionChange(){ <add> lock.lock(); <add> try{ <add> if(position[0]==Double.NaN) <add> throw new RuntimeException("Invalid position."); <add> pcs.firePropertyChange(POSITION, oldPosition, position); <add> needToRecalcMatrix=true; <add> updateOldPosition(); <add> }finally{lock.unlock();} 
<ide> return this; <ide> }//end notifyPositionChange() <ide> <ide> * @param heading <ide> * the heading to set <ide> */ <del> public synchronized void setHeading(Vector3D nHeading) { <del> System.arraycopy(heading, 0, oldHeading, 0, 3); <del> heading[0] = nHeading.getX(); <del> heading[1] = nHeading.getY(); <del> heading[2] = nHeading.getZ(); <del> pcs.firePropertyChange(HEADING, oldHeading, nHeading); <del> needToRecalcMatrix=true; <add> public /*synchronized*/ void setHeading(Vector3D nHeading) { <add> lock.lock(); <add> try{ <add> System.arraycopy(heading, 0, oldHeading, 0, 3); <add> heading[0] = nHeading.getX(); <add> heading[1] = nHeading.getY(); <add> heading[2] = nHeading.getZ(); <add> pcs.firePropertyChange(HEADING, oldHeading, nHeading); <add> needToRecalcMatrix=true; <add> }finally{lock.unlock();} <ide> } <ide> <ide> public Vector3D getHeading() { <ide> * @param top <ide> * the top to set <ide> */ <del> public synchronized void setTop(Vector3D nTop) { <del> System.arraycopy(top, 0, oldTop, 0, 3); <del> top[0] = nTop.getX(); <del> top[1] = nTop.getY(); <del> top[2] = nTop.getZ(); <del> pcs.firePropertyChange(TOP, oldTop, nTop); <del> needToRecalcMatrix=true; <del> } <add> public /*synchronized*/ void setTop(Vector3D nTop) { <add> lock.lock(); <add> try{ <add> System.arraycopy(top, 0, oldTop, 0, 3); <add> top[0] = nTop.getX(); <add> top[1] = nTop.getY(); <add> top[2] = nTop.getZ(); <add> pcs.firePropertyChange(TOP, oldTop, nTop); <add> needToRecalcMatrix=true; <add> }finally{lock.unlock();} <add> }//end setTop(...) <ide> <ide> public final CollectionActionDispatcher<VEC4Address> getOpaqueObjectDefinitionAddresses(){ <ide> return opaqueObjectDefinitionAddressesInVEC4; <ide> return tr; <ide> } <ide> <del> public synchronized void destroy() { <del> final SpacePartitioningGrid grid = getContainingGrid(); <del> if(grid !=null){ <del> try{World.relevanceExecutor.submit(new Runnable(){ <del> @Override <del> public void run() { <del> grid.remove(WorldObject.this); <del> }}).get();}catch(Exception e){throw new RuntimeException(e);} <del> }//end if(NEW MODE and have grid) <del> setContainingGrid(null); <del> // Send it to the land of wind and ghosts. <del> setActive(false); <del> notifyPositionChange(); <add> public /*synchronized*/ void destroy() { <add> lock.lock(); <add> try{ <add> final SpacePartitioningGrid grid = getContainingGrid(); <add> if(grid !=null){ <add> try{World.relevanceExecutor.submit(new Runnable(){ <add> @Override <add> public void run() { <add> grid.remove(WorldObject.this); <add> }}).get();}catch(Exception e){throw new RuntimeException(e);} <add> }//end if(NEW MODE and have grid) <add> setContainingGrid(null); <add> // Send it to the land of wind and ghosts. <add> setActive(false); <add> notifyPositionChange(); <add> }finally{lock.unlock();} <ide> }//end destroy() <ide> <ide> @Override <ide> pcs.firePropertyChange(ACTIVE,oldState,active); <ide> }//end setActive(...) 
<ide> <del> public synchronized void movePositionBy(Vector3D delta) { <del> position[0] += delta.getX(); <del> position[1] += delta.getY(); <del> position[2] += delta.getZ(); <del> notifyPositionChange(); <del> } <del> <del> public synchronized void setPosition(double x, double y, double z) { <del> position[0] = x; <del> position[1] = y; <del> position[2] = z; <del> notifyPositionChange(); <add> public /*synchronized*/ void movePositionBy(Vector3D delta) { <add> lock.lock(); <add> try{ <add> position[0] += delta.getX(); <add> position[1] += delta.getY(); <add> position[2] += delta.getZ(); <add> notifyPositionChange(); <add> }finally{lock.unlock();} <add> }//end movePositionBy(...) <add> <add> public /*synchronized*/ void setPosition(double x, double y, double z) { <add> lock.lock(); <add> try{ <add> position[0] = x; <add> position[1] = y; <add> position[2] = z; <add> notifyPositionChange();} <add> finally{lock.unlock();} <ide> } <ide> <ide> public double[] getHeadingArray() {
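The diff above replaces method-level synchronized with a single ReentrantLock per object, and the key change is in updateStateToGPU(): mutators still block on the lock, but the render-side entry point only calls tryLock() and throws NotReadyException when the object is busy, deferring the work to a later frame instead of parking the display() thread on a WorldObject monitor. A condensed sketch of that pattern; the exception class here is a placeholder for the project's own NotReadyException:

import java.util.concurrent.locks.ReentrantLock;

// Condensed version of the fix: mutators may block, but the render-thread
// entry point only *tries* the lock and gives up immediately.
public final class TryLockPattern {
    static class NotReadyException extends Exception { } // placeholder for the project's type

    private final ReentrantLock lock = new ReentrantLock();
    private double x, y, z;

    public void setPosition(double nx, double ny, double nz) { // game-logic side: may block
        lock.lock();
        try { x = nx; y = ny; z = nz; }
        finally { lock.unlock(); }
    }

    public void updateStateToGPU() throws NotReadyException {  // render side: never blocks
        if (!lock.tryLock())
            throw new NotReadyException(); // busy: skip this frame, try again next one
        try {
            // ... recompute matrices, push state to GPU windows ...
        } finally {
            lock.unlock();
        }
    }
}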
JavaScript
mit
40676f07d9e0ae0c96939fd52e8d3b0591f9b80c
0
rpgtkoolmv/corescript,rpgtkoolmv/corescript
/*: * @plugindesc Basic plugin for manipulating important parameters.. * @author RM CoreScript team * * @help * Basic plugin for manipulating important parameters.. * There is no plugin command. * * @param cacheLimit * @desc The upper limit of images' cached size (MPixel) * @default 20 * * @param screenWidth * @desc The resolution of screen width * @default 816 * * @param screenHeight * @desc The resolution of screen height * @default 624 * * @param windowScaleFactor * @desc Scale window to (screen size * value). * @default 1 * * @param changeWindowWidthTo * @desc If set, change window width to this value * * @param changeWindowHeightTo * @desc If set, change window height to this value * * @param renderingMode * @desc The rendering mode (canvas/webgl/auto) * @default auto * * @param alwaysDash * @desc The initial value whether the player always dashes (on/off) * @default off */ /*:ja * @plugindesc 基本的なパラメーターを設定するプラグインです。 * @author RM CoreScript team * * @help * 基本的なパラメーターを設定するプラグインです。 * このプラグインにはプラグインコマンドはありません。 * * @param cacheLimit * @desc 画像のメモリへのキャッシュの上限値 (MPix) * @default 20 * * @param screenWidth * @desc 画面サイズの幅 * @default 816 * * @param screenHeight * @desc 画面サイズの高さ * @default 624 * * @param windowScaleFactor * @desc ウインドウを、画面サイズの指定された値分拡大・縮小します * @default 1 * * @param changeWindowWidthTo * @desc 値が設定された場合、ウインドウの幅を指定した値に変更 * * @param changeWindowHeightTo * @desc 値が設定された場合、ウインドウの高さを指定した値に変更 * * @param renderingMode * @desc レンダリングモード (canvas/webgl/auto) * @default auto * * @param alwaysDash * @desc プレイヤーが常時ダッシュするかどうかの初期値 (on/off) * @default off */ (function() { function toNumber(str, def) { return isNaN(str) ? def : +(str || def); } var parameters = PluginManager.parameters('Community_Basic'); var cacheLimit = toNumber(parameters['cacheLimit'], 20); var scaleFactor = toNumber(parameters['windowScaleFactor'], 1); var screenWidth = toNumber(parameters['screenWidth'], 816); var screenHeight = toNumber(parameters['screenHeight'], 624); var renderingMode = parameters['renderingMode'].toLowerCase(); var alwaysDash = parameters['alwaysDash'].toLowerCase() === 'on'; var windowWidth = toNumber(parameters['changeWindowWidthTo'], 0); var windowHeight = toNumber(parameters['changeWindowHeightTo'], 0); if(screenWidth !== SceneManager._screenWidth || (scaleFactor !== 1 && !windowWidth)) { windowWidth = screenWidth * scaleFactor; } if(screenHeight !== SceneManager._screenHeight || (scaleFactor !== 1 && !windowHeight)) { windowHeight = screenHeight * scaleFactor; } ImageCache.limit = cacheLimit * 1000 * 1000; SceneManager._screenWidth = screenWidth; SceneManager._screenHeight = screenHeight; SceneManager._boxWidth = screenWidth; SceneManager._boxHeight = screenHeight; SceneManager.preferableRendererType = function() { if (Utils.isOptionValid('canvas')) { return 'canvas'; } else if (Utils.isOptionValid('webgl')) { return 'webgl'; } else if (renderingMode === 'canvas') { return 'canvas'; } else if (renderingMode === 'webgl') { return 'webgl'; } else { return 'auto'; } }; var _ConfigManager_applyData = ConfigManager.applyData; ConfigManager.applyData = function(config) { _ConfigManager_applyData.apply(this, arguments); if (config['alwaysDash'] === undefined) { this.alwaysDash = alwaysDash; } }; var _SceneManager_initNwjs = SceneManager.initNwjs; SceneManager.initNwjs = function() { _SceneManager_initNwjs.apply(this, arguments); if (Utils.isNwjs() && windowWidth && windowHeight) { var dw = windowWidth - window.innerWidth; var dh = windowHeight - window.innerHeight; window.moveBy(-dw / 2, -dh / 2); 
window.resizeBy(dw, dh); } }; })();
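Every numeric parameter in the plugin above is read through toNumber(str, def), so a blank or non-numeric plugin setting silently falls back to its default instead of propagating NaN into the screen-size math. To keep this document's examples in a single language, here is the same guarded-parse-with-default idea sketched in Java; the helper name is ours, not part of the plugin:

// Parse-with-fallback, mirroring the plugin's toNumber(str, def):
// blank or unparseable input yields the supplied default, never an error.
public final class ParamParse {
    static double toNumber(String str, double def) {
        if (str == null || str.trim().isEmpty())
            return def;
        try {
            return Double.parseDouble(str.trim());
        } catch (NumberFormatException e) {
            return def;
        }
    }

    public static void main(String[] args) {
        System.out.println(toNumber("816", 624)); // 816.0
        System.out.println(toNumber("", 624));    // 624.0
        System.out.println(toNumber("abc", 624)); // 624.0
    }
}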
plugins/Community_Basic.js
/*: * @plugindesc Basic plugin for manipulating important parameters.. * @author RM CoreScript team * * @help * Basic plugin for manipulating important parameters.. * There is no plugin command. * * @param cacheLimit * @desc The upper limit of images' cached size (MPixel) * @default 20 * * @param screenWidth * @desc The resolution of screen width * @default 816 * * @param screenHeight * @desc The resolution of screen height * @default 624 * * @param scaleFactor * @desc Scale window to (screen size * value). * @default 1 * * @param changeWindowWidthTo * @desc If set, change window width to this value * * @param changeWindowHeightTo * @desc If set, change window height to this value * * @param renderingMode * @desc The rendering mode (canvas/webgl/auto) * @default auto * * @param alwaysDash * @desc The initial value whether the player always dashes (on/off) * @default off */ /*:ja * @plugindesc 基本的なパラメーターを設定するプラグインです。 * @author RM CoreScript team * * @help * 基本的なパラメーターを設定するプラグインです。 * このプラグインにはプラグインコマンドはありません。 * * @param cacheLimit * @desc 画像のメモリへのキャッシュの上限値 (MPix) * @default 20 * * @param screenWidth * @desc 画面サイズの幅 * @default 816 * * @param screenHeight * @desc 画面サイズの高さ * @default 624 * * @param scaleFactor * @desc ウインドウを、画面サイズの指定された値分拡大・縮小します * @default 1 * * @param changeWindowWidthTo * @desc 値が設定された場合、ウインドウの幅を指定した値に変更 * * @param changeWindowHeightTo * @desc 値が設定された場合、ウインドウの高さを指定した値に変更 * * @param renderingMode * @desc レンダリングモード (canvas/webgl/auto) * @default auto * * @param alwaysDash * @desc プレイヤーが常時ダッシュするかどうかの初期値 (on/off) * @default off */ (function() { function toNumber(str, def) { return isNaN(str) ? def : +(str || def); } var parameters = PluginManager.parameters('Community_Basic'); var cacheLimit = toNumber(parameters['cacheLimit'], 20); var scaleFactor = toNumber(parameters['scaleFactor'], 1); var screenWidth = toNumber(parameters['screenWidth'], 816); var screenHeight = toNumber(parameters['screenHeight'], 624); var renderingMode = parameters['renderingMode'].toLowerCase(); var alwaysDash = parameters['alwaysDash'].toLowerCase() === 'on'; var windowWidth = toNumber(parameters['changeWindowWidthTo'], 0); var windowHeight = toNumber(parameters['changeWindowHeightTo'], 0); if(screenWidth !== SceneManager._screenWidth || (scaleFactor !== 1 && !windowWidth)) { windowWidth = screenWidth * scaleFactor; } if(screenHeight !== SceneManager._screenHeight || (scaleFactor !== 1 && !windowHeight)) { windowHeight = screenHeight * scaleFactor; } ImageCache.limit = cacheLimit * 1000 * 1000; SceneManager._screenWidth = screenWidth; SceneManager._screenHeight = screenHeight; SceneManager._boxWidth = screenWidth; SceneManager._boxHeight = screenHeight; SceneManager.preferableRendererType = function() { if (Utils.isOptionValid('canvas')) { return 'canvas'; } else if (Utils.isOptionValid('webgl')) { return 'webgl'; } else if (renderingMode === 'canvas') { return 'canvas'; } else if (renderingMode === 'webgl') { return 'webgl'; } else { return 'auto'; } }; var _ConfigManager_applyData = ConfigManager.applyData; ConfigManager.applyData = function(config) { _ConfigManager_applyData.apply(this, arguments); if (config['alwaysDash'] === undefined) { this.alwaysDash = alwaysDash; } }; var _SceneManager_initNwjs = SceneManager.initNwjs; SceneManager.initNwjs = function() { _SceneManager_initNwjs.apply(this, arguments); if (Utils.isNwjs() && windowWidth && windowHeight) { var dw = windowWidth - window.innerWidth; var dh = windowHeight - window.innerHeight; window.moveBy(-dw / 2, -dh / 2); window.resizeBy(dw, 
dh); } }; })();
scaleFactor => windowScaleFactor
plugins/Community_Basic.js
scaleFactor => windowScaleFactor
<ide><path>lugins/Community_Basic.js <ide> * @desc The resolution of screen height <ide> * @default 624 <ide> * <del> * @param scaleFactor <add> * @param windowScaleFactor <ide> * @desc Scale window to (screen size * value). <ide> * @default 1 <ide> * <ide> * @desc 画面サイズの高さ <ide> * @default 624 <ide> * <del> * @param scaleFactor <add> * @param windowScaleFactor <ide> * @desc ウインドウを、画面サイズの指定された値分拡大・縮小します <ide> * @default 1 <ide> * <ide> <ide> var parameters = PluginManager.parameters('Community_Basic'); <ide> var cacheLimit = toNumber(parameters['cacheLimit'], 20); <del> var scaleFactor = toNumber(parameters['scaleFactor'], 1); <add> var scaleFactor = toNumber(parameters['windowScaleFactor'], 1); <ide> var screenWidth = toNumber(parameters['screenWidth'], 816); <ide> var screenHeight = toNumber(parameters['screenHeight'], 624); <ide> var renderingMode = parameters['renderingMode'].toLowerCase();
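The window-sizing code at the end of the plugin computes the delta between the desired and current client size, then calls moveBy(-dw / 2, -dh / 2) before resizeBy(dw, dh): shifting the origin back by half the growth keeps the window's center fixed while it resizes. The same arithmetic as a small Java sketch; the rectangle type is only for illustration:

// Centered resize: move the origin back by half the growth so the
// window's center stays fixed -- exactly what moveBy(-dw/2, -dh/2)
// followed by resizeBy(dw, dh) achieves in the plugin.
public final class CenteredResize {
    record Rect(int x, int y, int w, int h) { }

    static Rect resizeKeepingCenter(Rect r, int newW, int newH) {
        int dw = newW - r.w();
        int dh = newH - r.h();
        return new Rect(r.x() - dw / 2, r.y() - dh / 2, newW, newH);
    }

    public static void main(String[] args) {
        Rect before = new Rect(100, 100, 816, 624);   // center (508, 412)
        Rect after = resizeKeepingCenter(before, 1632, 1248);
        System.out.println(after); // Rect[x=-308, y=-212, w=1632, h=1248], center still (508, 412)
    }
}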
Java
apache-2.0
75b1082d6df72f1557642830c28aacbb41732ecd
0
idirbenouaret/relrel,basaldella/relrel
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sssw.relrel; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; /** * Get the most "uncommon" entities linked by a given Wikipedia page. * * @author Marco Basaldella */ public class FactFinder { /** * The user agent that will be used for HTTP requests (since Wikipedia * requests it). */ private String userAgent; /** * The page search query that will be performed using the MediaWiki * query API. Protocol, languages and the page queries need to be * appended before and after this string. */ private final String singlePageQuery = "wikipedia.org/w/api.php?action=query&prop=categories|extracts|links&clshow=!hidden&format=json&pllimit=500&plnamespace=0&titles="; private final String categoryQuery = "wikipedia.org/w/api.php?action=query&list=categorymembers&cmlimit=max&format=json&rawcontinue=&cmtitle=Category:"; // Blacklist of unwanted terms private static final List<String> blackTerms = Arrays.asList(new String[]{"null", "International Standard Book Number", "Digital object identifier", "Living people", "PubMed Identifier", "International Standard Serial Number", "Wikisource", "disambiguation", "stub", "Featured Articles" }); /** * Maps the categories associated with a page. */ private Map<String, Integer> categories = new HashMap<>(); /** * Maps the related links (the "See Also" section) of a Wikipedia page. */ private Map<String, Integer> links = new HashMap<>(); /** * The page we're analyzing. */ private String InputPage; /** * Set the user agent used for requests to Wikipedia. * * @param userAgent the user agent string */ public void setUserAgent(String userAgent) { this.userAgent = userAgent; } /** * Run the UncommonFacts algorithm as defined in the project document. * * @param inputPage the title of the Wikipedia page to analyze.
*/ public void findUncommonFacts(String inputPage) { this.InputPage = inputPage; scrapInputPage(); System.out.println("*** Wikipedia page: " + InputPage); System.out.println(); System.out.println("Found " + links.size() + " outgoing links"); System.out.println("Found " + categories.size() + " categories"); for (String cat : categories.keySet()) { System.out.print(cat + ";"); } System.out.println(); int counter = 1 ; for (String cat : categories.keySet()) { System.out.println("Analyzing category " + counter++ + "..."); findFactsInCategory(cat); } List<Map.Entry<String, Integer>> ordered = links.entrySet().stream().sorted(Map.Entry.comparingByValue()) //.limit(20) .collect(Collectors.toList()); System.out.println("*** Suggestions ***"); for (Map.Entry<String,Integer> entry : ordered) { System.out.println("" + entry.getKey() + " \t\t\t Score: " + entry.getValue()); } } // void findUncommonFacts /** * Lines 1-3 of the algorithm: init the table with the outgoing links (and * find the categories). */ private void scrapInputPage() { HttpURLConnection con = null; BufferedReader reader = null; InputPage = InputPage.replaceAll(" ", "_"); // do the query and save the retrieved json in an object. String queryAddress = String.format("https://%s.%s%s", Locale.ENGLISH, singlePageQuery, InputPage); try { con = (HttpURLConnection) (new URL(queryAddress)).openConnection(); con.setRequestProperty("User-Agent", userAgent); con.setRequestMethod("GET"); reader = new BufferedReader(new InputStreamReader(con.getInputStream())); Object json = (new JSONParser()).parse(reader); // closing connection con.disconnect(); // The retrieved JSON is something like: // // "query": { // "pages": { // "<PAGE ID NUMBER>": { // "pageid": "<PAGE ID NUMBER>", // "ns": 0, // "title": "<PAGE TITLE>", // "categories": [ // { // "ns": 14, // "title": "Category:<CATEGORY 1>" // }, // { // "ns": 14, // "title": "Category:<CATEGORY 2>" // }, // { // "ns": 14, // "title": "Category:<CATEGORY 3>" // } // ], // "extract":"<TEXT>", // "links": [ // { // "ns": 0, // "title": "<LINK 1>" // }, // { // "ns": 0, // "title": "<LINK 2>" // }, // { // "ns": 0, // "title": "<LINK 3>" // } // ] // } // } // } //} // note that NOT ALL the wikis have the "extract" property in the API // therefore we may not assume that it will always be there JSONObject queryblock = (JSONObject) json; JSONObject pagesBlock = (JSONObject) queryblock.get("query"); JSONObject idBlock = (JSONObject) pagesBlock.get("pages"); // if we pipe'd more than one title, we'll have more than one pageId entry for (Iterator it = idBlock.keySet().iterator(); it.hasNext();) { String pageId = (String) it.next(); JSONObject block = (JSONObject) idBlock.get(pageId); // iterate through categories JSONArray jsonCats = (JSONArray) block.get("categories"); if (jsonCats != null) { Iterator<JSONObject> iterator = jsonCats.iterator(); while (iterator.hasNext()) { JSONObject category = (iterator.next()); String catName = (String) category.get("title"); catName = catName.replaceFirst("Category:", ""); catName = catName.replaceFirst("Categoria:", ""); if (!catName.toLowerCase().contains("stub") && !catName.contains("Featured Articles") && !catName.toLowerCase().contains("disambiguation")) { if (!this.categories.containsKey(catName) && !blackTerms.contains(catName)) { if (!catName.contains("births") && (!catName.contains("deaths"))) this.categories.put(catName, 0); } } } } // We can find related entities in the text // many articles have a "See Also" section that begins with // <h2>See also</h2>\n<ul> 
// and ends with: // </ul> // To retrieve these links, we don't need to scrape HTML. // We can just read the list of links included in the JSON // the drawback of this approach is that some pages have huge // amounts of links and many of them are uninteresting // For example, almost any page has a reference to the // definition of ISBN (contained in the references) // or of some other kind of widely used identifier such as: // Pub-Med index, // Digital-Object-Identifier, // International Standard Book Number, // Wikisource, and so on. JSONArray jsonLinks = (JSONArray) block.get("links"); if (jsonLinks != null) { Iterator<JSONObject> iterator = jsonLinks.iterator(); while (iterator.hasNext()) { JSONObject link = (iterator.next()); String linkname = (String) link.get("title"); if (!this.links.containsKey(linkname) && !blackTerms.contains(linkname)) { this.links.put(linkname, 0); } } } } } catch (ParseException ex) { throw new RuntimeException( "Error while parsing JSON by Wikipedia for page: " + InputPage, ex); } catch (MalformedURLException ex) { throw new RuntimeException( "Malformed Wikipedia URL: " + queryAddress, ex); } catch (IOException ex) { throw new RuntimeException( "Error while reading Wikipedia", ex); } finally { try { if (reader != null) { reader.close(); } } catch (IOException ex) { throw new RuntimeException( "Error while closing Wikipedia stream", ex); } } } private void findFactsInCategory(String cat) { HttpURLConnection con = null; BufferedReader reader = null; cat = cat.replaceAll(" ", "_"); // do the query and save the retrieved json in an object. String queryAddress = String.format("https://%s.%s%s", Locale.ENGLISH, categoryQuery, cat); try { con = (HttpURLConnection) (new URL(queryAddress)).openConnection(); con.setRequestProperty("User-Agent", userAgent); con.setRequestMethod("GET"); reader = new BufferedReader(new InputStreamReader(con.getInputStream())); Object json = (new JSONParser()).parse(reader); // closing connection con.disconnect(); JSONObject queryblock = (JSONObject) json; JSONObject mainBlock = (JSONObject) queryblock.get("query"); JSONArray categoriesBlock = (JSONArray) mainBlock.get("categorymembers"); Iterator<JSONObject> iterator = categoriesBlock.iterator(); System.out.println("This category has " + categoriesBlock.size() + " pages"); int counter = 0; while (iterator.hasNext()) { System.out.println("Page " + counter++); JSONObject singleCategoryBlock = (iterator.next()); String pageName = (String) singleCategoryBlock.get("title"); pageName = pageName.replace(" ", "_"); // Please be aware that the categories JSON returns not only // pages, but also (sub) categories and other things we don't want. // So, keep only the pages and skip the rest.
// For further information, please check // https://en.wikipedia.org/wiki/Wikipedia:Namespace // json-simple parses JSON integers as Long, so go through Number // instead of casting straight to Integer. int pageNamespace = ((Number) singleCategoryBlock.get("ns")).intValue(); if (!pageName.equals(InputPage) && pageNamespace == 0) findFactsInPage(pageName); } } catch (ParseException ex) { throw new RuntimeException( "Error while parsing JSON by Wikipedia for page: " + cat, ex); } catch (MalformedURLException ex) { throw new RuntimeException( "Malformed Wikipedia URL: " + queryAddress, ex); } catch (IOException ex) { throw new RuntimeException( "Error while reading Wikipedia", ex); } finally { try { if (reader != null) { reader.close(); } } catch (IOException ex) { throw new RuntimeException( "Error while closing Wikipedia stream", ex); } } } private void findFactsInPage(String pageName) { HttpURLConnection con = null; BufferedReader reader = null; pageName = pageName.replaceAll(" ", "_"); // do the query and save the retrieved json in an object. String queryAddress = String.format("https://%s.%s%s", Locale.ENGLISH, singlePageQuery, pageName); try { con = (HttpURLConnection) (new URL(queryAddress)).openConnection(); con.setRequestProperty("User-Agent", userAgent); con.setRequestMethod("GET"); reader = new BufferedReader(new InputStreamReader(con.getInputStream())); Object json = (new JSONParser()).parse(reader); // closing connection con.disconnect(); JSONObject queryblock = (JSONObject) json; JSONObject pagesBlock = (JSONObject) queryblock.get("query"); JSONObject idBlock = (JSONObject) pagesBlock.get("pages"); // if we pipe'd more than one title, we'll have more than one pageId entry for (Iterator it = idBlock.keySet().iterator(); it.hasNext();) { String pageId = (String) it.next(); JSONObject block = (JSONObject) idBlock.get(pageId); JSONArray jsonLinks = (JSONArray) block.get("links"); if (jsonLinks != null) { Iterator<JSONObject> iterator = jsonLinks.iterator(); while (iterator.hasNext()) { JSONObject link = (iterator.next()); String linkName = (String) link.get("title"); if (this.links.containsKey(linkName)) { int newValue = links.get(linkName) + 1; links.replace(linkName, newValue); } } } } } catch (ParseException ex) { throw new RuntimeException( "Error while parsing JSON by Wikipedia for page: " + pageName, ex); } catch (MalformedURLException ex) { throw new RuntimeException( "Malformed Wikipedia URL: " + queryAddress, ex); } catch (IOException ex) { throw new RuntimeException( "Error while reading Wikipedia", ex); } finally { try { if (reader != null) { reader.close(); } } catch (IOException ex) { throw new RuntimeException( "Error while closing Wikipedia stream", ex); } } } } // class
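FactFinder's scoring is a simple co-occurrence count: seed a map with every outgoing link of the input page at zero, bump a link's count each time it also appears in a sibling page from a shared category, then sort ascending so the least-shared (hence most "uncommon") links come first. The skeleton of that ranking, stripped of the HTTP and JSON plumbing and with made-up page titles:

import java.util.*;
import java.util.stream.Collectors;

// Skeleton of the scoring in findUncommonFacts: links shared with many
// sibling pages score high and sink; rarely shared links float to the top.
public final class UncommonRank {
    public static void main(String[] args) {
        Map<String, Integer> links = new HashMap<>();
        for (String l : List.of("Quantum entanglement", "ISBN", "Bell test"))
            links.put(l, 0); // outgoing links of the input page, all at zero

        List<List<String>> siblingPages = List.of(  // links found in category siblings
                List.of("ISBN", "Bell test"),
                List.of("ISBN"),
                List.of("ISBN", "Physics"));
        for (List<String> page : siblingPages)
            for (String l : page)
                links.computeIfPresent(l, (k, v) -> v + 1); // only count seeded links

        List<Map.Entry<String, Integer>> ordered = links.entrySet().stream()
                .sorted(Map.Entry.comparingByValue())
                .collect(Collectors.toList());
        ordered.forEach(e -> System.out.println(e.getKey() + " score=" + e.getValue()));
        // Quantum entanglement score=0, Bell test score=1, ISBN score=3
    }
}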
src/main/java/org/sssw/relrel/FactFinder.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sssw.relrel; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; /** * Get the most "uncommon" entities linked by a certain Wikipedia pages. * * @author Marco Basaldella */ public class FactFinder { /** * The user agent that will be used for HTTP requests (since Wikipedia * requests it). */ private String userAgent; /** * The page search query that will be performed using the Wikipedia * OpenSearch APIs. Protocol, languages and the page queries need to be * appended before and after this string. */ private final String singlePageQuery = "wikipedia.org/w/api.php?action=query&prop=categories|extracts|links&clshow=!hidden&format=json&pllimit=500&plnamespace=0&titles="; private final String categoryQuery = "wikipedia.org/w/api.php?action=query&list=categorymembers&cmlimit=max&format=json&rawcontinue=&cmtitle=Category:"; // Blacklist of unwanted terms private static final List<String> blackTerms = Arrays.asList(new String[]{"null", "International Standard Book Number", "Digital object identifier", "Living people", "PubMed Identifier", "International Standard Serial Number", "Wikisource", "disambiguation", "stub", "Featured Articles" }); /** * Maps the categories associated with a page. */ private Map<String, Integer> categories = new HashMap<>(); /** * Maps the related links (the "See Also" section) of a Wikipedia page. */ private Map<String, Integer> links = new HashMap<>(); /** * The page we're analyzing. */ private String InputPage; /** * Set the user agent used for requests to Wikipedia. * * @param userAgent the user agent string */ public void setUserAgent(String userAgent) { this.userAgent = userAgent; } /** * Run the UncommonFacts algorithm as defined in the project document. * * @param grams the grams to analyze. 
*/ public void findUncommonFacts(String inputPage) { this.InputPage = inputPage; scrapInputPage(); System.out.println("*** Wikipedia page: " + InputPage); System.out.println(); System.out.println("Found " + links.size() + " outgoing links"); System.out.println("Found " + categories.size() + " categories"); for (String cat : categories.keySet()) { System.out.print(cat + ";"); } System.out.println(); int counter = 1 ; for (String cat : categories.keySet()) { System.out.println("Analyzing category " + counter++ + "..."); findFactsInCategory(cat); } List<Map.Entry<String, Integer>> ordered = links.entrySet().stream().sorted(Map.Entry.comparingByValue()) //.limit(20) .collect(Collectors.toList()); System.out.println("*** Suggestions ***"); for (Map.Entry<String,Integer> entry : ordered) { System.out.println("" + entry.getKey() + " \t\t\t Score: " + entry.getValue()); } } // void findUncommonFacts /** * Lines 1-3 of the algorithm: init the table with the outgoing links (and * find the categories). */ private void scrapInputPage() { HttpURLConnection con = null; BufferedReader reader = null; InputPage = InputPage.replaceAll(" ", "_"); // do the query and save the retrieved json in an object. String queryAddress = String.format("https://%s.%s%s", Locale.ENGLISH, singlePageQuery, InputPage); try { con = (HttpURLConnection) (new URL(queryAddress)).openConnection(); con.setRequestProperty("User-Agent", userAgent); con.setRequestMethod("GET"); reader = new BufferedReader(new InputStreamReader(con.getInputStream())); Object json = (new JSONParser()).parse(reader); // closing connection con.disconnect(); // The retrieved JSON is something like: // // "query": { // "pages": { // "<PAGE ID NUMBER>": { // "pageid": "<PAGE ID NUMBER>", // "ns": 0, // "title": "<PAGE TITLE>", // "categories": [ // { // "ns": 14, // "title": "Category:<CATEGORY 1>" // }, // { // "ns": 14, // "title": "Category:<CATEGORY 2>" // }, // { // "ns": 14, // "title": "Category:<CATEGORY 3>" // } // ], // "extract":"<TEXT>", // "links": [ // { // "ns": 0, // "title": "<LINK 1>" // }, // { // "ns": 0, // "title": "<LINK 2>" // }, // { // "ns": 0, // "title": "<LINK 3>" // } // ] // } // } // } //} // note that NOT ALL the wikis have the "extract" property in the API // therefore we may not assume that it will always be there JSONObject queryblock = (JSONObject) json; JSONObject pagesBlock = (JSONObject) queryblock.get("query"); JSONObject idBlock = (JSONObject) pagesBlock.get("pages"); // if we pipe'd more than one title, we'll have more than one pageId entry for (Iterator it = idBlock.keySet().iterator(); it.hasNext();) { String pageId = (String) it.next(); JSONObject block = (JSONObject) idBlock.get(pageId); // iterate through categories JSONArray jsonCats = (JSONArray) block.get("categories"); if (jsonCats != null) { Iterator<JSONObject> iterator = jsonCats.iterator(); while (iterator.hasNext()) { JSONObject category = (iterator.next()); String catName = (String) category.get("title"); catName = catName.replaceFirst("Category:", ""); catName = catName.replaceFirst("Categoria:", ""); if (!catName.toLowerCase().contains("stub") && !catName.contains("Featured Articles") && !catName.toLowerCase().contains("disambiguation")) { if (!this.categories.containsKey(catName) && !blackTerms.contains(catName)) { if (!catName.contains("births") && (!catName.contains("deaths"))) this.categories.put(catName, 0); } } } } // We can find related entities in the text // many articles have a "See Also" section that begins with // <h2>See also</h2>\n<ul> 
// and ends with: // </ul> // To retrieve these links, we don't need to scrap HTML. // We can just read the list of links included in the JSON // the drawback of this approach is that some pages have huge // amounts of links and many of them are uninteresting // For example, almost any page has a reference to the // definition of ISBN (contained in the references) // or of some other kind of wide-used identifier such as: // Pub-Med index, // Digital-Object-Identifier, // International Standard Book Number, // Wikisource, and so on. JSONArray jsonLinks = (JSONArray) block.get("links"); if (jsonLinks != null) { Iterator<JSONObject> iterator = jsonLinks.iterator(); while (iterator.hasNext()) { JSONObject link = (iterator.next()); String linkname = (String) link.get("title"); if (!this.links.containsKey(linkname) && !blackTerms.contains(linkname)) { this.links.put(linkname, 0); } } } } } catch (ParseException ex) { throw new RuntimeException( "Error while parsing JSON by Wikipedia for page: " + InputPage, ex); } catch (MalformedURLException ex) { throw new RuntimeException( "Malformed Wikipedia URL: " + queryAddress, ex); } catch (IOException ex) { throw new RuntimeException( "Error while reading Wikipedia", ex); } finally { try { if (reader != null) { reader.close(); } } catch (IOException ex) { throw new RuntimeException( "Error while closing Wikipedia stream", ex); } } } private void findFactsInCategory(String cat) { HttpURLConnection con = null; BufferedReader reader = null; cat = cat.replaceAll(" ", "_"); // do the query and save the retrieved json in an object. String queryAddress = String.format("https://%s.%s%s", Locale.ENGLISH, categoryQuery, cat); try { con = (HttpURLConnection) (new URL(queryAddress)).openConnection(); con.setRequestProperty("User-Agent", userAgent); con.setRequestMethod("GET"); reader = new BufferedReader(new InputStreamReader(con.getInputStream())); Object json = (new JSONParser()).parse(reader); // closing connection con.disconnect(); JSONObject queryblock = (JSONObject) json; JSONObject mainBlock = (JSONObject) queryblock.get("query"); JSONArray categoriesBlock = (JSONArray) mainBlock.get("categorymembers"); Iterator<JSONObject> iterator = categoriesBlock.iterator(); System.out.println("This category has " + categoriesBlock.size() + " pages"); int counter = 0; while (iterator.hasNext()) { System.out.println("Page " + counter++); JSONObject singleCategoryBlock = (iterator.next()); String pageName = (String) singleCategoryBlock.get("title"); pageName = pageName.replace(" ", "_"); if (!pageName.equals(InputPage)) findFactsInPage(pageName); } } catch (ParseException ex) { throw new RuntimeException( "Error while parsing JSON by Wikipedia for page: " + cat, ex); } catch (MalformedURLException ex) { throw new RuntimeException( "Malformed Wikipedia URL: " + queryAddress, ex); } catch (IOException ex) { throw new RuntimeException( "Error while reading Wikipedia", ex); } finally { try { if (reader != null) { reader.close(); } } catch (IOException ex) { throw new RuntimeException( "Error while closing Wikipedia stream", ex); } } } private void findFactsInPage(String pageName) { HttpURLConnection con = null; BufferedReader reader = null; pageName = pageName.replaceAll(" ", "_"); // do the query and save the retrieved json in an object. 
String queryAddress = String.format("https://%s.%s%s", Locale.ENGLISH, singlePageQuery, pageName); try { con = (HttpURLConnection) (new URL(queryAddress)).openConnection(); con.setRequestProperty("User-Agent", userAgent); con.setRequestMethod("GET"); reader = new BufferedReader(new InputStreamReader(con.getInputStream())); Object json = (new JSONParser()).parse(reader); // closing connection con.disconnect(); JSONObject queryblock = (JSONObject) json; JSONObject pagesBlock = (JSONObject) queryblock.get("query"); JSONObject idBlock = (JSONObject) pagesBlock.get("pages"); // if we pipe'd more than one title, we'll have more than one pageId entry for (Iterator it = idBlock.keySet().iterator(); it.hasNext();) { String pageId = (String) it.next(); JSONObject block = (JSONObject) idBlock.get(pageId); JSONArray jsonLinks = (JSONArray) block.get("links"); if (jsonLinks != null) { Iterator<JSONObject> iterator = jsonLinks.iterator(); while (iterator.hasNext()) { JSONObject link = (iterator.next()); String linkName = (String) link.get("title"); if (this.links.containsKey(linkName)) { int newValue = links.get(linkName) + 1; links.replace(linkName, newValue); } } } } } catch (ParseException ex) { throw new RuntimeException( "Error while parsing JSON by Wikipedia for page: " + pageName, ex); } catch (MalformedURLException ex) { throw new RuntimeException( "Malformed Wikipedia URL: " + queryAddress, ex); } catch (IOException ex) { throw new RuntimeException( "Error while reading Wikipedia", ex); } finally { try { if (reader != null) { reader.close(); } } catch (IOException ex) { throw new RuntimeException( "Error while closing Wikipedia stream", ex); } } } } // class
filter "service" pages from the search
src/main/java/org/sssw/relrel/FactFinder.java
filter "service" pages from the search
<ide><path>rc/main/java/org/sssw/relrel/FactFinder.java <ide> String pageName = (String) singleCategoryBlock.get("title"); <ide> pageName = pageName.replace(" ", "_"); <ide> <del> <del> <del> if (!pageName.equals(InputPage)) <add> // Please be aware that the categories JSON returns not only <add> // pages, but also (sub) categories and other things we don't want. <add> // So, keep only the pages and skip the rest. <add> <add> // For further information, please check <add> // https://en.wikipedia.org/wiki/Wikipedia:Namespace <add> <add> int pageNamespace = ((Number) singleCategoryBlock.get("ns")).intValue(); <add> <add> if (!pageName.equals(InputPage) && pageNamespace == 0) <ide> findFactsInPage(pageName); <ide> }
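The fix in this diff relies on MediaWiki's namespace numbering: list=categorymembers returns subcategories (ns 14) and files (ns 6) alongside articles (ns 0), so only ns == 0 entries should be fed back into the page scraper. A minimal filter over already-parsed members is sketched below (the record type is illustrative); the API could also filter server-side with the documented cmnamespace=0 parameter, which would avoid transferring the unwanted entries at all.

import java.util.List;
import java.util.stream.Collectors;

// Keep only main-namespace (ns == 0) category members; subcategories
// arrive with ns == 14 and files with ns == 6 and must be skipped.
public final class NamespaceFilter {
    record Member(String title, int ns) { }

    static List<String> articleTitles(List<Member> members) {
        return members.stream()
                .filter(m -> m.ns() == 0)
                .map(Member::title)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Member> members = List.of(
                new Member("Alan Turing", 0),
                new Member("Category:Computability theory", 14),
                new Member("File:Turing.jpg", 6));
        System.out.println(articleTitles(members)); // [Alan Turing]
    }
}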
Java
apache-2.0
8e40e22dc1454c598b6a9b35adbf1dea2e6ab2d8
0
52nlp/webanno,52nlp/webanno,52nlp/webanno
/*******************************************************************************
 * Copyright 2012
 * Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
 * Technische Universität Darmstadt
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package de.tudarmstadt.ukp.clarin.webanno.api.dao;

import static de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst.CHAIN_TYPE;
import static de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst.RELATION_TYPE;
import static de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst.SPAN_TYPE;
import static org.apache.commons.io.IOUtils.closeQuietly;
import static org.apache.commons.io.IOUtils.copyLarge;
import static org.apache.commons.lang.StringUtils.isBlank;
import static org.apache.uima.cas.impl.Serialization.deserializeCASComplete;
import static org.apache.uima.cas.impl.Serialization.serializeCASComplete;
import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngine;
import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngineDescription;
import static org.apache.uima.fit.pipeline.SimplePipeline.runPipeline;

import java.beans.PropertyDescriptor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.annotation.Resource;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.LineIterator;
import org.apache.commons.io.comparator.LastModifiedFileComparator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.uima.UIMAException;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.TypeSystem;
import org.apache.uima.cas.impl.CASCompleteSerializer;
import org.apache.uima.cas.impl.CASImpl;
import org.apache.uima.cas.impl.Serialization;
import org.apache.uima.collection.CollectionReader;
import org.apache.uima.fit.factory.CollectionReaderFactory;
import org.apache.uima.fit.factory.JCasFactory;
import org.apache.uima.fit.factory.TypeSystemDescriptionFactory;
import org.apache.uima.fit.util.CasUtil;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.metadata.TypeDescription;
import org.apache.uima.resource.metadata.TypeSystemDescription;
import org.apache.uima.resource.metadata.impl.TypeSystemDescription_impl;
import org.apache.uima.util.CasCreationUtils;
import org.hibernate.Session;
import org.hibernate.jdbc.Work;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.transaction.annotation.Transactional;

import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService;
import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService;
import de.tudarmstadt.ukp.clarin.webanno.api.UserDao;
import de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationFeature;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer;
import de.tudarmstadt.ukp.clarin.webanno.model.Authority;
import de.tudarmstadt.ukp.clarin.webanno.model.AutomationStatus;
import de.tudarmstadt.ukp.clarin.webanno.model.CrowdJob;
import de.tudarmstadt.ukp.clarin.webanno.model.MiraTemplate;
import de.tudarmstadt.ukp.clarin.webanno.model.Mode;
import de.tudarmstadt.ukp.clarin.webanno.model.PermissionLevel;
import de.tudarmstadt.ukp.clarin.webanno.model.Project;
import de.tudarmstadt.ukp.clarin.webanno.model.ProjectPermission;
import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument;
import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocumentStateTransition;
import de.tudarmstadt.ukp.clarin.webanno.model.TagSet;
import de.tudarmstadt.ukp.clarin.webanno.model.User;
import de.tudarmstadt.ukp.dkpro.core.api.io.JCasFileWriter_ImplBase;
import de.tudarmstadt.ukp.dkpro.core.api.io.ResourceCollectionReaderBase;
import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.DocumentMetaData;
import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.TagsetDescription;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;
import de.tudarmstadt.ukp.dkpro.core.tokit.BreakIteratorSegmenter;

/**
 * Implementation of methods defined in the {@link RepositoryService} interface.
 *
 * @author Seid Muhie Yimam
 */
public class RepositoryServiceDbData
    implements RepositoryService
{
    private final Log log = LogFactory.getLog(getClass());

    public static Logger createLog(Project aProject, String aUser)
        throws IOException
    {
        Logger logger = Logger.getLogger(RepositoryServiceDbData.class);
        String targetLog = dir.getAbsolutePath() + PROJECT + "project-" + aProject.getId()
                + ".log";
        FileAppender apndr = new FileAppender(new PatternLayout("%d [" + aUser + "] %m%n"),
                targetLog, true);
        logger.addAppender(apndr);
        logger.setLevel(Level.ALL);
        return logger;
    }

    @Resource(name = "annotationService")
    private AnnotationService annotationService;

    @Resource(name = "userRepository")
    private UserDao userRepository;

    @Value(value = "${backup.keep.time}")
    private long backupKeepTime;

    @Value(value = "${crowdsource.enabled}")
    private int crowdsourceEnabled;

    @Value(value = "${backup.interval}")
    private long backupInterval;

    @Value(value = "${backup.keep.number}")
    private int backupKeepNumber;

    @Resource(name = "formats")
    private Properties readWriteFileFormats;

    @Resource(name = "helpFile")
    private Properties helpProperiesFile;

    private static final String PROJECT = "/project/";
    private static final String MIRA = "/mira/";
    private static final String MIRA_TEMPLATE = "/template/";
    private static final String DOCUMENT = "/document/";
    private static final String SOURCE = "/source";
    private static final String GUIDELINE = "/guideline/";
    private static final String ANNOTATION = "/annotation";
    private static final String SETTINGS = "/settings/";
    private static final String META_INF = "/META-INF/";
    private static final String TEMPLATE = "/crowdtemplates/";
    private static final String HELP_FILE = "/help.properties";

    @PersistenceContext
    private EntityManager entityManager;

    private static File dir;

    // Name of the annotation preference properties file
    String annotationPreferencePropertiesFileName;

    private final Object lock = new Object();

    public RepositoryServiceDbData()
    {
    }

    @Override
    @Transactional
    public void createAnnotationDocument(AnnotationDocument aAnnotationDocument)
        throws IOException
    {
        if (aAnnotationDocument.getId() == 0) {
            entityManager.persist(aAnnotationDocument);
        }
        else {
            entityManager.merge(aAnnotationDocument);
        }

        createLog(aAnnotationDocument.getProject(), aAnnotationDocument.getUser()).info(
                " User [" + aAnnotationDocument.getUser()
                        + "] creates annotation document for source document ["
                        + aAnnotationDocument.getDocument().getId() + "] in project ["
                        + aAnnotationDocument.getProject().getId() + "] with id ["
                        + aAnnotationDocument.getId() + "]");
        createLog(aAnnotationDocument.getProject(), aAnnotationDocument.getUser())
                .removeAllAppenders();
    }

    /**
     * Renames a file.
     *
     * @throws IOException
     *             if the file cannot be renamed.
     * @return the target file.
     */
    private File renameFile(File aFrom, File aTo)
        throws IOException
    {
        if (!aFrom.renameTo(aTo)) {
            throw new IOException("Cannot rename file [" + aFrom + "] to [" + aTo + "]");
        }

        // We are not sure if File is mutable. This makes sure we get a new file
        // in any case.
        return new File(aTo.getPath());
    }

    /**
     * Get the folder where the annotations are stored. Creates the folder if necessary.
     *
     * @throws IOException
     *             if the folder cannot be created.
*/ private File getAnnotationFolder(SourceDocument aDocument) throws IOException { File annotationFolder = new File(dir, PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + ANNOTATION); FileUtils.forceMkdir(annotationFolder); return annotationFolder; } @Override public File getDocumentFolder(SourceDocument aDocument) throws IOException { File sourceDocFolder = new File(dir, PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE); FileUtils.forceMkdir(sourceDocFolder); return sourceDocFolder; } @Override @Transactional public void createAnnotationDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) throws IOException { writeCas(aDocument, aJcas, aUser.getUsername(), aUser); } @Override @Transactional public void createProject(Project aProject, User aUser) throws IOException { entityManager.persist(aProject); String path = dir.getAbsolutePath() + PROJECT + aProject.getId(); FileUtils.forceMkdir(new File(path)); createLog(aProject, aUser.getUsername()) .info(" Created Project [" + aProject.getName() + "] with ID [" + aProject.getId() + "]"); createLog(aProject, aUser.getUsername()).removeAllAppenders(); } @Override @Transactional public void createCrowdJob(CrowdJob aCrowdJob) throws IOException { if (aCrowdJob.getId() == 0) { entityManager.persist(aCrowdJob); } else { entityManager.merge(aCrowdJob); } createLog(aCrowdJob.getProject(), "crowd_user").info( " Created crowd job from project [" + aCrowdJob.getProject() + "] with ID [" + aCrowdJob.getId() + "]"); createLog(aCrowdJob.getProject(), "crowd_user").removeAllAppenders(); } @Override @Transactional public void createProjectPermission(ProjectPermission aPermission) throws IOException { entityManager.persist(aPermission); createLog(aPermission.getProject(), aPermission.getUser()).info( " New Permission created on Project[" + aPermission.getProject().getName() + "] for user [" + aPermission.getUser() + "] with permission [" + aPermission.getLevel() + "]" + "]"); createLog(aPermission.getProject(), aPermission.getUser()).removeAllAppenders(); } @Override @Transactional public void createSourceDocument(SourceDocument aDocument, User aUser) throws IOException { if (aDocument.getId() == 0) { entityManager.persist(aDocument); } else { entityManager.merge(aDocument); } } @Override @Transactional public boolean existsAnnotationDocument(SourceDocument aDocument, User aUser) { try { entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project " + " AND document = :document AND user = :user", AnnotationDocument.class) .setParameter("project", aDocument.getProject()) .setParameter("document", aDocument).setParameter("user", aUser.getUsername()) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsCorrectionDocument(SourceDocument aDocument) { try { getCorrectionDocumentContent(aDocument); return true; } catch (Exception ex) { return false; } } @Override @Transactional public boolean existsProject(String aName) { try { entityManager.createQuery("FROM Project WHERE name = :name", Project.class) .setParameter("name", aName).getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsAnnotationDocumentContent(SourceDocument aSourceDocument, String aUsername) throws IOException { if (new File(getAnnotationFolder(aSourceDocument), aUsername + ".ser").exists()) { return true; } else { return false; } } @Override 
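// NOTE (illustrative sketch, not part of the original source): the path constants above
// combine into a fixed per-project layout below the repository root "dir". Assuming a
// project with id 1, a document with id 2 and an annotator "alice" (ids and user name are
// invented for illustration), the tree looks roughly like:
//
//   <dir>/project/1/document/2/source/                      - uploaded source file
//   <dir>/project/1/document/2/annotation/alice.ser         - per-user serialized CAS
//   <dir>/project/1/document/2/annotation/CURATION_USER.ser - curation result
//   <dir>/project/1/guideline/                              - guideline files
//   <dir>/project/1/mira/template/                          - MIRA templates
//   <dir>/project/1/settings/alice/                         - per-user preferences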
@Transactional(noRollbackFor = NoResultException.class) public boolean existsAutomatedDocument(SourceDocument aSourceDocument) { try { getCorrectionDocumentContent(aSourceDocument); return true; } catch (UIMAException e) { return false; } catch (DataRetrievalFailureException e) { return false; } catch (ClassNotFoundException e) { return false; } catch (IOException e) { return false; } } @Override @Transactional public boolean existsCrowdJob(String aName) { try { entityManager.createQuery("FROM CrowdJob WHERE name = :name", CrowdJob.class) .setParameter("name", aName).getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override public boolean existsProjectPermission(User aUser, Project aProject) { List<ProjectPermission> projectPermissions = entityManager .createQuery( "FROM ProjectPermission WHERE user = :user AND " + "project =:project", ProjectPermission.class).setParameter("user", aUser.getUsername()) .setParameter("project", aProject).getResultList(); // if at least one permission level exist if (projectPermissions.size() > 0) { return true; } else { return false; } } @Override @Transactional public boolean existsProjectPermissionLevel(User aUser, Project aProject, PermissionLevel aLevel) { try { entityManager .createQuery( "FROM ProjectPermission WHERE user = :user AND " + "project =:project AND level =:level", ProjectPermission.class).setParameter("user", aUser.getUsername()) .setParameter("project", aProject).setParameter("level", aLevel) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsSourceDocument(Project aProject, String aFileName) { try { entityManager .createQuery( "FROM SourceDocument WHERE project = :project AND " + "name =:name ", SourceDocument.class).setParameter("project", aProject) .setParameter("name", aFileName).getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsProjectTimeStamp(Project aProject, String aUsername) { try { if (getProjectTimeStamp(aProject, aUsername) == null) { return false; } return true; } catch (NoResultException ex) { return false; } } @Override public boolean existsProjectTimeStamp(Project aProject) { try { if (getProjectTimeStamp(aProject) == null) { return false; } return true; } catch (NoResultException ex) { return false; } } @Override public boolean existsUser(String username) { try { getUser(username); return true; } catch (NoResultException e) { return false; } } /** * A new directory is created using UUID so that every exported file will reside in its own * directory. This is useful as the written file can have multiple extensions based on the * Writer class used. */ @SuppressWarnings({ "rawtypes", "unchecked" }) @Override @Transactional public File exportAnnotationDocument(SourceDocument aDocument, String aUser, Class aWriter, String aFileName, Mode aMode) throws UIMAException, IOException, ClassNotFoundException { return exportAnnotationDocument(aDocument, aUser, aWriter, aFileName, aMode, true); } /** * A new directory is created using UUID so that every exported file will reside in its own * directory. This is useful as the written file can have multiple extensions based on the * Writer class used. 
*/ @SuppressWarnings({ "rawtypes", "unchecked" }) @Override @Transactional public File exportAnnotationDocument(SourceDocument aDocument, String aUser, Class aWriter, String aFileName, Mode aMode, boolean aStripExtension) throws UIMAException, IOException, ClassNotFoundException { File annotationFolder = getAnnotationFolder(aDocument); String serializedCasFileName; // for Correction, it will export the corrected document (of the logged in user) // (CORRECTION_USER.ser is the automated result displayed for the user to correct it, not // the final result) for automation, it will export either the corrected document // (Annotated) or the automated document if (aMode.equals(Mode.ANNOTATION) || aMode.equals(Mode.AUTOMATION) || aMode.equals(Mode.CORRECTION)) { serializedCasFileName = aUser + ".ser"; } // The merge result will be exported else { serializedCasFileName = WebAnnoConst.CURATION_USER + ".ser"; } // Read file File serializedCasFile = new File(annotationFolder, serializedCasFileName); if (!serializedCasFile.exists()) { throw new FileNotFoundException("Annotation file [" + serializedCasFileName + "] not found in [" + annotationFolder + "]"); } CAS cas = CasCreationUtils.createCas((TypeSystemDescription) null, null, null); readSerializedCas(cas.getJCas(), serializedCasFile); // Update type system the CAS upgrade(cas, aDocument.getProject()); // Update the source file name in case it is changed for some reason Project project = aDocument.getProject(); File currentDocumentUri = new File(dir.getAbsolutePath() + PROJECT + project.getId() + DOCUMENT + aDocument.getId() + SOURCE); DocumentMetaData documentMetadata = DocumentMetaData.get(cas.getJCas()); documentMetadata.setDocumentUri(new File(currentDocumentUri, aFileName).toURI().toURL() .toExternalForm()); documentMetadata.setDocumentBaseUri(currentDocumentUri.toURI().toURL().toExternalForm()); documentMetadata.setCollectionId(currentDocumentUri.toURI().toURL().toExternalForm()); documentMetadata.setDocumentUri(new File(dir.getAbsolutePath() + PROJECT + project.getId() + DOCUMENT + aDocument.getId() + SOURCE + "/" + aFileName).toURI().toURL() .toExternalForm()); // update with the correct tagset name List<AnnotationFeature> features = annotationService.listAnnotationFeature(project); for (AnnotationFeature feature : features) { TagSet tagSet = feature.getTagset(); if (tagSet == null) { continue; } else if (!feature.getLayer().getType().equals(WebAnnoConst.CHAIN_TYPE)) { updateCasWithTagSet(cas, feature.getLayer().getName(), tagSet.getName()); } } File exportTempDir = File.createTempFile("webanno", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); AnalysisEngineDescription writer; if (aWriter.getName() .equals("de.tudarmstadt.ukp.clarin.webanno.tsv.WebannoCustomTsvWriter")) { List<AnnotationLayer> layers = annotationService .listAnnotationLayer(aDocument.getProject()); List<String> multipleSpans = new ArrayList<String>(); for (AnnotationLayer layer : layers) { if (layer.isMultipleTokens()) { multipleSpans.add(layer.getName()); } } writer = createEngineDescription(aWriter, JCasFileWriter_ImplBase.PARAM_TARGET_LOCATION, exportTempDir, JCasFileWriter_ImplBase.PARAM_STRIP_EXTENSION, aStripExtension, "multipleSpans", multipleSpans); } else { writer = createEngineDescription(aWriter, JCasFileWriter_ImplBase.PARAM_TARGET_LOCATION, exportTempDir, JCasFileWriter_ImplBase.PARAM_STRIP_EXTENSION, aStripExtension); } runPipeline(cas, writer); createLog(project, aUser).info( " Exported annotation file [" + aDocument.getName() + "] with ID [" + 
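// NOTE (illustrative usage sketch, assumptions flagged): a caller would typically export a
// user's annotations as below; the writer class is the one special-cased above and the
// target file name is invented for the example, only the method signature comes from this
// class:
//
//   File out = repository.exportAnnotationDocument(sourceDocument, "alice",
//           WebannoCustomTsvWriter.class, "document.tsv", Mode.ANNOTATION);
//
// When the writer emits more than one file, the temporary export directory is zipped
// (see the branch below); otherwise the single output file is returned directly.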
aDocument.getId() + "] for user [" + aUser + "] from project [" + project.getId() + "]"); createLog(project, aUser).removeAllAppenders(); File exportFile; if (exportTempDir.listFiles().length > 1) { exportFile = new File(exportTempDir.getAbsolutePath() + ".zip"); try { ZipUtils.zipFolder(exportTempDir, exportFile); } catch (Exception e) { createLog(project, aUser).info("Unable to create zip File"); } } else { exportFile = new File(exportTempDir.getParent(), exportTempDir.listFiles()[0].getName()); FileUtils.copyFile(exportTempDir.listFiles()[0], exportFile); } FileUtils.forceDelete(exportTempDir); return exportFile; } @Override public File exportSourceDocument(SourceDocument aDocument) { File documentUri = new File(dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE); return new File(documentUri, aDocument.getName()); } @Override public File exportserializedCas(SourceDocument aDocument, String aUser) { File documentUri = new File(dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + ANNOTATION); return new File(documentUri, aUser + ".ser"); } @Override public File exportProjectLog(Project aProject) { return new File(dir.getAbsolutePath() + PROJECT + "project-" + aProject.getId() + ".log"); } @Override public File exportGuidelines(Project aProject) { return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE); } @Override public File exportProjectMetaInf(Project aProject) { return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + META_INF); } @Override @Transactional(noRollbackFor = NoResultException.class) public AnnotationDocument getAnnotationDocument(SourceDocument aDocument, User aUser) { return entityManager .createQuery( "FROM AnnotationDocument WHERE document = :document AND " + "user =:user" + " AND project = :project", AnnotationDocument.class) .setParameter("document", aDocument).setParameter("user", aUser.getUsername()) .setParameter("project", aDocument.getProject()).getSingleResult(); } @Override @Transactional public JCas getAnnotationDocumentContent(AnnotationDocument aAnnotationDocument) throws IOException, UIMAException, ClassNotFoundException { return getAnnotationContent(aAnnotationDocument.getDocument(), aAnnotationDocument.getUser()); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<Authority> listAuthorities(User aUser) { return entityManager .createQuery("FROM Authority where username =:username", Authority.class) .setParameter("username", aUser).getResultList(); } @Override public File getDir() { return dir; } @Override public File getGuideline(Project aProject, String aFilename) { return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE + aFilename); } @Override public File getTemplate(String fileName) throws IOException { FileUtils.forceMkdir(new File(dir.getAbsolutePath() + TEMPLATE)); return new File(dir.getAbsolutePath() + TEMPLATE, fileName); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<ProjectPermission> listProjectPermisionLevel(User aUser, Project aProject) { return entityManager .createQuery("FROM ProjectPermission WHERE user =:user AND " + "project =:project", ProjectPermission.class).setParameter("user", aUser.getUsername()) .setParameter("project", aProject).getResultList(); } @Override public List<User> listProjectUsersWithPermissions(Project aProject) { List<String> usernames = entityManager .createQuery( "SELECT DISTINCT user FROM 
ProjectPermission WHERE "
                                + "project =:project ORDER BY user ASC", String.class)
                .setParameter("project", aProject).getResultList();
        List<User> users = new ArrayList<User>();
        for (String username : usernames) {
            if (existsUser(username)) {
                users.add(getUser(username));
            }
        }
        return users;
    }

    @Override
    public List<User> listProjectUsersWithPermissions(Project aProject,
            PermissionLevel aPermissionLevel)
    {
        List<String> usernames = entityManager
                .createQuery(
                        "SELECT DISTINCT user FROM ProjectPermission WHERE "
                                + "project =:project AND level =:level ORDER BY user ASC",
                        String.class).setParameter("project", aProject)
                .setParameter("level", aPermissionLevel).getResultList();
        List<User> users = new ArrayList<User>();
        for (String username : usernames) {
            if (existsUser(username)) {
                users.add(getUser(username));
            }
        }
        return users;
    }

    @Override
    @Transactional
    public Project getProject(String aName)
    {
        return entityManager.createQuery("FROM Project WHERE name = :name", Project.class)
                .setParameter("name", aName).getSingleResult();
    }

    @Override
    @Transactional
    public CrowdJob getCrowdJob(String aName, Project aProject)
    {
        return entityManager
                .createQuery("FROM CrowdJob WHERE name = :name AND project = :project",
                        CrowdJob.class).setParameter("name", aName)
                .setParameter("project", aProject).getSingleResult();
    }

    @Override
    public Project getProject(long aId)
    {
        return entityManager.createQuery("FROM Project WHERE id = :id", Project.class)
                .setParameter("id", aId).getSingleResult();
    }

    @Override
    public void createGuideline(Project aProject, File aContent, String aFileName,
            String aUsername)
        throws IOException
    {
        String guidelinePath = dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE;
        FileUtils.forceMkdir(new File(guidelinePath));
        copyLarge(new FileInputStream(aContent), new FileOutputStream(new File(guidelinePath
                + aFileName)));

        createLog(aProject, aUsername).info(
                " Created Guideline file [" + aFileName + "] for Project ["
                        + aProject.getName() + "] with ID [" + aProject.getId() + "]");
        createLog(aProject, aUsername).removeAllAppenders();
    }

    @Override
    public void createTemplate(Project aProject, File aContent, String aFileName,
            String aUsername)
        throws IOException
    {
        String templatePath = dir.getAbsolutePath() + PROJECT + aProject.getId() + MIRA
                + MIRA_TEMPLATE;
        FileUtils.forceMkdir(new File(templatePath));
        copyLarge(new FileInputStream(aContent), new FileOutputStream(new File(templatePath
                + aFileName)));

        createLog(aProject, aUsername).info(
                " Created Template file [" + aFileName + "] for Project ["
                        + aProject.getName() + "] with ID [" + aProject.getId() + "]");
        createLog(aProject, aUsername).removeAllAppenders();
    }

    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public List<ProjectPermission> getProjectPermisions(Project aProject)
    {
        return entityManager
                .createQuery("FROM ProjectPermission WHERE project =:project",
                        ProjectPermission.class).setParameter("project", aProject)
                .getResultList();
    }

    @Override
    @Transactional(noRollbackFor = NoResultException.class)
    public SourceDocument getSourceDocument(Project aProject, String aDocumentName)
    {
        return entityManager
                .createQuery("FROM SourceDocument WHERE name = :name AND project =:project",
                        SourceDocument.class).setParameter("name", aDocumentName)
                .setParameter("project", aProject).getSingleResult();
    }

    @Override
    @Transactional
    public File getSourceDocumentContent(SourceDocument aDocument)
    {
        String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId()
                + DOCUMENT + aDocument.getId() + SOURCE;
        return new File(path + "/" +
aDocument.getName()); } @Override @Transactional public Date getProjectTimeStamp(Project aProject, String aUsername) { return entityManager .createQuery( "SELECT max(timestamp) FROM AnnotationDocument WHERE project = :project " + " AND user = :user", Date.class) .setParameter("project", aProject).setParameter("user", aUsername) .getSingleResult(); } @Override public Date getProjectTimeStamp(Project aProject) { return entityManager .createQuery("SELECT max(timestamp) FROM SourceDocument WHERE project = :project", Date.class).setParameter("project", aProject).getSingleResult(); } @Override @Transactional(noRollbackFor = NoResultException.class) public User getUser(String aUsername) { return entityManager.createQuery("FROM User WHERE username =:username", User.class) .setParameter("username", aUsername).getSingleResult(); } @Override @Transactional(noRollbackFor = NoResultException.class) public boolean existsFinishedAnnotation(SourceDocument aDocument) { List<AnnotationDocument> annotationDocuments = entityManager .createQuery("FROM AnnotationDocument WHERE document = :document", AnnotationDocument.class).setParameter("document", aDocument) .getResultList(); for (AnnotationDocument annotationDocument : annotationDocuments) { if (annotationDocument.getState().equals(AnnotationDocumentState.FINISHED)) { return true; } } return false; } @Override @Transactional(noRollbackFor = NoResultException.class) public boolean isAnnotationFinished(SourceDocument aDocument, User aUser) { try { AnnotationDocument annotationDocument = entityManager .createQuery( "FROM AnnotationDocument WHERE document = :document AND " + "user =:user", AnnotationDocument.class) .setParameter("document", aDocument).setParameter("user", aUser.getUsername()) .getSingleResult(); if (annotationDocument.getState().equals(AnnotationDocumentState.FINISHED)) { return true; } else { return false; } } // User even didn't start annotating catch (NoResultException e) { return false; } } @Override @Transactional(noRollbackFor = NoResultException.class) public List<AnnotationDocument> listAnnotationDocuments(SourceDocument aDocument) { // Get all annotators in the project List<String> users = getAllAnnotators(aDocument.getProject()); // Bail out already. HQL doesn't seem to like queries with an empty // parameter right of "in" if (users.isEmpty()) { return new ArrayList<AnnotationDocument>(); } return entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND document = :document " + "AND user in (:users)", AnnotationDocument.class) .setParameter("project", aDocument.getProject()).setParameter("users", users) .setParameter("document", aDocument).getResultList(); } @Override public int numberOfExpectedAnnotationDocuments(Project aProject) { // Get all annotators in the project List<String> users = getAllAnnotators(aProject); // Bail out already. 
HQL doesn't seem to like queries with an empty // parameter right of "in" if (users.isEmpty()) { return 0; } int ignored = 0; List<AnnotationDocument> annotationDocuments = entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND user in (:users)", AnnotationDocument.class).setParameter("project", aProject) .setParameter("users", users).getResultList(); for (AnnotationDocument annotationDocument : annotationDocuments) { if (annotationDocument.getState().equals(AnnotationDocumentState.IGNORE)) { ignored++; } } return listSourceDocuments(aProject).size() * users.size() - ignored; } @Override public List<AnnotationDocument> listFinishedAnnotationDocuments(Project aProject) { // Get all annotators in the project List<String> users = getAllAnnotators(aProject); // Bail out already. HQL doesn't seem to like queries with an empty // parameter right of "in" if (users.isEmpty()) { return new ArrayList<AnnotationDocument>(); } return entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND state = :state" + " AND user in (:users)", AnnotationDocument.class) .setParameter("project", aProject).setParameter("users", users) .setParameter("state", AnnotationDocumentState.FINISHED).getResultList(); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<AnnotationDocument> listAllAnnotationDocuments(SourceDocument aSourceDocument) { return entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND document = :document", AnnotationDocument.class) .setParameter("project", aSourceDocument.getProject()) .setParameter("document", aSourceDocument).getResultList(); } @Override public List<String> listGuidelines(Project aProject) { // list all guideline files File[] files = new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE) .listFiles(); // Name of the guideline files List<String> annotationGuidelineFiles = new ArrayList<String>(); if (files != null) { for (File file : files) { annotationGuidelineFiles.add(file.getName()); } } return annotationGuidelineFiles; } @Override public List<String> listTemplates(Project aProject) { // list all MIRA template files File[] files = new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + MIRA + MIRA_TEMPLATE).listFiles(); // Name of the MIRA template files List<String> templateFiles = new ArrayList<String>(); if (files != null) { for (File file : files) { templateFiles.add(file.getName()); } } return templateFiles; } @Override @Transactional public List<Project> listProjects() { return entityManager.createQuery("FROM Project ORDER BY name ASC ", Project.class) .getResultList(); } @Override @Transactional public List<CrowdJob> listCrowdJobs() { return entityManager.createQuery("FROM CrowdJob", CrowdJob.class).getResultList(); } @Override @Transactional public List<CrowdJob> listCrowdJobs(Project aProject) { return entityManager.createQuery("FROM CrowdJob where project =:project", CrowdJob.class) .setParameter("project", aProject).getResultList(); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<SourceDocument> listSourceDocuments(Project aProject) { List<SourceDocument> sourceDocuments = entityManager .createQuery("FROM SourceDocument where project =:project", SourceDocument.class) .setParameter("project", aProject).getResultList(); List<SourceDocument> tabSepDocuments = new ArrayList<SourceDocument>(); for (SourceDocument sourceDocument : sourceDocuments) { if (sourceDocument.getFormat().equals(WebAnnoConst.TAB_SEP)) { 
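// NOTE (descriptive, added for clarity): documents in the tab-separated format
// (WebAnnoConst.TAB_SEP) are collected here and then removed from the result, so
// listSourceDocuments(...) only returns documents handled by the regular UIMA readers,
// while listTabSepDocuments(...) below returns exactly the TAB_SEP ones.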
tabSepDocuments.add(sourceDocument); } } sourceDocuments.removeAll(tabSepDocuments); return sourceDocuments; } @Override @Transactional(noRollbackFor = NoResultException.class) public List<SourceDocument> listTabSepDocuments(Project aProject) { List<SourceDocument> sourceDocuments = entityManager .createQuery("FROM SourceDocument where project =:project", SourceDocument.class) .setParameter("project", aProject).getResultList(); List<SourceDocument> tabSepDocuments = new ArrayList<SourceDocument>(); for (SourceDocument sourceDocument : sourceDocuments) { if (sourceDocument.getFormat().equals(WebAnnoConst.TAB_SEP)) { tabSepDocuments.add(sourceDocument); } } return tabSepDocuments; } @Override @Transactional public List<User> listUsers() { return entityManager.createQuery("FROM User", User.class).getResultList(); } @Override public Properties loadUserSettings(String aUsername, Project aProject) throws FileNotFoundException, IOException { Properties property = new Properties(); property.load(new FileInputStream(new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + SETTINGS + aUsername + "/" + annotationPreferencePropertiesFileName))); return property; } @Override public Properties loadHelpContents() throws FileNotFoundException, IOException { if (new File(dir.getAbsolutePath() + HELP_FILE).exists()) { Properties property = new Properties(); property.load(new FileInputStream(new File(dir.getAbsolutePath() + HELP_FILE))); return property; } else { return helpProperiesFile; } } @Override @Transactional public void removeProject(Project aProject, User aUser) throws IOException { // remove, if exists, a crowdsource job created from this project for (CrowdJob crowdJob : listCrowdJobs(aProject)) { removeCrowdJob(crowdJob); } for (SourceDocument document : listSourceDocuments(aProject)) { removeSourceDocument(document, aUser); } for (SourceDocument document : listTabSepDocuments(aProject)) { removeSourceDocument(document, aUser); } for (MiraTemplate template : listMiraTemplates(aProject)) { removeMiraTemplate(template); } for (AnnotationFeature feature : annotationService.listAnnotationFeature(aProject)) { annotationService.removeAnnotationFeature(feature); } // remove the layers too for (AnnotationLayer layer : annotationService.listAnnotationLayer(aProject)) { annotationService.removeAnnotationLayer(layer); } for (TagSet tagSet : annotationService.listTagSets(aProject)) { annotationService.removeTagSet(tagSet); } // remove the project directory from the file system String path = dir.getAbsolutePath() + PROJECT + aProject.getId(); try { FileUtils.deleteDirectory(new File(path)); } catch (FileNotFoundException e) { createLog(aProject, aUser.getUsername()).warn( "Project directory to be deleted was not found: [" + path + "]. 
Ignoring.");
        }

        for (ProjectPermission permission : getProjectPermisions(aProject)) {
            entityManager.remove(permission);
        }

        // remove metadata from DB
        entityManager.remove(aProject);
        createLog(aProject, aUser.getUsername()).info(
                " Removed Project [" + aProject.getName() + "] with ID [" + aProject.getId()
                        + "]");
        createLog(aProject, aUser.getUsername()).removeAllAppenders();
    }

    @Override
    @Transactional
    public void removeCrowdJob(CrowdJob crowdProject)
    {
        entityManager.remove(entityManager.merge(crowdProject));
    }

    @Override
    public void removeGuideline(Project aProject, String aFileName, String username)
        throws IOException
    {
        FileUtils.forceDelete(new File(dir.getAbsolutePath() + PROJECT + aProject.getId()
                + GUIDELINE + aFileName));
        createLog(aProject, username).info(
                " Removed Guideline file from [" + aProject.getName() + "] with ID ["
                        + aProject.getId() + "]");
        createLog(aProject, username).removeAllAppenders();
    }

    @Override
    public void removeTemplate(Project aProject, String aFileName, String username)
        throws IOException
    {
        FileUtils.forceDelete(new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + MIRA
                + MIRA_TEMPLATE + aFileName));
        createLog(aProject, username).info(
                " Removed Template file from [" + aProject.getName() + "] with ID ["
                        + aProject.getId() + "]");
        createLog(aProject, username).removeAllAppenders();
    }

    @Override
    public void removeCurationDocumentContent(SourceDocument aSourceDocument, String aUsername)
        throws IOException
    {
        if (new File(getAnnotationFolder(aSourceDocument), WebAnnoConst.CURATION_USER + ".ser")
                .exists()) {
            FileUtils.forceDelete(new File(getAnnotationFolder(aSourceDocument),
                    WebAnnoConst.CURATION_USER + ".ser"));

            createLog(aSourceDocument.getProject(), aUsername).info(
                    " Removed Curated document from project [" + aSourceDocument.getProject()
                            + "] for the source document [" + aSourceDocument.getId() + "]");
            createLog(aSourceDocument.getProject(), aUsername).removeAllAppenders();
        }
    }

    @Override
    @Transactional
    public void removeProjectPermission(ProjectPermission projectPermission)
        throws IOException
    {
        entityManager.remove(projectPermission);
        createLog(projectPermission.getProject(), projectPermission.getUser()).info(
                " Removed Project Permission [" + projectPermission.getLevel()
                        + "] for the user [" + projectPermission.getUser() + "] from project ["
                        + projectPermission.getProject().getId() + "]");
        createLog(projectPermission.getProject(), projectPermission.getUser())
                .removeAllAppenders();
    }

    @Override
    @Transactional
    public void removeSourceDocument(SourceDocument aDocument, User aUser)
        throws IOException
    {
        for (AnnotationDocument annotationDocument : listAllAnnotationDocuments(aDocument)) {
            removeAnnotationDocument(annotationDocument);
        }

        // remove it from any crowd job it already belongs to
        for (CrowdJob crowdJob : listCrowdJobs(aDocument.getProject())) {
            if (crowdJob.getDocuments().contains(aDocument)) {
                crowdJob.getDocuments().remove(aDocument);
                entityManager.persist(crowdJob);
            }
        }

        entityManager.remove(aDocument);

        String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId()
                + DOCUMENT + aDocument.getId();
        // remove both the source file and the related annotation files from the file system
        if (new File(path).exists()) {
            FileUtils.forceDelete(new File(path));
        }

        createLog(aDocument.getProject(), aUser.getUsername()).info(
                " Removed Document [" + aDocument.getName() + "] with ID [" + aDocument.getId()
                        + "] from Project [" + aDocument.getProject().getId() + "]");
        createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders();
    }

    @Override
    @Transactional
    public void
removeAnnotationDocument(AnnotationDocument aAnnotationDocument) { entityManager.remove(aAnnotationDocument); } public void setDir(File aDir) { dir = aDir; } @Override public void savePropertiesFile(Project aProject, InputStream aIs, String aFileName) throws IOException { String path = dir.getAbsolutePath() + PROJECT + aProject.getId() + "/" + FilenameUtils.getFullPath(aFileName); FileUtils.forceMkdir(new File(path)); File newTcfFile = new File(path, FilenameUtils.getName(aFileName)); OutputStream os = null; try { os = new FileOutputStream(newTcfFile); copyLarge(aIs, os); } finally { closeQuietly(os); closeQuietly(aIs); } } @Override public <T> void saveUserSettings(String aUsername, Project aProject, Mode aSubject, T aConfigurationObject) throws IOException { BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(aConfigurationObject); Properties property = new Properties(); for (PropertyDescriptor value : wrapper.getPropertyDescriptors()) { if (wrapper.getPropertyValue(value.getName()) == null) { continue; } property.setProperty(aSubject + "." + value.getName(), wrapper.getPropertyValue(value.getName()).toString()); } String propertiesPath = dir.getAbsolutePath() + PROJECT + aProject.getId() + SETTINGS + aUsername; // append existing preferences for the other mode if (new File(propertiesPath, annotationPreferencePropertiesFileName).exists()) { // aSubject = aSubject.equals(Mode.ANNOTATION) ? Mode.CURATION : // Mode.ANNOTATION; for (Entry<Object, Object> entry : loadUserSettings(aUsername, aProject).entrySet()) { String key = entry.getKey().toString(); // Maintain other Modes of annotations confs than this one if (!key.substring(0, key.indexOf(".")).equals(aSubject.toString())) { property.put(entry.getKey(), entry.getValue()); } } } FileUtils .forceDeleteOnExit(new File(propertiesPath, annotationPreferencePropertiesFileName)); FileUtils.forceMkdir(new File(propertiesPath)); property.store(new FileOutputStream(new File(propertiesPath, annotationPreferencePropertiesFileName)), null); createLog(aProject, aUsername).info( " Saved preferences file [" + annotationPreferencePropertiesFileName + "] for project [" + aProject.getName() + "] with ID [" + aProject.getId() + "] to location: [" + propertiesPath + "]"); createLog(aProject, aUsername).removeAllAppenders(); } @Override public <T> void saveHelpContents(T aConfigurationObject) throws IOException { BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(aConfigurationObject); Properties property = new Properties(); for (PropertyDescriptor value : wrapper.getPropertyDescriptors()) { if (wrapper.getPropertyValue(value.getName()) == null) { continue; } property.setProperty(value.getName(), wrapper.getPropertyValue(value.getName()) .toString()); } File helpFile = new File(dir.getAbsolutePath() + HELP_FILE); if (helpFile.exists()) { FileUtils.forceDeleteOnExit(helpFile); } else { helpFile.createNewFile(); } property.store(new FileOutputStream(helpFile), null); } @Override @Transactional public void uploadSourceDocument(File aFile, SourceDocument aDocument, User aUser) throws IOException { try { if (aDocument.getFormat().equals(WebAnnoConst.TAB_SEP)) { if (!isTabSepFileFormatCorrect(aFile)) { removeSourceDocument(aDocument, aUser); throw new IOException( "This TAB-SEP file is not in correct format. 
It should have two columns separated by TAB!"); } } else { convertSourceDocumentToCas(aFile, getReadableFormats().get(aDocument.getFormat()), aDocument); } } catch (IOException e) { throw e; } catch (Exception e) { removeSourceDocument(aDocument, aUser); throw new IOException(e.getMessage(), e); } String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE; FileUtils.forceMkdir(new File(path)); File newTcfFile = new File(path, aDocument.getName()); InputStream is = null; OutputStream os = null; try { os = new FileOutputStream(newTcfFile); is = new FileInputStream(aFile); copyLarge(is, os); } finally { closeQuietly(os); closeQuietly(is); } createLog(aDocument.getProject(), aUser.getUsername()).info( " Imported file [" + aDocument.getName() + "] with ID [" + aDocument.getId() + "] to Project [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders(); } @Override @Transactional public void uploadSourceDocument(InputStream aIs, SourceDocument aDocument, User aUser) throws IOException { String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE; FileUtils.forceMkdir(new File(path)); File newTcfFile = new File(path, aDocument.getName()); OutputStream os = null; try { os = new FileOutputStream(newTcfFile); copyLarge(aIs, os); } finally { closeQuietly(os); closeQuietly(aIs); } createLog(aDocument.getProject(), aUser.getUsername()).info( " Imported file [" + aDocument.getName() + "] with ID [" + aDocument.getId() + "] to Project [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders(); } @Override public List<String> getReadableFormatLabels() throws ClassNotFoundException { List<String> readableFormats = new ArrayList<String>(); for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { String readerLabel = key.substring(0, key.lastIndexOf(".label")); if (!isBlank(readWriteFileFormats.getProperty(readerLabel + ".reader"))) { readableFormats.add(readWriteFileFormats.getProperty(key)); } } } Collections.sort(readableFormats); return readableFormats; } @Override public String getReadableFormatId(String aLabel) throws ClassNotFoundException { String readableFormat = ""; for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { if (readWriteFileFormats.getProperty(key).equals(aLabel)) { readableFormat = key.substring(0, key.lastIndexOf(".label")); break; } } } return readableFormat; } @SuppressWarnings("rawtypes") @Override public Map<String, Class> getReadableFormats() throws ClassNotFoundException { Map<String, Class> readableFormats = new HashMap<String, Class>(); for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { String readerLabel = key.substring(0, key.lastIndexOf(".label")); if (!isBlank(readWriteFileFormats.getProperty(readerLabel + ".reader"))) { readableFormats.put(readerLabel, Class.forName(readWriteFileFormats .getProperty(readerLabel + ".reader"))); } } } return readableFormats; } @Override public List<String> getWritableFormatLabels() throws ClassNotFoundException { List<String> writableFormats = new ArrayList<String>(); for (String key : readWriteFileFormats.stringPropertyNames()) { if 
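// NOTE (illustrative sketch): the injected "formats" properties follow a
// <id>.label / <id>.reader / <id>.writer convention; a format counts as readable or
// writable only when the corresponding class entry is present. Example entries (the
// format id and class names are assumptions for illustration, not taken from this file):
//
//   text.label  = Plain text
//   text.reader = de.tudarmstadt.ukp.dkpro.core.io.text.TextReader
//   text.writer = de.tudarmstadt.ukp.dkpro.core.io.text.TextWriter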
(key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { String writerLabel = key.substring(0, key.lastIndexOf(".label")); if (!isBlank(readWriteFileFormats.getProperty(writerLabel + ".writer"))) { writableFormats.add(readWriteFileFormats.getProperty(key)); } } } Collections.sort(writableFormats); return writableFormats; } @Override public String getWritableFormatId(String aLabel) throws ClassNotFoundException { String writableFormat = ""; for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { if (readWriteFileFormats.getProperty(key).equals(aLabel)) { writableFormat = key.substring(0, key.lastIndexOf(".label")); break; } } } return writableFormat; } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public Map<String, Class> getWritableFormats() throws ClassNotFoundException { Map<String, Class> writableFormats = new HashMap<String, Class>(); Set<String> keys = (Set) readWriteFileFormats.keySet(); for (String keyvalue : keys) { if (keyvalue.contains(".label")) { String writerLabel = keyvalue.substring(0, keyvalue.lastIndexOf(".label")); if (readWriteFileFormats.getProperty(writerLabel + ".writer") != null) { writableFormats.put(writerLabel, Class.forName(readWriteFileFormats .getProperty(writerLabel + ".writer"))); } } } return writableFormats; } public String getAnnotationPreferencePropertiesFileName() { return annotationPreferencePropertiesFileName; } public void setAnnotationPreferencePropertiesFileName( String aAnnotationPreferencePropertiesFileName) { annotationPreferencePropertiesFileName = aAnnotationPreferencePropertiesFileName; } @Override @PreAuthorize("hasAnyRole('ROLE_ADMIN','ROLE_USER')") public void createCorrectionDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) throws IOException { writeCas(aDocument, aJcas, WebAnnoConst.CORRECTION_USER, aUser); } @Override @PreAuthorize("hasAnyRole('ROLE_ADMIN','ROLE_USER')") public void createCurationDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) throws IOException { writeCas(aDocument, aJcas, WebAnnoConst.CURATION_USER, aUser); } @Override public JCas getCorrectionDocumentContent(SourceDocument aDocument) throws UIMAException, IOException, ClassNotFoundException { return getAnnotationContent(aDocument, WebAnnoConst.CORRECTION_USER); } @Override public JCas getCurationDocumentContent(SourceDocument aDocument) throws UIMAException, IOException, ClassNotFoundException { return getAnnotationContent(aDocument, WebAnnoConst.CURATION_USER); } /** * Creates an annotation document (either user's annotation document or CURATION_USER's * annotation document) * * @param aDocument * the {@link SourceDocument} * @param aJcas * The annotated CAS object * @param aUserName * the user who annotates the document if it is user's annotation document OR the * CURATION_USER * @param aUser * The user who annotates the document OR the curator who curates the document * @throws IOException */ private void writeCas(SourceDocument aDocument, JCas aJcas, String aUserName, User aUser) throws IOException { log.debug("Updating annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUser.getUsername() + "]"); //DebugUtils.smallStack(); synchronized (lock) { File annotationFolder = getAnnotationFolder(aDocument); FileUtils.forceMkdir(annotationFolder); final String username = aUserName; File currentVersion = new 
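// NOTE (descriptive, added for clarity): the block below implements a simple
// write-with-backup protocol:
//   1. rename the current "<username>.ser" to "<username>.ser.old"
//   2. serialize the new CAS to "<username>.ser"
//   3. on success, delete the ".old" copy; on IOException, restore it
// so a failure while writing never destroys the only copy of an annotation file.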
File(annotationFolder, username + ".ser"); File oldVersion = new File(annotationFolder, username + ".ser.old"); // Save current version try { // Make a backup of the current version of the file before overwriting if (currentVersion.exists()) { renameFile(currentVersion, oldVersion); } // Now write the new version to "<username>.ser" or CURATION_USER.ser DocumentMetaData md; try { md = DocumentMetaData.get(aJcas); } catch (IllegalArgumentException e) { md = DocumentMetaData.create(aJcas); } md.setDocumentId(aUserName); File targetPath = getAnnotationFolder(aDocument); writeSerializedCas(aJcas, new File(targetPath, aUserName+".ser")); createLog(aDocument.getProject(), aUser.getUsername()).info( "Updated annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUser.getUsername() + "]"); createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders(); // If the saving was successful, we delete the old version if (oldVersion.exists()) { FileUtils.forceDelete(oldVersion); } } catch (IOException e) { // If we could not save the new version, restore the old one. FileUtils.forceDelete(currentVersion); // If this is the first version, there is no old version, so do not restore anything if (oldVersion.exists()) { renameFile(oldVersion, currentVersion); } // Now abort anyway throw e; } // Manage history if (backupInterval > 0) { // Determine the reference point in time based on the current version long now = currentVersion.lastModified(); // Get all history files for the current user File[] history = annotationFolder.listFiles(new FileFilter() { private final Matcher matcher = Pattern.compile( Pattern.quote(username) + "\\.ser\\.[0-9]+\\.bak").matcher(""); @Override public boolean accept(File aFile) { // Check if the filename matches the pattern given above. return matcher.reset(aFile.getName()).matches(); } }); // Sort the files (oldest one first) Arrays.sort(history, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR); // Check if we need to make a new history file boolean historyFileCreated = false; File historyFile = new File(annotationFolder, username + ".ser." + now + ".bak"); if (history.length == 0) { // If there is no history yet but we should keep history, then we create a // history file in any case. FileUtils.copyFile(currentVersion, historyFile); historyFileCreated = true; } else { // Check if the newest history file is significantly older than the current one File latestHistory = history[history.length - 1]; if (latestHistory.lastModified() + backupInterval < now) { FileUtils.copyFile(currentVersion, historyFile); historyFileCreated = true; } } // Prune history based on number of backup if (historyFileCreated) { // The new version is not in the history, so we keep that in any case. That // means we need to keep one less. 
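// NOTE (worked example with assumed values): with backupKeepNumber = 5, toKeep below
// becomes 4, because the freshly written backup already counts as one kept version; if
// 6 history files exist, history.length - toKeep = 2 of the oldest ones are deleted.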
int toKeep = Math.max(backupKeepNumber - 1, 0); if ((backupKeepNumber > 0) && (toKeep < history.length)) { // Copy the oldest files to a new array File[] toRemove = new File[history.length - toKeep]; System.arraycopy(history, 0, toRemove, 0, toRemove.length); // Restrict the history to what is left File[] newHistory = new File[toKeep]; if (toKeep > 0) { System.arraycopy(history, toRemove.length, newHistory, 0, newHistory.length); } history = newHistory; // Remove these old files for (File file : toRemove) { FileUtils.forceDelete(file); createLog(aDocument.getProject(), aUser.getUsername()).info( "Removed surplus history file [" + file.getName() + "] " + "for document with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()) .removeAllAppenders(); } } // Prune history based on time if (backupKeepTime > 0) { for (File file : history) { if ((file.lastModified() + backupKeepTime) < now) { FileUtils.forceDelete(file); createLog(aDocument.getProject(), aUser.getUsername()).info( "Removed outdated history file [" + file.getName() + "] " + " for document with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()) .removeAllAppenders(); } } } } } } } /** * For a given {@link SourceDocument}, return the {@link AnnotationDocument} for the user or for * the CURATION_USER * * @param aDocument * the {@link SourceDocument} * @param aUsername * the {@link User} who annotates the {@link SourceDocument} or the CURATION_USER */ private JCas getAnnotationContent(SourceDocument aDocument, String aUsername) throws IOException { if (log.isDebugEnabled()) { log.debug("Getting annotation document [" + aDocument.getName() + "] with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUsername + "]"); } //DebugUtils.smallStack(); synchronized (lock) { File annotationFolder = getAnnotationFolder(aDocument); String file = aUsername + ".ser"; try { File serializedCasFile = new File(annotationFolder, file); if (!serializedCasFile.exists()) { throw new FileNotFoundException("Annotation document of user [" + aUsername + "] for source document [" + aDocument.getName() + "] (" + aDocument.getId() + "). 
not found in project[" + aDocument.getProject().getName() + "] (" + aDocument.getProject().getId() + ")"); } CAS cas = CasCreationUtils.createCas((TypeSystemDescription) null, null, null); readSerializedCas(cas.getJCas(), serializedCasFile); return cas.getJCas(); } catch (UIMAException e) { throw new DataRetrievalFailureException("Unable to parse annotation", e); } } } @Override public boolean isRemoteProject(Project project) { return new File(dir, PROJECT + project.getId() + META_INF).exists(); } private List<String> getAllAnnotators(Project aProject) { // Get all annotators in the project List<String> users = entityManager .createQuery( "SELECT DISTINCT user FROM ProjectPermission WHERE project = :project " + "AND level = :level", String.class) .setParameter("project", aProject).setParameter("level", PermissionLevel.USER) .getResultList(); // check if the username is in the Users database (imported projects // might have username // in the ProjectPermission entry while it is not in the Users database List<String> notInUsers = new ArrayList<String>(); for (String user : users) { if (!userRepository.exists(user)) { notInUsers.add(user); } } users.removeAll(notInUsers); return users; } @Override public void upgradeCasAndSave(SourceDocument aDocument, Mode aMode, String aUsername) throws IOException { User user = getUser(aUsername); if (existsAnnotationDocument(aDocument, user)) { log.debug("Upgrading annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUsername + "] in mode [" + aMode + "]"); //DebugUtils.smallStack(); AnnotationDocument annotationDocument = getAnnotationDocument(aDocument, user); try { CAS cas = getAnnotationDocumentContent(annotationDocument).getCas(); upgrade(cas, aDocument.getProject()); createAnnotationDocumentContent(cas.getJCas(), annotationDocument.getDocument(), user); if (aMode.equals(Mode.ANNOTATION)) { // In this case we only need to upgrade to annotation document } else if (aMode.equals(Mode.AUTOMATION) || aMode.equals(Mode.CORRECTION)) { CAS corrCas = getCorrectionDocumentContent(aDocument).getCas(); upgrade(corrCas, aDocument.getProject()); createCorrectionDocumentContent(corrCas.getJCas(), aDocument, user); } else { CAS curCas = getCurationDocumentContent(aDocument).getCas(); upgrade(curCas, aDocument.getProject()); createCurationDocumentContent(curCas.getJCas(), aDocument, user); } } catch (Exception e) { // no need to catch, it is acceptable that no curation document // exists to be upgraded while there are annotation documents } createLog(aDocument.getProject(), aUsername).info( "Upgraded annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUsername + "] in mode [" + aMode + "]"); createLog(aDocument.getProject(), aUsername).removeAllAppenders(); } } @Override public void upgrade(CAS aCas, Project aProject) throws UIMAException, IOException { TypeSystemDescription builtInTypes = TypeSystemDescriptionFactory .createTypeSystemDescription(); List<TypeSystemDescription> projectTypes = getProjectTypes(aProject); projectTypes.add(builtInTypes); TypeSystemDescription allTypes = CasCreationUtils.mergeTypeSystems(projectTypes); // Prepare template for new CAS CAS newCas = JCasFactory.createJCas(allTypes).getCas(); CASCompleteSerializer serializer = Serialization.serializeCASComplete((CASImpl) newCas); // Save old type system TypeSystem oldTypeSystem = 
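// NOTE (descriptive, added for clarity): the upgrade is a four-step type system migration:
//   1. build the target type system (built-in plus project types) and capture it via an
//      empty CAS in the CASCompleteSerializer above;
//   2. snapshot the existing annotation data together with the old type system
//      (serializeWithCompression below);
//   3. swap the CAS over to the new type system (deserializeCASComplete);
//   4. re-load the snapshot into the upgraded CAS (deserializeCAS), passing the old type
//      system so data for types known to both systems is preserved.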
aCas.getTypeSystem(); // Save old CAS contents ByteArrayOutputStream os2 = new ByteArrayOutputStream(); Serialization.serializeWithCompression(aCas, os2, oldTypeSystem); // Prepare CAS with new type system Serialization.deserializeCASComplete(serializer, (CASImpl) aCas); // Restore CAS data to new type system Serialization.deserializeCAS(aCas, new ByteArrayInputStream(os2.toByteArray()), oldTypeSystem, null); } @Override @Transactional public JCas readJCas(SourceDocument aDocument, Project aProject, User aUser) throws UIMAException, IOException, ClassNotFoundException { AnnotationDocument annotationDocument = null; JCas jCas = null; try { annotationDocument = getAnnotationDocument(aDocument, aUser); if (annotationDocument.getState().equals(AnnotationDocumentState.NEW) && !existsAnnotationDocumentContent(aDocument, aUser.getUsername())) { jCas = createJCas(aDocument, annotationDocument, aProject, aUser); } else { jCas = getAnnotationDocumentContent(annotationDocument); } } // it is new, create it and get CAS object catch (NoResultException ex) { jCas = createJCas(aDocument, annotationDocument, aProject, aUser); } catch (DataRetrievalFailureException e) { throw e; } return jCas; } @Override @Transactional public void updateJCas(Mode aMode, SourceDocument aSourceDocument, User aUser, JCas aJcas) throws IOException { if (aMode.equals(Mode.ANNOTATION) || aMode.equals(Mode.AUTOMATION) || aMode.equals(Mode.CORRECTION) || aMode.equals(Mode.CORRECTION_MERGE)) { createAnnotationDocumentContent(aJcas, aSourceDocument, aUser); } else if (aMode.equals(Mode.CURATION) || aMode.equals(Mode.CURATION_MERGE)) { createCurationDocumentContent(aJcas, aSourceDocument, aUser); } } @Override @Transactional public JCas createJCas(SourceDocument aDocument, AnnotationDocument aAnnotationDocument, Project aProject, User aUser) throws IOException { JCas jCas; // change the state of the source document to in progress aDocument.setState(SourceDocumentStateTransition .transition(SourceDocumentStateTransition.NEW_TO_ANNOTATION_IN_PROGRESS)); try { jCas = convertSourceDocumentToCas(getSourceDocumentContent(aDocument), getReadableFormats().get(aDocument.getFormat()), aDocument); if (!existsAnnotationDocument(aDocument, aUser)) { aAnnotationDocument = new AnnotationDocument(); aAnnotationDocument.setDocument(aDocument); aAnnotationDocument.setName(aDocument.getName()); aAnnotationDocument.setUser(aUser.getUsername()); aAnnotationDocument.setProject(aProject); createAnnotationDocument(aAnnotationDocument); } } catch (UIMAException e) { throw new IOException(e); } catch (ClassNotFoundException e) { throw new IOException(e); } catch (Exception e) { throw new IOException(e.getMessage() != null ? e.getMessage() : "This is an invalid file. 
The reader for the document " + aDocument.getName() + " can't read this " + aDocument.getFormat() + " file type"); } createAnnotationDocumentContent(jCas, aDocument, aUser); return jCas; } @Override @SuppressWarnings({ "rawtypes", "unchecked" }) public JCas convertSourceDocumentToCas(File aFile, Class aReader, SourceDocument aDocument) throws UIMAException, IOException { // Prepare a CAS with the project type system TypeSystemDescription builtInTypes = TypeSystemDescriptionFactory .createTypeSystemDescription(); List<TypeSystemDescription> projectTypes = getProjectTypes(aDocument.getProject()); projectTypes.add(builtInTypes); TypeSystemDescription allTypes = CasCreationUtils.mergeTypeSystems(projectTypes); CAS cas = JCasFactory.createJCas(allTypes).getCas(); // Convert the source document to CAS CollectionReader reader = CollectionReaderFactory.createReader(aReader, ResourceCollectionReaderBase.PARAM_SOURCE_LOCATION, aFile.getParentFile().getAbsolutePath(), ResourceCollectionReaderBase.PARAM_PATTERNS, new String[] { "[+]" + aFile.getName() }); if (!reader.hasNext()) { throw new FileNotFoundException("Annotation file [" + aFile.getName() + "] not found in [" + aFile.getPath() + "]"); } reader.getNext(cas); JCas jCas = cas.getJCas(); // Create sentence / token annotations if they are missing boolean hasTokens = JCasUtil.exists(jCas, Token.class); boolean hasSentences = JCasUtil.exists(jCas, Sentence.class); if (!hasTokens || !hasSentences) { AnalysisEngine pipeline = createEngine(createEngineDescription( BreakIteratorSegmenter.class, BreakIteratorSegmenter.PARAM_WRITE_TOKEN, !hasTokens, BreakIteratorSegmenter.PARAM_WRITE_SENTENCE, !hasSentences)); pipeline.process(cas.getJCas()); } return jCas; } @Override @Transactional public void updateTimeStamp(SourceDocument aDocument, User aUser, Mode aMode) throws IOException { if (aMode.equals(Mode.CURATION)) { aDocument.setTimestamp(new Timestamp(new Date().getTime())); entityManager.merge(aDocument); } else { AnnotationDocument annotationDocument = getAnnotationDocument(aDocument, aUser); annotationDocument.setSentenceAccessed(aDocument.getSentenceAccessed()); annotationDocument.setTimestamp(new Timestamp(new Date().getTime())); annotationDocument.setState(AnnotationDocumentState.IN_PROGRESS); entityManager.merge(annotationDocument); } } @Override public String getDatabaseDriverName() { final StringBuilder sb = new StringBuilder(); Session session = entityManager.unwrap(Session.class); session.doWork(new Work() { @Override public void execute(Connection aConnection) throws SQLException { sb.append(aConnection.getMetaData().getDriverName()); } }); return sb.toString(); } @Override public int isCrowdSourceEnabled() { return crowdsourceEnabled; } @Override public File getMiraModel(AnnotationFeature aFeature, boolean aOtherLayer, SourceDocument aDocument) { if (aDocument != null) { return new File(getMiraDir(aFeature), aDocument.getId() + "- " + aDocument.getProject().getId() + "-model"); } else if (aOtherLayer) { return new File(getMiraDir(aFeature), aFeature.getId() + "-model"); } else { return new File(getMiraDir(aFeature), aFeature.getLayer().getId() + "-" + aFeature.getId() + "-model"); } } @Override public File getMiraDir(AnnotationFeature aFeature) { return new File(dir, PROJECT + aFeature.getProject().getId() + MIRA); } @Override @Transactional public void createTemplate(MiraTemplate aTemplate) { if (aTemplate.getId() == 0) { entityManager.persist(aTemplate); } else { entityManager.merge(aTemplate); } } @Override @Transactional(noRollbackFor = 
NoResultException.class) public MiraTemplate getMiraTemplate(AnnotationFeature aFeature) { return entityManager .createQuery("FROM MiraTemplate WHERE trainFeature =:trainFeature", MiraTemplate.class).setParameter("trainFeature", aFeature) .getSingleResult(); } @Override public boolean existsMiraTemplate(AnnotationFeature aFeature) { try { entityManager .createQuery("FROM MiraTemplate WHERE trainFeature =:trainFeature", MiraTemplate.class).setParameter("trainFeature", aFeature) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override public List<MiraTemplate> listMiraTemplates(Project aProject) { List<MiraTemplate> allTemplates = entityManager.createQuery( "FROM MiraTemplate ORDER BY trainFeature ASC ", MiraTemplate.class).getResultList(); List<MiraTemplate> templatesInThisProject = new ArrayList<MiraTemplate>(); for (MiraTemplate miraTemplate : allTemplates) { if (miraTemplate.getTrainFeature() != null && miraTemplate.getTrainFeature().getProject().getId() == aProject.getId()) { templatesInThisProject.add(miraTemplate); } } return templatesInThisProject; } @Override @Transactional public void removeMiraTemplate(MiraTemplate aTemplate) { try { removeAutomationStatus(getAutomationStatus(aTemplate)); } catch (NoResultException e) { // do nothing - automation was not started and no status was created for this template } entityManager.remove(aTemplate); } @Override @Transactional public void removeAutomationStatus(AutomationStatus aStatus) { entityManager.remove(aStatus); } private List<TypeSystemDescription> getProjectTypes(Project aProject) { // Create a new type system from scratch List<TypeSystemDescription> types = new ArrayList<TypeSystemDescription>(); for (AnnotationLayer type : annotationService.listAnnotationLayer(aProject)) { if (type.getType().equals(SPAN_TYPE) && !type.isBuiltIn()) { TypeSystemDescription tsd = new TypeSystemDescription_impl(); TypeDescription td = tsd.addType(type.getName(), "", CAS.TYPE_NAME_ANNOTATION); List<AnnotationFeature> features = annotationService.listAnnotationFeature(type); for (AnnotationFeature feature : features) { generateFeature(tsd, td, feature); } types.add(tsd); } else if (type.getType().equals(RELATION_TYPE) && !type.isBuiltIn()) { TypeSystemDescription tsd = new TypeSystemDescription_impl(); TypeDescription td = tsd.addType(type.getName(), "", CAS.TYPE_NAME_ANNOTATION); AnnotationLayer attachType = type.getAttachType(); td.addFeature("Dependent", "", attachType.getName()); td.addFeature("Governor", "", attachType.getName()); List<AnnotationFeature> features = annotationService.listAnnotationFeature(type); for (AnnotationFeature feature : features) { generateFeature(tsd, td, feature); } types.add(tsd); } else if (type.getType().equals(CHAIN_TYPE) && !type.isBuiltIn()) { TypeSystemDescription tsdchains = new TypeSystemDescription_impl(); TypeDescription tdChains = tsdchains.addType(type.getName() + "Chain", "", CAS.TYPE_NAME_ANNOTATION); tdChains.addFeature("first", "", type.getName() + "Link"); types.add(tsdchains); TypeSystemDescription tsdLink = new TypeSystemDescription_impl(); TypeDescription tdLink = tsdLink.addType(type.getName() + "Link", "", CAS.TYPE_NAME_ANNOTATION); tdLink.addFeature("next", "", type.getName() + "Link"); tdLink.addFeature("referenceType", "", CAS.TYPE_NAME_STRING); tdLink.addFeature("referenceRelation", "", CAS.TYPE_NAME_STRING); types.add(tsdLink); } } return types; } private void generateFeature(TypeSystemDescription aTSD, TypeDescription aTD, AnnotationFeature aFeature) { switch
(aFeature.getMultiValueMode()) { case NONE: aTD.addFeature(aFeature.getName(), "", aFeature.getType()); break; case ARRAY: { switch (aFeature.getLinkMode()) { case WITH_ROLE: { // Link type TypeDescription linkTD = aTSD.addType(aFeature.getLinkTypeName(), "", CAS.TYPE_NAME_TOP); linkTD.addFeature(aFeature.getLinkTypeRoleFeatureName(), "", CAS.TYPE_NAME_STRING); linkTD.addFeature(aFeature.getLinkTypeTargetFeatureName(), "", aFeature.getType()); // Link feature aTD.addFeature(aFeature.getName(), "", CAS.TYPE_NAME_FS_ARRAY, linkTD.getName(), false); break; } default: throw new IllegalArgumentException("Unsupported link mode [" + aFeature.getLinkMode() + "] on feature [" + aFeature.getName() + "]"); } break; } default: throw new IllegalArgumentException("Unsupported multi-value mode [" + aFeature.getMultiValueMode() + "] on feature [" + aFeature.getName() + "]"); } } @Override @Transactional public void createAutomationStatus(AutomationStatus aStatus) { entityManager.persist(aStatus); } @Override public boolean existsAutomationStatus(MiraTemplate aTemplate) { try { entityManager .createQuery("FROM AutomationStatus WHERE template =:template", AutomationStatus.class).setParameter("template", aTemplate) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override public AutomationStatus getAutomationStatus(MiraTemplate aTemplate) { return entityManager .createQuery("FROM AutomationStatus WHERE template =:template", AutomationStatus.class).setParameter("template", aTemplate) .getSingleResult(); } /** * Check if a TAB-SEP training file is in the correct format before importing */ private boolean isTabSepFileFormatCorrect(File aFile) { try (FileReader in = new FileReader(aFile)) { // ensure the underlying reader is closed again LineIterator it = new LineIterator(in); while (it.hasNext()) { String line = it.next(); if (line.trim().length() == 0) { continue; } if (line.split("\t").length != 2) { return false; } } } catch (Exception e) { return false; } return true; } /** * A helper method to add a {@link TagsetDescription} to a {@link CAS} * * @param aCas the CAS. * @param aLayer the layer. * @param aTagSetName the tagset. 
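*            (if no {@link TagsetDescription} exists yet for the given layer, a new one is created)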
*/ public static void updateCasWithTagSet(CAS aCas, String aLayer, String aTagSetName) { Type tagsetType = CasUtil.getType(aCas, TagsetDescription.class); Feature layerFeature = tagsetType.getFeatureByBaseName("layer"); Feature nameFeature = tagsetType.getFeatureByBaseName("name"); boolean tagSetModified = false; // modify the name of an existing tagset for (FeatureStructure fs : CasUtil.select(aCas, tagsetType)) { String layer = fs.getStringValue(layerFeature); String tagSetName = fs.getStringValue(nameFeature); if (layer.equals(aLayer)) { // only if the tagset name is changed if (!aTagSetName.equals(tagSetName)) { fs.setStringValue(nameFeature, aTagSetName); aCas.addFsToIndexes(fs); } tagSetModified = true; break; } } if (!tagSetModified) { FeatureStructure fs = aCas.createFS(tagsetType); fs.setStringValue(layerFeature, aLayer); fs.setStringValue(nameFeature, aTagSetName); aCas.addFsToIndexes(fs); } } @Override public List<Project> listAccessibleProjects() { List<Project> allowedProject = new ArrayList<Project>(); String username = SecurityContextHolder.getContext() .getAuthentication().getName(); User user = getUser(username); List<Project> allProjects = listProjects(); List<Authority> authorities = listAuthorities(user); // if global admin, show all projects for (Authority authority : authorities) { if (authority.getAuthority().equals("ROLE_ADMIN")) { return allProjects; } } // otherwise, only the projects the user administers for (Project project : allProjects) { if (SecurityUtil.isProjectAdmin(project, this, user)) { allowedProject.add(project); } } return allowedProject; } /** * Return true if there exists at least one annotation document in state FINISHED for this * {@link SourceDocument} * * @param aSourceDocument * the source document. * @param aUser * the user. * @param aProject * the project. * @return if a finished document exists. */ @Override public boolean existFinishedDocument( SourceDocument aSourceDocument, User aUser, Project aProject) { List<de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument> annotationDocuments = listAnnotationDocuments(aSourceDocument); boolean finishedAnnotationDocumentExist = false; for (de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument annotationDocument : annotationDocuments) { if (annotationDocument.getState().equals(AnnotationDocumentState.FINISHED)) { finishedAnnotationDocumentExist = true; break; } } return finishedAnnotationDocumentExist; } private static void writeSerializedCas(JCas aJCas, File aFile) throws IOException { try (ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(aFile))) { CASCompleteSerializer serializer = serializeCASComplete(aJCas.getCasImpl()); os.writeObject(serializer); } } private static void readSerializedCas(JCas aJCas, File aFile) throws IOException { try (ObjectInputStream is = new ObjectInputStream(new FileInputStream(aFile))) { CASCompleteSerializer serializer = (CASCompleteSerializer) is.readObject(); deserializeCASComplete(serializer, aJCas.getCasImpl()); } catch (ClassNotFoundException e) { throw new IOException(e); } } }
webanno-api-dao/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/dao/RepositoryServiceDbData.java
/******************************************************************************* * Copyright 2012 * Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology * Technische Universität Darmstadt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package de.tudarmstadt.ukp.clarin.webanno.api.dao; import static de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst.CHAIN_TYPE; import static de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst.RELATION_TYPE; import static de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst.SPAN_TYPE; import static org.apache.commons.io.IOUtils.closeQuietly; import static org.apache.commons.io.IOUtils.copyLarge; import static org.apache.commons.lang.StringUtils.isBlank; import static org.apache.uima.cas.impl.Serialization.deserializeCASComplete; import static org.apache.uima.cas.impl.Serialization.serializeCASComplete; import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngine; import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngineDescription; import static org.apache.uima.fit.pipeline.SimplePipeline.runPipeline; import java.beans.PropertyDescriptor; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.sql.Connection; import java.sql.SQLException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.annotation.Resource; import javax.persistence.EntityManager; import javax.persistence.NoResultException; import javax.persistence.PersistenceContext; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.LineIterator; import org.apache.commons.io.comparator.LastModifiedFileComparator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.log4j.FileAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.apache.uima.UIMAException; import org.apache.uima.analysis_engine.AnalysisEngine; import org.apache.uima.analysis_engine.AnalysisEngineDescription; import org.apache.uima.cas.CAS; import org.apache.uima.cas.Feature; import org.apache.uima.cas.FeatureStructure; import org.apache.uima.cas.Type; import org.apache.uima.cas.TypeSystem; import org.apache.uima.cas.impl.CASCompleteSerializer; import org.apache.uima.cas.impl.CASImpl; import 
org.apache.uima.cas.impl.Serialization; import org.apache.uima.collection.CollectionReader; import org.apache.uima.fit.factory.CollectionReaderFactory; import org.apache.uima.fit.factory.JCasFactory; import org.apache.uima.fit.factory.TypeSystemDescriptionFactory; import org.apache.uima.fit.util.CasUtil; import org.apache.uima.fit.util.JCasUtil; import org.apache.uima.jcas.JCas; import org.apache.uima.resource.metadata.TypeDescription; import org.apache.uima.resource.metadata.TypeSystemDescription; import org.apache.uima.resource.metadata.impl.TypeSystemDescription_impl; import org.apache.uima.util.CasCreationUtils; import org.hibernate.Session; import org.hibernate.jdbc.Work; import org.springframework.beans.BeanWrapper; import org.springframework.beans.PropertyAccessorFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.dao.DataRetrievalFailureException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.transaction.annotation.Transactional; import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService; import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService; import de.tudarmstadt.ukp.clarin.webanno.api.UserDao; import de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst; import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument; import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState; import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationFeature; import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer; import de.tudarmstadt.ukp.clarin.webanno.model.Authority; import de.tudarmstadt.ukp.clarin.webanno.model.AutomationStatus; import de.tudarmstadt.ukp.clarin.webanno.model.CrowdJob; import de.tudarmstadt.ukp.clarin.webanno.model.MiraTemplate; import de.tudarmstadt.ukp.clarin.webanno.model.Mode; import de.tudarmstadt.ukp.clarin.webanno.model.PermissionLevel; import de.tudarmstadt.ukp.clarin.webanno.model.Project; import de.tudarmstadt.ukp.clarin.webanno.model.ProjectPermission; import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocument; import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocumentStateTransition; import de.tudarmstadt.ukp.clarin.webanno.model.TagSet; import de.tudarmstadt.ukp.clarin.webanno.model.User; import de.tudarmstadt.ukp.dkpro.core.api.io.JCasFileWriter_ImplBase; import de.tudarmstadt.ukp.dkpro.core.api.io.ResourceCollectionReaderBase; import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.DocumentMetaData; import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.TagsetDescription; import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence; import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token; import de.tudarmstadt.ukp.dkpro.core.tokit.BreakIteratorSegmenter; /** * Implementation of methods defined in the {@link RepositoryService} interface * * @author Seid Muhie Yimam * */ public class RepositoryServiceDbData implements RepositoryService { private final Log log = LogFactory.getLog(getClass()); public static Logger createLog(Project aProject, String aUser) throws IOException { Logger logger = Logger.getLogger(RepositoryServiceDbData.class); String targetLog = dir.getAbsolutePath() + PROJECT + "project-" + aProject.getId() + ".log"; FileAppender apndr = new FileAppender(new PatternLayout("%d [" + aUser + "] %m%n"), targetLog, true); logger.addAppender(apndr); logger.setLevel(Level.ALL); return logger; } @Resource(name = "annotationService") private 
AnnotationService annotationService; @Resource(name = "userRepository") private UserDao userRepository; @Value(value = "${backup.keep.time}") private long backupKeepTime; @Value(value = "${crowdsource.enabled}") private int crowdsourceEnabled; @Value(value = "${backup.interval}") private long backupInterval; @Value(value = "${backup.keep.number}") private int backupKeepNumber; @Resource(name = "formats") private Properties readWriteFileFormats; @Resource(name = "helpFile") private Properties helpProperiesFile; private static final String PROJECT = "/project/"; private static final String MIRA = "/mira/"; private static final String MIRA_TEMPLATE = "/template/"; private static final String DOCUMENT = "/document/"; private static final String SOURCE = "/source"; private static final String GUIDELINE = "/guideline/"; private static final String ANNOTATION = "/annotation"; private static final String SETTINGS = "/settings/"; private static final String META_INF = "/META-INF/"; private static final String TEMPLATE = "/crowdtemplates/"; private static final String HELP_FILE = "/help.properties"; @PersistenceContext private EntityManager entityManager; private static File dir; // The annotation preference properties file name String annotationPreferencePropertiesFileName; private final Object lock = new Object(); public RepositoryServiceDbData() { } @Override @Transactional public void createAnnotationDocument(AnnotationDocument aAnnotationDocument) throws IOException { if (aAnnotationDocument.getId() == 0) { entityManager.persist(aAnnotationDocument); } else { entityManager.merge(aAnnotationDocument); } createLog(aAnnotationDocument.getProject(), aAnnotationDocument.getUser()).info( " User [" + aAnnotationDocument.getUser() + "] creates annotation document for source document [" + aAnnotationDocument.getDocument().getId() + "] in project [" + aAnnotationDocument.getProject().getId() + "] with id [" + aAnnotationDocument.getId() + "]"); createLog(aAnnotationDocument.getProject(), aAnnotationDocument.getUser()) .removeAllAppenders(); } /** * Renames a file. * * @param aFrom * the file to rename. * @param aTo * the target file name. * @throws IOException * if the file cannot be renamed. * @return the target file. */ private File renameFile(File aFrom, File aTo) throws IOException { if (!aFrom.renameTo(aTo)) { throw new IOException("Cannot rename file [" + aFrom + "] to [" + aTo + "]"); } // We are not sure if File is mutable. This makes sure we get a new file // in any case. return new File(aTo.getPath()); } /** * Get the folder where the annotations are stored. Creates the folder if necessary. * * @throws IOException * if the folder cannot be created. 
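* @return the annotation folder for the given source document.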
*/ private File getAnnotationFolder(SourceDocument aDocument) throws IOException { File annotationFolder = new File(dir, PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + ANNOTATION); FileUtils.forceMkdir(annotationFolder); return annotationFolder; } @Override public File getDocumentFolder(SourceDocument aDocument) throws IOException { File sourceDocFolder = new File(dir, PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE); FileUtils.forceMkdir(sourceDocFolder); return sourceDocFolder; } @Override @Transactional public void createAnnotationDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) throws IOException { createAnnotationContent(aDocument, aJcas, aUser.getUsername(), aUser); } @Override @Transactional public void createProject(Project aProject, User aUser) throws IOException { entityManager.persist(aProject); String path = dir.getAbsolutePath() + PROJECT + aProject.getId(); FileUtils.forceMkdir(new File(path)); createLog(aProject, aUser.getUsername()) .info(" Created Project [" + aProject.getName() + "] with ID [" + aProject.getId() + "]"); createLog(aProject, aUser.getUsername()).removeAllAppenders(); } @Override @Transactional public void createCrowdJob(CrowdJob aCrowdJob) throws IOException { if (aCrowdJob.getId() == 0) { entityManager.persist(aCrowdJob); } else { entityManager.merge(aCrowdJob); } createLog(aCrowdJob.getProject(), "crowd_user").info( " Created crowd job from project [" + aCrowdJob.getProject() + "] with ID [" + aCrowdJob.getId() + "]"); createLog(aCrowdJob.getProject(), "crowd_user").removeAllAppenders(); } @Override @Transactional public void createProjectPermission(ProjectPermission aPermission) throws IOException { entityManager.persist(aPermission); createLog(aPermission.getProject(), aPermission.getUser()).info( " New permission created on project [" + aPermission.getProject().getName() + "] for user [" + aPermission.getUser() + "] with permission [" + aPermission.getLevel() + "]"); createLog(aPermission.getProject(), aPermission.getUser()).removeAllAppenders(); } @Override @Transactional public void createSourceDocument(SourceDocument aDocument, User aUser) throws IOException { if (aDocument.getId() == 0) { entityManager.persist(aDocument); } else { entityManager.merge(aDocument); } } @Override @Transactional public boolean existsAnnotationDocument(SourceDocument aDocument, User aUser) { try { entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project " + " AND document = :document AND user = :user", AnnotationDocument.class) .setParameter("project", aDocument.getProject()) .setParameter("document", aDocument).setParameter("user", aUser.getUsername()) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsCorrectionDocument(SourceDocument aDocument) { try { getCorrectionDocumentContent(aDocument); return true; } catch (Exception ex) { return false; } } @Override @Transactional public boolean existsProject(String aName) { try { entityManager.createQuery("FROM Project WHERE name = :name", Project.class) .setParameter("name", aName).getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsAnnotationDocumentContent(SourceDocument aSourceDocument, String aUsername) throws IOException { return new File(getAnnotationFolder(aSourceDocument), aUsername + ".ser").exists(); } @Override 
@Transactional(noRollbackFor = NoResultException.class) public boolean existsAutomatedDocument(SourceDocument aSourceDocument) { try { getCorrectionDocumentContent(aSourceDocument); return true; } catch (UIMAException | DataRetrievalFailureException | ClassNotFoundException | IOException e) { return false; } } @Override @Transactional public boolean existsCrowdJob(String aName) { try { entityManager.createQuery("FROM CrowdJob WHERE name = :name", CrowdJob.class) .setParameter("name", aName).getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override public boolean existsProjectPermission(User aUser, Project aProject) { List<ProjectPermission> projectPermissions = entityManager .createQuery( "FROM ProjectPermission WHERE user = :user AND " + "project =:project", ProjectPermission.class).setParameter("user", aUser.getUsername()) .setParameter("project", aProject).getResultList(); // true if at least one permission level exists return !projectPermissions.isEmpty(); } @Override @Transactional public boolean existsProjectPermissionLevel(User aUser, Project aProject, PermissionLevel aLevel) { try { entityManager .createQuery( "FROM ProjectPermission WHERE user = :user AND " + "project =:project AND level =:level", ProjectPermission.class).setParameter("user", aUser.getUsername()) .setParameter("project", aProject).setParameter("level", aLevel) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsSourceDocument(Project aProject, String aFileName) { try { entityManager .createQuery( "FROM SourceDocument WHERE project = :project AND " + "name =:name ", SourceDocument.class).setParameter("project", aProject) .setParameter("name", aFileName).getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override @Transactional public boolean existsProjectTimeStamp(Project aProject, String aUsername) { try { return getProjectTimeStamp(aProject, aUsername) != null; } catch (NoResultException ex) { return false; } } @Override public boolean existsProjectTimeStamp(Project aProject) { try { return getProjectTimeStamp(aProject) != null; } catch (NoResultException ex) { return false; } } @Override public boolean existsUser(String username) { try { getUser(username); return true; } catch (NoResultException e) { return false; } } /** * A new directory is created using UUID so that every exported file will reside in its own * directory. This is useful as the written file can have multiple extensions based on the * Writer class used. */ @SuppressWarnings({ "rawtypes", "unchecked" }) @Override @Transactional public File exportAnnotationDocument(SourceDocument aDocument, String aUser, Class aWriter, String aFileName, Mode aMode) throws UIMAException, IOException, ClassNotFoundException { return exportAnnotationDocument(aDocument, aUser, aWriter, aFileName, aMode, true); } /** * A new directory is created using UUID so that every exported file will reside in its own * directory. This is useful as the written file can have multiple extensions based on the * Writer class used. 
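* @return the exported file; when the writer produces more than one file, they are zipped into a single archive.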
*/ @SuppressWarnings({ "rawtypes", "unchecked" }) @Override @Transactional public File exportAnnotationDocument(SourceDocument aDocument, String aUser, Class aWriter, String aFileName, Mode aMode, boolean aStripExtension) throws UIMAException, IOException, ClassNotFoundException { File annotationFolder = getAnnotationFolder(aDocument); String serializedCasFileName; // For CORRECTION, export the corrected document of the logged-in user // (CORRECTION_USER.ser is the automated result displayed to the user for correction, // not the final result). For AUTOMATION, export either the corrected (annotated) // document or the automated document. if (aMode.equals(Mode.ANNOTATION) || aMode.equals(Mode.AUTOMATION) || aMode.equals(Mode.CORRECTION)) { serializedCasFileName = aUser + ".ser"; } // The merge result will be exported else { serializedCasFileName = WebAnnoConst.CURATION_USER + ".ser"; } // Read file File serializedCasFile = new File(annotationFolder, serializedCasFileName); if (!serializedCasFile.exists()) { throw new FileNotFoundException("Annotation file [" + serializedCasFileName + "] not found in [" + annotationFolder + "]"); } CAS cas = CasCreationUtils.createCas((TypeSystemDescription) null, null, null); readSerializedCas(cas.getJCas(), serializedCasFile); // Update the type system of the CAS upgrade(cas, aDocument.getProject()); // Update the source file name in case it is changed for some reason Project project = aDocument.getProject(); File currentDocumentUri = new File(dir.getAbsolutePath() + PROJECT + project.getId() + DOCUMENT + aDocument.getId() + SOURCE); DocumentMetaData documentMetadata = DocumentMetaData.get(cas.getJCas()); documentMetadata.setDocumentUri(new File(currentDocumentUri, aFileName).toURI().toURL() .toExternalForm()); documentMetadata.setDocumentBaseUri(currentDocumentUri.toURI().toURL().toExternalForm()); documentMetadata.setCollectionId(currentDocumentUri.toURI().toURL().toExternalForm()); // update with the correct tagset name List<AnnotationFeature> features = annotationService.listAnnotationFeature(project); for (AnnotationFeature feature : features) { TagSet tagSet = feature.getTagset(); if (tagSet == null) { continue; } else if (!feature.getLayer().getType().equals(WebAnnoConst.CHAIN_TYPE)) { updateCasWithTagSet(cas, feature.getLayer().getName(), tagSet.getName()); } } File exportTempDir = File.createTempFile("webanno", "export"); exportTempDir.delete(); exportTempDir.mkdirs(); AnalysisEngineDescription writer; if (aWriter.getName() .equals("de.tudarmstadt.ukp.clarin.webanno.tsv.WebannoCustomTsvWriter")) { List<AnnotationLayer> layers = annotationService .listAnnotationLayer(aDocument.getProject()); List<String> multipleSpans = new ArrayList<String>(); for (AnnotationLayer layer : layers) { if (layer.isMultipleTokens()) { multipleSpans.add(layer.getName()); } } writer = createEngineDescription(aWriter, JCasFileWriter_ImplBase.PARAM_TARGET_LOCATION, exportTempDir, JCasFileWriter_ImplBase.PARAM_STRIP_EXTENSION, aStripExtension, "multipleSpans", multipleSpans); } else { writer = createEngineDescription(aWriter, JCasFileWriter_ImplBase.PARAM_TARGET_LOCATION, exportTempDir, JCasFileWriter_ImplBase.PARAM_STRIP_EXTENSION, aStripExtension); } runPipeline(cas, writer); createLog(project, aUser).info( " Exported annotation file [" + aDocument.getName() + "] with ID [" +
aDocument.getId() + "] for user [" + aUser + "] from project [" + project.getId() + "]"); createLog(project, aUser).removeAllAppenders(); File exportFile; if (exportTempDir.listFiles().length > 1) { exportFile = new File(exportTempDir.getAbsolutePath() + ".zip"); try { ZipUtils.zipFolder(exportTempDir, exportFile); } catch (Exception e) { createLog(project, aUser).info("Unable to create zip File"); } } else { exportFile = new File(exportTempDir.getParent(), exportTempDir.listFiles()[0].getName()); FileUtils.copyFile(exportTempDir.listFiles()[0], exportFile); } FileUtils.forceDelete(exportTempDir); return exportFile; } @Override public File exportSourceDocument(SourceDocument aDocument) { File documentUri = new File(dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE); return new File(documentUri, aDocument.getName()); } @Override public File exportserializedCas(SourceDocument aDocument, String aUser) { File documentUri = new File(dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + ANNOTATION); return new File(documentUri, aUser + ".ser"); } @Override public File exportProjectLog(Project aProject) { return new File(dir.getAbsolutePath() + PROJECT + "project-" + aProject.getId() + ".log"); } @Override public File exportGuidelines(Project aProject) { return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE); } @Override public File exportProjectMetaInf(Project aProject) { return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + META_INF); } @Override @Transactional(noRollbackFor = NoResultException.class) public AnnotationDocument getAnnotationDocument(SourceDocument aDocument, User aUser) { return entityManager .createQuery( "FROM AnnotationDocument WHERE document = :document AND " + "user =:user" + " AND project = :project", AnnotationDocument.class) .setParameter("document", aDocument).setParameter("user", aUser.getUsername()) .setParameter("project", aDocument.getProject()).getSingleResult(); } @Override @Transactional public JCas getAnnotationDocumentContent(AnnotationDocument aAnnotationDocument) throws IOException, UIMAException, ClassNotFoundException { return getAnnotationContent(aAnnotationDocument.getDocument(), aAnnotationDocument.getUser()); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<Authority> listAuthorities(User aUser) { return entityManager .createQuery("FROM Authority where username =:username", Authority.class) .setParameter("username", aUser).getResultList(); } @Override public File getDir() { return dir; } @Override public File getGuideline(Project aProject, String aFilename) { return new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE + aFilename); } @Override public File getTemplate(String fileName) throws IOException { FileUtils.forceMkdir(new File(dir.getAbsolutePath() + TEMPLATE)); return new File(dir.getAbsolutePath() + TEMPLATE, fileName); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<ProjectPermission> listProjectPermisionLevel(User aUser, Project aProject) { return entityManager .createQuery("FROM ProjectPermission WHERE user =:user AND " + "project =:project", ProjectPermission.class).setParameter("user", aUser.getUsername()) .setParameter("project", aProject).getResultList(); } @Override public List<User> listProjectUsersWithPermissions(Project aProject) { List<String> usernames = entityManager .createQuery( "SELECT DISTINCT user FROM 
ProjectPermission WHERE " + "project =:project ORDER BY user ASC", String.class) .setParameter("project", aProject).getResultList(); List<User> users = new ArrayList<User>(); for (String username : usernames) { if (existsUser(username)) { users.add(getUser(username)); } } return users; } @Override public List<User> listProjectUsersWithPermissions(Project aProject, PermissionLevel aPermissionLevel) { List<String> usernames = entityManager .createQuery( "SELECT DISTINCT user FROM ProjectPermission WHERE " + "project =:project AND level =:level ORDER BY user ASC", String.class).setParameter("project", aProject) .setParameter("level", aPermissionLevel).getResultList(); List<User> users = new ArrayList<User>(); for (String username : usernames) { if (existsUser(username)) { users.add(getUser(username)); } } return users; } @Override @Transactional public Project getProject(String aName) { return entityManager.createQuery("FROM Project WHERE name = :name", Project.class) .setParameter("name", aName).getSingleResult(); } @Override @Transactional public CrowdJob getCrowdJob(String aName, Project aProject) { return entityManager .createQuery("FROM CrowdJob WHERE name = :name AND project = :project", CrowdJob.class).setParameter("name", aName) .setParameter("project", aProject).getSingleResult(); } @Override public Project getProject(long aId) { return entityManager.createQuery("FROM Project WHERE id = :id", Project.class) .setParameter("id", aId).getSingleResult(); } @Override public void createGuideline(Project aProject, File aContent, String aFileName, String aUsername) throws IOException { String guidelinePath = dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE; FileUtils.forceMkdir(new File(guidelinePath)); copyLarge(new FileInputStream(aContent), new FileOutputStream(new File(guidelinePath + aFileName))); createLog(aProject, aUsername).info( " Created guideline file [" + aFileName + "] for project [" + aProject.getName() + "] with ID [" + aProject.getId() + "]"); createLog(aProject, aUsername).removeAllAppenders(); } @Override public void createTemplate(Project aProject, File aContent, String aFileName, String aUsername) throws IOException { String templatePath = dir.getAbsolutePath() + PROJECT + aProject.getId() + MIRA + MIRA_TEMPLATE; FileUtils.forceMkdir(new File(templatePath)); copyLarge(new FileInputStream(aContent), new FileOutputStream(new File(templatePath + aFileName))); createLog(aProject, aUsername).info( " Created template file [" + aFileName + "] for project [" + aProject.getName() + "] with ID [" + aProject.getId() + "]"); createLog(aProject, aUsername).removeAllAppenders(); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<ProjectPermission> getProjectPermisions(Project aProject) { return entityManager .createQuery("FROM ProjectPermission WHERE project =:project", ProjectPermission.class).setParameter("project", aProject).getResultList(); } @Override @Transactional(noRollbackFor = NoResultException.class) public SourceDocument getSourceDocument(Project aProject, String aDocumentName) { return entityManager .createQuery("FROM SourceDocument WHERE name = :name AND project =:project", SourceDocument.class).setParameter("name", aDocumentName) .setParameter("project", aProject).getSingleResult(); } @Override @Transactional public File getSourceDocumentContent(SourceDocument aDocument) { String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE; return new File(path + "/" +
aDocument.getName()); } @Override @Transactional public Date getProjectTimeStamp(Project aProject, String aUsername) { return entityManager .createQuery( "SELECT max(timestamp) FROM AnnotationDocument WHERE project = :project " + " AND user = :user", Date.class) .setParameter("project", aProject).setParameter("user", aUsername) .getSingleResult(); } @Override public Date getProjectTimeStamp(Project aProject) { return entityManager .createQuery("SELECT max(timestamp) FROM SourceDocument WHERE project = :project", Date.class).setParameter("project", aProject).getSingleResult(); } @Override @Transactional(noRollbackFor = NoResultException.class) public User getUser(String aUsername) { return entityManager.createQuery("FROM User WHERE username =:username", User.class) .setParameter("username", aUsername).getSingleResult(); } @Override @Transactional(noRollbackFor = NoResultException.class) public boolean existsFinishedAnnotation(SourceDocument aDocument) { List<AnnotationDocument> annotationDocuments = entityManager .createQuery("FROM AnnotationDocument WHERE document = :document", AnnotationDocument.class).setParameter("document", aDocument) .getResultList(); for (AnnotationDocument annotationDocument : annotationDocuments) { if (annotationDocument.getState().equals(AnnotationDocumentState.FINISHED)) { return true; } } return false; } @Override @Transactional(noRollbackFor = NoResultException.class) public boolean isAnnotationFinished(SourceDocument aDocument, User aUser) { try { AnnotationDocument annotationDocument = entityManager .createQuery( "FROM AnnotationDocument WHERE document = :document AND " + "user =:user", AnnotationDocument.class) .setParameter("document", aDocument).setParameter("user", aUser.getUsername()) .getSingleResult(); return annotationDocument.getState().equals(AnnotationDocumentState.FINISHED); } // The user has not even started annotating catch (NoResultException e) { return false; } } @Override @Transactional(noRollbackFor = NoResultException.class) public List<AnnotationDocument> listAnnotationDocuments(SourceDocument aDocument) { // Get all annotators in the project List<String> users = getAllAnnotators(aDocument.getProject()); // Bail out already. HQL doesn't seem to like queries with an empty // parameter right of "in" if (users.isEmpty()) { return new ArrayList<AnnotationDocument>(); } return entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND document = :document " + "AND user in (:users)", AnnotationDocument.class) .setParameter("project", aDocument.getProject()).setParameter("users", users) .setParameter("document", aDocument).getResultList(); } @Override public int numberOfExpectedAnnotationDocuments(Project aProject) { // Get all annotators in the project List<String> users = getAllAnnotators(aProject); // Bail out already. 
HQL doesn't seem to like queries with an empty // parameter right of "in" if (users.isEmpty()) { return 0; } int ignored = 0; List<AnnotationDocument> annotationDocuments = entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND user in (:users)", AnnotationDocument.class).setParameter("project", aProject) .setParameter("users", users).getResultList(); for (AnnotationDocument annotationDocument : annotationDocuments) { if (annotationDocument.getState().equals(AnnotationDocumentState.IGNORE)) { ignored++; } } return listSourceDocuments(aProject).size() * users.size() - ignored; } @Override public List<AnnotationDocument> listFinishedAnnotationDocuments(Project aProject) { // Get all annotators in the project List<String> users = getAllAnnotators(aProject); // Bail out already. HQL doesn't seem to like queries with an empty // parameter right of "in" if (users.isEmpty()) { return new ArrayList<AnnotationDocument>(); } return entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND state = :state" + " AND user in (:users)", AnnotationDocument.class) .setParameter("project", aProject).setParameter("users", users) .setParameter("state", AnnotationDocumentState.FINISHED).getResultList(); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<AnnotationDocument> listAllAnnotationDocuments(SourceDocument aSourceDocument) { return entityManager .createQuery( "FROM AnnotationDocument WHERE project = :project AND document = :document", AnnotationDocument.class) .setParameter("project", aSourceDocument.getProject()) .setParameter("document", aSourceDocument).getResultList(); } @Override public List<String> listGuidelines(Project aProject) { // list all guideline files File[] files = new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE) .listFiles(); // Name of the guideline files List<String> annotationGuidelineFiles = new ArrayList<String>(); if (files != null) { for (File file : files) { annotationGuidelineFiles.add(file.getName()); } } return annotationGuidelineFiles; } @Override public List<String> listTemplates(Project aProject) { // list all MIRA template files File[] files = new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + MIRA + MIRA_TEMPLATE).listFiles(); // Name of the MIRA template files List<String> templateFiles = new ArrayList<String>(); if (files != null) { for (File file : files) { templateFiles.add(file.getName()); } } return templateFiles; } @Override @Transactional public List<Project> listProjects() { return entityManager.createQuery("FROM Project ORDER BY name ASC ", Project.class) .getResultList(); } @Override @Transactional public List<CrowdJob> listCrowdJobs() { return entityManager.createQuery("FROM CrowdJob", CrowdJob.class).getResultList(); } @Override @Transactional public List<CrowdJob> listCrowdJobs(Project aProject) { return entityManager.createQuery("FROM CrowdJob where project =:project", CrowdJob.class) .setParameter("project", aProject).getResultList(); } @Override @Transactional(noRollbackFor = NoResultException.class) public List<SourceDocument> listSourceDocuments(Project aProject) { List<SourceDocument> sourceDocuments = entityManager .createQuery("FROM SourceDocument where project =:project", SourceDocument.class) .setParameter("project", aProject).getResultList(); List<SourceDocument> tabSepDocuments = new ArrayList<SourceDocument>(); for (SourceDocument sourceDocument : sourceDocuments) { if (sourceDocument.getFormat().equals(WebAnnoConst.TAB_SEP)) { 
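// collect TAB-SEP documents here so they can be excluded from the regular source documents below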
tabSepDocuments.add(sourceDocument); } } sourceDocuments.removeAll(tabSepDocuments); return sourceDocuments; } @Override @Transactional(noRollbackFor = NoResultException.class) public List<SourceDocument> listTabSepDocuments(Project aProject) { List<SourceDocument> sourceDocuments = entityManager .createQuery("FROM SourceDocument where project =:project", SourceDocument.class) .setParameter("project", aProject).getResultList(); List<SourceDocument> tabSepDocuments = new ArrayList<SourceDocument>(); for (SourceDocument sourceDocument : sourceDocuments) { if (sourceDocument.getFormat().equals(WebAnnoConst.TAB_SEP)) { tabSepDocuments.add(sourceDocument); } } return tabSepDocuments; } @Override @Transactional public List<User> listUsers() { return entityManager.createQuery("FROM User", User.class).getResultList(); } @Override public Properties loadUserSettings(String aUsername, Project aProject) throws FileNotFoundException, IOException { Properties property = new Properties(); property.load(new FileInputStream(new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + SETTINGS + aUsername + "/" + annotationPreferencePropertiesFileName))); return property; } @Override public Properties loadHelpContents() throws FileNotFoundException, IOException { if (new File(dir.getAbsolutePath() + HELP_FILE).exists()) { Properties property = new Properties(); property.load(new FileInputStream(new File(dir.getAbsolutePath() + HELP_FILE))); return property; } else { return helpProperiesFile; } } @Override @Transactional public void removeProject(Project aProject, User aUser) throws IOException { // remove, if exists, a crowdsource job created from this project for (CrowdJob crowdJob : listCrowdJobs(aProject)) { removeCrowdJob(crowdJob); } for (SourceDocument document : listSourceDocuments(aProject)) { removeSourceDocument(document, aUser); } for (SourceDocument document : listTabSepDocuments(aProject)) { removeSourceDocument(document, aUser); } for (MiraTemplate template : listMiraTemplates(aProject)) { removeMiraTemplate(template); } for (AnnotationFeature feature : annotationService.listAnnotationFeature(aProject)) { annotationService.removeAnnotationFeature(feature); } // remove the layers too for (AnnotationLayer layer : annotationService.listAnnotationLayer(aProject)) { annotationService.removeAnnotationLayer(layer); } for (TagSet tagSet : annotationService.listTagSets(aProject)) { annotationService.removeTagSet(tagSet); } // remove the project directory from the file system String path = dir.getAbsolutePath() + PROJECT + aProject.getId(); try { FileUtils.deleteDirectory(new File(path)); } catch (FileNotFoundException e) { createLog(aProject, aUser.getUsername()).warn( "Project directory to be deleted was not found: [" + path + "]. 
Ignoring."); } for (ProjectPermission permisions : getProjectPermisions(aProject)) { entityManager.remove(permisions); } // remove metadata from DB entityManager.remove(aProject); createLog(aProject, aUser.getUsername()).info( " Removed Project [" + aProject.getName() + "] with ID [" + aProject.getId() + "]"); createLog(aProject, aUser.getUsername()).removeAllAppenders(); } @Override @Transactional public void removeCrowdJob(CrowdJob crowdProject) { entityManager.remove(entityManager.merge(crowdProject)); } @Override public void removeGuideline(Project aProject, String aFileName, String username) throws IOException { FileUtils.forceDelete(new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + GUIDELINE + aFileName)); createLog(aProject, username).info( " Removed Guideline file from [" + aProject.getName() + "] with ID [" + aProject.getId() + "]"); createLog(aProject, username).removeAllAppenders(); } @Override public void removeTemplate(Project aProject, String aFileName, String username) throws IOException { FileUtils.forceDelete(new File(dir.getAbsolutePath() + PROJECT + aProject.getId() + MIRA + MIRA_TEMPLATE + aFileName)); createLog(aProject, username).info( " Removed Template file from [" + aProject.getName() + "] with ID [" + aProject.getId() + "]"); createLog(aProject, username).removeAllAppenders(); } @Override public void removeCurationDocumentContent(SourceDocument aSourceDocument, String aUsername) throws IOException { if (new File(getAnnotationFolder(aSourceDocument), WebAnnoConst.CURATION_USER + ".ser") .exists()) { FileUtils.forceDelete(new File(getAnnotationFolder(aSourceDocument), WebAnnoConst.CURATION_USER + ".ser")); createLog(aSourceDocument.getProject(), aUsername).info( " Removed Curated document from project [" + aSourceDocument.getProject() + "] for the source document [" + aSourceDocument.getId()); createLog(aSourceDocument.getProject(), aUsername).removeAllAppenders(); } } @Override @Transactional public void removeProjectPermission(ProjectPermission projectPermission) throws IOException { entityManager.remove(projectPermission); createLog(projectPermission.getProject(), projectPermission.getUser()).info( " Removed Project Permission [" + projectPermission.getLevel() + "] for the USer [" + projectPermission.getUser() + "] From project [" + projectPermission.getProject().getId() + "]"); createLog(projectPermission.getProject(), projectPermission.getUser()).removeAllAppenders(); } @Override @Transactional public void removeSourceDocument(SourceDocument aDocument, User aUser) throws IOException { for (AnnotationDocument annotationDocument : listAllAnnotationDocuments(aDocument)) { removeAnnotationDocument(annotationDocument); } // remove it from the crowd job, if it belongs already for (CrowdJob crowdJob : listCrowdJobs(aDocument.getProject())) { if (crowdJob.getDocuments().contains(aDocument)) { crowdJob.getDocuments().remove(aDocument); entityManager.persist(crowdJob); } } entityManager.remove(aDocument); String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId(); // remove from file both source and related annotation file if (new File(path).exists()) { FileUtils.forceDelete(new File(path)); } createLog(aDocument.getProject(), aUser.getUsername()).info( " Removed Document [" + aDocument.getName() + "] with ID [" + aDocument.getId() + "] from Project [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders(); } @Override @Transactional public void 
removeAnnotationDocument(AnnotationDocument aAnnotationDocument) { entityManager.remove(aAnnotationDocument); } public void setDir(File aDir) { dir = aDir; } @Override public void savePropertiesFile(Project aProject, InputStream aIs, String aFileName) throws IOException { String path = dir.getAbsolutePath() + PROJECT + aProject.getId() + "/" + FilenameUtils.getFullPath(aFileName); FileUtils.forceMkdir(new File(path)); File newTcfFile = new File(path, FilenameUtils.getName(aFileName)); OutputStream os = null; try { os = new FileOutputStream(newTcfFile); copyLarge(aIs, os); } finally { closeQuietly(os); closeQuietly(aIs); } } @Override public <T> void saveUserSettings(String aUsername, Project aProject, Mode aSubject, T aConfigurationObject) throws IOException { BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(aConfigurationObject); Properties property = new Properties(); for (PropertyDescriptor value : wrapper.getPropertyDescriptors()) { if (wrapper.getPropertyValue(value.getName()) == null) { continue; } property.setProperty(aSubject + "." + value.getName(), wrapper.getPropertyValue(value.getName()).toString()); } String propertiesPath = dir.getAbsolutePath() + PROJECT + aProject.getId() + SETTINGS + aUsername; // append existing preferences for the other mode if (new File(propertiesPath, annotationPreferencePropertiesFileName).exists()) { // aSubject = aSubject.equals(Mode.ANNOTATION) ? Mode.CURATION : // Mode.ANNOTATION; for (Entry<Object, Object> entry : loadUserSettings(aUsername, aProject).entrySet()) { String key = entry.getKey().toString(); // Maintain other Modes of annotations confs than this one if (!key.substring(0, key.indexOf(".")).equals(aSubject.toString())) { property.put(entry.getKey(), entry.getValue()); } } } FileUtils .forceDeleteOnExit(new File(propertiesPath, annotationPreferencePropertiesFileName)); FileUtils.forceMkdir(new File(propertiesPath)); property.store(new FileOutputStream(new File(propertiesPath, annotationPreferencePropertiesFileName)), null); createLog(aProject, aUsername).info( " Saved preferences file [" + annotationPreferencePropertiesFileName + "] for project [" + aProject.getName() + "] with ID [" + aProject.getId() + "] to location: [" + propertiesPath + "]"); createLog(aProject, aUsername).removeAllAppenders(); } @Override public <T> void saveHelpContents(T aConfigurationObject) throws IOException { BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(aConfigurationObject); Properties property = new Properties(); for (PropertyDescriptor value : wrapper.getPropertyDescriptors()) { if (wrapper.getPropertyValue(value.getName()) == null) { continue; } property.setProperty(value.getName(), wrapper.getPropertyValue(value.getName()) .toString()); } File helpFile = new File(dir.getAbsolutePath() + HELP_FILE); if (helpFile.exists()) { FileUtils.forceDeleteOnExit(helpFile); } else { helpFile.createNewFile(); } property.store(new FileOutputStream(helpFile), null); } @Override @Transactional public void uploadSourceDocument(File aFile, SourceDocument aDocument, User aUser) throws IOException { try { if (aDocument.getFormat().equals(WebAnnoConst.TAB_SEP)) { if (!isTabSepFileFormatCorrect(aFile)) { removeSourceDocument(aDocument, aUser); throw new IOException( "This TAB-SEP file is not in correct format. 
It should have two columns separated by TAB!"); } } else { convertSourceDocumentToCas(aFile, getReadableFormats().get(aDocument.getFormat()), aDocument); } } catch (IOException e) { throw e; } catch (Exception e) { removeSourceDocument(aDocument, aUser); throw new IOException(e.getMessage(), e); } String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE; FileUtils.forceMkdir(new File(path)); File newTcfFile = new File(path, aDocument.getName()); InputStream is = null; OutputStream os = null; try { os = new FileOutputStream(newTcfFile); is = new FileInputStream(aFile); copyLarge(is, os); } finally { closeQuietly(os); closeQuietly(is); } createLog(aDocument.getProject(), aUser.getUsername()).info( " Imported file [" + aDocument.getName() + "] with ID [" + aDocument.getId() + "] to Project [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders(); } @Override @Transactional public void uploadSourceDocument(InputStream aIs, SourceDocument aDocument, User aUser) throws IOException { String path = dir.getAbsolutePath() + PROJECT + aDocument.getProject().getId() + DOCUMENT + aDocument.getId() + SOURCE; FileUtils.forceMkdir(new File(path)); File newTcfFile = new File(path, aDocument.getName()); OutputStream os = null; try { os = new FileOutputStream(newTcfFile); copyLarge(aIs, os); } finally { closeQuietly(os); closeQuietly(aIs); } createLog(aDocument.getProject(), aUser.getUsername()).info( " Imported file [" + aDocument.getName() + "] with ID [" + aDocument.getId() + "] to Project [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders(); } @Override public List<String> getReadableFormatLabels() throws ClassNotFoundException { List<String> readableFormats = new ArrayList<String>(); for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { String readerLabel = key.substring(0, key.lastIndexOf(".label")); if (!isBlank(readWriteFileFormats.getProperty(readerLabel + ".reader"))) { readableFormats.add(readWriteFileFormats.getProperty(key)); } } } Collections.sort(readableFormats); return readableFormats; } @Override public String getReadableFormatId(String aLabel) throws ClassNotFoundException { String readableFormat = ""; for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { if (readWriteFileFormats.getProperty(key).equals(aLabel)) { readableFormat = key.substring(0, key.lastIndexOf(".label")); break; } } } return readableFormat; } @SuppressWarnings("rawtypes") @Override public Map<String, Class> getReadableFormats() throws ClassNotFoundException { Map<String, Class> readableFormats = new HashMap<String, Class>(); for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { String readerLabel = key.substring(0, key.lastIndexOf(".label")); if (!isBlank(readWriteFileFormats.getProperty(readerLabel + ".reader"))) { readableFormats.put(readerLabel, Class.forName(readWriteFileFormats .getProperty(readerLabel + ".reader"))); } } } return readableFormats; } @Override public List<String> getWritableFormatLabels() throws ClassNotFoundException { List<String> writableFormats = new ArrayList<String>(); for (String key : readWriteFileFormats.stringPropertyNames()) { if 
(key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { String writerLabel = key.substring(0, key.lastIndexOf(".label")); if (!isBlank(readWriteFileFormats.getProperty(writerLabel + ".writer"))) { writableFormats.add(readWriteFileFormats.getProperty(key)); } } } Collections.sort(writableFormats); return writableFormats; } @Override public String getWritableFormatId(String aLabel) throws ClassNotFoundException { String writableFormat = ""; for (String key : readWriteFileFormats.stringPropertyNames()) { if (key.contains(".label") && !isBlank(readWriteFileFormats.getProperty(key))) { if (readWriteFileFormats.getProperty(key).equals(aLabel)) { writableFormat = key.substring(0, key.lastIndexOf(".label")); break; } } } return writableFormat; } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public Map<String, Class> getWritableFormats() throws ClassNotFoundException { Map<String, Class> writableFormats = new HashMap<String, Class>(); Set<String> keys = (Set) readWriteFileFormats.keySet(); for (String keyvalue : keys) { if (keyvalue.contains(".label")) { String writerLabel = keyvalue.substring(0, keyvalue.lastIndexOf(".label")); if (readWriteFileFormats.getProperty(writerLabel + ".writer") != null) { writableFormats.put(writerLabel, Class.forName(readWriteFileFormats .getProperty(writerLabel + ".writer"))); } } } return writableFormats; } public String getAnnotationPreferencePropertiesFileName() { return annotationPreferencePropertiesFileName; } public void setAnnotationPreferencePropertiesFileName( String aAnnotationPreferencePropertiesFileName) { annotationPreferencePropertiesFileName = aAnnotationPreferencePropertiesFileName; } @Override @PreAuthorize("hasAnyRole('ROLE_ADMIN','ROLE_USER')") public void createCorrectionDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) throws IOException { createAnnotationContent(aDocument, aJcas, WebAnnoConst.CORRECTION_USER, aUser); } @Override @PreAuthorize("hasAnyRole('ROLE_ADMIN','ROLE_USER')") public void createCurationDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) throws IOException { createAnnotationContent(aDocument, aJcas, WebAnnoConst.CURATION_USER, aUser); } @Override public JCas getCorrectionDocumentContent(SourceDocument aDocument) throws UIMAException, IOException, ClassNotFoundException { return getAnnotationContent(aDocument, WebAnnoConst.CORRECTION_USER); } @Override public JCas getCurationDocumentContent(SourceDocument aDocument) throws UIMAException, IOException, ClassNotFoundException { return getAnnotationContent(aDocument, WebAnnoConst.CURATION_USER); } /** * Creates an annotation document (either user's annotation document or CURATION_USER's * annotation document) * * @param aDocument * the {@link SourceDocument} * @param aJcas * The annotated CAS object * @param aUserName * the user who annotates the document if it is user's annotation document OR the * CURATION_USER * @param aUser * The user who annotates the document OR the curator who curates the document * @throws IOException */ private void createAnnotationContent(SourceDocument aDocument, JCas aJcas, String aUserName, User aUser) throws IOException { log.debug("Updating annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUser.getUsername() + "]"); //DebugUtils.smallStack(); synchronized (lock) { File annotationFolder = getAnnotationFolder(aDocument); FileUtils.forceMkdir(annotationFolder); final String 
username = aUserName; File currentVersion = new File(annotationFolder, username + ".ser"); File oldVersion = new File(annotationFolder, username + ".ser.old"); // Save current version try { // Make a backup of the current version of the file before // overwriting if (currentVersion.exists()) { renameFile(currentVersion, oldVersion); } // Now write the new version to "<username>.ser" or // CURATION_USER.ser DocumentMetaData md; try { md = DocumentMetaData.get(aJcas); } catch (IllegalArgumentException e) { md = DocumentMetaData.create(aJcas); } md.setDocumentId(aUserName); File targetPath = getAnnotationFolder(aDocument); writeSerializedCas(aJcas, new File(targetPath, aUserName+".ser")); createLog(aDocument.getProject(), aUser.getUsername()).info( "Updated annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUser.getUsername() + "]"); createLog(aDocument.getProject(), aUser.getUsername()).removeAllAppenders(); // If the saving was successful, we delete the old version if (oldVersion.exists()) { FileUtils.forceDelete(oldVersion); } } catch (IOException e) { // If we could not save the new version, restore the old one. FileUtils.forceDelete(currentVersion); // If this is the first version, there is no old version, so do // not restore anything if (oldVersion.exists()) { renameFile(oldVersion, currentVersion); } // Now abort anyway throw e; } // Manage history if (backupInterval > 0) { // Determine the reference point in time based on the current // version long now = currentVersion.lastModified(); // Get all history files for the current user File[] history = annotationFolder.listFiles(new FileFilter() { private final Matcher matcher = Pattern.compile( Pattern.quote(username) + "\\.ser\\.[0-9]+\\.bak").matcher(""); @Override public boolean accept(File aFile) { // Check if the filename matches the pattern given // above. return matcher.reset(aFile.getName()).matches(); } }); // Sort the files (oldest one first) Arrays.sort(history, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR); // Check if we need to make a new history file boolean historyFileCreated = false; File historyFile = new File(annotationFolder, username + ".ser." + now + ".bak"); if (history.length == 0) { // If there is no history yet but we should keep history, // then we create a // history file in any case. FileUtils.copyFile(currentVersion, historyFile); historyFileCreated = true; } else { // Check if the newest history file is significantly older // than the current one File latestHistory = history[history.length - 1]; if (latestHistory.lastModified() + backupInterval < now) { FileUtils.copyFile(currentVersion, historyFile); historyFileCreated = true; } } // Prune history based on number of backup if (historyFileCreated) { // The new version is not in the history, so we keep that in // any case. That // means we need to keep one less. 
int toKeep = Math.max(backupKeepNumber - 1, 0); if ((backupKeepNumber > 0) && (toKeep < history.length)) { // Copy the oldest files to a new array File[] toRemove = new File[history.length - toKeep]; System.arraycopy(history, 0, toRemove, 0, toRemove.length); // Restrict the history to what is left File[] newHistory = new File[toKeep]; if (toKeep > 0) { System.arraycopy(history, toRemove.length, newHistory, 0, newHistory.length); } history = newHistory; // Remove these old files for (File file : toRemove) { FileUtils.forceDelete(file); createLog(aDocument.getProject(), aUser.getUsername()).info( "Removed surplus history file [" + file.getName() + "] " + "for document with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()) .removeAllAppenders(); } } // Prune history based on time if (backupKeepTime > 0) { for (File file : history) { if ((file.lastModified() + backupKeepTime) < now) { FileUtils.forceDelete(file); createLog(aDocument.getProject(), aUser.getUsername()).info( "Removed outdated history file [" + file.getName() + "] " + " for document with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "]"); createLog(aDocument.getProject(), aUser.getUsername()) .removeAllAppenders(); } } } } } } } /** * For a given {@link SourceDocument}, return the {@link AnnotationDocument} for the user or for * the CURATION_USER * * @param aDocument * the {@link SourceDocument} * @param aUsername * the {@link User} who annotates the {@link SourceDocument} or the CURATION_USER */ private JCas getAnnotationContent(SourceDocument aDocument, String aUsername) throws IOException { if (log.isDebugEnabled()) { log.debug("Getting annotation document [" + aDocument.getName() + "] with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUsername + "]"); } //DebugUtils.smallStack(); synchronized (lock) { File annotationFolder = getAnnotationFolder(aDocument); String file = aUsername + ".ser"; try { File serializedCasFile = new File(annotationFolder, file); if (!serializedCasFile.exists()) { throw new FileNotFoundException("Annotation document of user [" + aUsername + "] for source document [" + aDocument.getName() + "] (" + aDocument.getId() + "). 
not found in project[" + aDocument.getProject().getName() + "] (" + aDocument.getProject().getId() + ")"); } CAS cas = CasCreationUtils.createCas((TypeSystemDescription) null, null, null); readSerializedCas(cas.getJCas(), serializedCasFile); return cas.getJCas(); } catch (UIMAException e) { throw new DataRetrievalFailureException("Unable to parse annotation", e); } } } @Override public boolean isRemoteProject(Project project) { return new File(dir, PROJECT + project.getId() + META_INF).exists(); } private List<String> getAllAnnotators(Project aProject) { // Get all annotators in the project List<String> users = entityManager .createQuery( "SELECT DISTINCT user FROM ProjectPermission WHERE project = :project " + "AND level = :level", String.class) .setParameter("project", aProject).setParameter("level", PermissionLevel.USER) .getResultList(); // check if the username is in the Users database (imported projects // might have username // in the ProjectPermission entry while it is not in the Users database List<String> notInUsers = new ArrayList<String>(); for (String user : users) { if (!userRepository.exists(user)) { notInUsers.add(user); } } users.removeAll(notInUsers); return users; } @Override public void upgradeCasAndSave(SourceDocument aDocument, Mode aMode, String aUsername) throws IOException { User user = getUser(aUsername); if (existsAnnotationDocument(aDocument, user)) { log.debug("Upgrading annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUsername + "] in mode [" + aMode + "]"); //DebugUtils.smallStack(); AnnotationDocument annotationDocument = getAnnotationDocument(aDocument, user); try { CAS cas = getAnnotationDocumentContent(annotationDocument).getCas(); upgrade(cas, aDocument.getProject()); createAnnotationDocumentContent(cas.getJCas(), annotationDocument.getDocument(), user); if (aMode.equals(Mode.ANNOTATION)) { // In this case we only need to upgrade to annotation document } else if (aMode.equals(Mode.AUTOMATION) || aMode.equals(Mode.CORRECTION)) { CAS corrCas = getCorrectionDocumentContent(aDocument).getCas(); upgrade(corrCas, aDocument.getProject()); createCorrectionDocumentContent(corrCas.getJCas(), aDocument, user); } else { CAS curCas = getCurationDocumentContent(aDocument).getCas(); upgrade(curCas, aDocument.getProject()); createCurationDocumentContent(curCas.getJCas(), aDocument, user); } } catch (Exception e) { // no need to catch, it is acceptable that no curation document // exists to be upgraded while there are annotation documents } createLog(aDocument.getProject(), aUsername).info( "Upgraded annotation document [" + aDocument.getName() + "] " + "with ID [" + aDocument.getId() + "] in project ID [" + aDocument.getProject().getId() + "] for user [" + aUsername + "] in mode [" + aMode + "]"); createLog(aDocument.getProject(), aUsername).removeAllAppenders(); } } @Override public void upgrade(CAS aCas, Project aProject) throws UIMAException, IOException { TypeSystemDescription builtInTypes = TypeSystemDescriptionFactory .createTypeSystemDescription(); List<TypeSystemDescription> projectTypes = getProjectTypes(aProject); projectTypes.add(builtInTypes); TypeSystemDescription allTypes = CasCreationUtils.mergeTypeSystems(projectTypes); // Prepare template for new CAS CAS newCas = JCasFactory.createJCas(allTypes).getCas(); CASCompleteSerializer serializer = Serialization.serializeCASComplete((CASImpl) newCas); // Save old type system TypeSystem oldTypeSystem = 
aCas.getTypeSystem(); // Save old CAS contents ByteArrayOutputStream os2 = new ByteArrayOutputStream(); Serialization.serializeWithCompression(aCas, os2, oldTypeSystem); // Prepare CAS with new type system Serialization.deserializeCASComplete(serializer, (CASImpl) aCas); // Restore CAS data to new type system Serialization.deserializeCAS(aCas, new ByteArrayInputStream(os2.toByteArray()), oldTypeSystem, null); } @Override @Transactional public JCas readJCas(SourceDocument aDocument, Project aProject, User aUser) throws UIMAException, IOException, ClassNotFoundException { AnnotationDocument annotationDocument = null; JCas jCas = null; try { annotationDocument = getAnnotationDocument(aDocument, aUser); if (annotationDocument.getState().equals(AnnotationDocumentState.NEW) && !existsAnnotationDocumentContent(aDocument, aUser.getUsername())) { jCas = createJCas(aDocument, annotationDocument, aProject, aUser); } else { jCas = getAnnotationDocumentContent(annotationDocument); } } // it is new, create it and get CAS object catch (NoResultException ex) { jCas = createJCas(aDocument, annotationDocument, aProject, aUser); } catch (DataRetrievalFailureException e) { throw e; } return jCas; } @Override @Transactional public void updateJCas(Mode aMode, SourceDocument aSourceDocument, User aUser, JCas aJcas) throws IOException { if (aMode.equals(Mode.ANNOTATION) || aMode.equals(Mode.AUTOMATION) || aMode.equals(Mode.CORRECTION) || aMode.equals(Mode.CORRECTION_MERGE)) { createAnnotationDocumentContent(aJcas, aSourceDocument, aUser); } else if (aMode.equals(Mode.CURATION) || aMode.equals(Mode.CURATION_MERGE)) { createCurationDocumentContent(aJcas, aSourceDocument, aUser); } } @Override @Transactional public JCas createJCas(SourceDocument aDocument, AnnotationDocument aAnnotationDocument, Project aProject, User aUser) throws IOException { JCas jCas; // change the state of the source document to in progress aDocument.setState(SourceDocumentStateTransition .transition(SourceDocumentStateTransition.NEW_TO_ANNOTATION_IN_PROGRESS)); try { jCas = convertSourceDocumentToCas(getSourceDocumentContent(aDocument), getReadableFormats().get(aDocument.getFormat()), aDocument); if (!existsAnnotationDocument(aDocument, aUser)) { aAnnotationDocument = new AnnotationDocument(); aAnnotationDocument.setDocument(aDocument); aAnnotationDocument.setName(aDocument.getName()); aAnnotationDocument.setUser(aUser.getUsername()); aAnnotationDocument.setProject(aProject); createAnnotationDocument(aAnnotationDocument); } } catch (UIMAException e) { throw new IOException(e); } catch (ClassNotFoundException e) { throw new IOException(e); } catch (Exception e) { throw new IOException(e.getMessage() != null ? e.getMessage() : "This is an invalid file. 
The reader for the document " + aDocument.getName() + " can't read this " + aDocument.getFormat() + " file type"); } createAnnotationDocumentContent(jCas, aDocument, aUser); return jCas; } @Override @SuppressWarnings({ "rawtypes", "unchecked" }) public JCas convertSourceDocumentToCas(File aFile, Class aReader, SourceDocument aDocument) throws UIMAException, IOException { // Prepare a CAS with the project type system TypeSystemDescription builtInTypes = TypeSystemDescriptionFactory .createTypeSystemDescription(); List<TypeSystemDescription> projectTypes = getProjectTypes(aDocument.getProject()); projectTypes.add(builtInTypes); TypeSystemDescription allTypes = CasCreationUtils.mergeTypeSystems(projectTypes); CAS cas = JCasFactory.createJCas(allTypes).getCas(); // Convert the source document to CAS CollectionReader reader = CollectionReaderFactory.createReader(aReader, ResourceCollectionReaderBase.PARAM_SOURCE_LOCATION, aFile.getParentFile().getAbsolutePath(), ResourceCollectionReaderBase.PARAM_PATTERNS, new String[] { "[+]" + aFile.getName() }); if (!reader.hasNext()) { throw new FileNotFoundException("Annotation file [" + aFile.getName() + "] not found in [" + aFile.getPath() + "]"); } reader.getNext(cas); JCas jCas = cas.getJCas(); // Create sentence / token annotations if they are missing boolean hasTokens = JCasUtil.exists(jCas, Token.class); boolean hasSentences = JCasUtil.exists(jCas, Sentence.class); if (!hasTokens || !hasSentences) { AnalysisEngine pipeline = createEngine(createEngineDescription( BreakIteratorSegmenter.class, BreakIteratorSegmenter.PARAM_WRITE_TOKEN, !hasTokens, BreakIteratorSegmenter.PARAM_WRITE_SENTENCE, !hasSentences)); pipeline.process(cas.getJCas()); } return jCas; } @Override @Transactional public void updateTimeStamp(SourceDocument aDocument, User aUser, Mode aMode) throws IOException { if (aMode.equals(Mode.CURATION)) { aDocument.setTimestamp(new Timestamp(new Date().getTime())); entityManager.merge(aDocument); } else { AnnotationDocument annotationDocument = getAnnotationDocument(aDocument, aUser); annotationDocument.setSentenceAccessed(aDocument.getSentenceAccessed()); annotationDocument.setTimestamp(new Timestamp(new Date().getTime())); annotationDocument.setState(AnnotationDocumentState.IN_PROGRESS); entityManager.merge(annotationDocument); } } @Override public String getDatabaseDriverName() { final StringBuilder sb = new StringBuilder(); Session session = entityManager.unwrap(Session.class); session.doWork(new Work() { @Override public void execute(Connection aConnection) throws SQLException { sb.append(aConnection.getMetaData().getDriverName()); } }); return sb.toString(); } @Override public int isCrowdSourceEnabled() { return crowdsourceEnabled; } @Override public File getMiraModel(AnnotationFeature aFeature, boolean aOtherLayer, SourceDocument aDocument) { if (aDocument != null) { return new File(getMiraDir(aFeature), aDocument.getId() + "- " + aDocument.getProject().getId() + "-model"); } else if (aOtherLayer) { return new File(getMiraDir(aFeature), aFeature.getId() + "-model"); } else { return new File(getMiraDir(aFeature), aFeature.getLayer().getId() + "-" + aFeature.getId() + "-model"); } } @Override public File getMiraDir(AnnotationFeature aFeature) { return new File(dir, PROJECT + aFeature.getProject().getId() + MIRA); } @Override @Transactional public void createTemplate(MiraTemplate aTemplate) { if (aTemplate.getId() == 0) { entityManager.persist(aTemplate); } else { entityManager.merge(aTemplate); } } @Override @Transactional(noRollbackFor = 
NoResultException.class) public MiraTemplate getMiraTemplate(AnnotationFeature aFeature) { return entityManager .createQuery("FROM MiraTemplate WHERE trainFeature =:trainFeature", MiraTemplate.class).setParameter("trainFeature", aFeature) .getSingleResult(); } @Override public boolean existsMiraTemplate(AnnotationFeature aFeature) { try { entityManager .createQuery("FROM MiraTemplate WHERE trainFeature =:trainFeature", MiraTemplate.class).setParameter("trainFeature", aFeature) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override public List<MiraTemplate> listMiraTemplates(Project aProject) { List<MiraTemplate> allTenplates = entityManager.createQuery( "FROM MiraTemplate ORDER BY trainFeature ASC ", MiraTemplate.class).getResultList(); List<MiraTemplate> templatesInThisProject = new ArrayList<MiraTemplate>(); for (MiraTemplate miraTemplate : allTenplates) { if (miraTemplate.getTrainFeature() != null && miraTemplate.getTrainFeature().getProject().getId() == aProject.getId()) { templatesInThisProject.add(miraTemplate); } } return templatesInThisProject; } @Override @Transactional public void removeMiraTemplate(MiraTemplate aTemplate) { try { removeAutomationStatus(getAutomationStatus(aTemplate)); } catch (NoResultException e) { // do nothing - automation was not started and no status created for this template } entityManager.remove(aTemplate); } @Override @Transactional public void removeAutomationStatus(AutomationStatus aStstus) { entityManager.remove(aStstus); } private List<TypeSystemDescription> getProjectTypes(Project aProject) { // Create a new type system from scratch List<TypeSystemDescription> types = new ArrayList<TypeSystemDescription>(); for (AnnotationLayer type : annotationService.listAnnotationLayer(aProject)) { if (type.getType().equals(SPAN_TYPE) && !type.isBuiltIn()) { TypeSystemDescription tsd = new TypeSystemDescription_impl(); TypeDescription td = tsd.addType(type.getName(), "", CAS.TYPE_NAME_ANNOTATION); List<AnnotationFeature> features = annotationService.listAnnotationFeature(type); for (AnnotationFeature feature : features) { generateFeature(tsd, td, feature); } types.add(tsd); } else if (type.getType().equals(RELATION_TYPE) && !type.isBuiltIn()) { TypeSystemDescription tsd = new TypeSystemDescription_impl(); TypeDescription td = tsd.addType(type.getName(), "", CAS.TYPE_NAME_ANNOTATION); AnnotationLayer attachType = type.getAttachType(); td.addFeature("Dependent", "", attachType.getName()); td.addFeature("Governor", "", attachType.getName()); List<AnnotationFeature> features = annotationService.listAnnotationFeature(type); for (AnnotationFeature feature : features) { generateFeature(tsd, td, feature); } types.add(tsd); } else if (type.getType().equals(CHAIN_TYPE) && !type.isBuiltIn()) { TypeSystemDescription tsdchains = new TypeSystemDescription_impl(); TypeDescription tdChains = tsdchains.addType(type.getName() + "Chain", "", CAS.TYPE_NAME_ANNOTATION); tdChains.addFeature("first", "", type.getName() + "Link"); types.add(tsdchains); TypeSystemDescription tsdLink = new TypeSystemDescription_impl(); TypeDescription tdLink = tsdLink.addType(type.getName() + "Link", "", CAS.TYPE_NAME_ANNOTATION); tdLink.addFeature("next", "", type.getName() + "Link"); tdLink.addFeature("referenceType", "", CAS.TYPE_NAME_STRING); tdLink.addFeature("referenceRelation", "", CAS.TYPE_NAME_STRING); types.add(tsdLink); } } return types; } private void generateFeature(TypeSystemDescription aTSD, TypeDescription aTD, AnnotationFeature aFeature) { switch 
(aFeature.getMultiValueMode()) { case NONE: aTD.addFeature(aFeature.getName(), "", aFeature.getType()); break; case ARRAY: { switch (aFeature.getLinkMode()) { case WITH_ROLE: { // Link type TypeDescription linkTD = aTSD.addType(aFeature.getLinkTypeName(), "", CAS.TYPE_NAME_TOP); linkTD.addFeature(aFeature.getLinkTypeRoleFeatureName(), "", CAS.TYPE_NAME_STRING); linkTD.addFeature(aFeature.getLinkTypeTargetFeatureName(), "", aFeature.getType()); // Link feature aTD.addFeature(aFeature.getName(), "", CAS.TYPE_NAME_FS_ARRAY, linkTD.getName(), false); break; } default: throw new IllegalArgumentException("Unsupported link mode [" + aFeature.getLinkMode() + "] on feature [" + aFeature.getName() + "]"); } break; } default: throw new IllegalArgumentException("Unsupported multi-value mode [" + aFeature.getMultiValueMode() + "] on feature [" + aFeature.getName() + "]"); } } @Override @Transactional public void createAutomationStatus(AutomationStatus aStatus) { entityManager.persist(aStatus); } @Override public boolean existsAutomationStatus(MiraTemplate aTemplate) { try { entityManager .createQuery("FROM AutomationStatus WHERE template =:template", AutomationStatus.class).setParameter("template", aTemplate) .getSingleResult(); return true; } catch (NoResultException ex) { return false; } } @Override public AutomationStatus getAutomationStatus(MiraTemplate aTemplate) { return entityManager .createQuery("FROM AutomationStatus WHERE template =:template", AutomationStatus.class).setParameter("template", aTemplate) .getSingleResult(); } /** * Check if a TAB-Sep training file is in correct format before importing */ private boolean isTabSepFileFormatCorrect(File aFile) { try { LineIterator it = new LineIterator(new FileReader(aFile)); while (it.hasNext()) { String line = it.next(); if (line.trim().length() == 0) { continue; } if (line.split("\t").length != 2) { return false; } } } catch (Exception e) { return false; } return true; } /** * A Helper method to add {@link TagsetDescription} to {@link CAS} * * @param aCas the CAA. * @param aLayer the layer. * @param aTagSetName the tagset. 
*/ public static void updateCasWithTagSet(CAS aCas, String aLayer, String aTagSetName) { Type TagsetType = CasUtil.getType(aCas, TagsetDescription.class); Feature layerFeature = TagsetType.getFeatureByBaseName("layer"); Feature nameFeature = TagsetType.getFeatureByBaseName("name"); boolean tagSetModified = false; // modify existing tagset Name for (FeatureStructure fs : CasUtil.select(aCas, TagsetType)) { String layer = fs.getStringValue(layerFeature); String tagSetName = fs.getStringValue(nameFeature); if (layer.equals(aLayer)) { // only if the tagset name is changed if (!aTagSetName.equals(tagSetName)) { fs.setStringValue(nameFeature, aTagSetName); aCas.addFsToIndexes(fs); } tagSetModified = true; break; } } if (!tagSetModified) { FeatureStructure fs = aCas.createFS(TagsetType); fs.setStringValue(layerFeature, aLayer); fs.setStringValue(nameFeature, aTagSetName); aCas.addFsToIndexes(fs); } } @Override public List<Project> listAccessibleProjects() { List<Project> allowedProject = new ArrayList<Project>(); String username = SecurityContextHolder.getContext() .getAuthentication().getName(); User user = getUser(username); List<Project> allProjects = listProjects(); List<Authority> authorities = listAuthorities(user); // if global admin, show all projects for (Authority authority : authorities) { if (authority.getAuthority().equals("ROLE_ADMIN")) { return allProjects; } } // else only projects she is admin of for (Project project : allProjects) { if (SecurityUtil.isProjectAdmin(project, this, user)) { allowedProject.add(project); } } return allowedProject; } /** * Return true if there exist at least one annotation document FINISHED for annotation for this * {@link SourceDocument} * * @param aSourceDocument * the source document. * @param aUser * the user. * @param aProject * the project. * @return if a finished document exists. */ @Override public boolean existFinishedDocument( SourceDocument aSourceDocument, User aUser, Project aProject) { List<de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument> annotationDocuments = listAnnotationDocuments(aSourceDocument); boolean finishedAnnotationDocumentExist = false; for (de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocument annotationDocument : annotationDocuments) { if (annotationDocument.getState().equals(AnnotationDocumentState.FINISHED)) { finishedAnnotationDocumentExist = true; break; } } return finishedAnnotationDocumentExist; } private static void writeSerializedCas(JCas aJCas, File aFile) throws IOException { try (ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(aFile))) { CASCompleteSerializer serializer = serializeCASComplete(aJCas.getCasImpl()); os.writeObject(serializer); } } private static void readSerializedCas(JCas aJCas, File aFile) throws IOException { try (ObjectInputStream is = new ObjectInputStream(new FileInputStream(aFile))) { CASCompleteSerializer serializer = (CASCompleteSerializer) is.readObject(); deserializeCASComplete(serializer, aJCas.getCasImpl()); } catch (ClassNotFoundException e) { throw new IOException(e); } } }
#1062 - Remember and re-use CAS after first conversion from source format - Preparatory refactoring: rename createAnnotationContent to writeCas
webanno-api-dao/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/dao/RepositoryServiceDbData.java
#1062 - Remember and re-use CAS after first conversion from source format - Preparatory refactoring: rename createAnnotationContent to writeCas
<ide><path>ebanno-api-dao/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/dao/RepositoryServiceDbData.java <ide> public void createAnnotationDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) <ide> throws IOException <ide> { <del> createAnnotationContent(aDocument, aJcas, aUser.getUsername(), aUser); <add> writeCas(aDocument, aJcas, aUser.getUsername(), aUser); <ide> } <ide> <ide> @Override <ide> public void createCorrectionDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) <ide> throws IOException <ide> { <del> createAnnotationContent(aDocument, aJcas, WebAnnoConst.CORRECTION_USER, aUser); <add> writeCas(aDocument, aJcas, WebAnnoConst.CORRECTION_USER, aUser); <ide> } <ide> <ide> @Override <ide> public void createCurationDocumentContent(JCas aJcas, SourceDocument aDocument, User aUser) <ide> throws IOException <ide> { <del> createAnnotationContent(aDocument, aJcas, WebAnnoConst.CURATION_USER, aUser); <add> writeCas(aDocument, aJcas, WebAnnoConst.CURATION_USER, aUser); <ide> } <ide> <ide> @Override <ide> * @throws IOException <ide> */ <ide> <del> private void createAnnotationContent(SourceDocument aDocument, JCas aJcas, String aUserName, <del> User aUser) <add> private void writeCas(SourceDocument aDocument, JCas aJcas, String aUserName, User aUser) <ide> throws IOException <ide> { <ide> log.debug("Updating annotation document [" + aDocument.getName() + "] " + "with ID [" <ide> <ide> // Save current version <ide> try { <del> // Make a backup of the current version of the file before <del> // overwriting <add> // Make a backup of the current version of the file before overwriting <ide> if (currentVersion.exists()) { <ide> renameFile(currentVersion, oldVersion); <ide> } <ide> <del> // Now write the new version to "<username>.ser" or <del> // CURATION_USER.ser <add> // Now write the new version to "<username>.ser" or CURATION_USER.ser <ide> DocumentMetaData md; <ide> try { <ide> md = DocumentMetaData.get(aJcas); <ide> catch (IOException e) { <ide> // If we could not save the new version, restore the old one. <ide> FileUtils.forceDelete(currentVersion); <del> // If this is the first version, there is no old version, so do <del> // not restore anything <add> // If this is the first version, there is no old version, so do not restore anything <ide> if (oldVersion.exists()) { <ide> renameFile(oldVersion, currentVersion); <ide> } <ide> <ide> // Manage history <ide> if (backupInterval > 0) { <del> // Determine the reference point in time based on the current <del> // version <add> // Determine the reference point in time based on the current version <ide> long now = currentVersion.lastModified(); <ide> <ide> // Get all history files for the current user <ide> @Override <ide> public boolean accept(File aFile) <ide> { <del> // Check if the filename matches the pattern given <del> // above. <add> // Check if the filename matches the pattern given above. <ide> return matcher.reset(aFile.getName()).matches(); <ide> } <ide> }); <ide> boolean historyFileCreated = false; <ide> File historyFile = new File(annotationFolder, username + ".ser." + now + ".bak"); <ide> if (history.length == 0) { <del> // If there is no history yet but we should keep history, <del> // then we create a <add> // If there is no history yet but we should keep history, then we create a <ide> // history file in any case. 
<ide> FileUtils.copyFile(currentVersion, historyFile); <ide> historyFileCreated = true; <ide> } <ide> else { <del> // Check if the newest history file is significantly older <del> // than the current one <add> // Check if the newest history file is significantly older than the current one <ide> File latestHistory = history[history.length - 1]; <ide> if (latestHistory.lastModified() + backupInterval < now) { <ide> FileUtils.copyFile(currentVersion, historyFile); <ide> <ide> // Prune history based on number of backup <ide> if (historyFileCreated) { <del> // The new version is not in the history, so we keep that in <del> // any case. That <add> // The new version is not in the history, so we keep that in any case. That <ide> // means we need to keep one less. <ide> int toKeep = Math.max(backupKeepNumber - 1, 0); <ide> if ((backupKeepNumber > 0) && (toKeep < history.length)) {
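The trickiest part of the WebAnno commit above is the backup-history pruning: keep at most a fixed number of timestamped .bak files and drop anything older than a cutoff, where the freshly written backup sits outside the history array and forces the keepNumber - 1 adjustment. A minimal standalone sketch of that same policy, assuming hypothetical names (BackupPruner, pruneHistory) and a history array sorted oldest-first — an illustration under those assumptions, not WebAnno's actual API:

import java.io.File;
import java.util.Arrays;

public class BackupPruner {
    /**
     * Mirrors the two pruning rules from the record above, assuming
     * history is sorted oldest-first and the newest backup is NOT in it.
     */
    static void pruneHistory(File[] history, int keepNumber, long keepTime, long now) {
        // The new backup lives outside the array, so keep one fewer here.
        int toKeep = Math.max(keepNumber - 1, 0);
        if (keepNumber > 0 && toKeep < history.length) {
            int surplus = history.length - toKeep;
            for (int i = 0; i < surplus; i++) {
                history[i].delete(); // oldest entries beyond the count quota
            }
            history = Arrays.copyOfRange(history, surplus, history.length);
        }
        if (keepTime > 0) {
            // Age-based pruning is applied independently of the count quota.
            for (File f : history) {
                if (f.lastModified() + keepTime < now) {
                    f.delete();
                }
            }
        }
    }
}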
JavaScript
mit
599aa76c68d1a4414c3905a67c97e02c587b5097
0
appdev-academy/appdev.academy-react,appdev-academy/appdev.academy-react,appdev-academy/appdev.academy-react
import React from 'react' import { inject, observer } from 'mobx-react' @inject('articlesStore') @observer export default class Show extends React.Component { componentDidMount() { let articleID = this.props.params.articleID this.props.articlesStore.fetchShow(articleID).then(response => { if (response.status == 200) { this.props.articlesStore.article = response.data } }) } render() { let article = this.props.articlesStore.article let authorName = '' if (article.author) { authorName = article.author.full_name } return ( <div className='article-container'> <h2 className='center'>{ article.title }</h2> <div>Published by { authorName } on { article.published_at }</div> <div>last update on { article.updated_at }</div> <div dangerouslySetInnerHTML={{ __html: article.html_content }} /> </div> ) } }
src/js/components/Frontend/Articles/Show.js
import React from 'react' import { inject, observer } from 'mobx-react' @inject('articlesStore') @observer export default class Show extends React.Component { componentDidMount() { let articleID = this.props.params.articleID this.props.articlesStore.fetchShow(articleID).then(response => { if (response.status == 200) { this.props.articlesStore.article = response.data } }) } render() { let article = this.props.articlesStore.article return ( <div className='article-container'> <h2 className='center'>{ article.title }</h2> <div dangerouslySetInnerHTML={{ __html: article.html_content }} /> </div> ) } }
Show author, published_at and updated_at in frontend
src/js/components/Frontend/Articles/Show.js
Show author, published_at and updated_at in frontend
<ide><path>rc/js/components/Frontend/Articles/Show.js <ide> <ide> render() { <ide> let article = this.props.articlesStore.article <add> let authorName = '' <add> if (article.author) { <add> authorName = article.author.full_name <add> } <ide> return ( <ide> <div className='article-container'> <ide> <h2 className='center'>{ article.title }</h2> <add> <div>Published by { authorName } on { article.published_at }</div> <add> <div>last update on { article.updated_at }</div> <ide> <div dangerouslySetInnerHTML={{ __html: article.html_content }} /> <ide> </div> <ide> )
Java
apache-2.0
7bc74e8cc925fd6220a571c9ff2e91c681e6ef1b
0
jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim
/* * JaamSim Discrete Event Simulation * Copyright (C) 2009-2013 Ausenco Engineering Canada Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ package com.sandwell.JavaSimulation3D; import com.jaamsim.input.Keyword; import com.jaamsim.input.OutputHandle; import com.jaamsim.input.OutputInput; import com.jaamsim.input.ValueInput; import com.jaamsim.units.DistanceUnit; import com.jaamsim.units.Unit; import com.sandwell.JavaSimulation.EntityInput; import com.sandwell.JavaSimulation.Input; import com.sandwell.JavaSimulation.StringInput; /** * The "Text" object displays written text within the 3D model universe. Both fixed and variable text can be displayed. * @author Harry King * */ public class Text extends DisplayEntity { @Keyword(description = "The fixed and variable text to be displayed. If spaces are included, enclose the text in single quotes. " + "If variable text is to be displayed using the OutputName keyword, include the appropriate Java format in the text, " + "e.g. %s, %.6f, %.6g", example = "Text-1 Format { 'Present speed = %.3f m/s' }") protected final StringInput formatText; @Keyword(description = "The output value chain that returns the variable text to be displayed. " + "If more than one output value is given, all outputs but the last should point to an entity output to query" + " for the next output. 
The example returns the name of the product in a tank", example = "Text-1 OutputName { Tank1 Product Name }") protected final OutputInput<Object> outputName; @Keyword(description = "The unit in which to express the output value", example = "Text-1 Unit { m/s }") protected final EntityInput<Unit> unit; @Keyword(description = "The height of the font as displayed in the view window.", example = "Text-1 TextHeight { 15 m }") protected final ValueInput textHeight; @Keyword(description = "The text to display if there is any failure while formatting" + "the dynamic text, or while reading the output's value.", example = "Text-1 FailText { '' }") private final StringInput failText; protected String renderText = ""; { formatText = new StringInput("Format", "Key Inputs", "abc"); this.addInput(formatText, true); outputName = new OutputInput<Object>(Object.class, "OutputName", "Key Inputs", null); this.addInput(outputName, true); unit = new EntityInput<Unit>( Unit.class, "Unit", "Key Inputs", null); this.addInput(unit, true); textHeight = new ValueInput("TextHeight", "Key Inputs", 0.3d); textHeight.setValidRange(0.0d, Double.POSITIVE_INFINITY); textHeight.setUnitType(DistanceUnit.class); this.addInput(textHeight, true); failText = new StringInput("FailText", "Key Inputs", ""); this.addInput(failText, true); } public Text() {} @Override public void updateForInput(Input<?> in) { super.updateForInput(in); if (in == outputName) { OutputHandle h = outputName.getOutputHandle(0.0); if (h != null) unit.setSubClass(h.getUnitType()); return; } } public String getRenderText(double simTime) { if( outputName.getValue() == null ) return formatText.getValue(); try { OutputHandle out = outputName.getOutputHandle(simTime); if( out == null ) return failText.getValue(); if (out.isNumericValue()) { double d = out.getValueAsDouble(simTime, 0.0d, unit.getValue()); return String.format(formatText.getValue(), d); } else { Object o = out.getValue(simTime, out.getReturnType()); return String.format(formatText.getValue(), o); } } catch (Throwable e) { return failText.getValue(); } } @Override public void updateGraphics(double simTime) { super.updateGraphics(simTime); // This text is cached because reflection is used to get it, so who knows how long it will take String newRenderText = getRenderText(simTime); if (newRenderText.equals(renderText)) { // Nothing important has changed return; } // The text has been updated renderText = newRenderText; } public String getCachedText() { return renderText; } public double getTextHeight() { return textHeight.getValue().doubleValue(); } }
src/main/java/com/sandwell/JavaSimulation3D/Text.java
/* * JaamSim Discrete Event Simulation * Copyright (C) 2009-2013 Ausenco Engineering Canada Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ package com.sandwell.JavaSimulation3D; import com.jaamsim.input.Keyword; import com.jaamsim.input.OutputHandle; import com.jaamsim.input.OutputInput; import com.jaamsim.input.ValueInput; import com.jaamsim.units.DistanceUnit; import com.jaamsim.units.Unit; import com.sandwell.JavaSimulation.EntityInput; import com.sandwell.JavaSimulation.Input; import com.sandwell.JavaSimulation.StringInput; /** * The "Text" object displays written text within the 3D model universe. Both fixed and variable text can be displayed. * @author Harry King * */ public class Text extends DisplayEntity { @Keyword(description = "The fixed and variable text to be displayed. If spaces are included, enclose the text in single quotes. " + "If variable text is to be displayed using the OutputName keyword, include the appropriate Java format in the text, " + "e.g. %s, %.6f, %.6g", example = "Text-1 Format { 'Present speed = %.3f m/s' }") protected final StringInput formatText; @Keyword(description = "The output value chain that returns the variable text to be displayed. " + "If more than one output value is given, all outputs but the last should point to an entity output to query" + " for the next output. The example returns the name of the product in a tank", example = "Text-1 OutputName { Tank1 Product Name }") protected final OutputInput<Object> outputName; @Keyword(description = "The unit in which to express the output value", example = "Text-1 Unit { m/s }") protected final EntityInput<Unit> unit; @Keyword(description = "The height of the font as displayed in the view window.", example = "Text-1 TextHeight { 15 m }") protected final ValueInput textHeight; @Keyword(description = "The text to display if there is any failure while formatting" + "the dynamic text, or while reading the output's value.", example = "Text-1 FailText { '' }") private final StringInput failText; protected String renderText = ""; { formatText = new StringInput("Format", "Key Inputs", "abc"); this.addInput(formatText, true); outputName = new OutputInput<Object>(Object.class, "OutputName", "Key Inputs", null); this.addInput(outputName, true); unit = new EntityInput<Unit>( Unit.class, "Unit", "Key Inputs", null); this.addInput(unit, true); textHeight = new ValueInput("TextHeight", "Key Inputs", 0.3d); textHeight.setValidRange(0.0d, Double.POSITIVE_INFINITY); textHeight.setUnitType(DistanceUnit.class); this.addInput(textHeight, true); failText = new StringInput("FailText", "Key Inputs", ""); this.addInput(failText, true); } public Text() {} @Override public void updateForInput(Input<?> in) { super.updateForInput(in); if (in == outputName) { Class<? 
extends Unit> ut = outputName.getOutputHandle(0.0).getUnitType(); unit.setSubClass(ut); return; } } public String getRenderText(double simTime) { if( outputName.getValue() == null ) return formatText.getValue(); try { OutputHandle out = outputName.getOutputHandle(simTime); if( out == null ) return failText.getValue(); if (out.isNumericValue()) { double d = out.getValueAsDouble(simTime, 0.0d, unit.getValue()); return String.format(formatText.getValue(), d); } else { Object o = out.getValue(simTime, out.getReturnType()); return String.format(formatText.getValue(), o); } } catch (Throwable e) { return failText.getValue(); } } @Override public void updateGraphics(double simTime) { super.updateGraphics(simTime); // This text is cached because reflection is used to get it, so who knows how long it will take String newRenderText = getRenderText(simTime); if (newRenderText.equals(renderText)) { // Nothing important has changed return; } // The text has been updated renderText = newRenderText; } public String getCachedText() { return renderText; } public double getTextHeight() { return textHeight.getValue().doubleValue(); } }
JS: avoid a null pointer exception when an Output produces no handle This can occur when a chained output is requested and somewhere in the chain returns a null. Signed-off-by: Harvey Harrison <[email protected]>
src/main/java/com/sandwell/JavaSimulation3D/Text.java
JS: avoid a null pointer exception when an Output produces no handle
<ide><path>rc/main/java/com/sandwell/JavaSimulation3D/Text.java <ide> super.updateForInput(in); <ide> <ide> if (in == outputName) { <del> Class<? extends Unit> ut = outputName.getOutputHandle(0.0).getUnitType(); <del> unit.setSubClass(ut); <add> OutputHandle h = outputName.getOutputHandle(0.0); <add> if (h != null) <add> unit.setSubClass(h.getUnitType()); <ide> return; <ide> } <ide> }
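The JaamSim fix above is the plain guard-before-dereference pattern for a lookup that may legitimately return null partway through a chained output resolution. A self-contained sketch of the same idea, using invented stand-ins (Handle, getOutputHandle) rather than JaamSim's real OutputHandle API:

public class NullGuardDemo {
    interface Handle {
        Class<?> getUnitType();
    }

    /** Stand-in for a chained lookup where any link may resolve to nothing. */
    static Handle getOutputHandle(double simTime) {
        return null;
    }

    static Class<?> unitTypeFor(double simTime) {
        // Before the fix: getOutputHandle(simTime).getUnitType() throws an NPE
        // whenever the chain breaks; guard first, then dereference.
        Handle h = getOutputHandle(simTime);
        return (h != null) ? h.getUnitType() : null;
    }

    public static void main(String[] args) {
        System.out.println(unitTypeFor(0.0)); // prints "null" instead of throwing
    }
}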
JavaScript
mit
8d7777577771b580c31476524fc04fafef6d0f3a
0
douglasduteil/component-publisher,douglasduteil/component-publisher
'use strict'; var cm = require('./common'); var fs = require('fs'); var sh = require('shelljs'); var path = require('path'); var gutil = require('gulp-util'); module.exports = function (opt, done) { if (arguments.length === 1) { done = opt; opt = {}; } var env = this.env; var options = cm._.extend({ cloneLocation: path.resolve(path.join(process.cwd(), cm.PUBLISH_DIR, this.env.branch)), dirSrc: path.resolve(path.join(process.cwd(), cm.BUILD_DIR ,this.env.branch)), branch: this.env.branch, remote: 'origin', message: 'Updates', tag: 'v' + cm.pkg.version //cloneLocation : path.join(process.env.HOME, 'tmp', this.name, this.target) }, opt); function e(cmd_tmpl, data) { var cmd = cm._.template(cmd_tmpl, cm._.extend(data || {}, options)); if (env.verbose) gutil.log('$', gutil.colors.cyan(cmd)); return sh.exec(cmd, { cwd: '123'}); } var origin_cwd = process.cwd(); sh.cd('../..'); var res; // Get the remote.origin.url res = e('git config --get remote.origin.url 2>&1 >/dev/null'); if (res.code > 0) throw new Error('Can\'t get no remote.origin.url !'); options.repoUrl = process.env.REPO || String(res.output).split(/[\n\r]/).shift(); if (!options.repoUrl) throw new Error('No repo link !'); // Remove tmp file if (fs.existsSync(options.cloneLocation)) { e('rm -rf <%= cloneLocation %>'); } // Clone the repo branch to a special location (clonedRepoLocation) res = e('git clone --branch=<%= branch %> --single-branch <%= repoUrl %> <%= cloneLocation %>'); if (res.code > 0) { // try again without banch options res = e('git clone <%= repoUrl %> <%= cloneLocation %>'); if (res.code > 0) throw new Error('Can\'t clone !'); } // Go to the cloneLocation sh.cd(options.cloneLocation); if (sh.pwd() !== options.cloneLocation) { throw new Error('Can\'t access to the clone location : ' + options.cloneLocation + ' from ' + sh.pwd()); } e('git clean -f -d'); e('git fetch <%= remote %>'); // Checkout a branch (create an orphan if it doesn't exist on the remote). res = e('git ls-remote --exit-code . <%= remote %>/<%= branch %>'); if (res.code > 0) { // branch doesn't exist, create an orphan res = e('git checkout --orphan <%= branch %>'); if (res.code > 0) throw new Error('Can\'t clone !'); } else { // branch exists on remote, hard reset e('git checkout <%= branch %>'); } if (!options.add) { // Empty the clone e('git rm --ignore-unmatch -rfq \'\\.[^\\.]*\' *'); } // Copie the targeted files res = e('cp -rf ' + options.dirSrc + '/* ' + options.dirSrc + '/.[a-zA-Z0-9]*' + ' ./'); if (res && res.code > 0) throw new Error(res.output); // Add and commit all the files e('git add .'); res = e('git commit -m \'<%= message %>\''); if (options.tag) { res = e('git tag <%= tag %>'); if (res.code > 0) console.log('Can\'t tag failed, continuing !'); } // Push :) if (options.push) { e('git push --tags <%= remote %> <%= branch %>'); } // Restor path... sh.cd(origin_cwd); done(); };
lib/publish.js
'use strict'; var cm = require('./common'); var fs = require('fs'); var sh = require('shelljs'); var path = require('path'); var gutil = require('gulp-util'); module.exports = function (opt, done) { if (arguments.length === 1) { done = opt; opt = {}; } var env = this.env; var options = cm._.extend({ cloneLocation: path.resolve(path.join(process.cwd(), cm.PUBLISH_DIR, this.env.branch)), dirSrc: path.resolve(path.join(process.cwd(), cm.BUILD_DIR ,this.env.branch)), branch: this.env.branch, remote: 'origin', message: 'Updates', tag: 'v' + cm.pkg.version //cloneLocation : path.join(process.env.HOME, 'tmp', this.name, this.target) }, opt); function e(cmd_tmpl, data) { var cmd = cm._.template(cmd_tmpl, cm._.extend(data || {}, options)); if (env.verbose) gutil.log('$', gutil.colors.cyan(cmd)); return sh.exec(cmd, { cwd: '123'}); } var origin_cwd = process.cwd(); sh.cd('../..'); var res; // Get the remote.origin.url res = e('git config --get remote.origin.url 2>&1 >/dev/null'); if (res.code > 0) throw new Error('Can\'t get no remote.origin.url !'); options.repoUrl = process.env.REPO || String(res.output).split(/[\n\r]/).shift(); if (!options.repoUrl) throw new Error('No repo link !'); // Remove tmp file if (fs.existsSync(options.cloneLocation)) { e('rm -rf <%= cloneLocation %>'); } // Clone the repo branch to a special location (clonedRepoLocation) res = e('git clone --branch=<%= branch %> --single-branch <%= repoUrl %> <%= cloneLocation %>'); if (res.code > 0) { // try again without banch options res = e('git clone <%= repoUrl %> <%= cloneLocation %>'); if (res.code > 0) throw new Error('Can\'t clone !'); } // Go to the cloneLocation sh.cd(options.cloneLocation); if (sh.pwd() !== options.cloneLocation) { throw new Error('Can\'t access to the clone location : ' + options.cloneLocation + ' from ' + sh.pwd()); } e('git clean -f -d'); e('git fetch <%= remote %>'); // Checkout a branch (create an orphan if it doesn't exist on the remote). res = e('git ls-remote --exit-code . <%= remote %>/<%= branch %>'); if (res.code > 0) { // branch doesn't exist, create an orphan res = e('git checkout --orphan <%= branch %>'); if (res.code > 0) throw new Error('Can\'t clone !'); } else { // branch exists on remote, hard reset e('git checkout <%= branch %>'); } if (!options.add) { // Empty the clone e('git rm --ignore-unmatch -rfq \'\\.[^\\.]*\' *'); } // Copie the targeted files res = sh.cp('-rf', options.dirSrc + '/*', './'); res = sh.cp('-rf', options.dirSrc + '/.[a-zA-Z0-9]*', './'); if (res && res.code > 0) throw new Error(res.output); // Add and commit all the files e('git add .'); res = e('git commit -m \'<%= message%>\''); if (options.tag) { res = e('git tag <%= tag %>'); if (res.code > 0) console.log('Can\'t tag failed, continuing !'); } // Push :) if (options.push) { e('git push --tags <%= remote %> <%= branch %>'); } // Restor path... sh.cd(origin_cwd); done(); };
fix(travis): use the correct message and inline the file copy
lib/publish.js
fix(travis): use the correct message and inline the file copy
<ide><path>ib/publish.js <ide> <ide> <ide> // Copie the targeted files <del> res = sh.cp('-rf', options.dirSrc + '/*', './'); <del> res = sh.cp('-rf', options.dirSrc + '/.[a-zA-Z0-9]*', './'); <add> res = e('cp -rf ' + options.dirSrc + '/* ' + options.dirSrc + '/.[a-zA-Z0-9]*' + ' ./'); <ide> if (res && res.code > 0) throw new Error(res.output); <ide> <ide> // Add and commit all the files <ide> <ide> e('git add .'); <del> res = e('git commit -m \'<%= message%>\''); <add> res = e('git commit -m \'<%= message %>\''); <ide> <ide> <ide> if (options.tag) {
Java
mit
d98e24dc58f23497b0a3cfc62bb9cc8c9bb9db70
0
sodash/open-code,sodash/open-code,sodash/open-code,sodash/open-code,sodash/open-code,sodash/open-code
package com.winterwell.datalog; import java.io.File; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import com.winterwell.datalog.DataLog.KInterpolate; import com.winterwell.datalog.server.DataLogSettings; import com.winterwell.depot.Desc; import com.winterwell.es.ESType; import com.winterwell.es.client.ESConfig; import com.winterwell.es.client.ESHttpClient; import com.winterwell.es.client.ESHttpResponse; import com.winterwell.es.client.IESResponse; import com.winterwell.es.client.IndexRequestBuilder; import com.winterwell.es.client.SearchRequestBuilder; import com.winterwell.es.client.SearchResponse; import com.winterwell.es.client.admin.CreateIndexRequest; import com.winterwell.es.client.admin.CreateIndexRequest.Analyzer; import com.winterwell.es.client.admin.PutMappingRequestBuilder; import com.winterwell.maths.stats.distributions.d1.MeanVar1D; import com.winterwell.maths.timeseries.IDataStream; import com.winterwell.utils.MathUtils; import com.winterwell.utils.StrUtils; import com.winterwell.utils.TodoException; import com.winterwell.utils.Utils; import com.winterwell.utils.containers.ArrayMap; import com.winterwell.utils.containers.Pair2; import com.winterwell.utils.io.ArgsParser; import com.winterwell.utils.log.Log; import com.winterwell.utils.threads.IFuture; import com.winterwell.utils.time.Dt; import com.winterwell.utils.time.Period; import com.winterwell.utils.time.Time; public class ESStorage implements IDataLogStorage { private ESConfig settings; private ESHttpClient client; @Override public void save(Period period, Map<String, Double> tag2count, Map<String, MeanVar1D> tag2mean) { // TODO Auto-generated method stub } @Override public void saveHistory(Map<Pair2<String, Time>, Double> tag2time2count) { // TODO Auto-generated method stub } @Override public IFuture<IDataStream> getData(Pattern id, Time start, Time end) { // TODO Auto-generated method stub return null; } @Override public StatReq<IDataStream> getData(String tag, Time start, Time end, KInterpolate fn, Dt bucketSize) { // TODO Auto-generated method stub return null; } @Override public StatReq<Double> getTotal(String tag, Time start, Time end) { // TODO Auto-generated method stub return null; } @Override public Iterator getReader(String server, Time start, Time end, Pattern tagMatcher, String tag) { // TODO Auto-generated method stub return null; } @Override public IFuture<MeanRate> getMean(Time start, Time end, String tag) { // TODO Auto-generated method stub return null; } @Override public StatReq<IDataStream> getMeanData(String tag, Time start, Time end, KInterpolate fn, Dt bucketSize) { // TODO Auto-generated method stub return null; } @Override public void setHistory(Map<Pair2<String, Time>, Double> tagTime2set) { // TODO Auto-generated method stub } public IDataLogStorage init(StatConfig config) { if (settings == null) { settings = new ESConfig(); } client = new ESHttpClient(settings); String idx = indexFromDataspace(DataLog.getDataspace()); initIndex(idx); return this; } private void initIndex(String index) { if (client.admin().indices().indexExists(index)) { return; } // make it CreateIndexRequest pc = client.admin().indices().prepareCreate(index); pc.setDefaultAnalyzer(Analyzer.keyword); IESResponse res = pc.get(); res.check(); // register some standard event types?? 
} private String indexFromDataspace(String dataspace) { assert ! Utils.isBlank(dataspace); String idx = "datalog."+dataspace; return idx; } public ESStorage() { } /** * * * @param cnt * @param dataspace * @param event * @param period * @return */ @Override public ListenableFuture<ESHttpResponse> saveEvent(String dataspace, DataLogEvent event, Period period) { Log.d("datalog.es", "saveEvent: "+event); String index = indexFromDataspace(dataspace); String type = typeFromEventType(event.eventType); // put a time marker on it -- the end in seconds is enough long secs = period.getEnd().getTime() % 1000; String id = event.getId()+"_"+secs; IndexRequestBuilder prepIndex = client.prepareIndex(index, type, id); if (event.time==null) event.time = period.getEnd(); // set doc prepIndex.setSource(event.toJson2()); ListenableFuture<ESHttpResponse> f = prepIndex.execute(); // log stuff f.addListener(() -> { try { ESHttpResponse response = f.get(); response.check(); Log.d("datalog.es", "...saveEvent done :) event: "+event); } catch(Throwable ex) { Log.d("datalog.es", "...saveEvent FAIL :( "+ex+" from event: "+event); } }, MoreExecutors.directExecutor()); return f; } /** * * @param eventType Could come from the wild, so lets not use it directly. * Lets insist on latin chars and protect the base namespace. * @return */ private String typeFromEventType(String eventType) { return "evt."+StrUtils.toCanonical(eventType); } @Override public void registerEventType(String dataspace, String eventType) { String index = indexFromDataspace(dataspace); String type = typeFromEventType(eventType); PutMappingRequestBuilder putMapping = client.admin().indices().preparePutMapping(index, type); // Set the time property as time. The rest it can auto-figure Map msrc = new ESType() .property("time", new ESType().date()); putMapping.setSource(msrc); IESResponse res = putMapping.get(); res.check(); Map<String, Object> jout = res.getParsedJson(); } public double getEventTotal(String dataspace, Time start, Time end, DataLogEvent spec) { String index = indexFromDataspace(dataspace); SearchRequestBuilder search = client.prepareSearch(index); search.setType(typeFromEventType(spec.eventType)); // stats or just sum?? search.addAggregation("event_total", "stats", "count"); search.setSize(0); SearchResponse sr = search.get(); Map<String, Object> jobj = sr.getParsedJson(); List<Map> hits = sr.getHits(); Map aggs = sr.getAggregations(); Map stats = (Map) aggs.get("event_total"); Object sum = stats.get("sum"); return MathUtils.toNum(sum); } @Override public void saveEvents(Collection<DataLogEvent> events, Period period) { // TODO use a batch-save for speed for (DataLogEvent e : events) { saveEvent(e.dataspace, e, period); } } public void registerDataspace(String dataspace) { String index = indexFromDataspace(dataspace); initIndex(index); } }
winterwell.datalog/src/com/winterwell/datalog/ESStorage.java
package com.winterwell.datalog; import java.io.File; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import com.google.common.util.concurrent.ListenableFuture; import com.winterwell.datalog.DataLog.KInterpolate; import com.winterwell.datalog.server.DataLogSettings; import com.winterwell.depot.Desc; import com.winterwell.es.ESType; import com.winterwell.es.client.ESConfig; import com.winterwell.es.client.ESHttpClient; import com.winterwell.es.client.ESHttpResponse; import com.winterwell.es.client.IESResponse; import com.winterwell.es.client.IndexRequestBuilder; import com.winterwell.es.client.SearchRequestBuilder; import com.winterwell.es.client.SearchResponse; import com.winterwell.es.client.admin.CreateIndexRequest; import com.winterwell.es.client.admin.CreateIndexRequest.Analyzer; import com.winterwell.es.client.admin.PutMappingRequestBuilder; import com.winterwell.maths.stats.distributions.d1.MeanVar1D; import com.winterwell.maths.timeseries.IDataStream; import com.winterwell.utils.MathUtils; import com.winterwell.utils.StrUtils; import com.winterwell.utils.TodoException; import com.winterwell.utils.Utils; import com.winterwell.utils.containers.ArrayMap; import com.winterwell.utils.containers.Pair2; import com.winterwell.utils.io.ArgsParser; import com.winterwell.utils.threads.IFuture; import com.winterwell.utils.time.Dt; import com.winterwell.utils.time.Period; import com.winterwell.utils.time.Time; public class ESStorage implements IDataLogStorage { private ESConfig settings; private ESHttpClient client; @Override public void save(Period period, Map<String, Double> tag2count, Map<String, MeanVar1D> tag2mean) { // TODO Auto-generated method stub } @Override public void saveHistory(Map<Pair2<String, Time>, Double> tag2time2count) { // TODO Auto-generated method stub } @Override public IFuture<IDataStream> getData(Pattern id, Time start, Time end) { // TODO Auto-generated method stub return null; } @Override public StatReq<IDataStream> getData(String tag, Time start, Time end, KInterpolate fn, Dt bucketSize) { // TODO Auto-generated method stub return null; } @Override public StatReq<Double> getTotal(String tag, Time start, Time end) { // TODO Auto-generated method stub return null; } @Override public Iterator getReader(String server, Time start, Time end, Pattern tagMatcher, String tag) { // TODO Auto-generated method stub return null; } @Override public IFuture<MeanRate> getMean(Time start, Time end, String tag) { // TODO Auto-generated method stub return null; } @Override public StatReq<IDataStream> getMeanData(String tag, Time start, Time end, KInterpolate fn, Dt bucketSize) { // TODO Auto-generated method stub return null; } @Override public void setHistory(Map<Pair2<String, Time>, Double> tagTime2set) { // TODO Auto-generated method stub } public IDataLogStorage init(StatConfig config) { if (settings == null) { settings = new ESConfig(); } client = new ESHttpClient(settings); String idx = indexFromDataspace(DataLog.getDataspace()); initIndex(idx); return this; } private void initIndex(String index) { if (client.admin().indices().indexExists(index)) { return; } // make it CreateIndexRequest pc = client.admin().indices().prepareCreate(index); pc.setDefaultAnalyzer(Analyzer.keyword); IESResponse res = pc.get(); res.check(); // register some standard event types?? } private String indexFromDataspace(String dataspace) { assert ! 
Utils.isBlank(dataspace); String idx = "datalog."+dataspace; return idx; } public ESStorage() { } /** * * * @param cnt * @param dataspace * @param event * @param period * @return */ @Override public ListenableFuture<ESHttpResponse> saveEvent(String dataspace, DataLogEvent event, Period period) { String index = indexFromDataspace(dataspace); String type = typeFromEventType(event.eventType); // put a time marker on it -- the end in seconds is enough long secs = period.getEnd().getTime() % 1000; String id = event.getId()+"_"+secs; IndexRequestBuilder prepIndex = client.prepareIndex(index, type, id); if (event.time==null) event.time = period.getEnd(); // set doc prepIndex.setSource(event.toJson2()); return prepIndex.execute(); } /** * * @param eventType Could come from the wild, so lets not use it directly. * Lets insist on latin chars and protect the base namespace. * @return */ private String typeFromEventType(String eventType) { return "evt."+StrUtils.toCanonical(eventType); } @Override public void registerEventType(String dataspace, String eventType) { String index = indexFromDataspace(dataspace); String type = typeFromEventType(eventType); PutMappingRequestBuilder putMapping = client.admin().indices().preparePutMapping(index, type); // Set the time property as time. The rest it can auto-figure Map msrc = new ESType() .property("time", new ESType().date()); putMapping.setSource(msrc); IESResponse res = putMapping.get(); res.check(); Map<String, Object> jout = res.getParsedJson(); } public double getEventTotal(String dataspace, Time start, Time end, DataLogEvent spec) { String index = indexFromDataspace(dataspace); SearchRequestBuilder search = client.prepareSearch(index); search.setType(typeFromEventType(spec.eventType)); // stats or just sum?? search.addAggregation("event_total", "stats", "count"); search.setSize(0); SearchResponse sr = search.get(); Map<String, Object> jobj = sr.getParsedJson(); List<Map> hits = sr.getHits(); Map aggs = sr.getAggregations(); Map stats = (Map) aggs.get("event_total"); Object sum = stats.get("sum"); return MathUtils.toNum(sum); } @Override public void saveEvents(Collection<DataLogEvent> events, Period period) { // TODO use a batch-save for speed for (DataLogEvent e : events) { saveEvent(e.dataspace, e, period); } } public void registerDataspace(String dataspace) { String index = indexFromDataspace(dataspace); initIndex(index); } }
extra datalog logging
winterwell.datalog/src/com/winterwell/datalog/ESStorage.java
extra datalog logging
<ide><path>interwell.datalog/src/com/winterwell/datalog/ESStorage.java <ide> import java.util.regex.Pattern; <ide> <ide> import com.google.common.util.concurrent.ListenableFuture; <add>import com.google.common.util.concurrent.MoreExecutors; <ide> import com.winterwell.datalog.DataLog.KInterpolate; <ide> import com.winterwell.datalog.server.DataLogSettings; <ide> import com.winterwell.depot.Desc; <ide> import com.winterwell.utils.containers.ArrayMap; <ide> import com.winterwell.utils.containers.Pair2; <ide> import com.winterwell.utils.io.ArgsParser; <add>import com.winterwell.utils.log.Log; <ide> import com.winterwell.utils.threads.IFuture; <ide> import com.winterwell.utils.time.Dt; <ide> import com.winterwell.utils.time.Period; <ide> */ <ide> @Override <ide> public ListenableFuture<ESHttpResponse> saveEvent(String dataspace, DataLogEvent event, Period period) { <add> Log.d("datalog.es", "saveEvent: "+event); <ide> String index = indexFromDataspace(dataspace); <ide> String type = typeFromEventType(event.eventType); <ide> // put a time marker on it -- the end in seconds is enough <ide> if (event.time==null) event.time = period.getEnd(); <ide> // set doc <ide> prepIndex.setSource(event.toJson2()); <del> return prepIndex.execute(); <add> ListenableFuture<ESHttpResponse> f = prepIndex.execute(); <add> // log stuff <add> f.addListener(() -> { <add> try { <add> ESHttpResponse response = f.get(); <add> response.check(); <add> Log.d("datalog.es", "...saveEvent done :) event: "+event); <add> } catch(Throwable ex) { <add> Log.d("datalog.es", "...saveEvent FAIL :( "+ex+" from event: "+event); <add> } <add> }, MoreExecutors.directExecutor()); <add> return f; <ide> } <ide> <ide> /**
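The diff above wires a completion callback onto Guava's ListenableFuture so that success or failure of the asynchronous Elasticsearch index request gets logged. Below is a minimal, self-contained sketch of that listener pattern, assuming Guava is on the classpath; the SettableFuture is a stand-in for the ESHttpClient response future and the println calls stand in for Log.d, so treat it as an illustration rather than the project's actual wiring.

import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;

public class SaveEventLoggingSketch {
    public static void main(String[] args) {
        // Stand-in for the future returned by prepIndex.execute().
        SettableFuture<String> f = SettableFuture.create();
        // Attach the callback before completion; it runs exactly once, after the future is done.
        f.addListener(() -> {
            try {
                String response = f.get(); // cannot block: the listener fires only on completion
                System.out.println("...saveEvent done :) response: " + response);
            } catch (Throwable ex) {
                System.out.println("...saveEvent FAIL :( " + ex);
            }
        }, MoreExecutors.directExecutor());
        f.set("201 Created"); // simulate the ES call completing successfully
    }
}

MoreExecutors.directExecutor() runs the callback on whichever thread completes the future, which is fine for cheap logging work like this but would be the wrong choice for anything slow or blocking.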
Java
bsd-3-clause
7dd775e034fc55e3aa8690097955a8043b3b1a97
0
asamgir/openspecimen,NCIP/catissue-core,krishagni/openspecimen
/* * <p>Title: AppletConstants.java</p> * <p>Description: This class initializes the fields of AppletConstants.java</p> * Copyright: Copyright (c) year 2006 * Company: Washington University, School of Medicine, St. Louis. * @version 1.1 * Created on Sep 18, 2006 */ package edu.wustl.catissuecore.applet; import java.awt.Color; /** * <p> * AppletConstants interface is used to contain constants required for applet/components like * Image path,font for components etc. * </p> * @author Ashwin Gupta * @version 1.1 */ public interface AppletConstants { /** * Array grid component key used in map. */ String ARRAY_GRID_COMPONENT_KEY = "arrayGridComponentKey"; /** * selected cell color */ Color CELL_SELECTION_COLOR = Color.blue; /** * delimiter */ String delimiter = "_"; /** * key prefix */ String ARRAY_CONTENT_KEY_PREFIX = "SpecimenArrayContent:"; /** * Arrau specimen prefix */ String SPECIMEN_PREFIX = "Specimen:"; /** * Array specimen prefix */ String ARRAY_CONTENT_SPECIMEN_PREFIX = "Specimen_"; /** * Array specimen prefix */ String ARRAY_CONTENT_QUANTITY_PREFIX = "initialQuantity_value"; /** * Array specimen prefix */ String ARRAY_CONTENT_QUANTITY_ID_PREFIX = "initialQuantity_id"; /** * array attributes name */ String[] ARRAY_CONTENT_ATTRIBUTE_NAMES = {ARRAY_CONTENT_SPECIMEN_PREFIX + "label",ARRAY_CONTENT_SPECIMEN_PREFIX + "barcode",ARRAY_CONTENT_QUANTITY_PREFIX,"concentrationInMicrogramPerMicroliter","positionDimensionOne","positionDimensionTwo","id",ARRAY_CONTENT_QUANTITY_ID_PREFIX}; // ,ARRAY_CONTENT_SPECIMEN_PREFIX + "id" /** * Specify the ARRAY_CONTENT_ATTR_LABEL_INDEX field */ int ARRAY_CONTENT_ATTR_LABEL_INDEX = 0; /** * Specify the ARRAY_CONTENT_ATTR_BARCODE_INDEX field */ int ARRAY_CONTENT_ATTR_BARCODE_INDEX = 1; /** * Specify the ARRAY_CONTENT_ATTR_QUANTITY_INDEX field */ int ARRAY_CONTENT_ATTR_QUANTITY_INDEX = 2; /** * Specify the ARRAY_CONTENT_ATTR_CONC_INDEX field */ int ARRAY_CONTENT_ATTR_CONC_INDEX = 3; /** * Specify the ARRAY_CONTENT_ATTR_POS_DIM_ONE_INDEX field */ int ARRAY_CONTENT_ATTR_POS_DIM_ONE_INDEX = 4; /** * Specify the ARRAY_CONTENT_ATTR_POS_DIM_TWO_INDEX field */ int ARRAY_CONTENT_ATTR_POS_DIM_TWO_INDEX = 5; /** * Specify the ARRAY_CONTENT_ATTR_ID_INDEX field */ int ARRAY_CONTENT_ATTR_ID_INDEX = 6; /** * Specify the ARRAY_CONTENT_ATTR_QUANTITY_ID_INDEX field */ int ARRAY_CONTENT_ATTR_QUANTITY_ID_INDEX = 7; /** * Specify the SPECIMEN_ARRAY_APPLET_ACTION field */ String SPECIMEN_ARRAY_APPLET_ACTION = "/SpecimenArrayAppletAction.do"; /** * Specimen Attributes Row Nos * */ short SPECIMEN_COLLECTION_GROUP_ROW_NO = 0; short SPECIMEN_PARENT_ROW_NO = 1; short SPECIMEN_LABEL_ROW_NO = 2; short SPECIMEN_BARCODE_ROW_NO = 3; short SPECIMEN_CLASS_ROW_NO = 4; short SPECIMEN_TYPE_ROW_NO = 5; short SPECIMEN_TISSUE_SITE_ROW_NO = 6; short SPECIMEN_TISSUE_SIDE_ROW_NO = 7; short SPECIMEN_PATHOLOGICAL_STATUS_ROW_NO = 8; short SPECIMEN_QUANTITY_ROW_NO = 9; short SPECIMEN_CONCENTRATION_ROW_NO = 10; short SPECIMEN_STORAGE_LOCATION_ROW_NO = 11; short SPECIMEN_COMMENTS_ROW_NO = 12; short SPECIMEN_EVENTS_ROW_NO = 13; short SPECIMEN_EXTERNAL_IDENTIFIERS_ROW_NO = 14; short SPECIMEN_BIOHAZARDS_ROW_NO = 15; short SPECIMEN_DERIVE_ROW_NO = 16; String NO_OF_SPECIMENS = "NO_OF_SPECIMENS"; // this is key to put specimen map in session. 
String APPLET_ACTION_PARAM_NAME = "method"; //Constants for buttons public static final String MULTIPLE_SPECIMEN_EXTERNAL_IDENTIFIERS = "ExternalIdentifiers"; public static final String MULTIPLE_SPECIMEN_BIOHAZARDS = "BioHazards"; public static final String MULTIPLE_SPECIMEN_EVENTS = "Events"; public static final String MULTIPLE_SPECIMEN_DERIVE = "Derive"; public static final String MULTIPLE_SPECIMEN_MAP = "Map"; public static final String MULTIPLE_SPECIMEN_COMMENTS = "Add Comments"; public static final String MULTIPLE_SPECIMEN_ADD_SPECIMEN = "Add Specimen"; public static final String MULTIPLE_SPECIMEN_COPY = "Copy"; public static final String MULTIPLE_SPECIMEN_PASTE = "Paste"; public static final String MULTIPLE_SPECIMEN_MANDATORY = "*"; public static final String MULTIPLE_SPECIMEN_LOCATION_LABEL = "Containerlabel_temp"; }
WEB-INF/src/edu/wustl/catissuecore/applet/AppletConstants.java
/* * <p>Title: AppletConstants.java</p> * <p>Description: This class initializes the fields of AppletConstants.java</p> * Copyright: Copyright (c) year 2006 * Company: Washington University, School of Medicine, St. Louis. * @version 1.1 * Created on Sep 18, 2006 */ package edu.wustl.catissuecore.applet; import java.awt.Color; /** * <p> * AppletConstants interface is used to contain constants required for applet/components like * Image path,font for components etc. * </p> * @author Ashwin Gupta * @version 1.1 */ public interface AppletConstants { /** * Array grid component key used in map. */ String ARRAY_GRID_COMPONENT_KEY = "arrayGridComponentKey"; /** * selected cell color */ Color CELL_SELECTION_COLOR = Color.BLUE; /** * delimiter */ String delimiter = "_"; /** * key prefix */ String ARRAY_CONTENT_KEY_PREFIX = "SpecimenArrayContent:"; /** * Arrau specimen prefix */ String SPECIMEN_PREFIX = "Specimen:"; /** * Array specimen prefix */ String ARRAY_CONTENT_SPECIMEN_PREFIX = "Specimen_"; /** * Array specimen prefix */ String ARRAY_CONTENT_QUANTITY_PREFIX = "initialQuantity_value"; /** * Array specimen prefix */ String ARRAY_CONTENT_QUANTITY_ID_PREFIX = "initialQuantity_id"; /** * array attributes name */ String[] ARRAY_CONTENT_ATTRIBUTE_NAMES = {ARRAY_CONTENT_SPECIMEN_PREFIX + "label",ARRAY_CONTENT_SPECIMEN_PREFIX + "barcode",ARRAY_CONTENT_QUANTITY_PREFIX,"concentrationInMicrogramPerMicroliter","positionDimensionOne","positionDimensionTwo","id",ARRAY_CONTENT_QUANTITY_ID_PREFIX}; // ,ARRAY_CONTENT_SPECIMEN_PREFIX + "id" /** * Specify the ARRAY_CONTENT_ATTR_LABEL_INDEX field */ int ARRAY_CONTENT_ATTR_LABEL_INDEX = 0; /** * Specify the ARRAY_CONTENT_ATTR_BARCODE_INDEX field */ int ARRAY_CONTENT_ATTR_BARCODE_INDEX = 1; /** * Specify the ARRAY_CONTENT_ATTR_QUANTITY_INDEX field */ int ARRAY_CONTENT_ATTR_QUANTITY_INDEX = 2; /** * Specify the ARRAY_CONTENT_ATTR_CONC_INDEX field */ int ARRAY_CONTENT_ATTR_CONC_INDEX = 3; /** * Specify the ARRAY_CONTENT_ATTR_POS_DIM_ONE_INDEX field */ int ARRAY_CONTENT_ATTR_POS_DIM_ONE_INDEX = 4; /** * Specify the ARRAY_CONTENT_ATTR_POS_DIM_TWO_INDEX field */ int ARRAY_CONTENT_ATTR_POS_DIM_TWO_INDEX = 5; /** * Specify the ARRAY_CONTENT_ATTR_ID_INDEX field */ int ARRAY_CONTENT_ATTR_ID_INDEX = 6; /** * Specify the ARRAY_CONTENT_ATTR_QUANTITY_ID_INDEX field */ int ARRAY_CONTENT_ATTR_QUANTITY_ID_INDEX = 7; /** * Specify the SPECIMEN_ARRAY_APPLET_ACTION field */ String SPECIMEN_ARRAY_APPLET_ACTION = "/SpecimenArrayAppletAction.do"; /** * Specimen Attributes Row Nos * */ short SPECIMEN_COLLECTION_GROUP_ROW_NO = 0; short SPECIMEN_PARENT_ROW_NO = 1; short SPECIMEN_LABEL_ROW_NO = 2; short SPECIMEN_BARCODE_ROW_NO = 3; short SPECIMEN_CLASS_ROW_NO = 4; short SPECIMEN_TYPE_ROW_NO = 5; short SPECIMEN_TISSUE_SITE_ROW_NO = 6; short SPECIMEN_TISSUE_SIDE_ROW_NO = 7; short SPECIMEN_PATHOLOGICAL_STATUS_ROW_NO = 8; short SPECIMEN_QUANTITY_ROW_NO = 9; short SPECIMEN_CONCENTRATION_ROW_NO = 10; short SPECIMEN_STORAGE_LOCATION_ROW_NO = 11; short SPECIMEN_COMMENTS_ROW_NO = 12; short SPECIMEN_EVENTS_ROW_NO = 13; short SPECIMEN_EXTERNAL_IDENTIFIERS_ROW_NO = 14; short SPECIMEN_BIOHAZARDS_ROW_NO = 15; short SPECIMEN_DERIVE_ROW_NO = 16; String NO_OF_SPECIMENS = "NO_OF_SPECIMENS"; // this is key to put specimen map in session. String APPLET_ACTION_PARAM_NAME = "method"; }
constants added SVN-Revision: 4598
WEB-INF/src/edu/wustl/catissuecore/applet/AppletConstants.java
constants added
<ide><path>EB-INF/src/edu/wustl/catissuecore/applet/AppletConstants.java <ide> /** <ide> * selected cell color <ide> */ <del> Color CELL_SELECTION_COLOR = Color.BLUE; <add> Color CELL_SELECTION_COLOR = Color.blue; <ide> <ide> /** <ide> * delimiter <ide> // this is key to put specimen map in session. <ide> <ide> String APPLET_ACTION_PARAM_NAME = "method"; <add> <add> //Constants for buttons <add> public static final String MULTIPLE_SPECIMEN_EXTERNAL_IDENTIFIERS = "ExternalIdentifiers"; <add> public static final String MULTIPLE_SPECIMEN_BIOHAZARDS = "BioHazards"; <add> public static final String MULTIPLE_SPECIMEN_EVENTS = "Events"; <add> public static final String MULTIPLE_SPECIMEN_DERIVE = "Derive"; <add> public static final String MULTIPLE_SPECIMEN_MAP = "Map"; <add> public static final String MULTIPLE_SPECIMEN_COMMENTS = "Add Comments"; <add> public static final String MULTIPLE_SPECIMEN_ADD_SPECIMEN = "Add Specimen"; <add> public static final String MULTIPLE_SPECIMEN_COPY = "Copy"; <add> public static final String MULTIPLE_SPECIMEN_PASTE = "Paste"; <add> public static final String MULTIPLE_SPECIMEN_MANDATORY = "*"; <add> <add> public static final String MULTIPLE_SPECIMEN_LOCATION_LABEL = "Containerlabel_temp"; <ide> }
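AppletConstants is a classic constant interface: the values are declared in an interface so that any class can implement it and refer to the constants unqualified. The sketch below is a hypothetical miniature of that pattern, not code from the catissue tree.

// Hypothetical names, invented for the demo.
interface ToolbarConstants {
    String COPY_LABEL = "Copy";   // interface fields are implicitly public static final
    String PASTE_LABEL = "Paste";
}

public class SpecimenToolbarSketch implements ToolbarConstants {
    public static void main(String[] args) {
        // Implementing the interface brings the constants into scope unqualified.
        System.out.println(COPY_LABEL + " / " + PASTE_LABEL);
    }
}

Effective Java discourages this pattern in favor of static imports or a final holder class, since implementing an interface purely for its constants leaks them into the class's exported API.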
Java
apache-2.0
0cca889ae7fdbae81f8c39aea13c81b04552b275
0
adedayo/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,apixandru/intellij-community,asedunov/intellij-community,blademainer/intellij-community,caot/intellij-community,clumsy/intellij-community,consulo/consulo,da1z/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,ernestp/consulo,fengbaicanhe/intellij-community,FHannes/intellij-community,fitermay/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,holmes/intellij-community,hurricup/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,izonder/intellij-community,jagguli/intellij-community,kdwink/intellij-community,kool79/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,robovm/robovm-studio,ryano144/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,signed/intellij-community,slisson/intellij-community,suncycheng/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,youdonghai/intellij-community
plugins/cvs/cvs-plugin/src/com/intellij/cvsSupport2/actions/cvsContext/CvsDataConstants.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.cvsSupport2.actions.cvsContext; /** * author: lesya * * @deprecated use {@link com.intellij.cvsSupport2.actions.cvsContext.CvsDataKeys} instead */ @SuppressWarnings({"UnusedDeclaration"}) public interface CvsDataConstants { String DELETED_FILE_NAMES = CvsDataKeys.DELETED_FILE_NAMES.getName(); String FILE_TO_RESTORE = CvsDataKeys.FILE_TO_RESTORE.getName(); String CVS_LIGHT_FILE = CvsDataKeys.CVS_LIGHT_FILE.getName(); String CVS_LIGHT_FILES = CvsDataKeys.CVS_LIGHT_FILES.getName(); String CVS_ENVIRONMENT = CvsDataKeys.CVS_ENVIRONMENT.getName(); String FILES_TO_ADD = CvsDataKeys.FILES_TO_ADD.getName(); }
remove deprecated constants
plugins/cvs/cvs-plugin/src/com/intellij/cvsSupport2/actions/cvsContext/CvsDataConstants.java
remove deprecated constants
<ide><path>lugins/cvs/cvs-plugin/src/com/intellij/cvsSupport2/actions/cvsContext/CvsDataConstants.java <del>/* <del> * Copyright 2000-2009 JetBrains s.r.o. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * http://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. <del> */ <del>package com.intellij.cvsSupport2.actions.cvsContext; <del> <del>/** <del> * author: lesya <del> * <del> * @deprecated use {@link com.intellij.cvsSupport2.actions.cvsContext.CvsDataKeys} instead <del> */ <del>@SuppressWarnings({"UnusedDeclaration"}) <del>public interface CvsDataConstants { <del> String DELETED_FILE_NAMES = CvsDataKeys.DELETED_FILE_NAMES.getName(); <del> String FILE_TO_RESTORE = CvsDataKeys.FILE_TO_RESTORE.getName(); <del> String CVS_LIGHT_FILE = CvsDataKeys.CVS_LIGHT_FILE.getName(); <del> String CVS_LIGHT_FILES = CvsDataKeys.CVS_LIGHT_FILES.getName(); <del> String CVS_ENVIRONMENT = CvsDataKeys.CVS_ENVIRONMENT.getName(); <del> String FILES_TO_ADD = CvsDataKeys.FILES_TO_ADD.getName(); <del>}
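The removed interface only mirrored typed keys back into plain strings (CvsDataKeys.X.getName()), so deleting it pushes callers onto the typed keys directly. Below is a hedged sketch of what such a typed-key holder looks like in general; the generic Key class is invented for the demo and is not IntelliJ's real DataKey API.

final class Key<T> {
    private final String name;
    private Key(String name) { this.name = name; }
    static <T> Key<T> create(String name) { return new Key<>(name); }
    String getName() { return name; }
}

public class TypedKeySketch {
    // The value type travels with the key, so callers cannot mix keys and types up.
    static final Key<String[]> DELETED_FILE_NAMES = Key.create("deletedFileNames");

    public static void main(String[] args) {
        // Legacy code that still needs the raw name can ask the key for it.
        System.out.println(DELETED_FILE_NAMES.getName());
    }
}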
JavaScript
mit
9184757db90aa1d1a378d9a8f1c884a8f1dbf0d3
0
reimagined/resolve
export const causalConsistenceWaitTime = 200 const waitEventCausalConsistency = async ( readModel, aggregateId, aggregateVersion ) => { try { do { const lastTimestamp = await readModel.metaApi.getLastTimestamp() await readModel.metaApi.rollbackTransaction(true) if (lastTimestamp == null) { await new Promise(resolve => setTimeout(resolve, causalConsistenceWaitTime) ) await readModel.metaApi.beginTransaction(true) } else { break } } while (true) if (aggregateId === true) { return } const latestEvent = await readModel.eventStore.getLatestEvent({ eventTypes: readModel.eventTypes, ...(aggregateId != null ? { aggregateIds: [aggregateId] } : {}) }) if (latestEvent == null) { return } do { await readModel.metaApi.beginTransaction(true) const isLastEventProcessed = await readModel.metaApi.checkEventProcessed( latestEvent.aggregateId, aggregateVersion != null ? aggregateVersion : latestEvent.aggregateVersion ) await readModel.metaApi.rollbackTransaction(true) if (!isLastEventProcessed) { await new Promise(resolve => setTimeout(resolve, causalConsistenceWaitTime) ) } else { break } } while (true) } finally { await readModel.metaApi.beginTransaction(true) } } export default waitEventCausalConsistency
packages/adapters/readmodel-adapters/resolve-readmodel-base/src/wait-event-causal-consistency.js
export const causalConsistenceWaitTime = 200 const waitEventCausalConsistency = async ( readModel, aggregateId, aggregateVersion ) => { try { await readModel.metaApi.rollbackTransaction(true) const latestEvent = await readModel.eventStore.getLatestEvent({ eventTypes: readModel.eventTypes, ...(aggregateId != null ? { aggregateIds: [aggregateId] } : {}) }) do { await readModel.metaApi.beginTransaction(true) const lastTimestamp = await readModel.metaApi.getLastTimestamp() await readModel.metaApi.rollbackTransaction(true) if (lastTimestamp == null) { await new Promise(resolve => setTimeout(resolve, causalConsistenceWaitTime) ) } else { break } } while (true) if (latestEvent == null) { return } do { await readModel.metaApi.beginTransaction(true) const isLastEventProcessed = await readModel.metaApi.checkEventProcessed( latestEvent.aggregateId, aggregateVersion != null ? aggregateVersion : latestEvent.aggregateVersion ) await readModel.metaApi.rollbackTransaction(true) if (!isLastEventProcessed) { await new Promise(resolve => setTimeout(resolve, causalConsistenceWaitTime) ) } else { break } } while (true) } finally { await readModel.metaApi.beginTransaction(true) } } export default waitEventCausalConsistency
Implement wait for read-model Init first invocation (#988)
packages/adapters/readmodel-adapters/resolve-readmodel-base/src/wait-event-causal-consistency.js
Implement wait for read-model Init first invocation (#988)
<ide><path>ackages/adapters/readmodel-adapters/resolve-readmodel-base/src/wait-event-causal-consistency.js <ide> aggregateVersion <ide> ) => { <ide> try { <del> await readModel.metaApi.rollbackTransaction(true) <del> <del> const latestEvent = await readModel.eventStore.getLatestEvent({ <del> eventTypes: readModel.eventTypes, <del> ...(aggregateId != null ? { aggregateIds: [aggregateId] } : {}) <del> }) <del> <ide> do { <del> await readModel.metaApi.beginTransaction(true) <ide> const lastTimestamp = await readModel.metaApi.getLastTimestamp() <ide> await readModel.metaApi.rollbackTransaction(true) <ide> <ide> await new Promise(resolve => <ide> setTimeout(resolve, causalConsistenceWaitTime) <ide> ) <add> await readModel.metaApi.beginTransaction(true) <ide> } else { <ide> break <ide> } <ide> } while (true) <add> <add> if (aggregateId === true) { <add> return <add> } <add> <add> const latestEvent = await readModel.eventStore.getLatestEvent({ <add> eventTypes: readModel.eventTypes, <add> ...(aggregateId != null ? { aggregateIds: [aggregateId] } : {}) <add> }) <ide> <ide> if (latestEvent == null) { <ide> return
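The JavaScript change above reorders the causal-consistency wait so the read model's last timestamp is polled before any event lookup, and bails out early when aggregateId === true. Its core is a check-then-sleep polling loop; the sketch below restates that loop in Java (the dominant language of these records) with hypothetical names, standing in for the metaApi.getLastTimestamp() != null probe.

import java.util.function.BooleanSupplier;

public class CausalWaitSketch {
    // Mirrors causalConsistenceWaitTime = 200 from the record above.
    static final long WAIT_MS = 200;

    // Check first, sleep only while the condition is still false.
    static void waitUntil(BooleanSupplier ready) throws InterruptedException {
        while (!ready.getAsBoolean()) {
            Thread.sleep(WAIT_MS);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        // Hypothetical readiness probe: "initialized" after roughly 600 ms.
        waitUntil(() -> System.currentTimeMillis() - start > 600);
        System.out.println("read model initialized");
    }
}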
Java
bsd-3-clause
0f20d809862220ccdf44f211a5190f0c66e3fe54
0
TeamCohen/MinorThird
package edu.cmu.minorthird.util; import java.io.Serializable; /** * Math utilities. * */ public class MathUtil{ /** Sign function. */ static public int sign(double x){ if(x>0) return +1; else if(x<0) return -1; else return 0; } /** Absolute value function. */ static public double abs(double x){ if(x>0) return x; else return -x; } /** Logistic function. */ static public double logistic(double x){ return 1.0/(1.0+Math.exp(-x)); } /** * Accumulate a list of numbers, then report on mean, standard deviation, and * other common statistics. */ static public class Accumulator implements Serializable{ static private final long serialVersionUID=1; private double sum=0,cov=0,count=0; private boolean isBinomial=true; /** Clear the accumulator **/ public void clear() { sum = cov = count = 0; isBinomial = true; } /** Add a new number to the accumulator. */ public void add(double d){ sum+=d; cov+=d*d; count++; if(d!=0&&d!=1) isBinomial=false; } /** Combine two accumulators. Result will be be as if every call b.add(x) had been followed by a call to this.add(x). **/ public void addAll(Accumulator b) { sum += b.sum; cov += b.cov; count += b.count; isBinomial = isBinomial && b.isBinomial; } /** The mean of accumulated values. */ public double mean(){ return sum/count; } /** The number of accumulated values. */ public double numberOfValues(){ return count; } /** The variance of the accumulated values. */ public double variance(){ double avg=mean(); return cov/count-avg*avg; } /** The population standard devation of the accumulated values. */ public double populationStdDev(){ return Math.sqrt(variance()); } /** The sample standard devation of the accumulated values. */ public double stdDev(){ return populationStdDev()/Math.sqrt((count-1)/count); } /** The sample standard error of the accumulated values. */ public double stdErr(){ return stdDev()/Math.sqrt(count); } /** The standard error of binomially distributed values. */ public double binomialStdErr(){ if(!isBinomial) throw new IllegalArgumentException( "numbers in accumulator are not binomial!"); double p=mean(); return Math.sqrt(p*(1-p)/count); } /** The Z statistic. */ public double z(double expected){ return (mean()-expected)/stdErr(); } } }
src/edu/cmu/minorthird/util/MathUtil.java
package edu.cmu.minorthird.util; import java.io.Serializable; /** * Math utilities. * */ public class MathUtil{ /** Sign function. */ static public int sign(double x){ if(x>0) return +1; else if(x<0) return -1; else return 0; } /** Absolute value function. */ static public double abs(double x){ if(x>0) return x; else return -x; } /** Logistic function. */ static public double logistic(double x){ return 1.0/(1.0+Math.exp(-x)); } /** * Accumulate a list of numbers, then report on mean, standard deviation, and * other common statistics. */ static public class Accumulator implements Serializable{ static private final long serialVersionUID=1; private double sum=0,cov=0,count=0; private boolean isBinomial=true; /** Add a new number to the accumulator. */ public void add(double d){ sum+=d; cov+=d*d; count++; if(d!=0&&d!=1) isBinomial=false; } /** The mean of accumulated values. */ public double mean(){ return sum/count; } /** The number of accumulated values. */ public double numberOfValues(){ return count; } /** The variance of the accumulated values. */ public double variance(){ double avg=mean(); return cov/count-avg*avg; } /** The population standard devation of the accumulated values. */ public double populationStdDev(){ return Math.sqrt(variance()); } /** The sample standard devation of the accumulated values. */ public double stdDev(){ return populationStdDev()/Math.sqrt((count-1)/count); } /** The sample standard error of the accumulated values. */ public double stdErr(){ return stdDev()/Math.sqrt(count); } /** The standard error of binomially distributed values. */ public double binomialStdErr(){ if(!isBinomial) throw new IllegalArgumentException( "numbers in accumulator are not binomial!"); double p=mean(); return Math.sqrt(p*(1-p)/count); } /** The Z statistic. */ public double z(double expected){ return (mean()-expected)/stdErr(); } } }
added addAll to MathUtil
src/edu/cmu/minorthird/util/MathUtil.java
added addAll to MathUtil
<ide><path>rc/edu/cmu/minorthird/util/MathUtil.java <ide> <ide> private boolean isBinomial=true; <ide> <add> /** Clear the accumulator **/ <add> public void clear() { <add> sum = cov = count = 0; <add> isBinomial = true; <add> } <add> <add> <ide> /** Add a new number to the accumulator. */ <ide> public void add(double d){ <ide> sum+=d; <ide> if(d!=0&&d!=1) <ide> isBinomial=false; <ide> } <add> <add> /** Combine two accumulators. Result will be be as if every <add> call b.add(x) had been followed by a call to this.add(x). <add> **/ <add> public void addAll(Accumulator b) { <add> sum += b.sum; <add> cov += b.cov; <add> count += b.count; <add> isBinomial = isBinomial && b.isBinomial; <add> } <ide> <ide> /** The mean of accumulated values. */ <ide> public double mean(){
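addAll is meant to make merging two accumulators equivalent to feeding every value into one. A small usage sketch, assuming the MinorThird jar is on the classpath:

import edu.cmu.minorthird.util.MathUtil;

public class AccumulatorMergeDemo {
    public static void main(String[] args) {
        MathUtil.Accumulator a = new MathUtil.Accumulator();
        MathUtil.Accumulator b = new MathUtil.Accumulator();
        MathUtil.Accumulator all = new MathUtil.Accumulator();
        for (double x : new double[]{1, 0, 1}) { a.add(x); all.add(x); }
        for (double x : new double[]{0, 1})    { b.add(x); all.add(x); }
        a.addAll(b); // a should now behave as if it had seen all five values itself
        System.out.println(a.mean()   + " == " + all.mean());
        System.out.println(a.stdDev() + " == " + all.stdDev());
    }
}

If the two paths print the same mean and standard deviation, the merge preserved sum, cov, count, and the binomial flag as intended.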
Java
bsd-3-clause
78d74a05c86fe56f6f8f93259486864c145a1f25
0
CPSC319-2017w1/coast.the-terminal
package server.rest.controllers; import org.springframework.web.bind.annotation.*; import server.database.DatabaseConnection; import server.model.*; import server.rest.responses.ContractorsResponse; import server.rest.responses.Response; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; @CrossOrigin(origins = {"http://localhost:1234","http://theterminal.s3-website.us-west-2.amazonaws.com"}, methods = {RequestMethod.GET, RequestMethod.POST}) @RestController public class ContractorsController extends Controller { private final static String DATE_FORMAT = "yyyy-MM-dd"; private final static String getQuery = "select * from Contractor"; private final static String insertContractorQuery = "INSERT INTO Contractor(id, firstName, surname, agencySource, status, rehire) VALUES (?,?,?,?,?,?)"; private static final String editContractorQuery = "UPDATE Contractor SET firstName=?, surname=?, agencySource=?, status=? WHERE id=?"; private final static String insertEngagementContractQuery = "INSERT INTO EngagementContract(" + "id," + "startDate," + "endDate," + "rateType," + "projectName," + "chargeType," + "dailyAllowance," + "originalDocumentation," + "terminationNum," + "contractorId," + "resourceId," + "hrPositionId," + "hrPayGradeId," + "costCenterId," + "reportingManagerUserId," + "currencyCode," + "mainSkillId," + "timeAndMaterialTerms," + "poNum," + "hourlyRate)" + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; private final static String editEngagementContractorQuery = "UPDATE EngagementContract SET " + "startDate=?, " + "endDate=?, " + "rateType=?, " + "projectName=?, " + "chargeType=?, " + "dailyAllowance=?, " + "originalDocumentation=?, " + "terminationNum=?, " + "contractorId=?, " + "resourceId=?, " + "hrPositionId=?, " + "hrPayGradeId=?, " + "costCenterId=?, " + "reportingManagerUserId=?, " + "currencyCode=?, " + "mainSkillId=?, " + "timeAndMaterialTerms=?, " + "poNum=?, " + "hourlyRate=? 
" + "WHERE id=?"; private final static String viewAllContractorDataQuery = "SELECT * FROM Contractor c\n" + "INNER JOIN EngagementContract e ON e.contractorId=c.id\n" + "INNER JOIN HRPositionRole p ON p.id=e.hrPositionId\n" + "INNER JOIN HRPayGrade pg ON pg.id=e.hrPayGradeId\n" + "INNER JOIN CostCenter cc on cc.id=e.costCenterId\n" + "INNER JOIN Skill s on s.id=e.mainSkillId\n" + "INNER JOIN HiringManager rp on rp.userId=e.reportingManagerUserId\n" + "ORDER BY c.id"; public ArrayList<Contractor> editContractorImpl(String id, String firstName, String lastName, String agencySource, String status) throws SQLException { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); ArrayList<Contractor> contractors = new ArrayList<Contractor>(); connection.openConnection(); if (!connection.isConnected()) { throw new SQLException("Failed to connect to database"); } PreparedStatement st = connection.getPreparedStatement(editContractorQuery); int index=1; st.setString(index++, firstName); st.setString(index++, lastName); st.setString(index++, agencySource); st.setString(index++, status); st.setString(index++, id); int success = st.executeUpdate(); if (success == 0) { throw new SQLException("Failed to update Contractor data"); } connection.commitTransaction(); connection.closeConnection(); Contractor c = new Contractor(id, firstName, lastName, agencySource, status, true); contractors.add(c); return contractors; } @RequestMapping("/contractors/view") public ContractorsResponse contractors() { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); ArrayList<Contractor> contractors = new ArrayList<Contractor>(); try { connection.openConnection(); if (!connection.isConnected()) { return ContractorsResponse.contractorsFailure("Failed to connect to database"); } PreparedStatement st = connection.getPreparedStatement(getQuery); ResultSet set = st.executeQuery(); while(set.next()) { Contractor c = new Contractor(set.getString("id"), set.getString("firstName"), set.getString("surname"), set.getString("agencySource"), set.getString("status"), set.getBoolean("rehire")); contractors.add(c); } connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Get Contractors Failed: " + e.getMessage()); return ContractorsResponse.contractorsFailure(e.getMessage()); } return new ContractorsResponse(contractors); } @RequestMapping("/contractors/add") public ContractorsResponse addContractor( @RequestParam("firstName") String firstName, @RequestParam("surname") String surName, @RequestParam("agencySource") String agencySource, @RequestParam("status") String status) { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); List<Contractor> newContractor = new ArrayList<>(); try { connection.openConnection(); if (!connection.isConnected()) { return ContractorsResponse.contractorsFailure("Failed to connect to database"); } String newContractorId = UUID.randomUUID().toString(); final boolean rehire = false; PreparedStatement st = connection.getPreparedStatement(insertContractorQuery); int i =1; st.setString(i++, newContractorId); st.setString(i++, firstName); st.setString(i++, surName); st.setString(i++, agencySource); st.setString(i++, status); st.setBoolean(i++, rehire); int success = st.executeUpdate(); connection.commitTransaction(); if(success == 0){ return ContractorsResponse.contractorsFailure("Failed to add contractor. 
SQL Update failed"); } Contractor contractor = new Contractor(newContractorId, firstName, surName, agencySource, status, false); newContractor.add(contractor); connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Add Contractor Failed: " + e.getMessage()); return ContractorsResponse.contractorsFailure(e.getMessage()); } return new ContractorsResponse(newContractor); } @CrossOrigin("*") @RequestMapping(value = "/contractors/edit/engagementContract", method={RequestMethod.POST}) public Response editEngagementContract( @RequestParam("id") String id, @RequestParam("startDate") String startDate, @RequestParam("endDate") String endDate, @RequestParam("rateType") String rateType, @RequestParam("projectName") String projectName, @RequestParam("chargeType") String chargeType, @RequestParam("dailyAllowance") int dailyAllowance, @RequestParam("originalDocumentation") String originalDocumentation, @RequestParam("terminationNum") int terminationNum, @RequestParam("contractorId") String contractorId, @RequestParam("resourceId") String resourceId, @RequestParam("hrPositionId") String hrPositionId, @RequestParam("hrPayGradeId") String hrPayGradeId, @RequestParam("costCenterId") String costCenterId, @RequestParam("reportingManagerId") String reportingManagerId, @RequestParam("currencyCode") String currencyCode, @RequestParam("mainSkillId") String mainSkillId, @RequestParam("timeMaterialTerms") int timeMaterialTerms, @RequestParam("poNum") int poNum, @RequestParam("hourlyrate") int hourlyRate ) { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); try { connection.openConnection(); if (!connection.isConnected()) { return Response.createErrorResponse("Edit engagement Contract: Failed to open database"); } java.sql.Date startDateSQL = getSQLDate(startDate); java.sql.Date endDateSQL = getSQLDate(endDate); PreparedStatement st = connection.getPreparedStatement(editEngagementContractorQuery); int i = 1; st.setDate(i++, startDateSQL); st.setDate(i++, endDateSQL); st.setString(i++, rateType); st.setString(i++, projectName); st.setString(i++, chargeType); st.setInt(i++, dailyAllowance); st.setString(i++, originalDocumentation); st.setInt(i++, terminationNum); st.setString(i++, contractorId); st.setString(i++, resourceId); st.setString(i++, hrPositionId); st.setString(i++, hrPayGradeId); st.setString(i++, costCenterId); st.setString(i++, reportingManagerId); st.setString(i++, currencyCode); st.setString(i++, mainSkillId); st.setInt(i++, timeMaterialTerms); st.setInt(i++, poNum); st.setInt(i++, hourlyRate); st.setString(i++, id); int success = st.executeUpdate(); if(success == 0) { return Response.createErrorResponse("Add Engagement Contract failed. 
SQL Update failed"); } connection.commitTransaction(); connection.closeConnection(); } catch (SQLException e) { Logger.getAnonymousLogger().log(Level.INFO, e.getMessage()); Response.createErrorResponse("Edit engagement Contract: " + e.getMessage()); } catch (ParseException e) { Logger.getAnonymousLogger().log(Level.INFO, e.getMessage()); Response.createErrorResponse("Edit engagement Contract: " + e.getMessage()); } return new Response(); } @CrossOrigin("*") @RequestMapping(value = "/contractors/add/engagementContract", method={RequestMethod.POST}) public Response addEngagementContract(@RequestParam("startDate") String startDate, @RequestParam("endDate") String endDate, @RequestParam("rateType") String rateType, @RequestParam("projectName") String projectName, @RequestParam("chargeType") String chargeType, @RequestParam("dailyAllowance") int dailyAllowance, @RequestParam("originalDocumentation") String originalDocumentation, @RequestParam("terminationNum") int terminationNum, @RequestParam("contractorId") String contractorId, @RequestParam("resourceId") String resourceId, @RequestParam("hrPositionId") String hrPositionId, @RequestParam("hrPayGradeId") String hrPayGradeId, @RequestParam("costCenterId") String costCenterId, @RequestParam("reportingManagerId") String reportingManagerId, @RequestParam("currencyCode") String currencyCode, @RequestParam("mainSkillId") String mainSkillId, @RequestParam("timeMaterialTerms") int timeMaterialTerms, @RequestParam("poNum") int poNum, @RequestParam("hourlyrate") int hourlyRate) { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); try { connection.openConnection(); if (!connection.isConnected()) { return Response.createErrorResponse("Add Engagement Contract: Error opening database connection"); } String engagementContractId = UUID.randomUUID().toString(); java.sql.Date startDateSQL = getSQLDate(startDate); java.sql.Date endDateSQL = getSQLDate(endDate); PreparedStatement st = connection.getPreparedStatement(insertEngagementContractQuery); int i = 1; st.setString(i++, engagementContractId); st.setDate(i++, startDateSQL); st.setDate(i++, endDateSQL); st.setString(i++, rateType); st.setString(i++, projectName); st.setString(i++, chargeType); st.setInt(i++, dailyAllowance); st.setString(i++, originalDocumentation); st.setInt(i++, terminationNum); st.setString(i++, contractorId); st.setString(i++, resourceId); st.setString(i++, hrPositionId); st.setString(i++, hrPayGradeId); st.setString(i++, costCenterId); st.setString(i++, reportingManagerId); st.setString(i++, currencyCode); st.setString(i++, mainSkillId); st.setInt(i++, timeMaterialTerms); st.setInt(i++, poNum); int success = st.executeUpdate(); if(success == 0) { return Response.createErrorResponse("Add Engagement Contract failed. 
SQL Update failed"); } connection.commitTransaction(); connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Add Engagement Contract Failed: " + e.getMessage()); return Response.createErrorResponse("Add Engagement Contract failed: " + e.getMessage()); } catch (ParseException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Add Engagement Contract Failed: " + e.getMessage()); return Response.createErrorResponse("Add Engagement Contract Failed:" + e.getMessage()); } return new Response(); } private java.sql.Date getSQLDate(String date) throws ParseException { Date dateParsed = new SimpleDateFormat(DATE_FORMAT).parse(date); return new java.sql.Date(dateParsed.getTime()); } @RequestMapping("/contractors/edit") public ContractorsResponse editContractor( @RequestParam("id") String id, @RequestParam("firstName") String firstName, @RequestParam("surname") String surname, @RequestParam("agencySource") String agencySource, @RequestParam("status") String status) { ArrayList<Contractor> contractors; try { contractors = editContractorImpl(id, firstName, surname, agencySource, status); } catch (SQLException e) { Logger.getAnonymousLogger().log(Level.INFO, e.getMessage()); return ContractorsResponse.contractorsFailure(e.getMessage()); } return new ContractorsResponse(contractors); } @RequestMapping("/contractors/viewAllData") public Response viewAllContractorData() { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); List<Contractor> allContractorData = new ArrayList<>(); try { connection.openConnection(); if (!connection.isConnected()) { return ContractorsResponse.createErrorResponse("View All Data Failed: Error opening database connection"); } PreparedStatement st = connection.getPreparedStatement(viewAllContractorDataQuery); ResultSet set = st.executeQuery(); Contractor lastContractor = null; while(set.next()) { String contractorId = set.getString("c.id"); if(lastContractor == null || !lastContractor.getId().equals(contractorId)) { //get contractor data lastContractor = new Contractor(contractorId, set.getString("c.firstName"), set.getString("c.surname"), set.getString("agencySource"), set.getString("status"), set.getBoolean("rehire")); allContractorData.add(lastContractor); } CostCenter costCenter = new CostCenter( set.getString("cc.id"), set.getString("location") ); HRPositionRole positionRole = new HRPositionRole( set.getString("p.id"), set.getString("roleName"), set.getString("p.description") ); HRPayGrade payGrade = new HRPayGrade( set.getString("pg.id"), set.getInt("startAmount"), set.getInt("endAmount"), set.getString("pg.name") ); Skill mainSkill = new Skill( set.getString("s.id"), set.getString("s.name"), set.getString("type"), set.getString("s.description") ); EngagementContract newContract = new EngagementContract( set.getString("e.id"), set.getDate("startDate"), set.getDate("endDate"), set.getString("rateType"), set.getString("projectName"), set.getString("chargeType"), set.getInt("dailyAllowance"), set.getString("originalDocumentation"), set.getInt("terminationNum"), costCenter, set.getString("currencyCode"), set.getInt("timeAndMaterialTerms"), set.getInt("poNum"), set.getInt("hourlyRate"), positionRole, payGrade, mainSkill, set.getBoolean("rehire") ); lastContractor.addEngagementContract(newContract); } connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "View all contractor 
data failed: " + e.getMessage()); return ContractorsResponse.createErrorResponse("View all contractor data failed: " + e.getMessage()); } return new ContractorsResponse(allContractorData); } }
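Note on the viewAllContractorData handler above: it leans on the ORDER BY c.id clause of viewAllContractorDataQuery so that joined rows for the same contractor arrive consecutively, and the single lastContractor reference is enough to attach each EngagementContract to its owner. A minimal, self-contained sketch of that grouping pattern; Row and Group are hypothetical stand-ins for the JOINed ResultSet and the Contractor model:

import java.util.ArrayList;
import java.util.List;

// Sketch of the "group consecutive rows by sorted key" pattern from viewAllContractorData.
class GroupingSketch {
    static class Row { final String key; final String detail; Row(String k, String d) { key = k; detail = d; } }
    static class Group { final String key; final List<String> details = new ArrayList<>(); Group(String k) { key = k; } }

    // Relies on rows being sorted by key (what ORDER BY c.id guarantees); unsorted
    // input would produce duplicate groups, just as the controller would duplicate contractors.
    static List<Group> group(List<Row> rowsSortedByKey) {
        List<Group> groups = new ArrayList<>();
        Group last = null;                        // mirrors 'lastContractor'
        for (Row row : rowsSortedByKey) {
            if (last == null || !last.key.equals(row.key)) {
                last = new Group(row.key);        // new contractor encountered
                groups.add(last);
            }
            last.details.add(row.detail);         // mirrors addEngagementContract(...)
        }
        return groups;
    }
}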
code/backend/src/main/java/server/rest/controllers/ContractorsController.java
package server.rest.controllers; import org.springframework.web.bind.annotation.*; import server.database.DatabaseConnection; import server.model.*; import server.rest.responses.ContractorsResponse; import server.rest.responses.Response; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; @CrossOrigin(origins = {"http://localhost:1234","http://theterminal.s3-website.us-west-2.amazonaws.com"}, methods = {RequestMethod.GET, RequestMethod.POST}) @RestController public class ContractorsController extends Controller { private final static String DATE_FORMAT = "yyyy-MM-dd"; private final static String getQuery = "select * from Contractor"; private final static String insertContractorQuery = "INSERT INTO Contractor(id, firstName, surname, agencySource, status, rehire) VALUES (?,?,?,?,?,?)"; private static final String editContractorQuery = "UPDATE Contractor SET firstName=?, surname=?, agencySource=?, status=? WHERE id=?"; private final static String insertEngagementContractQuery = "INSERT INTO EngagementContract(" + "id," + "startDate," + "endDate," + "rateType," + "projectName," + "chargeType," + "dailyAllowance," + "originalDocumentation," + "terminationNum," + "contractorId," + "resourceId," + "hrPositionId," + "hrPayGradeId," + "costCenterId," + "reportingManagerUserId," + "currencyCode," + "mainSkillId," + "timeAndMaterialTerms," + "poNum," + "hourlyRate)" + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; private final static String viewAllContractorDataQuery = "SELECT * FROM Contractor c\n" + "INNER JOIN EngagementContract e ON e.contractorId=c.id\n" + "INNER JOIN HRPositionRole p ON p.id=e.hrPositionId\n" + "INNER JOIN HRPayGrade pg ON pg.id=e.hrPayGradeId\n" + "INNER JOIN CostCenter cc on cc.id=e.costCenterId\n" + "INNER JOIN Skill s on s.id=e.mainSkillId\n" + "INNER JOIN HiringManager rp on rp.userId=e.reportingManagerUserId\n" + "ORDER BY c.id"; public ArrayList<Contractor> editContractorImpl(String id, String firstName, String lastName, String agencySource, String status) throws SQLException { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); ArrayList<Contractor> contractors = new ArrayList<Contractor>(); connection.openConnection(); if (!connection.isConnected()) { throw new SQLException("Failed to connect to database"); } PreparedStatement st = connection.getPreparedStatement(editContractorQuery); int index=1; st.setString(index++, firstName); st.setString(index++, lastName); st.setString(index++, agencySource); st.setString(index++, status); st.setString(index++, id); int success = st.executeUpdate(); if (success == 0) { throw new SQLException("Failed to update Contractor data"); } connection.commitTransaction(); connection.closeConnection(); Contractor c = new Contractor(id, firstName, lastName, agencySource, status, true); contractors.add(c); return contractors; } @RequestMapping("/contractors/view") public ContractorsResponse contractors() { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); ArrayList<Contractor> contractors = new ArrayList<Contractor>(); try { connection.openConnection(); if (!connection.isConnected()) { return ContractorsResponse.contractorsFailure("Failed to connect to database"); } 
PreparedStatement st = connection.getPreparedStatement(getQuery); ResultSet set = st.executeQuery(); while(set.next()) { Contractor c = new Contractor(set.getString("id"), set.getString("firstName"), set.getString("surname"), set.getString("agencySource"), set.getString("status"), set.getBoolean("rehire")); contractors.add(c); } connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Get Contractors Failed: " + e.getMessage()); return ContractorsResponse.contractorsFailure(e.getMessage()); } return new ContractorsResponse(contractors); } @RequestMapping("/contractors/add") public ContractorsResponse addContractor( @RequestParam("firstName") String firstName, @RequestParam("surname") String surName, @RequestParam("agencySource") String agencySource, @RequestParam("status") String status) { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); List<Contractor> newContractor = new ArrayList<>(); try { connection.openConnection(); if (!connection.isConnected()) { return ContractorsResponse.contractorsFailure("Failed to connect to database"); } String newContractorId = UUID.randomUUID().toString(); final boolean rehire = false; PreparedStatement st = connection.getPreparedStatement(insertContractorQuery); int i =1; st.setString(i++, newContractorId); st.setString(i++, firstName); st.setString(i++, surName); st.setString(i++, agencySource); st.setString(i++, status); st.setBoolean(i++, rehire); int success = st.executeUpdate(); connection.commitTransaction(); if(success == 0){ return ContractorsResponse.contractorsFailure("Failed to add contractor. SQL Update failed"); } Contractor contractor = new Contractor(newContractorId, firstName, surName, agencySource, status, false); newContractor.add(contractor); connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Add Contractor Failed: " + e.getMessage()); return ContractorsResponse.contractorsFailure(e.getMessage()); } return new ContractorsResponse(newContractor); } @CrossOrigin("*") @RequestMapping(value = "/contractors/add/engagementContract", method={RequestMethod.POST}) public Response addEngagementContract(@RequestParam("startDate") String startDate, @RequestParam("endDate") String endDate, @RequestParam("rateType") String rateType, @RequestParam("projectName") String projectName, @RequestParam("chargeType") String chargeType, @RequestParam("dailyAllowance") int dailyAllowance, @RequestParam("originalDocumentation") String originalDocumentation, @RequestParam("terminationNum") int terminationNum, @RequestParam("contractorId") String contractorId, @RequestParam("resourceId") String resourceId, @RequestParam("hrPositionId") String hrPositionId, @RequestParam("hrPayGradeId") String hrPayGradeId, @RequestParam("costCenterId") String costCenterId, @RequestParam("reportingManagerId") String reportingManagerId, @RequestParam("currencyCode") String currencyCode, @RequestParam("mainSkillId") String mainSkillId, @RequestParam("timeMaterialTerms") int timeMaterialTerms, @RequestParam("poNum") int poNum, @RequestParam("hourlyrate") int hourlyRate) { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); try { connection.openConnection(); if (!connection.isConnected()) { return Response.createErrorResponse("Add Engagement Contract: Error opening database connection"); } String engagementContractId = UUID.randomUUID().toString(); 
java.sql.Date startDateSQL = getSQLDate(startDate); java.sql.Date endDateSQL = getSQLDate(endDate); PreparedStatement st = connection.getPreparedStatement(insertEngagementContractQuery); int i = 1; st.setString(i++, engagementContractId); st.setDate(i++, startDateSQL); st.setDate(i++, endDateSQL); st.setString(i++, rateType); st.setString(i++, projectName); st.setString(i++, chargeType); st.setInt(i++, dailyAllowance); st.setString(i++, originalDocumentation); st.setInt(i++, terminationNum); st.setString(i++, contractorId); st.setString(i++, resourceId); st.setString(i++, hrPositionId); st.setString(i++, hrPayGradeId); st.setString(i++, costCenterId); st.setString(i++, reportingManagerId); st.setString(i++, currencyCode); st.setString(i++, mainSkillId); st.setInt(i++, timeMaterialTerms); st.setInt(i++, poNum); int success = st.executeUpdate(); connection.commitTransaction(); if(success == 0) { return Response.createErrorResponse("Add Engagement Contract failed. SQL Update failed"); } connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Add Engagement Contract Failed: " + e.getMessage()); return Response.createErrorResponse("Add Engagement Contract failed: " + e.getMessage()); } catch (ParseException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "Add Engagement Contract Failed: " + e.getMessage()); return Response.createErrorResponse("Add Engagement Contract Failed:" + e.getMessage()); } return new Response(); } private java.sql.Date getSQLDate(String date) throws ParseException { Date dateParsed = new SimpleDateFormat(DATE_FORMAT).parse(date); return new java.sql.Date(dateParsed.getTime()); } @RequestMapping("/contractors/edit") public ContractorsResponse editContractor( @RequestParam("id") String id, @RequestParam("firstName") String firstName, @RequestParam("surname") String surname, @RequestParam("agencySource") String agencySource, @RequestParam("status") String status) { ArrayList<Contractor> contractors; try { contractors = editContractorImpl(id, firstName, surname, agencySource, status); } catch (SQLException e) { Logger.getAnonymousLogger().log(Level.INFO, e.getMessage()); return ContractorsResponse.contractorsFailure(e.getMessage()); } return new ContractorsResponse(contractors); } @RequestMapping("/contractors/viewAllData") public Response viewAllContractorData() { DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); List<Contractor> allContractorData = new ArrayList<>(); try { connection.openConnection(); if (!connection.isConnected()) { return ContractorsResponse.createErrorResponse("View All Data Failed: Error opening database connection"); } PreparedStatement st = connection.getPreparedStatement(viewAllContractorDataQuery); ResultSet set = st.executeQuery(); Contractor lastContractor = null; while(set.next()) { String contractorId = set.getString("c.id"); if(lastContractor == null || !lastContractor.getId().equals(contractorId)) { //get contractor data lastContractor = new Contractor(contractorId, set.getString("c.firstName"), set.getString("c.surname"), set.getString("agencySource"), set.getString("status"), set.getBoolean("rehire")); allContractorData.add(lastContractor); } CostCenter costCenter = new CostCenter( set.getString("cc.id"), set.getString("location") ); HRPositionRole positionRole = new HRPositionRole( set.getString("p.id"), set.getString("roleName"), set.getString("p.description") ); HRPayGrade payGrade = new 
HRPayGrade( set.getString("pg.id"), set.getInt("startAmount"), set.getInt("endAmount"), set.getString("pg.name") ); Skill mainSkill = new Skill( set.getString("s.id"), set.getString("s.name"), set.getString("type"), set.getString("s.description") ); EngagementContract newContract = new EngagementContract( set.getString("e.id"), set.getDate("startDate"), set.getDate("endDate"), set.getString("rateType"), set.getString("projectName"), set.getString("chargeType"), set.getInt("dailyAllowance"), set.getString("originalDocumentation"), set.getInt("terminationNum"), costCenter, set.getString("currencyCode"), set.getInt("timeAndMaterialTerms"), set.getInt("poNum"), set.getInt("hourlyRate"), positionRole, payGrade, mainSkill, set.getBoolean("rehire") ); lastContractor.addEngagementContract(newContract); } connection.closeConnection(); } catch (SQLException e) { Logger logger = Logger.getAnonymousLogger(); logger.log(Level.INFO, "View all contractor data failed: " + e.getMessage()); return ContractorsResponse.createErrorResponse("View all contractor data failed: " + e.getMessage()); } return new ContractorsResponse(allContractorData); } }
Add link for edit engagement contract
code/backend/src/main/java/server/rest/controllers/ContractorsController.java
Add link for edit engagement contract
<ide><path>ode/backend/src/main/java/server/rest/controllers/ContractorsController.java <ide> "hourlyRate)" + <ide> "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"; <ide> <add> private final static String editEngagementContractorQuery = "UPDATE EngagementContract SET " + <add> "startDate=?, " + <add> "endDate=?, " + <add> "rateType=?, " + <add> "projectName=?, " + <add> "chargeType=?, " + <add> "dailyAllowance=?, " + <add> "originalDocumentation=?, " + <add> "terminationNum=?, " + <add> "contractorId=?, " + <add> "resourceId=?, " + <add> "hrPositionId=?, " + <add> "hrPayGradeId=?, " + <add> "costCenterId=?, " + <add> "reportingManagerUserId=?, " + <add> "currencyCode=?, " + <add> "mainSkillId=?, " + <add> "timeAndMaterialTerms=?, " + <add> "poNum=?, " + <add> "hourlyRate=? " + <add> "WHERE id=?"; <add> <ide> private final static String viewAllContractorDataQuery = "SELECT * FROM Contractor c\n" + <ide> "INNER JOIN EngagementContract e ON e.contractorId=c.id\n" + <ide> "INNER JOIN HRPositionRole p ON p.id=e.hrPositionId\n" + <ide> } <ide> <ide> return new ContractorsResponse(newContractor); <add> } <add> <add> @CrossOrigin("*") <add> @RequestMapping(value = "/contractors/edit/engagementContract", method={RequestMethod.POST}) <add> public Response editEngagementContract( <add> @RequestParam("id") String id, <add> @RequestParam("startDate") String startDate, <add> @RequestParam("endDate") String endDate, <add> @RequestParam("rateType") String rateType, <add> @RequestParam("projectName") String projectName, <add> @RequestParam("chargeType") String chargeType, <add> @RequestParam("dailyAllowance") int dailyAllowance, <add> @RequestParam("originalDocumentation") String originalDocumentation, <add> @RequestParam("terminationNum") int terminationNum, <add> @RequestParam("contractorId") String contractorId, <add> @RequestParam("resourceId") String resourceId, <add> @RequestParam("hrPositionId") String hrPositionId, <add> @RequestParam("hrPayGradeId") String hrPayGradeId, <add> @RequestParam("costCenterId") String costCenterId, <add> @RequestParam("reportingManagerId") String reportingManagerId, <add> @RequestParam("currencyCode") String currencyCode, <add> @RequestParam("mainSkillId") String mainSkillId, <add> @RequestParam("timeMaterialTerms") int timeMaterialTerms, <add> @RequestParam("poNum") int poNum, <add> @RequestParam("hourlyrate") int hourlyRate <add> ) { <add> DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword); <add> try { <add> connection.openConnection(); <add> if (!connection.isConnected()) { <add> return Response.createErrorResponse("Edit engagement Contract: Failed to open database"); <add> } <add> java.sql.Date startDateSQL = getSQLDate(startDate); <add> java.sql.Date endDateSQL = getSQLDate(endDate); <add> <add> PreparedStatement st = connection.getPreparedStatement(editEngagementContractorQuery); <add> int i = 1; <add> st.setDate(i++, startDateSQL); <add> st.setDate(i++, endDateSQL); <add> st.setString(i++, rateType); <add> st.setString(i++, projectName); <add> st.setString(i++, chargeType); <add> st.setInt(i++, dailyAllowance); <add> st.setString(i++, originalDocumentation); <add> st.setInt(i++, terminationNum); <add> st.setString(i++, contractorId); <add> st.setString(i++, resourceId); <add> st.setString(i++, hrPositionId); <add> st.setString(i++, hrPayGradeId); <add> st.setString(i++, costCenterId); <add> st.setString(i++, reportingManagerId); <add> st.setString(i++, currencyCode); <add> st.setString(i++, mainSkillId); <add> 
st.setInt(i++, timeMaterialTerms); <add> st.setInt(i++, poNum); <add> st.setInt(i++, hourlyRate); <add> st.setString(i++, id); <add> int success = st.executeUpdate(); <add> if(success == 0) { <add> return Response.createErrorResponse("Add Engagement Contract failed. SQL Update failed"); <add> } <add> connection.commitTransaction(); <add> connection.closeConnection(); <add> } catch (SQLException e) { <add> Logger.getAnonymousLogger().log(Level.INFO, e.getMessage()); <add> Response.createErrorResponse("Edit engagement Contract: " + e.getMessage()); <add> } catch (ParseException e) { <add> Logger.getAnonymousLogger().log(Level.INFO, e.getMessage()); <add> Response.createErrorResponse("Edit engagement Contract: " + e.getMessage()); <add> } <add> return new Response(); <ide> } <ide> <ide> @CrossOrigin("*") <ide> st.setInt(i++, poNum); <ide> <ide> int success = st.executeUpdate(); <del> connection.commitTransaction(); <ide> if(success == 0) { <ide> return Response.createErrorResponse("Add Engagement Contract failed. SQL Update failed"); <ide> } <add> connection.commitTransaction(); <ide> connection.closeConnection(); <ide> } catch (SQLException e) { <ide>
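A hazard worth flagging in the controller above: connection.closeConnection() sits on the success path only, so any SQLException or ParseException leaves the DatabaseConnection open. A hedged sketch of the same contractor update written against plain JDBC with try-with-resources; the javax.sql.DataSource here is an assumption standing in for the project's DatabaseConnection wrapper:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

// Sketch only: a standard JDBC DataSource replaces the project's DatabaseConnection
// wrapper to show automatic cleanup via try-with-resources.
class ContractorUpdateSketch {
    private static final String EDIT_SQL =
        "UPDATE Contractor SET firstName=?, surname=?, agencySource=?, status=? WHERE id=?";

    static boolean editContractor(DataSource ds, String id, String firstName,
                                  String surname, String agencySource, String status)
            throws SQLException {
        try (Connection conn = ds.getConnection();
             PreparedStatement st = conn.prepareStatement(EDIT_SQL)) {
            int i = 1;
            st.setString(i++, firstName);
            st.setString(i++, surname);
            st.setString(i++, agencySource);
            st.setString(i++, status);
            st.setString(i++, id);
            return st.executeUpdate() > 0;   // connection closes even if this throws
        }
    }
}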
Java
apache-2.0
9ea794548c48d3d2ce26719afab477d353f5e7a1
0
Comcast/cmb,KrithikaGanesh/cmb,Comcast/cmb,Comcast/cmb,KrithikaGanesh/cmb,KrithikaGanesh/cmb
/** * Copyright 2012 Comcast Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.comcast.cmb.common.controller; import java.io.IOException; import javax.servlet.AsyncContext; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.comcast.cmb.common.model.CMBPolicy; import com.comcast.cmb.common.model.User; import com.comcast.cmb.common.util.CMBProperties; import com.comcast.cmb.common.util.ValueAccumulator.AccumulatorName; /** * Abstract class representing all actions that can be performed by calling the API * @author aseem, bwolf, vvenkatraman, baosen */ public abstract class Action { protected final String actionName; public Action(String actionName) { this.actionName = actionName; } public String getName() { return actionName; } /** * Perform servlet action for cqs or cns * @param user user object for authenticated user * @param asyncContext async context for http request and response objects * @throws Exception * @return true if this action was performed, false otherwise. It is largely dependent * on the sub-classes to override this return value with what makes sense. */ public abstract boolean doAction(User user, AsyncContext asyncContext) throws Exception; /** * Check if an action on resource is allowed * @param policy contains a set of statement for user's permission of actions on resource * @param user authenticated user to perform the action * @param action a string for action */ public abstract boolean isActionAllowed(User user, HttpServletRequest request, String service, CMBPolicy policy) throws Exception; /** * Sub-classes should override this as necessary * @return true if this action requries auth */ public boolean isAuthRequired() { return true; } /** * Write response back * @param content * @param res * @throws IOException */ public static void writeResponse(String content, HttpServletResponse response) throws IOException { long ts1 = System.currentTimeMillis(); if (CMBProperties.getInstance().useCmbIOBuffers()) { byte buffer[] = content.getBytes(); int blockSize = Math.min(Math.max(buffer.length/4096, 1)*4096, 16*4096); response.setBufferSize(blockSize); response.setContentLength(buffer.length); ServletOutputStream out = response.getOutputStream(); int numBlocks = buffer.length/blockSize; for (int i=0; i<numBlocks;i++) { out.write(buffer, i*blockSize, blockSize); } int remainingBytes = buffer.length-(numBlocks*blockSize); if (remainingBytes > 0) { out.write(buffer, numBlocks*blockSize, remainingBytes); } out.flush(); } else { response.setContentLength(content.length()); response.getWriter().println(content); response.getWriter().flush(); } long ts2 = System.currentTimeMillis(); CMBControllerServlet.valueAccumulator.addToCounter(AccumulatorName.IOTime, ts2-ts1); } }
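The writeResponse method above sizes the servlet output buffer to the payload length rounded down to a 4 KiB multiple, clamped between 4 KiB and 64 KiB, then emits full blocks followed by the remainder. A standalone check of that arithmetic, with assumed payload lengths printed for illustration:

// Standalone check of the block-size arithmetic used in writeResponse above.
public class BlockSizeDemo {
    static int blockSize(int length) {
        return Math.min(Math.max(length / 4096, 1) * 4096, 16 * 4096);
    }

    public static void main(String[] args) {
        for (int len : new int[] {100, 10_000, 1_000_000}) {
            int block = blockSize(len);
            int fullBlocks = len / block;
            int remainder = len - fullBlocks * block;
            // e.g. len=10000 -> block=8192, 1 full block, 1808 trailing bytes
            System.out.printf("len=%d block=%d fullBlocks=%d remainder=%d%n",
                    len, block, fullBlocks, remainder);
        }
    }
}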
src/com/comcast/cmb/common/controller/Action.java
/** * Copyright 2012 Comcast Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.comcast.cmb.common.controller; import java.io.IOException; import javax.servlet.AsyncContext; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.comcast.cmb.common.model.CMBPolicy; import com.comcast.cmb.common.model.User; /** * Abstract class representing all actions that can be performed by calling the API * @author aseem, bwolf, vvenkatraman, baosen */ public abstract class Action { protected final String actionName; public Action(String actionName) { this.actionName = actionName; } public String getName() { return actionName; } /** * Perform servlet action for cqs or cns * @param user user object for authenticated user * @param asyncContext async context for http request and response objects * @throws Exception * @return true if this action was performed, false otherwise. It is largely dependent * on the sub-classes to override this return value with what makes sense. */ public abstract boolean doAction(User user, AsyncContext asyncContext) throws Exception; /** * Check if an action on resource is allowed * @param policy contains a set of statement for user's permission of actions on resource * @param user authenticated user to perform the action * @param action a string for action */ public abstract boolean isActionAllowed(User user, HttpServletRequest request, String service, CMBPolicy policy) throws Exception; /** * Sub-classes should override this as necessary * @return true if this action requries auth */ public boolean isAuthRequired() { return true; } /** * Write response back * @param content * @param res * @throws IOException */ public static void writeResponse(String content, HttpServletResponse response) throws IOException { byte buffer[] = content.getBytes(); int blockSize = Math.min(Math.max(buffer.length/4096, 1)*4096, 16*4096); response.setBufferSize(blockSize); response.setContentLength(buffer.length); ServletOutputStream out = response.getOutputStream(); int numBlocks = buffer.length/blockSize; for (int i=0; i<numBlocks;i++) { out.write(buffer, i*blockSize, blockSize); } int remainingBytes = buffer.length-(numBlocks*blockSize); if (remainingBytes > 0) { out.write(buffer, numBlocks*blockSize, remainingBytes); } out.flush(); //response.setContentLength(content.length()); //response.getWriter().println(content); //response.getWriter().flush(); } }
io time logging
src/com/comcast/cmb/common/controller/Action.java
io time logging
<ide><path>rc/com/comcast/cmb/common/controller/Action.java <ide> <ide> import com.comcast.cmb.common.model.CMBPolicy; <ide> import com.comcast.cmb.common.model.User; <add>import com.comcast.cmb.common.util.CMBProperties; <add>import com.comcast.cmb.common.util.ValueAccumulator.AccumulatorName; <ide> /** <ide> * Abstract class representing all actions that can be performed by calling the API <ide> * @author aseem, bwolf, vvenkatraman, baosen <ide> */ <ide> public static void writeResponse(String content, HttpServletResponse response) throws IOException { <ide> <del> byte buffer[] = content.getBytes(); <del> int blockSize = Math.min(Math.max(buffer.length/4096, 1)*4096, 16*4096); <del> response.setBufferSize(blockSize); <del> response.setContentLength(buffer.length); <del> ServletOutputStream out = response.getOutputStream(); <del> int numBlocks = buffer.length/blockSize; <del> for (int i=0; i<numBlocks;i++) { <del> out.write(buffer, i*blockSize, blockSize); <add> long ts1 = System.currentTimeMillis(); <add> <add> if (CMBProperties.getInstance().useCmbIOBuffers()) { <add> <add> byte buffer[] = content.getBytes(); <add> int blockSize = Math.min(Math.max(buffer.length/4096, 1)*4096, 16*4096); <add> response.setBufferSize(blockSize); <add> response.setContentLength(buffer.length); <add> ServletOutputStream out = response.getOutputStream(); <add> int numBlocks = buffer.length/blockSize; <add> for (int i=0; i<numBlocks;i++) { <add> out.write(buffer, i*blockSize, blockSize); <add> } <add> int remainingBytes = buffer.length-(numBlocks*blockSize); <add> if (remainingBytes > 0) { <add> out.write(buffer, numBlocks*blockSize, remainingBytes); <add> } <add> out.flush(); <add> <add> } else { <add> <add> response.setContentLength(content.length()); <add> response.getWriter().println(content); <add> response.getWriter().flush(); <ide> } <del> int remainingBytes = buffer.length-(numBlocks*blockSize); <del> if (remainingBytes > 0) { <del> out.write(buffer, numBlocks*blockSize, remainingBytes); <del> } <del> out.flush(); <ide> <del> //response.setContentLength(content.length()); <del> //response.getWriter().println(content); <del> //response.getWriter().flush(); <add> long ts2 = System.currentTimeMillis(); <add> CMBControllerServlet.valueAccumulator.addToCounter(AccumulatorName.IOTime, ts2-ts1); <ide> } <ide> }
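The diff above brackets the write with a System.currentTimeMillis() pair and feeds the delta into CMBControllerServlet.valueAccumulator under AccumulatorName.IOTime. A minimal sketch of such a per-name counter; the real com.comcast.cmb ValueAccumulator may be shaped differently:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical sketch of a thread-safe per-name timing accumulator.
class AccumulatorSketch {
    enum Name { IOTime }

    private final ConcurrentHashMap<Name, AtomicLong> counters = new ConcurrentHashMap<>();

    void addToCounter(Name name, long delta) {
        counters.computeIfAbsent(name, n -> new AtomicLong()).addAndGet(delta);
    }

    long get(Name name) {
        AtomicLong c = counters.get(name);
        return c == null ? 0L : c.get();
    }
}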
Java
mit
b97642b7cf56f3b98837fbe060b8e2a055432345
0
Skywalker-11/spongycastle,bcgit/bc-java,open-keychain/spongycastle,bcgit/bc-java,bcgit/bc-java,isghe/bc-java,isghe/bc-java,Skywalker-11/spongycastle,open-keychain/spongycastle,isghe/bc-java,open-keychain/spongycastle,Skywalker-11/spongycastle
package org.bouncycastle.math; import java.math.BigInteger; import java.security.SecureRandom; import org.bouncycastle.crypto.Digest; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.BigIntegers; public abstract class Primes { private static final BigInteger ONE = BigInteger.valueOf(1); private static final BigInteger TWO = BigInteger.valueOf(2); private static final BigInteger THREE = BigInteger.valueOf(3); /** * Used to return the output from the * {@linkplain Primes#enhancedMRProbablePrimeTest(BigInteger, SecureRandom, int) Enhanced * Miller-Rabin Probabilistic Primality Test} */ public static class MROutput { private static MROutput probablyPrime() { return new MROutput(false, null); } private static MROutput provablyCompositeWithFactor(BigInteger factor) { return new MROutput(true, factor); } private static MROutput provablyCompositeNotPrimePower() { return new MROutput(true, null); } private boolean provablyComposite; private BigInteger factor; private MROutput(boolean provablyComposite, BigInteger factor) { this.provablyComposite = provablyComposite; this.factor = factor; } public BigInteger getFactor() { return factor; } public boolean isProvablyComposite() { return provablyComposite; } public boolean isNotPrimePower() { return provablyComposite && factor == null; } } /** * Used to return the output from the * {@linkplain Primes#generateSTRandomPrime(Digest, int, byte[]) Shawe-Taylor Random_Prime * Routine} */ public static class STOutput { private BigInteger prime; private byte[] primeSeed; private int primeGenCounter; private STOutput(BigInteger prime, byte[] primeSeed, int primeGenCounter) { this.prime = prime; this.primeSeed = primeSeed; this.primeGenCounter = primeGenCounter; } public BigInteger getPrime() { return prime; } public byte[] getPrimeSeed() { return primeSeed; } public int getPrimeGenCounter() { return primeGenCounter; } } /** * FIPS 186-4 C.6 Shawe-Taylor Random_Prime Routine * * Construct a provable prime number using a hash function. * * @param hash * the {@link Digest} instance to use (as "Hash()"). Cannot be null. * @param length * the length (in bits) of the prime to be generated. Must be at least 2. * @param inputSeed * the seed to be used for the generation of the requested prime. Cannot be null or * empty. * @return an {@link STOutput} instance containing the requested prime. */ public static STOutput generateSTRandomPrime(Digest hash, int length, byte[] inputSeed) { if (hash == null) { throw new IllegalArgumentException("'hash' cannot be null"); } if (length < 2) { throw new IllegalArgumentException("'length' must be >= 2"); } if (inputSeed == null || inputSeed.length == 0) { throw new IllegalArgumentException("'inputSeed' cannot be null or empty"); } return implSTRandomPrime(hash, length, Arrays.clone(inputSeed)); } /** * FIPS 186-4 C.3.2 Enhanced Miller-Rabin Probabilistic Primality Test * * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases. This is an * alternative to {@link #isMRProbablePrime(BigInteger, SecureRandom, int)} that provides more * information about a composite candidate, which may be useful when generating or validating * RSA moduli. * * @param candidate * the {@link BigInteger} instance to test for primality. * @param random * the source of randomness to use to choose bases. * @param iterations * the number of randomly-chosen bases to perform the test for. * @return an {@link MROutput} instance that can be further queried for details. 
*/ public static MROutput enhancedMRProbablePrimeTest(BigInteger candidate, SecureRandom random, int iterations) { checkCandidate(candidate, "candidate"); if (random == null) { throw new IllegalArgumentException("'random' cannot be null"); } if (iterations < 1) { throw new IllegalArgumentException("'iterations' must be > 0"); } if (candidate.bitLength() == 2) { return MROutput.probablyPrime(); } if (!candidate.testBit(0)) { return MROutput.provablyCompositeWithFactor(TWO); } BigInteger w = candidate; BigInteger wSubOne = candidate.subtract(ONE); BigInteger wSubTwo = candidate.subtract(TWO); int a = wSubOne.getLowestSetBit(); BigInteger m = wSubOne.shiftRight(a); for (int i = 0; i < iterations; ++i) { BigInteger b = BigIntegers.createRandomInRange(TWO, wSubTwo, random); BigInteger g = b.gcd(w); if (g.compareTo(ONE) > 0) { return MROutput.provablyCompositeWithFactor(g); } BigInteger z = b.modPow(m, w); if (z.equals(ONE) || z.equals(wSubOne)) { continue; } boolean primeToBase = false; BigInteger x = z; for (int j = 1; j < a; ++j) { z = z.modPow(TWO, w); if (z.equals(wSubOne)) { primeToBase = true; break; } if (z.equals(ONE)) { break; } x = z; } if (!primeToBase) { if (!z.equals(ONE)) { x = z; z = z.modPow(TWO, w); if (!z.equals(ONE)) { x = z; } } g = x.subtract(ONE).gcd(w); if (g.compareTo(ONE) > 0) { return MROutput.provablyCompositeWithFactor(g); } return MROutput.provablyCompositeNotPrimePower(); } } return MROutput.probablyPrime(); } /** * A fast check for small divisors, up to some implementation-specific limit. * * @param candidate * the {@link BigInteger} instance to test for division by small factors. * * @return <code>true</code> if the candidate is found to have any small factors, * <code>false</code> otherwise. */ public static boolean hasAnySmallFactors(BigInteger candidate) { checkCandidate(candidate, "candidate"); return implHasAnySmallFactors(candidate); } /** * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test * * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases. * * @param candidate * the {@link BigInteger} instance to test for primality. * @param random * the source of randomness to use to choose bases. * @param iterations * the number of randomly-chosen bases to perform the test for. * @return <code>false</code> if any witness to compositeness is found amongst the chosen bases * (so <code>candidate</code> is definitely NOT prime), or else <code>true</code> * (indicating primality with some probability dependent on the number of iterations * that were performed). */ public static boolean isMRProbablePrime(BigInteger candidate, SecureRandom random, int iterations) { checkCandidate(candidate, "candidate"); if (random == null) { throw new IllegalArgumentException("'random' cannot be null"); } if (iterations < 1) { throw new IllegalArgumentException("'iterations' must be > 0"); } if (candidate.bitLength() == 2) { return true; } if (!candidate.testBit(0)) { return false; } BigInteger w = candidate; BigInteger wSubOne = candidate.subtract(ONE); BigInteger wSubTwo = candidate.subtract(TWO); int a = wSubOne.getLowestSetBit(); BigInteger m = wSubOne.shiftRight(a); for (int i = 0; i < iterations; ++i) { BigInteger b = BigIntegers.createRandomInRange(TWO, wSubTwo, random); if (!implMRProbablePrimeToBase(w, wSubOne, m, a, b)) { return false; } } return true; } /** * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test (to a fixed base). * * Run a single iteration of the Miller-Rabin algorithm against the specified base. 
* * @param candidate * the {@link BigInteger} instance to test for primality. * @param base * the source of randomness to use to choose bases. * @return <code>false</code> if the specified base is a witness to compositeness (so * <code>candidate</code> is definitely NOT prime), or else <code>true</code>. */ public static boolean isMRProbablePrimeToBase(BigInteger candidate, BigInteger base) { checkCandidate(candidate, "candidate"); checkCandidate(base, "base"); if (base.compareTo(candidate.subtract(ONE)) >= 0) { throw new IllegalArgumentException("'base' must be < ('candidate' - 1)"); } if (candidate.bitLength() == 2) { return true; } BigInteger w = candidate; BigInteger wSubOne = candidate.subtract(ONE); int a = wSubOne.getLowestSetBit(); BigInteger m = wSubOne.shiftRight(a); return implMRProbablePrimeToBase(w, wSubOne, m, a, base); } private static void checkCandidate(BigInteger n, String name) { if (n == null || n.signum() < 1 || n.bitLength() < 2) { throw new IllegalArgumentException("'" + name + "' must be non-null and >= 2"); } } private static boolean implHasAnySmallFactors(BigInteger x) { /* * Bundle trial divisors into ~32-bit moduli then use fast tests on the ~32-bit remainders. */ int m = 2 * 3 * 5 * 7 * 11 * 13 * 17 * 19 * 23; int r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r & 1) != 0 && (r % 3) != 0 && (r % 5) != 0 && (r % 7) != 0 && (r % 11) != 0 && (r % 13) != 0 && (r % 17) != 0 && (r % 19) != 0 && (r % 23) != 0) { m = 29 * 31 * 37 * 41 * 43; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 29) != 0 && (r % 31) != 0 && (r % 37) != 0 && (r % 41) != 0 && (r % 43) != 0) { m = 47 * 53 * 59 * 61 * 67; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 47) != 0 && (r % 53) != 0 && (r % 59) != 0 && (r % 61) != 0 && (r % 67) != 0) { m = 71 * 73 * 79 * 83; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 71) != 0 && (r % 73) != 0 && (r % 79) != 0 && (r % 83) != 0) { m = 89 * 97 * 101 * 103; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 89) != 0 && (r % 97) != 0 && (r % 101) != 0 && (r % 103) != 0) { m = 107 * 109 * 113 * 127; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 107) != 0 && (r % 109) != 0 && (r % 113) != 0 && (r % 127) != 0) { return false; } } } } } } return true; } private static boolean implMRProbablePrimeToBase(BigInteger w, BigInteger wSubOne, BigInteger m, int a, BigInteger b) { BigInteger z = b.modPow(m, w); if (z.equals(ONE) || z.equals(wSubOne)) { return true; } boolean result = false; for (int j = 1; j < a; ++j) { z = z.modPow(TWO, w); if (z.equals(wSubOne)) { result = true; break; } if (z.equals(ONE)) { return false; } } return result; } private static STOutput implSTRandomPrime(Digest d, int length, byte[] primeSeed) { int dLen = d.getDigestSize(); if (length < 33) { int primeGenCounter = 0; byte[] c0 = new byte[dLen]; byte[] c1 = new byte[dLen]; for (;;) { hash(d, primeSeed, c0, 0); inc(primeSeed, 1); hash(d, primeSeed, c1, 0); inc(primeSeed, 1); int c = extract32(c0) ^ extract32(c1); c &= (-1 >>> (32 - length)); c |= (1 << (length - 1)) | 1; ++primeGenCounter; long c64 = c & 0xFFFFFFFFL; if (isPrime32(c64)) { return new STOutput(BigInteger.valueOf(c64), primeSeed, primeGenCounter); } if (primeGenCounter > (4 * length)) { throw new IllegalStateException("Too many iterations in Shawe-Taylor Random_Prime Routine"); } } } STOutput rec = implSTRandomPrime(d, (length + 3)/2, primeSeed); BigInteger c0 = rec.getPrime(); primeSeed = rec.getPrimeSeed(); int primeGenCounter = rec.getPrimeGenCounter(); int outlen = 8 * dLen; int 
iterations = (length - 1)/outlen; int oldCounter = primeGenCounter; BigInteger x = hashGen(d, primeSeed, iterations + 1); x = x.mod(ONE.shiftLeft(length - 1)).setBit(length - 1); BigInteger c0x2 = c0.shiftLeft(1); BigInteger tx2 = x.subtract(ONE).divide(c0x2).add(ONE).shiftLeft(1); int dt = 0; BigInteger c = tx2.multiply(c0).add(ONE); /* * TODO Since the candidate primes are generated by constant steps ('c0x2'), * sieving could be used here in place of the 'mightBePrime' approach. */ for (;;) { if (c.bitLength() > length) { tx2 = ONE.shiftLeft(length - 1).subtract(ONE).divide(c0x2).add(ONE).shiftLeft(1); c = tx2.multiply(c0).add(ONE); } ++primeGenCounter; /* * This is an optimization of the original algorithm, using trial division to screen out * many non-primes quickly. * * NOTE: 'primeSeed' is still incremented as if we performed the full check! */ if (!implHasAnySmallFactors(c)) { BigInteger a = hashGen(d, primeSeed, iterations + 1); a = a.mod(c.subtract(THREE)).add(TWO); tx2 = tx2.add(BigInteger.valueOf(dt)); dt = 0; BigInteger z = a.modPow(tx2, c); if (c.gcd(z.subtract(ONE)).equals(ONE) && z.modPow(c0, c).equals(ONE)) { return new STOutput(c, primeSeed, primeGenCounter); } } else { inc(primeSeed, iterations + 1); } if (primeGenCounter >= ((4 * length) + oldCounter)) { throw new IllegalStateException("Too many iterations in Shawe-Taylor Random_Prime Routine"); } dt += 2; c = c.add(c0x2); } } private static int extract32(byte[] bs) { int result = 0; int count = Math.min(4, bs.length); for (int i = 0; i < count; ++i) { int b = bs[bs.length - (i + 1)] & 0xFF; result |= (b << (8 * i)); } return result; } private static void hash(Digest d, byte[] input, byte[] output, int outPos) { d.update(input, 0, input.length); d.doFinal(output, outPos); } private static BigInteger hashGen(Digest d, byte[] seed, int count) { int dLen = d.getDigestSize(); int pos = count * dLen; byte[] buf = new byte[pos]; for (int i = 0; i < count; ++i) { pos -= dLen; hash(d, seed, buf, pos); inc(seed, 1); } return new BigInteger(1, buf); } private static void inc(byte[] seed, int c) { int pos = seed.length; while (c > 0 && --pos >= 0) { c += (seed[pos] & 0xFF); seed[pos] = (byte)c; c >>>= 8; } } private static boolean isPrime32(long x) { if (x >>> 32 != 0L) { throw new IllegalArgumentException("Size limit exceeded"); } /* * Use wheel factorization with 2, 3, 5 to select trial divisors. */ if (x <= 5L) { return x == 2L || x == 3L || x == 5L; } if ((x & 1L) == 0L || (x % 3L) == 0L || (x % 5L) == 0L) { return false; } long[] ds = new long[]{ 1L, 7L, 11L, 13L, 17L, 19L, 23L, 29L }; long base = 0L; for (int pos = 1; ; pos = 0) { /* * Trial division by wheel-selected divisors */ while (pos < ds.length) { long d = base + ds[pos]; if (x % d == 0L) { return x < 30L; } ++pos; } base += 30L; if (base * base >= x) { return true; } } } }
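For context, a hedged usage sketch of the public Primes API above; it assumes Bouncy Castle's org.bouncycastle.crypto.digests.SHA256Digest is available from the same module, which this file does not itself reference:

import java.math.BigInteger;
import java.security.SecureRandom;
import org.bouncycastle.crypto.digests.SHA256Digest;
import org.bouncycastle.math.Primes;

// Usage sketch for the public API above.
public class PrimesDemo {
    public static void main(String[] args) {
        SecureRandom random = new SecureRandom();

        // Provable prime via the Shawe-Taylor routine (FIPS 186-4 C.6).
        byte[] seed = new byte[32];
        random.nextBytes(seed);
        Primes.STOutput st = Primes.generateSTRandomPrime(new SHA256Digest(), 128, seed);
        System.out.println("provable prime: " + st.getPrime()
                + " (counter=" + st.getPrimeGenCounter() + ")");

        // Probabilistic check of an arbitrary odd 128-bit candidate (FIPS 186-4 C.3.1),
        // screening small factors first as the implementation itself suggests.
        BigInteger candidate = new BigInteger(128, random).setBit(127).setBit(0);
        boolean probablyPrime = !Primes.hasAnySmallFactors(candidate)
                && Primes.isMRProbablePrime(candidate, random, 64);
        System.out.println(candidate + " probably prime? " + probablyPrime);
    }
}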
core/src/main/java/org/bouncycastle/math/Primes.java
package org.bouncycastle.math; import java.math.BigInteger; import java.security.SecureRandom; import org.bouncycastle.crypto.Digest; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.BigIntegers; public abstract class Primes { private static final BigInteger ONE = BigInteger.valueOf(1); private static final BigInteger TWO = BigInteger.valueOf(2); private static final BigInteger THREE = BigInteger.valueOf(3); /** * Used to return the output from the * {@linkplain Primes#enhancedMRProbablePrimeTest(BigInteger, SecureRandom, int) Enhanced * Miller-Rabin Probabilistic Primality Test} */ public static class MROutput { private static MROutput probablyPrime() { return new MROutput(false, null); } private static MROutput provablyCompositeWithFactor(BigInteger factor) { return new MROutput(true, factor); } private static MROutput provablyCompositeNotPrimePower() { return new MROutput(true, null); } private boolean provablyComposite; private BigInteger factor; private MROutput(boolean provablyComposite, BigInteger factor) { this.provablyComposite = provablyComposite; this.factor = factor; } public BigInteger getFactor() { return factor; } public boolean isProvablyComposite() { return provablyComposite; } public boolean isNotPrimePower() { return provablyComposite && factor == null; } } /** * Used to return the output from the * {@linkplain Primes#generateSTRandomPrime(Digest, int, byte[]) Shawe-Taylor Random_Prime * Routine} */ public static class STOutput { private BigInteger prime; private byte[] primeSeed; private int primeGenCounter; private STOutput(BigInteger prime, byte[] primeSeed, int primeGenCounter) { this.prime = prime; this.primeSeed = primeSeed; this.primeGenCounter = primeGenCounter; } public BigInteger getPrime() { return prime; } public byte[] getPrimeSeed() { return primeSeed; } public int getPrimeGenCounter() { return primeGenCounter; } } /** * FIPS 186-4 C.6 Shawe-Taylor Random_Prime Routine * * Construct a provable prime number using a hash function. * * @param hash * the {@link Digest} instance to use (as "Hash()"). Cannot be null. * @param length * the length (in bits) of the prime to be generated. Must be at least 2. * @param inputSeed * the seed to be used for the generation of the requested prime. Cannot be null or * empty. * @return an {@link STOutput} instance containing the requested prime. */ public static STOutput generateSTRandomPrime(Digest hash, int length, byte[] inputSeed) { if (hash == null) { throw new IllegalArgumentException("'hash' cannot be null"); } if (length < 2) { throw new IllegalArgumentException("'length' must be >= 2"); } if (inputSeed == null || inputSeed.length == 0) { throw new IllegalArgumentException("'inputSeed' cannot be null or empty"); } return implSTRandomPrime(hash, length, Arrays.clone(inputSeed)); } /** * FIPS 186-4 C.3.2 Enhanced Miller-Rabin Probabilistic Primality Test * * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases. This is an * alternative to {@link #isMRProbablePrime(BigInteger, SecureRandom, int)} that provides more * information about a composite candidate, which may be useful when generating or validating * RSA moduli. * * @param candidate * the {@link BigInteger} instance to test for primality. * @param random * the source of randomness to use to choose bases. * @param iterations * the number of randomly-chosen bases to perform the test for. * @return an {@link MROutput} instance that can be further queried for details. 
*/ public static MROutput enhancedMRProbablePrimeTest(BigInteger candidate, SecureRandom random, int iterations) { checkCandidate(candidate, "candidate"); if (random == null) { throw new IllegalArgumentException("'random' cannot be null"); } if (iterations < 1) { throw new IllegalArgumentException("'iterations' must be > 0"); } if (candidate.bitLength() == 2) { return MROutput.probablyPrime(); } if (!candidate.testBit(0)) { return MROutput.provablyCompositeWithFactor(TWO); } BigInteger w = candidate; BigInteger wSubOne = candidate.subtract(ONE); BigInteger wSubTwo = candidate.subtract(TWO); int a = wSubOne.getLowestSetBit(); BigInteger m = wSubOne.shiftRight(a); for (int i = 0; i < iterations; ++i) { BigInteger b = BigIntegers.createRandomInRange(TWO, wSubTwo, random); BigInteger g = b.gcd(w); if (g.compareTo(ONE) > 0) { return MROutput.provablyCompositeWithFactor(g); } BigInteger z = b.modPow(m, w); if (z.equals(ONE) || z.equals(wSubOne)) { continue; } boolean primeToBase = false; BigInteger x = z; for (int j = 1; j < a; ++j) { z = z.modPow(TWO, w); if (z.equals(wSubOne)) { primeToBase = true; break; } if (z.equals(ONE)) { break; } x = z; } if (!primeToBase) { if (!z.equals(ONE)) { x = z; z = z.modPow(TWO, w); if (!z.equals(ONE)) { x = z; } } g = x.subtract(ONE).gcd(w); if (g.compareTo(ONE) > 0) { return MROutput.provablyCompositeWithFactor(g); } return MROutput.provablyCompositeNotPrimePower(); } } return MROutput.probablyPrime(); } /** * A fast check for small divisors, up to some implementation-specific limit. * * @param candidate * the {@link BigInteger} instance to test for division by small factors. * * @return <code>true</code> if the candidate is found to have any small factors, * <code>false</code> otherwise. */ public static boolean hasAnySmallFactors(BigInteger candidate) { checkCandidate(candidate, "candidate"); return implHasAnySmallFactors(candidate); } /** * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test * * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases. * * @param candidate * the {@link BigInteger} instance to test for primality. * @param random * the source of randomness to use to choose bases. * @param iterations * the number of randomly-chosen bases to perform the test for. * @return <code>false</code> if any witness to compositeness is found amongst the chosen bases * (so <code>candidate</code> is definitely NOT prime), or else <code>true</code> * (indicating primality with some probability dependent on the number of iterations * that were performed). */ public static boolean isMRProbablePrime(BigInteger candidate, SecureRandom random, int iterations) { checkCandidate(candidate, "candidate"); if (random == null) { throw new IllegalArgumentException("'random' cannot be null"); } if (iterations < 1) { throw new IllegalArgumentException("'iterations' must be > 0"); } if (candidate.bitLength() == 2) { return true; } if (!candidate.testBit(0)) { return false; } BigInteger w = candidate; BigInteger wSubOne = candidate.subtract(ONE); BigInteger wSubTwo = candidate.subtract(TWO); int a = wSubOne.getLowestSetBit(); BigInteger m = wSubOne.shiftRight(a); for (int i = 0; i < iterations; ++i) { BigInteger b = BigIntegers.createRandomInRange(TWO, wSubTwo, random); if (!implMRProbablePrimeToBase(w, wSubOne, m, a, b)) { return false; } } return true; } /** * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test (to a fixed base). * * Run a single iteration of the Miller-Rabin algorithm against the specified base. 
* * @param candidate * the {@link BigInteger} instance to test for primality. * @param base * the source of randomness to use to choose bases. * @return <code>false</code> if the specified base is a witness to compositeness (so * <code>candidate</code> is definitely NOT prime), or else <code>true</code>. */ public static boolean isMRProbablePrimeToBase(BigInteger candidate, BigInteger base) { checkCandidate(candidate, "candidate"); checkCandidate(base, "base"); if (base.compareTo(candidate.subtract(ONE)) >= 0) { throw new IllegalArgumentException("'base' must be < ('candidate' - 1)"); } if (candidate.bitLength() == 2) { return true; } BigInteger w = candidate; BigInteger wSubOne = candidate.subtract(ONE); int a = wSubOne.getLowestSetBit(); BigInteger m = wSubOne.shiftRight(a); return implMRProbablePrimeToBase(w, wSubOne, m, a, base); } private static void checkCandidate(BigInteger n, String name) { if (n == null || n.signum() < 1 || n.bitLength() < 2) { throw new IllegalArgumentException("'" + name + "' must be non-null and >= 2"); } } private static boolean implHasAnySmallFactors(BigInteger x) { /* * Bundle trial divisors into ~32-bit moduli then use fast tests on the ~32-bit remainders. */ int m = 2 * 3 * 5 * 7 * 11 * 13 * 17 * 19 * 23; int r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r & 1) != 0 && (r % 3) != 0 && (r % 5) != 0 && (r % 7) != 0 && (r % 11) != 0 && (r % 13) != 0 && (r % 17) != 0 && (r % 19) != 0 && (r % 23) != 0) { m = 29 * 31 * 37 * 41 * 43; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 29) != 0 && (r % 31) != 0 && (r % 37) != 0 && (r % 41) != 0 && (r % 43) != 0) { m = 47 * 53 * 59 * 61 * 67; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 47) != 0 && (r % 53) != 0 && (r % 59) != 0 && (r % 61) != 0 && (r % 67) != 0) { m = 71 * 73 * 79 * 83; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 71) != 0 && (r % 73) != 0 && (r % 79) != 0 && (r % 83) != 0) { m = 89 * 97 * 101 * 103; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 89) != 0 && (r % 97) != 0 && (r % 101) != 0 && (r % 103) != 0) { m = 107 * 109 * 113 * 127; r = x.mod(BigInteger.valueOf(m)).intValue(); if ((r % 107) != 0 && (r % 109) != 0 && (r % 113) != 0 && (r % 127) != 0) { return false; } } } } } } return true; } private static boolean implMRProbablePrimeToBase(BigInteger w, BigInteger wSubOne, BigInteger m, int a, BigInteger b) { BigInteger z = b.modPow(m, w); if (z.equals(ONE) || z.equals(wSubOne)) { return true; } boolean result = false; for (int j = 1; j < a; ++j) { z = z.modPow(TWO, w); if (z.equals(wSubOne)) { result = true; break; } if (z.equals(ONE)) { return false; } } return result; } private static STOutput implSTRandomPrime(Digest d, int length, byte[] primeSeed) { int dLen = d.getDigestSize(); if (length < 33) { int primeGenCounter = 0; byte[] c0 = new byte[dLen]; byte[] c1 = new byte[dLen]; for (;;) { hash(d, primeSeed, c0, 0); inc(primeSeed, 1); hash(d, primeSeed, c1, 0); inc(primeSeed, 1); int c = extract32(c0) ^ extract32(c1); c &= (-1 >>> (32 - length)); c |= (1 << (length - 1)) | 1; ++primeGenCounter; long c64 = c & 0xFFFFFFFFL; if (isPrime32(c64)) { return new STOutput(BigInteger.valueOf(c64), primeSeed, primeGenCounter); } if (primeGenCounter > (4 * length)) { throw new IllegalStateException("Too many iterations in Shawe-Taylor Random_Prime Routine"); } } } STOutput rec = implSTRandomPrime(d, (length + 3)/2, primeSeed); BigInteger c0 = rec.getPrime(); primeSeed = rec.getPrimeSeed(); int primeGenCounter = rec.getPrimeGenCounter(); int outlen = 8 * dLen; int 
iterations = (length - 1)/outlen; int oldCounter = primeGenCounter; BigInteger x = hashGen(d, primeSeed, iterations + 1); x = x.mod(ONE.shiftLeft(length - 1)).setBit(length - 1); BigInteger c0x2 = c0.shiftLeft(1); BigInteger tx2 = x.subtract(ONE).divide(c0x2).add(ONE).shiftLeft(1); int dt = 0; BigInteger c = tx2.multiply(c0).add(ONE); /* * TODO Since the candidate primes are generated by constant steps ('c0x2'), * sieving could be used here in place of the 'mightBePrime' approach. */ for (;;) { if (c.bitLength() > length) { tx2 = ONE.shiftLeft(length - 1).subtract(ONE).divide(c0x2).add(ONE).shiftLeft(1); c = tx2.multiply(c0).add(ONE); } ++primeGenCounter; /* * This is an optimization of the original algorithm, using trial division to screen out * many non-primes quickly. * * NOTE: 'primeSeed' is still incremented as if we performed the full check! */ if (!implHasAnySmallFactors(c)) { BigInteger a = hashGen(d, primeSeed, iterations + 1); a = a.mod(c.subtract(THREE)).add(TWO); tx2 = tx2.add(BigInteger.valueOf(dt)); dt = 0; BigInteger z = a.modPow(tx2, c); if (c.gcd(z.subtract(ONE)).equals(ONE) && z.modPow(c0, c).equals(ONE)) { return new STOutput(c, primeSeed, primeGenCounter); } } else { inc(primeSeed, iterations + 1); } if (primeGenCounter >= ((4 * length) + oldCounter)) { throw new IllegalStateException("Too many iterations in Shawe-Taylor Random_Prime Routine"); } dt += 2; c = c.add(c0x2); } } private static int extract32(byte[] bs) { int result = 0; int count = Math.min(4, bs.length); for (int i = 0; i < count; ++i) { int b = bs[bs.length - (i + 1)] & 0xFF; result |= (b << (8 * i)); } return result; } private static void hash(Digest d, byte[] input, byte[] output, int outPos) { d.update(input, 0, input.length); d.doFinal(output, outPos); } private static BigInteger hashGen(Digest d, byte[] seed, int count) { int dLen = d.getDigestSize(); int pos = count * dLen; byte[] buf = new byte[pos]; for (int i = 0; i < count; ++i) { pos -= dLen; hash(d, seed, buf, pos); inc(seed, 1); } return new BigInteger(1, buf); } private static void inc(byte[] seed, int c) { int pos = seed.length; while (c > 0 && --pos >= 0) { c += (seed[pos] & 0xFF); seed[pos] = (byte)c; c >>>= 8; } } private static boolean isPrime32(long x) { if (x >>> 32 != 0L) { throw new IllegalArgumentException("Size limit exceeded"); } /* * Use wheel factorization with 2, 3, 5 to select trial divisors. */ if (x <= 5L) { return x == 2L || x == 3L || x == 5L; } if ((x & 1L) == 0L || (x % 3L) == 0L || (x % 5L) == 0L) { return false; } long[] ds = new long[]{ 1L, 7L, 11L, 13L, 17L, 19L, 23L, 29L }; long base = 0L; for (int pos = 1; ; pos = 0) { /* * Trial division by wheel-selected divisors */ while (pos < ds.length) { long d = base + ds[pos]; if (x % d == 0L) { return x < 30L; } ++pos; } base += 30L; if (base * base >= x) { return true; } } } }
Reformat
core/src/main/java/org/bouncycastle/math/Primes.java
Reformat
<ide><path>core/src/main/java/org/bouncycastle/math/Primes.java <ide> <ide> /** <ide> * FIPS 186-4 C.6 Shawe-Taylor Random_Prime Routine <del> * <add> * <ide> * Construct a provable prime number using a hash function. <del> * <add> * <ide> * @param hash <ide> * the {@link Digest} instance to use (as "Hash()"). Cannot be null. <ide> * @param length <ide> <ide> /** <ide> * FIPS 186-4 C.3.2 Enhanced Miller-Rabin Probabilistic Primality Test <del> * <add> * <ide> * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases. This is an <ide> * alternative to {@link #isMRProbablePrime(BigInteger, SecureRandom, int)} that provides more <ide> * information about a composite candidate, which may be useful when generating or validating <ide> * RSA moduli. <del> * <add> * <ide> * @param candidate <ide> * the {@link BigInteger} instance to test for primality. <ide> * @param random <ide> x = z; <ide> } <ide> } <del> <add> <ide> g = x.subtract(ONE).gcd(w); <ide> <ide> if (g.compareTo(ONE) > 0) <ide> <ide> /** <ide> * A fast check for small divisors, up to some implementation-specific limit. <del> * <add> * <ide> * @param candidate <ide> * the {@link BigInteger} instance to test for division by small factors. <del> * <add> * <ide> * @return <code>true</code> if the candidate is found to have any small factors, <ide> * <code>false</code> otherwise. <ide> */ <ide> <ide> /** <ide> * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test <del> * <add> * <ide> * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases. <del> * <add> * <ide> * @param candidate <ide> * the {@link BigInteger} instance to test for primality. <ide> * @param random <ide> <ide> /** <ide> * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test (to a fixed base). <del> * <add> * <ide> * Run a single iteration of the Miller-Rabin algorithm against the specified base. <del> * <add> * <ide> * @param candidate <ide> * the {@link BigInteger} instance to test for primality. <ide> * @param base <ide> /* <ide> * This is an optimization of the original algorithm, using trial division to screen out <ide> * many non-primes quickly. <del> * <add> * <ide> * NOTE: 'primeSeed' is still incremented as if we performed the full check! <ide> */ <ide> if (!implHasAnySmallFactors(c)) <ide> /* <ide> * Use wheel factorization with 2, 3, 5 to select trial divisors. <ide> */ <del> <add> <ide> if (x <= 5L) <ide> { <ide> return x == 2L || x == 3L || x == 5L;
Java
apache-2.0
3d617aa4baa0bd95db682ae60aaefbbbef2de18f
0
prateek1306/presto,ocono-tech/presto,ocono-tech/presto,facebook/presto,arhimondr/presto,Yaliang/presto,losipiuk/presto,dain/presto,11xor6/presto,ebyhr/presto,Praveen2112/presto,ebyhr/presto,zzhao0/presto,wyukawa/presto,erichwang/presto,hgschmie/presto,electrum/presto,mbeitchman/presto,facebook/presto,elonazoulay/presto,haozhun/presto,smartnews/presto,yuananf/presto,sopel39/presto,damiencarol/presto,aglne/presto,aglne/presto,smartnews/presto,ebyhr/presto,svstanev/presto,svstanev/presto,arhimondr/presto,aglne/presto,geraint0923/presto,mandusm/presto,jxiang/presto,Teradata/presto,nezihyigitbasi/presto,sumitkgec/presto,miniway/presto,sumitkgec/presto,miniway/presto,electrum/presto,gh351135612/presto,Yaliang/presto,mvp/presto,facebook/presto,losipiuk/presto,gh351135612/presto,aramesh117/presto,sopel39/presto,electrum/presto,sumitkgec/presto,ebyhr/presto,damiencarol/presto,Teradata/presto,haozhun/presto,stewartpark/presto,mandusm/presto,aramesh117/presto,treasure-data/presto,yuananf/presto,aleph-zero/presto,twitter-forks/presto,ptkool/presto,damiencarol/presto,EvilMcJerkface/presto,erichwang/presto,shixuan-fan/presto,EvilMcJerkface/presto,ptkool/presto,Yaliang/presto,ptkool/presto,ptkool/presto,arhimondr/presto,elonazoulay/presto,EvilMcJerkface/presto,martint/presto,sumitkgec/presto,aleph-zero/presto,11xor6/presto,nezihyigitbasi/presto,miniway/presto,stewartpark/presto,Praveen2112/presto,mvp/presto,zzhao0/presto,arhimondr/presto,treasure-data/presto,electrum/presto,mvp/presto,mbeitchman/presto,yuananf/presto,ebyhr/presto,jiangyifangh/presto,prateek1306/presto,martint/presto,mandusm/presto,Praveen2112/presto,troels/nz-presto,jiangyifangh/presto,arhimondr/presto,RobinUS2/presto,twitter-forks/presto,bloomberg/presto,dain/presto,zzhao0/presto,jiangyifangh/presto,smartnews/presto,sopel39/presto,electrum/presto,nezihyigitbasi/presto,youngwookim/presto,gh351135612/presto,shixuan-fan/presto,aglne/presto,geraint0923/presto,EvilMcJerkface/presto,sopel39/presto,raghavsethi/presto,yuananf/presto,youngwookim/presto,prestodb/presto,shixuan-fan/presto,11xor6/presto,prestodb/presto,svstanev/presto,prestodb/presto,prestodb/presto,troels/nz-presto,twitter-forks/presto,11xor6/presto,geraint0923/presto,aleph-zero/presto,smartnews/presto,aramesh117/presto,erichwang/presto,prateek1306/presto,treasure-data/presto,mandusm/presto,Yaliang/presto,twitter-forks/presto,troels/nz-presto,yuananf/presto,Teradata/presto,prestodb/presto,RobinUS2/presto,martint/presto,treasure-data/presto,Praveen2112/presto,jxiang/presto,mbeitchman/presto,gh351135612/presto,bloomberg/presto,Teradata/presto,facebook/presto,mvp/presto,youngwookim/presto,hgschmie/presto,shixuan-fan/presto,elonazoulay/presto,ocono-tech/presto,treasure-data/presto,aglne/presto,wyukawa/presto,treasure-data/presto,gh351135612/presto,Teradata/presto,nezihyigitbasi/presto,youngwookim/presto,ptkool/presto,martint/presto,sumitkgec/presto,dain/presto,hgschmie/presto,sopel39/presto,ocono-tech/presto,mandusm/presto,damiencarol/presto,jiangyifangh/presto,RobinUS2/presto,jxiang/presto,aramesh117/presto,wagnermarkd/presto,bloomberg/presto,wagnermarkd/presto,raghavsethi/presto,elonazoulay/presto,wagnermarkd/presto,EvilMcJerkface/presto,aleph-zero/presto,ocono-tech/presto,svstanev/presto,haozhun/presto,Praveen2112/presto,zzhao0/presto,RobinUS2/presto,Yaliang/presto,troels/nz-presto,smartnews/presto,raghavsethi/presto,wyukawa/presto,stewartpark/presto,twitter-forks/presto,svstanev/presto,prateek1306/presto,facebook/presto,dain/presto,erichwang/presto,jxiang/presto,bloomberg/presto,Ro
binUS2/presto,wyukawa/presto,losipiuk/presto,martint/presto,stewartpark/presto,11xor6/presto,losipiuk/presto,jiangyifangh/presto,wyukawa/presto,dain/presto,mvp/presto,troels/nz-presto,prateek1306/presto,aramesh117/presto,bloomberg/presto,stewartpark/presto,jxiang/presto,hgschmie/presto,prestodb/presto,losipiuk/presto,zzhao0/presto,wagnermarkd/presto,aleph-zero/presto,wagnermarkd/presto,youngwookim/presto,mbeitchman/presto,geraint0923/presto,damiencarol/presto,miniway/presto,raghavsethi/presto,hgschmie/presto,elonazoulay/presto,geraint0923/presto,raghavsethi/presto,nezihyigitbasi/presto,erichwang/presto,haozhun/presto,haozhun/presto,shixuan-fan/presto,miniway/presto,mbeitchman/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.analyzer; import com.facebook.presto.Session; import com.facebook.presto.SystemSessionProperties; import com.facebook.presto.metadata.FunctionKind; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.QualifiedObjectName; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.metadata.TableMetadata; import com.facebook.presto.metadata.ViewDefinition; import com.facebook.presto.security.AccessControl; import com.facebook.presto.security.AllowAllAccessControl; import com.facebook.presto.security.ViewAccessControl; import com.facebook.presto.spi.CatalogSchemaName; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.security.Identity; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeSignature; import com.facebook.presto.sql.ExpressionUtils; import com.facebook.presto.sql.parser.ParsingException; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.planner.DependencyExtractor; import com.facebook.presto.sql.planner.ExpressionInterpreter; import com.facebook.presto.sql.planner.optimizations.CanonicalizeExpressions; import com.facebook.presto.sql.tree.AddColumn; import com.facebook.presto.sql.tree.AliasedRelation; import com.facebook.presto.sql.tree.AllColumns; import com.facebook.presto.sql.tree.Call; import com.facebook.presto.sql.tree.Commit; import com.facebook.presto.sql.tree.ComparisonExpression; import com.facebook.presto.sql.tree.CreateSchema; import com.facebook.presto.sql.tree.CreateTable; import com.facebook.presto.sql.tree.CreateTableAsSelect; import com.facebook.presto.sql.tree.CreateView; import com.facebook.presto.sql.tree.Deallocate; import com.facebook.presto.sql.tree.DefaultTraversalVisitor; import com.facebook.presto.sql.tree.Delete; import com.facebook.presto.sql.tree.DereferenceExpression; import com.facebook.presto.sql.tree.DropSchema; import com.facebook.presto.sql.tree.DropTable; import com.facebook.presto.sql.tree.DropView; import com.facebook.presto.sql.tree.Except; import com.facebook.presto.sql.tree.Execute; import com.facebook.presto.sql.tree.Explain; import com.facebook.presto.sql.tree.ExplainType; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.ExpressionRewriter; import com.facebook.presto.sql.tree.ExpressionTreeRewriter; import com.facebook.presto.sql.tree.FieldReference; import com.facebook.presto.sql.tree.FrameBound; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.sql.tree.Grant; import com.facebook.presto.sql.tree.GroupingElement; import com.facebook.presto.sql.tree.Identifier; import com.facebook.presto.sql.tree.Insert; import com.facebook.presto.sql.tree.Intersect; import com.facebook.presto.sql.tree.Join; import com.facebook.presto.sql.tree.JoinCriteria; import com.facebook.presto.sql.tree.JoinOn; 
import com.facebook.presto.sql.tree.JoinUsing; import com.facebook.presto.sql.tree.LongLiteral; import com.facebook.presto.sql.tree.NaturalJoin; import com.facebook.presto.sql.tree.Node; import com.facebook.presto.sql.tree.OrderBy; import com.facebook.presto.sql.tree.Prepare; import com.facebook.presto.sql.tree.QualifiedName; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.QuerySpecification; import com.facebook.presto.sql.tree.Relation; import com.facebook.presto.sql.tree.RenameColumn; import com.facebook.presto.sql.tree.RenameSchema; import com.facebook.presto.sql.tree.RenameTable; import com.facebook.presto.sql.tree.ResetSession; import com.facebook.presto.sql.tree.Revoke; import com.facebook.presto.sql.tree.Rollback; import com.facebook.presto.sql.tree.Row; import com.facebook.presto.sql.tree.SampledRelation; import com.facebook.presto.sql.tree.SelectItem; import com.facebook.presto.sql.tree.SetOperation; import com.facebook.presto.sql.tree.SetSession; import com.facebook.presto.sql.tree.SingleColumn; import com.facebook.presto.sql.tree.SortItem; import com.facebook.presto.sql.tree.StartTransaction; import com.facebook.presto.sql.tree.Statement; import com.facebook.presto.sql.tree.Table; import com.facebook.presto.sql.tree.TableSubquery; import com.facebook.presto.sql.tree.Unnest; import com.facebook.presto.sql.tree.Use; import com.facebook.presto.sql.tree.Values; import com.facebook.presto.sql.tree.Window; import com.facebook.presto.sql.tree.WindowFrame; import com.facebook.presto.sql.tree.With; import com.facebook.presto.sql.tree.WithQuery; import com.facebook.presto.type.ArrayType; import com.facebook.presto.type.MapType; import com.facebook.presto.type.RowType; import com.facebook.presto.util.maps.IdentityLinkedHashMap; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import static com.facebook.presto.SystemSessionProperties.LEGACY_ORDER_BY; import static com.facebook.presto.metadata.FunctionKind.AGGREGATE; import static com.facebook.presto.metadata.FunctionKind.WINDOW; import static com.facebook.presto.metadata.MetadataUtil.createQualifiedObjectName; import static com.facebook.presto.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.createConstantAnalyzer; import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.getExpressionTypes; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.AMBIGUOUS_ATTRIBUTE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.COLUMN_NAME_NOT_SPECIFIED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.COLUMN_TYPE_UNKNOWN; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_COLUMN_NAME; import static 
com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_RELATION; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_ORDINAL; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_WINDOW_FRAME; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISMATCHED_COLUMN_ALIASES; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISMATCHED_SET_COLUMN_TYPES; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_CATALOG; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_COLUMN; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_SCHEMA; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_TABLE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MUST_BE_WINDOW_FUNCTION; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NESTED_WINDOW; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NON_NUMERIC_SAMPLE_PERCENTAGE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.ORDER_BY_MUST_BE_IN_SELECT; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TABLE_ALREADY_EXISTS; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TYPE_MISMATCH; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_ANALYSIS_ERROR; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_IS_RECURSIVE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_IS_STALE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_PARSE_ERROR; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.WILDCARD_WITHOUT_FROM; import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypeSignatures; import static com.facebook.presto.sql.planner.ExpressionInterpreter.expressionOptimizer; import static com.facebook.presto.sql.tree.ComparisonExpressionType.EQUAL; import static com.facebook.presto.sql.tree.ExplainType.Type.DISTRIBUTED; import static com.facebook.presto.sql.tree.FrameBound.Type.CURRENT_ROW; import static com.facebook.presto.sql.tree.FrameBound.Type.FOLLOWING; import static com.facebook.presto.sql.tree.FrameBound.Type.PRECEDING; import static com.facebook.presto.sql.tree.FrameBound.Type.UNBOUNDED_FOLLOWING; import static com.facebook.presto.sql.tree.FrameBound.Type.UNBOUNDED_PRECEDING; import static com.facebook.presto.sql.tree.WindowFrame.Type.RANGE; import static com.facebook.presto.type.UnknownType.UNKNOWN; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.getLast; import static com.google.common.collect.Iterables.transform; import static java.lang.Math.toIntExact; import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; class StatementAnalyzer { private final Analysis analysis; private final Metadata metadata; private final Session session; private final SqlParser sqlParser; private final AccessControl accessControl; public StatementAnalyzer( Analysis analysis, Metadata metadata, SqlParser sqlParser, AccessControl accessControl, Session session) { this.analysis = requireNonNull(analysis, "analysis is null"); this.metadata = requireNonNull(metadata, "metadata is null"); this.sqlParser = 
requireNonNull(sqlParser, "sqlParser is null"); this.accessControl = requireNonNull(accessControl, "accessControl is null"); this.session = requireNonNull(session, "session is null"); } public Scope analyze(Node node, Scope scope) { return new Visitor().process(node, scope); } private void analyzeWhere(Node node, Scope scope, Expression predicate) { Visitor visitor = new Visitor(); visitor.analyzeWhere(node, scope, predicate); } private class Visitor extends DefaultTraversalVisitor<Scope, Scope> { @Override protected Scope visitUse(Use node, Scope scope) { throw new SemanticException(NOT_SUPPORTED, node, "USE statement is not supported"); } @Override protected Scope visitInsert(Insert insert, Scope scope) { QualifiedObjectName targetTable = createQualifiedObjectName(session, insert, insert.getTarget()); if (metadata.getView(session, targetTable).isPresent()) { throw new SemanticException(NOT_SUPPORTED, insert, "Inserting into views is not supported"); } // analyze the query that creates the data Scope queryScope = process(insert.getQuery(), scope); analysis.setUpdateType("INSERT"); // verify the insert destination columns match the query Optional<TableHandle> targetTableHandle = metadata.getTableHandle(session, targetTable); if (!targetTableHandle.isPresent()) { throw new SemanticException(MISSING_TABLE, insert, "Table '%s' does not exist", targetTable); } accessControl.checkCanInsertIntoTable(session.getRequiredTransactionId(), session.getIdentity(), targetTable); TableMetadata tableMetadata = metadata.getTableMetadata(session, targetTableHandle.get()); List<String> tableColumns = tableMetadata.getColumns().stream() .filter(column -> !column.isHidden()) .map(ColumnMetadata::getName) .collect(toImmutableList()); List<String> insertColumns; if (insert.getColumns().isPresent()) { insertColumns = insert.getColumns().get().stream() .map(String::toLowerCase) .collect(toImmutableList()); Set<String> columnNames = new HashSet<>(); for (String insertColumn : insertColumns) { if (!tableColumns.contains(insertColumn)) { throw new SemanticException(MISSING_COLUMN, insert, "Insert column name does not exist in target table: %s", insertColumn); } if (!columnNames.add(insertColumn)) { throw new SemanticException(DUPLICATE_COLUMN_NAME, insert, "Insert column name is specified more than once: %s", insertColumn); } } } else { insertColumns = tableColumns; } Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, targetTableHandle.get()); analysis.setInsert(new Analysis.Insert( targetTableHandle.get(), insertColumns.stream().map(columnHandles::get).collect(toImmutableList()))); Iterable<Type> tableTypes = insertColumns.stream() .map(insertColumn -> tableMetadata.getColumn(insertColumn).getType()) .collect(toImmutableList()); Iterable<Type> queryTypes = transform(queryScope.getRelationType().getVisibleFields(), Field::getType); if (!typesMatchForInsert(tableTypes, queryTypes)) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, insert, "Insert query has mismatched column types: " + "Table: [" + Joiner.on(", ").join(tableTypes) + "], " + "Query: [" + Joiner.on(", ").join(queryTypes) + "]"); } return createAndAssignScope(insert, scope, Field.newUnqualified("rows", BIGINT)); } private boolean typesMatchForInsert(Iterable<Type> tableTypes, Iterable<Type> queryTypes) { if (Iterables.size(tableTypes) != Iterables.size(queryTypes)) { return false; } Iterator<Type> tableTypesIterator = tableTypes.iterator(); Iterator<Type> queryTypesIterator = queryTypes.iterator(); while 
(tableTypesIterator.hasNext()) { Type tableType = tableTypesIterator.next(); Type queryType = queryTypesIterator.next(); if (!metadata.getTypeManager().canCoerce(queryType, tableType)) { return false; } } return true; } @Override protected Scope visitDelete(Delete node, Scope scope) { Table table = node.getTable(); QualifiedObjectName tableName = createQualifiedObjectName(session, table, table.getName()); if (metadata.getView(session, tableName).isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "Deleting from views is not supported"); } // Analyzer checks for select permissions but DELETE has a separate permission, so disable access checks // TODO: we shouldn't need to create a new analyzer. The access control should be carried in the context object StatementAnalyzer analyzer = new StatementAnalyzer( analysis, metadata, sqlParser, new AllowAllAccessControl(), session); Scope tableScope = analyzer.analyze(table, scope); node.getWhere().ifPresent(where -> analyzer.analyzeWhere(node, tableScope, where)); analysis.setUpdateType("DELETE"); accessControl.checkCanDeleteFromTable(session.getRequiredTransactionId(), session.getIdentity(), tableName); return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT)); } @Override protected Scope visitCreateTableAsSelect(CreateTableAsSelect node, Scope scope) { analysis.setUpdateType("CREATE TABLE"); // turn this into a query that has a new table writer node on top. QualifiedObjectName targetTable = createQualifiedObjectName(session, node, node.getName()); analysis.setCreateTableDestination(targetTable); Optional<TableHandle> targetTableHandle = metadata.getTableHandle(session, targetTable); if (targetTableHandle.isPresent()) { if (node.isNotExists()) { analysis.setCreateTableAsSelectNoOp(true); return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT)); } throw new SemanticException(TABLE_ALREADY_EXISTS, node, "Destination table '%s' already exists", targetTable); } for (Expression expression : node.getProperties().values()) { // analyze table property value expressions which must be constant createConstantAnalyzer(metadata, session, analysis.getParameters(), analysis.isDescribe()) .analyze(expression, scope); } analysis.setCreateTableProperties(node.getProperties()); accessControl.checkCanCreateTable(session.getRequiredTransactionId(), session.getIdentity(), targetTable); analysis.setCreateTableAsSelectWithData(node.isWithData()); // analyze the query that creates the table Scope queryScope = process(node.getQuery(), scope); validateColumns(node, queryScope.getRelationType()); return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT)); } @Override protected Scope visitCreateView(CreateView node, Scope scope) { analysis.setUpdateType("CREATE VIEW"); QualifiedObjectName viewName = createQualifiedObjectName(session, node, node.getName()); // analyze the query that creates the view StatementAnalyzer analyzer = new StatementAnalyzer( analysis, metadata, sqlParser, new ViewAccessControl(accessControl), session); Scope queryScope = analyzer.analyze(node.getQuery(), scope); accessControl.checkCanCreateView(session.getRequiredTransactionId(), session.getIdentity(), viewName); validateColumns(node, queryScope.getRelationType()); return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitSetSession(SetSession node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitResetSession(ResetSession node, Scope scope) { 
return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitAddColumn(AddColumn node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCreateSchema(CreateSchema node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDropSchema(DropSchema node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRenameSchema(RenameSchema node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCreateTable(CreateTable node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDropTable(DropTable node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRenameTable(RenameTable node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRenameColumn(RenameColumn node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDropView(DropView node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitStartTransaction(StartTransaction node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCommit(Commit node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRollback(Rollback node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitPrepare(Prepare node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDeallocate(Deallocate node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitExecute(Execute node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitGrant(Grant node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRevoke(Revoke node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCall(Call node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } private void validateColumns(Statement node, RelationType descriptor) { // verify that all column names are specified and unique // TODO: collect errors and return them all at once Set<String> names = new HashSet<>(); for (Field field : descriptor.getVisibleFields()) { Optional<String> fieldName = field.getName(); if (!fieldName.isPresent()) { throw new SemanticException(COLUMN_NAME_NOT_SPECIFIED, node, "Column name not specified at position %s", descriptor.indexOf(field) + 1); } if (!names.add(fieldName.get())) { throw new SemanticException(DUPLICATE_COLUMN_NAME, node, "Column name '%s' specified more than once", fieldName.get()); } if (field.getType().equals(UNKNOWN)) { throw new SemanticException(COLUMN_TYPE_UNKNOWN, node, "Column type is unknown: %s", fieldName.get()); } } } @Override protected Scope visitExplain(Explain node, Scope scope) throws SemanticException { checkState(node.isAnalyze(), "Non analyze explain should be rewritten to Query"); if (node.getOptions().stream().anyMatch(option -> !option.equals(new ExplainType(DISTRIBUTED)))) { throw new SemanticException(NOT_SUPPORTED, node, "EXPLAIN ANALYZE only supports TYPE DISTRIBUTED option"); } 
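/* Editor's illustration, not in the original source: "EXPLAIN ANALYZE SELECT 1" passes the option check above, while "EXPLAIN ANALYZE (TYPE LOGICAL) SELECT 1" is rejected as NOT_SUPPORTED. */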
process(node.getStatement(), scope); analysis.setUpdateType(null); return createAndAssignScope(node, scope, Field.newUnqualified("Query Plan", VARCHAR)); } @Override protected Scope visitQuery(Query node, Scope scope) { Scope withScope = analyzeWith(node, scope); Scope queryScope = Scope.builder() .withParent(withScope) .build(); Scope queryBodyScope = process(node.getQueryBody(), queryScope); analyzeOrderBy(node, queryBodyScope); // Input fields == Output fields analysis.setOutputExpressions(node, descriptorToFields(queryBodyScope)); queryScope = Scope.builder() .withParent(withScope) .withRelationType(queryBodyScope.getRelationType()) .build(); analysis.setScope(node, queryScope); return queryScope; } @Override protected Scope visitUnnest(Unnest node, Scope scope) { ImmutableList.Builder<Field> outputFields = ImmutableList.builder(); for (Expression expression : node.getExpressions()) { ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, scope); Type expressionType = expressionAnalysis.getType(expression); if (expressionType instanceof ArrayType) { outputFields.add(Field.newUnqualified(Optional.empty(), ((ArrayType) expressionType).getElementType())); } else if (expressionType instanceof MapType) { outputFields.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getKeyType())); outputFields.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getValueType())); } else { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Cannot unnest type: " + expressionType); } } if (node.isWithOrdinality()) { outputFields.add(Field.newUnqualified(Optional.empty(), BIGINT)); } return createAndAssignScope(node, scope, outputFields.build()); } @Override protected Scope visitTable(Table table, Scope scope) { if (!table.getName().getPrefix().isPresent()) { // is this a reference to a WITH query? String name = table.getName().getSuffix(); Optional<WithQuery> withQuery = scope.getNamedQuery(name); if (withQuery.isPresent()) { Query query = withQuery.get().getQuery(); analysis.registerNamedQuery(table, query); // re-alias the fields with the name assigned to the query in the WITH declaration RelationType queryDescriptor = analysis.getOutputDescriptor(query); List<Field> fields; Optional<List<String>> columnNames = withQuery.get().getColumnNames(); if (columnNames.isPresent()) { // if columns are explicitly aliased -> WITH cte(alias1, alias2 ...) 
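/* Editor's illustration, not in the original source: WITH t (x, y) AS (SELECT a, b FROM u) SELECT x FROM t re-aliases the two query output fields as t.x and t.y in the loop below. */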
ImmutableList.Builder<Field> fieldBuilder = ImmutableList.builder(); int field = 0; for (String columnName : columnNames.get()) { Field inputField = queryDescriptor.getFieldByIndex(field); fieldBuilder.add(Field.newQualified( QualifiedName.of(name), Optional.of(columnName), inputField.getType(), false, inputField.getOriginTable(), inputField.isAliased())); field++; } fields = fieldBuilder.build(); } else { fields = queryDescriptor.getAllFields().stream() .map(field -> Field.newQualified( QualifiedName.of(name), field.getName(), field.getType(), field.isHidden(), field.getOriginTable(), field.isAliased())) .collect(toImmutableList()); } return createAndAssignScope(table, scope, fields); } } QualifiedObjectName name = createQualifiedObjectName(session, table, table.getName()); Optional<ViewDefinition> optionalView = metadata.getView(session, name); if (optionalView.isPresent()) { Statement statement = analysis.getStatement(); if (statement instanceof CreateView) { CreateView viewStatement = (CreateView) statement; QualifiedObjectName viewNameFromStatement = createQualifiedObjectName(session, viewStatement, viewStatement.getName()); if (viewStatement.isReplace() && viewNameFromStatement.equals(name)) { throw new SemanticException(VIEW_IS_RECURSIVE, table, "Statement would create a recursive view"); } } if (analysis.hasTableInView(table)) { throw new SemanticException(VIEW_IS_RECURSIVE, table, "View is recursive"); } ViewDefinition view = optionalView.get(); Query query = parseView(view.getOriginalSql(), name, table); analysis.registerNamedQuery(table, query); accessControl.checkCanSelectFromView(session.getRequiredTransactionId(), session.getIdentity(), name); analysis.registerTableForView(table); RelationType descriptor = analyzeView(query, name, view.getCatalog(), view.getSchema(), view.getOwner(), table); analysis.unregisterTableForView(); if (isViewStale(view.getColumns(), descriptor.getVisibleFields())) { throw new SemanticException(VIEW_IS_STALE, table, "View '%s' is stale; it must be re-created", name); } // Derive the type of the view from the stored definition, not from the analysis of the underlying query. // This is needed in case the underlying table(s) changed and the query in the view now produces types that // are implicitly coercible to the declared view types. 
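/* Editor's illustration, not in the original source: a view declared with a bigint column keeps presenting bigint even if the underlying table column is later changed to integer; the addRelationCoercion call below coerces the query output back to the declared types. */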
List<Field> outputFields = view.getColumns().stream() .map(column -> Field.newQualified( QualifiedName.of(name.getObjectName()), Optional.of(column.getName()), column.getType(), false, Optional.of(name), false)) .collect(toImmutableList()); analysis.addRelationCoercion(table, outputFields.stream().map(Field::getType).toArray(Type[]::new)); return createAndAssignScope(table, scope, outputFields); } Optional<TableHandle> tableHandle = metadata.getTableHandle(session, name); if (!tableHandle.isPresent()) { if (!metadata.getCatalogHandle(session, name.getCatalogName()).isPresent()) { throw new SemanticException(MISSING_CATALOG, table, "Catalog %s does not exist", name.getCatalogName()); } if (!metadata.schemaExists(session, new CatalogSchemaName(name.getCatalogName(), name.getSchemaName()))) { throw new SemanticException(MISSING_SCHEMA, table, "Schema %s does not exist", name.getSchemaName()); } throw new SemanticException(MISSING_TABLE, table, "Table %s does not exist", name); } accessControl.checkCanSelectFromTable(session.getRequiredTransactionId(), session.getIdentity(), name); TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle.get()); Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle.get()); // TODO: discover columns lazily based on where they are needed (to support connectors that can't enumerate all tables) ImmutableList.Builder<Field> fields = ImmutableList.builder(); for (ColumnMetadata column : tableMetadata.getColumns()) { Field field = Field.newQualified( table.getName(), Optional.of(column.getName()), column.getType(), column.isHidden(), Optional.of(name), false); fields.add(field); ColumnHandle columnHandle = columnHandles.get(column.getName()); checkArgument(columnHandle != null, "Unknown field %s", field); analysis.setColumn(field, columnHandle); } analysis.registerTable(table, tableHandle.get()); return createAndAssignScope(table, scope, fields.build()); } @Override protected Scope visitAliasedRelation(AliasedRelation relation, Scope scope) { Scope relationScope = process(relation.getRelation(), scope); // todo this check should be inside of TupleDescriptor.withAlias, but the exception needs the node object RelationType relationType = relationScope.getRelationType(); if (relation.getColumnNames() != null) { int totalColumns = relationType.getVisibleFieldCount(); if (totalColumns != relation.getColumnNames().size()) { throw new SemanticException(MISMATCHED_COLUMN_ALIASES, relation, "Column alias list has %s entries but '%s' has %s columns available", relation.getColumnNames().size(), relation.getAlias(), totalColumns); } } RelationType descriptor = relationType.withAlias(relation.getAlias(), relation.getColumnNames()); return createAndAssignScope(relation, scope, descriptor); } @Override protected Scope visitSampledRelation(SampledRelation relation, Scope scope) { if (!DependencyExtractor.extractNames(relation.getSamplePercentage(), analysis.getColumnReferences()).isEmpty()) { throw new SemanticException(NON_NUMERIC_SAMPLE_PERCENTAGE, relation.getSamplePercentage(), "Sample percentage cannot contain column references"); } IdentityLinkedHashMap<Expression, Type> expressionTypes = getExpressionTypes( session, metadata, sqlParser, ImmutableMap.of(), relation.getSamplePercentage(), analysis.getParameters(), analysis.isDescribe()); ExpressionInterpreter samplePercentageEval = expressionOptimizer(relation.getSamplePercentage(), metadata, session, expressionTypes); Object samplePercentageObject = 
samplePercentageEval.optimize(symbol -> { throw new SemanticException(NON_NUMERIC_SAMPLE_PERCENTAGE, relation.getSamplePercentage(), "Sample percentage cannot contain column references"); }); if (!(samplePercentageObject instanceof Number)) { throw new SemanticException(NON_NUMERIC_SAMPLE_PERCENTAGE, relation.getSamplePercentage(), "Sample percentage should evaluate to a numeric expression"); } double samplePercentageValue = ((Number) samplePercentageObject).doubleValue(); if (samplePercentageValue < 0.0) { throw new SemanticException(SemanticErrorCode.SAMPLE_PERCENTAGE_OUT_OF_RANGE, relation.getSamplePercentage(), "Sample percentage must be greater than or equal to 0"); } if ((samplePercentageValue > 100.0)) { throw new SemanticException(SemanticErrorCode.SAMPLE_PERCENTAGE_OUT_OF_RANGE, relation.getSamplePercentage(), "Sample percentage must be less than or equal to 100"); } analysis.setSampleRatio(relation, samplePercentageValue / 100); Scope relationScope = process(relation.getRelation(), scope); return createAndAssignScope(relation, scope, relationScope.getRelationType()); } @Override protected Scope visitTableSubquery(TableSubquery node, Scope scope) { StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session); Scope queryScope = analyzer.analyze(node.getQuery(), scope); return createAndAssignScope(node, scope, queryScope.getRelationType()); } @Override protected Scope visitQuerySpecification(QuerySpecification node, Scope scope) { // TODO: extract candidate names from SELECT, WHERE, HAVING, GROUP BY and ORDER BY expressions // to pass down to analyzeFrom Scope sourceScope = analyzeFrom(node, scope); node.getWhere().ifPresent(where -> analyzeWhere(node, sourceScope, where)); List<Expression> outputExpressions = analyzeSelect(node, sourceScope); List<List<Expression>> groupByExpressions = analyzeGroupBy(node, sourceScope, outputExpressions); Scope outputScope = computeOutputScope(node, sourceScope); List<Expression> orderByExpressions = analyzeOrderBy(node, sourceScope, outputScope, outputExpressions); analyzeHaving(node, sourceScope); List<Expression> expressions = new ArrayList<>(); expressions.addAll(outputExpressions); expressions.addAll(orderByExpressions); node.getHaving().ifPresent(expressions::add); analyzeAggregations(node, sourceScope, groupByExpressions, analysis.getColumnReferences(), expressions); analyzeWindowFunctions(node, outputExpressions, orderByExpressions); return outputScope; } @Override protected Scope visitSetOperation(SetOperation node, Scope scope) { checkState(node.getRelations().size() >= 2); List<Scope> relationScopes = node.getRelations().stream() .map(relation -> { Scope relationScope = process(relation, scope); return createAndAssignScope(relation, scope, relationScope.getRelationType().withOnlyVisibleFields()); }) .collect(toImmutableList()); Type[] outputFieldTypes = relationScopes.get(0).getRelationType().getVisibleFields().stream() .map(Field::getType) .toArray(Type[]::new); for (Scope relationScope : relationScopes) { int outputFieldSize = outputFieldTypes.length; RelationType relationType = relationScope.getRelationType(); int descFieldSize = relationType.getVisibleFields().size(); String setOperationName = node.getClass().getSimpleName(); if (outputFieldSize != descFieldSize) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, node, "%s query has different number of fields: %d, %d", setOperationName, outputFieldSize, descFieldSize); } for (int i = 0; i < descFieldSize; i++) { Type descFieldType 
= relationType.getFieldByIndex(i).getType(); Optional<Type> commonSuperType = metadata.getTypeManager().getCommonSuperType(outputFieldTypes[i], descFieldType); if (!commonSuperType.isPresent()) { throw new SemanticException(TYPE_MISMATCH, node, "column %d in %s query has incompatible types: %s, %s", i, setOperationName, outputFieldTypes[i].getDisplayName(), descFieldType.getDisplayName()); } outputFieldTypes[i] = commonSuperType.get(); } } Field[] outputDescriptorFields = new Field[outputFieldTypes.length]; RelationType firstDescriptor = relationScopes.get(0).getRelationType().withOnlyVisibleFields(); for (int i = 0; i < outputFieldTypes.length; i++) { Field oldField = firstDescriptor.getFieldByIndex(i); outputDescriptorFields[i] = new Field( oldField.getRelationAlias(), oldField.getName(), outputFieldTypes[i], oldField.isHidden(), oldField.getOriginTable(), oldField.isAliased()); } for (int i = 0; i < node.getRelations().size(); i++) { Relation relation = node.getRelations().get(i); Scope relationScope = relationScopes.get(i); RelationType relationType = relationScope.getRelationType(); for (int j = 0; j < relationType.getVisibleFields().size(); j++) { Type outputFieldType = outputFieldTypes[j]; Type descFieldType = relationType.getFieldByIndex(j).getType(); if (!outputFieldType.equals(descFieldType)) { analysis.addRelationCoercion(relation, outputFieldTypes); break; } } } return createAndAssignScope(node, scope, outputDescriptorFields); } @Override protected Scope visitIntersect(Intersect node, Scope scope) { if (!node.isDistinct()) { throw new SemanticException(NOT_SUPPORTED, node, "INTERSECT ALL not yet implemented"); } return visitSetOperation(node, scope); } @Override protected Scope visitExcept(Except node, Scope scope) { if (!node.isDistinct()) { throw new SemanticException(NOT_SUPPORTED, node, "EXCEPT ALL not yet implemented"); } return visitSetOperation(node, scope); } @Override protected Scope visitJoin(Join node, Scope scope) { JoinCriteria criteria = node.getCriteria().orElse(null); if (criteria instanceof NaturalJoin) { throw new SemanticException(NOT_SUPPORTED, node, "Natural join not supported"); } Scope left = process(node.getLeft(), scope); Scope right = process(node.getRight(), isUnnestRelation(node.getRight()) ?
left : scope); Scope output = createAndAssignScope(node, scope, left.getRelationType().joinWith(right.getRelationType())); if (node.getType() == Join.Type.CROSS || node.getType() == Join.Type.IMPLICIT) { return output; } if (criteria instanceof JoinUsing) { // TODO: implement proper "using" semantics with respect to output columns List<String> columns = ((JoinUsing) criteria).getColumns(); List<Expression> expressions = new ArrayList<>(); for (String column : columns) { Expression leftExpression = new Identifier(column); Expression rightExpression = new Identifier(column); ExpressionAnalysis leftExpressionAnalysis = analyzeExpression(leftExpression, left); ExpressionAnalysis rightExpressionAnalysis = analyzeExpression(rightExpression, right); checkState(leftExpressionAnalysis.getSubqueryInPredicates().isEmpty(), "INVARIANT"); checkState(rightExpressionAnalysis.getSubqueryInPredicates().isEmpty(), "INVARIANT"); checkState(leftExpressionAnalysis.getScalarSubqueries().isEmpty(), "INVARIANT"); checkState(rightExpressionAnalysis.getScalarSubqueries().isEmpty(), "INVARIANT"); addCoercionForJoinCriteria(node, leftExpression, rightExpression); expressions.add(new ComparisonExpression(EQUAL, leftExpression, rightExpression)); } analysis.setJoinCriteria(node, ExpressionUtils.and(expressions)); } else if (criteria instanceof JoinOn) { Expression expression = ((JoinOn) criteria).getExpression(); // need to register coercions in case when join criteria requires coercion (e.g. join on char(1) = char(2)) ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, output); Type clauseType = expressionAnalysis.getType(expression); if (!clauseType.equals(BOOLEAN)) { if (!clauseType.equals(UNKNOWN)) { throw new SemanticException(TYPE_MISMATCH, expression, "JOIN ON clause must evaluate to a boolean: actual type %s", clauseType); } // coerce null to boolean analysis.addCoercion(expression, BOOLEAN, false); } Analyzer.verifyNoAggregatesOrWindowFunctions(metadata.getFunctionRegistry(), expression, "JOIN clause"); Expression canonicalized = CanonicalizeExpressions.canonicalizeExpression(expression); analyzeExpression(canonicalized, output); Set<Expression> postJoinConjuncts = new HashSet<>(); for (Expression conjunct : ExpressionUtils.extractConjuncts(canonicalized)) { conjunct = ExpressionUtils.normalize(conjunct); if (conjunct instanceof ComparisonExpression && (((ComparisonExpression) conjunct).getType() == EQUAL || node.getType() == Join.Type.INNER)) { Expression conjunctFirst = ((ComparisonExpression) conjunct).getLeft(); Expression conjunctSecond = ((ComparisonExpression) conjunct).getRight(); Set<QualifiedName> firstDependencies = DependencyExtractor.extractNames(conjunctFirst, expressionAnalysis.getColumnReferences()); Set<QualifiedName> secondDependencies = DependencyExtractor.extractNames(conjunctSecond, expressionAnalysis.getColumnReferences()); Expression leftExpression = null; Expression rightExpression = null; if (firstDependencies.stream().allMatch(left.getRelationType()::canResolve) && secondDependencies.stream().allMatch(right.getRelationType()::canResolve)) { leftExpression = conjunctFirst; rightExpression = conjunctSecond; } else if (firstDependencies.stream().allMatch(right.getRelationType()::canResolve) && secondDependencies.stream().allMatch(left.getRelationType()::canResolve)) { leftExpression = conjunctSecond; rightExpression = conjunctFirst; } // expression on each side of comparison operator references only symbols from one side of join. 
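/* Editor's illustration, not in the original source: for "ON l.x + 1 = r.y", "l.x + 1" resolves entirely against the left relation and "r.y" against the right, so the conjunct stays an equi-join clause instead of becoming a post-join filter. */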
// analyze the clauses to record the types of all subexpressions and resolve names against the left/right underlying tuples if (rightExpression != null) { ExpressionAnalysis leftExpressionAnalysis = analyzeExpression(leftExpression, left); ExpressionAnalysis rightExpressionAnalysis = analyzeExpression(rightExpression, right); analysis.recordSubqueries(node, leftExpressionAnalysis); analysis.recordSubqueries(node, rightExpressionAnalysis); addCoercionForJoinCriteria(node, leftExpression, rightExpression); } else { // mixed references to both left and right join relation on one side of comparison operator. // expression will be put in post-join condition; analyze in context of output table. postJoinConjuncts.add(conjunct); } } else { // non-comparison expression. // expression will be put in post-join condition; analyze in context of output table. postJoinConjuncts.add(conjunct); } } Expression postJoinPredicate = ExpressionUtils.combineConjuncts(postJoinConjuncts); analysis.recordSubqueries(node, analyzeExpression(postJoinPredicate, output)); analysis.setJoinCriteria(node, canonicalized); } else { throw new UnsupportedOperationException("unsupported join criteria: " + criteria.getClass().getName()); } return output; } private boolean isUnnestRelation(Relation node) { if (node instanceof AliasedRelation) { return isUnnestRelation(((AliasedRelation) node).getRelation()); } return node instanceof Unnest; } private void addCoercionForJoinCriteria(Join node, Expression leftExpression, Expression rightExpression) { Type leftType = analysis.getTypeWithCoercions(leftExpression); Type rightType = analysis.getTypeWithCoercions(rightExpression); Optional<Type> superType = metadata.getTypeManager().getCommonSuperType(leftType, rightType); if (!superType.isPresent()) { throw new SemanticException(TYPE_MISMATCH, node, "Join criteria has incompatible types: %s, %s", leftType.getDisplayName(), rightType.getDisplayName()); } if (!leftType.equals(superType.get())) { analysis.addCoercion(leftExpression, superType.get(), metadata.getTypeManager().isTypeOnlyCoercion(leftType, rightType)); } if (!rightType.equals(superType.get())) { analysis.addCoercion(rightExpression, superType.get(), metadata.getTypeManager().isTypeOnlyCoercion(rightType, leftType)); } } @Override protected Scope visitValues(Values node, Scope scope) { checkState(node.getRows().size() >= 1); List<List<Type>> rowTypes = node.getRows().stream() .map(row -> analyzeExpression(row, scope).getType(row)) .map(type -> { if (type instanceof RowType) { return type.getTypeParameters(); } return ImmutableList.of(type); }) .collect(toImmutableList()); // determine common super type of the rows List<Type> fieldTypes = new ArrayList<>(rowTypes.iterator().next()); for (List<Type> rowType : rowTypes) { // check field count consistency for rows if (rowType.size() != fieldTypes.size()) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, node, "Values rows have mismatched types: %s vs %s", rowTypes.get(0), rowType); } for (int i = 0; i < rowType.size(); i++) { Type fieldType = rowType.get(i); Type superType = fieldTypes.get(i); Optional<Type> commonSuperType = metadata.getTypeManager().getCommonSuperType(fieldType, superType); if (!commonSuperType.isPresent()) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, node, "Values rows have mismatched types: %s vs %s", rowTypes.get(0), rowType); } fieldTypes.set(i, commonSuperType.get()); } } // add coercions for the rows for (Expression row : node.getRows()) { if (row instanceof Row) { List<Expression> 
items = ((Row) row).getItems(); for (int i = 0; i < items.size(); i++) { Type expectedType = fieldTypes.get(i); Expression item = items.get(i); Type actualType = analysis.getType(item); if (!actualType.equals(expectedType)) { analysis.addCoercion(item, expectedType, metadata.getTypeManager().isTypeOnlyCoercion(actualType, expectedType)); } } } else { Type actualType = analysis.getType(row); Type expectedType = fieldTypes.get(0); if (!actualType.equals(expectedType)) { analysis.addCoercion(row, expectedType, metadata.getTypeManager().isTypeOnlyCoercion(actualType, expectedType)); } } } List<Field> fields = fieldTypes.stream() .map(valueType -> Field.newUnqualified(Optional.empty(), valueType)) .collect(toImmutableList()); return createAndAssignScope(node, scope, fields); } private void analyzeWindowFunctions(QuerySpecification node, List<Expression> outputExpressions, List<Expression> orderByExpressions) { WindowFunctionExtractor extractor = new WindowFunctionExtractor(); for (Expression expression : Iterables.concat(outputExpressions, orderByExpressions)) { extractor.process(expression, null); new WindowFunctionValidator().process(expression, analysis); } List<FunctionCall> windowFunctions = extractor.getWindowFunctions(); for (FunctionCall windowFunction : windowFunctions) { // filter with window function is not supported yet if (windowFunction.getFilter().isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "FILTER is not yet supported for window functions"); } Window window = windowFunction.getWindow().get(); WindowFunctionExtractor nestedExtractor = new WindowFunctionExtractor(); for (Expression argument : windowFunction.getArguments()) { nestedExtractor.process(argument, null); } for (Expression expression : window.getPartitionBy()) { nestedExtractor.process(expression, null); } for (SortItem sortItem : window.getOrderBy()) { nestedExtractor.process(sortItem.getSortKey(), null); } if (window.getFrame().isPresent()) { nestedExtractor.process(window.getFrame().get(), null); } if (!nestedExtractor.getWindowFunctions().isEmpty()) { throw new SemanticException(NESTED_WINDOW, node, "Cannot nest window functions inside window function '%s': %s", windowFunction, extractor.getWindowFunctions()); } if (windowFunction.isDistinct()) { throw new SemanticException(NOT_SUPPORTED, node, "DISTINCT in window function parameters not yet supported: %s", windowFunction); } if (window.getFrame().isPresent()) { analyzeWindowFrame(window.getFrame().get()); } List<TypeSignature> argumentTypes = Lists.transform(windowFunction.getArguments(), expression -> analysis.getType(expression).getTypeSignature()); FunctionKind kind = metadata.getFunctionRegistry().resolveFunction(windowFunction.getName(), fromTypeSignatures(argumentTypes)).getKind(); if (kind != AGGREGATE && kind != WINDOW) { throw new SemanticException(MUST_BE_WINDOW_FUNCTION, node, "Not a window function: %s", windowFunction.getName()); } } analysis.setWindowFunctions(node, windowFunctions); } private void analyzeWindowFrame(WindowFrame frame) { FrameBound.Type startType = frame.getStart().getType(); FrameBound.Type endType = frame.getEnd().orElse(new FrameBound(CURRENT_ROW)).getType(); if (startType == UNBOUNDED_FOLLOWING) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame start cannot be UNBOUNDED FOLLOWING"); } if (endType == UNBOUNDED_PRECEDING) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame end cannot be UNBOUNDED PRECEDING"); } if ((startType == CURRENT_ROW) && (endType == PRECEDING)) { 
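/* Editor's illustration, not in the original source: a frame such as "ROWS BETWEEN CURRENT ROW AND 1 PRECEDING" lands in this branch and is rejected. */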
throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame starting from CURRENT ROW cannot end with PRECEDING"); } if ((startType == FOLLOWING) && (endType == PRECEDING)) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame starting from FOLLOWING cannot end with PRECEDING"); } if ((startType == FOLLOWING) && (endType == CURRENT_ROW)) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame starting from FOLLOWING cannot end with CURRENT ROW"); } if ((frame.getType() == RANGE) && ((startType == PRECEDING) || (endType == PRECEDING))) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame RANGE PRECEDING is only supported with UNBOUNDED"); } if ((frame.getType() == RANGE) && ((startType == FOLLOWING) || (endType == FOLLOWING))) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame RANGE FOLLOWING is only supported with UNBOUNDED"); } } private void analyzeHaving(QuerySpecification node, Scope scope) { if (node.getHaving().isPresent()) { Expression predicate = node.getHaving().get(); ExpressionAnalysis expressionAnalysis = analyzeExpression(predicate, scope); analysis.recordSubqueries(node, expressionAnalysis); Type predicateType = expressionAnalysis.getType(predicate); if (!predicateType.equals(BOOLEAN) && !predicateType.equals(UNKNOWN)) { throw new SemanticException(TYPE_MISMATCH, predicate, "HAVING clause must evaluate to a boolean: actual type %s", predicateType); } analysis.setHaving(node, predicate); } } private List<Expression> analyzeOrderBy(QuerySpecification node, Scope sourceScope, Scope outputScope, List<Expression> outputExpressions) { if (SystemSessionProperties.isLegacyOrderByEnabled(session)) { return legacyAnalyzeOrderBy(node, sourceScope, outputScope, outputExpressions); } List<SortItem> items = node.getOrderBy() .map(OrderBy::getSortItems) .orElse(emptyList()); ImmutableList.Builder<Expression> orderByExpressionsBuilder = ImmutableList.builder(); if (!items.isEmpty()) { for (SortItem item : items) { Expression expression = item.getSortKey(); Expression orderByExpression; if (expression instanceof LongLiteral) { // this is an ordinal in the output tuple long ordinal = ((LongLiteral) expression).getValue(); if (ordinal < 1 || ordinal > outputExpressions.size()) { throw new SemanticException(INVALID_ORDINAL, expression, "ORDER BY position %s is not in select list", ordinal); } int field = toIntExact(ordinal - 1); Type type = outputScope.getRelationType().getFieldByIndex(field).getType(); if (!type.isOrderable()) { throw new SemanticException(TYPE_MISMATCH, node, "The type of expression in position %s is not orderable (actual: %s), and therefore cannot be used in ORDER BY", ordinal, type); } orderByExpression = outputExpressions.get(field); } else { // Analyze the original expression using a synthetic scope (which delegates to the source scope for any missing name) // to catch any semantic errors (due to type mismatch, etc) Scope synthetic = Scope.builder() .withParent(sourceScope) .withRelationType(outputScope.getRelationType()) .build(); analyzeExpression(expression, synthetic); orderByExpression = ExpressionTreeRewriter.rewriteWith(new OrderByExpressionRewriter(extractNamedOutputExpressions(node)), expression); ExpressionAnalysis expressionAnalysis = analyzeExpression(orderByExpression, sourceScope); analysis.recordSubqueries(node, expressionAnalysis); } Type type = analysis.getType(orderByExpression); if (!type.isOrderable()) { throw new SemanticException(TYPE_MISMATCH, node, "Type %s is 
not orderable, and therefore cannot be used in ORDER BY: %s", type, expression); } orderByExpressionsBuilder.add(orderByExpression); } } List<Expression> orderByExpressions = orderByExpressionsBuilder.build(); analysis.setOrderByExpressions(node, orderByExpressions); if (node.getSelect().isDistinct() && !outputExpressions.containsAll(orderByExpressions)) { throw new SemanticException(ORDER_BY_MUST_BE_IN_SELECT, node.getSelect(), "For SELECT DISTINCT, ORDER BY expressions must appear in select list"); } return orderByExpressions; } /** * Preserve the old column resolution behavior for ORDER BY while we transition workloads to new semantics * TODO: remove this */ private List<Expression> legacyAnalyzeOrderBy(QuerySpecification node, Scope sourceScope, Scope outputScope, List<Expression> outputExpressions) { List<SortItem> items = node.getOrderBy() .map(OrderBy::getSortItems) .orElse(emptyList()); ImmutableList.Builder<Expression> orderByExpressionsBuilder = ImmutableList.builder(); if (!items.isEmpty()) { // Compute aliased output terms so we can resolve order by expressions against them first ImmutableMultimap.Builder<QualifiedName, Expression> byAliasBuilder = ImmutableMultimap.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof SingleColumn) { Optional<String> alias = ((SingleColumn) item).getAlias(); if (alias.isPresent()) { byAliasBuilder.put(QualifiedName.of(alias.get()), ((SingleColumn) item).getExpression()); // TODO: need to know if alias was quoted } } } Multimap<QualifiedName, Expression> byAlias = byAliasBuilder.build(); for (SortItem item : items) { Expression expression = item.getSortKey(); Expression orderByExpression = null; if (expression instanceof Identifier) { // if this is a simple name reference, try to resolve against output columns QualifiedName name = QualifiedName.of(((Identifier) expression).getName()); Collection<Expression> expressions = byAlias.get(name); if (expressions.size() > 1) { throw new SemanticException(AMBIGUOUS_ATTRIBUTE, expression, "'%s' in ORDER BY is ambiguous", name.getSuffix()); } if (expressions.size() == 1) { orderByExpression = Iterables.getOnlyElement(expressions); } // otherwise, couldn't resolve name against output aliases, so fall through... 
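/* Editor's illustration, not in the original source: given "SELECT price * qty AS total ... ORDER BY total", the identifier "total" resolves to the aliased select expression above; an unmatched identifier keeps falling through and is analyzed against the source scope below. */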
} else if (expression instanceof LongLiteral) { // this is an ordinal in the output tuple long ordinal = ((LongLiteral) expression).getValue(); if (ordinal < 1 || ordinal > outputExpressions.size()) { throw new SemanticException(INVALID_ORDINAL, expression, "ORDER BY position %s is not in select list", ordinal); } int field = toIntExact(ordinal - 1); Type type = outputScope.getRelationType().getFieldByIndex(field).getType(); if (!type.isOrderable()) { throw new SemanticException(TYPE_MISMATCH, node, "The type of expression in position %s is not orderable (actual: %s), and therefore cannot be used in ORDER BY", ordinal, type); } orderByExpression = outputExpressions.get(field); } // otherwise, just use the expression as is if (orderByExpression == null) { orderByExpression = expression; } ExpressionAnalysis expressionAnalysis = analyzeExpression(orderByExpression, sourceScope); analysis.recordSubqueries(node, expressionAnalysis); Type type = expressionAnalysis.getType(orderByExpression); if (!type.isOrderable()) { throw new SemanticException(TYPE_MISMATCH, node, "Type %s is not orderable, and therefore cannot be used in ORDER BY: %s", type, expression); } orderByExpressionsBuilder.add(orderByExpression); } } List<Expression> orderByExpressions = orderByExpressionsBuilder.build(); analysis.setOrderByExpressions(node, orderByExpressions); if (node.getSelect().isDistinct() && !outputExpressions.containsAll(orderByExpressions)) { throw new SemanticException(ORDER_BY_MUST_BE_IN_SELECT, node.getSelect(), "For SELECT DISTINCT, ORDER BY expressions must appear in select list"); } return orderByExpressions; } private Multimap<QualifiedName, Expression> extractNamedOutputExpressions(QuerySpecification node) { // Compute aliased output terms so we can resolve order by expressions against them first ImmutableMultimap.Builder<QualifiedName, Expression> assignments = ImmutableMultimap.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof SingleColumn) { SingleColumn column = (SingleColumn) item; Optional<String> alias = column.getAlias(); if (alias.isPresent()) { assignments.put(QualifiedName.of(alias.get()), column.getExpression()); // TODO: need to know if alias was quoted } else if (column.getExpression() instanceof Identifier) { assignments.put(QualifiedName.of(((Identifier) column.getExpression()).getName()), column.getExpression()); } } } return assignments.build(); } private class OrderByExpressionRewriter extends ExpressionRewriter<Void> { private final Multimap<QualifiedName, Expression> assignments; public OrderByExpressionRewriter(Multimap<QualifiedName, Expression> assignments) { this.assignments = assignments; } @Override public Expression rewriteIdentifier(Identifier reference, Void context, ExpressionTreeRewriter<Void> treeRewriter) { // if this is a simple name reference, try to resolve against output columns QualifiedName name = QualifiedName.of(reference.getName()); Set<Expression> expressions = assignments.get(name) .stream() .collect(Collectors.toSet()); if (expressions.size() > 1) { throw new SemanticException(AMBIGUOUS_ATTRIBUTE, reference, "'%s' in ORDER BY is ambiguous", name); } if (expressions.size() == 1) { return Iterables.getOnlyElement(expressions); } // otherwise, couldn't resolve name against output aliases, so fall through... 
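// Illustrative example (hypothetical query): for
//   SELECT substr(name, 1, 3) AS prefix FROM t ORDER BY prefix
// this rewriter replaces the identifier "prefix" with substr(name, 1, 3), which the
// caller then analyzes against the source scope; an identifier matching no output
// alias is returned unchanged below.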
return reference; } } private List<List<Expression>> analyzeGroupBy(QuerySpecification node, Scope scope, List<Expression> outputExpressions) { List<Set<Expression>> computedGroupingSets = ImmutableList.of(); // empty list = no aggregations if (node.getGroupBy().isPresent()) { List<List<Set<Expression>>> enumeratedGroupingSets = node.getGroupBy().get().getGroupingElements().stream() .map(GroupingElement::enumerateGroupingSets) .collect(toImmutableList()); // compute cross product of enumerated grouping sets, if there are any computedGroupingSets = computeGroupingSetsCrossProduct(enumeratedGroupingSets, node.getGroupBy().get().isDistinct()); checkState(!computedGroupingSets.isEmpty(), "computed grouping sets cannot be empty"); } else if (hasAggregates(node)) { // if there are aggregates, but no group by, create a grand total grouping set (global aggregation) computedGroupingSets = ImmutableList.of(ImmutableSet.of()); } List<List<Expression>> analyzedGroupingSets = computedGroupingSets.stream() .map(groupingSet -> analyzeGroupingColumns(groupingSet, node, scope, outputExpressions)) .collect(toImmutableList()); analysis.setGroupingSets(node, analyzedGroupingSets); return analyzedGroupingSets; } private List<Set<Expression>> computeGroupingSetsCrossProduct(List<List<Set<Expression>>> enumeratedGroupingSets, boolean isDistinct) { checkState(!enumeratedGroupingSets.isEmpty(), "enumeratedGroupingSets cannot be empty"); List<Set<Expression>> groupingSetsCrossProduct = new ArrayList<>(); enumeratedGroupingSets.get(0) .stream() .map(ImmutableSet::copyOf) .forEach(groupingSetsCrossProduct::add); for (int i = 1; i < enumeratedGroupingSets.size(); i++) { List<Set<Expression>> groupingSets = enumeratedGroupingSets.get(i); List<Set<Expression>> oldGroupingSetsCrossProduct = ImmutableList.copyOf(groupingSetsCrossProduct); groupingSetsCrossProduct.clear(); for (Set<Expression> existingSet : oldGroupingSetsCrossProduct) { for (Set<Expression> groupingSet : groupingSets) { Set<Expression> concatenatedSet = ImmutableSet.<Expression>builder() .addAll(existingSet) .addAll(groupingSet) .build(); groupingSetsCrossProduct.add(concatenatedSet); } } } if (isDistinct) { return ImmutableList.copyOf(ImmutableSet.copyOf(groupingSetsCrossProduct)); } return groupingSetsCrossProduct; } private List<Expression> analyzeGroupingColumns(Set<Expression> groupingColumns, QuerySpecification node, Scope scope, List<Expression> outputExpressions) { ImmutableList.Builder<Expression> groupingColumnsBuilder = ImmutableList.builder(); for (Expression groupingColumn : groupingColumns) { // first, see if this is an ordinal Expression groupByExpression; if (groupingColumn instanceof LongLiteral) { long ordinal = ((LongLiteral) groupingColumn).getValue(); if (ordinal < 1 || ordinal > outputExpressions.size()) { throw new SemanticException(INVALID_ORDINAL, groupingColumn, "GROUP BY position %s is not in select list", ordinal); } groupByExpression = outputExpressions.get(toIntExact(ordinal - 1)); } else { ExpressionAnalysis expressionAnalysis = analyzeExpression(groupingColumn, scope); analysis.recordSubqueries(node, expressionAnalysis); groupByExpression = groupingColumn; } Analyzer.verifyNoAggregatesOrWindowFunctions(metadata.getFunctionRegistry(), groupByExpression, "GROUP BY clause"); Type type = analysis.getType(groupByExpression); if (!type.isComparable()) { throw new SemanticException(TYPE_MISMATCH, node, "%s is not comparable, and therefore cannot be used in GROUP BY", type); } groupingColumnsBuilder.add(groupByExpression); } return 
groupingColumnsBuilder.build(); } private Scope computeOutputScope(QuerySpecification node, Scope scope) { ImmutableList.Builder<Field> outputFields = ImmutableList.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof AllColumns) { // expand * and T.* Optional<QualifiedName> starPrefix = ((AllColumns) item).getPrefix(); for (Field field : scope.getRelationType().resolveFieldsWithPrefix(starPrefix)) { outputFields.add(Field.newUnqualified(field.getName(), field.getType(), field.getOriginTable(), false)); } } else if (item instanceof SingleColumn) { SingleColumn column = (SingleColumn) item; Expression expression = column.getExpression(); Optional<String> fieldName = column.getAlias(); Optional<QualifiedObjectName> originTable = Optional.empty(); QualifiedName name = null; if (expression instanceof Identifier) { name = QualifiedName.of(((Identifier) expression).getName()); } else if (expression instanceof DereferenceExpression) { name = DereferenceExpression.getQualifiedName((DereferenceExpression) expression); } if (name != null) { List<Field> matchingFields = scope.getRelationType().resolveFields(name); if (!matchingFields.isEmpty()) { originTable = matchingFields.get(0).getOriginTable(); } } if (!fieldName.isPresent()) { if (name != null) { fieldName = Optional.of(getLast(name.getOriginalParts())); } } outputFields.add(Field.newUnqualified(fieldName, analysis.getType(expression), originTable, column.getAlias().isPresent())); // TODO don't use analysis as a side-channel. Use outputExpressions to look up the type } else { throw new IllegalArgumentException("Unsupported SelectItem type: " + item.getClass().getName()); } } return createAndAssignScope(node, scope, outputFields.build()); } private List<Expression> analyzeSelect(QuerySpecification node, Scope scope) { ImmutableList.Builder<Expression> outputExpressionBuilder = ImmutableList.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof AllColumns) { // expand * and T.* Optional<QualifiedName> starPrefix = ((AllColumns) item).getPrefix(); RelationType relationType = scope.getRelationType(); List<Field> fields = relationType.resolveFieldsWithPrefix(starPrefix); if (fields.isEmpty()) { if (starPrefix.isPresent()) { throw new SemanticException(MISSING_TABLE, item, "Table '%s' not found", starPrefix.get()); } throw new SemanticException(WILDCARD_WITHOUT_FROM, item, "SELECT * not allowed in queries without FROM clause"); } for (Field field : fields) { int fieldIndex = relationType.indexOf(field); FieldReference expression = new FieldReference(fieldIndex); outputExpressionBuilder.add(expression); ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, scope); Type type = expressionAnalysis.getType(expression); if (node.getSelect().isDistinct() && !type.isComparable()) { throw new SemanticException(TYPE_MISMATCH, node.getSelect(), "DISTINCT can only be applied to comparable types (actual: %s)", type); } } } else if (item instanceof SingleColumn) { SingleColumn column = (SingleColumn) item; ExpressionAnalysis expressionAnalysis = analyzeExpression(column.getExpression(), scope); analysis.recordSubqueries(node, expressionAnalysis); outputExpressionBuilder.add(column.getExpression()); Type type = expressionAnalysis.getType(column.getExpression()); if (node.getSelect().isDistinct() && !type.isComparable()) { throw new SemanticException(TYPE_MISMATCH, node.getSelect(), "DISTINCT can only be applied to comparable types (actual: %s): %s", type, 
column.getExpression()); } } else { throw new IllegalArgumentException("Unsupported SelectItem type: " + item.getClass().getName()); } } ImmutableList<Expression> result = outputExpressionBuilder.build(); analysis.setOutputExpressions(node, result); return result; } public void analyzeWhere(Node node, Scope scope, Expression predicate) { Analyzer.verifyNoAggregatesOrWindowFunctions(metadata.getFunctionRegistry(), predicate, "WHERE clause"); ExpressionAnalysis expressionAnalysis = analyzeExpression(predicate, scope); analysis.recordSubqueries(node, expressionAnalysis); Type predicateType = expressionAnalysis.getType(predicate); if (!predicateType.equals(BOOLEAN)) { if (!predicateType.equals(UNKNOWN)) { throw new SemanticException(TYPE_MISMATCH, predicate, "WHERE clause must evaluate to a boolean: actual type %s", predicateType); } // coerce null to boolean analysis.addCoercion(predicate, BOOLEAN, false); } analysis.setWhere(node, predicate); } private Scope analyzeFrom(QuerySpecification node, Scope scope) { if (node.getFrom().isPresent()) { return process(node.getFrom().get(), scope); } return scope; } private void analyzeAggregations( QuerySpecification node, Scope scope, List<List<Expression>> groupingSets, Set<Expression> columnReferences, List<Expression> expressions) { AggregateExtractor extractor = new AggregateExtractor(metadata.getFunctionRegistry()); for (Expression expression : expressions) { extractor.process(expression); } analysis.setAggregates(node, extractor.getAggregates()); // is this an aggregation query? if (!groupingSets.isEmpty()) { // ensure SELECT, ORDER BY and HAVING are constant with respect to group // e.g., these are all valid expressions: // SELECT f(a) GROUP BY a // SELECT f(a + 1) GROUP BY a + 1 // SELECT a + sum(b) GROUP BY a ImmutableList<Expression> distinctGroupingColumns = groupingSets.stream() .flatMap(Collection::stream) .distinct() .collect(toImmutableList()); for (Expression expression : expressions) { verifyAggregations(distinctGroupingColumns, scope, expression, columnReferences); } } } private boolean hasAggregates(QuerySpecification node) { AggregateExtractor extractor = new AggregateExtractor(metadata.getFunctionRegistry()); node.getSelect() .getSelectItems().stream() .filter(SingleColumn.class::isInstance) .forEach(extractor::process); node.getOrderBy().map(OrderBy::getSortItems).ifPresent( sortItems -> sortItems .forEach(extractor::process)); node.getHaving() .ifPresent(extractor::process); return !extractor.getAggregates().isEmpty(); } private void verifyAggregations( List<Expression> groupByExpressions, Scope scope, Expression expression, Set<Expression> columnReferences) { AggregationAnalyzer analyzer = new AggregationAnalyzer(groupByExpressions, metadata, scope, columnReferences, analysis.getParameters(), analysis.isDescribe()); analyzer.analyze(expression); } private RelationType analyzeView(Query query, QualifiedObjectName name, Optional<String> catalog, Optional<String> schema, Optional<String> owner, Table node) { try { // run view as view owner if set; otherwise, run as session user Identity identity; AccessControl viewAccessControl; if (owner.isPresent()) { identity = new Identity(owner.get(), Optional.empty()); viewAccessControl = new ViewAccessControl(accessControl); } else { identity = session.getIdentity(); viewAccessControl = accessControl; } Session viewSession = Session.builder(metadata.getSessionPropertyManager()) .setQueryId(session.getQueryId()) .setTransactionId(session.getTransactionId().orElse(null)) .setIdentity(identity)
.setSource(session.getSource().orElse(null)) .setCatalog(catalog.orElse(null)) .setSchema(schema.orElse(null)) .setTimeZoneKey(session.getTimeZoneKey()) .setLocale(session.getLocale()) .setRemoteUserAddress(session.getRemoteUserAddress().orElse(null)) .setUserAgent(session.getUserAgent().orElse(null)) .setClientInfo(session.getClientInfo().orElse(null)) .setStartTime(session.getStartTime()) .setSystemProperty(LEGACY_ORDER_BY, session.getSystemProperty(LEGACY_ORDER_BY, Boolean.class).toString()) .build(); StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, viewAccessControl, viewSession); Scope queryScope = analyzer.analyze(query, Scope.create()); return queryScope.getRelationType().withAlias(name.getObjectName(), null); } catch (RuntimeException e) { throw new SemanticException(VIEW_ANALYSIS_ERROR, node, "Failed analyzing stored view '%s': %s", name, e.getMessage()); } } private Query parseView(String view, QualifiedObjectName name, Node node) { try { return (Query) sqlParser.createStatement(view); } catch (ParsingException e) { throw new SemanticException(VIEW_PARSE_ERROR, node, "Failed parsing stored view '%s': %s", name, e.getMessage()); } } private boolean isViewStale(List<ViewDefinition.ViewColumn> columns, Collection<Field> fields) { if (columns.size() != fields.size()) { return true; } List<Field> fieldList = ImmutableList.copyOf(fields); for (int i = 0; i < columns.size(); i++) { ViewDefinition.ViewColumn column = columns.get(i); Field field = fieldList.get(i); if (!column.getName().equalsIgnoreCase(field.getName().orElse(null)) || !metadata.getTypeManager().canCoerce(field.getType(), column.getType())) { return true; } } return false; } private ExpressionAnalysis analyzeExpression(Expression expression, Scope scope) { return ExpressionAnalyzer.analyzeExpression( session, metadata, accessControl, sqlParser, scope, analysis, expression); } private List<Expression> descriptorToFields(Scope scope) { ImmutableList.Builder<Expression> builder = ImmutableList.builder(); for (int fieldIndex = 0; fieldIndex < scope.getRelationType().getAllFieldCount(); fieldIndex++) { FieldReference expression = new FieldReference(fieldIndex); builder.add(expression); analyzeExpression(expression, scope); } return builder.build(); } private Scope analyzeWith(Query node, Scope scope) { // analyze WITH clause if (!node.getWith().isPresent()) { return scope; } With with = node.getWith().get(); if (with.isRecursive()) { throw new SemanticException(NOT_SUPPORTED, with, "Recursive WITH queries are not supported"); } Scope.Builder withScopeBuilder = Scope.builder() .withParent(scope); for (WithQuery withQuery : with.getQueries()) { Query query = withQuery.getQuery(); process(query, withScopeBuilder.build()); String name = withQuery.getName(); if (withScopeBuilder.containsNamedQuery(name)) { throw new SemanticException(DUPLICATE_RELATION, withQuery, "WITH query name '%s' specified more than once", name); } // check if all or none of the columns are explicitly aliased if (withQuery.getColumnNames().isPresent()) { List<String> columnNames = withQuery.getColumnNames().get(); RelationType queryDescriptor = analysis.getOutputDescriptor(query); if (columnNames.size() != queryDescriptor.getVisibleFieldCount()) { throw new SemanticException(MISMATCHED_COLUMN_ALIASES, withQuery, "WITH column alias list has %s entries but WITH query(%s) has %s columns", columnNames.size(), name, queryDescriptor.getVisibleFieldCount()); } } withScopeBuilder.withNamedQuery(name, withQuery); } Scope withScope =
withScopeBuilder.build(); analysis.setScope(with, withScope); return withScope; } private void analyzeOrderBy(Query node, Scope scope) { List<SortItem> items = node.getOrderBy() .map(OrderBy::getSortItems) .orElse(emptyList()); ImmutableList.Builder<Expression> orderByFieldsBuilder = ImmutableList.builder(); for (SortItem item : items) { Expression expression = item.getSortKey(); if (expression instanceof LongLiteral) { // this is an ordinal in the output tuple long ordinal = ((LongLiteral) expression).getValue(); if (ordinal < 1 || ordinal > scope.getRelationType().getVisibleFieldCount()) { throw new SemanticException(INVALID_ORDINAL, expression, "ORDER BY position %s is not in select list", ordinal); } expression = new FieldReference(toIntExact(ordinal - 1)); } ExpressionAnalysis expressionAnalysis = ExpressionAnalyzer.analyzeExpression(session, metadata, accessControl, sqlParser, scope, analysis, expression); analysis.recordSubqueries(node, expressionAnalysis); orderByFieldsBuilder.add(expression); } analysis.setOrderByExpressions(node, orderByFieldsBuilder.build()); } private Scope createAndAssignScope(Node node, Scope parent, Field... fields) { return createAndAssignScope(node, parent, new RelationType(fields)); } private Scope createAndAssignScope(Node node, Scope parent, List<Field> fields) { return createAndAssignScope(node, parent, new RelationType(fields)); } private Scope createAndAssignScope(Node node, Scope parent, RelationType relationType) { Scope scope = Scope.builder() .withParent(parent) .withRelationType(relationType) .build(); analysis.setScope(node, scope); return scope; } } }
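// A minimal usage sketch (illustrative only; the Analysis constructor shown and the
// surrounding wiring are assumptions, not taken from this class):
//
//   SqlParser parser = new SqlParser();
//   Statement statement = parser.createStatement("SELECT a, count(*) FROM t GROUP BY a");
//   Analysis analysis = new Analysis(statement, parameters, false); // assumed constructor
//   StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, parser, accessControl, session);
//   Scope scope = analyzer.analyze(statement, Scope.create());
//
// The returned Scope carries the output RelationType; types, coercions, grouping sets
// and resolved tables are recorded as side effects on the Analysis object.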
presto-main/src/main/java/com/facebook/presto/sql/analyzer/StatementAnalyzer.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.analyzer; import com.facebook.presto.Session; import com.facebook.presto.SystemSessionProperties; import com.facebook.presto.metadata.FunctionKind; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.QualifiedObjectName; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.metadata.TableMetadata; import com.facebook.presto.metadata.ViewDefinition; import com.facebook.presto.security.AccessControl; import com.facebook.presto.security.AllowAllAccessControl; import com.facebook.presto.security.ViewAccessControl; import com.facebook.presto.spi.CatalogSchemaName; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.security.Identity; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeSignature; import com.facebook.presto.sql.ExpressionUtils; import com.facebook.presto.sql.parser.ParsingException; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.planner.DependencyExtractor; import com.facebook.presto.sql.planner.ExpressionInterpreter; import com.facebook.presto.sql.planner.NoOpSymbolResolver; import com.facebook.presto.sql.planner.optimizations.CanonicalizeExpressions; import com.facebook.presto.sql.tree.AddColumn; import com.facebook.presto.sql.tree.AliasedRelation; import com.facebook.presto.sql.tree.AllColumns; import com.facebook.presto.sql.tree.Call; import com.facebook.presto.sql.tree.Commit; import com.facebook.presto.sql.tree.ComparisonExpression; import com.facebook.presto.sql.tree.CreateSchema; import com.facebook.presto.sql.tree.CreateTable; import com.facebook.presto.sql.tree.CreateTableAsSelect; import com.facebook.presto.sql.tree.CreateView; import com.facebook.presto.sql.tree.Deallocate; import com.facebook.presto.sql.tree.DefaultTraversalVisitor; import com.facebook.presto.sql.tree.Delete; import com.facebook.presto.sql.tree.DereferenceExpression; import com.facebook.presto.sql.tree.DropSchema; import com.facebook.presto.sql.tree.DropTable; import com.facebook.presto.sql.tree.DropView; import com.facebook.presto.sql.tree.Except; import com.facebook.presto.sql.tree.Execute; import com.facebook.presto.sql.tree.Explain; import com.facebook.presto.sql.tree.ExplainType; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.ExpressionRewriter; import com.facebook.presto.sql.tree.ExpressionTreeRewriter; import com.facebook.presto.sql.tree.FieldReference; import com.facebook.presto.sql.tree.FrameBound; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.sql.tree.Grant; import com.facebook.presto.sql.tree.GroupingElement; import com.facebook.presto.sql.tree.Identifier; import com.facebook.presto.sql.tree.Insert; import com.facebook.presto.sql.tree.Intersect; import com.facebook.presto.sql.tree.Join; import 
com.facebook.presto.sql.tree.JoinCriteria; import com.facebook.presto.sql.tree.JoinOn; import com.facebook.presto.sql.tree.JoinUsing; import com.facebook.presto.sql.tree.LongLiteral; import com.facebook.presto.sql.tree.NaturalJoin; import com.facebook.presto.sql.tree.Node; import com.facebook.presto.sql.tree.OrderBy; import com.facebook.presto.sql.tree.Prepare; import com.facebook.presto.sql.tree.QualifiedName; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.QuerySpecification; import com.facebook.presto.sql.tree.Relation; import com.facebook.presto.sql.tree.RenameColumn; import com.facebook.presto.sql.tree.RenameSchema; import com.facebook.presto.sql.tree.RenameTable; import com.facebook.presto.sql.tree.ResetSession; import com.facebook.presto.sql.tree.Revoke; import com.facebook.presto.sql.tree.Rollback; import com.facebook.presto.sql.tree.Row; import com.facebook.presto.sql.tree.SampledRelation; import com.facebook.presto.sql.tree.SelectItem; import com.facebook.presto.sql.tree.SetOperation; import com.facebook.presto.sql.tree.SetSession; import com.facebook.presto.sql.tree.SingleColumn; import com.facebook.presto.sql.tree.SortItem; import com.facebook.presto.sql.tree.StartTransaction; import com.facebook.presto.sql.tree.Statement; import com.facebook.presto.sql.tree.Table; import com.facebook.presto.sql.tree.TableSubquery; import com.facebook.presto.sql.tree.Unnest; import com.facebook.presto.sql.tree.Use; import com.facebook.presto.sql.tree.Values; import com.facebook.presto.sql.tree.Window; import com.facebook.presto.sql.tree.WindowFrame; import com.facebook.presto.sql.tree.With; import com.facebook.presto.sql.tree.WithQuery; import com.facebook.presto.type.ArrayType; import com.facebook.presto.type.MapType; import com.facebook.presto.type.RowType; import com.facebook.presto.util.maps.IdentityLinkedHashMap; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import static com.facebook.presto.SystemSessionProperties.LEGACY_ORDER_BY; import static com.facebook.presto.metadata.FunctionKind.AGGREGATE; import static com.facebook.presto.metadata.FunctionKind.WINDOW; import static com.facebook.presto.metadata.MetadataUtil.createQualifiedObjectName; import static com.facebook.presto.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.createConstantAnalyzer; import static com.facebook.presto.sql.analyzer.ExpressionAnalyzer.getExpressionTypes; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.AMBIGUOUS_ATTRIBUTE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.COLUMN_NAME_NOT_SPECIFIED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.COLUMN_TYPE_UNKNOWN; import static 
com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_COLUMN_NAME; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_RELATION; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_ORDINAL; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_WINDOW_FRAME; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISMATCHED_COLUMN_ALIASES; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISMATCHED_SET_COLUMN_TYPES; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_CATALOG; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_COLUMN; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_SCHEMA; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_TABLE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MUST_BE_WINDOW_FUNCTION; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NESTED_WINDOW; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NON_NUMERIC_SAMPLE_PERCENTAGE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.ORDER_BY_MUST_BE_IN_SELECT; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TABLE_ALREADY_EXISTS; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TYPE_MISMATCH; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_ANALYSIS_ERROR; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_IS_RECURSIVE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_IS_STALE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_PARSE_ERROR; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.WILDCARD_WITHOUT_FROM; import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypeSignatures; import static com.facebook.presto.sql.planner.ExpressionInterpreter.expressionOptimizer; import static com.facebook.presto.sql.tree.ComparisonExpressionType.EQUAL; import static com.facebook.presto.sql.tree.ExplainType.Type.DISTRIBUTED; import static com.facebook.presto.sql.tree.FrameBound.Type.CURRENT_ROW; import static com.facebook.presto.sql.tree.FrameBound.Type.FOLLOWING; import static com.facebook.presto.sql.tree.FrameBound.Type.PRECEDING; import static com.facebook.presto.sql.tree.FrameBound.Type.UNBOUNDED_FOLLOWING; import static com.facebook.presto.sql.tree.FrameBound.Type.UNBOUNDED_PRECEDING; import static com.facebook.presto.sql.tree.WindowFrame.Type.RANGE; import static com.facebook.presto.type.UnknownType.UNKNOWN; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.collect.Iterables.getLast; import static com.google.common.collect.Iterables.transform; import static java.lang.Math.toIntExact; import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; class StatementAnalyzer { private final Analysis analysis; private final Metadata metadata; private final Session session; private final SqlParser sqlParser; private final AccessControl accessControl; public StatementAnalyzer( Analysis analysis, Metadata metadata, SqlParser sqlParser, AccessControl accessControl, Session session) { this.analysis = requireNonNull(analysis, "analysis is null"); 
this.metadata = requireNonNull(metadata, "metadata is null"); this.sqlParser = requireNonNull(sqlParser, "sqlParser is null"); this.accessControl = requireNonNull(accessControl, "accessControl is null"); this.session = requireNonNull(session, "session is null"); } public Scope analyze(Node node, Scope scope) { return new Visitor().process(node, scope); } private void analyzeWhere(Node node, Scope scope, Expression predicate) { Visitor visitor = new Visitor(); visitor.analyzeWhere(node, scope, predicate); } private class Visitor extends DefaultTraversalVisitor<Scope, Scope> { @Override protected Scope visitUse(Use node, Scope scope) { throw new SemanticException(NOT_SUPPORTED, node, "USE statement is not supported"); } @Override protected Scope visitInsert(Insert insert, Scope scope) { QualifiedObjectName targetTable = createQualifiedObjectName(session, insert, insert.getTarget()); if (metadata.getView(session, targetTable).isPresent()) { throw new SemanticException(NOT_SUPPORTED, insert, "Inserting into views is not supported"); } // analyze the query that creates the data Scope queryScope = process(insert.getQuery(), scope); analysis.setUpdateType("INSERT"); // verify the insert destination columns match the query Optional<TableHandle> targetTableHandle = metadata.getTableHandle(session, targetTable); if (!targetTableHandle.isPresent()) { throw new SemanticException(MISSING_TABLE, insert, "Table '%s' does not exist", targetTable); } accessControl.checkCanInsertIntoTable(session.getRequiredTransactionId(), session.getIdentity(), targetTable); TableMetadata tableMetadata = metadata.getTableMetadata(session, targetTableHandle.get()); List<String> tableColumns = tableMetadata.getColumns().stream() .filter(column -> !column.isHidden()) .map(ColumnMetadata::getName) .collect(toImmutableList()); List<String> insertColumns; if (insert.getColumns().isPresent()) { insertColumns = insert.getColumns().get().stream() .map(String::toLowerCase) .collect(toImmutableList()); Set<String> columnNames = new HashSet<>(); for (String insertColumn : insertColumns) { if (!tableColumns.contains(insertColumn)) { throw new SemanticException(MISSING_COLUMN, insert, "Insert column name does not exist in target table: %s", insertColumn); } if (!columnNames.add(insertColumn)) { throw new SemanticException(DUPLICATE_COLUMN_NAME, insert, "Insert column name is specified more than once: %s", insertColumn); } } } else { insertColumns = tableColumns; } Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, targetTableHandle.get()); analysis.setInsert(new Analysis.Insert( targetTableHandle.get(), insertColumns.stream().map(columnHandles::get).collect(toImmutableList()))); Iterable<Type> tableTypes = insertColumns.stream() .map(insertColumn -> tableMetadata.getColumn(insertColumn).getType()) .collect(toImmutableList()); Iterable<Type> queryTypes = transform(queryScope.getRelationType().getVisibleFields(), Field::getType); if (!typesMatchForInsert(tableTypes, queryTypes)) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, insert, "Insert query has mismatched column types: " + "Table: [" + Joiner.on(", ").join(tableTypes) + "], " + "Query: [" + Joiner.on(", ").join(queryTypes) + "]"); } return createAndAssignScope(insert, scope, Field.newUnqualified("rows", BIGINT)); } private boolean typesMatchForInsert(Iterable<Type> tableTypes, Iterable<Type> queryTypes) { if (Iterables.size(tableTypes) != Iterables.size(queryTypes)) { return false; } Iterator<Type> tableTypesIterator = tableTypes.iterator(); 
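// Illustrative note (hypothetical example): the pairwise walk below accepts an INSERT
// when every query column can be implicitly coerced to the corresponding table column,
// e.g. an INTEGER value into a BIGINT column; a VARCHAR value into a BIGINT column has
// no implicit coercion, so the match fails.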
Iterator<Type> queryTypesIterator = queryTypes.iterator(); while (tableTypesIterator.hasNext()) { Type tableType = tableTypesIterator.next(); Type queryType = queryTypesIterator.next(); if (!metadata.getTypeManager().canCoerce(queryType, tableType)) { return false; } } return true; } @Override protected Scope visitDelete(Delete node, Scope scope) { Table table = node.getTable(); QualifiedObjectName tableName = createQualifiedObjectName(session, table, table.getName()); if (metadata.getView(session, tableName).isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "Deleting from views is not supported"); } // Analyzer checks for select permissions but DELETE has a separate permission, so disable access checks // TODO: we shouldn't need to create a new analyzer. The access control should be carried in the context object StatementAnalyzer analyzer = new StatementAnalyzer( analysis, metadata, sqlParser, new AllowAllAccessControl(), session); Scope tableScope = analyzer.analyze(table, scope); node.getWhere().ifPresent(where -> analyzer.analyzeWhere(node, tableScope, where)); analysis.setUpdateType("DELETE"); accessControl.checkCanDeleteFromTable(session.getRequiredTransactionId(), session.getIdentity(), tableName); return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT)); } @Override protected Scope visitCreateTableAsSelect(CreateTableAsSelect node, Scope scope) { analysis.setUpdateType("CREATE TABLE"); // turn this into a query that has a new table writer node on top. QualifiedObjectName targetTable = createQualifiedObjectName(session, node, node.getName()); analysis.setCreateTableDestination(targetTable); Optional<TableHandle> targetTableHandle = metadata.getTableHandle(session, targetTable); if (targetTableHandle.isPresent()) { if (node.isNotExists()) { analysis.setCreateTableAsSelectNoOp(true); return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT)); } throw new SemanticException(TABLE_ALREADY_EXISTS, node, "Destination table '%s' already exists", targetTable); } for (Expression expression : node.getProperties().values()) { // analyze table property value expressions which must be constant createConstantAnalyzer(metadata, session, analysis.getParameters(), analysis.isDescribe()) .analyze(expression, scope); } analysis.setCreateTableProperties(node.getProperties()); accessControl.checkCanCreateTable(session.getRequiredTransactionId(), session.getIdentity(), targetTable); analysis.setCreateTableAsSelectWithData(node.isWithData()); // analyze the query that creates the table Scope queryScope = process(node.getQuery(), scope); validateColumns(node, queryScope.getRelationType()); return createAndAssignScope(node, scope, Field.newUnqualified("rows", BIGINT)); } @Override protected Scope visitCreateView(CreateView node, Scope scope) { analysis.setUpdateType("CREATE VIEW"); QualifiedObjectName viewName = createQualifiedObjectName(session, node, node.getName()); // analyze the query that creates the view StatementAnalyzer analyzer = new StatementAnalyzer( analysis, metadata, sqlParser, new ViewAccessControl(accessControl), session); Scope queryScope = analyzer.analyze(node.getQuery(), scope); accessControl.checkCanCreateView(session.getRequiredTransactionId(), session.getIdentity(), viewName); validateColumns(node, queryScope.getRelationType()); return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitSetSession(SetSession node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override 
protected Scope visitResetSession(ResetSession node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitAddColumn(AddColumn node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCreateSchema(CreateSchema node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDropSchema(DropSchema node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRenameSchema(RenameSchema node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCreateTable(CreateTable node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDropTable(DropTable node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRenameTable(RenameTable node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRenameColumn(RenameColumn node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDropView(DropView node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitStartTransaction(StartTransaction node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCommit(Commit node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRollback(Rollback node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitPrepare(Prepare node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitDeallocate(Deallocate node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitExecute(Execute node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitGrant(Grant node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitRevoke(Revoke node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } @Override protected Scope visitCall(Call node, Scope scope) { return createAndAssignScope(node, scope, emptyList()); } private void validateColumns(Statement node, RelationType descriptor) { // verify that all column names are specified and unique // TODO: collect errors and return them all at once Set<String> names = new HashSet<>(); for (Field field : descriptor.getVisibleFields()) { Optional<String> fieldName = field.getName(); if (!fieldName.isPresent()) { throw new SemanticException(COLUMN_NAME_NOT_SPECIFIED, node, "Column name not specified at position %s", descriptor.indexOf(field) + 1); } if (!names.add(fieldName.get())) { throw new SemanticException(DUPLICATE_COLUMN_NAME, node, "Column name '%s' specified more than once", fieldName.get()); } if (field.getType().equals(UNKNOWN)) { throw new SemanticException(COLUMN_TYPE_UNKNOWN, node, "Column type is unknown: %s", fieldName.get()); } } } @Override protected Scope visitExplain(Explain node, Scope scope) throws SemanticException { checkState(node.isAnalyze(), "Non analyze explain should be rewritten to Query"); if (node.getOptions().stream().anyMatch(option -> !option.equals(new ExplainType(DISTRIBUTED)))) { throw new 
SemanticException(NOT_SUPPORTED, node, "EXPLAIN ANALYZE only supports TYPE DISTRIBUTED option"); } process(node.getStatement(), scope); analysis.setUpdateType(null); return createAndAssignScope(node, scope, Field.newUnqualified("Query Plan", VARCHAR)); } @Override protected Scope visitQuery(Query node, Scope scope) { Scope withScope = analyzeWith(node, scope); Scope queryScope = Scope.builder() .withParent(withScope) .build(); Scope queryBodyScope = process(node.getQueryBody(), queryScope); analyzeOrderBy(node, queryBodyScope); // Input fields == Output fields analysis.setOutputExpressions(node, descriptorToFields(queryBodyScope)); queryScope = Scope.builder() .withParent(withScope) .withRelationType(queryBodyScope.getRelationType()) .build(); analysis.setScope(node, queryScope); return queryScope; } @Override protected Scope visitUnnest(Unnest node, Scope scope) { ImmutableList.Builder<Field> outputFields = ImmutableList.builder(); for (Expression expression : node.getExpressions()) { ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, scope); Type expressionType = expressionAnalysis.getType(expression); if (expressionType instanceof ArrayType) { outputFields.add(Field.newUnqualified(Optional.empty(), ((ArrayType) expressionType).getElementType())); } else if (expressionType instanceof MapType) { outputFields.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getKeyType())); outputFields.add(Field.newUnqualified(Optional.empty(), ((MapType) expressionType).getValueType())); } else { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Cannot unnest type: " + expressionType); } } if (node.isWithOrdinality()) { outputFields.add(Field.newUnqualified(Optional.empty(), BIGINT)); } return createAndAssignScope(node, scope, outputFields.build()); } @Override protected Scope visitTable(Table table, Scope scope) { if (!table.getName().getPrefix().isPresent()) { // is this a reference to a WITH query? String name = table.getName().getSuffix(); Optional<WithQuery> withQuery = scope.getNamedQuery(name); if (withQuery.isPresent()) { Query query = withQuery.get().getQuery(); analysis.registerNamedQuery(table, query); // re-alias the fields with the name assigned to the query in the WITH declaration RelationType queryDescriptor = analysis.getOutputDescriptor(query); List<Field> fields; Optional<List<String>> columnNames = withQuery.get().getColumnNames(); if (columnNames.isPresent()) { // if columns are explicitly aliased -> WITH cte(alias1, alias2 ...) 
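// Illustrative example (hypothetical query): WITH cte(x, y) AS (SELECT a, b FROM t)
// exposes the two query output fields under the names x and y; the loop below pairs
// each alias with the underlying field at the same position.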
ImmutableList.Builder<Field> fieldBuilder = ImmutableList.builder(); int field = 0; for (String columnName : columnNames.get()) { Field inputField = queryDescriptor.getFieldByIndex(field); fieldBuilder.add(Field.newQualified( QualifiedName.of(name), Optional.of(columnName), inputField.getType(), false, inputField.getOriginTable(), inputField.isAliased())); field++; } fields = fieldBuilder.build(); } else { fields = queryDescriptor.getAllFields().stream() .map(field -> Field.newQualified( QualifiedName.of(name), field.getName(), field.getType(), field.isHidden(), field.getOriginTable(), field.isAliased())) .collect(toImmutableList()); } return createAndAssignScope(table, scope, fields); } } QualifiedObjectName name = createQualifiedObjectName(session, table, table.getName()); Optional<ViewDefinition> optionalView = metadata.getView(session, name); if (optionalView.isPresent()) { Statement statement = analysis.getStatement(); if (statement instanceof CreateView) { CreateView viewStatement = (CreateView) statement; QualifiedObjectName viewNameFromStatement = createQualifiedObjectName(session, viewStatement, viewStatement.getName()); if (viewStatement.isReplace() && viewNameFromStatement.equals(name)) { throw new SemanticException(VIEW_IS_RECURSIVE, table, "Statement would create a recursive view"); } } if (analysis.hasTableInView(table)) { throw new SemanticException(VIEW_IS_RECURSIVE, table, "View is recursive"); } ViewDefinition view = optionalView.get(); Query query = parseView(view.getOriginalSql(), name, table); analysis.registerNamedQuery(table, query); accessControl.checkCanSelectFromView(session.getRequiredTransactionId(), session.getIdentity(), name); analysis.registerTableForView(table); RelationType descriptor = analyzeView(query, name, view.getCatalog(), view.getSchema(), view.getOwner(), table); analysis.unregisterTableForView(); if (isViewStale(view.getColumns(), descriptor.getVisibleFields())) { throw new SemanticException(VIEW_IS_STALE, table, "View '%s' is stale; it must be re-created", name); } // Derive the type of the view from the stored definition, not from the analysis of the underlying query. // This is needed in case the underlying table(s) changed and the query in the view now produces types that // are implicitly coercible to the declared view types. 
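// Illustrative example (hypothetical schema change): if a view column was declared
// BIGINT and the underlying table column is later narrowed to INTEGER, the fresh
// analysis yields INTEGER, which still coerces to BIGINT, so the view is not stale;
// the fields below keep the declared BIGINT type and a relation coercion is
// registered instead of failing the query.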
List<Field> outputFields = view.getColumns().stream() .map(column -> Field.newQualified( QualifiedName.of(name.getObjectName()), Optional.of(column.getName()), column.getType(), false, Optional.of(name), false)) .collect(toImmutableList()); analysis.addRelationCoercion(table, outputFields.stream().map(Field::getType).toArray(Type[]::new)); return createAndAssignScope(table, scope, outputFields); } Optional<TableHandle> tableHandle = metadata.getTableHandle(session, name); if (!tableHandle.isPresent()) { if (!metadata.getCatalogHandle(session, name.getCatalogName()).isPresent()) { throw new SemanticException(MISSING_CATALOG, table, "Catalog %s does not exist", name.getCatalogName()); } if (!metadata.schemaExists(session, new CatalogSchemaName(name.getCatalogName(), name.getSchemaName()))) { throw new SemanticException(MISSING_SCHEMA, table, "Schema %s does not exist", name.getSchemaName()); } throw new SemanticException(MISSING_TABLE, table, "Table %s does not exist", name); } accessControl.checkCanSelectFromTable(session.getRequiredTransactionId(), session.getIdentity(), name); TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle.get()); Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle.get()); // TODO: discover columns lazily based on where they are needed (to support connectors that can't enumerate all tables) ImmutableList.Builder<Field> fields = ImmutableList.builder(); for (ColumnMetadata column : tableMetadata.getColumns()) { Field field = Field.newQualified( table.getName(), Optional.of(column.getName()), column.getType(), column.isHidden(), Optional.of(name), false); fields.add(field); ColumnHandle columnHandle = columnHandles.get(column.getName()); checkArgument(columnHandle != null, "Unknown field %s", field); analysis.setColumn(field, columnHandle); } analysis.registerTable(table, tableHandle.get()); return createAndAssignScope(table, scope, fields.build()); } @Override protected Scope visitAliasedRelation(AliasedRelation relation, Scope scope) { Scope relationScope = process(relation.getRelation(), scope); // todo this check should be inside of TupleDescriptor.withAlias, but the exception needs the node object RelationType relationType = relationScope.getRelationType(); if (relation.getColumnNames() != null) { int totalColumns = relationType.getVisibleFieldCount(); if (totalColumns != relation.getColumnNames().size()) { throw new SemanticException(MISMATCHED_COLUMN_ALIASES, relation, "Column alias list has %s entries but '%s' has %s columns available", relation.getColumnNames().size(), relation.getAlias(), totalColumns); } } RelationType descriptor = relationType.withAlias(relation.getAlias(), relation.getColumnNames()); return createAndAssignScope(relation, scope, descriptor); } @Override protected Scope visitSampledRelation(SampledRelation relation, Scope scope) { if (!DependencyExtractor.extractNames(relation.getSamplePercentage(), analysis.getColumnReferences()).isEmpty()) { throw new SemanticException(NON_NUMERIC_SAMPLE_PERCENTAGE, relation.getSamplePercentage(), "Sample percentage cannot contain column references"); } IdentityLinkedHashMap<Expression, Type> expressionTypes = getExpressionTypes( session, metadata, sqlParser, ImmutableMap.of(), relation.getSamplePercentage(), analysis.getParameters(), analysis.isDescribe()); ExpressionInterpreter samplePercentageEval = expressionOptimizer(relation.getSamplePercentage(), metadata, session, expressionTypes); Object samplePercentageObject = 
samplePercentageEval.optimize(symbol -> { throw new SemanticException(NON_NUMERIC_SAMPLE_PERCENTAGE, relation.getSamplePercentage(), "Sample percentage cannot contain column references"); }); if (!(samplePercentageObject instanceof Number)) { throw new SemanticException(NON_NUMERIC_SAMPLE_PERCENTAGE, relation.getSamplePercentage(), "Sample percentage should evaluate to a numeric expression"); } double samplePercentageValue = ((Number) samplePercentageObject).doubleValue(); if (samplePercentageValue < 0.0) { throw new SemanticException(SemanticErrorCode.SAMPLE_PERCENTAGE_OUT_OF_RANGE, relation.getSamplePercentage(), "Sample percentage must be greater than or equal to 0"); } if ((samplePercentageValue > 100.0)) { throw new SemanticException(SemanticErrorCode.SAMPLE_PERCENTAGE_OUT_OF_RANGE, relation.getSamplePercentage(), "Sample percentage must be less than or equal to 100"); } analysis.setSampleRatio(relation, samplePercentageValue / 100); Scope relationScope = process(relation.getRelation(), scope); return createAndAssignScope(relation, scope, relationScope.getRelationType()); } @Override protected Scope visitTableSubquery(TableSubquery node, Scope scope) { StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session); Scope queryScope = analyzer.analyze(node.getQuery(), scope); return createAndAssignScope(node, scope, queryScope.getRelationType()); } @Override protected Scope visitQuerySpecification(QuerySpecification node, Scope scope) { // TODO: extract candidate names from SELECT, WHERE, HAVING, GROUP BY and ORDER BY expressions // to pass down to analyzeFrom Scope sourceScope = analyzeFrom(node, scope); node.getWhere().ifPresent(where -> analyzeWhere(node, sourceScope, where)); List<Expression> outputExpressions = analyzeSelect(node, sourceScope); List<List<Expression>> groupByExpressions = analyzeGroupBy(node, sourceScope, outputExpressions); Scope outputScope = computeOutputScope(node, sourceScope); List<Expression> orderByExpressions = analyzeOrderBy(node, sourceScope, outputScope, outputExpressions); analyzeHaving(node, sourceScope); List<Expression> expressions = new ArrayList<>(); expressions.addAll(outputExpressions); expressions.addAll(orderByExpressions); node.getHaving().ifPresent(expressions::add); analyzeAggregations(node, sourceScope, groupByExpressions, analysis.getColumnReferences(), expressions); analyzeWindowFunctions(node, outputExpressions, orderByExpressions); return outputScope; } @Override protected Scope visitSetOperation(SetOperation node, Scope scope) { checkState(node.getRelations().size() >= 2); List<Scope> relationScopes = node.getRelations().stream() .map(relation -> { Scope relationScope = process(relation, scope); return createAndAssignScope(relation, scope, relationScope.getRelationType().withOnlyVisibleFields()); }) .collect(toImmutableList()); Type[] outputFieldTypes = relationScopes.get(0).getRelationType().getVisibleFields().stream() .map(Field::getType) .toArray(Type[]::new); for (Scope relationScope : relationScopes) { int outputFieldSize = outputFieldTypes.length; RelationType relationType = relationScope.getRelationType(); int descFieldSize = relationType.getVisibleFields().size(); String setOperationName = node.getClass().getSimpleName(); if (outputFieldSize != descFieldSize) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, node, "%s query has different number of fields: %d, %d", setOperationName, outputFieldSize, descFieldSize); } for (int i = 0; i < descFieldSize; i++) { Type descFieldType 
= relationType.getFieldByIndex(i).getType(); Optional<Type> commonSuperType = metadata.getTypeManager().getCommonSuperType(outputFieldTypes[i], descFieldType); if (!commonSuperType.isPresent()) { throw new SemanticException(TYPE_MISMATCH, node, "column %d in %s query has incompatible types: %s, %s", i, setOperationName, outputFieldTypes[i].getDisplayName(), descFieldType.getDisplayName()); } outputFieldTypes[i] = commonSuperType.get(); } } Field[] outputDescriptorFields = new Field[outputFieldTypes.length]; RelationType firstDescriptor = relationScopes.get(0).getRelationType().withOnlyVisibleFields(); for (int i = 0; i < outputFieldTypes.length; i++) { Field oldField = firstDescriptor.getFieldByIndex(i); outputDescriptorFields[i] = new Field( oldField.getRelationAlias(), oldField.getName(), outputFieldTypes[i], oldField.isHidden(), oldField.getOriginTable(), oldField.isAliased()); } for (int i = 0; i < node.getRelations().size(); i++) { Relation relation = node.getRelations().get(i); Scope relationScope = relationScopes.get(i); RelationType relationType = relationScope.getRelationType(); for (int j = 0; j < relationType.getVisibleFields().size(); j++) { Type outputFieldType = outputFieldTypes[j]; Type descFieldType = relationType.getFieldByIndex(j).getType(); if (!outputFieldType.equals(descFieldType)) { analysis.addRelationCoercion(relation, outputFieldTypes); break; } } } return createAndAssignScope(node, scope, outputDescriptorFields); } @Override protected Scope visitIntersect(Intersect node, Scope scope) { if (!node.isDistinct()) { throw new SemanticException(NOT_SUPPORTED, node, "INTERSECT ALL not yet implemented"); } return visitSetOperation(node, scope); } @Override protected Scope visitExcept(Except node, Scope scope) { if (!node.isDistinct()) { throw new SemanticException(NOT_SUPPORTED, node, "EXCEPT ALL not yet implemented"); } return visitSetOperation(node, scope); } @Override protected Scope visitJoin(Join node, Scope scope) { JoinCriteria criteria = node.getCriteria().orElse(null); if (criteria instanceof NaturalJoin) { throw new SemanticException(NOT_SUPPORTED, node, "Natural join not supported"); } Scope left = process(node.getLeft(), scope); Scope right = process(node.getRight(), isUnnestRelation(node.getRight()) ?
left : scope); Scope output = createAndAssignScope(node, scope, left.getRelationType().joinWith(right.getRelationType())); if (node.getType() == Join.Type.CROSS || node.getType() == Join.Type.IMPLICIT) { return output; } if (criteria instanceof JoinUsing) { // TODO: implement proper "using" semantics with respect to output columns List<String> columns = ((JoinUsing) criteria).getColumns(); List<Expression> expressions = new ArrayList<>(); for (String column : columns) { Expression leftExpression = new Identifier(column); Expression rightExpression = new Identifier(column); ExpressionAnalysis leftExpressionAnalysis = analyzeExpression(leftExpression, left); ExpressionAnalysis rightExpressionAnalysis = analyzeExpression(rightExpression, right); checkState(leftExpressionAnalysis.getSubqueryInPredicates().isEmpty(), "INVARIANT"); checkState(rightExpressionAnalysis.getSubqueryInPredicates().isEmpty(), "INVARIANT"); checkState(leftExpressionAnalysis.getScalarSubqueries().isEmpty(), "INVARIANT"); checkState(rightExpressionAnalysis.getScalarSubqueries().isEmpty(), "INVARIANT"); addCoercionForJoinCriteria(node, leftExpression, rightExpression); expressions.add(new ComparisonExpression(EQUAL, leftExpression, rightExpression)); } analysis.setJoinCriteria(node, ExpressionUtils.and(expressions)); } else if (criteria instanceof JoinOn) { Expression expression = ((JoinOn) criteria).getExpression(); // ensure all names can be resolved, types match, etc (we don't need to record resolved names, subexpression types, etc. because // we do it further down when after we determine which subexpressions apply to left vs right tuple) ExpressionAnalyzer analyzer = ExpressionAnalyzer.create(analysis, session, metadata, sqlParser, accessControl); // need to register coercions in case when join criteria requires coercion (e.g. 
join on char(1) = char(2)) ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, output); Type clauseType = expressionAnalysis.getType(expression); if (!clauseType.equals(BOOLEAN)) { if (!clauseType.equals(UNKNOWN)) { throw new SemanticException(TYPE_MISMATCH, expression, "JOIN ON clause must evaluate to a boolean: actual type %s", clauseType); } // coerce null to boolean analysis.addCoercion(expression, BOOLEAN, false); } Analyzer.verifyNoAggregatesOrWindowFunctions(metadata.getFunctionRegistry(), expression, "JOIN clause"); // expressionInterpreter/optimizer only understands a subset of expression types // TODO: remove this when the new expression tree is implemented Expression canonicalized = CanonicalizeExpressions.canonicalizeExpression(expression); analyzeExpression(canonicalized, output); Object optimizedExpression = expressionOptimizer(canonicalized, metadata, session, analyzer.getExpressionTypes()).optimize(NoOpSymbolResolver.INSTANCE); if (!(optimizedExpression instanceof Expression) && (optimizedExpression instanceof Boolean || optimizedExpression == null)) { // If the JoinOn clause evaluates to a boolean expression, simulate a cross join by adding the relevant redundant expression // optimizedExpression can be TRUE, FALSE or NULL here if (optimizedExpression != null && optimizedExpression.equals(Boolean.TRUE)) { optimizedExpression = new ComparisonExpression(EQUAL, new LongLiteral("0"), new LongLiteral("0")); } else { optimizedExpression = new ComparisonExpression(EQUAL, new LongLiteral("0"), new LongLiteral("1")); } } if (!(optimizedExpression instanceof Expression)) { throw new SemanticException(TYPE_MISMATCH, node, "Join clause must be a boolean expression"); } // The optimization above may have rewritten the expression tree which breaks all the identity maps, so redo the analysis // to re-analyze coercions that might be necessary analyzer = ExpressionAnalyzer.create(analysis, session, metadata, sqlParser, accessControl); analyzer.analyze((Expression) optimizedExpression, output); analysis.addCoercions(analyzer.getExpressionCoercions(), analyzer.getTypeOnlyCoercions()); Set<Expression> postJoinConjuncts = new HashSet<>(); for (Expression conjunct : ExpressionUtils.extractConjuncts((Expression) optimizedExpression)) { conjunct = ExpressionUtils.normalize(conjunct); if (conjunct instanceof ComparisonExpression && (((ComparisonExpression) conjunct).getType() == EQUAL || node.getType() == Join.Type.INNER)) { Expression conjunctFirst = ((ComparisonExpression) conjunct).getLeft(); Expression conjunctSecond = ((ComparisonExpression) conjunct).getRight(); Set<QualifiedName> firstDependencies = DependencyExtractor.extractNames(conjunctFirst, analyzer.getColumnReferences()); Set<QualifiedName> secondDependencies = DependencyExtractor.extractNames(conjunctSecond, analyzer.getColumnReferences()); Expression leftExpression = null; Expression rightExpression = null; if (firstDependencies.stream().allMatch(left.getRelationType()::canResolve) && secondDependencies.stream().allMatch(right.getRelationType()::canResolve)) { leftExpression = conjunctFirst; rightExpression = conjunctSecond; } else if (firstDependencies.stream().allMatch(right.getRelationType()::canResolve) && secondDependencies.stream().allMatch(left.getRelationType()::canResolve)) { leftExpression = conjunctSecond; rightExpression = conjunctFirst; } // expression on each side of comparison operator references only symbols from one side of join. 
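// Illustrative example (hypothetical query): for
//   SELECT * FROM l JOIN r ON l.a = r.b AND l.c + r.d = 1
// the conjunct l.a = r.b resolves one side entirely against each input, so it is
// analyzed as an equi-join clause; l.c + r.d = 1 mixes both inputs on one side of
// the comparison and is added to postJoinConjuncts instead.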
// analyze the clauses to record the types of all subexpressions and resolve names against the left/right underlying tuples if (rightExpression != null) { ExpressionAnalysis leftExpressionAnalysis = analyzeExpression(leftExpression, left); ExpressionAnalysis rightExpressionAnalysis = analyzeExpression(rightExpression, right); analysis.recordSubqueries(node, leftExpressionAnalysis); analysis.recordSubqueries(node, rightExpressionAnalysis); addCoercionForJoinCriteria(node, leftExpression, rightExpression); } else { // mixed references to both left and right join relation on one side of comparison operator. // expression will be put in post-join condition; analyze in context of output table. postJoinConjuncts.add(conjunct); } } else { // non-comparison expression. // expression will be put in post-join condition; analyze in context of output table. postJoinConjuncts.add(conjunct); } } Expression postJoinPredicate = ExpressionUtils.combineConjuncts(postJoinConjuncts); analysis.recordSubqueries(node, analyzeExpression(postJoinPredicate, output)); analysis.setJoinCriteria(node, (Expression) optimizedExpression); } else { throw new UnsupportedOperationException("unsupported join criteria: " + criteria.getClass().getName()); } return output; } private boolean isUnnestRelation(Relation node) { if (node instanceof AliasedRelation) { return isUnnestRelation(((AliasedRelation) node).getRelation()); } return node instanceof Unnest; } private void addCoercionForJoinCriteria(Join node, Expression leftExpression, Expression rightExpression) { Type leftType = analysis.getTypeWithCoercions(leftExpression); Type rightType = analysis.getTypeWithCoercions(rightExpression); Optional<Type> superType = metadata.getTypeManager().getCommonSuperType(leftType, rightType); if (!superType.isPresent()) { throw new SemanticException(TYPE_MISMATCH, node, "Join criteria has incompatible types: %s, %s", leftType.getDisplayName(), rightType.getDisplayName()); } if (!leftType.equals(superType.get())) { analysis.addCoercion(leftExpression, superType.get(), metadata.getTypeManager().isTypeOnlyCoercion(leftType, rightType)); } if (!rightType.equals(superType.get())) { analysis.addCoercion(rightExpression, superType.get(), metadata.getTypeManager().isTypeOnlyCoercion(rightType, leftType)); } } @Override protected Scope visitValues(Values node, Scope scope) { checkState(node.getRows().size() >= 1); List<List<Type>> rowTypes = node.getRows().stream() .map(row -> analyzeExpression(row, scope).getType(row)) .map(type -> { if (type instanceof RowType) { return type.getTypeParameters(); } return ImmutableList.of(type); }) .collect(toImmutableList()); // determine common super type of the rows List<Type> fieldTypes = new ArrayList<>(rowTypes.iterator().next()); for (List<Type> rowType : rowTypes) { // check field count consistency for rows if (rowType.size() != fieldTypes.size()) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, node, "Values rows have mismatched types: %s vs %s", rowTypes.get(0), rowType); } for (int i = 0; i < rowType.size(); i++) { Type fieldType = rowType.get(i); Type superType = fieldTypes.get(i); Optional<Type> commonSuperType = metadata.getTypeManager().getCommonSuperType(fieldType, superType); if (!commonSuperType.isPresent()) { throw new SemanticException(MISMATCHED_SET_COLUMN_TYPES, node, "Values rows have mismatched types: %s vs %s", rowTypes.get(0), rowType); } fieldTypes.set(i, commonSuperType.get()); } } // add coercions for the rows for (Expression row : node.getRows()) { if (row instanceof Row) 
{ List<Expression> items = ((Row) row).getItems(); for (int i = 0; i < items.size(); i++) { Type expectedType = fieldTypes.get(i); Expression item = items.get(i); Type actualType = analysis.getType(item); if (!actualType.equals(expectedType)) { analysis.addCoercion(item, expectedType, metadata.getTypeManager().isTypeOnlyCoercion(actualType, expectedType)); } } } else { Type actualType = analysis.getType(row); Type expectedType = fieldTypes.get(0); if (!actualType.equals(expectedType)) { analysis.addCoercion(row, expectedType, metadata.getTypeManager().isTypeOnlyCoercion(actualType, expectedType)); } } } List<Field> fields = fieldTypes.stream() .map(valueType -> Field.newUnqualified(Optional.empty(), valueType)) .collect(toImmutableList()); return createAndAssignScope(node, scope, fields); } private void analyzeWindowFunctions(QuerySpecification node, List<Expression> outputExpressions, List<Expression> orderByExpressions) { WindowFunctionExtractor extractor = new WindowFunctionExtractor(); for (Expression expression : Iterables.concat(outputExpressions, orderByExpressions)) { extractor.process(expression, null); new WindowFunctionValidator().process(expression, analysis); } List<FunctionCall> windowFunctions = extractor.getWindowFunctions(); for (FunctionCall windowFunction : windowFunctions) { // filter with window function is not supported yet if (windowFunction.getFilter().isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "FILTER is not yet supported for window functions"); } Window window = windowFunction.getWindow().get(); WindowFunctionExtractor nestedExtractor = new WindowFunctionExtractor(); for (Expression argument : windowFunction.getArguments()) { nestedExtractor.process(argument, null); } for (Expression expression : window.getPartitionBy()) { nestedExtractor.process(expression, null); } for (SortItem sortItem : window.getOrderBy()) { nestedExtractor.process(sortItem.getSortKey(), null); } if (window.getFrame().isPresent()) { nestedExtractor.process(window.getFrame().get(), null); } if (!nestedExtractor.getWindowFunctions().isEmpty()) { throw new SemanticException(NESTED_WINDOW, node, "Cannot nest window functions inside window function '%s': %s", windowFunction, extractor.getWindowFunctions()); } if (windowFunction.isDistinct()) { throw new SemanticException(NOT_SUPPORTED, node, "DISTINCT in window function parameters not yet supported: %s", windowFunction); } if (window.getFrame().isPresent()) { analyzeWindowFrame(window.getFrame().get()); } List<TypeSignature> argumentTypes = Lists.transform(windowFunction.getArguments(), expression -> analysis.getType(expression).getTypeSignature()); FunctionKind kind = metadata.getFunctionRegistry().resolveFunction(windowFunction.getName(), fromTypeSignatures(argumentTypes)).getKind(); if (kind != AGGREGATE && kind != WINDOW) { throw new SemanticException(MUST_BE_WINDOW_FUNCTION, node, "Not a window function: %s", windowFunction.getName()); } } analysis.setWindowFunctions(node, windowFunctions); } private void analyzeWindowFrame(WindowFrame frame) { FrameBound.Type startType = frame.getStart().getType(); FrameBound.Type endType = frame.getEnd().orElse(new FrameBound(CURRENT_ROW)).getType(); if (startType == UNBOUNDED_FOLLOWING) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame start cannot be UNBOUNDED FOLLOWING"); } if (endType == UNBOUNDED_PRECEDING) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame end cannot be UNBOUNDED PRECEDING"); } if ((startType == CURRENT_ROW) && 
(endType == PRECEDING)) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame starting from CURRENT ROW cannot end with PRECEDING"); } if ((startType == FOLLOWING) && (endType == PRECEDING)) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame starting from FOLLOWING cannot end with PRECEDING"); } if ((startType == FOLLOWING) && (endType == CURRENT_ROW)) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame starting from FOLLOWING cannot end with CURRENT ROW"); } if ((frame.getType() == RANGE) && ((startType == PRECEDING) || (endType == PRECEDING))) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame RANGE PRECEDING is only supported with UNBOUNDED"); } if ((frame.getType() == RANGE) && ((startType == FOLLOWING) || (endType == FOLLOWING))) { throw new SemanticException(INVALID_WINDOW_FRAME, frame, "Window frame RANGE FOLLOWING is only supported with UNBOUNDED"); } } private void analyzeHaving(QuerySpecification node, Scope scope) { if (node.getHaving().isPresent()) { Expression predicate = node.getHaving().get(); ExpressionAnalysis expressionAnalysis = analyzeExpression(predicate, scope); analysis.recordSubqueries(node, expressionAnalysis); Type predicateType = expressionAnalysis.getType(predicate); if (!predicateType.equals(BOOLEAN) && !predicateType.equals(UNKNOWN)) { throw new SemanticException(TYPE_MISMATCH, predicate, "HAVING clause must evaluate to a boolean: actual type %s", predicateType); } analysis.setHaving(node, predicate); } } private List<Expression> analyzeOrderBy(QuerySpecification node, Scope sourceScope, Scope outputScope, List<Expression> outputExpressions) { if (SystemSessionProperties.isLegacyOrderByEnabled(session)) { return legacyAnalyzeOrderBy(node, sourceScope, outputScope, outputExpressions); } List<SortItem> items = node.getOrderBy() .map(OrderBy::getSortItems) .orElse(emptyList()); ImmutableList.Builder<Expression> orderByExpressionsBuilder = ImmutableList.builder(); if (!items.isEmpty()) { for (SortItem item : items) { Expression expression = item.getSortKey(); Expression orderByExpression; if (expression instanceof LongLiteral) { // this is an ordinal in the output tuple long ordinal = ((LongLiteral) expression).getValue(); if (ordinal < 1 || ordinal > outputExpressions.size()) { throw new SemanticException(INVALID_ORDINAL, expression, "ORDER BY position %s is not in select list", ordinal); } int field = toIntExact(ordinal - 1); Type type = outputScope.getRelationType().getFieldByIndex(field).getType(); if (!type.isOrderable()) { throw new SemanticException(TYPE_MISMATCH, node, "The type of expression in position %s is not orderable (actual: %s), and therefore cannot be used in ORDER BY", ordinal, type); } orderByExpression = outputExpressions.get(field); } else { // Analyze the original expression using a synthetic scope (which delegates to the source scope for any missing name) // to catch any semantic errors (due to type mismatch, etc) Scope synthetic = Scope.builder() .withParent(sourceScope) .withRelationType(outputScope.getRelationType()) .build(); analyzeExpression(expression, synthetic); orderByExpression = ExpressionTreeRewriter.rewriteWith(new OrderByExpressionRewriter(extractNamedOutputExpressions(node)), expression); ExpressionAnalysis expressionAnalysis = analyzeExpression(orderByExpression, sourceScope); analysis.recordSubqueries(node, expressionAnalysis); } Type type = analysis.getType(orderByExpression); if (!type.isOrderable()) { throw new 
SemanticException(TYPE_MISMATCH, node, "Type %s is not orderable, and therefore cannot be used in ORDER BY: %s", type, expression); } orderByExpressionsBuilder.add(orderByExpression); } } List<Expression> orderByExpressions = orderByExpressionsBuilder.build(); analysis.setOrderByExpressions(node, orderByExpressions); if (node.getSelect().isDistinct() && !outputExpressions.containsAll(orderByExpressions)) { throw new SemanticException(ORDER_BY_MUST_BE_IN_SELECT, node.getSelect(), "For SELECT DISTINCT, ORDER BY expressions must appear in select list"); } return orderByExpressions; } /** * Preserve the old column resolution behavior for ORDER BY while we transition workloads to new semantics * TODO: remove this */ private List<Expression> legacyAnalyzeOrderBy(QuerySpecification node, Scope sourceScope, Scope outputScope, List<Expression> outputExpressions) { List<SortItem> items = node.getOrderBy() .map(OrderBy::getSortItems) .orElse(emptyList()); ImmutableList.Builder<Expression> orderByExpressionsBuilder = ImmutableList.builder(); if (!items.isEmpty()) { // Compute aliased output terms so we can resolve order by expressions against them first ImmutableMultimap.Builder<QualifiedName, Expression> byAliasBuilder = ImmutableMultimap.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof SingleColumn) { Optional<String> alias = ((SingleColumn) item).getAlias(); if (alias.isPresent()) { byAliasBuilder.put(QualifiedName.of(alias.get()), ((SingleColumn) item).getExpression()); // TODO: need to know if alias was quoted } } } Multimap<QualifiedName, Expression> byAlias = byAliasBuilder.build(); for (SortItem item : items) { Expression expression = item.getSortKey(); Expression orderByExpression = null; if (expression instanceof Identifier) { // if this is a simple name reference, try to resolve against output columns QualifiedName name = QualifiedName.of(((Identifier) expression).getName()); Collection<Expression> expressions = byAlias.get(name); if (expressions.size() > 1) { throw new SemanticException(AMBIGUOUS_ATTRIBUTE, expression, "'%s' in ORDER BY is ambiguous", name.getSuffix()); } if (expressions.size() == 1) { orderByExpression = Iterables.getOnlyElement(expressions); } // otherwise, couldn't resolve name against output aliases, so fall through... 
} else if (expression instanceof LongLiteral) { // this is an ordinal in the output tuple long ordinal = ((LongLiteral) expression).getValue(); if (ordinal < 1 || ordinal > outputExpressions.size()) { throw new SemanticException(INVALID_ORDINAL, expression, "ORDER BY position %s is not in select list", ordinal); } int field = toIntExact(ordinal - 1); Type type = outputScope.getRelationType().getFieldByIndex(field).getType(); if (!type.isOrderable()) { throw new SemanticException(TYPE_MISMATCH, node, "The type of expression in position %s is not orderable (actual: %s), and therefore cannot be used in ORDER BY", ordinal, type); } orderByExpression = outputExpressions.get(field); } // otherwise, just use the expression as is if (orderByExpression == null) { orderByExpression = expression; } ExpressionAnalysis expressionAnalysis = analyzeExpression(orderByExpression, sourceScope); analysis.recordSubqueries(node, expressionAnalysis); Type type = expressionAnalysis.getType(orderByExpression); if (!type.isOrderable()) { throw new SemanticException(TYPE_MISMATCH, node, "Type %s is not orderable, and therefore cannot be used in ORDER BY: %s", type, expression); } orderByExpressionsBuilder.add(orderByExpression); } } List<Expression> orderByExpressions = orderByExpressionsBuilder.build(); analysis.setOrderByExpressions(node, orderByExpressions); if (node.getSelect().isDistinct() && !outputExpressions.containsAll(orderByExpressions)) { throw new SemanticException(ORDER_BY_MUST_BE_IN_SELECT, node.getSelect(), "For SELECT DISTINCT, ORDER BY expressions must appear in select list"); } return orderByExpressions; } private Multimap<QualifiedName, Expression> extractNamedOutputExpressions(QuerySpecification node) { // Compute aliased output terms so we can resolve order by expressions against them first ImmutableMultimap.Builder<QualifiedName, Expression> assignments = ImmutableMultimap.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof SingleColumn) { SingleColumn column = (SingleColumn) item; Optional<String> alias = column.getAlias(); if (alias.isPresent()) { assignments.put(QualifiedName.of(alias.get()), column.getExpression()); // TODO: need to know if alias was quoted } else if (column.getExpression() instanceof Identifier) { assignments.put(QualifiedName.of(((Identifier) column.getExpression()).getName()), column.getExpression()); } } } return assignments.build(); } private class OrderByExpressionRewriter extends ExpressionRewriter<Void> { private final Multimap<QualifiedName, Expression> assignments; public OrderByExpressionRewriter(Multimap<QualifiedName, Expression> assignments) { this.assignments = assignments; } @Override public Expression rewriteIdentifier(Identifier reference, Void context, ExpressionTreeRewriter<Void> treeRewriter) { // if this is a simple name reference, try to resolve against output columns QualifiedName name = QualifiedName.of(reference.getName()); Set<Expression> expressions = assignments.get(name) .stream() .collect(Collectors.toSet()); if (expressions.size() > 1) { throw new SemanticException(AMBIGUOUS_ATTRIBUTE, reference, "'%s' in ORDER BY is ambiguous", name); } if (expressions.size() == 1) { return Iterables.getOnlyElement(expressions); } // otherwise, couldn't resolve name against output aliases, so fall through... 
return reference; } } private List<List<Expression>> analyzeGroupBy(QuerySpecification node, Scope scope, List<Expression> outputExpressions) { List<Set<Expression>> computedGroupingSets = ImmutableList.of(); // empty list = no aggregations if (node.getGroupBy().isPresent()) { List<List<Set<Expression>>> enumeratedGroupingSets = node.getGroupBy().get().getGroupingElements().stream() .map(GroupingElement::enumerateGroupingSets) .collect(toImmutableList()); // compute cross product of enumerated grouping sets, if there are any computedGroupingSets = computeGroupingSetsCrossProduct(enumeratedGroupingSets, node.getGroupBy().get().isDistinct()); checkState(!computedGroupingSets.isEmpty(), "computed grouping sets cannot be empty"); } else if (hasAggregates(node)) { // if there are aggregates, but no group by, create a grand total grouping set (global aggregation) computedGroupingSets = ImmutableList.of(ImmutableSet.of()); } List<List<Expression>> analyzedGroupingSets = computedGroupingSets.stream() .map(groupingSet -> analyzeGroupingColumns(groupingSet, node, scope, outputExpressions)) .collect(toImmutableList()); analysis.setGroupingSets(node, analyzedGroupingSets); return analyzedGroupingSets; } private List<Set<Expression>> computeGroupingSetsCrossProduct(List<List<Set<Expression>>> enumeratedGroupingSets, boolean isDistinct) { checkState(!enumeratedGroupingSets.isEmpty(), "enumeratedGroupingSets cannot be empty"); List<Set<Expression>> groupingSetsCrossProduct = new ArrayList<>(); enumeratedGroupingSets.get(0) .stream() .map(ImmutableSet::copyOf) .forEach(groupingSetsCrossProduct::add); for (int i = 1; i < enumeratedGroupingSets.size(); i++) { List<Set<Expression>> groupingSets = enumeratedGroupingSets.get(i); List<Set<Expression>> oldGroupingSetsCrossProduct = ImmutableList.copyOf(groupingSetsCrossProduct); groupingSetsCrossProduct.clear(); for (Set<Expression> existingSet : oldGroupingSetsCrossProduct) { for (Set<Expression> groupingSet : groupingSets) { Set<Expression> concatenatedSet = ImmutableSet.<Expression>builder() .addAll(existingSet) .addAll(groupingSet) .build(); groupingSetsCrossProduct.add(concatenatedSet); } } } if (isDistinct) { return ImmutableList.copyOf(ImmutableSet.copyOf(groupingSetsCrossProduct)); } return groupingSetsCrossProduct; } private List<Expression> analyzeGroupingColumns(Set<Expression> groupingColumns, QuerySpecification node, Scope scope, List<Expression> outputExpressions) { ImmutableList.Builder<Expression> groupingColumnsBuilder = ImmutableList.builder(); for (Expression groupingColumn : groupingColumns) { // first, see if this is an ordinal Expression groupByExpression; if (groupingColumn instanceof LongLiteral) { long ordinal = ((LongLiteral) groupingColumn).getValue(); if (ordinal < 1 || ordinal > outputExpressions.size()) { throw new SemanticException(INVALID_ORDINAL, groupingColumn, "GROUP BY position %s is not in select list", ordinal); } groupByExpression = outputExpressions.get(toIntExact(ordinal - 1)); } else { ExpressionAnalysis expressionAnalysis = analyzeExpression(groupingColumn, scope); analysis.recordSubqueries(node, expressionAnalysis); groupByExpression = groupingColumn; } Analyzer.verifyNoAggregatesOrWindowFunctions(metadata.getFunctionRegistry(), groupByExpression, "GROUP BY clause"); Type type = analysis.getType(groupByExpression); if (!type.isComparable()) { throw new SemanticException(TYPE_MISMATCH, node, "%s is not comparable, and therefore cannot be used in GROUP BY", type); } groupingColumnsBuilder.add(groupByExpression); } return 
groupingColumnsBuilder.build(); } private Scope computeOutputScope(QuerySpecification node, Scope scope) { ImmutableList.Builder<Field> outputFields = ImmutableList.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof AllColumns) { // expand * and T.* Optional<QualifiedName> starPrefix = ((AllColumns) item).getPrefix(); for (Field field : scope.getRelationType().resolveFieldsWithPrefix(starPrefix)) { outputFields.add(Field.newUnqualified(field.getName(), field.getType(), field.getOriginTable(), false)); } } else if (item instanceof SingleColumn) { SingleColumn column = (SingleColumn) item; Expression expression = column.getExpression(); Optional<String> fieldName = column.getAlias(); Optional<QualifiedObjectName> originTable = Optional.empty(); QualifiedName name = null; if (expression instanceof Identifier) { name = QualifiedName.of(((Identifier) expression).getName()); } else if (expression instanceof DereferenceExpression) { name = DereferenceExpression.getQualifiedName((DereferenceExpression) expression); } if (name != null) { List<Field> matchingFields = scope.getRelationType().resolveFields(name); if (!matchingFields.isEmpty()) { originTable = matchingFields.get(0).getOriginTable(); } } if (!fieldName.isPresent()) { if (name != null) { fieldName = Optional.of(getLast(name.getOriginalParts())); } } outputFields.add(Field.newUnqualified(fieldName, analysis.getType(expression), originTable, column.getAlias().isPresent())); // TODO don't use analysis as a side-channel. Use outputExpressions to look up the type } else { throw new IllegalArgumentException("Unsupported SelectItem type: " + item.getClass().getName()); } } return createAndAssignScope(node, scope, outputFields.build()); } private List<Expression> analyzeSelect(QuerySpecification node, Scope scope) { ImmutableList.Builder<Expression> outputExpressionBuilder = ImmutableList.builder(); for (SelectItem item : node.getSelect().getSelectItems()) { if (item instanceof AllColumns) { // expand * and T.* Optional<QualifiedName> starPrefix = ((AllColumns) item).getPrefix(); RelationType relationType = scope.getRelationType(); List<Field> fields = relationType.resolveFieldsWithPrefix(starPrefix); if (fields.isEmpty()) { if (starPrefix.isPresent()) { throw new SemanticException(MISSING_TABLE, item, "Table '%s' not found", starPrefix.get()); } throw new SemanticException(WILDCARD_WITHOUT_FROM, item, "SELECT * not allowed in queries without FROM clause"); } for (Field field : fields) { int fieldIndex = relationType.indexOf(field); FieldReference expression = new FieldReference(fieldIndex); outputExpressionBuilder.add(expression); ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, scope); Type type = expressionAnalysis.getType(expression); if (node.getSelect().isDistinct() && !type.isComparable()) { throw new SemanticException(TYPE_MISMATCH, node.getSelect(), "DISTINCT can only be applied to comparable types (actual: %s)", type); } } } else if (item instanceof SingleColumn) { SingleColumn column = (SingleColumn) item; ExpressionAnalysis expressionAnalysis = analyzeExpression(column.getExpression(), scope); analysis.recordSubqueries(node, expressionAnalysis); outputExpressionBuilder.add(column.getExpression()); Type type = expressionAnalysis.getType(column.getExpression()); if (node.getSelect().isDistinct() && !type.isComparable()) { throw new SemanticException(TYPE_MISMATCH, node.getSelect(), "DISTINCT can only be applied to comparable types (actual: %s): %s", type, 
column.getExpression()); } } else { throw new IllegalArgumentException("Unsupported SelectItem type: " + item.getClass().getName()); } } ImmutableList<Expression> result = outputExpressionBuilder.build(); analysis.setOutputExpressions(node, result); return result; } public void analyzeWhere(Node node, Scope scope, Expression predicate) { Analyzer.verifyNoAggregatesOrWindowFunctions(metadata.getFunctionRegistry(), predicate, "WHERE clause"); ExpressionAnalysis expressionAnalysis = analyzeExpression(predicate, scope); analysis.recordSubqueries(node, expressionAnalysis); Type predicateType = expressionAnalysis.getType(predicate); if (!predicateType.equals(BOOLEAN)) { if (!predicateType.equals(UNKNOWN)) { throw new SemanticException(TYPE_MISMATCH, predicate, "WHERE clause must evaluate to a boolean: actual type %s", predicateType); } // coerce null to boolean analysis.addCoercion(predicate, BOOLEAN, false); } analysis.setWhere(node, predicate); } private Scope analyzeFrom(QuerySpecification node, Scope scope) { if (node.getFrom().isPresent()) { return process(node.getFrom().get(), scope); } return scope; } private void analyzeAggregations( QuerySpecification node, Scope scope, List<List<Expression>> groupingSets, Set<Expression> columnReferences, List<Expression> expressions) { AggregateExtractor extractor = new AggregateExtractor(metadata.getFunctionRegistry()); for (Expression expression : expressions) { extractor.process(expression); } analysis.setAggregates(node, extractor.getAggregates()); // is this an aggregation query? if (!groupingSets.isEmpty()) { // ensure SELECT, ORDER BY and HAVING are constant with respect to group // e.g, these are all valid expressions: // SELECT f(a) GROUP BY a // SELECT f(a + 1) GROUP BY a + 1 // SELECT a + sum(b) GROUP BY a ImmutableList<Expression> distinctGroupingColumns = groupingSets.stream() .flatMap(Collection::stream) .distinct() .collect(toImmutableList()); for (Expression expression : expressions) { verifyAggregations(distinctGroupingColumns, scope, expression, columnReferences); } } } private boolean hasAggregates(QuerySpecification node) { AggregateExtractor extractor = new AggregateExtractor(metadata.getFunctionRegistry()); node.getSelect() .getSelectItems().stream() .filter(SingleColumn.class::isInstance) .forEach(extractor::process); node.getOrderBy().map(OrderBy::getSortItems).ifPresent( sortItems -> sortItems .forEach(extractor::process)); node.getHaving() .ifPresent(extractor::process); return !extractor.getAggregates().isEmpty(); } private void verifyAggregations( List<Expression> groupByExpressions, Scope scope, Expression expression, Set<Expression> columnReferences) { AggregationAnalyzer analyzer = new AggregationAnalyzer(groupByExpressions, metadata, scope, columnReferences, analysis.getParameters(), analysis.isDescribe()); analyzer.analyze(expression); } private RelationType analyzeView(Query query, QualifiedObjectName name, Optional<String> catalog, Optional<String> schema, Optional<String> owner, Table node) { try { // run view as view owner if set; otherwise, run as session user Identity identity; AccessControl viewAccessControl; if (owner.isPresent()) { identity = new Identity(owner.get(), Optional.empty()); viewAccessControl = new ViewAccessControl(accessControl); } else { identity = session.getIdentity(); viewAccessControl = accessControl; } Session viewSession = Session.builder(metadata.getSessionPropertyManager()) .setQueryId(session.getQueryId()) .setTransactionId(session.getTransactionId().orElse(null)) .setIdentity(identity) 
.setSource(session.getSource().orElse(null)) .setCatalog(catalog.orElse(null)) .setSchema(schema.orElse(null)) .setTimeZoneKey(session.getTimeZoneKey()) .setLocale(session.getLocale()) .setRemoteUserAddress(session.getRemoteUserAddress().orElse(null)) .setUserAgent(session.getUserAgent().orElse(null)) .setClientInfo(session.getClientInfo().orElse(null)) .setStartTime(session.getStartTime()) .setSystemProperty(LEGACY_ORDER_BY, session.getSystemProperty(LEGACY_ORDER_BY, Boolean.class).toString()) .build(); StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, viewAccessControl, viewSession); Scope queryScope = analyzer.analyze(query, Scope.create()); return queryScope.getRelationType().withAlias(name.getObjectName(), null); } catch (RuntimeException e) { throw new SemanticException(VIEW_ANALYSIS_ERROR, node, "Failed analyzing stored view '%s': %s", name, e.getMessage()); } } private Query parseView(String view, QualifiedObjectName name, Node node) { try { return (Query) sqlParser.createStatement(view); } catch (ParsingException e) { throw new SemanticException(VIEW_PARSE_ERROR, node, "Failed parsing stored view '%s': %s", name, e.getMessage()); } } private boolean isViewStale(List<ViewDefinition.ViewColumn> columns, Collection<Field> fields) { if (columns.size() != fields.size()) { return true; } List<Field> fieldList = ImmutableList.copyOf(fields); for (int i = 0; i < columns.size(); i++) { ViewDefinition.ViewColumn column = columns.get(i); Field field = fieldList.get(i); if (!column.getName().equalsIgnoreCase(field.getName().orElse(null)) || !metadata.getTypeManager().canCoerce(field.getType(), column.getType())) { return true; } } return false; } private ExpressionAnalysis analyzeExpression(Expression expression, Scope scope) { return ExpressionAnalyzer.analyzeExpression( session, metadata, accessControl, sqlParser, scope, analysis, expression); } private List<Expression> descriptorToFields(Scope scope) { ImmutableList.Builder<Expression> builder = ImmutableList.builder(); for (int fieldIndex = 0; fieldIndex < scope.getRelationType().getAllFieldCount(); fieldIndex++) { FieldReference expression = new FieldReference(fieldIndex); builder.add(expression); analyzeExpression(expression, scope); } return builder.build(); } private Scope analyzeWith(Query node, Scope scope) { // analyze WITH clause if (!node.getWith().isPresent()) { return scope; } With with = node.getWith().get(); if (with.isRecursive()) { throw new SemanticException(NOT_SUPPORTED, with, "Recursive WITH queries are not supported"); } Scope.Builder withScopeBuilder = Scope.builder() .withParent(scope); for (WithQuery withQuery : with.getQueries()) { Query query = withQuery.getQuery(); process(query, withScopeBuilder.build()); String name = withQuery.getName(); if (withScopeBuilder.containsNamedQuery(name)) { throw new SemanticException(DUPLICATE_RELATION, withQuery, "WITH query name '%s' specified more than once", name); } // check if all or none of the columns are explicitly alias if (withQuery.getColumnNames().isPresent()) { List<String> columnNames = withQuery.getColumnNames().get(); RelationType queryDescriptor = analysis.getOutputDescriptor(query); if (columnNames.size() != queryDescriptor.getVisibleFieldCount()) { throw new SemanticException(MISMATCHED_COLUMN_ALIASES, withQuery, "WITH column alias list has %s entries but WITH query(%s) has %s columns", columnNames.size(), name, queryDescriptor.getVisibleFieldCount()); } } withScopeBuilder.withNamedQuery(name, withQuery); } Scope withScope = 
withScopeBuilder.build(); analysis.setScope(with, withScope); return withScope; } private void analyzeOrderBy(Query node, Scope scope) { List<SortItem> items = node.getOrderBy() .map(OrderBy::getSortItems) .orElse(emptyList()); ImmutableList.Builder<Expression> orderByFieldsBuilder = ImmutableList.builder(); for (SortItem item : items) { Expression expression = item.getSortKey(); if (expression instanceof LongLiteral) { // this is an ordinal in the output tuple long ordinal = ((LongLiteral) expression).getValue(); if (ordinal < 1 || ordinal > scope.getRelationType().getVisibleFieldCount()) { throw new SemanticException(INVALID_ORDINAL, expression, "ORDER BY position %s is not in select list", ordinal); } expression = new FieldReference(toIntExact(ordinal - 1)); } ExpressionAnalysis expressionAnalysis = ExpressionAnalyzer.analyzeExpression(session, metadata, accessControl, sqlParser, scope, analysis, expression); analysis.recordSubqueries(node, expressionAnalysis); orderByFieldsBuilder.add(expression); } analysis.setOrderByExpressions(node, orderByFieldsBuilder.build()); } private Scope createAndAssignScope(Node node, Scope parent, Field... fields) { return createAndAssignScope(node, parent, new RelationType(fields)); } private Scope createAndAssignScope(Node node, Scope parent, List<Field> fields) { return createAndAssignScope(node, parent, new RelationType(fields)); } private Scope createAndAssignScope(Node node, Scope parent, RelationType relationType) { Scope scope = Scope.builder() .withParent(parent) .withRelationType(relationType) .build(); analysis.setScope(node, scope); return scope; } } }
Remove expressionOptimizer from StatementAnalyzer.visitJoin The optimization will happen later, in the SimplifyExpressions optimizer, which has a broader context (including coercions) and can therefore make the right decision. RelationPlanner will create a separate FilterNode for join criteria that do not involve both the probe and build sides; that node can be simplified later by the aforementioned rewrite. This fixes: #7520
presto-main/src/main/java/com/facebook/presto/sql/analyzer/StatementAnalyzer.java
Remove expressionOptimizer from StatementAnalyzer.visitJoin
<ide><path>presto-main/src/main/java/com/facebook/presto/sql/analyzer/StatementAnalyzer.java <ide> import com.facebook.presto.sql.parser.SqlParser; <ide> import com.facebook.presto.sql.planner.DependencyExtractor; <ide> import com.facebook.presto.sql.planner.ExpressionInterpreter; <del>import com.facebook.presto.sql.planner.NoOpSymbolResolver; <ide> import com.facebook.presto.sql.planner.optimizations.CanonicalizeExpressions; <ide> import com.facebook.presto.sql.tree.AddColumn; <ide> import com.facebook.presto.sql.tree.AliasedRelation; <ide> else if (criteria instanceof JoinOn) { <ide> Expression expression = ((JoinOn) criteria).getExpression(); <ide> <del> // ensure all names can be resolved, types match, etc (we don't need to record resolved names, subexpression types, etc. because <del> // we do it further down when after we determine which subexpressions apply to left vs right tuple) <del> ExpressionAnalyzer analyzer = ExpressionAnalyzer.create(analysis, session, metadata, sqlParser, accessControl); <del> <ide> // need to register coercions in case when join criteria requires coercion (e.g. join on char(1) = char(2)) <ide> ExpressionAnalysis expressionAnalysis = analyzeExpression(expression, output); <ide> Type clauseType = expressionAnalysis.getType(expression); <ide> <ide> Analyzer.verifyNoAggregatesOrWindowFunctions(metadata.getFunctionRegistry(), expression, "JOIN clause"); <ide> <del> // expressionInterpreter/optimizer only understands a subset of expression types <del> // TODO: remove this when the new expression tree is implemented <ide> Expression canonicalized = CanonicalizeExpressions.canonicalizeExpression(expression); <ide> analyzeExpression(canonicalized, output); <ide> <del> Object optimizedExpression = expressionOptimizer(canonicalized, metadata, session, analyzer.getExpressionTypes()).optimize(NoOpSymbolResolver.INSTANCE); <del> <del> if (!(optimizedExpression instanceof Expression) && (optimizedExpression instanceof Boolean || optimizedExpression == null)) { <del> // If the JoinOn clause evaluates to a boolean expression, simulate a cross join by adding the relevant redundant expression <del> // optimizedExpression can be TRUE, FALSE or NULL here <del> if (optimizedExpression != null && optimizedExpression.equals(Boolean.TRUE)) { <del> optimizedExpression = new ComparisonExpression(EQUAL, new LongLiteral("0"), new LongLiteral("0")); <del> } <del> else { <del> optimizedExpression = new ComparisonExpression(EQUAL, new LongLiteral("0"), new LongLiteral("1")); <del> } <del> } <del> <del> if (!(optimizedExpression instanceof Expression)) { <del> throw new SemanticException(TYPE_MISMATCH, node, "Join clause must be a boolean expression"); <del> } <del> // The optimization above may have rewritten the expression tree which breaks all the identity maps, so redo the analysis <del> // to re-analyze coercions that might be necessary <del> analyzer = ExpressionAnalyzer.create(analysis, session, metadata, sqlParser, accessControl); <del> analyzer.analyze((Expression) optimizedExpression, output); <del> analysis.addCoercions(analyzer.getExpressionCoercions(), analyzer.getTypeOnlyCoercions()); <del> <ide> Set<Expression> postJoinConjuncts = new HashSet<>(); <ide> <del> for (Expression conjunct : ExpressionUtils.extractConjuncts((Expression) optimizedExpression)) { <add> for (Expression conjunct : ExpressionUtils.extractConjuncts(canonicalized)) { <ide> conjunct = ExpressionUtils.normalize(conjunct); <ide> <ide> if (conjunct instanceof ComparisonExpression <ide> &&
(((ComparisonExpression) conjunct).getType() == EQUAL || node.getType() == Join.Type.INNER)) { <ide> Expression conjunctFirst = ((ComparisonExpression) conjunct).getLeft(); <ide> Expression conjunctSecond = ((ComparisonExpression) conjunct).getRight(); <del> Set<QualifiedName> firstDependencies = DependencyExtractor.extractNames(conjunctFirst, analyzer.getColumnReferences()); <del> Set<QualifiedName> secondDependencies = DependencyExtractor.extractNames(conjunctSecond, analyzer.getColumnReferences()); <add> Set<QualifiedName> firstDependencies = DependencyExtractor.extractNames(conjunctFirst, expressionAnalysis.getColumnReferences()); <add> Set<QualifiedName> secondDependencies = DependencyExtractor.extractNames(conjunctSecond, expressionAnalysis.getColumnReferences()); <ide> <ide> Expression leftExpression = null; <ide> Expression rightExpression = null; <ide> } <ide> Expression postJoinPredicate = ExpressionUtils.combineConjuncts(postJoinConjuncts); <ide> analysis.recordSubqueries(node, analyzeExpression(postJoinPredicate, output)); <del> analysis.setJoinCriteria(node, (Expression) optimizedExpression); <add> analysis.setJoinCriteria(node, canonicalized); <ide> } <ide> else { <ide> throw new UnsupportedOperationException("unsupported join criteria: " + criteria.getClass().getName());
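For context on the rule the commit above leaves in place: after this change, visitJoin no longer constant-folds the ON clause; it only splits the canonicalized expression into conjuncts and decides, per conjunct, whether it can serve as an equi-join clause or must become a post-join filter (the FilterNode that RelationPlanner creates, which SimplifyExpressions can later fold). Below is a minimal, self-contained sketch of that side-classification rule. It is illustrative only: Conjunct, Kind, classify and the column-name predicates are invented for this note and are not Presto APIs (requires Java 16+ for records).

import java.util.Set;
import java.util.function.Predicate;

// Illustrative only: none of these names exist in Presto.
public class JoinConjunctSketch {
    // A conjunct "l = r", reduced to the column names each side references.
    record Conjunct(Set<String> leftRefs, Set<String> rightRefs) { }

    enum Kind { EQUI_JOIN, POST_JOIN_FILTER }

    // An equality is usable as an equi-join clause only when one side resolves
    // entirely against the probe relation and the other entirely against the
    // build relation (in either order); everything else runs after the join.
    static Kind classify(Conjunct c, Predicate<String> probe, Predicate<String> build) {
        boolean straight = c.leftRefs().stream().allMatch(probe) && c.rightRefs().stream().allMatch(build);
        boolean flipped = c.leftRefs().stream().allMatch(build) && c.rightRefs().stream().allMatch(probe);
        return (straight || flipped) ? Kind.EQUI_JOIN : Kind.POST_JOIN_FILTER;
    }

    public static void main(String[] args) {
        Predicate<String> probe = name -> name.startsWith("a.");
        Predicate<String> build = name -> name.startsWith("b.");
        // a.x = b.y: each side resolves against exactly one relation -> EQUI_JOIN
        System.out.println(classify(new Conjunct(Set.of("a.x"), Set.of("b.y")), probe, build));
        // (a.x + b.y) = b.z: the left side mixes both relations -> POST_JOIN_FILTER
        System.out.println(classify(new Conjunct(Set.of("a.x", "b.y"), Set.of("b.z")), probe, build));
    }
}

An equality qualifies as an equi-join clause only when each side resolves entirely against one relation; mixed references, like the second call in main, are deferred to the post-join condition.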
Java
mit
ff414315d3f17ad82a584d462ede7979dbba0298
0
johanaschan/queue-ticket-api,johanaschan/queue-ticket-api
package se.jaitco.queueticketapi.filter; import io.jsonwebtoken.Claims; import io.jsonwebtoken.Jwts; import io.jsonwebtoken.SignatureException; import org.springframework.web.filter.GenericFilterBean; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; public class AuthenticationFilter extends GenericFilterBean { private static final String AUTHORIZATION = "Authorization"; private static final String BEARER_WITH_SPACE = "Bearer "; private static final String OPTIONS = "OPTIONS"; private static final int BEARER_WITH_SPACE_LENGTH = BEARER_WITH_SPACE.length(); @Override public void doFilter(final ServletRequest req, final ServletResponse res, final FilterChain chain) throws IOException, ServletException { final HttpServletRequest request = (HttpServletRequest) req; final HttpServletResponse response = (HttpServletResponse) res; if(isPreflight(request)) { chain.doFilter(req, res); }else{ final String authHeader = getAuthHeader(request); if(isHeaderValid(authHeader)){ final String token = getToken(authHeader); try { final Claims claims = verifySignatureAndGetClaims(token); request.setAttribute("claims", claims); chain.doFilter(req, res); }catch (SignatureException e){ response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Invalid token."); } }else{ response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Missing or invalid Authorization header."); return; } } } private Claims verifySignatureAndGetClaims(String token) { return Jwts.parser() .setSigningKey("secretkey") .parseClaimsJws(token) .getBody(); } private String getToken(String authHeader) { return authHeader.substring(BEARER_WITH_SPACE_LENGTH); } private boolean isHeaderValid(String authHeader) { return authHeader != null && authHeader.startsWith(BEARER_WITH_SPACE); } private String getAuthHeader(HttpServletRequest request) { return request.getHeader(AUTHORIZATION); } private boolean isPreflight(HttpServletRequest request) { return "OPTIONS".equals(request.getMethod()); } }
src/main/java/se/jaitco/queueticketapi/filter/AuthenticationFilter.java
package se.jaitco.queueticketapi.filter; import io.jsonwebtoken.Claims; import io.jsonwebtoken.Jwts; import io.jsonwebtoken.SignatureException; import org.springframework.web.filter.GenericFilterBean; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; public class AuthenticationFilter extends GenericFilterBean { private static final String AUTHORIZATION = "Authorization"; private static final String BEARER_WITH_SPACE = "Bearer "; private static final String OPTIONS = "OPTIONS"; private static final int BEARER_WITH_SPACE_LENGTH = BEARER_WITH_SPACE.length(); @Override public void doFilter(final ServletRequest req, final ServletResponse res, final FilterChain chain) throws IOException, ServletException { final HttpServletRequest request = (HttpServletRequest) req; final HttpServletResponse response = (HttpServletResponse) res; if(isPreflight(request)) { chain.doFilter(req, res); }else{ final String authHeader = getAuthHeader(request); if(!isHeaderValid(authHeader)){ response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Missing or invalid Authorization header."); return; } final String token = getToken(authHeader); try { final Claims claims = verifySignatureAndGetClaims(token); request.setAttribute("claims", claims); }catch (SignatureException e){ response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Invalid token."); } } } private Claims verifySignatureAndGetClaims(String token) { return Jwts.parser() .setSigningKey("secretkey") .parseClaimsJws(token) .getBody(); } private String getToken(String authHeader) { return authHeader.substring(BEARER_WITH_SPACE_LENGTH); } private boolean isHeaderValid(String authHeader) { return authHeader == null || !authHeader.startsWith(BEARER_WITH_SPACE); } private String getAuthHeader(HttpServletRequest request) { return request.getHeader(AUTHORIZATION); } private boolean isPreflight(HttpServletRequest request) { return "OPTIONS".equals(request.getMethod()); } }
Corrected URL
src/main/java/se/jaitco/queueticketapi/filter/AuthenticationFilter.java
Corrected URL
<ide><path>src/main/java/se/jaitco/queueticketapi/filter/AuthenticationFilter.java <ide> chain.doFilter(req, res); <ide> }else{ <ide> final String authHeader = getAuthHeader(request); <del> if(!isHeaderValid(authHeader)){ <add> if(isHeaderValid(authHeader)){ <add> final String token = getToken(authHeader); <add> try { <add> final Claims claims = verifySignatureAndGetClaims(token); <add> request.setAttribute("claims", claims); <add> chain.doFilter(req, res); <add> }catch (SignatureException e){ <add> response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Invalid token."); <add> } <add> }else{ <ide> response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Missing or invalid Authorization header."); <ide> return; <del> } <del> final String token = getToken(authHeader); <del> try { <del> final Claims claims = verifySignatureAndGetClaims(token); <del> request.setAttribute("claims", claims); <del> }catch (SignatureException e){ <del> response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Invalid token."); <ide> } <ide> } <ide> } <ide> } <ide> <ide> private boolean isHeaderValid(String authHeader) { <del> return authHeader == null || !authHeader.startsWith(BEARER_WITH_SPACE); <add> return authHeader != null && authHeader.startsWith(BEARER_WITH_SPACE); <ide> } <ide> <ide> private String getAuthHeader(HttpServletRequest request) {
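The diff above fixes two defects at once: isHeaderValid answered the opposite question its name implies (true meant invalid, so the caller's if(!isHeaderValid(...)) guard rejected valid requests), and the happy path never called chain.doFilter, so even a verified token stalled the request. The following tiny self-contained sketch isolates just the predicate inversion; the class name and the standalone main driver are invented for this note.

// Invented names, for illustration only.
public class HeaderCheckSketch {
    static final String BEARER_WITH_SPACE = "Bearer ";

    // Old version: returns true when the header is INVALID, despite the name.
    static boolean isHeaderValidOld(String authHeader) {
        return authHeader == null || !authHeader.startsWith(BEARER_WITH_SPACE);
    }

    // Fixed version: true means valid, matching the name.
    static boolean isHeaderValid(String authHeader) {
        return authHeader != null && authHeader.startsWith(BEARER_WITH_SPACE);
    }

    public static void main(String[] args) {
        String good = "Bearer abc.def.ghi";
        // The old caller wrote: if (!isHeaderValid(authHeader)) sendError(401)
        System.out.println(!isHeaderValidOld(good)); // true  -> valid request rejected with 401
        System.out.println(!isHeaderValid(good));    // false -> valid request passes the guard
    }
}

Keeping the boolean sense aligned with the predicate's name, as the corrected version does, makes the call-site guard read naturally and removes the double negation that caused the bug.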
Java
mit
80bbc9c3f88a2756a8a1b7f32b1646500036a184
0
whichonespink44/TerrainControl,PG85/OpenTerrainGenerator,LinkBR/TerrainControl,MCTCP/TerrainControl,Ezoteric/TerrainControl
package com.khorn.terraincontrol.customobjects; import com.khorn.terraincontrol.configuration.io.BracketSettingsReader; import com.khorn.terraincontrol.util.helpers.FileHelper; import java.io.File; import java.util.*; /** * Represents a collection of custom objects. Those objects can be loaded from * a directory, or can be loaded manually and then added to this collection. * */ public class CustomObjectCollection implements Iterable<CustomObject> { private final List<CustomObject> objects; private final Map<String, CustomObject> objectsByName; private CustomObjectCollection fallback; /** * Creates a new {@link CustomObjectCollection} instance with no loaded objects. */ public CustomObjectCollection() { this.objects = new ArrayList<CustomObject>(); this.objectsByName = new HashMap<String, CustomObject>(); } /** * Creates a new {@link CustomObjectCollection} instance that loads the objects immediately. * @param loaders Map of all custom object loaders, indexed by lowercase * extension without the dot, like "bo3". * @param directory The directory to load from. Subdirectories will be * searched too. */ public CustomObjectCollection(Map<String, CustomObjectLoader> loaders, File directory) { this(); load(loaders, directory); } /** * Loads all custom objects from the given directory and its * subdirectories. Any objects that were already loaded will be unloaded. * If the directory does not exist it will be created. * @param loaders Map of all custom object loaders, indexed by lowercase * extension without the dot, like "bo3". * @param directory The directory to load from. Subdirectories will be * searched too. */ public void load(Map<String, CustomObjectLoader> loaders, File directory) { if (!FileHelper.makeFolder(directory)) { return; } Map<String, CustomObject> objects = loadObjectsRecursive(loaders, directory); for (CustomObject object : objects.values()) { object.onEnable(objects); addLoadedObject(object); } } /** * Adds an object to the list of loaded objects. If an object with the * same name (case insensitive) already exists, nothing happens. * @param object The object to add to the list of loaded objects. */ public void addLoadedObject(CustomObject object) { String lowerCaseName = object.getName().toLowerCase(); if (!objectsByName.containsKey(lowerCaseName)) { objectsByName.put(lowerCaseName, object); objects.add(object); } } /** * When a lookup by name fails, the given fallback is used instead to look * up by name. * @param customObjects The fallback. */ public void setFallback(CustomObjectCollection customObjects) { this.fallback = customObjects; } /** * Loads all objects in a directory, and calls itself for any * subdirectories. * * @param loaders The custom object loaders. * @param directory The directory to load from. 
*/ private static Map<String, CustomObject> loadObjectsRecursive(Map<String, CustomObjectLoader> loaders, File directory) { if (!directory.isDirectory()) { throw new IllegalArgumentException("Given file is not a directory: " + directory.getAbsolutePath()); } Map<String, CustomObject> objects = new HashMap<String, CustomObject>(); // Load all objects from the files and folders under the directory for (File file : directory.listFiles()) { // Get name and extension String fileName = file.getName(); int index = fileName.lastIndexOf('.'); // If we come across a directory descend into it without enabling // the objects if (file.isDirectory()) { objects.putAll(loadObjectsRecursive(loaders, file)); } else if (index != -1) { String objectType = fileName.substring(index + 1, fileName.length()); String objectName = fileName.substring(0, index); // Get the object CustomObjectLoader loader = loaders.get(objectType.toLowerCase()); if (loader != null) { CustomObject object = loader.loadFromFile(objectName, file); objects.put(objectName.toLowerCase(), object); } } } return objects; } /** * Gets a random custom object from this collection. * @param random Random number generator. * @return The object, or null if there are no objects at all. */ public CustomObject getRandomObject(Random random) { if (objects.isEmpty()) { return null; } return objects.get(random.nextInt(objects.size())); } /** * Gets the object with the given name. * @param name Name of the object. * @return The object, or null if not found. */ public CustomObject getObjectByName(String name) { CustomObject object = objectsByName.get(name.toLowerCase()); if (object == null && fallback != null) { return fallback.getObjectByName(name); } return object; } /** * Parses a string in the format <code>name(setting1=foo,setting2=bar) * </code>. The object is retrieved using {@link #getObjectByName(String)}. * If the object doesn't exist this method will return null. Otherwise, it * will apply the given parameters (if any) to a copy of the object, and * it will return this modified copy. * * @param string The string to parse. * @param config The config to search in * @return A CustomObject, or null if no one was found. */ public CustomObject parseCustomObject(String string) { String objectName = string; String objectExtraSettings = ""; int start = string.indexOf('('); int end = string.lastIndexOf(')'); if (start != -1 && end != -1) { objectName = string.substring(0, start); objectExtraSettings = string.substring(start + 1, end); } CustomObject object = getObjectByName(objectName); if (object != null && objectExtraSettings.length() != 0) { object = object.applySettings(new BracketSettingsReader(object.getName(), objectExtraSettings)); } return object; } /** * Gets an unmodifiable view of all currently loaded objects. * @return A view. */ public List<CustomObject> getAll() { return Collections.unmodifiableList(objects); } @Override public Iterator<CustomObject> iterator() { return objects.iterator(); } /** * Gets whether there are no objects loaded. * @return True if there are no objects loaded, false otherwise. */ public boolean isEmpty() { return objects.isEmpty(); } /** * Internal method to provide backwards compatibility in CustomObjectManager. */ Map<String, CustomObject> accessMap() { return objectsByName; } }
common/src/main/java/com/khorn/terraincontrol/customobjects/CustomObjectCollection.java
package com.khorn.terraincontrol.customobjects; import com.khorn.terraincontrol.configuration.io.BracketSettingsReader; import com.khorn.terraincontrol.util.helpers.FileHelper; import java.io.File; import java.util.*; /** * Represents a collection of custom objects. Those objects can be loaded from * a directory, or can be loaded manually and then added to this collection. * */ public class CustomObjectCollection implements Iterable<CustomObject> { private final List<CustomObject> objects; private final Map<String, CustomObject> objectsByName; private CustomObjectCollection fallback; /** * Creates a new {@link CustomObjectCollection} instance with no loaded objects. */ public CustomObjectCollection() { this.objects = new ArrayList<CustomObject>(); this.objectsByName = new HashMap<String, CustomObject>(); } /** * Creates a new {@link CustomObjectCollection} instance that loads the objects immediately. * @param loaders Map of all custom object loaders, indexed by lowercase * extension without the dot, like "bo3". * @param directory The directory to load from. Subdirectories will be * searched too. */ public CustomObjectCollection(Map<String, CustomObjectLoader> loaders, File directory) { this(); load(loaders, directory); } /** * Loads all custom objects from the given directory and its * subdirectories. Any objects that were already loaded will be unloaded. * If the directory does not exist it will be created. * @param loaders Map of all custom object loaders, indexed by lowercase * extension without the dot, like "bo3". * @param directory The directory to load from. Subdirectories will be * searched too. */ public void load(Map<String, CustomObjectLoader> loaders, File directory) { if (!FileHelper.makeFolder(directory)) { return; } Map<String, CustomObject> objects = loadObjectsRecursive(loaders, directory); for (CustomObject object : objects.values()) { object.onEnable(objects); addLoadedObject(object); } } /** * Adds an object to the list of loaded objects. If an object with the * same name (case insensitive) already exists, nothing happens. * @param object The object to add to the list of loaded objects. */ public void addLoadedObject(CustomObject object) { String lowerCaseName = object.getName().toLowerCase(); if (!objectsByName.containsKey(lowerCaseName)) { objectsByName.put(lowerCaseName, object); objects.add(object); } } /** * When a lookup by name fails, the given fallback is used instead to look * up by name. * @param customObjects The fallback. */ public void setFallback(CustomObjectCollection customObjects) { this.fallback = customObjects; } /** * Loads all objects in a directory, and calls itself for any * subdirectories. * * @param loaders The custom object loaders. * @param directory The directory to load from. 
*/ private static Map<String, CustomObject> loadObjectsRecursive(Map<String, CustomObjectLoader> loaders, File directory) { if (!directory.isDirectory()) { throw new IllegalArgumentException("Given file is not a directory: " + directory.getAbsolutePath()); } Map<String, CustomObject> objects = new HashMap<String, CustomObject>(); // Load all objects from the files and folders under the directory for (File file : directory.listFiles()) { // Get name and extension String fileName = file.getName(); int index = fileName.lastIndexOf('.'); // If we come across a directory descend into it without enabling // the objects if (file.isDirectory()) { objects.putAll(loadObjectsRecursive(loaders, file)); } else if (index != -1) { String objectType = fileName.substring(index + 1, fileName.length()); String objectName = fileName.substring(0, index); // Get the object CustomObjectLoader loader = loaders.get(objectType.toLowerCase()); if (loader != null) { CustomObject object = loader.loadFromFile(objectName, file); objects.put(objectName.toLowerCase(), object); } } } return objects; } /** * Gets a random custom object from this collection. * @param random Random number generator. * @return The object, or null if there are no objects at all. */ public CustomObject getRandomObject(Random random) { if (objects.isEmpty()) { return null; } return objects.get(random.nextInt(objects.size())); } /** * Gets the object with the given name. * @param name Name of the object. * @return The object, or null if not found. */ public CustomObject getObjectByName(String name) { CustomObject object = objectsByName.get(name.toLowerCase()); if (object == null && fallback != null) { return fallback.getObjectByName(name); } if (object == null) { System.out.println("Didn't find " + name); } return object; } /** * Parses a string in the format <code>name(setting1=foo,setting2=bar) * </code>. The object is retrieved using {@link #getObjectByName(String)}. * If the object doesn't exist this method will return null. Otherwise, it * will apply the given parameters (if any) to a copy of the object, and * it will return this modified copy. * * @param string The string to parse. * @param config The config to search in * @return A CustomObject, or null if no one was found. */ public CustomObject parseCustomObject(String string) { String objectName = string; String objectExtraSettings = ""; int start = string.indexOf('('); int end = string.lastIndexOf(')'); if (start != -1 && end != -1) { objectName = string.substring(0, start); objectExtraSettings = string.substring(start + 1, end); } CustomObject object = getObjectByName(objectName); if (object != null && objectExtraSettings.length() != 0) { object = object.applySettings(new BracketSettingsReader(object.getName(), objectExtraSettings)); } return object; } /** * Gets an unmodifiable view of all currently loaded objects. * @return A view. */ public List<CustomObject> getAll() { return Collections.unmodifiableList(objects); } @Override public Iterator<CustomObject> iterator() { return objects.iterator(); } /** * Gets whether there are no objects loaded. * @return True if there are no objects loaded, false otherwise. */ public boolean isEmpty() { return objects.isEmpty(); } /** * Internal method to provide backwards compatibility in CustomObjectManager. */ Map<String, CustomObject> accessMap() { return objectsByName; } }
Remove leftover debug code
common/src/main/java/com/khorn/terraincontrol/customobjects/CustomObjectCollection.java
Remove leftover debug code
<ide><path>common/src/main/java/com/khorn/terraincontrol/customobjects/CustomObjectCollection.java <ide> { <ide> return fallback.getObjectByName(name); <ide> } <del> if (object == null) <del> { <del> System.out.println("Didn't find " + name); <del> } <ide> return object; <ide> } <ide>
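The removed line was a bare System.out.println left over from debugging the name lookup. If that diagnostic were worth keeping, the usual alternative is a log statement guarded by a level check; the sketch below uses java.util.logging from the JDK purely as a generic illustration — TerrainControl has its own logging helpers, so this is not the project's actual API, and the class and method names are invented.

import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

// Generic illustration only; not TerrainControl's logging API.
public class LookupLogSketch {
    private static final Logger LOGGER = Logger.getLogger(LookupLogSketch.class.getName());

    static <T> T getByName(Map<String, T> objectsByName, String name) {
        T object = objectsByName.get(name.toLowerCase());
        if (object == null && LOGGER.isLoggable(Level.FINE)) {
            // Kept at FINE so ordinary server consoles stay quiet.
            LOGGER.fine("No custom object found for name: " + name);
        }
        return object;
    }

    public static void main(String[] args) {
        Map<String, String> objects = Map.of("tree", "BO3:tree");
        System.out.println(getByName(objects, "Tree")); // BO3:tree (case-insensitive hit)
        System.out.println(getByName(objects, "rock")); // null (logged at FINE when enabled)
    }
}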
Java
mit
30ca234bd31414b8309db799f5f80d727030f93e
0
digibib/ls.ext,digibib/ls.ext,digibib/ls.ext,digibib/ls.ext
package no.deichman.services.search; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; import no.deichman.services.entity.EntityService; import no.deichman.services.entity.EntityType; import no.deichman.services.uridefaults.XURI; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.NameValuePair; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.utils.URIBuilder; import org.apache.http.entity.InputStreamEntity; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.message.BasicNameValuePair; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.Property; import org.apache.jena.rdf.model.ResIterator; import org.apache.jena.rdf.model.Resource; import org.apache.jena.rdf.model.ResourceFactory; import org.apache.jena.rdf.model.SimpleSelector; import org.apache.jena.rdf.model.Statement; import org.apache.jena.vocabulary.RDF; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.ServerErrorException; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Function; import static com.google.common.collect.ImmutableMap.of; import static com.google.common.collect.Lists.newArrayList; import static java.lang.String.format; import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR; import static java.net.HttpURLConnection.HTTP_OK; import static java.net.URLEncoder.encode; import static java.util.Arrays.stream; import static java.util.stream.Collectors.toList; import static javax.ws.rs.core.HttpHeaders.CONTENT_LENGTH; import static javax.ws.rs.core.HttpHeaders.CONTENT_TYPE; import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import static no.deichman.services.uridefaults.BaseURI.ontology; import static org.apache.http.entity.ContentType.APPLICATION_JSON; import static org.apache.http.impl.client.HttpClients.createDefault; import static org.apache.jena.rdf.model.ResourceFactory.createProperty; /** * Responsibility: perform indexing and searching. 
*/ public class SearchServiceImpl implements SearchService { public static final Property AGENT = createProperty(ontology("agent")); private static final Logger LOG = LoggerFactory.getLogger(SearchServiceImpl.class); private static final String UTF_8 = "UTF-8"; public static final int SIXTY_ONE = 61; public static final String[] LOCAL_INDEX_SEARCH_FIELDS = { ontology("name"), ontology("prefLabel"), ontology("mainTitle") }; public static final Resource MAIN_ENTRY = ResourceFactory.createResource(ontology("MainEntry")); private final EntityService entityService; private final String elasticSearchBaseUrl; private ModelToIndexMapper workModelToIndexMapper = new ModelToIndexMapper("work"); private ModelToIndexMapper eventModelToIndexMapper = new ModelToIndexMapper("event"); private ModelToIndexMapper serialModelToIndexMapper = new ModelToIndexMapper("serial"); private ModelToIndexMapper personModelToIndexMapper = new ModelToIndexMapper("person"); private ModelToIndexMapper corporationModelToIndexMapper = new ModelToIndexMapper("corporation"); private ModelToIndexMapper publicationModelToIndexMapper = new ModelToIndexMapper("publication"); public static final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); public SearchServiceImpl(String elasticSearchBaseUrl, EntityService entityService) { this.elasticSearchBaseUrl = elasticSearchBaseUrl; this.entityService = entityService; getIndexUriBuilder(); } @Override public final void index(XURI xuri) throws Exception { switch (xuri.getTypeAsEntityType()) { case WORK: doIndexWork(xuri, false, false); break; case PERSON: case CORPORATION: doIndexWorkCreator(xuri, false); break; case PUBLICATION: doIndexPublication(xuri); break; case EVENT: doIndexEvent(xuri); break; case SERIAL: doIndexSerial(xuri); break; default: doIndex(xuri); } } public final void indexOnly(XURI xuri) throws Exception { switch (xuri.getTypeAsEntityType()) { case WORK: doIndexWork(xuri, true, true); break; case PERSON: case CORPORATION: doIndexWorkCreator(xuri, true); break; case PUBLICATION: doIndexPublication(xuri); break; case EVENT: doIndexEvent(xuri); break; case SERIAL: doIndexSerial(xuri); break; default: doIndex(xuri); } } private void doIndexEvent(XURI xuri) { Model eventModelWithLinkedResources = entityService.retrieveEventWithLinkedResources(xuri); indexDocument(xuri, eventModelToIndexMapper.createIndexDocument(eventModelWithLinkedResources, xuri)); cacheNameIndex(xuri, eventModelWithLinkedResources); } private void doIndexSerial(XURI xuri) { Model serialModelWithLinkedResources = entityService.retrieveSerialWithLinkedResources(xuri); indexDocument(xuri, serialModelToIndexMapper.createIndexDocument(serialModelWithLinkedResources, xuri)); cacheNameIndex(xuri, serialModelWithLinkedResources); } @Override public final Response searchPersonWithJson(String json) { return searchWithJson(json, getPersonSearchUriBuilder()); } @Override public final Response searchWorkWithJson(String json, MultivaluedMap<String, String> queryParams) { return searchWithJson(json, getWorkSearchUriBuilder(queryParams)); } @Override public final Response searchPublicationWithJson(String json) { return searchWithJson(json, getPublicationSearchUriBuilder()); } @Override public final Response searchInstrument(String query) { return doSearch(query, getInstrumentSearchUriBuilder()); } @Override public final Response searchCompositionType(String query) { return doSearch(query, getCompositionTypeSearchUriBuilder()); } @Override public final Response searchEvent(String query) { return doSearch(query, 
getEventSearchUriBuilder()); } @Override public final Response clearIndex() { try (CloseableHttpClient httpclient = createDefault()) { URI uri = getIndexUriBuilder().setPath("/search").build(); try (CloseableHttpResponse getExistingIndex = httpclient.execute(new HttpGet(uri))) { if (getExistingIndex.getStatusLine().getStatusCode() == HTTP_OK) { try (CloseableHttpResponse delete = httpclient.execute(new HttpDelete(uri))) { int statusCode = delete.getStatusLine().getStatusCode(); LOG.info("Delete index request returned status " + statusCode); if (statusCode != HTTP_OK) { throw new ServerErrorException("Failed to delete elasticsearch index", HTTP_INTERNAL_ERROR); } } } } HttpPut createIndexRequest = new HttpPut(uri); createIndexRequest.setEntity(new InputStreamEntity(getClass().getResourceAsStream("/search_index.json"), APPLICATION_JSON)); try (CloseableHttpResponse create = httpclient.execute(createIndexRequest)) { int statusCode = create.getStatusLine().getStatusCode(); LOG.info("Create index request returned status " + statusCode); if (statusCode != HTTP_OK) { throw new ServerErrorException("Failed to create elasticsearch index", HTTP_INTERNAL_ERROR); } } putIndexMapping(httpclient, "work"); putIndexMapping(httpclient, "person"); putIndexMapping(httpclient, "serial"); putIndexMapping(httpclient, "corporation"); putIndexMapping(httpclient, "place"); putIndexMapping(httpclient, "subject"); putIndexMapping(httpclient, "genre"); putIndexMapping(httpclient, "publication"); putIndexMapping(httpclient, "instrument"); putIndexMapping(httpclient, "compositionType"); putIndexMapping(httpclient, "event"); putIndexMapping(httpclient, "workSeries"); return Response.status(Response.Status.OK).build(); } catch (Exception e) { LOG.error(e.getMessage(), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private void putIndexMapping(CloseableHttpClient httpclient, String type) throws URISyntaxException, IOException { URI workIndexUri = getIndexUriBuilder().setPath("/search/_mapping/" + type).build(); HttpPut putWorkMappingRequest = new HttpPut(workIndexUri); putWorkMappingRequest.setEntity(new InputStreamEntity(getClass().getResourceAsStream("/" + type + "_mapping.json"), APPLICATION_JSON)); try (CloseableHttpResponse create = httpclient.execute(putWorkMappingRequest)) { int statusCode = create.getStatusLine().getStatusCode(); LOG.info("Create mapping request for " + type + " returned status " + statusCode); if (statusCode != HTTP_OK) { throw new ServerErrorException("Failed to create elasticsearch mapping for " + type, HTTP_INTERNAL_ERROR); } } } private Response searchWithJson(String body, URIBuilder searchUriBuilder, Function<String, String>... 
jsonTranformer) { try { HttpPost httpPost = new HttpPost(searchUriBuilder.build()); httpPost.setEntity(new StringEntity(body, StandardCharsets.UTF_8)); httpPost.setHeader(CONTENT_TYPE, "application/json"); Pair<String, Header[]> searchResult = executeHttpRequest(httpPost); if (jsonTranformer != null && jsonTranformer.length > 0) { String transformed = jsonTranformer[0].apply(searchResult.getLeft()); Header[] headers = searchResult.getRight(); searchResult = Pair.of(transformed, removeHeader(headers, CONTENT_LENGTH)); } return createResponse(searchResult); } catch (Exception e) { LOG.error(e.getMessage(), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private Header[] removeHeader(Header[] headers, String headerName) { return stream(headers) .filter(header -> !header .getName() .toLowerCase() .equalsIgnoreCase(headerName)) .toArray(Header[]::new); } private Response createResponse(Pair<String, Header[]> searchResult) { Response.ResponseBuilder responseBuilder = Response.ok(searchResult.getLeft()); for (Header header : searchResult.getRight()) { responseBuilder = responseBuilder.header(header.getName(), header.getValue()); } return responseBuilder.build(); } private Pair<String, Header[]> executeHttpRequest(HttpRequestBase httpRequestBase) throws IOException { try (CloseableHttpClient httpclient = createDefault(); CloseableHttpResponse response = httpclient.execute(httpRequestBase)) { HttpEntity responseEntity = response.getEntity(); String jsonContent = IOUtils.toString(responseEntity.getContent()); Header[] headers = response.getAllHeaders(); return Pair.<String, Header[]>of(jsonContent, headers); } catch (Exception e) { throw e; } } @Override public final Response searchWork(String query) { return doSearch(query, getWorkSearchUriBuilder(null)); } @Override public final Response searchPerson(String query) { return doSearch(query, getPersonSearchUriBuilder()); } @Override public final Response searchPlace(String query) { return doSearch(query, getPlaceUriBuilder()); } @Override public final Response searchCorporation(String query) { return doSearch(query, getCorporationSearchUriBuilder()); } @Override public final Response searchSerial(String query) { return doSearch(query, getSerialSearchUriBuilder()); } @Override public final Response searchSubject(String query) { return doSearch(query, getSubjectSearchUriBuilder()); } @Override public final Response searchGenre(String query) { return doSearch(query, getGenreSearchUriBuilder()); } @Override public final Response searchPublication(String query) { return doSearch(query, getPublicationSearchUriBuilder()); } @Override public final void delete(XURI xuri) { try (CloseableHttpClient httpclient = createDefault()) { HttpDelete httpDelete = new HttpDelete(getIndexUriBuilder() .setPath(format("/search/%s/%s", xuri.getType(), encode(xuri.getUri(), UTF_8))) .build()); try (CloseableHttpResponse putResponse = httpclient.execute(httpDelete)) { LOG.debug(putResponse.getStatusLine().toString()); } } catch (Exception e) { LOG.error(format("Failed to delete %s in elasticsearch", xuri.getUri()), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } @Override public final Response sortedList(String type, String prefix, int minSize, String field) { EntityType entityType = EntityType.get(type); URIBuilder searchUriBuilder = getIndexUriBuilder().setPath("/search/" + type + "/_search").setParameter("size", Integer.toString(minSize)); switch (entityType) { case PERSON: case CORPORATION: case PLACE: case 
SUBJECT: case EVENT: case WORK_SERIES: case SERIAL: case GENRE: case MUSICAL_INSTRUMENT: case MUSICAL_COMPOSITION_TYPE: Collection<NameEntry> nameEntries = entityService.neighbourhoodOfName(entityType, prefix, minSize); return searchWithJson(createPreIndexedSearchQuery(minSize, nameEntries), searchUriBuilder, orderResultByIdOrder(nameEntries .stream() .map(NameEntry::getUri) .collect(toList()))); default: return searchWithJson(createSortedListQuery(prefix, minSize, field), searchUriBuilder); } } private Function<String, String> orderResultByIdOrder(Collection<String> ids) { Map<String, Integer> desiredOrder = new HashMap<>(ids.size()); final int[] i = new int[]{0}; ids.forEach(id -> desiredOrder.put(urlEncode(id), i[0]++)); return s -> { Map fromJson = GSON.fromJson(s, Map.class); ((List) ((Map) fromJson.get("hits")).get("hits")).sort((o1, o2) -> { String id1 = (String) ((Map) o1).get("_id"); String id2 = (String) ((Map) o2).get("_id"); return desiredOrder.get(id1).compareTo(desiredOrder.get(id2)); }); return GSON.toJson(fromJson); }; } private String createSortedListQuery(String prefix, int minSize, String field) { String sortedListQuery; List<Map> should = new ArrayList<>(); for (int i = 0; i < prefix.length(); i++) { should.add( of("constant_score", of("boost", 2 << Math.max(prefix.length() - i, SIXTY_ONE), "query", of("match_phrase_prefix", of(field, prefix.substring(0, prefix.length() - i)))))); } sortedListQuery = GSON.toJson(of( "size", minSize, "query", of( "bool", of( "should", should) ) )); return sortedListQuery; } private String createPreIndexedSearchQuery(int minSize, Collection<NameEntry> nameEntries) { List<Map> should = new ArrayList<>(); should.addAll(nameEntries .stream() .filter(NameEntry::isBestMatch) .map(e -> of( "ids", of("values", newArrayList(urlEncode(e.getUri()))))) .collect(toList())); should.add(of( "ids", of("values", nameEntries .stream() .map(NameEntry::getUri) .map(SearchServiceImpl::urlEncode) .collect(toList()) ) )); return GSON.toJson( of( "size", minSize, "query", of( "bool", of("should", should) ) ) ); } private static String urlEncode(String uri) { return uri.replace(":", "%3A").replace("/", "%2F"); } @Override public final Response searchWorkWhereUriIsSubject(String subjectUri, int maxSize) { String body = GSON.toJson(of( "size", maxSize, "query", of( "nested", of( "path", "subjects", "query", of("term", of( "subjects.uri", subjectUri) ) ) ) )); return searchWithJson(body, getIndexUriBuilder().setPath("/search/work/_search")); } @Override public final Response searchWorkSeries(String query) { return doSearch(query, getWorkSeriesSearchUriBuilder()); } private URIBuilder getWorkSeriesSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/workSeries/_search"); } private void doIndexPublication(XURI pubUri) throws Exception { Model pubModel = entityService.retrieveById(pubUri); Property publicationOfProperty = ResourceFactory.createProperty(ontology("publicationOf")); if (pubModel.getProperty(null, publicationOfProperty) != null) { String workUri = pubModel.getProperty(ResourceFactory.createResource(pubUri.toString()), publicationOfProperty).getObject().toString(); XURI workXURI = new XURI(workUri); pubModel = entityService.retrieveWorkWithLinkedResources(workXURI); } indexDocument(pubUri, publicationModelToIndexMapper.createIndexDocument(pubModel, pubUri)); } private void doIndexWork(XURI xuri, boolean indexedPerson, boolean indexedPublication) throws Exception { Monitor mon = MonitorFactory.start("doIndexWork1"); Model 
workModelWithLinkedResources = entityService.retrieveWorkWithLinkedResources(xuri); indexDocument(xuri, workModelToIndexMapper.createIndexDocument(workModelWithLinkedResources, xuri)); mon.stop(); mon = MonitorFactory.start("doIndexWork2"); if (!indexedPerson) { workModelWithLinkedResources.listStatements(new SimpleSelector() { @Override public boolean test(Statement s) { return (s.getPredicate().equals(AGENT) && workModelWithLinkedResources.contains(s.getSubject(), RDF.type, MAIN_ENTRY)); } }).forEachRemaining(stmt -> { try { XURI creatorXuri = new XURI(stmt.getObject().asNode().getURI()); doIndexWorkCreatorOnly(creatorXuri); } catch (Exception e) { e.printStackTrace(); } }); } mon.stop(); if (indexedPublication) { return; } // Index all publications belonging to work // TODO instead of iterating over all subjects, find only subjects of triples with publicationOf as predicate mon = MonitorFactory.start("doIndexWork3"); ResIterator subjectIterator = workModelWithLinkedResources.listSubjects(); while (subjectIterator.hasNext()) { Resource subj = subjectIterator.next(); if (subj.isAnon()) { continue; } if (subj.toString().contains("publication")) { XURI pubUri = new XURI(subj.toString()); indexDocument(pubUri, publicationModelToIndexMapper.createIndexDocument(workModelWithLinkedResources, pubUri)); } } mon.stop(); } private void doIndexWorkCreator(XURI creatorUri, boolean indexedWork) throws Exception { Monitor mon = MonitorFactory.start("doIndexWorkCreator"); Model works = entityService.retrieveWorksByCreator(creatorUri); if (!indexedWork) { ResIterator subjectIterator = works.listSubjects(); while (subjectIterator.hasNext()) { Resource subj = subjectIterator.next(); if (subj.isAnon() || subj.toString().indexOf('#') != -1) { continue; } XURI workUri = new XURI(subj.toString()); if (!workUri.getUri().equals(creatorUri.getUri())) { doIndexWorkOnly(workUri); } } } switch (creatorUri.getTypeAsEntityType()) { case PERSON: indexDocument(creatorUri, personModelToIndexMapper .createIndexDocument(entityService.retrievePersonWithLinkedResources(creatorUri).add(works), creatorUri)); cacheNameIndex(creatorUri, works); break; case CORPORATION: indexDocument(creatorUri, corporationModelToIndexMapper .createIndexDocument(entityService.retrieveCorporationWithLinkedResources(creatorUri).add(works), creatorUri)); cacheNameIndex(creatorUri, works); break; default: throw new RuntimeException(format( "Tried to index work creator of type %1$s. 
Should be %2$s or %3$s", creatorUri.getTypeAsEntityType(), EntityType.PERSON, EntityType.CORPORATION )); } mon.stop(); } private void doIndex(XURI xuri) throws Exception { Model indexModel = entityService.retrieveById(xuri); Monitor mon = MonitorFactory.start("createIndexDocument"); String indexDocument = new ModelToIndexMapper(xuri.getTypeAsEntityType().getPath()).createIndexDocument(indexModel, xuri); mon.stop(); indexDocument(xuri, indexDocument); cacheNameIndex(xuri, indexModel); } private void cacheNameIndex(XURI xuri, Model indexModel) { entityService.statementsInModelAbout(xuri, indexModel, LOCAL_INDEX_SEARCH_FIELDS) .forEachRemaining(statement -> { entityService.addIndexedName( xuri.getTypeAsEntityType(), statement.getObject().asLiteral().toString(), statement.getSubject().getURI()); }); } private void doIndexWorkOnly(XURI xuri) throws Exception { doIndexWork(xuri, true, false); } private void indexDocument(XURI xuri, String document) { try (CloseableHttpClient httpclient = createDefault()) { HttpPut httpPut = new HttpPut(getIndexUriBuilder() .setPath(format("/search/%s/%s", xuri.getType(), encode(xuri.getUri(), UTF_8))) // TODO drop urlencoded ID, and define _id in mapping from field uri .build()); httpPut.setEntity(new StringEntity(document, Charset.forName(UTF_8))); httpPut.setHeader(CONTENT_TYPE, APPLICATION_JSON.withCharset(UTF_8).toString()); Monitor mon = MonitorFactory.start("indexDocument"); try (CloseableHttpResponse putResponse = httpclient.execute(httpPut)) { LOG.debug(putResponse.getStatusLine().toString()); } finally { mon.stop(); } } catch (Exception e) { LOG.error(format("Failed to index %s in elasticsearch", xuri.getUri()), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private Response doSearch(String query, URIBuilder searchUriBuilder) { try { HttpGet httpGet = new HttpGet(searchUriBuilder .setParameter("q", query) .setParameter("size", "100") .build()); return createResponse(executeHttpRequest(httpGet)); } catch (Exception e) { LOG.error(e.getMessage(), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private void doIndexWorkCreatorOnly(XURI xuri) throws Exception { doIndexWorkCreator(xuri, true); } private URIBuilder getIndexUriBuilder() { try { return new URIBuilder(this.elasticSearchBaseUrl); } catch (URISyntaxException e) { LOG.error("Failed to create uri builder for elasticsearch"); throw new RuntimeException(e); } } private URIBuilder getWorkSearchUriBuilder(MultivaluedMap<String, String> queryParams) { URIBuilder uriBuilder = getIndexUriBuilder().setPath("/search/work/_search"); if (queryParams != null && !queryParams.isEmpty()) { List<NameValuePair> nvpList = new ArrayList<>(queryParams.size()); queryParams.forEach((key, values) -> { values.forEach(value -> { nvpList.add(new BasicNameValuePair(key, value)); }); }); uriBuilder.setParameters(nvpList); } return uriBuilder; } private URIBuilder getPersonSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/person/_search"); } public final URIBuilder getPlaceUriBuilder() { return getIndexUriBuilder().setPath("/search/place/_search"); } public final URIBuilder getCorporationSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/corporation/_search"); } public final URIBuilder getSerialSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/serial/_search"); } public final URIBuilder getSubjectSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/subject/_search"); } public final URIBuilder 
getGenreSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/genre/_search"); } public final URIBuilder getPublicationSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/publication/_search"); } public final URIBuilder getInstrumentSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/instrument/_search"); } public final URIBuilder getCompositionTypeSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/compositionType/_search"); } private URIBuilder getEventSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/event/_search"); } }
redef/services/src/main/java/no/deichman/services/search/SearchServiceImpl.java
package no.deichman.services.search; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; import no.deichman.services.entity.EntityService; import no.deichman.services.entity.EntityType; import no.deichman.services.uridefaults.XURI; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.NameValuePair; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.utils.URIBuilder; import org.apache.http.entity.InputStreamEntity; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.message.BasicNameValuePair; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.Property; import org.apache.jena.rdf.model.ResIterator; import org.apache.jena.rdf.model.Resource; import org.apache.jena.rdf.model.ResourceFactory; import org.apache.jena.rdf.model.Statement; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.ServerErrorException; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Function; import static com.google.common.collect.ImmutableMap.of; import static com.google.common.collect.Lists.newArrayList; import static java.lang.String.format; import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR; import static java.net.HttpURLConnection.HTTP_OK; import static java.net.URLEncoder.encode; import static java.util.Arrays.stream; import static java.util.stream.Collectors.toList; import static javax.ws.rs.core.HttpHeaders.CONTENT_LENGTH; import static javax.ws.rs.core.HttpHeaders.CONTENT_TYPE; import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import static no.deichman.services.uridefaults.BaseURI.ontology; import static org.apache.http.entity.ContentType.APPLICATION_JSON; import static org.apache.http.impl.client.HttpClients.createDefault; import static org.apache.jena.rdf.model.ResourceFactory.createProperty; /** * Responsibility: perform indexing and searching. 
*/ public class SearchServiceImpl implements SearchService { public static final Property AGENT = createProperty(ontology("agent")); private static final Logger LOG = LoggerFactory.getLogger(SearchServiceImpl.class); private static final String UTF_8 = "UTF-8"; public static final int SIXTY_ONE = 61; public static final String[] LOCAL_INDEX_SEARCH_FIELDS = { ontology("name"), ontology("prefLabel"), ontology("mainTitle") }; private final EntityService entityService; private final String elasticSearchBaseUrl; private ModelToIndexMapper workModelToIndexMapper = new ModelToIndexMapper("work"); private ModelToIndexMapper eventModelToIndexMapper = new ModelToIndexMapper("event"); private ModelToIndexMapper serialModelToIndexMapper = new ModelToIndexMapper("serial"); private ModelToIndexMapper personModelToIndexMapper = new ModelToIndexMapper("person"); private ModelToIndexMapper corporationModelToIndexMapper = new ModelToIndexMapper("corporation"); private ModelToIndexMapper publicationModelToIndexMapper = new ModelToIndexMapper("publication"); public static final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); public SearchServiceImpl(String elasticSearchBaseUrl, EntityService entityService) { this.elasticSearchBaseUrl = elasticSearchBaseUrl; this.entityService = entityService; getIndexUriBuilder(); } @Override public final void index(XURI xuri) throws Exception { switch (xuri.getTypeAsEntityType()) { case WORK: doIndexWork(xuri, false, false); break; case PERSON: case CORPORATION: doIndexWorkCreator(xuri, false); break; case PUBLICATION: doIndexPublication(xuri); break; case EVENT: doIndexEvent(xuri); break; case SERIAL: doIndexSerial(xuri); break; default: doIndex(xuri); } } public final void indexOnly(XURI xuri) throws Exception { switch (xuri.getTypeAsEntityType()) { case WORK: doIndexWork(xuri, true, true); break; case PERSON: case CORPORATION: doIndexWorkCreator(xuri, true); break; case PUBLICATION: doIndexPublication(xuri); break; case EVENT: doIndexEvent(xuri); break; case SERIAL: doIndexSerial(xuri); break; default: doIndex(xuri); } } private void doIndexEvent(XURI xuri) { Model eventModelWithLinkedResources = entityService.retrieveEventWithLinkedResources(xuri); indexDocument(xuri, eventModelToIndexMapper.createIndexDocument(eventModelWithLinkedResources, xuri)); cacheNameIndex(xuri, eventModelWithLinkedResources); } private void doIndexSerial(XURI xuri) { Model serialModelWithLinkedResources = entityService.retrieveSerialWithLinkedResources(xuri); indexDocument(xuri, serialModelToIndexMapper.createIndexDocument(serialModelWithLinkedResources, xuri)); cacheNameIndex(xuri, serialModelWithLinkedResources); } @Override public final Response searchPersonWithJson(String json) { return searchWithJson(json, getPersonSearchUriBuilder()); } @Override public final Response searchWorkWithJson(String json, MultivaluedMap<String, String> queryParams) { return searchWithJson(json, getWorkSearchUriBuilder(queryParams)); } @Override public final Response searchPublicationWithJson(String json) { return searchWithJson(json, getPublicationSearchUriBuilder()); } @Override public final Response searchInstrument(String query) { return doSearch(query, getInstrumentSearchUriBuilder()); } @Override public final Response searchCompositionType(String query) { return doSearch(query, getCompositionTypeSearchUriBuilder()); } @Override public final Response searchEvent(String query) { return doSearch(query, getEventSearchUriBuilder()); } @Override public final Response clearIndex() { try 
(CloseableHttpClient httpclient = createDefault()) { URI uri = getIndexUriBuilder().setPath("/search").build(); try (CloseableHttpResponse getExistingIndex = httpclient.execute(new HttpGet(uri))) { if (getExistingIndex.getStatusLine().getStatusCode() == HTTP_OK) { try (CloseableHttpResponse delete = httpclient.execute(new HttpDelete(uri))) { int statusCode = delete.getStatusLine().getStatusCode(); LOG.info("Delete index request returned status " + statusCode); if (statusCode != HTTP_OK) { throw new ServerErrorException("Failed to delete elasticsearch index", HTTP_INTERNAL_ERROR); } } } } HttpPut createIndexRequest = new HttpPut(uri); createIndexRequest.setEntity(new InputStreamEntity(getClass().getResourceAsStream("/search_index.json"), APPLICATION_JSON)); try (CloseableHttpResponse create = httpclient.execute(createIndexRequest)) { int statusCode = create.getStatusLine().getStatusCode(); LOG.info("Create index request returned status " + statusCode); if (statusCode != HTTP_OK) { throw new ServerErrorException("Failed to create elasticsearch index", HTTP_INTERNAL_ERROR); } } putIndexMapping(httpclient, "work"); putIndexMapping(httpclient, "person"); putIndexMapping(httpclient, "serial"); putIndexMapping(httpclient, "corporation"); putIndexMapping(httpclient, "place"); putIndexMapping(httpclient, "subject"); putIndexMapping(httpclient, "genre"); putIndexMapping(httpclient, "publication"); putIndexMapping(httpclient, "instrument"); putIndexMapping(httpclient, "compositionType"); putIndexMapping(httpclient, "event"); putIndexMapping(httpclient, "workSeries"); return Response.status(Response.Status.OK).build(); } catch (Exception e) { LOG.error(e.getMessage(), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private void putIndexMapping(CloseableHttpClient httpclient, String type) throws URISyntaxException, IOException { URI workIndexUri = getIndexUriBuilder().setPath("/search/_mapping/" + type).build(); HttpPut putWorkMappingRequest = new HttpPut(workIndexUri); putWorkMappingRequest.setEntity(new InputStreamEntity(getClass().getResourceAsStream("/" + type + "_mapping.json"), APPLICATION_JSON)); try (CloseableHttpResponse create = httpclient.execute(putWorkMappingRequest)) { int statusCode = create.getStatusLine().getStatusCode(); LOG.info("Create mapping request for " + type + " returned status " + statusCode); if (statusCode != HTTP_OK) { throw new ServerErrorException("Failed to create elasticsearch mapping for " + type, HTTP_INTERNAL_ERROR); } } } private Response searchWithJson(String body, URIBuilder searchUriBuilder, Function<String, String>... 
jsonTranformer) { try { HttpPost httpPost = new HttpPost(searchUriBuilder.build()); httpPost.setEntity(new StringEntity(body, StandardCharsets.UTF_8)); httpPost.setHeader(CONTENT_TYPE, "application/json"); Pair<String, Header[]> searchResult = executeHttpRequest(httpPost); if (jsonTranformer != null && jsonTranformer.length > 0) { String transformed = jsonTranformer[0].apply(searchResult.getLeft()); Header[] headers = searchResult.getRight(); searchResult = Pair.of(transformed, removeHeader(headers, CONTENT_LENGTH)); } return createResponse(searchResult); } catch (Exception e) { LOG.error(e.getMessage(), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private Header[] removeHeader(Header[] headers, String headerName) { return stream(headers) .filter(header -> !header .getName() .toLowerCase() .equalsIgnoreCase(headerName)) .toArray(Header[]::new); } private Response createResponse(Pair<String, Header[]> searchResult) { Response.ResponseBuilder responseBuilder = Response.ok(searchResult.getLeft()); for (Header header : searchResult.getRight()) { responseBuilder = responseBuilder.header(header.getName(), header.getValue()); } return responseBuilder.build(); } private Pair<String, Header[]> executeHttpRequest(HttpRequestBase httpRequestBase) throws IOException { try (CloseableHttpClient httpclient = createDefault(); CloseableHttpResponse response = httpclient.execute(httpRequestBase)) { HttpEntity responseEntity = response.getEntity(); String jsonContent = IOUtils.toString(responseEntity.getContent()); Header[] headers = response.getAllHeaders(); return Pair.<String, Header[]>of(jsonContent, headers); } catch (Exception e) { throw e; } } @Override public final Response searchWork(String query) { return doSearch(query, getWorkSearchUriBuilder(null)); } @Override public final Response searchPerson(String query) { return doSearch(query, getPersonSearchUriBuilder()); } @Override public final Response searchPlace(String query) { return doSearch(query, getPlaceUriBuilder()); } @Override public final Response searchCorporation(String query) { return doSearch(query, getCorporationSearchUriBuilder()); } @Override public final Response searchSerial(String query) { return doSearch(query, getSerialSearchUriBuilder()); } @Override public final Response searchSubject(String query) { return doSearch(query, getSubjectSearchUriBuilder()); } @Override public final Response searchGenre(String query) { return doSearch(query, getGenreSearchUriBuilder()); } @Override public final Response searchPublication(String query) { return doSearch(query, getPublicationSearchUriBuilder()); } @Override public final void delete(XURI xuri) { try (CloseableHttpClient httpclient = createDefault()) { HttpDelete httpDelete = new HttpDelete(getIndexUriBuilder() .setPath(format("/search/%s/%s", xuri.getType(), encode(xuri.getUri(), UTF_8))) .build()); try (CloseableHttpResponse putResponse = httpclient.execute(httpDelete)) { LOG.debug(putResponse.getStatusLine().toString()); } } catch (Exception e) { LOG.error(format("Failed to delete %s in elasticsearch", xuri.getUri()), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } @Override public final Response sortedList(String type, String prefix, int minSize, String field) { EntityType entityType = EntityType.get(type); URIBuilder searchUriBuilder = getIndexUriBuilder().setPath("/search/" + type + "/_search").setParameter("size", Integer.toString(minSize)); switch (entityType) { case PERSON: case CORPORATION: case PLACE: case 
SUBJECT: case EVENT: case WORK_SERIES: case SERIAL: case GENRE: case MUSICAL_INSTRUMENT: case MUSICAL_COMPOSITION_TYPE: Collection<NameEntry> nameEntries = entityService.neighbourhoodOfName(entityType, prefix, minSize); return searchWithJson(createPreIndexedSearchQuery(minSize, nameEntries), searchUriBuilder, orderResultByIdOrder(nameEntries .stream() .map(NameEntry::getUri) .collect(toList()))); default: return searchWithJson(createSortedListQuery(prefix, minSize, field), searchUriBuilder); } } private Function<String, String> orderResultByIdOrder(Collection<String> ids) { Map<String, Integer> desiredOrder = new HashMap<>(ids.size()); final int[] i = new int[]{0}; ids.forEach(id -> desiredOrder.put(urlEncode(id), i[0]++)); return s -> { Map fromJson = GSON.fromJson(s, Map.class); ((List) ((Map) fromJson.get("hits")).get("hits")).sort((o1, o2) -> { String id1 = (String) ((Map) o1).get("_id"); String id2 = (String) ((Map) o2).get("_id"); return desiredOrder.get(id1).compareTo(desiredOrder.get(id2)); }); return GSON.toJson(fromJson); }; } private String createSortedListQuery(String prefix, int minSize, String field) { String sortedListQuery; List<Map> should = new ArrayList<>(); for (int i = 0; i < prefix.length(); i++) { should.add( of("constant_score", of("boost", 2 << Math.max(prefix.length() - i, SIXTY_ONE), "query", of("match_phrase_prefix", of(field, prefix.substring(0, prefix.length() - i)))))); } sortedListQuery = GSON.toJson(of( "size", minSize, "query", of( "bool", of( "should", should) ) )); return sortedListQuery; } private String createPreIndexedSearchQuery(int minSize, Collection<NameEntry> nameEntries) { List<Map> should = new ArrayList<>(); should.addAll(nameEntries .stream() .filter(NameEntry::isBestMatch) .map(e -> of( "ids", of("values", newArrayList(urlEncode(e.getUri()))))) .collect(toList())); should.add(of( "ids", of("values", nameEntries .stream() .map(NameEntry::getUri) .map(SearchServiceImpl::urlEncode) .collect(toList()) ) )); return GSON.toJson( of( "size", minSize, "query", of( "bool", of("should", should) ) ) ); } private static String urlEncode(String uri) { return uri.replace(":", "%3A").replace("/", "%2F"); } @Override public final Response searchWorkWhereUriIsSubject(String subjectUri, int maxSize) { String body = GSON.toJson(of( "size", maxSize, "query", of( "nested", of( "path", "subjects", "query", of("term", of( "subjects.uri", subjectUri) ) ) ) )); return searchWithJson(body, getIndexUriBuilder().setPath("/search/work/_search")); } @Override public final Response searchWorkSeries(String query) { return doSearch(query, getWorkSeriesSearchUriBuilder()); } private URIBuilder getWorkSeriesSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/workSeries/_search"); } private void doIndexPublication(XURI pubUri) throws Exception { Model pubModel = entityService.retrieveById(pubUri); Property publicationOfProperty = ResourceFactory.createProperty(ontology("publicationOf")); if (pubModel.getProperty(null, publicationOfProperty) != null) { String workUri = pubModel.getProperty(ResourceFactory.createResource(pubUri.toString()), publicationOfProperty).getObject().toString(); XURI workXURI = new XURI(workUri); pubModel = entityService.retrieveWorkWithLinkedResources(workXURI); } indexDocument(pubUri, publicationModelToIndexMapper.createIndexDocument(pubModel, pubUri)); } private void doIndexWork(XURI xuri, boolean indexedPerson, boolean indexedPublication) throws Exception { Monitor mon = MonitorFactory.start("doIndexWork1"); Model 
workModelWithLinkedResources = entityService.retrieveWorkWithLinkedResources(xuri); indexDocument(xuri, workModelToIndexMapper.createIndexDocument(workModelWithLinkedResources, xuri)); mon.stop(); mon = MonitorFactory.start("doIndexWork2"); if (!indexedPerson) { for (Statement stmt : workModelWithLinkedResources.listStatements().toList()) { if (stmt.getPredicate().equals(AGENT)) { XURI creatorXuri = new XURI(stmt.getObject().asNode().getURI()); doIndexWorkCreatorOnly(creatorXuri); } } } mon.stop(); if (indexedPublication) { return; } // Index all publications belonging to work // TODO instead of iterating over all subjects, find only subjects of triples with publicationOf as predicate mon = MonitorFactory.start("doIndexWork3"); ResIterator subjectIterator = workModelWithLinkedResources.listSubjects(); while (subjectIterator.hasNext()) { Resource subj = subjectIterator.next(); if (subj.isAnon()) { continue; } if (subj.toString().contains("publication")) { XURI pubUri = new XURI(subj.toString()); indexDocument(pubUri, publicationModelToIndexMapper.createIndexDocument(workModelWithLinkedResources, pubUri)); } } mon.stop(); } private void doIndexWorkCreator(XURI creatorUri, boolean indexedWork) throws Exception { Monitor mon = MonitorFactory.start("doIndexWorkCreator"); Model works = entityService.retrieveWorksByCreator(creatorUri); if (!indexedWork) { ResIterator subjectIterator = works.listSubjects(); while (subjectIterator.hasNext()) { Resource subj = subjectIterator.next(); if (subj.isAnon() || subj.toString().indexOf('#') != -1) { continue; } XURI workUri = new XURI(subj.toString()); if (!workUri.getUri().equals(creatorUri.getUri())) { doIndexWorkOnly(workUri); } } } switch (creatorUri.getTypeAsEntityType()) { case PERSON: indexDocument(creatorUri, personModelToIndexMapper .createIndexDocument(entityService.retrievePersonWithLinkedResources(creatorUri).add(works), creatorUri)); cacheNameIndex(creatorUri, works); break; case CORPORATION: indexDocument(creatorUri, corporationModelToIndexMapper .createIndexDocument(entityService.retrieveCorporationWithLinkedResources(creatorUri).add(works), creatorUri)); cacheNameIndex(creatorUri, works); break; default: throw new RuntimeException(format( "Tried to index work creator of type %1$s. 
Should be %2$s or %3$s", creatorUri.getTypeAsEntityType(), EntityType.PERSON, EntityType.CORPORATION )); } mon.stop(); } private void doIndex(XURI xuri) throws Exception { Model indexModel = entityService.retrieveById(xuri); Monitor mon = MonitorFactory.start("createIndexDocument"); String indexDocument = new ModelToIndexMapper(xuri.getTypeAsEntityType().getPath()).createIndexDocument(indexModel, xuri); mon.stop(); indexDocument(xuri, indexDocument); cacheNameIndex(xuri, indexModel); } private void cacheNameIndex(XURI xuri, Model indexModel) { entityService.statementsInModelAbout(xuri, indexModel, LOCAL_INDEX_SEARCH_FIELDS) .forEachRemaining(statement -> { entityService.addIndexedName( xuri.getTypeAsEntityType(), statement.getObject().asLiteral().toString(), statement.getSubject().getURI()); }); } private void doIndexWorkOnly(XURI xuri) throws Exception { doIndexWork(xuri, true, false); } private void indexDocument(XURI xuri, String document) { try (CloseableHttpClient httpclient = createDefault()) { HttpPut httpPut = new HttpPut(getIndexUriBuilder() .setPath(format("/search/%s/%s", xuri.getType(), encode(xuri.getUri(), UTF_8))) // TODO drop urlencoded ID, and define _id in mapping from field uri .build()); httpPut.setEntity(new StringEntity(document, Charset.forName(UTF_8))); httpPut.setHeader(CONTENT_TYPE, APPLICATION_JSON.withCharset(UTF_8).toString()); Monitor mon = MonitorFactory.start("indexDocument"); try (CloseableHttpResponse putResponse = httpclient.execute(httpPut)) { LOG.debug(putResponse.getStatusLine().toString()); } finally { mon.stop(); } } catch (Exception e) { LOG.error(format("Failed to index %s in elasticsearch", xuri.getUri()), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private Response doSearch(String query, URIBuilder searchUriBuilder) { try { HttpGet httpGet = new HttpGet(searchUriBuilder .setParameter("q", query) .setParameter("size", "100") .build()); return createResponse(executeHttpRequest(httpGet)); } catch (Exception e) { LOG.error(e.getMessage(), e); throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR); } } private void doIndexWorkCreatorOnly(XURI xuri) throws Exception { doIndexWorkCreator(xuri, true); } private URIBuilder getIndexUriBuilder() { try { return new URIBuilder(this.elasticSearchBaseUrl); } catch (URISyntaxException e) { LOG.error("Failed to create uri builder for elasticsearch"); throw new RuntimeException(e); } } private URIBuilder getWorkSearchUriBuilder(MultivaluedMap<String, String> queryParams) { URIBuilder uriBuilder = getIndexUriBuilder().setPath("/search/work/_search"); if (queryParams != null && !queryParams.isEmpty()) { List<NameValuePair> nvpList = new ArrayList<>(queryParams.size()); queryParams.forEach((key, values) -> { values.forEach(value -> { nvpList.add(new BasicNameValuePair(key, value)); }); }); uriBuilder.setParameters(nvpList); } return uriBuilder; } private URIBuilder getPersonSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/person/_search"); } public final URIBuilder getPlaceUriBuilder() { return getIndexUriBuilder().setPath("/search/place/_search"); } public final URIBuilder getCorporationSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/corporation/_search"); } public final URIBuilder getSerialSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/serial/_search"); } public final URIBuilder getSubjectSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/subject/_search"); } public final URIBuilder 
getGenreSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/genre/_search"); } public final URIBuilder getPublicationSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/publication/_search"); } public final URIBuilder getInstrumentSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/instrument/_search"); } public final URIBuilder getCompositionTypeSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/compositionType/_search"); } private URIBuilder getEventSearchUriBuilder() { return getIndexUriBuilder().setPath("/search/event/_search"); } }
services: when indexing work only index person in main entry contribution
redef/services/src/main/java/no/deichman/services/search/SearchServiceImpl.java
services: when indexing work only index person in main entry contribution
<ide><path>redef/services/src/main/java/no/deichman/services/search/SearchServiceImpl.java <ide> import org.apache.jena.rdf.model.ResIterator; <ide> import org.apache.jena.rdf.model.Resource; <ide> import org.apache.jena.rdf.model.ResourceFactory; <add>import org.apache.jena.rdf.model.SimpleSelector; <ide> import org.apache.jena.rdf.model.Statement; <add>import org.apache.jena.vocabulary.RDF; <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <ide> <ide> ontology("prefLabel"), <ide> ontology("mainTitle") <ide> }; <add> public static final Resource MAIN_ENTRY = ResourceFactory.createResource(ontology("MainEntry")); <ide> private final EntityService entityService; <ide> private final String elasticSearchBaseUrl; <ide> private ModelToIndexMapper workModelToIndexMapper = new ModelToIndexMapper("work"); <ide> mon.stop(); <ide> mon = MonitorFactory.start("doIndexWork2"); <ide> if (!indexedPerson) { <del> for (Statement stmt : workModelWithLinkedResources.listStatements().toList()) { <del> if (stmt.getPredicate().equals(AGENT)) { <add> workModelWithLinkedResources.listStatements(new SimpleSelector() { <add> @Override <add> public boolean test(Statement s) { <add> return (s.getPredicate().equals(AGENT) <add> && workModelWithLinkedResources.contains(s.getSubject(), RDF.type, MAIN_ENTRY)); <add> } <add> }).forEachRemaining(stmt -> { <add> try { <ide> XURI creatorXuri = new XURI(stmt.getObject().asNode().getURI()); <ide> doIndexWorkCreatorOnly(creatorXuri); <add> } catch (Exception e) { <add> e.printStackTrace(); <ide> } <del> } <add> }); <ide> } <ide> mon.stop(); <ide> if (indexedPublication) {
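The diff above swaps a scan over every statement for a Jena SimpleSelector, so only agent statements whose subject carries an RDF type of MainEntry trigger creator indexing. The sketch below illustrates the same selector pattern in isolation; the example namespace and resource URIs are placeholders, not the real deichman ontology() values.

```java
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.rdf.model.SimpleSelector;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.vocabulary.RDF;

// Sketch of the selector introduced in the diff: only agent statements whose
// subject is typed as MainEntry are visited. NS is a placeholder namespace,
// not the real deichman ontology base URI.
public final class MainEntrySelectorDemo {
    private static final String NS = "http://example.org/ontology#";
    private static final Property AGENT = ResourceFactory.createProperty(NS + "agent");
    private static final Resource MAIN_ENTRY = ResourceFactory.createResource(NS + "MainEntry");

    public static void main(String[] args) {
        Model model = ModelFactory.createDefaultModel();
        // A main-entry contribution: its agent should be indexed.
        model.createResource(NS + "contribution/1")
                .addProperty(RDF.type, MAIN_ENTRY)
                .addProperty(AGENT, model.createResource(NS + "person/1"));
        // A plain contribution: its agent should be skipped.
        model.createResource(NS + "contribution/2")
                .addProperty(AGENT, model.createResource(NS + "person/2"));

        model.listStatements(new SimpleSelector() {
            @Override
            public boolean test(Statement s) {
                return s.getPredicate().equals(AGENT)
                        && model.contains(s.getSubject(), RDF.type, MAIN_ENTRY);
            }
        }).forEachRemaining(stmt -> System.out.println("index creator: " + stmt.getObject()));
        // Prints only .../person/1
    }
}
```

The design choice matches the commit message: rather than indexing every agent reachable from the work, the selector restricts creator indexing to agents of main-entry contributions, avoiding redundant re-indexing of incidental contributors.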
Java
apache-2.0
f6bddb36fd2e7dfde7e4220a2c07a03c2883acdd
0
jianxiansining/acra,luoxiaoshenghustedu/acra,F43nd1r/acra,VikingDen/acra,admin-zhx/acra,F43nd1r/acra,cpinan/acra,deshion/acra,drewis/acra,MaTriXy/acra,simple88/acra,ACRA/acra,hgl888/acra,NKaushik89/acra,yungfan/acra,pjdelport/acra,ACRA/acra,ACRA/acra,ACRA/acra,awesome-niu/acra,fitramabr/acra,pfn/acra,Crazyphil/acra,0359xiaodong/acra,jbc25/acra
/* * Copyright 2012 Kevin Gaudin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.acra.collector; import static org.acra.ACRA.LOG_TAG; import static org.acra.ReportField.ANDROID_VERSION; import static org.acra.ReportField.APPLICATION_LOG; import static org.acra.ReportField.APP_VERSION_CODE; import static org.acra.ReportField.APP_VERSION_NAME; import static org.acra.ReportField.AVAILABLE_MEM_SIZE; import static org.acra.ReportField.BRAND; import static org.acra.ReportField.BUILD; import static org.acra.ReportField.CRASH_CONFIGURATION; import static org.acra.ReportField.CUSTOM_DATA; import static org.acra.ReportField.DEVICE_FEATURES; import static org.acra.ReportField.DEVICE_ID; import static org.acra.ReportField.DISPLAY; import static org.acra.ReportField.DROPBOX; import static org.acra.ReportField.DUMPSYS_MEMINFO; import static org.acra.ReportField.ENVIRONMENT; import static org.acra.ReportField.EVENTSLOG; import static org.acra.ReportField.FILE_PATH; import static org.acra.ReportField.INITIAL_CONFIGURATION; import static org.acra.ReportField.INSTALLATION_ID; import static org.acra.ReportField.IS_SILENT; import static org.acra.ReportField.LOGCAT; import static org.acra.ReportField.MEDIA_CODEC_LIST; import static org.acra.ReportField.PACKAGE_NAME; import static org.acra.ReportField.PHONE_MODEL; import static org.acra.ReportField.PRODUCT; import static org.acra.ReportField.RADIOLOG; import static org.acra.ReportField.REPORT_ID; import static org.acra.ReportField.SETTINGS_SECURE; import static org.acra.ReportField.SETTINGS_SYSTEM; import static org.acra.ReportField.SHARED_PREFERENCES; import static org.acra.ReportField.STACK_TRACE; import static org.acra.ReportField.THREAD_DETAILS; import static org.acra.ReportField.TOTAL_MEM_SIZE; import static org.acra.ReportField.USER_CRASH_DATE; import static org.acra.ReportField.USER_EMAIL; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import org.acra.ACRA; import org.acra.ReportField; import org.acra.annotation.ReportsCrashes; import org.acra.util.Installation; import org.acra.util.PackageManagerWrapper; import org.acra.util.ReportUtils; import android.Manifest; import android.content.Context; import android.content.SharedPreferences; import android.content.pm.PackageInfo; import android.os.Environment; import android.text.format.Time; import android.util.Log; /** * Responsible for creating the CrashReportData for an Exception. * <p> * Also responsible for holding the custom data to send with each report. 
* </p> * * @author William Ferguson * @since 4.3.0 */ public final class CrashReportDataFactory { private final Context context; private final SharedPreferences prefs; private final List<ReportField> crashReportFields; private final Map<String, String> customParameters = new HashMap<String, String>(); private final Time appStartDate; private final String initialConfiguration; public CrashReportDataFactory(Context context, SharedPreferences prefs, Time appStartDate, String initialConfiguration) { this.context = context; this.prefs = prefs; this.appStartDate = appStartDate; this.initialConfiguration = initialConfiguration; final ReportsCrashes config = ACRA.getConfig(); final ReportField[] customReportFields = config.customReportContent(); final ReportField[] fieldsList; if (customReportFields.length != 0) { Log.d(LOG_TAG, "Using custom Report Fields"); fieldsList = customReportFields; } else if (config.mailTo() == null || "".equals(config.mailTo())) { Log.d(LOG_TAG, "Using default Report Fields"); fieldsList = ACRA.DEFAULT_REPORT_FIELDS; } else { Log.d(LOG_TAG, "Using default Mail Report Fields"); fieldsList = ACRA.DEFAULT_MAIL_REPORT_FIELDS; } this.crashReportFields = Arrays.asList(fieldsList); } /** * <p> * Adds a custom key and value to be reported with the generated * CrashReportData. * </p> * <p> * The key/value pairs will be stored in the "custom" column, as a text * containing one 'key = value' pair on each line. * </p> * * @param key * A key for your custom data. * @param value * The value associated to your key. * @return The previous value for this key if there was one, or null. */ public String putCustomData(String key, String value) { return customParameters.put(key, value); } /** * Removes a key/value pair from the custom data field. * * @param key * The key of the data to be removed. * @return The value for this key before removal. */ public String removeCustomData(String key) { return customParameters.remove(key); } /** * Gets the current value for a key in the custom data field. * * @param key * The key of the data to be retrieved. * @return The value for this key. */ public String getCustomData(String key) { return customParameters.get(key); } /** * Collects crash data. * * @param th * Throwable that caused the crash. * @param isSilentReport * Whether this report is being sent silently. * @param brokenThread * The thread in which the exception was thrown. * @return CrashReportData representing the current state of the application * at the instant of the Exception. */ public CrashReportData createCrashData(Throwable th, boolean isSilentReport, Thread brokenThread) { final CrashReportData crashReportData = new CrashReportData(); try { // Make every entry here bullet proof and move any slightly dodgy // ones to the end. // This ensures that we collect as much info as possible before // something crashes the collection process.
crashReportData.put(STACK_TRACE, getStackTrace(th)); crashReportData.put(ReportField.USER_APP_START_DATE, appStartDate.format3339(false)); if (isSilentReport) { crashReportData.put(IS_SILENT, "true"); } // Generate report uuid if (crashReportFields.contains(REPORT_ID)) { crashReportData.put(ReportField.REPORT_ID, UUID.randomUUID().toString()); } // Installation unique ID if (crashReportFields.contains(INSTALLATION_ID)) { crashReportData.put(INSTALLATION_ID, Installation.id(context)); } // Device Configuration when crashing if (crashReportFields.contains(INITIAL_CONFIGURATION)) { crashReportData.put(INITIAL_CONFIGURATION, initialConfiguration); } if (crashReportFields.contains(CRASH_CONFIGURATION)) { crashReportData.put(CRASH_CONFIGURATION, ConfigurationCollector.collectConfiguration(context)); } // Collect meminfo if (!(th instanceof OutOfMemoryError) && crashReportFields.contains(DUMPSYS_MEMINFO)) { crashReportData.put(DUMPSYS_MEMINFO, DumpSysCollector.collectMemInfo()); } // Application Package name if (crashReportFields.contains(PACKAGE_NAME)) { crashReportData.put(PACKAGE_NAME, context.getPackageName()); } // Android OS Build details if (crashReportFields.contains(BUILD)) { crashReportData.put(BUILD, ReflectionCollector.collectConstants(android.os.Build.class)); } // Device model if (crashReportFields.contains(PHONE_MODEL)) { crashReportData.put(PHONE_MODEL, android.os.Build.MODEL); } // Android version if (crashReportFields.contains(ANDROID_VERSION)) { crashReportData.put(ANDROID_VERSION, android.os.Build.VERSION.RELEASE); } // Device Brand (manufacturer) if (crashReportFields.contains(BRAND)) { crashReportData.put(BRAND, android.os.Build.BRAND); } if (crashReportFields.contains(PRODUCT)) { crashReportData.put(PRODUCT, android.os.Build.PRODUCT); } // Device Memory if (crashReportFields.contains(TOTAL_MEM_SIZE)) { crashReportData.put(TOTAL_MEM_SIZE, Long.toString(ReportUtils.getTotalInternalMemorySize())); } if (crashReportFields.contains(AVAILABLE_MEM_SIZE)) { crashReportData.put(AVAILABLE_MEM_SIZE, Long.toString(ReportUtils.getAvailableInternalMemorySize())); } // Application file path if (crashReportFields.contains(FILE_PATH)) { crashReportData.put(FILE_PATH, ReportUtils.getApplicationFilePath(context)); } // Main display details if (crashReportFields.contains(DISPLAY)) { crashReportData.put(DISPLAY, ReportUtils.getDisplayDetails(context)); } // User crash date with local timezone if (crashReportFields.contains(USER_CRASH_DATE)) { final Time curDate = new Time(); curDate.setToNow(); crashReportData.put(USER_CRASH_DATE, curDate.format3339(false)); } // Add custom info, they are all stored in a single field if (crashReportFields.contains(CUSTOM_DATA)) { crashReportData.put(CUSTOM_DATA, createCustomInfoString()); } // Add user email address, if set in the app's preferences if (crashReportFields.contains(USER_EMAIL)) { crashReportData.put(USER_EMAIL, prefs.getString(ACRA.PREF_USER_EMAIL_ADDRESS, "N/A")); } // Device features if (crashReportFields.contains(DEVICE_FEATURES)) { crashReportData.put(DEVICE_FEATURES, DeviceFeaturesCollector.getFeatures(context)); } // Environment (External storage state) if (crashReportFields.contains(ENVIRONMENT)) { crashReportData.put(ENVIRONMENT, ReflectionCollector.collectStaticGettersResults(Environment.class)); } // System settings if (crashReportFields.contains(SETTINGS_SYSTEM)) { crashReportData.put(SETTINGS_SYSTEM, SettingsCollector.collectSystemSettings(context)); } // Secure settings if (crashReportFields.contains(SETTINGS_SECURE)) { 
crashReportData.put(SETTINGS_SECURE, SettingsCollector.collectSecureSettings(context)); } // SharedPreferences if (crashReportFields.contains(SHARED_PREFERENCES)) { crashReportData.put(SHARED_PREFERENCES, SharedPreferencesCollector.collect(context)); } // Now get all the crash data that relies on the PackageManager // (which may or may not be here). final PackageManagerWrapper pm = new PackageManagerWrapper(context); final PackageInfo pi = pm.getPackageInfo(); if (pi != null) { // Application Version if (crashReportFields.contains(APP_VERSION_CODE)) { crashReportData.put(APP_VERSION_CODE, Integer.toString(pi.versionCode)); } if (crashReportFields.contains(APP_VERSION_NAME)) { crashReportData.put(APP_VERSION_NAME, pi.versionName != null ? pi.versionName : "not set"); } } else { // Could not retrieve package info... crashReportData.put(APP_VERSION_NAME, "Package info unavailable"); } // Retrieve UDID(IMEI) if permission is available if (crashReportFields.contains(DEVICE_ID) && prefs.getBoolean(ACRA.PREF_ENABLE_DEVICE_ID, true) && pm.hasPermission(Manifest.permission.READ_PHONE_STATE)) { final String deviceId = ReportUtils.getDeviceId(context); if (deviceId != null) { crashReportData.put(DEVICE_ID, deviceId); } } // Collect DropBox and logcat // Before JellyBean, this required the READ_LOGS permission // Since JellyBean, READ_LOGS is not granted to third-party apps anymore for security reasons. // Though, we can call logcat without any permission and still get traces related to our app. if (prefs.getBoolean(ACRA.PREF_ENABLE_SYSTEM_LOGS, true) && (pm.hasPermission(Manifest.permission.READ_LOGS)) || Compatibility.getAPILevel() >= 16) { Log.i(ACRA.LOG_TAG, "READ_LOGS granted! ACRA can include LogCat and DropBox data."); if (crashReportFields.contains(LOGCAT)) { crashReportData.put(LOGCAT, LogCatCollector.collectLogCat(null)); } if (crashReportFields.contains(EVENTSLOG)) { crashReportData.put(EVENTSLOG, LogCatCollector.collectLogCat("events")); } if (crashReportFields.contains(RADIOLOG)) { crashReportData.put(RADIOLOG, LogCatCollector.collectLogCat("radio")); } if (crashReportFields.contains(DROPBOX)) { crashReportData.put(DROPBOX, DropBoxCollector.read(context, ACRA.getConfig().additionalDropBoxTags())); } } else { Log.i(ACRA.LOG_TAG, "READ_LOGS not allowed. ACRA will not include LogCat and DropBox data."); } // Application specific log file if (crashReportFields.contains(APPLICATION_LOG)) { crashReportData.put(APPLICATION_LOG, LogFileCollector.collectLogFile(context, ACRA.getConfig() .applicationLogFile(), ACRA.getConfig().applicationLogFileLines())); } // Media Codecs list if (crashReportFields.contains(MEDIA_CODEC_LIST)) { crashReportData.put(MEDIA_CODEC_LIST, MediaCodecListCollector.collecMediaCodecList()); } // Failing thread details if (crashReportFields.contains(THREAD_DETAILS)) { crashReportData.put(THREAD_DETAILS, ThreadCollector.collect(brokenThread)); } } catch (RuntimeException e) { Log.e(LOG_TAG, "Error while retrieving crash data", e); } catch (FileNotFoundException e) { Log.e(LOG_TAG, "Error : application log file " + ACRA.getConfig().applicationLogFile() + " not found.", e); } catch (IOException e) { Log.e(LOG_TAG, "Error while reading application log file " + ACRA.getConfig().applicationLogFile() + ".", e); } return crashReportData; } /** * Generates the string which is posted in the single custom data field in * the GoogleDocs Form. * * @return A string with a 'key = value' pair on each line. 
*/ private String createCustomInfoString() { final StringBuilder customInfo = new StringBuilder(); for (final String currentKey : customParameters.keySet()) { final String currentVal = customParameters.get(currentKey); customInfo.append(currentKey); customInfo.append(" = "); customInfo.append(currentVal); customInfo.append("\n"); } return customInfo.toString(); } private String getStackTrace(Throwable th) { final Writer result = new StringWriter(); final PrintWriter printWriter = new PrintWriter(result); // If the exception was thrown in a background thread inside // AsyncTask, then the actual exception can be found with getCause Throwable cause = th; while (cause != null) { cause.printStackTrace(printWriter); cause = cause.getCause(); } final String stacktraceAsString = result.toString(); printWriter.close(); return stacktraceAsString; } }
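The getStackTrace helper above prints the throwable and then every nested cause, because an exception raised on a background thread (for example inside an AsyncTask) typically arrives wrapped, with the real failure only reachable through getCause(). A minimal standalone sketch of the same unwrapping loop, outside ACRA (the class and method names below are illustrative, not part of the library):

import java.io.PrintWriter;
import java.io.StringWriter;

public class CauseChainDemo {

    // Renders the given throwable and every nested cause, mirroring the
    // loop in CrashReportDataFactory.getStackTrace(Throwable) above.
    static String fullTrace(Throwable th) {
        final StringWriter result = new StringWriter();
        final PrintWriter printWriter = new PrintWriter(result);
        for (Throwable cause = th; cause != null; cause = cause.getCause()) {
            cause.printStackTrace(printWriter);
        }
        printWriter.close();
        return result.toString();
    }

    public static void main(String[] args) {
        // Simulates a wrapped background-thread failure.
        final Throwable wrapped = new RuntimeException("task failed",
                new IllegalStateException("actual root cause"));
        System.out.print(fullTrace(wrapped));
    }
}

Note that each printStackTrace call already appends its own "Caused by:" chain with trimmed frames, so the explicit loop trades some duplication for printing every cause's full frame list.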
acra/src/main/java/org/acra/collector/CrashReportDataFactory.java
/* * Copyright 2012 Kevin Gaudin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.acra.collector; import static org.acra.ACRA.LOG_TAG; import static org.acra.ReportField.ANDROID_VERSION; import static org.acra.ReportField.APPLICATION_LOG; import static org.acra.ReportField.APP_VERSION_CODE; import static org.acra.ReportField.APP_VERSION_NAME; import static org.acra.ReportField.AVAILABLE_MEM_SIZE; import static org.acra.ReportField.BRAND; import static org.acra.ReportField.BUILD; import static org.acra.ReportField.CRASH_CONFIGURATION; import static org.acra.ReportField.CUSTOM_DATA; import static org.acra.ReportField.DEVICE_FEATURES; import static org.acra.ReportField.DEVICE_ID; import static org.acra.ReportField.DISPLAY; import static org.acra.ReportField.DROPBOX; import static org.acra.ReportField.DUMPSYS_MEMINFO; import static org.acra.ReportField.ENVIRONMENT; import static org.acra.ReportField.EVENTSLOG; import static org.acra.ReportField.FILE_PATH; import static org.acra.ReportField.INITIAL_CONFIGURATION; import static org.acra.ReportField.INSTALLATION_ID; import static org.acra.ReportField.IS_SILENT; import static org.acra.ReportField.LOGCAT; import static org.acra.ReportField.MEDIA_CODEC_LIST; import static org.acra.ReportField.PACKAGE_NAME; import static org.acra.ReportField.PHONE_MODEL; import static org.acra.ReportField.PRODUCT; import static org.acra.ReportField.RADIOLOG; import static org.acra.ReportField.REPORT_ID; import static org.acra.ReportField.SETTINGS_SECURE; import static org.acra.ReportField.SETTINGS_SYSTEM; import static org.acra.ReportField.SHARED_PREFERENCES; import static org.acra.ReportField.STACK_TRACE; import static org.acra.ReportField.THREAD_DETAILS; import static org.acra.ReportField.TOTAL_MEM_SIZE; import static org.acra.ReportField.USER_CRASH_DATE; import static org.acra.ReportField.USER_EMAIL; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import org.acra.ACRA; import org.acra.ReportField; import org.acra.annotation.ReportsCrashes; import org.acra.util.Installation; import org.acra.util.PackageManagerWrapper; import org.acra.util.ReportUtils; import android.Manifest; import android.content.Context; import android.content.SharedPreferences; import android.content.pm.PackageInfo; import android.os.Environment; import android.text.format.Time; import android.util.Log; /** * Responsible for creating the CrashReportData for an Exception. * <p> * Also responsible for holding the custom data to send with each report. 
* </p> * * @author William Ferguson * @since 4.3.0 */ public final class CrashReportDataFactory { private final Context context; private final SharedPreferences prefs; private final List<ReportField> crashReportFields; private final Map<String, String> customParameters = new HashMap<String, String>(); private final Time appStartDate; private final String initialConfiguration; public CrashReportDataFactory(Context context, SharedPreferences prefs, Time appStartDate, String initialConfiguration) { this.context = context; this.prefs = prefs; this.appStartDate = appStartDate; this.initialConfiguration = initialConfiguration; final ReportsCrashes config = ACRA.getConfig(); final ReportField[] customReportFields = config.customReportContent(); final ReportField[] fieldsList; if (customReportFields.length != 0) { Log.d(LOG_TAG, "Using custom Report Fields"); fieldsList = customReportFields; } else if (config.mailTo() == null || "".equals(config.mailTo())) { Log.d(LOG_TAG, "Using default Report Fields"); fieldsList = ACRA.DEFAULT_REPORT_FIELDS; } else { Log.d(LOG_TAG, "Using default Mail Report Fields"); fieldsList = ACRA.DEFAULT_MAIL_REPORT_FIELDS; } this.crashReportFields = Arrays.asList(fieldsList); } /** * <p> * Adds a custom key and value to be reported with the generated * CrashReportData. * </p> * <p> * The key/value pairs will be stored in the "custom" column, as a text * containing one 'key = value' pair on each line. * </p> * * @param key * A key for your custom data. * @param value * The value associated with your key. * @return The previous value for this key if there was one, or null. */ public String putCustomData(String key, String value) { return customParameters.put(key, value); } /** * Removes a key/value pair from the custom data field. * * @param key * The key of the data to be removed. * @return The value for this key before removal. */ public String removeCustomData(String key) { return customParameters.remove(key); } /** * Gets the current value for a key in the custom data field. * * @param key * The key of the data to be retrieved. * @return The value for this key. */ public String getCustomData(String key) { return customParameters.get(key); } /** * Collects crash data. * * @param th * Throwable that caused the crash. * @param isSilentReport * Whether this report is being sent silently. * @param brokenThread * The failing thread at the time of the crash. * @return CrashReportData representing the current state of the application * at the instant of the Exception. */ public CrashReportData createCrashData(Throwable th, boolean isSilentReport, Thread brokenThread) { final CrashReportData crashReportData = new CrashReportData(); try { // Make every entry here bullet proof and move any slightly dodgy // ones to the end. // This ensures that we collect as much info as possible before // something crashes the collection process.
crashReportData.put(STACK_TRACE, getStackTrace(th)); crashReportData.put(ReportField.USER_APP_START_DATE, appStartDate.format3339(false)); if (isSilentReport) { crashReportData.put(IS_SILENT, "true"); } // Generate report uuid if (crashReportFields.contains(REPORT_ID)) { crashReportData.put(ReportField.REPORT_ID, UUID.randomUUID().toString()); } // Installation unique ID if (crashReportFields.contains(INSTALLATION_ID)) { crashReportData.put(INSTALLATION_ID, Installation.id(context)); } // Device Configuration when crashing if (crashReportFields.contains(INITIAL_CONFIGURATION)) { crashReportData.put(INITIAL_CONFIGURATION, initialConfiguration); } if (crashReportFields.contains(CRASH_CONFIGURATION)) { crashReportData.put(CRASH_CONFIGURATION, ConfigurationCollector.collectConfiguration(context)); } // Collect meminfo if (!(th instanceof OutOfMemoryError) && crashReportFields.contains(DUMPSYS_MEMINFO)) { crashReportData.put(DUMPSYS_MEMINFO, DumpSysCollector.collectMemInfo()); } // Application Package name if (crashReportFields.contains(PACKAGE_NAME)) { crashReportData.put(PACKAGE_NAME, context.getPackageName()); } // Android OS Build details if (crashReportFields.contains(BUILD)) { crashReportData.put(BUILD, ReflectionCollector.collectConstants(android.os.Build.class)); } // Device model if (crashReportFields.contains(PHONE_MODEL)) { crashReportData.put(PHONE_MODEL, android.os.Build.MODEL); } // Android version if (crashReportFields.contains(ANDROID_VERSION)) { crashReportData.put(ANDROID_VERSION, android.os.Build.VERSION.RELEASE); } // Device Brand (manufacturer) if (crashReportFields.contains(BRAND)) { crashReportData.put(BRAND, android.os.Build.BRAND); } if (crashReportFields.contains(PRODUCT)) { crashReportData.put(PRODUCT, android.os.Build.PRODUCT); } // Device Memory if (crashReportFields.contains(TOTAL_MEM_SIZE)) { crashReportData.put(TOTAL_MEM_SIZE, Long.toString(ReportUtils.getTotalInternalMemorySize())); } if (crashReportFields.contains(AVAILABLE_MEM_SIZE)) { crashReportData.put(AVAILABLE_MEM_SIZE, Long.toString(ReportUtils.getAvailableInternalMemorySize())); } // Application file path if (crashReportFields.contains(FILE_PATH)) { crashReportData.put(FILE_PATH, ReportUtils.getApplicationFilePath(context)); } // Main display details if (crashReportFields.contains(DISPLAY)) { crashReportData.put(DISPLAY, ReportUtils.getDisplayDetails(context)); } // User crash date with local timezone if (crashReportFields.contains(USER_CRASH_DATE)) { final Time curDate = new Time(); curDate.setToNow(); crashReportData.put(USER_CRASH_DATE, curDate.format3339(false)); } // Add custom info, they are all stored in a single field if (crashReportFields.contains(CUSTOM_DATA)) { crashReportData.put(CUSTOM_DATA, createCustomInfoString()); } // Add user email address, if set in the app's preferences if (crashReportFields.contains(USER_EMAIL)) { crashReportData.put(USER_EMAIL, prefs.getString(ACRA.PREF_USER_EMAIL_ADDRESS, "N/A")); } // Device features if (crashReportFields.contains(DEVICE_FEATURES)) { crashReportData.put(DEVICE_FEATURES, DeviceFeaturesCollector.getFeatures(context)); } // Environment (External storage state) if (crashReportFields.contains(ENVIRONMENT)) { crashReportData.put(ENVIRONMENT, ReflectionCollector.collectStaticGettersResults(Environment.class)); } // System settings if (crashReportFields.contains(SETTINGS_SYSTEM)) { crashReportData.put(SETTINGS_SYSTEM, SettingsCollector.collectSystemSettings(context)); } // Secure settings if (crashReportFields.contains(SETTINGS_SECURE)) { 
crashReportData.put(SETTINGS_SECURE, SettingsCollector.collectSecureSettings(context)); } // SharedPreferences if (crashReportFields.contains(SHARED_PREFERENCES)) { crashReportData.put(SHARED_PREFERENCES, SharedPreferencesCollector.collect(context)); } // Now get all the crash data that relies on the PackageManager // (which may or may not be here). final PackageManagerWrapper pm = new PackageManagerWrapper(context); final PackageInfo pi = pm.getPackageInfo(); if (pi != null) { // Application Version if (crashReportFields.contains(APP_VERSION_CODE)) { crashReportData.put(APP_VERSION_CODE, Integer.toString(pi.versionCode)); } if (crashReportFields.contains(APP_VERSION_NAME)) { crashReportData.put(APP_VERSION_NAME, pi.versionName != null ? pi.versionName : "not set"); } } else { // Could not retrieve package info... crashReportData.put(APP_VERSION_NAME, "Package info unavailable"); } // Retrieve UDID(IMEI) if permission is available if (crashReportFields.contains(DEVICE_ID) && prefs.getBoolean(ACRA.PREF_ENABLE_DEVICE_ID, true) && pm.hasPermission(Manifest.permission.READ_PHONE_STATE)) { final String deviceId = ReportUtils.getDeviceId(context); if (deviceId != null) { crashReportData.put(DEVICE_ID, deviceId); } } // Collect DropBox and logcat if (prefs.getBoolean(ACRA.PREF_ENABLE_SYSTEM_LOGS, true) && pm.hasPermission(Manifest.permission.READ_LOGS)) { Log.i(ACRA.LOG_TAG, "READ_LOGS granted! ACRA can include LogCat and DropBox data."); if (crashReportFields.contains(LOGCAT)) { crashReportData.put(LOGCAT, LogCatCollector.collectLogCat(null)); } if (crashReportFields.contains(EVENTSLOG)) { crashReportData.put(EVENTSLOG, LogCatCollector.collectLogCat("events")); } if (crashReportFields.contains(RADIOLOG)) { crashReportData.put(RADIOLOG, LogCatCollector.collectLogCat("radio")); } if (crashReportFields.contains(DROPBOX)) { crashReportData.put(DROPBOX, DropBoxCollector.read(context, ACRA.getConfig().additionalDropBoxTags())); } } else { Log.i(ACRA.LOG_TAG, "READ_LOGS not allowed. ACRA will not include LogCat and DropBox data."); } // Application specific log file if (crashReportFields.contains(APPLICATION_LOG)) { crashReportData.put(APPLICATION_LOG, LogFileCollector.collectLogFile(context, ACRA.getConfig() .applicationLogFile(), ACRA.getConfig().applicationLogFileLines())); } // Media Codecs list if (crashReportFields.contains(MEDIA_CODEC_LIST)) { crashReportData.put(MEDIA_CODEC_LIST, MediaCodecListCollector.collecMediaCodecList()); } // Failing thread details if (crashReportFields.contains(THREAD_DETAILS)) { crashReportData.put(THREAD_DETAILS, ThreadCollector.collect(brokenThread)); } } catch (RuntimeException e) { Log.e(LOG_TAG, "Error while retrieving crash data", e); } catch (FileNotFoundException e) { Log.e(LOG_TAG, "Error : application log file " + ACRA.getConfig().applicationLogFile() + " not found.", e); } catch (IOException e) { Log.e(LOG_TAG, "Error while reading application log file " + ACRA.getConfig().applicationLogFile() + ".", e); } return crashReportData; } /** * Generates the string which is posted in the single custom data field in * the GoogleDocs Form. * * @return A string with a 'key = value' pair on each line. 
*/ private String createCustomInfoString() { final StringBuilder customInfo = new StringBuilder(); for (final String currentKey : customParameters.keySet()) { final String currentVal = customParameters.get(currentKey); customInfo.append(currentKey); customInfo.append(" = "); customInfo.append(currentVal); customInfo.append("\n"); } return customInfo.toString(); } private String getStackTrace(Throwable th) { final Writer result = new StringWriter(); final PrintWriter printWriter = new PrintWriter(result); // If the exception was thrown in a background thread inside // AsyncTask, then the actual exception can be found with getCause Throwable cause = th; while (cause != null) { cause.printStackTrace(printWriter); cause = cause.getCause(); } final String stacktraceAsString = result.toString(); printWriter.close(); return stacktraceAsString; } }
Allow LogCat collection for devices with JellyBean and later (API >= 16) Since JellyBean, READ_LOGS is not granted to third-party apps anymore for security reasons. Though, we can call logcat without any permission and still get traces related to our app.
acra/src/main/java/org/acra/collector/CrashReportDataFactory.java
Allow LogCat collection for devices with JellyBean and later (API >= 16) Since JellyBean, READ_LOGS is not granted to third-party apps anymore for security reasons. Though, we can call logcat without any permission and still get traces related to our app.
<ide><path>acra/src/main/java/org/acra/collector/CrashReportDataFactory.java <ide> } <ide> <ide> // Collect DropBox and logcat <del> if (prefs.getBoolean(ACRA.PREF_ENABLE_SYSTEM_LOGS, true) && pm.hasPermission(Manifest.permission.READ_LOGS)) { <add> // Before JellyBean, this required the READ_LOGS permission <add> // Since JellyBean, READ_LOGS is not granted to third-party apps anymore for security reasons. <add> // Though, we can call logcat without any permission and still get traces related to our app. <add> if (prefs.getBoolean(ACRA.PREF_ENABLE_SYSTEM_LOGS, true) <add> && (pm.hasPermission(Manifest.permission.READ_LOGS) <add> || Compatibility.getAPILevel() >= 16)) { <ide> Log.i(ACRA.LOG_TAG, "READ_LOGS granted! ACRA can include LogCat and DropBox data."); <ide> if (crashReportFields.contains(LOGCAT)) { <ide> crashReportData.put(LOGCAT, LogCatCollector.collectLogCat(null));
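The grouping in the new condition is load-bearing: && binds tighter than ||, so the un-grouped form a && b || c would let the API-level clause override the user's PREF_ENABLE_SYSTEM_LOGS opt-out. A self-contained sketch of the intended policy with the decision pulled into a pure function (the LogCollectionPolicy class below is hypothetical, not part of ACRA):

public final class LogCollectionPolicy {

    private LogCollectionPolicy() {
    }

    // The user preference must always be honored; given that, either an
    // explicit READ_LOGS grant (pre-JellyBean) or API level >= 16 (where
    // logcat returns the app's own traces without any permission) suffices.
    public static boolean canCollectLogs(boolean systemLogsEnabled,
            boolean hasReadLogsPermission, int apiLevel) {
        return systemLogsEnabled && (hasReadLogsPermission || apiLevel >= 16);
    }

    public static void main(String[] args) {
        // With the un-grouped form (a && b || c) the first case would be
        // true on JellyBean even though the user opted out.
        System.out.println(canCollectLogs(false, false, 16)); // false
        System.out.println(canCollectLogs(true, false, 16));  // true
        System.out.println(canCollectLogs(true, true, 10));   // true
    }
}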
Java
apache-2.0
6b9ade5560c0320a092e834cd7d9968c881e852e
0
reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api,reportportal/service-api
/* * Copyright 2019 EPAM Systems * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.epam.ta.reportportal.core.widget.content.loader; import com.epam.ta.reportportal.commons.querygen.Filter; import com.epam.ta.reportportal.commons.validation.BusinessRule; import com.epam.ta.reportportal.core.widget.content.LoadContentStrategy; import com.epam.ta.reportportal.core.widget.util.ContentFieldMatcherUtil; import com.epam.ta.reportportal.dao.WidgetContentRepository; import com.epam.ta.reportportal.entity.widget.WidgetOptions; import com.epam.ta.reportportal.entity.widget.content.ChartStatisticsContent; import com.epam.ta.reportportal.ws.model.ErrorType; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Sort; import org.springframework.stereotype.Service; import java.util.List; import java.util.Map; import static com.epam.ta.reportportal.commons.Predicates.equalTo; import static com.epam.ta.reportportal.commons.querygen.constant.GeneralCriteriaConstant.CRITERIA_START_TIME; import static com.epam.ta.reportportal.commons.querygen.constant.LaunchCriteriaConstant.CRITERIA_LAUNCH_NUMBER; import static com.epam.ta.reportportal.core.widget.content.constant.ContentLoaderConstants.RESULT; import static com.epam.ta.reportportal.core.widget.util.ContentFieldPatternConstants.COMBINED_CONTENT_FIELDS_REGEX; import static com.epam.ta.reportportal.core.widget.util.WidgetFilterUtil.GROUP_FILTERS; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; /** * @author Pavel Bortnik */ @Service public class LaunchesComparisonContentLoader implements LoadContentStrategy { @Autowired private WidgetContentRepository widgetContentRepository; @Override public Map<String, ?> loadContent(List<String> contentFields, Map<Filter, Sort> filterSortMapping, WidgetOptions widgetOptions, int limit) { validateFilterSortMapping(filterSortMapping); validateContentFields(contentFields); Filter filter = GROUP_FILTERS.apply(filterSortMapping.keySet()); Sort sort = Sort.by(Sort.Order.desc(CRITERIA_START_TIME), Sort.Order.desc(CRITERIA_LAUNCH_NUMBER)); List<ChartStatisticsContent> result = widgetContentRepository.launchesComparisonStatistics(filter, contentFields, sort, limit); return result.isEmpty() ? emptyMap() : singletonMap(RESULT, result); } /** * Mapping should not be empty * * @param filterSortMapping Map of ${@link Filter} for query building as key and ${@link Sort} as value for each filter */ private void validateFilterSortMapping(Map<Filter, Sort> filterSortMapping) { BusinessRule.expect(MapUtils.isNotEmpty(filterSortMapping), equalTo(true)) .verify(ErrorType.BAD_REQUEST_ERROR, "Filter-Sort mapping should not be empty"); } /** * Validate provided content fields. 
* The value of content field should not be empty * All content fields should match the pattern {@link com.epam.ta.reportportal.core.widget.util.ContentFieldPatternConstants#COMBINED_CONTENT_FIELDS_REGEX} * * @param contentFields List of provided content. */ private void validateContentFields(List<String> contentFields) { BusinessRule.expect(CollectionUtils.isNotEmpty(contentFields), equalTo(true)) .verify(ErrorType.BAD_REQUEST_ERROR, "Content fields should not be empty"); BusinessRule.expect(ContentFieldMatcherUtil.match(COMBINED_CONTENT_FIELDS_REGEX, contentFields), equalTo(true)) .verify(ErrorType.BAD_REQUEST_ERROR, "Bad content fields format"); } }
src/main/java/com/epam/ta/reportportal/core/widget/content/loader/LaunchesComparisonContentLoader.java
/* * Copyright 2019 EPAM Systems * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.epam.ta.reportportal.core.widget.content.loader; import com.epam.ta.reportportal.commons.querygen.Filter; import com.epam.ta.reportportal.commons.validation.BusinessRule; import com.epam.ta.reportportal.core.widget.content.LoadContentStrategy; import com.epam.ta.reportportal.core.widget.util.ContentFieldMatcherUtil; import com.epam.ta.reportportal.dao.WidgetContentRepository; import com.epam.ta.reportportal.entity.widget.WidgetOptions; import com.epam.ta.reportportal.entity.widget.content.ChartStatisticsContent; import com.epam.ta.reportportal.ws.model.ErrorType; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Sort; import org.springframework.stereotype.Service; import java.util.List; import java.util.Map; import static com.epam.ta.reportportal.commons.Predicates.equalTo; import static com.epam.ta.reportportal.commons.querygen.constant.GeneralCriteriaConstant.CRITERIA_START_TIME; import static com.epam.ta.reportportal.core.widget.content.constant.ContentLoaderConstants.RESULT; import static com.epam.ta.reportportal.core.widget.util.ContentFieldPatternConstants.COMBINED_CONTENT_FIELDS_REGEX; import static com.epam.ta.reportportal.core.widget.util.WidgetFilterUtil.GROUP_FILTERS; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; /** * @author Pavel Bortnik */ @Service public class LaunchesComparisonContentLoader implements LoadContentStrategy { @Autowired private WidgetContentRepository widgetContentRepository; @Override public Map<String, ?> loadContent(List<String> contentFields, Map<Filter, Sort> filterSortMapping, WidgetOptions widgetOptions, int limit) { validateFilterSortMapping(filterSortMapping); validateContentFields(contentFields); Filter filter = GROUP_FILTERS.apply(filterSortMapping.keySet()); Sort sort = Sort.by(Sort.Direction.DESC, CRITERIA_START_TIME); List<ChartStatisticsContent> result = widgetContentRepository.launchesComparisonStatistics(filter, contentFields, sort, limit); return result.isEmpty() ? emptyMap() : singletonMap(RESULT, result); } /** * Mapping should not be empty * * @param filterSortMapping Map of ${@link Filter} for query building as key and ${@link Sort} as value for each filter */ private void validateFilterSortMapping(Map<Filter, Sort> filterSortMapping) { BusinessRule.expect(MapUtils.isNotEmpty(filterSortMapping), equalTo(true)) .verify(ErrorType.BAD_REQUEST_ERROR, "Filter-Sort mapping should not be empty"); } /** * Validate provided content fields. * The value of content field should not be empty * All content fields should match the pattern {@link com.epam.ta.reportportal.core.widget.util.ContentFieldPatternConstants#COMBINED_CONTENT_FIELDS_REGEX} * * @param contentFields List of provided content. 
*/ private void validateContentFields(List<String> contentFields) { BusinessRule.expect(CollectionUtils.isNotEmpty(contentFields), equalTo(true)) .verify(ErrorType.BAD_REQUEST_ERROR, "Content fields should not be empty"); BusinessRule.expect(ContentFieldMatcherUtil.match(COMBINED_CONTENT_FIELDS_REGEX, contentFields), equalTo(true)) .verify(ErrorType.BAD_REQUEST_ERROR, "Bad content fields format"); } }
Launch number desc sorting added
src/main/java/com/epam/ta/reportportal/core/widget/content/loader/LaunchesComparisonContentLoader.java
Launch number desc sorting added
<ide><path>src/main/java/com/epam/ta/reportportal/core/widget/content/loader/LaunchesComparisonContentLoader.java <ide> <ide> import static com.epam.ta.reportportal.commons.Predicates.equalTo; <ide> import static com.epam.ta.reportportal.commons.querygen.constant.GeneralCriteriaConstant.CRITERIA_START_TIME; <add>import static com.epam.ta.reportportal.commons.querygen.constant.LaunchCriteriaConstant.CRITERIA_LAUNCH_NUMBER; <ide> import static com.epam.ta.reportportal.core.widget.content.constant.ContentLoaderConstants.RESULT; <ide> import static com.epam.ta.reportportal.core.widget.util.ContentFieldPatternConstants.COMBINED_CONTENT_FIELDS_REGEX; <ide> import static com.epam.ta.reportportal.core.widget.util.WidgetFilterUtil.GROUP_FILTERS; <ide> <ide> Filter filter = GROUP_FILTERS.apply(filterSortMapping.keySet()); <ide> <del> Sort sort = Sort.by(Sort.Direction.DESC, CRITERIA_START_TIME); <add> Sort sort = Sort.by(Sort.Order.desc(CRITERIA_START_TIME), Sort.Order.desc(CRITERIA_LAUNCH_NUMBER)); <ide> <ide> List<ChartStatisticsContent> result = widgetContentRepository.launchesComparisonStatistics(filter, contentFields, sort, limit); <ide> return result.isEmpty() ? emptyMap() : singletonMap(RESULT, result);
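The replacement line composes two orderings rather than one: spring-data's Sort.by(Order...) preserves declaration order, so the launch number only decides ties between launches with equal start times. A runnable sketch of that composition (the property names below are placeholders, not the actual values of the CRITERIA_* constants):

import org.springframework.data.domain.Sort;

public class SortCompositionDemo {
    public static void main(String[] args) {
        // The first order is the primary key; the second only breaks ties.
        final Sort sort = Sort.by(Sort.Order.desc("startTime"), Sort.Order.desc("launchNumber"));
        for (final Sort.Order order : sort) {
            System.out.println(order.getProperty() + " " + order.getDirection());
        }
        // Expected output:
        // startTime DESC
        // launchNumber DESC
    }
}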
JavaScript
mit
362b9f5477cbe2667dc1c0805576b9884393890d
0
ereboz/combolock,ereboz/combolock,ereboz/combolock,ereboz/combolock
/** * Created by javaBoxer on 7/23/2017. */ $(document).ready(function(){ //// affixes top nav to be sticky $(".wrapper-subnav").affix({ offset: { top: $('.bg-banner').height() } }); $("#topnavbar").height($(".wrapper-subnav").height() + $(".bg-banner").height()); // $('#topnavbar').affix({ // offset: { // top: $('#banner').height() // } // }); // Add Sale sticker to sale items // Assuming backend is writing the sale price strike through, we can toggle sale sticker in upper left of product. // iterate over each product-wrapper and search for value of retail-price. // if not empty, then it's on sale. Toggle Less class to display sale sticker in upper left of parent wrapper $('.product-wrapper').each(function () { var salePrice = $(this).find('.sale-price'); // target to center price vertically if no retail price var retailValue = $(this).find('.retail-price'); var retailValueText = $(retailValue).text(); if(retailValueText) { $(this).find('.product-sale').addClass("sale-on"); } else { $(this).find('.product-sale').addClass("sale-off"); $(this).find(salePrice).addClass("saleOnly"); // no retail price, so center price vertically } }); // Checkout Page - move the shopping cart section from the bottom of the page to the top // above "Account" var cartDetails = $(".checkout-details .cart-details"); var checkoutAccount = $(".checkout-details .checkout-account"); $(cartDetails).remove(); $(cartDetails).insertBefore(checkoutAccount); }); /** * Created by javaBoxer on 5/5/2017. */ $(window).load(function() { var loc = window.location.href.toLowerCase(); var locPath = window.location.pathname.toLowerCase(); var viewportWidth = $(window).width(); var pageWrapper = ""; // *************** HOME PAGE *************** // if on homepage, and viewport <= 600, find (.best-seller .item) within a .carousel. if not active (2nd item) add // .active class. This will display both rows (all 6 products) for mobile // if (locPath != "/") { // local (there is something in the path. toggle to show effects) if (locPath === "/") { // server (nothing in the path) pageWrapper = $(".carousel"); if (viewportWidth <= 600) { $(pageWrapper).each(function () { var item = $(this).find('.best-sellers .item'); item.addClass('active'); }); // move .wrapper-category (3 links to keyedalike.com, keyeddifferent.com and buildalock.com) // under the best sellers (for mobile) var banners = ('.wrapper-category'); var bestSellers = ('.product-slider-wrapper'); $(banners).detach().appendTo(bestSellers); } } // *************** BILL TO/SHIP TO ADDRESS FORM (from checkout page) *************** // NEED TO MAKE THE ADD NEW SHIPPING, ADD NEW BILLING ADDRESS .container CLASS 50% WIDTH TO REDUCE THE SIZE AND CENTER IT ON SCREEN // FOR DESKTOP > 800PX. // IT DOES NOT HAVE A UNIQUE CLASS AND SETTING WIDTH ON .container AFFECTS EVERY PAGE ON THE SITE.
// READ IN THE URL AND APPLY STYLING ONLY TO THE ADDRESS PAGE for SHIPPING/BILLING(address/detail) // APPLIES TO: // https://combolock.com/address/detail if (loc.indexOf('/address/detail') > -1) { pageWrapper = $(".create-account-page"); $(pageWrapper).addClass("addressMobileWidth"); if (viewportWidth > 800) { $(pageWrapper).removeClass("addressMobileWidth").addClass("addressDesktopWidth"); } $(window).resize(function () { var viewportWidth = $(window).width(); if (viewportWidth < 800) { $(pageWrapper).removeClass("addressDesktopWidth").addClass("addressMobileWidth"); } if (viewportWidth > 800) { $(pageWrapper).removeClass("addressMobileWidth").addClass("addressDesktopWidth"); } }); } // *************** CONTACT US PAGE *************** // Center the page with a left margin class when in Desktop view > 800px if (loc.indexOf('/contactus') > -1) { pageWrapper = $(".contact-us-page"); $(pageWrapper).addClass("contactMobileWidth"); if (viewportWidth > 800) { $(pageWrapper).removeClass("contactMobileWidth").addClass("contactDesktopWidth"); } $(window).resize(function () { var viewportWidth = $(window).width(); if (viewportWidth < 800) { $(pageWrapper).removeClass("contactDesktopWidth").addClass("contactMobileWidth"); } if (viewportWidth > 800) { $(pageWrapper).removeClass("contactMobileWidth").addClass("contactDesktopWidth"); } }); // Replace all occurrences of (required) to * $('.form-label-suffix-required').each(function() { var txt = $(this).html(); txt = txt.replace('(required)','*'); $(this).html(txt); }); } });
dist/js/page.js
/** * Created by javaBoxer on 7/23/2017. */ $(document).ready(function(){ //// affixes top nav to be sticky $(".wrapper-subnav").affix({ offset: { top: $('.bg-banner').height() } }); $("#topnavbar").height($(".wrapper-subnav").height() + $(".bg-banner").height()); // $('#topnavbar').affix({ // offset: { // top: $('#banner').height() // } // }); // Add Sale sticker to sale items // Assuming backend is writing the sale price strike through, we can toggle sale sticker in upper left of product. // iterate over each product-wrapper and search for value of retail-price. // if not empty, then it's on sale. Toggle Less class to display sale sticker in upper left of parent wrapper $('.product-wrapper').each(function () { var salePrice = $(this).find('.sale-price'); // target to center price vertically if no retail price var retailValue = $(this).find('.retail-price'); var retailValueText = $(retailValue).text(); if(retailValueText) { $(this).find('.product-sale').addClass("sale-on"); } else { $(this).find('.product-sale').addClass("sale-off"); $(this).find(salePrice).addClass("saleOnly"); // no retail price, so center price vertically } }); // Checkout Page - move the shopping cart section from the bottom of the page to the top // above "Account" var cartDetails = $(".checkout-details .cart-details"); var checkoutAccount = $(".checkout-details .checkout-account"); $(cartDetails).remove(); $(cartDetails).insertBefore(checkoutAccount); }); /** * Created by javaBoxer on 5/5/2017. */ $(window).load(function() { var loc = window.location.href.toLowerCase(); var locPath = window.location.pathname.toLowerCase(); var viewportWidth = $(window).width(); var pageWrapper = ""; // *************** HOME PAGE *************** // if on homepage, and viewport <= 600, find (.best-seller .item) within a .carousel. if not active (2nd item) add // .active class. This will display both rows (all 6 products) for mobile // if (locPath != "/") { // local (there is something in the path) if (locPath === "/") { // server (nothing in the path) pageWrapper = $(".carousel"); if (viewportWidth <= 600) { $(pageWrapper).each(function () { var item = $(this).find('.best-sellers .item'); item.addClass('active'); }); // move .wrapper-category (3 links to keyedalike.com, keyeddifferent.com and buildalock.com) // under the best sellers (for mobile) var banners = ('.wrapper-category'); var bestSellers = ('.product-slider-wrapper'); $(banners).detach().appendTo(bestSellers); } } // *************** BILL TO/SHIP TO ADDRESS FORM (from checkout page) *************** // NEED TO MAKE THE ADD NEW SHIPPING, ADD NEW BILLING ADDRESS .container CLASS 50% WIDTH TO REDUCE THE SIZE AND CENTER IT ON SCREEN // FOR DESKTOP > 800PX. // IT DOES NOT HAVE A UNIQUE CLASS AND SETTING WIDTH ON .container AFFECTS EVERY PAGE ON THE SITE.
// READ IN THE URL AND APPLY STYLING ONLY TO THE ADDRESS PAGE for SHIPPING/BILLING(address/detail) // APPLIES TO: // https://combolock.com/address/detail if (loc.indexOf('/address/detail') > -1) { pageWrapper = $(".create-account-page"); $(pageWrapper).addClass("addressMobileWidth"); if (viewportWidth > 800) { $(pageWrapper).removeClass("addressMobileWidth").addClass("addressDesktopWidth"); } $(window).resize(function () { var viewportWidth = $(window).width(); if (viewportWidth < 800) { $(pageWrapper).removeClass("addressDesktopWidth").addClass("addressMobileWidth"); } if (viewportWidth > 800) { $(pageWrapper).removeClass("addressMobileWidth").addClass("addressDesktopWidth"); } }); } // *************** CONTACT US PAGE *************** // Center the page with a left margin class when in Desktop view > 800px if (loc.indexOf('/contactus') > -1) { pageWrapper = $(".contact-us-page"); $(pageWrapper).addClass("contactMobileWidth"); if (viewportWidth > 800) { $(pageWrapper).removeClass("contactMobileWidth").addClass("contactDesktopWidth"); } $(window).resize(function () { var viewportWidth = $(window).width(); if (viewportWidth < 800) { $(pageWrapper).removeClass("contactDesktopWidth").addClass("contactMobileWidth"); } if (viewportWidth > 800) { $(pageWrapper).removeClass("contactMobileWidth").addClass("contactDesktopWidth"); } }); // Replace all occurrences of (required) to * $('.form-label-suffix-required').each(function() { var txt = $(this).html(); txt = txt.replace('(required)','*'); $(this).html(txt); }); } });
comments
dist/js/page.js
comments
<ide><path>dist/js/page.js <ide> // if on homepage, and viewport <= 600, find (.best-seller .item) within a .carousel. if not active (2nd item) add <ide> // .active class. This will display both rows (all 6 products) for mobile <ide> <del> // if (locPath != "/") { // local (there is something in the path) <add> // if (locPath != "/") { // local (there is something in the path. toggle to show effects) <ide> if (locPath === "/") { // server (nothing in the path) <ide> pageWrapper = $(".carousel"); <ide>
Java
agpl-3.0
1edb5bc9f38e927a8ba7fe8c8bb74a8f69a2940e
0
podd/podd-redesign,podd/podd-redesign,podd/podd-redesign,podd/podd-redesign
/** * */ package com.github.podd.resources; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.openrdf.OpenRDFException; import org.openrdf.model.Model; import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.impl.ValueFactoryImpl; import org.openrdf.model.vocabulary.OWL; import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; import org.openrdf.repository.RepositoryConnection; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFWriter; import org.openrdf.rio.Rio; import org.restlet.data.MediaType; import org.restlet.data.Status; import org.restlet.representation.ByteArrayRepresentation; import org.restlet.representation.Representation; import org.restlet.representation.Variant; import org.restlet.resource.Get; import org.restlet.resource.Post; import org.restlet.resource.ResourceException; import org.restlet.security.User; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.podd.api.DanglingObjectPolicy; import com.github.podd.api.FileReferenceVerificationPolicy; import com.github.podd.api.UpdatePolicy; import com.github.podd.exception.PoddException; import com.github.podd.exception.UnmanagedArtifactIRIException; import com.github.podd.restlet.PoddAction; import com.github.podd.restlet.RestletUtils; import com.github.podd.utils.FreemarkerUtil; import com.github.podd.utils.InferredOWLOntologyID; import com.github.podd.utils.OntologyUtils; import com.github.podd.utils.PoddObjectLabel; import com.github.podd.utils.PoddRdfConstants; import com.github.podd.utils.PoddWebConstants; /** * * Edit an artifact from PODD. 
* * @author kutila * */ public class EditArtifactResourceImpl extends AbstractPoddResourceImpl { private final Logger log = LoggerFactory.getLogger(this.getClass()); /** Constructor */ public EditArtifactResourceImpl() { super(); } /** * Handle an HTTP POST request submitting RDF data to update an existing artifact */ @Post("rdf|rj|json|ttl") public Representation editArtifactToRdf(final Representation entity, final Variant variant) throws ResourceException { final String artifactUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_IDENTIFIER, true); if(artifactUri == null) { this.log.error("Artifact ID not submitted"); throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact IRI not submitted"); } // Once we find the artifact URI, check authentication for it immediately this.checkAuthentication(PoddAction.ARTIFACT_EDIT, Collections.<URI> singleton(PoddRdfConstants.VF.createURI(artifactUri))); final String versionUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_VERSION_IDENTIFIER, true); if(versionUri == null) { this.log.error("Artifact Version IRI not submitted"); throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact Version IRI not submitted"); } // optional multiple parameter 'objectUri' String[] objectURIStrings = this.getQuery().getValuesArray(PoddWebConstants.KEY_OBJECT_IDENTIFIER, true); // - optional parameter 'isreplace' UpdatePolicy updatePolicy = UpdatePolicy.REPLACE_EXISTING; final String isReplaceStr = this.getQuery().getFirstValue(PoddWebConstants.KEY_EDIT_WITH_REPLACE, true); if(isReplaceStr != null && (Boolean.valueOf(isReplaceStr) == false)) { updatePolicy = UpdatePolicy.MERGE_WITH_EXISTING; } // - optional parameter 'isforce' DanglingObjectPolicy danglingObjectPolicy = DanglingObjectPolicy.REPORT; final String forceStr = this.getQuery().getFirstValue(PoddWebConstants.KEY_EDIT_WITH_FORCE, true); if(forceStr != null && Boolean.valueOf(forceStr)) { danglingObjectPolicy = DanglingObjectPolicy.FORCE_CLEAN; } // - optional parameter 'verifyfilerefs' FileReferenceVerificationPolicy fileRefVerificationPolicy = FileReferenceVerificationPolicy.DO_NOT_VERIFY; final String fileRefVerifyStr = this.getQuery().getFirstValue(PoddWebConstants.KEY_EDIT_VERIFY_FILE_REFERENCES, true); if(fileRefVerifyStr != null && Boolean.valueOf(fileRefVerifyStr)) { fileRefVerificationPolicy = FileReferenceVerificationPolicy.VERIFY; } Collection<URI> objectUris = new ArrayList<URI>(objectURIStrings.length); for(String nextObjectURIString : objectURIStrings) { objectUris.add(PoddRdfConstants.VF.createURI(nextObjectURIString)); } this.log.info("requesting edit artifact ({}): {}, with isReplace {}", variant.getMediaType().getName(), artifactUri, updatePolicy); final User user = this.getRequest().getClientInfo().getUser(); this.log.info("authenticated user: {}", user); // - get input stream with edited RDF content InputStream inputStream = null; try { inputStream = entity.getStream(); } catch(final IOException e) { throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "There was a problem with the input", e); } final RDFFormat inputFormat = Rio.getParserFormatForMIMEType(entity.getMediaType().getName(), RDFFormat.RDFXML); // - prepare response final ByteArrayOutputStream output = new ByteArrayOutputStream(8096); final RDFWriter writer = Rio.createWriter(Rio.getWriterFormatForMIMEType(variant.getMediaType().getName(), RDFFormat.RDFXML), output); // - do the artifact update try { final InferredOWLOntologyID ontologyID = 
this.getPoddArtifactManager().updateArtifact(PoddRdfConstants.VF.createURI(artifactUri), PoddRdfConstants.VF.createURI(versionUri), objectUris, inputStream, inputFormat, updatePolicy, danglingObjectPolicy, fileRefVerificationPolicy); // TODO - send detailed errors for display where possible // FIXME Change response format so that it does not resemble an empty OWL Ontology // - write the artifact ID into response writer.startRDF(); OntologyUtils.ontologyIDsToHandler(Arrays.asList(ontologyID), writer); writer.endRDF(); } catch(final UnmanagedArtifactIRIException e) { throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND, "Could not find the given artifact", e); } catch(final PoddException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not create response", e); } catch(OpenRDFException | IOException | OWLException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not create response"); } return new ByteArrayRepresentation(output.toByteArray(), MediaType.valueOf(writer.getRDFFormat() .getDefaultMIMEType())); } /** * View the edit artifact page in HTML */ @Get("html") public Representation getEditArtifactHtml(final Representation entity) throws ResourceException { this.log.info("getEditArtifactHtml"); // the artifact in which editing is requested final String artifactUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_IDENTIFIER); if(artifactUri == null) { throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact ID not submitted"); } // Podd object to be edited. NULL indicates top object is to be edited. final String objectUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_OBJECT_IDENTIFIER); this.log.info("requesting to edit artifact (HTML): {}, {}", artifactUri, objectUri); this.checkAuthentication(PoddAction.ARTIFACT_EDIT, Collections.singleton(PoddRdfConstants.VF.createURI(artifactUri))); final User user = this.getRequest().getClientInfo().getUser(); this.log.info("authenticated user: {}", user); // validate artifact exists InferredOWLOntologyID ontologyID; try { ontologyID = this.getPoddArtifactManager().getArtifactByIRI(IRI.create(artifactUri)); } catch(final UnmanagedArtifactIRIException e) { throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND, "Could not find the given artifact", e); } final Map<String, Object> dataModel = this.populateDataModelForGet(ontologyID, objectUri); return RestletUtils.getHtmlRepresentation(PoddWebConstants.PROPERTY_TEMPLATE_BASE, dataModel, MediaType.TEXT_HTML, this.getPoddApplication().getTemplateConfiguration()); } /** * Request for RDF data for building the "edit object" page. */ @Get("rdf|rj|json|ttl") public Representation getEditArtifactRdf(final Representation entity, final Variant variant) throws ResourceException { // the artifact in which editing is requested final String artifactUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_IDENTIFIER); if(artifactUri == null) { throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact ID not submitted"); } // Podd object to be edited. NULL indicates top object is to be edited. 
final String objectUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_OBJECT_IDENTIFIER); this.log.info("requesting to populate edit artifact ({}): {}, ", variant.getMediaType().getName(), artifactUri); this.checkAuthentication(PoddAction.ARTIFACT_EDIT, Collections.singleton(PoddRdfConstants.VF.createURI(artifactUri))); final User user = this.getRequest().getClientInfo().getUser(); this.log.info("authenticated user: {}", user); // validate artifact exists InferredOWLOntologyID ontologyID; try { ontologyID = this.getPoddArtifactManager().getArtifactByIRI(IRI.create(artifactUri)); } catch(final UnmanagedArtifactIRIException e) { throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND, "Could not find the given artifact", e); } final Model modelForEdit = this.getModelForEdit(ontologyID, objectUri); // - prepare response final ByteArrayOutputStream output = new ByteArrayOutputStream(8096); final RDFWriter writer = Rio.createWriter(Rio.getWriterFormatForMIMEType(variant.getMediaType().getName(), RDFFormat.RDFXML), output); try { writer.startRDF(); for(final Statement st : modelForEdit) { writer.handleStatement(st); } writer.endRDF(); } catch(final OpenRDFException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not create response", e); } return new ByteArrayRepresentation(output.toByteArray(), MediaType.valueOf(writer.getRDFFormat() .getDefaultMIMEType())); } /** * Get a {@link Model} containing all data and meta-data necessary to display the "edit object" * page. * * @param ontologyID * @param objectToEdit * @return A Model containing all necessary statements */ private Model getModelForEdit(final InferredOWLOntologyID ontologyID, final String objectToEdit) { RepositoryConnection conn = null; try { conn = this.getPoddRepositoryManager().getRepository().getConnection(); conn.begin(); URI objectUri; if(objectToEdit == null) { objectUri = this.getPoddSesameManager().getTopObjectIRI(ontologyID, conn); } else { objectUri = ValueFactoryImpl.getInstance().createURI(objectToEdit); } if(objectUri == null) { throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Did not recognise the request"); } final List<URI> objectTypes = this.getPoddSesameManager().getObjectTypes(ontologyID, objectUri, conn); if(objectTypes == null || objectTypes.isEmpty()) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not determine type of object"); } return this.getPoddSesameManager().getObjectDetailsForEdit(ontologyID, objectUri, conn); } catch(final OpenRDFException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Failed to populate data model"); } finally { if(conn != null) { try { // This is a Get request, therefore nothing to commit conn.rollback(); conn.close(); } catch(final OpenRDFException e) { this.log.error("Failed to close RepositoryConnection", e); // Should we do anything other than log an error? } } } } /** * Internal method to populate the Freemarker Data Model for Get request * * @param ontologyID * The Artifact to be edited * @param objectToEdit * The specific PODD object to edit. 
* @return The populated data model */ private Map<String, Object> populateDataModelForGet(final InferredOWLOntologyID ontologyID, final String objectToEdit) { final Map<String, Object> dataModel = RestletUtils.getBaseDataModel(this.getRequest()); dataModel.put("contentTemplate", "modify_object.html.ftl"); dataModel.put("pageTitle", "Edit Artifact"); // add required constants and methods to data model dataModel.put("RDFS_LABEL", RDFS.LABEL); dataModel.put("RDFS_RANGE", RDFS.RANGE); dataModel.put("RDF_TYPE", RDF.TYPE); dataModel.put("OWL_OBJECT_PROPERTY", OWL.OBJECTPROPERTY); dataModel.put("OWL_DATA_PROPERTY", OWL.DATATYPEPROPERTY); dataModel.put("OWL_ANNOTATION_PROPERTY", OWL.ANNOTATIONPROPERTY); // dataModel.put("OWL_MAX_CARDINALITY", PoddRdfConstants.OWL_MAX_QUALIFIED_CARDINALITY); // dataModel.put("OWL_MIN_CARDINALITY", PoddRdfConstants.OWL_MIN_QUALIFIED_CARDINALITY); // dataModel.put("OWL_CARDINALITY", PoddRdfConstants.OWL_QUALIFIED_CARDINALITY); dataModel.put("PODD_BASE_HAS_CARDINALITY", PoddRdfConstants.PODD_BASE_HAS_CARDINALITY); dataModel.put("PODD_BASE_DISPLAY_TYPE", PoddRdfConstants.PODD_BASE_DISPLAY_TYPE); dataModel.put("PODD_BASE_HAS_WEIGHT", PoddRdfConstants.PODD_BASE_WEIGHT); dataModel.put("util", new FreemarkerUtil()); // Defaults to false. Set to true if multiple objects are being edited concurrently // TODO: investigate how to use this final boolean initialized = false; RepositoryConnection conn = null; try { conn = this.getPoddRepositoryManager().getRepository().getConnection(); conn.begin(); URI objectUri; if(objectToEdit == null) { objectUri = this.getPoddSesameManager().getTopObjectIRI(ontologyID, conn); } else { objectUri = ValueFactoryImpl.getInstance().createURI(objectToEdit); } final List<URI> objectTypes = this.getPoddSesameManager().getObjectTypes(ontologyID, objectUri, conn); if(objectTypes == null || objectTypes.isEmpty()) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not determine type of object"); } // Get label for the object type final PoddObjectLabel objectType = this.getPoddSesameManager().getObjectLabel(ontologyID, objectTypes.get(0), conn); if(objectType == null || objectType.getLabel() == null) { dataModel.put("objectType", objectTypes.get(0)); } else { dataModel.put("objectType", objectType.getLabel()); } final PoddObjectLabel theObject = this.getPoddSesameManager().getObjectLabel(ontologyID, objectUri, conn); dataModel.put("poddObject", theObject); dataModel.put("stopRefreshKey", "Stop Refresh Key"); } catch(final OpenRDFException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Failed to populate data model"); } finally { if(conn != null) { try { if(conn.isActive()) { // This is a Get request, therefore nothing to commit conn.rollback(); } } catch(final OpenRDFException e) { this.log.error("Failed to rollback RepositoryConnection", e); // Should we do anything other than log an error? } finally { try { if(conn.isOpen()) { conn.close(); } } catch(final OpenRDFException e) { this.log.error("Failed to close RepositoryConnection", e); // Should we do anything other than log an error? } } } } dataModel.put("initialized", initialized); return dataModel; } }
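The finally block in populateDataModelForGet above releases the connection in two guarded stages: roll back only while a transaction is active (a GET has nothing to commit), then close only if the connection is still open, each stage in its own try so a rollback failure cannot leak the connection. A stripped-down sketch of that shape; the Conn interface below is a hypothetical stand-in for RepositoryConnection, used only to keep the example self-contained:

public class GuardedCleanupDemo {

    // Hypothetical stand-in for an OpenRDF RepositoryConnection.
    interface Conn {
        boolean isActive();
        boolean isOpen();
        void rollback() throws Exception;
        void close() throws Exception;
    }

    static void release(final Conn conn) {
        if (conn == null) {
            return;
        }
        try {
            if (conn.isActive()) {
                // Read-only request: discard the transaction instead of committing.
                conn.rollback();
            }
        } catch (final Exception e) {
            System.err.println("Failed to rollback: " + e);
        } finally {
            try {
                if (conn.isOpen()) {
                    conn.close();
                }
            } catch (final Exception e) {
                System.err.println("Failed to close: " + e);
            }
        }
    }

    public static void main(String[] args) {
        release(new Conn() {
            public boolean isActive() { return true; }
            public boolean isOpen() { return true; }
            public void rollback() { System.out.println("rolled back"); }
            public void close() { System.out.println("closed"); }
        });
    }
}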
webapp/lib/src/main/java/com/github/podd/resources/EditArtifactResourceImpl.java
/** * */ package com.github.podd.resources; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.openrdf.OpenRDFException; import org.openrdf.model.Model; import org.openrdf.model.Statement; import org.openrdf.model.URI; import org.openrdf.model.impl.ValueFactoryImpl; import org.openrdf.model.vocabulary.OWL; import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; import org.openrdf.repository.RepositoryConnection; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFWriter; import org.openrdf.rio.Rio; import org.restlet.data.MediaType; import org.restlet.data.Status; import org.restlet.representation.ByteArrayRepresentation; import org.restlet.representation.Representation; import org.restlet.representation.Variant; import org.restlet.resource.Get; import org.restlet.resource.Post; import org.restlet.resource.ResourceException; import org.restlet.security.User; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.podd.api.DanglingObjectPolicy; import com.github.podd.api.FileReferenceVerificationPolicy; import com.github.podd.api.UpdatePolicy; import com.github.podd.exception.PoddException; import com.github.podd.exception.UnmanagedArtifactIRIException; import com.github.podd.restlet.PoddAction; import com.github.podd.restlet.RestletUtils; import com.github.podd.utils.FreemarkerUtil; import com.github.podd.utils.InferredOWLOntologyID; import com.github.podd.utils.OntologyUtils; import com.github.podd.utils.PoddObjectLabel; import com.github.podd.utils.PoddRdfConstants; import com.github.podd.utils.PoddWebConstants; /** * * Edit an artifact from PODD. 
* * @author kutila * */ public class EditArtifactResourceImpl extends AbstractPoddResourceImpl { private final Logger log = LoggerFactory.getLogger(this.getClass()); /** Constructor */ public EditArtifactResourceImpl() { super(); } /** * Handle an HTTP POST request submitting RDF data to update an existing artifact */ @Post("rdf|rj|json|ttl") public Representation editArtifactToRdf(final Representation entity, final Variant variant) throws ResourceException { final String artifactUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_IDENTIFIER, true); if(artifactUri == null) { this.log.error("Artifact ID not submitted"); throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact IRI not submitted"); } // Once we find the artifact URI, check authentication for it immediately this.checkAuthentication(PoddAction.ARTIFACT_EDIT, Collections.<URI> singleton(PoddRdfConstants.VF.createURI(artifactUri))); final String versionUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_VERSION_IDENTIFIER, true); if(versionUri == null) { this.log.error("Artifact Version IRI not submitted"); throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact Version IRI not submitted"); } // optional multiple parameter 'objectUri' String[] objectURIStrings = this.getQuery().getValuesArray(PoddWebConstants.KEY_OBJECT_IDENTIFIER, true); // - optional parameter 'isreplace' UpdatePolicy updatePolicy = UpdatePolicy.REPLACE_EXISTING; final String isReplaceStr = this.getQuery().getFirstValue(PoddWebConstants.KEY_EDIT_WITH_REPLACE, true); if(isReplaceStr != null && (Boolean.valueOf(isReplaceStr) == false)) { updatePolicy = UpdatePolicy.MERGE_WITH_EXISTING; } // - optional parameter 'isforce' DanglingObjectPolicy danglingObjectPolicy = DanglingObjectPolicy.REPORT; final String forceStr = this.getQuery().getFirstValue(PoddWebConstants.KEY_EDIT_WITH_FORCE, true); if(forceStr != null && Boolean.valueOf(forceStr)) { danglingObjectPolicy = DanglingObjectPolicy.FORCE_CLEAN; } // - optional parameter 'verifyfilerefs' FileReferenceVerificationPolicy fileRefVerificationPolicy = FileReferenceVerificationPolicy.DO_NOT_VERIFY; final String fileRefVerifyStr = this.getQuery().getFirstValue(PoddWebConstants.KEY_EDIT_VERIFY_FILE_REFERENCES, true); if(fileRefVerifyStr != null && Boolean.valueOf(fileRefVerifyStr)) { fileRefVerificationPolicy = FileReferenceVerificationPolicy.VERIFY; } Collection<URI> objectUris = new ArrayList<URI>(objectURIStrings.length); for(String nextObjectURIString : objectURIStrings) { objectUris.add(PoddRdfConstants.VF.createURI(nextObjectURIString)); } this.log.info("requesting edit artifact ({}): {}, with isReplace {}", variant.getMediaType().getName(), artifactUri, updatePolicy); final User user = this.getRequest().getClientInfo().getUser(); this.log.info("authenticated user: {}", user); // - get input stream with edited RDF content InputStream inputStream = null; try { inputStream = entity.getStream(); } catch(final IOException e) { throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "There was a problem with the input", e); } final RDFFormat inputFormat = Rio.getParserFormatForMIMEType(entity.getMediaType().getName(), RDFFormat.RDFXML); // - prepare response final ByteArrayOutputStream output = new ByteArrayOutputStream(8096); final RDFWriter writer = Rio.createWriter(Rio.getWriterFormatForMIMEType(variant.getMediaType().getName(), RDFFormat.RDFXML), output); // - do the artifact update try { final InferredOWLOntologyID ontologyID = 
this.getPoddArtifactManager().updateArtifact(PoddRdfConstants.VF.createURI(artifactUri), PoddRdfConstants.VF.createURI(versionUri), objectUris, inputStream, inputFormat, updatePolicy, danglingObjectPolicy, fileRefVerificationPolicy); // TODO - send detailed errors for display where possible // FIXME Change response format so that it does not resemble an empty OWL Ontology // - write the artifact ID into response writer.startRDF(); OntologyUtils.ontologyIDsToHandler(Arrays.asList(ontologyID), writer); writer.endRDF(); } catch(final UnmanagedArtifactIRIException e) { throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND, "Could not find the given artifact", e); } catch(final PoddException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not create response", e); } catch(OpenRDFException | IOException | OWLException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not create response"); } return new ByteArrayRepresentation(output.toByteArray(), MediaType.valueOf(writer.getRDFFormat() .getDefaultMIMEType())); } /** * View the edit artifact page in HTML */ @Get("html") public Representation getEditArtifactHtml(final Representation entity) throws ResourceException { this.log.info("getEditArtifactHtml"); // the artifact in which editing is requested final String artifactUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_IDENTIFIER); if(artifactUri == null) { throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact ID not submitted"); } // Podd object to be edited. NULL indicates top object is to be edited. final String objectUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_OBJECT_IDENTIFIER); this.log.info("requesting to edit artifact (HTML): {}, {}", artifactUri, objectUri); this.checkAuthentication(PoddAction.ARTIFACT_EDIT, Collections.singleton(PoddRdfConstants.VF.createURI(artifactUri))); final User user = this.getRequest().getClientInfo().getUser(); this.log.info("authenticated user: {}", user); // validate artifact exists InferredOWLOntologyID ontologyID; try { ontologyID = this.getPoddArtifactManager().getArtifactByIRI(IRI.create(artifactUri)); } catch(final UnmanagedArtifactIRIException e) { throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND, "Could not find the given artifact", e); } final Map<String, Object> dataModel = this.populateDataModelForGet(ontologyID, objectUri); return RestletUtils.getHtmlRepresentation(PoddWebConstants.PROPERTY_TEMPLATE_BASE, dataModel, MediaType.TEXT_HTML, this.getPoddApplication().getTemplateConfiguration()); } /** * Request for RDF data for building the "edit object" page. */ @Get("rdf|rj|json|ttl") public Representation getEditArtifactRdf(final Representation entity, final Variant variant) throws ResourceException { // the artifact in which editing is requested final String artifactUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_ARTIFACT_IDENTIFIER); if(artifactUri == null) { throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Artifact ID not submitted"); } // Podd object to be edited. NULL indicates top object is to be edited. 
final String objectUri = this.getQuery().getFirstValue(PoddWebConstants.KEY_OBJECT_IDENTIFIER); this.log.info("requesting to populate edit artifact ({}): {}, ", variant.getMediaType().getName(), artifactUri); this.checkAuthentication(PoddAction.ARTIFACT_EDIT, Collections.singleton(PoddRdfConstants.VF.createURI(artifactUri))); final User user = this.getRequest().getClientInfo().getUser(); this.log.info("authenticated user: {}", user); // validate artifact exists InferredOWLOntologyID ontologyID; try { ontologyID = this.getPoddArtifactManager().getArtifactByIRI(IRI.create(artifactUri)); } catch(final UnmanagedArtifactIRIException e) { throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND, "Could not find the given artifact", e); } final Model modelForEdit = this.getModelForEdit(ontologyID, objectUri); // - prepare response final ByteArrayOutputStream output = new ByteArrayOutputStream(8096); final RDFWriter writer = Rio.createWriter(Rio.getWriterFormatForMIMEType(variant.getMediaType().getName(), RDFFormat.RDFXML), output); try { writer.startRDF(); for(final Statement st : modelForEdit) { writer.handleStatement(st); } writer.endRDF(); } catch(final OpenRDFException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not create response", e); } return new ByteArrayRepresentation(output.toByteArray(), MediaType.valueOf(writer.getRDFFormat() .getDefaultMIMEType())); } /** * Get a {@link Model} containing all data and meta-data necessary to display the "edit object" * page. * * @param ontologyID * @param objectToEdit * @return A Model containing all necessary statements */ private Model getModelForEdit(final InferredOWLOntologyID ontologyID, final String objectToEdit) { RepositoryConnection conn = null; try { conn = this.getPoddRepositoryManager().getRepository().getConnection(); conn.begin(); URI objectUri; if(objectToEdit == null) { objectUri = this.getPoddSesameManager().getTopObjectIRI(ontologyID, conn); } else { objectUri = ValueFactoryImpl.getInstance().createURI(objectToEdit); } final List<URI> objectTypes = this.getPoddSesameManager().getObjectTypes(ontologyID, objectUri, conn); if(objectTypes == null || objectTypes.isEmpty()) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not determine type of object"); } return this.getPoddSesameManager().getObjectDetailsForEdit(ontologyID, objectUri, conn); } catch(final OpenRDFException e) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Failed to populate data model"); } finally { if(conn != null) { try { // This is a Get request, therefore nothing to commit conn.rollback(); conn.close(); } catch(final OpenRDFException e) { this.log.error("Failed to close RepositoryConnection", e); // Should we do anything other than log an error? } } } } /** * Internal method to populate the Freemarker Data Model for Get request * * @param ontologyID * The Artifact to be edited * @param objectToEdit * The specific PODD object to edit. 
* @return The populated data model */ private Map<String, Object> populateDataModelForGet(final InferredOWLOntologyID ontologyID, final String objectToEdit) { final Map<String, Object> dataModel = RestletUtils.getBaseDataModel(this.getRequest()); dataModel.put("contentTemplate", "modify_object.html.ftl"); dataModel.put("pageTitle", "Edit Artifact"); // add required constants and methods to data model dataModel.put("RDFS_LABEL", RDFS.LABEL); dataModel.put("RDFS_RANGE", RDFS.RANGE); dataModel.put("RDF_TYPE", RDF.TYPE); dataModel.put("OWL_OBJECT_PROPERTY", OWL.OBJECTPROPERTY); dataModel.put("OWL_DATA_PROPERTY", OWL.DATATYPEPROPERTY); dataModel.put("OWL_ANNOTATION_PROPERTY", OWL.ANNOTATIONPROPERTY); // dataModel.put("OWL_MAX_CARDINALITY", PoddRdfConstants.OWL_MAX_QUALIFIED_CARDINALITY); // dataModel.put("OWL_MIN_CARDINALITY", PoddRdfConstants.OWL_MIN_QUALIFIED_CARDINALITY); // dataModel.put("OWL_CARDINALITY", PoddRdfConstants.OWL_QUALIFIED_CARDINALITY); dataModel.put("PODD_BASE_HAS_CARDINALITY", PoddRdfConstants.PODD_BASE_HAS_CARDINALITY); dataModel.put("PODD_BASE_DISPLAY_TYPE", PoddRdfConstants.PODD_BASE_DISPLAY_TYPE); dataModel.put("PODD_BASE_HAS_WEIGHT", PoddRdfConstants.PODD_BASE_WEIGHT); dataModel.put("util", new FreemarkerUtil()); // Defaults to false. Set to true if multiple objects are being edited concurrently // TODO: investigate how to use this final boolean initialized = false; RepositoryConnection conn = null; try { conn = this.getPoddRepositoryManager().getRepository().getConnection(); conn.begin(); URI objectUri; if(objectToEdit == null) { objectUri = this.getPoddSesameManager().getTopObjectIRI(ontologyID, conn); } else { objectUri = ValueFactoryImpl.getInstance().createURI(objectToEdit); } final List<URI> objectTypes = this.getPoddSesameManager().getObjectTypes(ontologyID, objectUri, conn); if(objectTypes == null || objectTypes.isEmpty()) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Could not determine type of object"); } // Get label for the object type final PoddObjectLabel objectType = this.getPoddSesameManager().getObjectLabel(ontologyID, objectTypes.get(0), conn); if(objectType == null || objectType.getLabel() == null) { dataModel.put("objectType", objectTypes.get(0)); } else { dataModel.put("objectType", objectType.getLabel()); } final PoddObjectLabel theObject = this.getPoddSesameManager().getObjectLabel(ontologyID, objectUri, conn); dataModel.put("poddObject", theObject); dataModel.put("stopRefreshKey", "Stop Refresh Key"); } catch(final OpenRDFException e) // should be OpenRDFException { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Failed to populate data model"); } finally { if(conn != null) { try { if(conn.isActive()) { // This is a Get request, therefore nothing to commit conn.rollback(); } } catch(final OpenRDFException e) { this.log.error("Failed to rollback RepositoryConnection", e); // Should we do anything other than log an error? } finally { try { if(conn.isOpen()) { conn.close(); } } catch(final OpenRDFException e) { this.log.error("Failed to close RepositoryConnection", e); // Should we do anything other than log an error? } } } } dataModel.put("initialized", initialized); return dataModel; } }
Add check to verify that the edited object in EditArtifactResource was accessible
webapp/lib/src/main/java/com/github/podd/resources/EditArtifactResourceImpl.java
Add check to verify that the edited object in EditArtifactResource was accessible
<ide><path>webapp/lib/src/main/java/com/github/podd/resources/EditArtifactResourceImpl.java <ide> objectUri = ValueFactoryImpl.getInstance().createURI(objectToEdit); <ide> } <ide> <add> if(objectUri == null) <add> { <add> throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Did not recognise the request"); <add> } <add> <ide> final List<URI> objectTypes = this.getPoddSesameManager().getObjectTypes(ontologyID, objectUri, conn); <ide> if(objectTypes == null || objectTypes.isEmpty()) <ide> {
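The guard added in this commit rejects a null objectUri with CLIENT_ERROR_BAD_REQUEST before getObjectTypes(...) can dereference it, so a malformed request fails as a client error instead of surfacing later as a server-side NullPointerException. Below is a minimal, dependency-free sketch of the same fail-fast shape; all names in it are illustrative, not from the PODD codebase.

// Illustrative sketch only: reproduces the fail-fast guard from the commit
// above without the Restlet/Sesame dependencies. Names are hypothetical.
public final class FailFastGuardSketch {

    // Stand-in for getTopObjectIRI()/createURI(); may yield null on bad input.
    static String resolveObjectUri(String requested, String topObjectUri) {
        return requested != null ? requested : topObjectUri;
    }

    static String requireResolved(String uri) {
        if (uri == null) {
            // Plays the role of the new CLIENT_ERROR_BAD_REQUEST branch.
            throw new IllegalArgumentException("Did not recognise the request");
        }
        return uri;
    }

    public static void main(String[] args) {
        System.out.println(requireResolved(resolveObjectUri(null, "urn:podd:top")));
        try {
            requireResolved(resolveObjectUri(null, null));
        } catch (IllegalArgumentException expected) {
            System.out.println("rejected: " + expected.getMessage());
        }
    }
}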
Java
apache-2.0
error: pathspec 'apps/openstacktelemetry/app/src/test/java/org/onosproject/openstacktelemetry/impl/PrometheusTelemetryManagerTest.java' did not match any file(s) known to git
7f7c43b621948f0da6e501214c1bb9925bd05ece
1
gkatsikas/onos,opennetworkinglab/onos,kuujo/onos,oplinkoms/onos,opennetworkinglab/onos,oplinkoms/onos,kuujo/onos,oplinkoms/onos,oplinkoms/onos,opennetworkinglab/onos,gkatsikas/onos,oplinkoms/onos,oplinkoms/onos,gkatsikas/onos,opennetworkinglab/onos,gkatsikas/onos,kuujo/onos,kuujo/onos,gkatsikas/onos,kuujo/onos,oplinkoms/onos,gkatsikas/onos,kuujo/onos,kuujo/onos,opennetworkinglab/onos,opennetworkinglab/onos
/* * Copyright 2018-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.openstacktelemetry.impl; import org.junit.Test; import org.onlab.junit.TestUtils; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * Unit tests for prometheus telemetry manager. */ public final class PrometheusTelemetryManagerTest { private PrometheusTelemetryManager manager; private OpenstackTelemetryServiceAdapter telemetryService = new OpenstackTelemetryServiceAdapter(); /** * Tests app activation and deactivation. */ @Test public void testActivateDeactivate() { manager = new PrometheusTelemetryManager(); TestUtils.setField(manager, "openstackTelemetryService", telemetryService); manager.activate(); assertTrue(telemetryService.services.contains(manager)); manager.deactivate(); assertFalse(telemetryService.services.contains(manager)); } }
apps/openstacktelemetry/app/src/test/java/org/onosproject/openstacktelemetry/impl/PrometheusTelemetryManagerTest.java
Add prometheus telemetry manager unit test Change-Id: I45d7776a7ec945bf0d16e2d4a2889d47a65723d0
apps/openstacktelemetry/app/src/test/java/org/onosproject/openstacktelemetry/impl/PrometheusTelemetryManagerTest.java
Add prometheus telemetry manager unit test
<ide><path>apps/openstacktelemetry/app/src/test/java/org/onosproject/openstacktelemetry/impl/PrometheusTelemetryManagerTest.java <add>/* <add> * Copyright 2018-present Open Networking Foundation <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> */ <add>package org.onosproject.openstacktelemetry.impl; <add> <add>import org.junit.Test; <add>import org.onlab.junit.TestUtils; <add> <add>import static org.junit.Assert.assertFalse; <add>import static org.junit.Assert.assertTrue; <add> <add>/** <add> * Unit tests for prometheus telemetry manager. <add> */ <add>public final class PrometheusTelemetryManagerTest { <add> <add> private PrometheusTelemetryManager manager; <add> private OpenstackTelemetryServiceAdapter telemetryService = <add> new OpenstackTelemetryServiceAdapter(); <add> <add> /** <add> * Tests app activation and deactivation. <add> */ <add> @Test <add> public void testActivateDeactivate() { <add> manager = new PrometheusTelemetryManager(); <add> <add> TestUtils.setField(manager, "openstackTelemetryService", telemetryService); <add> <add> manager.activate(); <add> <add> assertTrue(telemetryService.services.contains(manager)); <add> <add> manager.deactivate(); <add> <add> assertFalse(telemetryService.services.contains(manager)); <add> } <add>}
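The test above injects a fake service into a private field via org.onlab.junit.TestUtils.setField rather than relying on OSGi dependency injection, which lets the manager be exercised as a plain object. As a hedged sketch, such a setField helper can be built from a few lines of core reflection; this is one plausible shape written for illustration, not the actual onlab utility.

import java.lang.reflect.Field;

// One plausible implementation of a test-only setField helper, written from
// scratch for illustration; it is not the real org.onlab.junit.TestUtils.
public final class ReflectionSetFieldSketch {

    static void setField(Object target, String name, Object value) throws Exception {
        Field f = target.getClass().getDeclaredField(name);
        f.setAccessible(true); // bypass 'private' for the duration of the test
        f.set(target, value);
    }

    static class Manager {
        private Runnable service; // normally wired by the framework at runtime
        void activate() { service.run(); }
    }

    public static void main(String[] args) throws Exception {
        Manager m = new Manager();
        setField(m, "service", (Runnable) () -> System.out.println("fake service invoked"));
        m.activate(); // prints via the injected fake, no framework required
    }
}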
Java
mit
error: pathspec 'src/main/java/dtprogrammer/github/io/ds/MaxPQ.java' did not match any file(s) known to git
4acb826e4632566652ddf0eda1ad8d3d13fd7519
1
dtprogrammer/leetcode-solutions
package dtprogrammer.github.io.ds; import java.util.Comparator; import java.util.Iterator; import java.util.NoSuchElementException; /** * Max - Heap or Priority queue implementation * Reference: https://algs4.cs.princeton.edu/code/edu/princeton/cs/algs4/MaxPQ.java.html * * @param <Key> */ public class MaxPQ<Key> implements Iterable<Key> { private Key[] pq; private Comparator<Key> comparator; private int size; public MaxPQ() { this(1); } public MaxPQ(int initCapacity) { pq = (Key[]) new Object[initCapacity + 1]; size = 0; } public MaxPQ(Comparator<Key> comparator) { this(comparator, 1); } public MaxPQ(Comparator<Key> comparator, int initCapacity) { this.comparator = comparator; pq = (Key[]) new Object[initCapacity + 1]; size = 0; } public MaxPQ(Key[] keys) { int n = keys.length; pq = (Key[]) new Object[n + 1]; for (int i = 0; i < n; i++) { pq[i + 1] = keys[i]; } for (int i = n / 2; i >= 1; i--) { sink(i); } isMaxHeap(); } public int size() { return size; } public boolean isEmpty() { return size == 0; } /** * Adds a new key to this priority queue. * * @param x the new key to add to this priority queue */ public void insert(Key x) { if (size == pq.length - 1) { resize(2 * pq.length); } pq[++size] = x; swim(size); isMaxHeap(); } /** * Removes and returns a largest key on this priority queue. * * @return a largest key on this priority queue * @throws NoSuchElementException if this priority queue is empty */ public Key delMax() throws NoSuchElementException { if (isEmpty()) { throw new NoSuchElementException(); } Key deleted = pq[1]; exch(1, size); size--; sink(1); pq[size + 1] = null; if (size > 0 && size == (pq.length - 1) / 4) { resize(pq.length / 2); } isMaxHeap(); return deleted; } private void resize(int newCapacity) { if (newCapacity <= size) { throw new IllegalArgumentException(); } Key[] resized = (Key[]) new Object[newCapacity]; for (int i = 1; i <= size; i++) { resized[i] = pq[i]; } pq = resized; } private void isMaxHeap() { if (!isMaxHeap(1)) { throw new IllegalStateException(); } } private boolean isMaxHeap(int k) { if (k > size) { return true; } int left = 2 * k; int right = 2 * k + 1; if (left <= size && less(k, left)) return false; if (right <= size && less(k, right)) return false; return isMaxHeap(left) && isMaxHeap(right); } private void sink(int k) { while (2 * k <= size) { int j = 2 * k; if (j < size && less(j, j + 1)) { j++; } if (less(j, k)) { break; } exch(j, k); k = j; } } private void swim(int k) { while (k > 1 && less(k / 2, k)) { exch(k / 2, k); k = k / 2; } } private void exch(int i, int j) { Key temp = pq[i]; pq[i] = pq[j]; pq[j] = temp; } private boolean less(int lessIndex, int moreIndex) { if (comparator == null) { return ((Comparable<Key>) pq[lessIndex]).compareTo(pq[moreIndex]) < 0; } else { return comparator.compare(pq[lessIndex], pq[moreIndex]) < 0; } } @Override public Iterator<Key> iterator() { return new MaxHeapIterator(); } private class MaxHeapIterator implements Iterator<Key> { private MaxPQ<Key> copy; public MaxHeapIterator() { if (comparator == null) { copy = new MaxPQ<>(size()); } else { copy = new MaxPQ<>(comparator, size()); } for (int i = 1; i <= size; i++) { copy.insert(pq[i]); } } @Override public boolean hasNext() { return !copy.isEmpty(); } public void remove() { throw new UnsupportedOperationException(); } @Override public Key next() { if (!hasNext()) { throw new NoSuchElementException(); } return copy.delMax(); } } public static void main(String[] args) { MaxPQ<Integer> queue = new MaxPQ<>(); queue.insert(23); queue.insert(43); 
queue.insert(67); queue.insert(99); queue.insert(24); queue.insert(27); queue.insert(23); System.out.println("Current size of PQ is: " + queue.size()); System.out.println("In decreasing order is: "); for (Integer value : queue) { System.out.println(value); } System.out.println("Deleting max: " + queue.delMax()); System.out.println("Current size of PQ is: " + queue.size()); System.out.println("In decreasing order is: "); for (Integer value : queue) { System.out.println(value); } System.out.println("inserting"); queue.insert(53); System.out.println("Current size of PQ is: " + queue.size()); System.out.println("In decreasing order is: "); for (Integer value : queue) { System.out.println(value); } } }
src/main/java/dtprogrammer/github/io/ds/MaxPQ.java
Max PQ or Heap
src/main/java/dtprogrammer/github/io/ds/MaxPQ.java
Max PQ or Heap
<ide><path>src/main/java/dtprogrammer/github/io/ds/MaxPQ.java <add>package dtprogrammer.github.io.ds; <add> <add>import java.util.Comparator; <add>import java.util.Iterator; <add>import java.util.NoSuchElementException; <add> <add>/** <add> * Max - Heap or Priority queue implementation <add> * Reference: https://algs4.cs.princeton.edu/code/edu/princeton/cs/algs4/MaxPQ.java.html <add> * <add> * @param <Key> <add> */ <add>public class MaxPQ<Key> implements Iterable<Key> { <add> <add> private Key[] pq; <add> private Comparator<Key> comparator; <add> private int size; <add> <add> public MaxPQ() { <add> this(1); <add> } <add> <add> public MaxPQ(int initCapacity) { <add> pq = (Key[]) new Object[initCapacity + 1]; <add> size = 0; <add> } <add> <add> public MaxPQ(Comparator<Key> comparator) { <add> this(comparator, 1); <add> } <add> <add> public MaxPQ(Comparator<Key> comparator, int initCapacity) { <add> this.comparator = comparator; <add> pq = (Key[]) new Object[initCapacity + 1]; <add> size = 0; <add> } <add> <add> public MaxPQ(Key[] keys) { <add> int n = keys.length; <add> pq = (Key[]) new Object[n + 1]; <add> for (int i = 0; i < n; i++) { <add> pq[i + 1] = keys[i]; <add> } <add> for (int i = n / 2; i >= 1; i--) { <add> sink(i); <add> } <add> isMaxHeap(); <add> } <add> <add> public int size() { <add> return size; <add> } <add> <add> public boolean isEmpty() { <add> return size == 0; <add> } <add> <add> /** <add> * Adds a new key to this priority queue. <add> * <add> * @param x the new key to add to this priority queue <add> */ <add> public void insert(Key x) { <add> if (size == pq.length - 1) { <add> resize(2 * pq.length); <add> } <add> pq[++size] = x; <add> swim(size); <add> isMaxHeap(); <add> } <add> <add> /** <add> * Removes and returns a largest key on this priority queue.
<add> * <add> * @return a largest key on this priority queue <add> * @throws NoSuchElementException if this priority queue is empty <add> */ <add> public Key delMax() throws NoSuchElementException { <add> if (isEmpty()) { <add> throw new NoSuchElementException(); <add> } <add> Key deleted = pq[1]; <add> exch(1, size); <add> size--; <add> sink(1); <add> pq[size + 1] = null; <add> if (size > 0 && size == (pq.length - 1) / 4) { <add> resize(pq.length / 2); <add> } <add> isMaxHeap(); <add> return deleted; <add> } <add> <add> private void resize(int newCapacity) { <add> if (newCapacity <= size) { <add> throw new IllegalArgumentException(); <add> } <add> <add> Key[] resized = (Key[]) new Object[newCapacity]; <add> <add> for (int i = 1; i <= size; i++) { <add> resized[i] = pq[i]; <add> } <add> pq = resized; <add> } <add> <add> private void isMaxHeap() { <add> if (!isMaxHeap(1)) { <add> throw new IllegalStateException(); <add> } <add> } <add> <add> private boolean isMaxHeap(int k) { <add> if (k > size) { <add> return true; <add> } <add> int left = 2 * k; <add> int right = 2 * k + 1; <add> if (left <= size && less(k, left)) return false; <add> if (right <= size && less(k, right)) return false; <add> return isMaxHeap(left) && isMaxHeap(right); <add> } <add> <add> private void sink(int k) { <add> while (2 * k <= size) { <add> int j = 2 * k; <add> if (j < size && less(j, j + 1)) { <add> j++; <add> } <add> if (less(j, k)) { <add> break; <add> } <add> exch(j, k); <add> k = j; <add> } <add> } <add> <add> private void swim(int k) { <add> while (k > 1 && less(k / 2, k)) { <add> exch(k / 2, k); <add> k = k / 2; <add> } <add> } <add> <add> private void exch(int i, int j) { <add> Key temp = pq[i]; <add> pq[i] = pq[j]; <add> pq[j] = temp; <add> } <add> <add> <add> private boolean less(int lessIndex, int moreIndex) { <add> if (comparator == null) { <add> return ((Comparable<Key>) pq[lessIndex]).compareTo(pq[moreIndex]) < 0; <add> } else { <add> return comparator.compare(pq[lessIndex], pq[moreIndex]) < 0; <add> } <add> } <add> <add> @Override <add> public Iterator<Key> iterator() { <add> return new MaxHeapIterator(); <add> } <add> <add> private class MaxHeapIterator implements Iterator<Key> { <add> private MaxPQ<Key> copy; <add> <add> public MaxHeapIterator() { <add> if (comparator == null) { <add> copy = new MaxPQ<>(size()); <add> } else { <add> copy = new MaxPQ<>(comparator, size()); <add> } <add> for (int i = 1; i <= size; i++) { <add> copy.insert(pq[i]); <add> } <add> } <add> <add> @Override <add> public boolean hasNext() { <add> return !copy.isEmpty(); <add> } <add> <add> public void remove() { <add> throw new UnsupportedOperationException(); <add> } <add> <add> @Override <add> public Key next() { <add> if (!hasNext()) { <add> throw new NoSuchElementException(); <add> } <add> return copy.delMax(); <add> } <add> } <add> <add> public static void main(String[] args) { <add> MaxPQ<Integer> queue = new MaxPQ<>(); <add> queue.insert(23); <add> queue.insert(43); <add> queue.insert(67); <add> queue.insert(99); <add> queue.insert(24); <add> queue.insert(27); <add> queue.insert(23); <add> System.out.println("Current size of PQ is: " + queue.size()); <add> System.out.println("In decreasing order is: "); <add> for (Integer value : queue) { <add> System.out.println(value); <add> } <add> System.out.println("Deleting max: " + queue.delMax()); <add> System.out.println("Current size of PQ is: " + queue.size()); <add> System.out.println("In decreasing order is: "); <add> for (Integer value : queue) { <add> 
System.out.println(value); <add> } <add> System.out.println("inserting"); <add> queue.insert(53); <add> System.out.println("Current size of PQ is: " + queue.size()); <add> System.out.println("In decreasing order is: "); <add> for (Integer value : queue) { <add> System.out.println(value); <add> } <add> } <add>}
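The MaxPQ above leaves pq[0] unused so the heap arithmetic stays integer-only: parent(k) = k / 2, and the children of k sit at 2k and 2k + 1, which is exactly what swim and sink walk. A standalone check of that invariant follows; the array is an illustrative 1-indexed layout built from the demo's values, not necessarily the exact layout the class produces.

// Minimal demonstration of the 1-indexed heap arithmetic MaxPQ relies on:
// parent(k) = k/2, children(k) = 2k and 2k+1. Illustrative and standalone.
public final class HeapIndexSketch {
    public static void main(String[] args) {
        int[] heap = {0, 99, 67, 43, 24, 23, 27, 23}; // slot 0 deliberately unused
        for (int k = 2; k < heap.length; k++) {
            int parent = k / 2;
            // Max-heap invariant: every child is <= its parent.
            if (heap[k] > heap[parent]) {
                throw new IllegalStateException("invariant violated at index " + k);
            }
        }
        System.out.println("max is heap[1] = " + heap[1]); // the root holds the maximum
    }
}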
Java
bsd-2-clause
0e8a9dd4840eca9910c823e72b6e91061d5f0d27
0
syohex/libbun-snapshot,syohex/libbun-snapshot,syohex/libbun-snapshot,syohex/libbun-snapshot,syohex/libbun-snapshot,syohex/libbun-snapshot,syohex/libbun-snapshot,syohex/libbun-snapshot
package libbun.parser.ssa2; import libbun.parser.ZVisitor; import libbun.parser.ast.ZAndNode; import libbun.parser.ast.ZArrayLiteralNode; import libbun.parser.ast.ZAsmNode; import libbun.parser.ast.ZBinaryNode; import libbun.parser.ast.ZBlockNode; import libbun.parser.ast.ZBooleanNode; import libbun.parser.ast.ZBreakNode; import libbun.parser.ast.ZCastNode; import libbun.parser.ast.ZClassNode; import libbun.parser.ast.ZComparatorNode; import libbun.parser.ast.ZDefaultValueNode; import libbun.parser.ast.ZErrorNode; import libbun.parser.ast.ZFloatNode; import libbun.parser.ast.ZFuncCallNode; import libbun.parser.ast.ZFunctionNode; import libbun.parser.ast.ZGetIndexNode; import libbun.parser.ast.ZGetNameNode; import libbun.parser.ast.ZGetterNode; import libbun.parser.ast.ZGroupNode; import libbun.parser.ast.ZIfNode; import libbun.parser.ast.ZInstanceOfNode; import libbun.parser.ast.ZIntNode; import libbun.parser.ast.ZLetVarNode; import libbun.parser.ast.ZListNode; import libbun.parser.ast.ZLocalDefinedNode; import libbun.parser.ast.ZMacroNode; import libbun.parser.ast.ZMapLiteralNode; import libbun.parser.ast.ZMethodCallNode; import libbun.parser.ast.ZNewObjectNode; import libbun.parser.ast.ZNode; import libbun.parser.ast.ZNotNode; import libbun.parser.ast.ZNullNode; import libbun.parser.ast.ZOrNode; import libbun.parser.ast.ZReturnNode; import libbun.parser.ast.ZSetIndexNode; import libbun.parser.ast.ZSetNameNode; import libbun.parser.ast.ZSetterNode; import libbun.parser.ast.ZStringNode; import libbun.parser.ast.ZSugarNode; import libbun.parser.ast.ZThrowNode; import libbun.parser.ast.ZTopLevelNode; import libbun.parser.ast.ZTryNode; import libbun.parser.ast.ZUnaryNode; import libbun.parser.ast.ZVarBlockNode; import libbun.parser.ast.ZWhileNode; import libbun.util.Var; public class ZASTTransformer extends ZVisitor { private ZNode TransformedValue; public ZASTTransformer() { this.TransformedValue = null; } protected void VisitBefore(ZNode Node, int Index) { } protected void VisitAfter(ZNode Node, int Index) { } protected void Transform(ZNode Node, int Index) { ZNode LastTransformed = this.TransformedValue; this.TransformedValue = Node.AST[Index]; this.VisitBefore(Node, Index); Node.AST[Index].Accept(this); Node.SetNode(Index, this.TransformedValue); this.VisitAfter(Node, Index); this.TransformedValue = LastTransformed; } protected void VisitListNode(ZListNode Node) { @Var int i = 0; while(i < Node.GetListSize()) { this.Transform(Node, i); i = i + 1; } } @Override public void VisitNullNode(ZNullNode Node) { /* do nothing */ } @Override public void VisitBooleanNode(ZBooleanNode Node) { /* do nothing */ } @Override public void VisitIntNode(ZIntNode Node) { /* do nothing */ } @Override public void VisitFloatNode(ZFloatNode Node) { /* do nothing */ } @Override public void VisitStringNode(ZStringNode Node) { /* do nothing */ } @Override public void VisitArrayLiteralNode(ZArrayLiteralNode Node) { this.VisitListNode(Node); } @Override public void VisitMapLiteralNode(ZMapLiteralNode Node) { this.VisitListNode(Node); } // @Override // public void VisitNewArrayNode(ZNewArrayNode Node) { // this.VisitListNode(Node); // } @Override public void VisitNewObjectNode(ZNewObjectNode Node) { this.VisitListNode(Node); } @Override public void VisitGetNameNode(ZGetNameNode Node) { /* do nothing */ } @Override public void VisitSetNameNode(ZSetNameNode Node) { this.Transform(Node, ZSetNameNode._Expr); } @Override public void VisitGroupNode(ZGroupNode Node) { this.Transform(Node, ZGroupNode._Expr); } @Override public 
void VisitGetterNode(ZGetterNode Node) { this.Transform(Node, ZGetterNode._Recv); } @Override public void VisitSetterNode(ZSetterNode Node) { this.Transform(Node, ZSetterNode._Recv); this.Transform(Node, ZSetterNode._Expr); } @Override public void VisitGetIndexNode(ZGetIndexNode Node) { this.Transform(Node, ZGetIndexNode._Recv); this.Transform(Node, ZGetIndexNode._Index); } @Override public void VisitSetIndexNode(ZSetIndexNode Node) { this.Transform(Node, ZSetIndexNode._Recv); this.Transform(Node, ZSetIndexNode._Index); this.Transform(Node, ZSetIndexNode._Expr); } @Override public void VisitMethodCallNode(ZMethodCallNode Node) { this.Transform(Node, ZMethodCallNode._Recv); this.VisitListNode(Node); } @Override public void VisitFuncCallNode(ZFuncCallNode Node) { this.Transform(Node, ZFuncCallNode._Functor); this.VisitListNode(Node); } @Override public void VisitUnaryNode(ZUnaryNode Node) { this.Transform(Node, ZUnaryNode._Recv); } @Override public void VisitNotNode(ZNotNode Node) { this.Transform(Node, ZNotNode._Recv); } @Override public void VisitCastNode(ZCastNode Node) { this.Transform(Node, ZCastNode._Expr); } @Override public void VisitInstanceOfNode(ZInstanceOfNode Node) { this.Transform(Node, ZInstanceOfNode._Left); } @Override public void VisitBinaryNode(ZBinaryNode Node) { this.Transform(Node, ZBinaryNode._Left); this.Transform(Node, ZBinaryNode._Right); } @Override public void VisitComparatorNode(ZComparatorNode Node) { this.Transform(Node, ZComparatorNode._Left); this.Transform(Node, ZComparatorNode._Right); } @Override public void VisitAndNode(ZAndNode Node) { this.Transform(Node, ZAndNode._Left); this.Transform(Node, ZAndNode._Right); } @Override public void VisitOrNode(ZOrNode Node) { this.Transform(Node, ZOrNode._Left); this.Transform(Node, ZOrNode._Right); } @Override public void VisitBlockNode(ZBlockNode Node) { this.VisitListNode(Node); } @Override public void VisitVarBlockNode(ZVarBlockNode Node) { this.Transform(Node, ZLetVarNode._InitValue); this.VisitListNode(Node); } @Override public void VisitIfNode(ZIfNode Node) { this.Transform(Node, ZIfNode._Cond); this.Transform(Node, ZIfNode._Then); if(Node.HasElseNode()) { this.Transform(Node, ZIfNode._Else); } } @Override public void VisitReturnNode(ZReturnNode Node) { if(Node.ExprNode() != null) { this.Transform(Node, ZReturnNode._Expr); } } @Override public void VisitWhileNode(ZWhileNode Node) { this.Transform(Node, ZWhileNode._Cond); this.Transform(Node, ZWhileNode._Block); } @Override public void VisitBreakNode(ZBreakNode Node) { /* do nothing */ } @Override public void VisitThrowNode(ZThrowNode Node) { this.Transform(Node, ZThrowNode._Expr); } @Override public void VisitTryNode(ZTryNode Node) { this.Transform(Node, ZTryNode._Try); this.Transform(Node, ZTryNode._Catch); this.Transform(Node, ZTryNode._Finally); } // public void VisitCatchNode(ZCatchNode Node) { // this.Transform(Node, ZCatchNode._Block); // } @Override public void VisitLetNode(ZLetVarNode Node) { this.Transform(Node, ZLetVarNode._InitValue); } @Override public void VisitFunctionNode(ZFunctionNode Node) { this.Transform(Node, ZFunctionNode._Block); } @Override public void VisitClassNode(ZClassNode Node) { this.VisitListNode(Node); } @Override public void VisitErrorNode(ZErrorNode Node) { /* do nothing */ } @Override public void EnableVisitor() { /* do nothing */ } @Override public void StopVisitor() { /* do nothing */ } @Override public boolean IsVisitable() { return false; } @Override public void VisitMacroNode(ZMacroNode Node) { this.VisitListNode(Node); } 
@Override public void VisitAsmNode(ZAsmNode Node) { // TODO Auto-generated method stub } @Override public void VisitTopLevelNode(ZTopLevelNode Node) { // TODO Auto-generated method stub } @Override public void VisitSugarNode(ZSugarNode Node) { // TODO Auto-generated method stub } @Override public void VisitLocalDefinedNode(ZLocalDefinedNode Node) { // TODO Auto-generated method stub } @Override public void VisitDefaultValueNode(ZDefaultValueNode Node) { // TODO Auto-generated method stub } }
java/src/libbun/parser/ssa2/ZASTTransformer.java
package libbun.parser.ssa2; import libbun.parser.ZVisitor; import libbun.parser.ast.ZAndNode; import libbun.parser.ast.ZArrayLiteralNode; import libbun.parser.ast.ZAsmNode; import libbun.parser.ast.ZBinaryNode; import libbun.parser.ast.ZBlockNode; import libbun.parser.ast.ZBooleanNode; import libbun.parser.ast.ZBreakNode; import libbun.parser.ast.ZCastNode; import libbun.parser.ast.ZClassNode; import libbun.parser.ast.ZComparatorNode; import libbun.parser.ast.ZDefaultValueNode; import libbun.parser.ast.ZErrorNode; import libbun.parser.ast.ZFloatNode; import libbun.parser.ast.ZFuncCallNode; import libbun.parser.ast.ZFunctionNode; import libbun.parser.ast.ZGetIndexNode; import libbun.parser.ast.ZGetNameNode; import libbun.parser.ast.ZGetterNode; import libbun.parser.ast.ZGroupNode; import libbun.parser.ast.ZIfNode; import libbun.parser.ast.ZInstanceOfNode; import libbun.parser.ast.ZIntNode; import libbun.parser.ast.ZLetVarNode; import libbun.parser.ast.ZListNode; import libbun.parser.ast.ZLocalDefinedNode; import libbun.parser.ast.ZMacroNode; import libbun.parser.ast.ZMapLiteralNode; import libbun.parser.ast.ZMethodCallNode; import libbun.parser.ast.ZNewObjectNode; import libbun.parser.ast.ZNode; import libbun.parser.ast.ZNotNode; import libbun.parser.ast.ZNullNode; import libbun.parser.ast.ZOrNode; import libbun.parser.ast.ZReturnNode; import libbun.parser.ast.ZSetIndexNode; import libbun.parser.ast.ZSetNameNode; import libbun.parser.ast.ZSetterNode; import libbun.parser.ast.ZStringNode; import libbun.parser.ast.ZSugarNode; import libbun.parser.ast.ZThrowNode; import libbun.parser.ast.ZTopLevelNode; import libbun.parser.ast.ZTryNode; import libbun.parser.ast.ZUnaryNode; import libbun.parser.ast.ZVarBlockNode; import libbun.parser.ast.ZWhileNode; import libbun.util.Var; public class ZASTTransformer extends ZVisitor { private ZNode TransformedValue; public ZASTTransformer() { this.TransformedValue = null; } protected void VisitBefore(ZNode Node, int Index) { } protected void VisitAfter(ZNode Node, int Index) { } protected void Transform(ZNode Node, int Index) { ZNode LastTransformed = this.TransformedValue; this.TransformedValue = Node.AST[Index]; this.VisitBefore(Node, Index); Node.AST[Index].Accept(this); Node.SetNode(Index, this.TransformedValue); this.VisitAfter(Node, Index); this.TransformedValue = LastTransformed; } protected void VisitListNode(ZListNode Node) { @Var int i = 0; while(i < Node.GetListSize()) { this.Transform(Node, i); } } @Override public void VisitNullNode(ZNullNode Node) { /* do nothing */ } @Override public void VisitBooleanNode(ZBooleanNode Node) { /* do nothing */ } @Override public void VisitIntNode(ZIntNode Node) { /* do nothing */ } @Override public void VisitFloatNode(ZFloatNode Node) { /* do nothing */ } @Override public void VisitStringNode(ZStringNode Node) { /* do nothing */ } @Override public void VisitArrayLiteralNode(ZArrayLiteralNode Node) { this.VisitListNode(Node); } @Override public void VisitMapLiteralNode(ZMapLiteralNode Node) { this.VisitListNode(Node); } // @Override // public void VisitNewArrayNode(ZNewArrayNode Node) { // this.VisitListNode(Node); // } @Override public void VisitNewObjectNode(ZNewObjectNode Node) { this.VisitListNode(Node); } @Override public void VisitGetNameNode(ZGetNameNode Node) { /* do nothing */ } @Override public void VisitSetNameNode(ZSetNameNode Node) { this.Transform(Node, ZSetNameNode._Expr); } @Override public void VisitGroupNode(ZGroupNode Node) { this.Transform(Node, ZGroupNode._Expr); } @Override public void 
VisitGetterNode(ZGetterNode Node) { this.Transform(Node, ZGetterNode._Recv); } @Override public void VisitSetterNode(ZSetterNode Node) { this.Transform(Node, ZSetterNode._Recv); this.Transform(Node, ZSetterNode._Expr); } @Override public void VisitGetIndexNode(ZGetIndexNode Node) { this.Transform(Node, ZGetIndexNode._Recv); this.Transform(Node, ZGetIndexNode._Index); } @Override public void VisitSetIndexNode(ZSetIndexNode Node) { this.Transform(Node, ZSetIndexNode._Recv); this.Transform(Node, ZSetIndexNode._Index); this.Transform(Node, ZSetIndexNode._Expr); } @Override public void VisitMethodCallNode(ZMethodCallNode Node) { this.Transform(Node, ZMethodCallNode._Recv); this.VisitListNode(Node); } @Override public void VisitFuncCallNode(ZFuncCallNode Node) { this.Transform(Node, ZFuncCallNode._Functor); this.VisitListNode(Node); } @Override public void VisitUnaryNode(ZUnaryNode Node) { this.Transform(Node, ZUnaryNode._Recv); } @Override public void VisitNotNode(ZNotNode Node) { this.Transform(Node, ZNotNode._Recv); } @Override public void VisitCastNode(ZCastNode Node) { this.Transform(Node, ZCastNode._Expr); } @Override public void VisitInstanceOfNode(ZInstanceOfNode Node) { this.Transform(Node, ZInstanceOfNode._Left); } @Override public void VisitBinaryNode(ZBinaryNode Node) { this.Transform(Node, ZBinaryNode._Left); this.Transform(Node, ZBinaryNode._Right); } @Override public void VisitComparatorNode(ZComparatorNode Node) { this.Transform(Node, ZComparatorNode._Left); this.Transform(Node, ZComparatorNode._Right); } @Override public void VisitAndNode(ZAndNode Node) { this.Transform(Node, ZAndNode._Left); this.Transform(Node, ZAndNode._Right); } @Override public void VisitOrNode(ZOrNode Node) { this.Transform(Node, ZOrNode._Left); this.Transform(Node, ZOrNode._Right); } @Override public void VisitBlockNode(ZBlockNode Node) { this.VisitListNode(Node); } @Override public void VisitVarBlockNode(ZVarBlockNode Node) { this.Transform(Node, ZLetVarNode._InitValue); this.VisitListNode(Node); } @Override public void VisitIfNode(ZIfNode Node) { this.Transform(Node, ZIfNode._Cond); this.Transform(Node, ZIfNode._Then); if(Node.HasElseNode()) { this.Transform(Node, ZIfNode._Else); } } @Override public void VisitReturnNode(ZReturnNode Node) { if(Node.ExprNode() != null) { this.Transform(Node, ZReturnNode._Expr); } } @Override public void VisitWhileNode(ZWhileNode Node) { this.Transform(Node, ZWhileNode._Cond); this.Transform(Node, ZWhileNode._Block); } @Override public void VisitBreakNode(ZBreakNode Node) { /* do nothing */ } @Override public void VisitThrowNode(ZThrowNode Node) { this.Transform(Node, ZThrowNode._Expr); } @Override public void VisitTryNode(ZTryNode Node) { this.Transform(Node, ZTryNode._Try); this.Transform(Node, ZTryNode._Catch); this.Transform(Node, ZTryNode._Finally); } // public void VisitCatchNode(ZCatchNode Node) { // this.Transform(Node, ZCatchNode._Block); // } @Override public void VisitLetNode(ZLetVarNode Node) { this.Transform(Node, ZLetVarNode._InitValue); } @Override public void VisitFunctionNode(ZFunctionNode Node) { this.Transform(Node, ZFunctionNode._Block); } @Override public void VisitClassNode(ZClassNode Node) { this.VisitListNode(Node); } @Override public void VisitErrorNode(ZErrorNode Node) { /* do nothing */ } @Override public void EnableVisitor() { /* do nothing */ } @Override public void StopVisitor() { /* do nothing */ } @Override public boolean IsVisitable() { return false; } @Override public void VisitMacroNode(ZMacroNode Node) { this.VisitListNode(Node); } 
@Override public void VisitAsmNode(ZAsmNode Node) { // TODO Auto-generated method stub } @Override public void VisitTopLevelNode(ZTopLevelNode Node) { // TODO Auto-generated method stub } @Override public void VisitSugarNode(ZSugarNode Node) { // TODO Auto-generated method stub } @Override public void VisitLocalDefinedNode(ZLocalDefinedNode Node) { // TODO Auto-generated method stub } @Override public void VisitDefaultValueNode(ZDefaultValueNode Node) { // TODO Auto-generated method stub } }
fix infinite loop in ZASTTransformer
java/src/libbun/parser/ssa2/ZASTTransformer.java
fix infinite loop in ZASTTransformer
<ide><path>java/src/libbun/parser/ssa2/ZASTTransformer.java <ide> @Var int i = 0; <ide> while(i < Node.GetListSize()) { <ide> this.Transform(Node, i); <add> i = i + 1; <ide> } <ide> } <ide>
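The defect fixed here is a classic while-loop hang: VisitListNode read i in its loop condition but the body never advanced it, so any node with at least one child looped forever. A stripped-down, runnable reduction of the before/after shape:

// Illustrative reduction of the VisitListNode bug: the loop condition reads
// `i`, but the buggy body never incremented it, so any size > 0 would hang.
public final class LoopIncrementSketch {

    static int visitAllFixed(int size) {
        int visited = 0;
        int i = 0;
        while (i < size) {
            visited++;      // stand-in for this.Transform(Node, i)
            i = i + 1;      // the one-line fix from the commit above
        }
        return visited;
    }

    public static void main(String[] args) {
        System.out.println(visitAllFixed(5)); // 5; without the increment this never returns
    }
}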
Java
mit
e845266f250d3662d1aa00bd733c7120cc92fb54
0
razorpay/razorpay-android-sample-app,razorpay/razorpay-android-sample-app
package com.razorpay.sampleapp; import android.app.Activity; import android.os.Bundle; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.Toast; import com.razorpay.Checkout; import com.razorpay.PaymentResultListener; import org.json.JSONObject; public class PaymentActivity extends Activity implements PaymentResultListener { private static final String TAG = PaymentActivity.class.getSimpleName(); @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_payment); /* To ensure faster loading of the Checkout form, call this method as early as possible in your checkout flow. */ Checkout.preload(getApplicationContext()); // Payment button created by you in XML layout Button button = (Button) findViewById(R.id.btn_pay); button.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startPayment(); } }); } public void startPayment() { /* You need to pass current activity in order to let Razorpay create CheckoutActivity */ final Activity activity = this; final Checkout co = new Checkout(); try { JSONObject options = new JSONObject(); options.put("name", "Razorpay Corp"); options.put("description", "Demoing Charges"); //You can omit the image option to fetch the image from dashboard options.put("image", "https://rzp-mobile.s3.amazonaws.com/images/rzp.png"); options.put("currency", "INR"); options.put("amount", "100"); JSONObject preFill = new JSONObject(); preFill.put("email", "[email protected]"); preFill.put("contact", "9876543210"); options.put("prefill", preFill); co.open(activity, options); } catch (Exception e) { Toast.makeText(activity, "Error in payment: " + e.getMessage(), Toast.LENGTH_SHORT) .show(); e.printStackTrace(); } } /** * The name of the function has to be * onPaymentSuccess * Wrap your code in try catch, as shown, to ensure that this method runs correctly */ @SuppressWarnings("unused") @Override public void onPaymentSuccess(String razorpayPaymentID) { try { Toast.makeText(this, "Payment Successful: " + razorpayPaymentID, Toast.LENGTH_SHORT).show(); } catch (Exception e) { Log.e(TAG, "Exception in onPaymentSuccess", e); } } /** * The name of the function has to be * onPaymentError * Wrap your code in try catch, as shown, to ensure that this method runs correctly */ @SuppressWarnings("unused") @Override public void onPaymentError(int code, String response) { try { Toast.makeText(this, "Payment failed: " + code + " " + response, Toast.LENGTH_SHORT).show(); } catch (Exception e) { Log.e(TAG, "Exception in onPaymentError", e); } } }
app/src/main/java/com/razorpay/sampleapp/PaymentActivity.java
package com.razorpay.sampleapp; import android.app.Activity; import android.os.Bundle; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.Toast; import com.razorpay.Checkout; import com.razorpay.PaymentResultListener; import org.json.JSONObject; public class PaymentActivity extends Activity implements PaymentResultListener { private static final String TAG = PaymentActivity.class.getSimpleName(); @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_payment); /* To ensure faster loading of the Checkout form, call this method as early as possible in your checkout flow. */ Checkout.preload(getApplicationContext()); // Payment button created by you in XML layout Button button = (Button) findViewById(R.id.btn_pay); button.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startPayment(); } }); } public void startPayment() { /* You need to pass current activity in order to let Razorpay create CheckoutActivity */ final Activity activity = this; final Checkout co = new Checkout(); try { JSONObject options = new JSONObject(); options.put("name", "Razorpay Corp"); options.put("description", "Demoing Charges"); //You can omit the image option to fetch the image from dashboard options.put("image", "https://rzp-mobile.s3.amazonaws.com/images/rzp.png"); options.put("currency", "INR"); options.put("amount", "100"); JSONObject preFill = new JSONObject(); preFill.put("email", "[email protected]"); preFill.put("contact", "9876543210"); options.put("prefill", preFill); co.open(activity, options); } catch (Exception e) { Toast.makeText(activity, "Error in payment: " + e.getMessage(), Toast.LENGTH_SHORT) .show(); e.printStackTrace(); } } /** * The name of the function has to be * onPaymentSuccess * Wrap your code in try catch, as shown, to ensure that this method runs correctly */ @SuppressWarnings("unused") @Override public void onPaymentSuccess(String razorpayPaymentID) { try { Toast.makeText(this, "Payment Successful: " + razorpayPaymentID, Toast.LENGTH_SHORT).show(); } catch (Exception e) { Log.e(TAG, "Exception in onPaymentSuccess", e); } } /** * The name of the function has to be * onPaymentError * Wrap your code in try catch, as shown, to ensure that this method runs correctly */ @SuppressWarnings("unused") @Override public void onPaymentError(int code, String response) { try { Toast.makeText(this, "Payment failed: " + code + " " + response, Toast.LENGTH_SHORT).show(); } catch (Exception e) { Log.e(TAG, "Exception in onPaymentError", e); } } }
[cleanup] Use [email protected] for sample email ID
app/src/main/java/com/razorpay/sampleapp/PaymentActivity.java
[cleanup] Use [email protected] for sample email ID
<ide><path>app/src/main/java/com/razorpay/sampleapp/PaymentActivity.java <ide> options.put("amount", "100"); <ide> <ide> JSONObject preFill = new JSONObject(); <del> preFill.put("email", "[email protected]"); <add> preFill.put("email", "[email protected]"); <ide> preFill.put("contact", "9876543210"); <ide> <ide> options.put("prefill", preFill);
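One detail worth noting in the sample: Razorpay's Checkout takes the "amount" option in the smallest currency subunit (paise for INR), so the "100" above charges Rs. 1.00, not Rs. 100. A tiny hypothetical helper, not part of the Razorpay SDK, that makes the conversion explicit:

// Hypothetical convenience helper for the "amount" option shown above.
// Razorpay amounts are in the smallest subunit: 100 paise = Rs. 1.00.
public final class PaiseSketch {
    static String rupeesToPaise(double rupees) {
        return String.valueOf(Math.round(rupees * 100));
    }

    public static void main(String[] args) {
        System.out.println(rupeesToPaise(1.0));   // "100", as in the sample
        System.out.println(rupeesToPaise(499.5)); // "49950"
    }
}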
Java
mit
9ac3110d3ebc8ca5cd16cac09afff76e5d6b0846
0
MylesIsCool/ViaVersion
package us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2; import net.md_5.bungee.api.chat.TextComponent; import net.md_5.bungee.chat.ComponentSerializer; import us.myles.ViaVersion.api.PacketWrapper; import us.myles.ViaVersion.api.data.UserConnection; import us.myles.ViaVersion.api.entities.Entity1_13Types; import us.myles.ViaVersion.api.minecraft.Position; import us.myles.ViaVersion.api.minecraft.item.Item; import us.myles.ViaVersion.api.platform.providers.ViaProviders; import us.myles.ViaVersion.api.protocol.Protocol; import us.myles.ViaVersion.api.remapper.PacketHandler; import us.myles.ViaVersion.api.remapper.PacketRemapper; import us.myles.ViaVersion.api.remapper.ValueCreator; import us.myles.ViaVersion.api.remapper.ValueTransformer; import us.myles.ViaVersion.api.type.Type; import us.myles.ViaVersion.packets.State; import us.myles.ViaVersion.protocols.protocol1_9_3to1_9_1_2.storage.ClientWorld; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.data.MappingData; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.packets.EntityPackets; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.packets.InventoryPackets; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.packets.WorldPackets; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.providers.BlockEntityProvider; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.providers.PaintingProvider; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.storage.BlockStorage; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.storage.EntityTracker; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.storage.TabCompleteTracker; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.types.Particle1_13Type; import java.util.Map; // Development of 1.13 support! 
public class ProtocolSnapshotTo1_12_2 extends Protocol { public static final Particle1_13Type PARTICLE_TYPE = new Particle1_13Type(); public static String legacyTextToJson(String legacyText) { return ComponentSerializer.toString( TextComponent.fromLegacyText(legacyText) ); } public static final PacketHandler POS_TO_3_INT = new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { Position position = wrapper.read(Type.POSITION); wrapper.write(Type.INT, position.getX().intValue()); wrapper.write(Type.INT, position.getY().intValue()); wrapper.write(Type.INT, position.getZ().intValue()); } }; static { MappingData.init(); } public static String jsonTextToLegacy(String value) { return TextComponent.toLegacyText(ComponentSerializer.parse(value)); } @Override protected void registerPackets() { // Register grouped packet changes EntityPackets.register(this); WorldPackets.register(this); InventoryPackets.register(this); // Outgoing packets // New packet 0x0 - Login Plugin Message registerOutgoing(State.LOGIN, 0x0, 0x1); registerOutgoing(State.LOGIN, 0x1, 0x2); registerOutgoing(State.LOGIN, 0x2, 0x3); registerOutgoing(State.LOGIN, 0x3, 0x4); // Statistics registerOutgoing(State.PLAY, 0x07, 0x07, new PacketRemapper() { @Override public void registerMap() { // TODO: This packet has changed handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); registerOutgoing(State.PLAY, 0xF, 0xE); // WorldPackets 0x10 -> 0x0F // Tab-Complete registerOutgoing(State.PLAY, 0xE, 0x10, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.VAR_INT, wrapper.user().get(TabCompleteTracker.class).getTransactionId()); String input = wrapper.user().get(TabCompleteTracker.class).getInput(); // Start & End int index; int length; // If no input or new word (then it's the start) if (input.endsWith(" ") || input.length() == 0) { index = input.length(); length = 0; } else { // Otherwise find the last space (+1 as we include it) int lastSpace = input.lastIndexOf(" ") + 1; index = lastSpace; length = input.length() - lastSpace; } // Write index + length wrapper.write(Type.VAR_INT, index); wrapper.write(Type.VAR_INT, length); int count = wrapper.passthrough(Type.VAR_INT); for (int i = 0; i < count; i++) { String suggestion = wrapper.read(Type.STRING); // If we're at the start then handle removing slash if (suggestion.startsWith("/") && index == 0) { suggestion = suggestion.substring(1); } wrapper.write(Type.STRING, suggestion); wrapper.write(Type.BOOLEAN, false); } } }); } }); // New packet 0x11, declare commands registerOutgoing(State.PLAY, 0x11, 0x12); registerOutgoing(State.PLAY, 0x12, 0x13); registerOutgoing(State.PLAY, 0x13, 0x14); // InventoryPackets 0x14 -> 0x15 registerOutgoing(State.PLAY, 0x15, 0x16); // InventoryPackets 0x16 -> 0x17 registerOutgoing(State.PLAY, 0x17, 0x18); // InventoryPackets 0x18 -> 0x19 registerOutgoing(State.PLAY, 0x1A, 0x1B); registerOutgoing(State.PLAY, 0x1B, 0x1C); // New packet 0x1D - NBT Query registerOutgoing(State.PLAY, 0x1C, 0x1E); registerOutgoing(State.PLAY, 0x1D, 0x1F); registerOutgoing(State.PLAY, 0x1E, 0x20); registerOutgoing(State.PLAY, 0x1F, 0x21); // WorldPackets 0x20 -> 0x22 registerOutgoing(State.PLAY, 0x21, 0x23); // WorldPackets 0x22 -> 0x24 // Join (save dimension id) registerOutgoing(State.PLAY, 0x23, 0x25, new PacketRemapper() { @Override public void 
registerMap() { map(Type.INT); // 0 - Entity ID map(Type.UNSIGNED_BYTE); // 1 - Gamemode map(Type.INT); // 2 - Dimension handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // Store the player int entityId = wrapper.get(Type.INT, 0); wrapper.user().get(EntityTracker.class).addEntity(entityId, Entity1_13Types.EntityType.PLAYER); ClientWorld clientChunks = wrapper.user().get(ClientWorld.class); int dimensionId = wrapper.get(Type.INT, 1); clientChunks.setEnvironment(dimensionId); // Send fake declare commands wrapper.create(0x11, new ValueCreator() { @Override public void write(PacketWrapper wrapper) { wrapper.write(Type.VAR_INT, 2); // Size // Write root node wrapper.write(Type.VAR_INT, 0); // Mark as command wrapper.write(Type.VAR_INT, 1); // 1 child wrapper.write(Type.VAR_INT, 1); // Child is at 1 // Write arg node wrapper.write(Type.VAR_INT, 0x02 | 0x04 | 0x10); // Mark as command wrapper.write(Type.VAR_INT, 0); // No children // Extra data wrapper.write(Type.STRING, "args"); // Arg name wrapper.write(Type.STRING, "brigadier:string"); wrapper.write(Type.VAR_INT, 2); // Greedy wrapper.write(Type.STRING, "minecraft:ask_server"); // Ask server wrapper.write(Type.VAR_INT, 0); // Root node index } }).send(ProtocolSnapshotTo1_12_2.class); // Send tags packet wrapper.create(0x55, new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.VAR_INT, MappingData.blockTags.size()); // block tags for (Map.Entry<String, int[]> tag : MappingData.blockTags.entrySet()) { wrapper.write(Type.STRING, tag.getKey()); wrapper.write(Type.VAR_INT, tag.getValue().length); for (int id : tag.getValue()) { wrapper.write(Type.VAR_INT, id); } } wrapper.write(Type.VAR_INT, MappingData.itemTags.size()); // item tags for (Map.Entry<String, int[]> tag : MappingData.itemTags.entrySet()) { wrapper.write(Type.STRING, tag.getKey()); wrapper.write(Type.VAR_INT, tag.getValue().length); for (int id : tag.getValue()) { wrapper.write(Type.VAR_INT, id); } } wrapper.write(Type.VAR_INT, MappingData.fluidTags.size()); // fluid tags for (Map.Entry<String, int[]> tag : MappingData.fluidTags.entrySet()) { wrapper.write(Type.STRING, tag.getKey()); wrapper.write(Type.VAR_INT, tag.getValue().length); for (int id : tag.getValue()) { wrapper.write(Type.VAR_INT, id); } } } }).send(ProtocolSnapshotTo1_12_2.class); } }); } }); // Map packet registerOutgoing(State.PLAY, 0x24, 0x26, new PacketRemapper() { @Override public void registerMap() { map(Type.VAR_INT); // 0 - Map id map(Type.BYTE); // 1 - Scale map(Type.BOOLEAN); // 2 - Tracking Position handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int iconCount = wrapper.passthrough(Type.VAR_INT); for (int i = 0; i < iconCount; i++) { byte directionAndType = wrapper.read(Type.BYTE); int type = (directionAndType & 0xF0) >> 4; wrapper.write(Type.VAR_INT, type); wrapper.passthrough(Type.BYTE); // Icon X wrapper.passthrough(Type.BYTE); // Icon Z byte direction = (byte) (directionAndType & 0x0F); wrapper.write(Type.BYTE, direction); wrapper.write(Type.OPTIONAL_CHAT, null); // Display Name } } }); } }); registerOutgoing(State.PLAY, 0x25, 0x27); registerOutgoing(State.PLAY, 0x26, 0x28); registerOutgoing(State.PLAY, 0x27, 0x29); registerOutgoing(State.PLAY, 0x28, 0x2A); registerOutgoing(State.PLAY, 0x29, 0x2B); registerOutgoing(State.PLAY, 0x2A, 0x2C); // Craft recipe response registerOutgoing(State.PLAY, 0x2B, 0x2D, new PacketRemapper() { @Override public 
void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO This packet changed wrapper.cancel(); } }); } }); registerOutgoing(State.PLAY, 0x2C, 0x2E); registerOutgoing(State.PLAY, 0x2D, 0x2F); registerOutgoing(State.PLAY, 0x2E, 0x30); // New 0x31 - Face Player registerOutgoing(State.PLAY, 0x2F, 0x32); registerOutgoing(State.PLAY, 0x30, 0x33); // Recipe registerOutgoing(State.PLAY, 0x31, 0x34, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO: This has changed >.> wrapper.cancel(); } }); } }); // EntityPackets 0x32 -> 0x35 registerOutgoing(State.PLAY, 0x33, 0x36); registerOutgoing(State.PLAY, 0x34, 0x37); // Respawn (save dimension id) registerOutgoing(State.PLAY, 0x35, 0x38, new PacketRemapper() { @Override public void registerMap() { map(Type.INT); // 0 - Dimension ID handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { ClientWorld clientWorld = wrapper.user().get(ClientWorld.class); int dimensionId = wrapper.get(Type.INT, 0); clientWorld.setEnvironment(dimensionId); } }); } }); registerOutgoing(State.PLAY, 0x36, 0x39); registerOutgoing(State.PLAY, 0x37, 0x3A); registerOutgoing(State.PLAY, 0x38, 0x3B); registerOutgoing(State.PLAY, 0x39, 0x3C); registerOutgoing(State.PLAY, 0x3A, 0x3D); registerOutgoing(State.PLAY, 0x3B, 0x3E); // EntityPackets 0x3C -> 0x3F registerOutgoing(State.PLAY, 0x3D, 0x40); registerOutgoing(State.PLAY, 0x3E, 0x41); // InventoryPackets 0x3F -> 0x42 registerOutgoing(State.PLAY, 0x40, 0x43); registerOutgoing(State.PLAY, 0x41, 0x44); // Scoreboard Objective registerOutgoing(State.PLAY, 0x42, 0x45, new PacketRemapper() { @Override public void registerMap() { map(Type.STRING); // 0 - Objective name map(Type.BYTE); // 1 - Mode handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { byte mode = wrapper.get(Type.BYTE, 0); // On create or update if (mode == 0 || mode == 2) { wrapper.passthrough(Type.STRING); // Value String type = wrapper.read(Type.STRING); // integer or hearts wrapper.write(Type.VAR_INT, type.equals("integer") ? 
0 : 1); } } }); } }); registerOutgoing(State.PLAY, 0x43, 0x46); // Team packet registerOutgoing(State.PLAY, 0x44, 0x47, new PacketRemapper() { @Override public void registerMap() { map(Type.STRING); // 0 - Team Name map(Type.BYTE); // 1 - Mode handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { byte action = wrapper.get(Type.BYTE, 0); if (action == 0 || action == 2) { wrapper.passthrough(Type.STRING); // Display Name String prefix = wrapper.read(Type.STRING); // Prefix moved String suffix = wrapper.read(Type.STRING); // Suffix moved wrapper.passthrough(Type.BYTE); // Flags wrapper.passthrough(Type.STRING); // Name Tag Visibility wrapper.passthrough(Type.STRING); // Collision rule // Handle new colors byte color = wrapper.read(Type.BYTE); if (color == -1) // -1 changed to 21 wrapper.write(Type.VAR_INT, 21); // RESET else wrapper.write(Type.VAR_INT, (int) color); wrapper.write(Type.STRING, legacyTextToJson(prefix)); // Prefix wrapper.write(Type.STRING, legacyTextToJson(suffix)); // Suffix } } }); } }); registerOutgoing(State.PLAY, 0x45, 0x48); registerOutgoing(State.PLAY, 0x46, 0x49); registerOutgoing(State.PLAY, 0x47, 0x4A); registerOutgoing(State.PLAY, 0x48, 0x4B); // New 0x4C - Stop Sound // Sound Effect packet registerOutgoing(State.PLAY, 0x49, 0x4D, new PacketRemapper() { @Override public void registerMap() { map(Type.VAR_INT); // 0 - Sound ID handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int soundId = wrapper.get(Type.VAR_INT, 0); wrapper.set(Type.VAR_INT, 0, getNewSoundID(soundId)); } }); } }); registerOutgoing(State.PLAY, 0x4A, 0x4E); registerOutgoing(State.PLAY, 0x4B, 0x4F); registerOutgoing(State.PLAY, 0x4C, 0x50); // Advancements registerOutgoing(State.PLAY, 0x4D, 0x51, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO Temporary cancel advancements because of 'Non [a-z0-9/._-] character in path of location: minecraft:? 
https://fs.matsv.nl/media?id=auwje4z4lxw.png wrapper.cancel(); } }); } }); registerOutgoing(State.PLAY, 0x4E, 0x52); registerOutgoing(State.PLAY, 0x4F, 0x53); // New packet 0x54 - Declare Recipes // New packet 0x55 - Tags // Incoming packets // New packet 0x0 - Login Plugin Message registerIncoming(State.LOGIN, -1, 0x0, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); registerIncoming(State.LOGIN, 0x0, 0x1); registerIncoming(State.LOGIN, 0x1, 0x2); // New 0x01 - Query Block NBT registerIncoming(State.PLAY, -1, 0x01, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); // Tab-Complete registerIncoming(State.PLAY, 0x1, 0x5, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int tid = wrapper.read(Type.VAR_INT); // Save transaction id wrapper.user().get(TabCompleteTracker.class).setTransactionId(tid); } }); // Prepend / map(Type.STRING, new ValueTransformer<String, String>(Type.STRING) { @Override public String transform(PacketWrapper wrapper, String inputValue) { wrapper.user().get(TabCompleteTracker.class).setInput(inputValue); return "/" + inputValue; } }); // Fake the end of the packet create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) { wrapper.write(Type.BOOLEAN, false); wrapper.write(Type.OPTIONAL_POSITION, null); } }); } }); registerIncoming(State.PLAY, 0x05, 0x06); registerIncoming(State.PLAY, 0x06, 0x07); registerIncoming(State.PLAY, 0x07, 0x08); registerIncoming(State.PLAY, 0x08, 0x09); // InventoryPackets 0x09 -> 0x0A // New 0x0A - Edit book -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x0B, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { Item item = wrapper.read(Type.FLAT_ITEM); boolean isSigning = wrapper.read(Type.BOOLEAN); InventoryPackets.toServer(item); wrapper.write(Type.STRING, isSigning ? 
"MC|BSign" : "MC|BEdit"); // Channel wrapper.write(Type.ITEM, item); } }); } }); // New 0x0C - Query Entity NBT registerIncoming(State.PLAY, -1, 0x0C, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); registerIncoming(State.PLAY, 0x0A, 0x0D); registerIncoming(State.PLAY, 0x0B, 0x0E); registerIncoming(State.PLAY, 0x0C, 0x0F); registerIncoming(State.PLAY, 0x0D, 0x10); registerIncoming(State.PLAY, 0x0E, 0x11); registerIncoming(State.PLAY, 0x0F, 0x12); registerIncoming(State.PLAY, 0x10, 0x13); registerIncoming(State.PLAY, 0x11, 0x14); // New 0x15 - Pick Item -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x15, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|PickItem"); // Channel } }); } }); // Craft recipe request registerIncoming(State.PLAY, 0x12, 0x16, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO: This has changed >.> wrapper.cancel(); } }); } }); registerIncoming(State.PLAY, 0x13, 0x17); registerIncoming(State.PLAY, 0x14, 0x18); registerIncoming(State.PLAY, 0x15, 0x19); registerIncoming(State.PLAY, 0x16, 0x1A); // Recipe Book Data registerIncoming(State.PLAY, 0x17, 0x1B, new PacketRemapper() { @Override public void registerMap() { map(Type.VAR_INT); // 0 - Type handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int type = wrapper.get(Type.VAR_INT, 0); if (type == 1) { wrapper.passthrough(Type.BOOLEAN); // Crafting Recipe Book Open wrapper.passthrough(Type.BOOLEAN); // Crafting Recipe Filter Active wrapper.read(Type.BOOLEAN); // Smelting Recipe Book Open | IGNORE NEW 1.13 FIELD wrapper.read(Type.BOOLEAN); // Smelting Recipe Filter Active | IGNORE NEW 1.13 FIELD } } }); } }); // New 0x1C - Name Item -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x1C, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|ItemName"); // Channel } }); } }); registerIncoming(State.PLAY, 0x18, 0x1D); registerIncoming(State.PLAY, 0x19, 0x1E); // New 0x1F - Select Trade -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x1F, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|TrSel"); // Channel } }); map(Type.VAR_INT, Type.INT); // Slot } }); // New 0x20 - Set Beacon Effect -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x20, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|Beacon"); // Channel } }); map(Type.VAR_INT, Type.INT); // Primary Effect map(Type.VAR_INT, Type.INT); // Secondary Effect } }); registerIncoming(State.PLAY, 0x1A, 0x21); // New 0x22 - Update Command Block -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x22, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|AutoCmd"); } 
}); handler(POS_TO_3_INT); map(Type.STRING); // Command handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int mode = wrapper.read(Type.VAR_INT); byte flags = wrapper.read(Type.BYTE); String stringMode = mode == 0 ? "SEQUENCE" : mode == 1 ? "AUTO" : "REDSTONE"; wrapper.write(Type.BOOLEAN, (flags & 0x1) != 0); // Track output wrapper.write(Type.STRING, stringMode); wrapper.write(Type.BOOLEAN, (flags & 0x2) != 0); // Is conditional wrapper.write(Type.BOOLEAN, (flags & 0x4) != 0); // Automatic } }); } }); // New 0x23 - Update Command Block Minecart -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x23, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|AdvCmd"); } }); map(Type.VAR_INT, Type.INT); // Entity Id } }); // 0x1B -> 0x24 in InventoryPackets // New 0x25 - Update Structure Block -> Message Channel registerIncoming(State.PLAY, 0x09, 0x25, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|Struct"); // Channel } }); handler(POS_TO_3_INT); map(Type.VAR_INT, new ValueTransformer<Integer, Byte>(Type.BYTE) { // Action @Override public Byte transform(PacketWrapper wrapper, Integer action) throws Exception { return (byte) (action + 1); } }); // Action map(Type.VAR_INT, new ValueTransformer<Integer, String>(Type.STRING) { @Override public String transform(PacketWrapper wrapper, Integer mode) throws Exception { return mode == 0 ? "SAVE" : mode == 1 ? "LOAD" : mode == 2 ? "CORNER" : "DATA"; } }); map(Type.STRING); // Name map(Type.BYTE, Type.INT); // Offset X map(Type.BYTE, Type.INT); // Offset Y map(Type.BYTE, Type.INT); // Offset Z map(Type.BYTE, Type.INT); // Size X map(Type.BYTE, Type.INT); // Size Y map(Type.BYTE, Type.INT); // Size Z map(Type.VAR_INT, new ValueTransformer<Integer, String>(Type.STRING) { // Mirror @Override public String transform(PacketWrapper wrapper, Integer mirror) throws Exception { return mirror == 0 ? "NONE" : mirror == 1 ? "LEFT_RIGHT" : "FRONT_BACK"; } }); map(Type.VAR_INT, new ValueTransformer<Integer, String>(Type.STRING) { // Rotation @Override public String transform(PacketWrapper wrapper, Integer rotation) throws Exception { return rotation == 0 ? "NONE" : rotation == 1 ? "CLOCKWISE_90" : rotation == 2 ? 
"CLOCKWISE_180" : "COUNTERCLOCKWISE_90"; } }); map(Type.STRING); handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { float integrity = wrapper.read(Type.FLOAT); long seed = wrapper.read(Type.VAR_LONG); byte flags = wrapper.read(Type.BYTE); wrapper.write(Type.BOOLEAN, (flags & 0x1) != 0); // Ignore Entities wrapper.write(Type.BOOLEAN, (flags & 0x2) != 0); // Show air wrapper.write(Type.BOOLEAN, (flags & 0x4) != 0); // Show bounding box wrapper.write(Type.FLOAT, integrity); wrapper.write(Type.VAR_LONG, seed); } }); } }); registerIncoming(State.PLAY, 0x1C, 0x26); registerIncoming(State.PLAY, 0x1D, 0x27); registerIncoming(State.PLAY, 0x1E, 0x28); registerIncoming(State.PLAY, 0x1F, 0x29); registerIncoming(State.PLAY, 0x20, 0x2A); } @Override public void init(UserConnection userConnection) { userConnection.put(new EntityTracker(userConnection)); userConnection.put(new TabCompleteTracker(userConnection)); if (!userConnection.has(ClientWorld.class)) userConnection.put(new ClientWorld(userConnection)); userConnection.put(new BlockStorage(userConnection)); } @Override protected void register(ViaProviders providers) { providers.register(BlockEntityProvider.class, new BlockEntityProvider()); providers.register(PaintingProvider.class, new PaintingProvider()); } private int getNewSoundID(final int oldID) { return MappingData.oldToNewSounds.get(oldID); } }
common/src/main/java/us/myles/ViaVersion/protocols/protocolsnapshotto1_12_2/ProtocolSnapshotTo1_12_2.java
package us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2; import net.md_5.bungee.api.chat.TextComponent; import net.md_5.bungee.chat.ComponentSerializer; import us.myles.ViaVersion.api.PacketWrapper; import us.myles.ViaVersion.api.data.UserConnection; import us.myles.ViaVersion.api.entities.Entity1_13Types; import us.myles.ViaVersion.api.minecraft.Position; import us.myles.ViaVersion.api.minecraft.item.Item; import us.myles.ViaVersion.api.platform.providers.ViaProviders; import us.myles.ViaVersion.api.protocol.Protocol; import us.myles.ViaVersion.api.remapper.PacketHandler; import us.myles.ViaVersion.api.remapper.PacketRemapper; import us.myles.ViaVersion.api.remapper.ValueCreator; import us.myles.ViaVersion.api.remapper.ValueTransformer; import us.myles.ViaVersion.api.type.Type; import us.myles.ViaVersion.packets.State; import us.myles.ViaVersion.protocols.protocol1_9_3to1_9_1_2.storage.ClientWorld; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.data.MappingData; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.packets.EntityPackets; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.packets.InventoryPackets; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.packets.WorldPackets; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.providers.BlockEntityProvider; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.providers.PaintingProvider; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.storage.BlockStorage; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.storage.EntityTracker; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.storage.TabCompleteTracker; import us.myles.ViaVersion.protocols.protocolsnapshotto1_12_2.types.Particle1_13Type; import java.util.Map; // Development of 1.13 support! 
public class ProtocolSnapshotTo1_12_2 extends Protocol { public static final Particle1_13Type PARTICLE_TYPE = new Particle1_13Type(); public static String legacyTextToJson(String legacyText) { return ComponentSerializer.toString( TextComponent.fromLegacyText(legacyText) ); } public static final PacketHandler POS_TO_3_INT = new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { Position position = wrapper.read(Type.POSITION); wrapper.write(Type.INT, position.getX().intValue()); wrapper.write(Type.INT, position.getY().intValue()); wrapper.write(Type.INT, position.getZ().intValue()); } }; static { MappingData.init(); } public static String jsonTextToLegacy(String value) { return TextComponent.toLegacyText(ComponentSerializer.parse(value)); } @Override protected void registerPackets() { // Register grouped packet changes EntityPackets.register(this); WorldPackets.register(this); InventoryPackets.register(this); // Outgoing packets // New packet 0x0 - Login Plugin Message registerOutgoing(State.LOGIN, 0x0, 0x1); registerOutgoing(State.LOGIN, 0x1, 0x2); registerOutgoing(State.LOGIN, 0x2, 0x3); registerOutgoing(State.LOGIN, 0x3, 0x4); // Statistics registerOutgoing(State.PLAY, 0x07, 0x07, new PacketRemapper() { @Override public void registerMap() { // TODO: This packet has changed handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); registerOutgoing(State.PLAY, 0xF, 0xE); // WorldPackets 0x10 -> 0x0F // Tab-Complete registerOutgoing(State.PLAY, 0xE, 0x10, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.VAR_INT, wrapper.user().get(TabCompleteTracker.class).getTransactionId()); String input = wrapper.user().get(TabCompleteTracker.class).getInput(); // Start & End int index; int length; // If no input or new word (then it's the start) if (input.endsWith(" ") || input.length() == 0) { index = input.length(); length = 0; } else { // Otherwise find the last space (+1 as we include it) int lastSpace = input.lastIndexOf(" ") + 1; index = lastSpace; length = input.length() - lastSpace; } // Write index + length wrapper.write(Type.VAR_INT, index); wrapper.write(Type.VAR_INT, length); int count = wrapper.passthrough(Type.VAR_INT); for (int i = 0; i < count; i++) { String suggestion = wrapper.read(Type.STRING); // If we're at the start then handle removing slash if (suggestion.startsWith("/") && index == 0) { suggestion = suggestion.substring(1); } wrapper.write(Type.STRING, suggestion); wrapper.write(Type.BOOLEAN, false); } } }); } }); // New packet 0x11, declare commands registerOutgoing(State.PLAY, 0x11, 0x12); registerOutgoing(State.PLAY, 0x12, 0x13); registerOutgoing(State.PLAY, 0x13, 0x14); // InventoryPackets 0x14 -> 0x15 registerOutgoing(State.PLAY, 0x15, 0x16); // InventoryPackets 0x16 -> 0x17 registerOutgoing(State.PLAY, 0x17, 0x18); // InventoryPackets 0x18 -> 0x19 registerOutgoing(State.PLAY, 0x1A, 0x1B); registerOutgoing(State.PLAY, 0x1B, 0x1C); // New packet 0x1D - NBT Query registerOutgoing(State.PLAY, 0x1C, 0x1E); registerOutgoing(State.PLAY, 0x1D, 0x1F); registerOutgoing(State.PLAY, 0x1E, 0x20); registerOutgoing(State.PLAY, 0x1F, 0x21); // WorldPackets 0x20 -> 0x22 registerOutgoing(State.PLAY, 0x21, 0x23); // WorldPackets 0x22 -> 0x24 // Join (save dimension id) registerOutgoing(State.PLAY, 0x23, 0x25, new PacketRemapper() { @Override public void 
registerMap() { map(Type.INT); // 0 - Entity ID map(Type.UNSIGNED_BYTE); // 1 - Gamemode map(Type.INT); // 2 - Dimension handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // Store the player int entityId = wrapper.get(Type.INT, 0); wrapper.user().get(EntityTracker.class).addEntity(entityId, Entity1_13Types.EntityType.PLAYER); ClientWorld clientChunks = wrapper.user().get(ClientWorld.class); int dimensionId = wrapper.get(Type.INT, 1); clientChunks.setEnvironment(dimensionId); // Send fake declare commands wrapper.create(0x11, new ValueCreator() { @Override public void write(PacketWrapper wrapper) { wrapper.write(Type.VAR_INT, 2); // Size // Write root node wrapper.write(Type.VAR_INT, 0); // Mark as command wrapper.write(Type.VAR_INT, 1); // 1 child wrapper.write(Type.VAR_INT, 1); // Child is at 1 // Write arg node wrapper.write(Type.VAR_INT, 0x02 | 0x04 | 0x10); // Mark as command wrapper.write(Type.VAR_INT, 0); // No children // Extra data wrapper.write(Type.STRING, "args"); // Arg name wrapper.write(Type.STRING, "brigadier:string"); wrapper.write(Type.VAR_INT, 2); // Greedy wrapper.write(Type.STRING, "minecraft:ask_server"); // Ask server wrapper.write(Type.VAR_INT, 0); // Root node index } }).send(ProtocolSnapshotTo1_12_2.class); // Send tags packet wrapper.create(0x55, new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.VAR_INT, MappingData.blockTags.size()); // block tags for (Map.Entry<String, int[]> tag : MappingData.blockTags.entrySet()) { wrapper.write(Type.STRING, tag.getKey()); wrapper.write(Type.VAR_INT, tag.getValue().length); for (int id : tag.getValue()) { wrapper.write(Type.VAR_INT, id); } } wrapper.write(Type.VAR_INT, MappingData.itemTags.size()); // item tags for (Map.Entry<String, int[]> tag : MappingData.itemTags.entrySet()) { wrapper.write(Type.STRING, tag.getKey()); wrapper.write(Type.VAR_INT, tag.getValue().length); for (int id : tag.getValue()) { wrapper.write(Type.VAR_INT, id); } } wrapper.write(Type.VAR_INT, MappingData.fluidTags.size()); // fluid tags for (Map.Entry<String, int[]> tag : MappingData.fluidTags.entrySet()) { wrapper.write(Type.STRING, tag.getKey()); wrapper.write(Type.VAR_INT, tag.getValue().length); for (int id : tag.getValue()) { wrapper.write(Type.VAR_INT, id); } } } }).send(ProtocolSnapshotTo1_12_2.class); } }); } }); // Map packet registerOutgoing(State.PLAY, 0x24, 0x26, new PacketRemapper() { @Override public void registerMap() { map(Type.VAR_INT); // 0 - Map id map(Type.BYTE); // 1 - Scale map(Type.BOOLEAN); // 2 - Tracking Position handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int iconCount = wrapper.passthrough(Type.VAR_INT); for (int i = 0; i < iconCount; i++) { byte directionAndType = wrapper.read(Type.BYTE); int type = (directionAndType & 0xF0) >> 4; wrapper.write(Type.VAR_INT, type); wrapper.passthrough(Type.BYTE); // Icon X wrapper.passthrough(Type.BYTE); // Icon Z byte direction = (byte) (directionAndType & 0x0F); wrapper.write(Type.BYTE, direction); wrapper.write(Type.OPTIONAL_CHAT, null); // Display Name } } }); } }); registerOutgoing(State.PLAY, 0x25, 0x27); registerOutgoing(State.PLAY, 0x26, 0x28); registerOutgoing(State.PLAY, 0x27, 0x29); registerOutgoing(State.PLAY, 0x28, 0x2A); registerOutgoing(State.PLAY, 0x29, 0x2B); registerOutgoing(State.PLAY, 0x2A, 0x2C); // Craft recipe response registerOutgoing(State.PLAY, 0x2B, 0x2D, new PacketRemapper() { @Override public 
void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO This packet changed wrapper.cancel(); } }); } }); registerOutgoing(State.PLAY, 0x2C, 0x2E); registerOutgoing(State.PLAY, 0x2D, 0x2F); registerOutgoing(State.PLAY, 0x2E, 0x30); // New 0x31 - Face Player registerOutgoing(State.PLAY, 0x2F, 0x32); registerOutgoing(State.PLAY, 0x30, 0x33); // Recipe registerOutgoing(State.PLAY, 0x31, 0x34, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO: This has changed >.> wrapper.cancel(); } }); } }); // EntityPackets 0x32 -> 0x35 registerOutgoing(State.PLAY, 0x33, 0x36); registerOutgoing(State.PLAY, 0x34, 0x37); // Respawn (save dimension id) registerOutgoing(State.PLAY, 0x35, 0x38, new PacketRemapper() { @Override public void registerMap() { map(Type.INT); // 0 - Dimension ID handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { ClientWorld clientWorld = wrapper.user().get(ClientWorld.class); int dimensionId = wrapper.get(Type.INT, 0); clientWorld.setEnvironment(dimensionId); } }); } }); registerOutgoing(State.PLAY, 0x36, 0x39); registerOutgoing(State.PLAY, 0x37, 0x3A); registerOutgoing(State.PLAY, 0x38, 0x3B); registerOutgoing(State.PLAY, 0x39, 0x3C); registerOutgoing(State.PLAY, 0x3A, 0x3D); registerOutgoing(State.PLAY, 0x3B, 0x3E); // EntityPackets 0x3C -> 0x3F registerOutgoing(State.PLAY, 0x3D, 0x40); registerOutgoing(State.PLAY, 0x3E, 0x41); // InventoryPackets 0x3F -> 0x42 registerOutgoing(State.PLAY, 0x40, 0x43); registerOutgoing(State.PLAY, 0x41, 0x44); // Scoreboard Objective registerOutgoing(State.PLAY, 0x42, 0x45, new PacketRemapper() { @Override public void registerMap() { map(Type.STRING); // 0 - Objective name map(Type.BYTE); // 1 - Mode handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { byte mode = wrapper.get(Type.BYTE, 0); // On create or update if (mode == 0 || mode == 2) { wrapper.passthrough(Type.STRING); // Value String type = wrapper.read(Type.STRING); // integer or hearts wrapper.write(Type.VAR_INT, type.equals("integer") ? 
0 : 1); } } }); } }); registerOutgoing(State.PLAY, 0x43, 0x46); // Team packet registerOutgoing(State.PLAY, 0x44, 0x47, new PacketRemapper() { @Override public void registerMap() { map(Type.STRING); // 0 - Team Name map(Type.BYTE); // 1 - Mode handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { byte action = wrapper.get(Type.BYTE, 0); if (action == 0 || action == 2) { wrapper.passthrough(Type.STRING); // Display Name String prefix = wrapper.read(Type.STRING); // Prefix moved String suffix = wrapper.read(Type.STRING); // Suffix moved wrapper.passthrough(Type.BYTE); // Flags wrapper.passthrough(Type.STRING); // Name Tag Visibility wrapper.passthrough(Type.STRING); // Collision rule // Handle new colors byte color = wrapper.read(Type.BYTE); if (color == -1) // -1 changed to 21 wrapper.write(Type.VAR_INT, 21); // RESET else wrapper.write(Type.VAR_INT, (int) color); wrapper.write(Type.STRING, legacyTextToJson(prefix)); // Prefix wrapper.write(Type.STRING, legacyTextToJson(suffix)); // Suffix } } }); } }); registerOutgoing(State.PLAY, 0x45, 0x48); registerOutgoing(State.PLAY, 0x46, 0x49); registerOutgoing(State.PLAY, 0x47, 0x4A); registerOutgoing(State.PLAY, 0x48, 0x4B); // New 0x4C - Stop Sound // Sound Effect packet registerOutgoing(State.PLAY, 0x49, 0x4D, new PacketRemapper() { @Override public void registerMap() { map(Type.VAR_INT); // 0 - Sound ID handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int soundId = wrapper.get(Type.VAR_INT, 0); wrapper.set(Type.VAR_INT, 0, getNewSoundID(soundId)); } }); } }); registerOutgoing(State.PLAY, 0x4A, 0x4E); registerOutgoing(State.PLAY, 0x4B, 0x4F); registerOutgoing(State.PLAY, 0x4C, 0x50); // Advancements registerOutgoing(State.PLAY, 0x4D, 0x51, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO Temporary cancel advancements because of 'Non [a-z0-9/._-] character in path of location: minecraft:? 
https://fs.matsv.nl/media?id=auwje4z4lxw.png wrapper.cancel(); } }); } }); registerOutgoing(State.PLAY, 0x4E, 0x52); registerOutgoing(State.PLAY, 0x4F, 0x53); // New packet 0x54 - Declare Recipes // New packet 0x55 - Tags // Incoming packets // New packet 0x0 - Login Plugin Message registerIncoming(State.LOGIN, -1, 0x0, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); registerIncoming(State.LOGIN, 0x0, 0x1); registerIncoming(State.LOGIN, 0x1, 0x2); // New 0x01 - Query Block NBT registerIncoming(State.PLAY, -1, 0x01, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); // Tab-Complete registerIncoming(State.PLAY, 0x1, 0x5, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int tid = wrapper.read(Type.VAR_INT); // Save transaction id wrapper.user().get(TabCompleteTracker.class).setTransactionId(tid); } }); // Prepend / map(Type.STRING, new ValueTransformer<String, String>(Type.STRING) { @Override public String transform(PacketWrapper wrapper, String inputValue) { wrapper.user().get(TabCompleteTracker.class).setInput(inputValue); return "/" + inputValue; } }); // Fake the end of the packet create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) { wrapper.write(Type.BOOLEAN, false); wrapper.write(Type.OPTIONAL_POSITION, null); } }); } }); // InventoryPackets 0x09 -> 0x0A // New 0x0A - Edit book -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x0B, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { Item item = wrapper.read(Type.FLAT_ITEM); boolean isSigning = wrapper.read(Type.BOOLEAN); InventoryPackets.toServer(item); wrapper.write(Type.STRING, isSigning ? 
"MC|BSign" : "MC|BEdit"); // Channel wrapper.write(Type.ITEM, item); } }); } }); // New 0x0C - Query Entity NBT registerIncoming(State.PLAY, -1, 0x0C, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { wrapper.cancel(); } }); } }); registerIncoming(State.PLAY, 0x0A, 0x0D); registerIncoming(State.PLAY, 0x0B, 0x0E); registerIncoming(State.PLAY, 0x0C, 0x0F); registerIncoming(State.PLAY, 0x0D, 0x10); registerIncoming(State.PLAY, 0x0E, 0x11); registerIncoming(State.PLAY, 0x0F, 0x12); registerIncoming(State.PLAY, 0x10, 0x13); registerIncoming(State.PLAY, 0x11, 0x14); // New 0x15 - Pick Item -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x15, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|PickItem"); // Channel } }); } }); // Craft recipe request registerIncoming(State.PLAY, 0x12, 0x16, new PacketRemapper() { @Override public void registerMap() { handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { // TODO: This has changed >.> wrapper.cancel(); } }); } }); registerIncoming(State.PLAY, 0x13, 0x17); registerIncoming(State.PLAY, 0x14, 0x18); registerIncoming(State.PLAY, 0x15, 0x19); registerIncoming(State.PLAY, 0x16, 0x1A); // Recipe Book Data registerIncoming(State.PLAY, 0x17, 0x1B, new PacketRemapper() { @Override public void registerMap() { map(Type.VAR_INT); // 0 - Type handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int type = wrapper.get(Type.VAR_INT, 0); if (type == 1) { wrapper.passthrough(Type.BOOLEAN); // Crafting Recipe Book Open wrapper.passthrough(Type.BOOLEAN); // Crafting Recipe Filter Active wrapper.read(Type.BOOLEAN); // Smelting Recipe Book Open | IGNORE NEW 1.13 FIELD wrapper.read(Type.BOOLEAN); // Smelting Recipe Filter Active | IGNORE NEW 1.13 FIELD } } }); } }); // New 0x1C - Name Item -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x1C, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|ItemName"); // Channel } }); } }); registerIncoming(State.PLAY, 0x18, 0x1D); registerIncoming(State.PLAY, 0x19, 0x1E); // New 0x1F - Select Trade -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x1F, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|TrSel"); // Channel } }); map(Type.VAR_INT, Type.INT); // Slot } }); // New 0x20 - Set Beacon Effect -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x20, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|Beacon"); // Channel } }); map(Type.VAR_INT, Type.INT); // Primary Effect map(Type.VAR_INT, Type.INT); // Secondary Effect } }); registerIncoming(State.PLAY, 0x1A, 0x21); // New 0x22 - Update Command Block -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x22, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|AutoCmd"); } 
}); handler(POS_TO_3_INT); map(Type.STRING); // Command handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { int mode = wrapper.read(Type.VAR_INT); byte flags = wrapper.read(Type.BYTE); String stringMode = mode == 0 ? "SEQUENCE" : mode == 1 ? "AUTO" : "REDSTONE"; wrapper.write(Type.BOOLEAN, (flags & 0x1) != 0); // Track output wrapper.write(Type.STRING, stringMode); wrapper.write(Type.BOOLEAN, (flags & 0x2) != 0); // Is conditional wrapper.write(Type.BOOLEAN, (flags & 0x4) != 0); // Automatic } }); } }); // New 0x23 - Update Command Block Minecart -> Plugin Message registerIncoming(State.PLAY, 0x09, 0x23, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|AdvCmd"); } }); map(Type.VAR_INT, Type.INT); // Entity Id } }); // 0x1B -> 0x24 in InventoryPackets // New 0x25 - Update Structure Block -> Message Channel registerIncoming(State.PLAY, 0x09, 0x25, new PacketRemapper() { @Override public void registerMap() { create(new ValueCreator() { @Override public void write(PacketWrapper wrapper) throws Exception { wrapper.write(Type.STRING, "MC|Struct"); // Channel } }); handler(POS_TO_3_INT); map(Type.VAR_INT, new ValueTransformer<Integer, Byte>(Type.BYTE) { // Action @Override public Byte transform(PacketWrapper wrapper, Integer action) throws Exception { return (byte) (action + 1); } }); // Action map(Type.VAR_INT, new ValueTransformer<Integer, String>(Type.STRING) { @Override public String transform(PacketWrapper wrapper, Integer mode) throws Exception { return mode == 0 ? "SAVE" : mode == 1 ? "LOAD" : mode == 2 ? "CORNER" : "DATA"; } }); map(Type.STRING); // Name map(Type.BYTE, Type.INT); // Offset X map(Type.BYTE, Type.INT); // Offset Y map(Type.BYTE, Type.INT); // Offset Z map(Type.BYTE, Type.INT); // Size X map(Type.BYTE, Type.INT); // Size Y map(Type.BYTE, Type.INT); // Size Z map(Type.VAR_INT, new ValueTransformer<Integer, String>(Type.STRING) { // Mirror @Override public String transform(PacketWrapper wrapper, Integer mirror) throws Exception { return mirror == 0 ? "NONE" : mirror == 1 ? "LEFT_RIGHT" : "FRONT_BACK"; } }); map(Type.VAR_INT, new ValueTransformer<Integer, String>(Type.STRING) { // Rotation @Override public String transform(PacketWrapper wrapper, Integer rotation) throws Exception { return rotation == 0 ? "NONE" : rotation == 1 ? "CLOCKWISE_90" : rotation == 2 ? 
"CLOCKWISE_180" : "COUNTERCLOCKWISE_90"; } }); map(Type.STRING); handler(new PacketHandler() { @Override public void handle(PacketWrapper wrapper) throws Exception { float integrity = wrapper.read(Type.FLOAT); long seed = wrapper.read(Type.VAR_LONG); byte flags = wrapper.read(Type.BYTE); wrapper.write(Type.BOOLEAN, (flags & 0x1) != 0); // Ignore Entities wrapper.write(Type.BOOLEAN, (flags & 0x2) != 0); // Show air wrapper.write(Type.BOOLEAN, (flags & 0x4) != 0); // Show bounding box wrapper.write(Type.FLOAT, integrity); wrapper.write(Type.VAR_LONG, seed); } }); } }); registerIncoming(State.PLAY, 0x1C, 0x26); registerIncoming(State.PLAY, 0x1D, 0x27); registerIncoming(State.PLAY, 0x1E, 0x28); registerIncoming(State.PLAY, 0x1F, 0x29); registerIncoming(State.PLAY, 0x20, 0x2A); } @Override public void init(UserConnection userConnection) { userConnection.put(new EntityTracker(userConnection)); userConnection.put(new TabCompleteTracker(userConnection)); if (!userConnection.has(ClientWorld.class)) userConnection.put(new ClientWorld(userConnection)); userConnection.put(new BlockStorage(userConnection)); } @Override protected void register(ViaProviders providers) { providers.register(BlockEntityProvider.class, new BlockEntityProvider()); providers.register(PaintingProvider.class, new PaintingProvider()); } private int getNewSoundID(final int oldID) { return MappingData.oldToNewSounds.get(oldID); } }
it should work
common/src/main/java/us/myles/ViaVersion/protocols/protocolsnapshotto1_12_2/ProtocolSnapshotTo1_12_2.java
it should work
<ide><path>common/src/main/java/us/myles/ViaVersion/protocols/protocolsnapshotto1_12_2/ProtocolSnapshotTo1_12_2.java <ide> } <ide> }); <ide> <add> registerIncoming(State.PLAY, 0x05, 0x06); <add> registerIncoming(State.PLAY, 0x06, 0x07); <add> registerIncoming(State.PLAY, 0x07, 0x08); <add> registerIncoming(State.PLAY, 0x08, 0x09); <add> <ide> // InventoryPackets 0x09 -> 0x0A <ide> // New 0x0A - Edit book -> Plugin Message <ide> registerIncoming(State.PLAY, 0x09, 0x0B, new PacketRemapper() {
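The four <add> lines in this hunk register plain pass-throughs for serverbound packets whose ids shifted up by one between 1.12.2 and 1.13. As a hedged sketch of what such a pass-through amounts to (an illustrative class, not ViaVersion's actual pipeline; it assumes the second argument of registerIncoming is the 1.12.2 id and the third the 1.13 id, consistent with the Tab-Complete registration above):

import java.util.HashMap;
import java.util.Map;

// Minimal id-translation table for serverbound packets (hypothetical names).
class ServerboundIdRemapper {

    private final Map<Integer, Integer> newToOld = new HashMap<>();

    // Mirrors registerIncoming(state, oldId, newId) for pure renumberings.
    void register(int oldId, int newId) {
        newToOld.put(newId, oldId);
    }

    // Rewrites the id sent by the 1.13 client into the 1.12.2 id the server expects.
    int remap(int newId) {
        Integer oldId = newToOld.get(newId);
        if (oldId == null) {
            throw new IllegalArgumentException("No 1.12.2 mapping for packet 0x" + Integer.toHexString(newId));
        }
        return oldId;
    }
}

The four additions would then correspond to register(0x05, 0x06) through register(0x08, 0x09).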
Java
bsd-3-clause
3b50e28e795580ac43a5edb8940c57c23ff80cfa
0
asamgir/openspecimen,krishagni/openspecimen,asamgir/openspecimen,NCIP/catissue-core,NCIP/catissue-core,krishagni/openspecimen,NCIP/catissue-core,asamgir/openspecimen,krishagni/openspecimen
/** * <p>Title: ParticipantAction Class> * <p>Description: This class initializes the fields in the Participant Add/Edit webpage. </p> * Copyright: Copyright (c) year * Company: Washington University, School of Medicine, St. Louis. * @author Gautam Shetty * @version 1.00 * Created on Apr 7, 2005 */ package edu.wustl.catissuecore.action; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import edu.wustl.catissuecore.actionForm.ParticipantForm; import edu.wustl.catissuecore.bizlogic.BizLogicFactory; import edu.wustl.catissuecore.bizlogic.ParticipantBizLogic; import edu.wustl.catissuecore.domain.Site; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.common.action.SecureAction; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.cde.CDEManager; import edu.wustl.common.util.MapDataParser; import edu.wustl.common.util.logger.Logger; /** * This class initializes the fields in the Participant Add/Edit webpage. * @author gautam_shetty */ public class ParticipantAction extends SecureAction { /** * Overrides the execute method of Action class. * Sets the various fields in Participant Add/Edit webpage. * */ protected ActionForward executeSecureAction(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ParticipantForm participantForm = (ParticipantForm) form; //This if condition is for participant lookup. When participant is selected from the list then //that participant gets stored in request as participantform1. //After that we have to show the selected participant in o/p if (request.getAttribute("participantSelect") != null) { participantForm = (ParticipantForm) request.getAttribute("participantForm1"); request.setAttribute("participantForm", participantForm); } if (participantForm.getGender() == null) { participantForm.setGender(Constants.UNSPECIFIED); } if (participantForm.getVitalStatus() == null) { participantForm.setVitalStatus(Constants.UNKNOWN); } //List of keys used in map of ActionForm List key = new ArrayList(); key.add("ParticipantMedicalIdentifier:i_Site_id"); key.add("ParticipantMedicalIdentifier:i_medicalRecordNumber"); //Gets the map from ActionForm Map map = participantForm.getValues(); //Calling deleteRow of MapDataParser class MapDataParser.deleteRow(key, map, request.getParameter("status")); //Gets the value of the operation parameter. String operation = request.getParameter(Constants.OPERATION); //Sets the operation attribute to be used in the Add/Edit Participant Page. request.setAttribute(Constants.OPERATION, operation); //Sets the pageOf attribute (for Add,Edit or Query Interface) String pageOf = request.getParameter(Constants.PAGEOF); request.setAttribute(Constants.PAGEOF, pageOf); //Sets the genderList attribute to be used in the Add/Edit Participant Page.
List genderList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_NAME_GENDER, null); genderList.remove(0); request.setAttribute(Constants.GENDER_LIST, genderList); if (participantForm.getGender() == null || participantForm.getGender().equals("")) { Iterator itr = genderList.iterator(); while (itr.hasNext()) { NameValueBean nvb = (NameValueBean) itr.next(); participantForm.setGender(nvb.getValue()); break; } } //Sets the genotypeList attribute to be used in the Add/Edit Participant Page. //NameValueBean unknownVal = new NameValueBean(Constants.UNKNOWN,Constants.UNKNOWN); List genotypeList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_NAME_GENOTYPE, null); request.setAttribute(Constants.GENOTYPE_LIST, genotypeList); //Sets the ethnicityList attribute to be used in the Add/Edit Participant Page. List ethnicityList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_NAME_ETHNICITY, null); request.setAttribute(Constants.ETHNICITY_LIST, ethnicityList); //Sets the raceList attribute to be used in the Add/Edit Participant Page. List raceList = CDEManager.getCDEManager().getPermissibleValueList(Constants.CDE_NAME_RACE, null); request.setAttribute(Constants.RACELIST, raceList); //Sets the vitalStatus attribute to be used in the Add/Edit Participant Page. List vitalStatusList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_VITAL_STATUS, null); vitalStatusList.remove(0); request.setAttribute(Constants.VITAL_STATUS_LIST, vitalStatusList); if (participantForm.getVitalStatus() == null || participantForm.getVitalStatus().equals("")) { Iterator itr = vitalStatusList.iterator(); while (itr.hasNext()) { NameValueBean nvb = (NameValueBean) itr.next(); participantForm.setVitalStatus(nvb.getValue()); break; } } //Sets the activityStatusList attribute to be used in the Site Add/Edit Page. request.setAttribute(Constants.ACTIVITYSTATUSLIST, Constants.ACTIVITY_STATUS_VALUES); ParticipantBizLogic bizlogic = (ParticipantBizLogic) BizLogicFactory.getInstance() .getBizLogic(Constants.PARTICIPANT_FORM_ID); //Sets the Site list of corresponding type. String sourceObjectName = Site.class.getName(); String[] displayNameFields = {"name"}; String valueField = Constants.SYSTEM_IDENTIFIER; List siteList = bizlogic.getList(sourceObjectName, displayNameFields, valueField, true); request.setAttribute(Constants.SITELIST, siteList); Logger.out.debug("pageOf :---------- " + pageOf); return mapping.findForward(pageOf); } }
WEB-INF/src/edu/wustl/catissuecore/action/ParticipantAction.java
/** * <p>Title: ParticipantAction Class> * <p>Description: This class initializes the fields in the Participant Add/Edit webpage. </p> * Copyright: Copyright (c) year * Company: Washington University, School of Medicine, St. Louis. * @author Gautam Shetty * @version 1.00 * Created on Apr 7, 2005 */ package edu.wustl.catissuecore.action; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import edu.wustl.catissuecore.actionForm.ParticipantForm; import edu.wustl.catissuecore.bizlogic.BizLogicFactory; import edu.wustl.catissuecore.bizlogic.ParticipantBizLogic; import edu.wustl.catissuecore.domain.Site; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.common.action.BaseAction; import edu.wustl.common.beans.NameValueBean; import edu.wustl.common.cde.CDEManager; import edu.wustl.common.util.MapDataParser; import edu.wustl.common.util.logger.Logger; /** * This class initializes the fields in the Participant Add/Edit webpage. * @author gautam_shetty */ public class ParticipantAction extends BaseAction { /** * Overrides the execute method of Action class. * Sets the various fields in Participant Add/Edit webpage. * */ protected ActionForward executeAction(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ParticipantForm participantForm = (ParticipantForm) form; //This if condition is for participant lookup. When participant is selected from the list then //that participant gets stored in request as participantform1. //After that we have to show the selected participant in o/p if (request.getAttribute("participantSelect") != null) { participantForm = (ParticipantForm) request.getAttribute("participantForm1"); request.setAttribute("participantForm", participantForm); } if (participantForm.getGender() == null) { participantForm.setGender(Constants.UNSPECIFIED); } if (participantForm.getVitalStatus() == null) { participantForm.setVitalStatus(Constants.UNKNOWN); } //List of keys used in map of ActionForm List key = new ArrayList(); key.add("ParticipantMedicalIdentifier:i_Site_id"); key.add("ParticipantMedicalIdentifier:i_medicalRecordNumber"); //Gets the map from ActionForm Map map = participantForm.getValues(); //Calling deleteRow of MapDataParser class MapDataParser.deleteRow(key, map, request.getParameter("status")); //Gets the value of the operation parameter. String operation = request.getParameter(Constants.OPERATION); //Sets the operation attribute to be used in the Add/Edit Participant Page. request.setAttribute(Constants.OPERATION, operation); //Sets the pageOf attribute (for Add,Edit or Query Interface) String pageOf = request.getParameter(Constants.PAGEOF); request.setAttribute(Constants.PAGEOF, pageOf); //Sets the genderList attribute to be used in the Add/Edit Participant Page.
List genderList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_NAME_GENDER, null); genderList.remove(0); request.setAttribute(Constants.GENDER_LIST, genderList); if (participantForm.getGender() == null || participantForm.getGender().equals("")) { Iterator itr = genderList.iterator(); while (itr.hasNext()) { NameValueBean nvb = (NameValueBean) itr.next(); participantForm.setGender(nvb.getValue()); break; } } //Sets the genotypeList attribute to be used in the Add/Edit Participant Page. //NameValueBean unknownVal = new NameValueBean(Constants.UNKNOWN,Constants.UNKNOWN); List genotypeList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_NAME_GENOTYPE, null); request.setAttribute(Constants.GENOTYPE_LIST, genotypeList); //Sets the ethnicityList attribute to be used in the Add/Edit Participant Page. List ethnicityList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_NAME_ETHNICITY, null); request.setAttribute(Constants.ETHNICITY_LIST, ethnicityList); //Sets the raceList attribute to be used in the Add/Edit Participant Page. List raceList = CDEManager.getCDEManager().getPermissibleValueList(Constants.CDE_NAME_RACE, null); request.setAttribute(Constants.RACELIST, raceList); //Sets the vitalStatus attribute to be used in the Add/Edit Participant Page. List vitalStatusList = CDEManager.getCDEManager().getPermissibleValueList( Constants.CDE_VITAL_STATUS, null); vitalStatusList.remove(0); request.setAttribute(Constants.VITAL_STATUS_LIST, vitalStatusList); if (participantForm.getVitalStatus() == null || participantForm.getVitalStatus().equals("")) { Iterator itr = vitalStatusList.iterator(); while (itr.hasNext()) { NameValueBean nvb = (NameValueBean) itr.next(); participantForm.setVitalStatus(nvb.getValue()); break; } } //Sets the activityStatusList attribute to be used in the Site Add/Edit Page. request.setAttribute(Constants.ACTIVITYSTATUSLIST, Constants.ACTIVITY_STATUS_VALUES); ParticipantBizLogic bizlogic = (ParticipantBizLogic) BizLogicFactory.getInstance() .getBizLogic(Constants.PARTICIPANT_FORM_ID); //Sets the Site list of corresponding type. String sourceObjectName = Site.class.getName(); String[] displayNameFields = {"name"}; String valueField = Constants.SYSTEM_IDENTIFIER; List siteList = bizlogic.getList(sourceObjectName, displayNameFields, valueField, true); request.setAttribute(Constants.SITELIST, siteList); Logger.out.debug("pageOf :---------- " + pageOf); return mapping.findForward(pageOf); } }
Extending the ParticipantAction from SecureAction as Technician should not be able to access Participant Add page. SVN-Revision: 4931
WEB-INF/src/edu/wustl/catissuecore/action/ParticipantAction.java
Extending the ParticipantAction from SecureAction as Technician should not be able to access Participant Add page.
<ide><path>WEB-INF/src/edu/wustl/catissuecore/action/ParticipantAction.java <ide> import edu.wustl.catissuecore.bizlogic.ParticipantBizLogic; <ide> import edu.wustl.catissuecore.domain.Site; <ide> import edu.wustl.catissuecore.util.global.Constants; <del>import edu.wustl.common.action.BaseAction; <add>import edu.wustl.common.action.SecureAction; <ide> import edu.wustl.common.beans.NameValueBean; <ide> import edu.wustl.common.cde.CDEManager; <ide> import edu.wustl.common.util.MapDataParser; <ide> * This class initializes the fields in the Participant Add/Edit webpage. <ide> * @author gautam_shetty <ide> */ <del>public class ParticipantAction extends BaseAction <add>public class ParticipantAction extends SecureAction <ide> { <ide> <ide> /** <ide> * Overrides the execute method of Action class. <ide> * Sets the various fields in Participant Add/Edit webpage. <ide> * */ <del> protected ActionForward executeAction(ActionMapping mapping, ActionForm form, <add> protected ActionForward executeSecureAction(ActionMapping mapping, ActionForm form, <ide> HttpServletRequest request, HttpServletResponse response) throws Exception <ide> { <ide> ParticipantForm participantForm = (ParticipantForm) form;
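The commit swaps the base class and renames the template method, but SecureAction itself is not part of this diff. The sketch below is a hypothetical reconstruction of the template-method pattern the change implies; the forward name and the role check are assumptions, not the actual edu.wustl.common.action.SecureAction code.

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;

public abstract class SecureActionSketch extends Action {

    // Struts calls execute(); the authorization gate runs before any page logic.
    @Override
    public final ActionForward execute(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response) throws Exception {
        if (!isAuthorizedToExecute(request)) {
            return mapping.findForward("accessDenied"); // assumed forward name
        }
        return executeSecureAction(mapping, form, request, response);
    }

    // Subclasses such as ParticipantAction implement only the page setup.
    protected abstract ActionForward executeSecureAction(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response) throws Exception;

    protected boolean isAuthorizedToExecute(HttpServletRequest request) {
        return !request.isUserInRole("TECHNICIAN"); // placeholder for the real privilege check
    }
}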
Java
apache-2.0
a15ee07caf60c36bbf216251659e9cec41a11fd5
0
cushon/error-prone,cushon/error-prone,google/error-prone,google/error-prone,cushon/error-prone,cushon/error-prone
/* * Copyright 2016 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import static com.google.errorprone.BugPattern.Category.JDK; import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; import static com.google.errorprone.matchers.Matchers.allOf; import static com.sun.source.tree.Tree.Kind.NULL_LITERAL; import com.google.errorprone.BugPattern; import com.google.errorprone.VisitorState; import com.google.errorprone.bugpatterns.BugChecker.MethodTreeMatcher; import com.google.errorprone.matchers.Description; import com.google.errorprone.matchers.Matcher; import com.google.errorprone.matchers.Matchers; import com.sun.source.tree.ClassTree; import com.sun.source.tree.LambdaExpressionTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.ReturnTree; import com.sun.source.util.TreeScanner; /** * ToString should not return null. * * @author [email protected] (Eleanor Harris) * @author [email protected] (Siyuan Liu) */ @BugPattern( name = "ToStringReturnsNull", summary = "An implementation of Object.toString() should never return null.", category = JDK, severity = WARNING) public class ToStringReturnsNull extends BugChecker implements MethodTreeMatcher { private static final Matcher<MethodTree> TO_STRING = allOf(Matchers.methodIsNamed("toString"), Matchers.methodHasParameters()); @Override public Description matchMethod(MethodTree tree, VisitorState state) { if (!TO_STRING.matches(tree, state)) { return Description.NO_MATCH; } boolean hasReturnNull = tree.accept( new TreeScanner<Boolean, Void>() { @Override public Boolean visitLambdaExpression(LambdaExpressionTree node, Void unused) { return false; } @Override public Boolean visitClass(ClassTree node, Void unused) { return false; } @Override public Boolean visitReturn(ReturnTree node, Void unused) { return node.getExpression().getKind() == NULL_LITERAL; } @Override public Boolean reduce(Boolean r1, Boolean r2) { return (r1 != null && r1) || (r2 != null && r2); } }, null); return hasReturnNull ? describeMatch(tree) : Description.NO_MATCH; } }
core/src/main/java/com/google/errorprone/bugpatterns/ToStringReturnsNull.java
/* * Copyright 2016 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import static com.google.errorprone.BugPattern.Category.JDK; import static com.google.errorprone.BugPattern.SeverityLevel.WARNING; import static com.google.errorprone.matchers.Matchers.allOf; import static com.sun.source.tree.Tree.Kind.NULL_LITERAL; import com.google.errorprone.BugPattern; import com.google.errorprone.VisitorState; import com.google.errorprone.bugpatterns.BugChecker.MethodTreeMatcher; import com.google.errorprone.matchers.Description; import com.google.errorprone.matchers.Matcher; import com.google.errorprone.matchers.Matchers; import com.sun.source.tree.ClassTree; import com.sun.source.tree.LambdaExpressionTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.ReturnTree; import com.sun.source.tree.Tree; import com.sun.source.util.TreeScanner; /** * ToString should not return null. * * @author [email protected] (Eleanor Harris) * @author [email protected] (Siyuan Liu) */ @BugPattern( name = "ToStringReturnsNull", summary = "An implementation of Object.toString() should never return null.", category = JDK, severity = WARNING) public class ToStringReturnsNull extends BugChecker implements MethodTreeMatcher { private static final Matcher<MethodTree> TO_STRING = allOf(Matchers.methodIsNamed("toString"), Matchers.methodHasParameters()); private static class ReturnNullMatcher implements Matcher<Tree> { @Override public boolean matches(Tree tree, VisitorState state) { return tree instanceof ReturnTree && ((ReturnTree) tree).getExpression().getKind() == NULL_LITERAL; } } @Override public Description matchMethod(MethodTree tree, VisitorState state) { if (!TO_STRING.matches(tree, state)) { return Description.NO_MATCH; } boolean hasReturnNull = tree.accept( new TreeScanner<Boolean, Void>() { @Override public Boolean visitLambdaExpression(LambdaExpressionTree node, Void unused) { return false; } @Override public Boolean visitClass(ClassTree node, Void unused) { return false; } @Override public Boolean visitReturn(ReturnTree node, Void unused) { return node.getExpression().getKind() == NULL_LITERAL; } @Override public Boolean reduce(Boolean r1, Boolean r2) { return (r1 != null && r1) || (r2 != null && r2); } }, null); return hasReturnNull ? describeMatch(tree) : Description.NO_MATCH; } }
Remove unused ReturnNullMatcher. RELNOTES: n/a ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=201992197
core/src/main/java/com/google/errorprone/bugpatterns/ToStringReturnsNull.java
Remove unused ReturnNullMatcher.
<ide><path>core/src/main/java/com/google/errorprone/bugpatterns/ToStringReturnsNull.java <ide> import com.sun.source.tree.LambdaExpressionTree; <ide> import com.sun.source.tree.MethodTree; <ide> import com.sun.source.tree.ReturnTree; <del>import com.sun.source.tree.Tree; <ide> import com.sun.source.util.TreeScanner; <ide> <ide> /** <ide> <ide> private static final Matcher<MethodTree> TO_STRING = <ide> allOf(Matchers.methodIsNamed("toString"), Matchers.methodHasParameters()); <del> <del> private static class ReturnNullMatcher implements Matcher<Tree> { <del> @Override <del> public boolean matches(Tree tree, VisitorState state) { <del> return tree instanceof ReturnTree <del> && ((ReturnTree) tree).getExpression().getKind() == NULL_LITERAL; <del> } <del> } <ide> <ide> @Override <ide> public Description matchMethod(MethodTree tree, VisitorState state) {
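With the unused matcher gone, the TreeScanner in matchMethod carries the whole check. A constructed example of what it reports and what it deliberately skips (illustrative code, not taken from the Error Prone test suite):

import java.util.function.Supplier;

class ToStringExamples {

    private String name;

    @Override
    public String toString() {
        if (name == null) {
            return null; // reported: the return expression's kind is NULL_LITERAL
        }
        // Not reported: visitLambdaExpression() returns false without scanning the
        // lambda body, so this inner return never reaches visitReturn().
        Supplier<String> fallback = () -> { return null; };
        return name; // not reported: an identifier, not a null literal
    }
}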
Java
mit
68056e36d64cb64368cb67abe05ae3e906de210a
0
csmith/DMDirc-Plugins,csmith/DMDirc-Plugins,csmith/DMDirc-Plugins,csmith/DMDirc-Plugins,DMDirc/Plugins,DMDirc/Plugins,DMDirc/Plugins,csmith/DMDirc-Plugins,DMDirc/Plugins,DMDirc/Plugins
/* * Copyright (c) 2006-2010 Chris Smith, Shane Mc Cormack, Gregory Holmes * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.dmdirc.addons.ui_swing.textpane; import java.awt.event.ActionEvent; import javax.swing.AbstractAction; /** * Copy action. */ public class TextPaneCopyAction extends AbstractAction { /** * A version number for this class. It should be changed whenever the class * structure is changed (or anything else that would prevent serialized * objects being unserialized with the new class). */ private static final long serialVersionUID = 1; /** TextPane instance. */ private TextPane textpane; /** * Instantiates a new action. * * @param textpane Textpane */ public TextPaneCopyAction(final TextPane textpane) { super("Copy"); this.textpane = textpane; } /** * {@inheritDoc} * @param e Action event */ @Override public void actionPerformed(final ActionEvent e) { textpane.copy(); } /** {@inheritDoc}. */ @Override public boolean isEnabled() { return textpane.hasSelectedRange(); } }
src/com/dmdirc/addons/ui_swing/textpane/TextPaneCopyAction.java
/* * Copyright (c) 2006-2010 Chris Smith, Shane Mc Cormack, Gregory Holmes * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.dmdirc.addons.ui_swing.textpane; import java.awt.event.ActionEvent; import javax.swing.AbstractAction; /** * Copy action. */ public class TextPaneCopyAction extends AbstractAction { /** * A version number for this class. It should be changed whenever the class * structure is changed (or anything else that would prevent serialized * objects being unserialized with the new class). */ private static final long serialVersionUID = 1; /** TextPane instance. */ private TextPane textpane; /** * Instantiates a new action. * * @param textpane Textpane */ public TextPaneCopyAction(final TextPane textpane) { super("Copy"); this.textpane = textpane; } /** * {@inheritDoc}. * @param e Action event */ @Override public void actionPerformed(ActionEvent e) { textpane.copy(); } }
Check if there is text to select before enabling copy item. Fixes issue 4351 Change-Id: I2d14306b26843c1982c27569e918a95c034baba7 Reviewed-on: http://gerrit.dmdirc.com/1461 Reviewed-by: Chris Smith <[email protected]> Automatic-Compile: Gregory Holmes <[email protected]>
src/com/dmdirc/addons/ui_swing/textpane/TextPaneCopyAction.java
Check if there is text to select before enabling copy item.
<ide><path>src/com/dmdirc/addons/ui_swing/textpane/TextPaneCopyAction.java <ide> } <ide> <ide> /** <del> * {@inheritDoc}. <add> * {@inheritDoc} <ide> * @param e Action event <ide> */ <ide> @Override <del> public void actionPerformed(ActionEvent e) { <add> public void actionPerformed(final ActionEvent e) { <ide> textpane.copy(); <ide> } <add> <add> /** {@inheritDoc}. */ <add> @Override <add> public boolean isEnabled() { <add> return textpane.hasSelectedRange(); <add> } <ide> }
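One caveat the diff does not show: Swing components bound to an Action read isEnabled() when they are configured from it and afterwards only react to "enabled" property-change events, which this override never fires. A hedged usage sketch, assuming the popup menu is built on demand so the override is re-evaluated every time the menu appears:

import javax.swing.JPopupMenu;

class TextPaneMenus {

    // Rebuilding the menu per popup makes the new JMenuItem call isEnabled()
    // at show time, so Copy greys out whenever nothing is selected.
    static JPopupMenu buildPopup(final TextPane textpane) {
        final JPopupMenu menu = new JPopupMenu();
        menu.add(new TextPaneCopyAction(textpane));
        return menu;
    }
}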
Java
unlicense
ebdc0a0a4f9b1fa657d5ae3a447090443daec8b5
0
Phylogeny/ExtraBitManipulation
package com.phylogeny.extrabitmanipulation; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.entity.RenderItem; import net.minecraft.client.resources.model.ModelResourceLocation; import net.minecraft.item.Item; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.Mod.EventHandler; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import net.minecraftforge.fml.common.network.NetworkRegistry; import net.minecraftforge.fml.common.network.simpleimpl.SimpleNetworkWrapper; import net.minecraftforge.fml.relauncher.Side; import com.phylogeny.extrabitmanipulation.client.config.ConfigHandlerExtraBitManipulation; import com.phylogeny.extrabitmanipulation.client.eventhandler.ClientEventHandler; import com.phylogeny.extrabitmanipulation.init.ItemsExtraBitManipulation; import com.phylogeny.extrabitmanipulation.init.PacketRegistration; import com.phylogeny.extrabitmanipulation.init.RecipesExtraBitManipulation; import com.phylogeny.extrabitmanipulation.item.ItemBitWrench; import com.phylogeny.extrabitmanipulation.item.ItemSculptingLoop; import com.phylogeny.extrabitmanipulation.reference.Reference; @Mod(modid = Reference.MOD_ID, version = Reference.VERSION, guiFactory = Reference.GUI_FACTORY_CLASSPATH, dependencies = "required-after:chiselsandbits") public class ExtraBitManipulation { public static SimpleNetworkWrapper packetNetwork = NetworkRegistry.INSTANCE.newSimpleChannel(Reference.MOD_ID); @EventHandler public void preinit(FMLPreInitializationEvent event) { ItemsExtraBitManipulation.itemsInit(); ConfigHandlerExtraBitManipulation.setUpConfigs(event.getSuggestedConfigurationFile()); MinecraftForge.EVENT_BUS.register(new ConfigHandlerExtraBitManipulation()); PacketRegistration.registerPackets(); } @EventHandler public void init(FMLInitializationEvent event) { RecipesExtraBitManipulation.recipeInit(); if(event.getSide() == Side.CLIENT) { MinecraftForge.EVENT_BUS.register(new ClientEventHandler()); RenderItem renderItem = Minecraft.getMinecraft().getRenderItem(); register(renderItem, ItemsExtraBitManipulation.BitWrench, ((ItemBitWrench) ItemsExtraBitManipulation.BitWrench).getName()); register(renderItem, ItemsExtraBitManipulation.SculptingLoop, ((ItemSculptingLoop) ItemsExtraBitManipulation.SculptingLoop).getName()); } } private void register(RenderItem renderItem, Item item, String name) { renderItem.getItemModelMesher().register(item, 0, new ModelResourceLocation(Reference.MOD_ID + ":" + name, "inventory")); } }
src/main/java/com/phylogeny/extrabitmanipulation/ExtraBitManipulation.java
package com.phylogeny.extrabitmanipulation; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.entity.RenderItem; import net.minecraft.client.resources.model.ModelResourceLocation; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.Mod.EventHandler; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import net.minecraftforge.fml.common.network.NetworkRegistry; import net.minecraftforge.fml.common.network.simpleimpl.SimpleNetworkWrapper; import net.minecraftforge.fml.relauncher.Side; import com.phylogeny.extrabitmanipulation.client.config.ConfigHandlerExtraBitManipulation; import com.phylogeny.extrabitmanipulation.client.eventhandler.ClientEventHandler; import com.phylogeny.extrabitmanipulation.init.ItemsExtraBitManipulation; import com.phylogeny.extrabitmanipulation.init.RecipesExtraBitManipulation; import com.phylogeny.extrabitmanipulation.item.ItemBitWrench; import com.phylogeny.extrabitmanipulation.packet.PacketCycleWrench; import com.phylogeny.extrabitmanipulation.reference.Reference; @Mod(modid = Reference.MOD_ID, version = Reference.VERSION, guiFactory = Reference.GUI_FACTORY_CLASSPATH, dependencies = "required-after:chiselsandbits") public class ExtraBitManipulation { public static SimpleNetworkWrapper packetNetwork = NetworkRegistry.INSTANCE.newSimpleChannel(Reference.MOD_ID); @EventHandler public void preinit(FMLPreInitializationEvent event) { ItemsExtraBitManipulation.itemsInit(); ConfigHandlerExtraBitManipulation.setUpConfigs(event.getSuggestedConfigurationFile()); MinecraftForge.EVENT_BUS.register(new ConfigHandlerExtraBitManipulation()); packetNetwork.registerMessage(PacketCycleWrench.Handler.class, PacketCycleWrench.class, 0, Side.SERVER); } @EventHandler public void init(FMLInitializationEvent event) { RecipesExtraBitManipulation.recipeInit(); if(event.getSide() == Side.CLIENT) { MinecraftForge.EVENT_BUS.register(new ClientEventHandler()); RenderItem renderItem = Minecraft.getMinecraft().getRenderItem(); renderItem.getItemModelMesher().register(ItemsExtraBitManipulation.BitWrench, 0, new ModelResourceLocation(Reference.MOD_ID + ":" + ((ItemBitWrench) ItemsExtraBitManipulation.BitWrench).getName(), "inventory")); } } }
Moved packet registration to separate class and registered Sculpting Loop model.
src/main/java/com/phylogeny/extrabitmanipulation/ExtraBitManipulation.java
Moved packet registration to separate class and registered Sculpting Loop model.
<ide><path>rc/main/java/com/phylogeny/extrabitmanipulation/ExtraBitManipulation.java
<ide> import net.minecraft.client.Minecraft;
<ide> import net.minecraft.client.renderer.entity.RenderItem;
<ide> import net.minecraft.client.resources.model.ModelResourceLocation;
<add>import net.minecraft.item.Item;
<ide> import net.minecraftforge.common.MinecraftForge;
<ide> import net.minecraftforge.fml.common.Mod;
<ide> import net.minecraftforge.fml.common.Mod.EventHandler;
<ide> import com.phylogeny.extrabitmanipulation.client.config.ConfigHandlerExtraBitManipulation;
<ide> import com.phylogeny.extrabitmanipulation.client.eventhandler.ClientEventHandler;
<ide> import com.phylogeny.extrabitmanipulation.init.ItemsExtraBitManipulation;
<add>import com.phylogeny.extrabitmanipulation.init.PacketRegistration;
<ide> import com.phylogeny.extrabitmanipulation.init.RecipesExtraBitManipulation;
<ide> import com.phylogeny.extrabitmanipulation.item.ItemBitWrench;
<del>import com.phylogeny.extrabitmanipulation.packet.PacketCycleWrench;
<add>import com.phylogeny.extrabitmanipulation.item.ItemSculptingLoop;
<ide> import com.phylogeny.extrabitmanipulation.reference.Reference;
<ide>
<ide> @Mod(modid = Reference.MOD_ID, version = Reference.VERSION, guiFactory = Reference.GUI_FACTORY_CLASSPATH, dependencies = "required-after:chiselsandbits")
<ide>         ItemsExtraBitManipulation.itemsInit();
<ide>         ConfigHandlerExtraBitManipulation.setUpConfigs(event.getSuggestedConfigurationFile());
<ide>         MinecraftForge.EVENT_BUS.register(new ConfigHandlerExtraBitManipulation());
<del>        packetNetwork.registerMessage(PacketCycleWrench.Handler.class, PacketCycleWrench.class, 0, Side.SERVER);
<add>        PacketRegistration.registerPackets();
<ide>     }
<ide>
<ide>     @EventHandler
<ide>         {
<ide>             MinecraftForge.EVENT_BUS.register(new ClientEventHandler());
<ide>             RenderItem renderItem = Minecraft.getMinecraft().getRenderItem();
<del>            renderItem.getItemModelMesher().register(ItemsExtraBitManipulation.BitWrench, 0, new ModelResourceLocation(Reference.MOD_ID + ":" + ((ItemBitWrench) ItemsExtraBitManipulation.BitWrench).getName(), "inventory"));
<add>            register(renderItem, ItemsExtraBitManipulation.BitWrench, ((ItemBitWrench) ItemsExtraBitManipulation.BitWrench).getName());
<add>            register(renderItem, ItemsExtraBitManipulation.SculptingLoop, ((ItemSculptingLoop) ItemsExtraBitManipulation.SculptingLoop).getName());
<ide>         }
<add>    }
<add>
<add>    private void register(RenderItem renderItem, Item item, String name)
<add>    {
<add>        renderItem.getItemModelMesher().register(item, 0, new ModelResourceLocation(Reference.MOD_ID + ":" + name, "inventory"));
<ide>     }
<ide>
<ide> }
Java
lgpl-2.1
134eca7dc0818f44812dfea9e562e9e81d01a0f0
0
cwarden/kettle,juanmjacobs/kettle,juanmjacobs/kettle,cwarden/kettle,juanmjacobs/kettle,cwarden/kettle
/* * Copyright (c) 2007 Pentaho Corporation. All rights reserved. * This software was developed by Pentaho Corporation and is provided under the terms * of the GNU Lesser General Public License, Version 2.1. You may not use * this file except in compliance with the license. If you need a copy of the license, * please go to http://www.gnu.org/licenses/lgpl-2.1.txt. The Original Code is Samatar Hassan * The Initial Developer is Samatar Hassan. * * Software distributed under the GNU Lesser Public License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. Please refer to * the license for the specific language governing your rights and limitations. */ package org.pentaho.di.trans.steps.checksum; import java.security.MessageDigest; import java.util.zip.Adler32; import java.util.zip.CRC32; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowDataUtil; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; /** * Caculate a checksum for each row. * * @author Samatar Hassan * @since 30-06-2008 */ public class CheckSum extends BaseStep implements StepInterface { private static Class<?> PKG = CheckSumMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$ private CheckSumMeta meta; private CheckSumData data; public CheckSum(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans) { super(stepMeta, stepDataInterface, copyNr, transMeta, trans); } public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException { meta = (CheckSumMeta) smi; data = (CheckSumData) sdi; Object[] r = getRow(); // get row, set busy! if (r == null) // no more input to be expected... 
{ setOutputDone(); return false; } if (first) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields(data.outputRowMeta, getStepname(), null, null, this); if (meta.getFieldName() == null || meta.getFieldName().length > 0) { data.fieldnrs = new int[meta.getFieldName().length]; for (int i = 0; i < meta.getFieldName().length; i++) { data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i]); if (data.fieldnrs[i] < 0) { logError(BaseMessages.getString(PKG, "CheckSum.Log.CanNotFindField", meta.getFieldName()[i])); throw new KettleException(BaseMessages.getString(PKG, "CheckSum.Log.CanNotFindField", meta.getFieldName()[i])); } } } else { data.fieldnrs = new int[r.length]; for (int i = 0; i < r.length; i++) { data.fieldnrs[i] = i; } } data.fieldnr = data.fieldnrs.length; } // end if first boolean sendToErrorRow = false; String errorMessage = null; Object[] outputRowData = null; try { if (meta.getCheckSumType().equals(CheckSumMeta.TYPE_ADLER32) || meta.getCheckSumType().equals(CheckSumMeta.TYPE_CRC32)) { // get checksum Long checksum=calculCheckSum(r); outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), checksum); } else { // get checksum byte[] o= createCheckSum(r); switch(meta.getResultType()) { case CheckSumMeta.result_TYPE_BINARY : outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), o); break; case CheckSumMeta.result_TYPE_HEXADECIMAL : outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), byteToHexEncode(o)); break; default: outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), getStringFromBytes(o)); break; } } if (checkFeedback(getLinesRead())) { if (log.isDetailed()) logDetailed(BaseMessages.getString(PKG, "CheckSum.Log.LineNumber", "" + getLinesRead())); //$NON-NLS-1$ } // add new values to the row. 
putRow(data.outputRowMeta, outputRowData); // copy row to output // rowset(s); } catch (Exception e) { if (getStepMeta().isDoingErrorHandling()) { sendToErrorRow = true; errorMessage = e.toString(); } else { logError(BaseMessages.getString(PKG, "CheckSum.ErrorInStepRunning") + e.getMessage()); //$NON-NLS-1$ setErrors(1); stopAll(); setOutputDone(); // signal end to receiver(s) return false; } if (sendToErrorRow) { // Simply add this row to the error row putError(getInputRowMeta(), r, 1, errorMessage, meta .getResultFieldName(), "CheckSum001"); } } return true; } private byte[] createCheckSum(Object[] r) throws Exception { StringBuffer Buff = new StringBuffer(); // Loop through fields for (int i = 0; i < data.fieldnr; i++) { String fieldvalue = getInputRowMeta() .getString(r, data.fieldnrs[i]); Buff.append(fieldvalue); } MessageDigest digest; if(meta.getCheckSumType().equals(CheckSumMeta.TYPE_MD5)) digest = MessageDigest.getInstance(CheckSumMeta.TYPE_MD5); else digest = MessageDigest.getInstance(CheckSumMeta.TYPE_SHA1); digest.update(Buff.toString().getBytes()); byte[] hash = digest.digest(); return hash; } private static String getStringFromBytes(byte[] bytes) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < bytes.length; i++) { byte b = bytes[i]; sb.append((int) (0x00FF & b)); if (i + 1 < bytes.length) { sb.append("-"); } } return sb.toString(); } public String byteToHexEncode(byte[] in) { if(in==null) return null; final char hexDigits[] ={ '0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F' }; StringBuffer hexString = new StringBuffer(2 * in.length); for (int i = 0; i < in.length; i++) { hexString.append(hexDigits[(in[i] & 0x00F0) >> 4]); // high nibble hexString.append(hexDigits[in[i] & 0x000F]); // low nibble } return hexString.toString(); } private Long calculCheckSum(Object[] r) throws Exception { Long retval; StringBuffer Buff = new StringBuffer(); // Loop through fields for (int i = 0; i < data.fieldnr; i++) { String fieldvalue = getInputRowMeta() .getString(r, data.fieldnrs[i]); Buff.append(fieldvalue); } if (meta.getCheckSumType().equals("CRC32")) { CRC32 crc32 = new CRC32(); crc32.update(Buff.toString().getBytes()); retval = new Long(crc32.getValue()); } else { Adler32 adler32 = new Adler32(); adler32.update(Buff.toString().getBytes()); retval = new Long(adler32.getValue()); } return retval; } public boolean init(StepMetaInterface smi, StepDataInterface sdi) { meta = (CheckSumMeta) smi; data = (CheckSumData) sdi; if (super.init(smi, sdi)) { if (Const.isEmpty(meta.getResultFieldName())) { logError(BaseMessages.getString(PKG, "CheckSum.Error.ResultFieldMissing")); return false; } return true; } return false; } }
src/org/pentaho/di/trans/steps/checksum/CheckSum.java
/* * Copyright (c) 2007 Pentaho Corporation. All rights reserved. * This software was developed by Pentaho Corporation and is provided under the terms * of the GNU Lesser General Public License, Version 2.1. You may not use * this file except in compliance with the license. If you need a copy of the license, * please go to http://www.gnu.org/licenses/lgpl-2.1.txt. The Original Code is Samatar Hassan * The Initial Developer is Samatar Hassan. * * Software distributed under the GNU Lesser Public License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. Please refer to * the license for the specific language governing your rights and limitations. */ package org.pentaho.di.trans.steps.checksum; import java.security.MessageDigest; import java.util.zip.Adler32; import java.util.zip.CRC32; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowDataUtil; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; /** * Caculate a checksum for each row. * * @author Samatar Hassan * @since 30-06-2008 */ public class CheckSum extends BaseStep implements StepInterface { private static Class<?> PKG = CheckSumMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$ private CheckSumMeta meta; private CheckSumData data; public CheckSum(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans) { super(stepMeta, stepDataInterface, copyNr, transMeta, trans); } public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException { meta = (CheckSumMeta) smi; data = (CheckSumData) sdi; Object[] r = getRow(); // get row, set busy! if (r == null) // no more input to be expected... 
{ setOutputDone(); return false; } if (first) { first = false; data.outputRowMeta = getInputRowMeta().clone(); meta.getFields(data.outputRowMeta, getStepname(), null, null, this); if (meta.getFieldName() == null || meta.getFieldName().length > 0) { data.fieldnrs = new int[meta.getFieldName().length]; for (int i = 0; i < meta.getFieldName().length; i++) { data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i]); if (data.fieldnrs[i] < 0) { logError(BaseMessages.getString(PKG, "CheckSum.Log.CanNotFindField", meta.getFieldName()[i])); throw new KettleException(BaseMessages.getString(PKG, "CheckSum.Log.CanNotFindField", meta.getFieldName()[i])); } } } else { data.fieldnrs = new int[r.length]; for (int i = 0; i < r.length; i++) { data.fieldnrs[i] = i; } } data.fieldnr = data.fieldnrs.length; } // end if first boolean sendToErrorRow = false; String errorMessage = null; Object[] outputRowData = null; try { if (meta.getCheckSumType().equals(CheckSumMeta.TYPE_ADLER32) || meta.getCheckSumType().equals(CheckSumMeta.TYPE_CRC32)) { // get checksum Long checksum=calculCheckSum(r); outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), checksum); } else { // get checksum byte[] o= createCheckSum(r); switch(meta.getResultType()) { case CheckSumMeta.result_TYPE_BINARY : outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), o); break; case CheckSumMeta.result_TYPE_HEXADECIMAL : outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), byteToHexEncode(o)); break; default: outputRowData = RowDataUtil.addValueData(r, getInputRowMeta().size(), getStringFromBytes(o)); break; } } if (checkFeedback(getLinesRead())) { if (log.isDetailed()) logDetailed(BaseMessages.getString(PKG, "CheckSum.Log.LineNumber", "" + getLinesRead())); //$NON-NLS-1$ } // add new values to the row. 
putRow(data.outputRowMeta, outputRowData); // copy row to output // rowset(s); } catch (Exception e) { if (getStepMeta().isDoingErrorHandling()) { sendToErrorRow = true; errorMessage = e.toString(); } else { logError(BaseMessages.getString(PKG, "CheckSum.ErrorInStepRunning") + e.getMessage()); //$NON-NLS-1$ setErrors(1); stopAll(); setOutputDone(); // signal end to receiver(s) return false; } if (sendToErrorRow) { // Simply add this row to the error row putError(getInputRowMeta(), r, 1, errorMessage, meta .getResultFieldName(), "CheckSum001"); } } return true; } private byte[] createCheckSum(Object[] r) throws Exception { StringBuffer Buff = new StringBuffer(); // Loop through fields for (int i = 0; i < data.fieldnr; i++) { String fieldvalue = getInputRowMeta() .getString(r, data.fieldnrs[i]); Buff.append(fieldvalue); } MessageDigest digest; if(meta.getCheckSumType().equals(CheckSumMeta.TYPE_MD5)) digest = MessageDigest.getInstance(CheckSumMeta.TYPE_MD5); else digest = MessageDigest.getInstance(CheckSumMeta.TYPE_SHA1); digest.update(Buff.toString().getBytes()); byte[] hash = digest.digest(); return hash; } private static String getStringFromBytes(byte[] bytes) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < bytes.length; i++) { byte b = bytes[i]; sb.append((int) (0x00FF & b)); if (i + 1 < bytes.length) { sb.append("-"); } } return sb.toString(); } public String byteToHexEncode(byte[] in) { if(in==null) return null; final char hexDigits[] ={ '0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F' }; String hex = new String(in); char[] s = hex.toCharArray(); StringBuffer hexString = new StringBuffer(2 * s.length); for (int i = 0; i < s.length; i++) { hexString.append(hexDigits[(s[i] & 0x00F0) >> 4]); // hi nibble hexString.append(hexDigits[s[i] & 0x000F]); // lo nibble } return hexString.toString(); } private Long calculCheckSum(Object[] r) throws Exception { Long retval; StringBuffer Buff = new StringBuffer(); // Loop through fields for (int i = 0; i < data.fieldnr; i++) { String fieldvalue = getInputRowMeta() .getString(r, data.fieldnrs[i]); Buff.append(fieldvalue); } if (meta.getCheckSumType().equals("CRC32")) { CRC32 crc32 = new CRC32(); crc32.update(Buff.toString().getBytes()); retval = new Long(crc32.getValue()); } else { Adler32 adler32 = new Adler32(); adler32.update(Buff.toString().getBytes()); retval = new Long(adler32.getValue()); } return retval; } public boolean init(StepMetaInterface smi, StepDataInterface sdi) { meta = (CheckSumMeta) smi; data = (CheckSumData) sdi; if (super.init(smi, sdi)) { if (Const.isEmpty(meta.getResultFieldName())) { logError(BaseMessages.getString(PKG, "CheckSum.Error.ResultFieldMissing")); return false; } return true; } return false; } }
PDI5190: MD5 Checksum step gives incorrect results

git-svn-id: 51b39fcfd0d3a6ea7caa15377cad4af13b9d2664@14678 5fb7f6ec-07c1-534a-b4ca-9155e429e800
src/org/pentaho/di/trans/steps/checksum/CheckSum.java
PDI5190: MD5 Checksum step gives incorrect results
<ide><path>rc/org/pentaho/di/trans/steps/checksum/CheckSum.java
<ide>         if(in==null) return null;
<ide>         final char hexDigits[] ={ '0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F' };
<ide>
<del>        String hex = new String(in);
<add>        StringBuffer hexString = new StringBuffer(2 * in.length);
<ide>
<del>        char[] s = hex.toCharArray();
<del>        StringBuffer hexString = new StringBuffer(2 * s.length);
<del>
<del>        for (int i = 0; i < s.length; i++)
<add>        for (int i = 0; i < in.length; i++)
<ide>         {
<del>            hexString.append(hexDigits[(s[i] & 0x00F0) >> 4]); // hi nibble
<del>            hexString.append(hexDigits[s[i] & 0x000F]); // lo nibble
<add>            hexString.append(hexDigits[(in[i] & 0x00F0) >> 4]); // high nibble
<add>            hexString.append(hexDigits[in[i] & 0x000F]); // low nibble
<ide>         }
<ide>
<ide>         return hexString.toString();
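The bug this diff fixes deserves a word: new String(in) decodes the raw digest bytes through the platform default charset, which remaps or drops any byte outside that charset's range, so the old code produced wrong and platform-dependent hex for most MD5/SHA-1 digests. Hex digits have to be taken from the bytes directly. A self-contained sketch of the corrected approach (HexDemo and toHex are illustrative names, not part of the Kettle step):

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class HexDemo {

    private static final char[] HEX = "0123456789ABCDEF".toCharArray();

    // Encodes raw bytes as hex without round-tripping through a String,
    // mirroring the corrected byteToHexEncode above.
    static String toHex(byte[] in) {
        StringBuilder sb = new StringBuilder(2 * in.length);
        for (byte b : in) {
            sb.append(HEX[(b & 0xF0) >> 4]); // high nibble
            sb.append(HEX[b & 0x0F]);        // low nibble
        }
        return sb.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance("MD5").digest("abc".getBytes());
        System.out.println(toHex(digest)); // 900150983CD24FB0D6963F7D28E17F72
    }
}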
Java
apache-2.0
ff03d29d1a67741b5acabcc2e1aed45618a82252
0
cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x
/* * Copyright 2002-2007 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.dao.annotation; import java.lang.annotation.Annotation; import java.util.LinkedList; import java.util.List; import org.springframework.aop.framework.Advised; import org.springframework.aop.framework.ProxyFactory; import org.springframework.aop.support.AopUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.BeanFactoryAware; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.core.Ordered; import org.springframework.dao.support.ChainedPersistenceExceptionTranslator; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.stereotype.Repository; import org.springframework.util.Assert; /** * Bean post-processor that automatically applies persistence exception * translation to any bean that carries the * {@link org.springframework.stereotype.Repository} annotation, * adding a corresponding {@link PersistenceExceptionTranslationAdvisor} * to the exposed proxy (either an existing AOP proxy or a newly generated * proxy that implements all of the target's interfaces). * * <p>Translates native resource exceptions to Spring's * {@link org.springframework.dao.DataAccessException} hierarchy. * Autodetects beans that implement the * {@link org.springframework.dao.support.PersistenceExceptionTranslator} * interface, which are subsequently asked to translate candidate exceptions. * * <p>All of Spring's applicable resource factories implement the * <code>PersistenceExceptionTranslator</code> interface out of the box. * As a consequence, all that is usually needed to enable automatic exception * translation is marking all affected beans (such as DAOs) with the * <code>Repository</code> annotation, along with defining this post-processor * as bean in the application context. * * @author Rod Johnson * @author Juergen Hoeller * @since 2.0 * @see PersistenceExceptionTranslationAdvisor * @see org.springframework.stereotype.Repository * @see org.springframework.dao.DataAccessException * @see org.springframework.dao.support.PersistenceExceptionTranslator */ public class PersistenceExceptionTranslationPostProcessor implements BeanPostProcessor, BeanFactoryAware, Ordered { private Class<? extends Annotation> repositoryAnnotationType = Repository.class; private PersistenceExceptionTranslationAdvisor persistenceExceptionTranslationAdvisor; /** * Set the 'repository' annotation type. * The default required annotation type is the {@link Repository} annotation. * <p>This setter property exists so that developers can provide their own * (non-Spring-specific) annotation type to indicate that a class has a * repository role. * @param repositoryAnnotationType the desired annotation type */ public void setRepositoryAnnotationType(Class<? 
extends Annotation> repositoryAnnotationType) { Assert.notNull(repositoryAnnotationType, "'requiredAnnotationType' must not be null"); this.repositoryAnnotationType = repositoryAnnotationType; } public void setBeanFactory(BeanFactory beanFactory) throws BeansException { if (!(beanFactory instanceof ListableBeanFactory)) { throw new IllegalArgumentException("Cannot use " + getClass().getSimpleName() + " without ListableBeanFactory"); } ListableBeanFactory lbf = (ListableBeanFactory) beanFactory; // Find all translators, being careful not to activate FactoryBeans List<PersistenceExceptionTranslator> pets = new LinkedList<PersistenceExceptionTranslator>(); for (String petBeanName : lbf.getBeanNamesForType(PersistenceExceptionTranslator.class, false, false)) { pets.add((PersistenceExceptionTranslator) lbf.getBean(petBeanName)); } pets = validateAndFilter(pets); ChainedPersistenceExceptionTranslator cpet = new ChainedPersistenceExceptionTranslator(); for (PersistenceExceptionTranslator pet : pets) { cpet.addDelegate(pet); } this.persistenceExceptionTranslationAdvisor = new PersistenceExceptionTranslationAdvisor(cpet, this.repositoryAnnotationType); } public int getOrder() { // This should run after all other post-processors, so that it can just add // an advisor to existing proxies rather than double-proxy. return LOWEST_PRECEDENCE; } public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { return bean; } public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { Class<?> targetClass = (bean instanceof Advised ? ((Advised) bean).getTargetSource().getTargetClass() : bean.getClass()); if (targetClass == null) { // Can't do much here return bean; } if (AopUtils.canApply(this.persistenceExceptionTranslationAdvisor, targetClass)) { if (bean instanceof Advised) { ((Advised) bean).addAdvisor(this.persistenceExceptionTranslationAdvisor); return bean; } else { ProxyFactory pf = new ProxyFactory(bean); pf.addAdvisor(this.persistenceExceptionTranslationAdvisor); return pf.getProxy(targetClass.getClassLoader()); } } else { // This is not a repository. return bean; } } /** * Validate and filter the given PersistenceExceptionTranslators. */ protected List<PersistenceExceptionTranslator> validateAndFilter(List<PersistenceExceptionTranslator> allPets) throws IllegalStateException { List<PersistenceExceptionTranslator> filteredPets = new LinkedList<PersistenceExceptionTranslator>(); for (PersistenceExceptionTranslator pet : allPets) { // TODO filter according to rules: one of each class etc. filteredPets.add(pet); } if (filteredPets.isEmpty()) { throw new IllegalStateException( "No persistence exception translators found. Cannot translate. Remove this PostProcessor"); } return filteredPets; } }
tiger/src/org/springframework/dao/annotation/PersistenceExceptionTranslationPostProcessor.java
/* * Copyright 2002-2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.dao.annotation; import java.lang.annotation.Annotation; import java.util.LinkedList; import java.util.List; import org.springframework.aop.framework.Advised; import org.springframework.aop.framework.ProxyFactory; import org.springframework.aop.support.AopUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.BeanFactoryAware; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.core.Ordered; import org.springframework.dao.support.ChainedPersistenceExceptionTranslator; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.stereotype.Repository; import org.springframework.util.Assert; /** * Bean post-processor that automatically applies persistence exception * translation to any bean that carries the Repository annotation. * * <p>Translates native resource exceptions to Spring's DataAccessException hierarchy. * Autodetects beans that implement the PersistenceExceptionTranslator interface, * which are subsequently asked to translate candidate exceptions. * * <p>All of Spring's applicable resource factories implement the * PersistenceExceptionTranslator interface out of the box. As a consequence, * all that is usually needed to enable automatic exception translation is * marking all affected beans (such as DAOs) with the Repository annotation, * along with defining this post-processor as bean in the application context. * * @author Rod Johnson * @author Juergen Hoeller * @since 2.0 * @see org.springframework.stereotype.Repository * @see org.springframework.dao.DataAccessException * @see PersistenceExceptionTranslator * @see PersistenceExceptionTranslationAdvisor */ public class PersistenceExceptionTranslationPostProcessor implements BeanPostProcessor, BeanFactoryAware, Ordered { private Class<? extends Annotation> repositoryAnnotationType = Repository.class; private PersistenceExceptionTranslationAdvisor persistenceExceptionTranslationAdvisor; /** * Set the 'repository' annotation type. * The default required annotation type is the {@link Repository} annotation. * <p>This setter property exists so that developers can provide their own * (non-Spring-specific) annotation type to indicate that a class has a * repository role. * @param repositoryAnnotationType the desired annotation type */ public void setRepositoryAnnotationType(Class<? 
extends Annotation> repositoryAnnotationType) { Assert.notNull(repositoryAnnotationType, "requiredAnnotationType must not be null"); this.repositoryAnnotationType = repositoryAnnotationType; } public void setBeanFactory(BeanFactory beanFactory) throws BeansException { if (!(beanFactory instanceof ListableBeanFactory)) { throw new IllegalArgumentException("Cannot use " + getClass().getSimpleName() + " without ListableBeanFactory"); } ListableBeanFactory lbf = (ListableBeanFactory) beanFactory; // Find all translators, being careful not to activate FactoryBeans List<PersistenceExceptionTranslator> pets = new LinkedList<PersistenceExceptionTranslator>(); for (String petBeanName : lbf.getBeanNamesForType(PersistenceExceptionTranslator.class, false, false)) { pets.add((PersistenceExceptionTranslator) lbf.getBean(petBeanName)); } pets = validateAndFilter(pets); ChainedPersistenceExceptionTranslator cpet = new ChainedPersistenceExceptionTranslator(); for (PersistenceExceptionTranslator pet : pets) { cpet.addDelegate(pet); } this.persistenceExceptionTranslationAdvisor = new PersistenceExceptionTranslationAdvisor(cpet, this.repositoryAnnotationType); } public int getOrder() { // This should run after all other post-processors, so that it can just add // an advisor to existing proxies rather than double-proxy. return LOWEST_PRECEDENCE; } public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { return bean; } public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { Class<?> targetClass; if (bean instanceof Advised) { Advised advised = (Advised) bean; targetClass = advised.getTargetSource().getTargetClass(); } else { targetClass = bean.getClass(); } if (targetClass == null) { // Can't do much here return bean; } if (AopUtils.canApply(this.persistenceExceptionTranslationAdvisor, targetClass)) { if (bean instanceof Advised) { Advised advised = (Advised) bean; advised.addAdvisor(this.persistenceExceptionTranslationAdvisor); return bean; } else { ProxyFactory pf = new ProxyFactory(bean); pf.addAdvisor(this.persistenceExceptionTranslationAdvisor); return pf.getProxy(); } } else { // This is not a repository. return bean; } } /** * Validate and filter the given PersistenceExceptionTranslators. */ protected List<PersistenceExceptionTranslator> validateAndFilter(List<PersistenceExceptionTranslator> allPets) throws IllegalStateException { List<PersistenceExceptionTranslator> filteredPets = new LinkedList<PersistenceExceptionTranslator>(); for (PersistenceExceptionTranslator pet : allPets) { // TODO filter according to rules: one of each class etc. filteredPets.add(pet); } if (filteredPets.isEmpty()) { throw new IllegalStateException( "No persistence exception translators found. Cannot translate. Remove this PostProcessor"); } return filteredPets; } }
use each bean's ClassLoader for generating translation proxies

git-svn-id: b619a0c99665f88f1afe72824344cefe9a1c8c90@12654 fd5a2b45-1f63-4059-99e9-3c7cb7fd75c8
tiger/src/org/springframework/dao/annotation/PersistenceExceptionTranslationPostProcessor.java
use each bean's ClassLoader for generating translation proxies
<ide><path>iger/src/org/springframework/dao/annotation/PersistenceExceptionTranslationPostProcessor.java
<ide> /*
<del> * Copyright 2002-2006 the original author or authors.
<add> * Copyright 2002-2007 the original author or authors.
<ide>  *
<ide>  * Licensed under the Apache License, Version 2.0 (the "License");
<ide>  * you may not use this file except in compliance with the License.
<ide>
<ide> /**
<ide>  * Bean post-processor that automatically applies persistence exception
<del> * translation to any bean that carries the Repository annotation.
<add> * translation to any bean that carries the
<add> * {@link org.springframework.stereotype.Repository} annotation,
<add> * adding a corresponding {@link PersistenceExceptionTranslationAdvisor}
<add> * to the exposed proxy (either an existing AOP proxy or a newly generated
<add> * proxy that implements all of the target's interfaces).
<ide>  *
<del> * <p>Translates native resource exceptions to Spring's DataAccessException hierarchy.
<del> * Autodetects beans that implement the PersistenceExceptionTranslator interface,
<del> * which are subsequently asked to translate candidate exceptions.
<add> * <p>Translates native resource exceptions to Spring's
<add> * {@link org.springframework.dao.DataAccessException} hierarchy.
<add> * Autodetects beans that implement the
<add> * {@link org.springframework.dao.support.PersistenceExceptionTranslator}
<add> * interface, which are subsequently asked to translate candidate exceptions.
<ide>  *
<ide>  * <p>All of Spring's applicable resource factories implement the
<del> * PersistenceExceptionTranslator interface out of the box. As a consequence,
<del> * all that is usually needed to enable automatic exception translation is
<del> * marking all affected beans (such as DAOs) with the Repository annotation,
<del> * along with defining this post-processor as bean in the application context.
<add> * <code>PersistenceExceptionTranslator</code> interface out of the box.
<add> * As a consequence, all that is usually needed to enable automatic exception
<add> * translation is marking all affected beans (such as DAOs) with the
<add> * <code>Repository</code> annotation, along with defining this post-processor
<add> * as bean in the application context.
<ide>  *
<ide>  * @author Rod Johnson
<ide>  * @author Juergen Hoeller
<ide>  * @since 2.0
<add> * @see PersistenceExceptionTranslationAdvisor
<ide>  * @see org.springframework.stereotype.Repository
<ide>  * @see org.springframework.dao.DataAccessException
<del> * @see PersistenceExceptionTranslator
<del> * @see PersistenceExceptionTranslationAdvisor
<add> * @see org.springframework.dao.support.PersistenceExceptionTranslator
<ide>  */
<ide> public class PersistenceExceptionTranslationPostProcessor implements BeanPostProcessor, BeanFactoryAware, Ordered {
<ide>
<ide>  * @param repositoryAnnotationType the desired annotation type
<ide>  */
<ide> public void setRepositoryAnnotationType(Class<? extends Annotation> repositoryAnnotationType) {
<del>    Assert.notNull(repositoryAnnotationType, "requiredAnnotationType must not be null");
<add>    Assert.notNull(repositoryAnnotationType, "'requiredAnnotationType' must not be null");
<ide>     this.repositoryAnnotationType = repositoryAnnotationType;
<ide> }
<ide>
<ide> }
<ide>
<ide> public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
<del>    Class<?> targetClass;
<del>    if (bean instanceof Advised) {
<del>        Advised advised = (Advised) bean;
<del>        targetClass = advised.getTargetSource().getTargetClass();
<del>    }
<del>    else {
<del>        targetClass = bean.getClass();
<del>    }
<del>
<add>    Class<?> targetClass =
<add>        (bean instanceof Advised ? ((Advised) bean).getTargetSource().getTargetClass() : bean.getClass());
<ide>     if (targetClass == null) {
<ide>         // Can't do much here
<ide>         return bean;
<ide>
<ide>     if (AopUtils.canApply(this.persistenceExceptionTranslationAdvisor, targetClass)) {
<ide>         if (bean instanceof Advised) {
<del>            Advised advised = (Advised) bean;
<del>            advised.addAdvisor(this.persistenceExceptionTranslationAdvisor);
<add>            ((Advised) bean).addAdvisor(this.persistenceExceptionTranslationAdvisor);
<ide>             return bean;
<ide>         }
<ide>         else {
<ide>             ProxyFactory pf = new ProxyFactory(bean);
<ide>             pf.addAdvisor(this.persistenceExceptionTranslationAdvisor);
<del>            return pf.getProxy();
<add>            return pf.getProxy(targetClass.getClassLoader());
<ide>         }
<ide>     }
<ide>     else {
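The substantive hunk in this diff is the last one: a no-argument pf.getProxy() leaves the choice of ClassLoader to Spring's default, which in multi-class-loader deployments (web containers, OSGi) may not be able to see the target bean's types; passing targetClass.getClassLoader() defines the proxy in the loader where those types live. A runnable sketch of ProxyFactory with an explicit loader — the Greeter interface and logging interceptor are invented for illustration, and the sketch assumes a current spring-aop (plus aopalliance) on the classpath rather than the Spring 2.0 sources shown here:

import org.aopalliance.intercept.MethodInterceptor;
import org.springframework.aop.framework.ProxyFactory;

public class ProxyLoaderDemo {

    public interface Greeter {
        String greet(String name);
    }

    public static void main(String[] args) {
        Greeter target = name -> "Hello, " + name;

        ProxyFactory pf = new ProxyFactory(target);
        pf.addAdvice((MethodInterceptor) invocation -> {
            System.out.println("before " + invocation.getMethod().getName());
            return invocation.proceed();
        });

        // Passing the target's own ClassLoader ensures the generated proxy
        // is defined where the target's types are visible -- the point of
        // the getProxy(...) change above.
        Greeter proxy = (Greeter) pf.getProxy(target.getClass().getClassLoader());
        System.out.println(proxy.greet("world"));
    }
}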
Java
bsd-2-clause
5b47ff4ce0b91ce68d44b2218e2bce232d04fe11
0
makkus/getdown,makkus/getdown
// // $Id$ // // Getdown - application installer, patcher and launcher // Copyright (C) 2004-2010 Three Rings Design, Inc. // http://code.google.com/p/getdown/ // // Redistribution and use in source and binary forms, with or without modification, are permitted // provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, this list of // conditions and the following disclaimer in the documentation and/or other materials provided // with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package com.threerings.getdown.launcher; import java.awt.Container; import java.awt.Image; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.swing.JFrame; import javax.swing.WindowConstants; import com.samskivert.swing.util.SwingUtil; import com.samskivert.util.ArrayUtil; import com.samskivert.util.StringUtil; import static com.threerings.getdown.Log.log; /** * The main application entry point for Getdown. */ public class GetdownApp { public static void main (String[] argArray) { // maybe they specified the appdir in a system property int aidx = 0; List<String> args = Arrays.asList(argArray); String adarg = System.getProperty("appdir"); // if not, check for a command line argument if (StringUtil.isBlank(adarg)) { if (args.isEmpty()) { System.err.println("Usage: java -jar getdown.jar app_dir [app_id] [app args]"); System.exit(-1); } adarg = args.get(aidx++); } // look for a specific app identifier String appId = (aidx < args.size()) ? args.get(aidx++) : System.getProperty("appid"); // pass along anything after that as app args String[] appArgs = (aidx < args.size()) ? 
args.subList(aidx, args.size()).toArray(ArrayUtil.EMPTY_STRING) : null; // ensure a valid directory was supplied File appDir = new File(adarg); if (!appDir.exists() || !appDir.isDirectory()) { log.warning("Invalid app_dir '" + adarg + "'."); System.exit(-1); } // pipe our output into a file in the application directory if (System.getProperty("no_log_redir") == null) { File logFile = new File(appDir, "launcher.log"); try { PrintStream logOut = new PrintStream( new BufferedOutputStream(new FileOutputStream(logFile)), true); System.setOut(logOut); System.setErr(logOut); } catch (IOException ioe) { log.warning("Unable to redirect output to '" + logFile + "': " + ioe); } } // record a few things for posterity log.info("------------------ VM Info ------------------"); log.info("-- OS Name: " + System.getProperty("os.name")); log.info("-- OS Arch: " + System.getProperty("os.arch")); log.info("-- OS Vers: " + System.getProperty("os.version")); log.info("-- Java Vers: " + System.getProperty("java.version")); log.info("-- Java Home: " + System.getProperty("java.home")); log.info("-- User Name: " + System.getProperty("user.name")); log.info("-- User Home: " + System.getProperty("user.home")); log.info("-- Cur dir: " + System.getProperty("user.dir")); log.info("---------------------------------------------"); try { Getdown app = new Getdown(appDir, appId, null, null, appArgs) { @Override protected Container createContainer () { // create our user interface, and display it String title = StringUtil.isBlank(_ifc.name) ? "" : _ifc.name; if (_frame == null) { _frame = new JFrame(title); _frame.addWindowListener(new WindowAdapter() { @Override public void windowClosing (WindowEvent evt) { handleWindowClose(); } }); _frame.setResizable(false); } else { _frame.setTitle(title); _frame.getContentPane().removeAll(); } if (_ifc.iconImages != null) { ArrayList<Image> icons = new ArrayList<Image>(); for (String path : _ifc.iconImages) { Image img = loadImage(path); if (img == null) { log.warning("Error loading icon image", "path", path); } else { icons.add(img); } } if (icons.isEmpty()) { log.warning("Failed to load any icons", "iconImages", _ifc.iconImages); } else { SwingUtil.setFrameIcons(_frame, icons); } } _frame.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); return _frame.getContentPane(); } @Override protected void showContainer () { if (_frame != null) { _frame.pack(); SwingUtil.centerWindow(_frame); _frame.setVisible(true); } } @Override protected void disposeContainer () { if (_frame != null) { _frame.dispose(); _frame = null; } } @Override protected void exit (int exitCode) { System.exit(exitCode); } protected JFrame _frame; }; app.start(); } catch (Exception e) { log.warning("main() failed.", e); } } }
src/main/java/com/threerings/getdown/launcher/GetdownApp.java
// // $Id$ // // Getdown - application installer, patcher and launcher // Copyright (C) 2004-2010 Three Rings Design, Inc. // http://code.google.com/p/getdown/ // // Redistribution and use in source and binary forms, with or without modification, are permitted // provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, this list of // conditions and the following disclaimer in the documentation and/or other materials provided // with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A // PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package com.threerings.getdown.launcher; import java.awt.Container; import java.awt.Image; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.swing.JFrame; import javax.swing.WindowConstants; import com.samskivert.swing.util.SwingUtil; import com.samskivert.util.ArrayUtil; import com.samskivert.util.StringUtil; import static com.threerings.getdown.Log.log; /** * The main application entry point for Getdown. */ public class GetdownApp { public static void main (String[] argArray) { // maybe they specified the appdir in a system property int aidx = 0; List<String> args = Arrays.asList(argArray); String adarg = System.getProperty("appdir"); // if not, check for a command line argument if (StringUtil.isBlank(adarg)) { if (args.isEmpty()) { System.err.println("Usage: java -jar getdown.jar app_dir [app_id] [app args]"); System.exit(-1); } adarg = args.get(aidx++); } // look for a specific app identifier String appId = (aidx < args.size()) ? args.get(aidx++) : null; // pass along anything after that as app args String[] appArgs = (aidx < args.size()) ? 
args.subList(aidx, args.size()).toArray(ArrayUtil.EMPTY_STRING) : null; // ensure a valid directory was supplied File appDir = new File(adarg); if (!appDir.exists() || !appDir.isDirectory()) { log.warning("Invalid app_dir '" + adarg + "'."); System.exit(-1); } // pipe our output into a file in the application directory if (System.getProperty("no_log_redir") == null) { File logFile = new File(appDir, "launcher.log"); try { PrintStream logOut = new PrintStream( new BufferedOutputStream(new FileOutputStream(logFile)), true); System.setOut(logOut); System.setErr(logOut); } catch (IOException ioe) { log.warning("Unable to redirect output to '" + logFile + "': " + ioe); } } // record a few things for posterity log.info("------------------ VM Info ------------------"); log.info("-- OS Name: " + System.getProperty("os.name")); log.info("-- OS Arch: " + System.getProperty("os.arch")); log.info("-- OS Vers: " + System.getProperty("os.version")); log.info("-- Java Vers: " + System.getProperty("java.version")); log.info("-- Java Home: " + System.getProperty("java.home")); log.info("-- User Name: " + System.getProperty("user.name")); log.info("-- User Home: " + System.getProperty("user.home")); log.info("-- Cur dir: " + System.getProperty("user.dir")); log.info("---------------------------------------------"); try { Getdown app = new Getdown(appDir, appId, null, null, appArgs) { @Override protected Container createContainer () { // create our user interface, and display it String title = StringUtil.isBlank(_ifc.name) ? "" : _ifc.name; if (_frame == null) { _frame = new JFrame(title); _frame.addWindowListener(new WindowAdapter() { @Override public void windowClosing (WindowEvent evt) { handleWindowClose(); } }); _frame.setResizable(false); } else { _frame.setTitle(title); _frame.getContentPane().removeAll(); } if (_ifc.iconImages != null) { ArrayList<Image> icons = new ArrayList<Image>(); for (String path : _ifc.iconImages) { Image img = loadImage(path); if (img == null) { log.warning("Error loading icon image", "path", path); } else { icons.add(img); } } if (icons.isEmpty()) { log.warning("Failed to load any icons", "iconImages", _ifc.iconImages); } else { SwingUtil.setFrameIcons(_frame, icons); } } _frame.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); return _frame.getContentPane(); } @Override protected void showContainer () { if (_frame != null) { _frame.pack(); SwingUtil.centerWindow(_frame); _frame.setVisible(true); } } @Override protected void disposeContainer () { if (_frame != null) { _frame.dispose(); _frame = null; } } @Override protected void exit (int exitCode) { System.exit(exitCode); } protected JFrame _frame; }; app.start(); } catch (Exception e) { log.warning("main() failed.", e); } } }
Allow the 'appid' to be specified as a system property.

git-svn-id: a68a83646e9f3f8749d8366215f39ba5d386d793@439 a0402a4a-d63b-ad04-3ffe-6d551d543dc1
src/main/java/com/threerings/getdown/launcher/GetdownApp.java
Allow the 'appid' to be specified as a system property.
<ide><path>rc/main/java/com/threerings/getdown/launcher/GetdownApp.java
<ide>         }
<ide>
<ide>         // look for a specific app identifier
<del>        String appId = (aidx < args.size()) ? args.get(aidx++) : null;
<add>        String appId = (aidx < args.size()) ? args.get(aidx++) : System.getProperty("appid");
<ide>
<ide>         // pass along anything after that as app args
<ide>         String[] appArgs = (aidx < args.size()) ?
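The change is small but the pattern is worth naming: take an optional value from the command line when present, otherwise fall back to a -D system property — the same fallback the launcher already applies to appdir a few lines earlier. A standalone sketch of the combined lookup (the class name is invented; the "appdir" and "appid" property names come from the code above):

public class ArgFallbackDemo {

    public static void main(String[] args) {
        int aidx = 0;

        // Prefer the command line, then the -Dappdir=... system property.
        String appDir = System.getProperty("appdir");
        if (appDir == null || appDir.trim().isEmpty()) {
            if (args.length == 0) {
                System.err.println("Usage: java ArgFallbackDemo app_dir [app_id]");
                System.exit(-1);
            }
            appDir = args[aidx++];
        }

        // Same fallback for the optional app id, as in the diff above.
        String appId = (aidx < args.length) ? args[aidx++] : System.getProperty("appid");

        System.out.println("appDir=" + appDir + ", appId=" + appId);
    }
}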
Java
agpl-3.0
cef73108113fafe394350f203eb552e4ff4ebf60
0
imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms
package com.imcode.imcms.domain.service.api; import com.imcode.imcms.components.datainitializer.*; import com.imcode.imcms.config.TestConfig; import com.imcode.imcms.config.WebTestConfig; import com.imcode.imcms.domain.dto.*; import com.imcode.imcms.domain.service.core.TextDocumentTemplateService; import com.imcode.imcms.mapping.jpa.User; import com.imcode.imcms.persistence.entity.Meta; import com.imcode.imcms.persistence.repository.MetaRepository; import com.imcode.imcms.util.Value; import imcode.server.Imcms; import imcode.server.user.UserDomainObject; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import org.springframework.transaction.annotation.Transactional; import java.util.*; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import static com.imcode.imcms.persistence.entity.Meta.DisabledLanguageShowMode.DO_NOT_SHOW; import static com.imcode.imcms.persistence.entity.Meta.DisabledLanguageShowMode.SHOW_IN_DEFAULT_LANGUAGE; import static org.junit.Assert.*; @Transactional @WebAppConfiguration @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = {TestConfig.class, WebTestConfig.class}) public class DocumentServiceTest { private static final int TEST_VERSION_INDEX = 0; private DocumentDTO createdDoc; @Autowired private DocumentService documentService; @Autowired private MetaRepository metaRepository; @Autowired private Function<Meta, DocumentDTO> metaToDocumentDTO; @Autowired private VersionDataInitializer versionDataInitializer; @Autowired private CommonContentDataInitializer commonContentDataInitializer; @Autowired private UserDataInitializer userDataInitializer; @Autowired private CategoryService categoryService; @Autowired private CategoryDataInitializer categoryDataInitializer; @Autowired private RoleService roleService; @Autowired private TemplateDataInitializer templateDataInitializer; @Autowired private TextDocumentTemplateService templateService; @Before public void setUp() throws Exception { templateDataInitializer.cleanRepositories(); final Meta metaDoc = Value.with(new Meta(), meta -> { meta.setArchivedDatetime(new Date()); meta.setArchiverId(1); meta.setCategoryIds(new HashSet<>()); meta.setCreatedDatetime(new Date()); meta.setCreatorId(1); meta.setModifiedDatetime(new Date()); meta.setModifierId(1); meta.setDefaultVersionNo(0); meta.setDisabledLanguageShowMode(SHOW_IN_DEFAULT_LANGUAGE); meta.setDocumentType(Meta.DocumentType.TEXT); meta.setKeywords(new HashSet<>()); meta.setLinkableByOtherUsers(true); meta.setLinkedForUnauthorizedUsers(true); meta.setPublicationStartDatetime(new Date()); meta.setPublicationStatus(Meta.PublicationStatus.APPROVED); meta.setPublisherId(1); meta.setSearchDisabled(false); meta.setTarget("test"); }); final UserDomainObject user = new UserDomainObject(1); user.setLanguageIso639_2("eng"); Imcms.setUser(user); metaRepository.save(metaDoc); templateDataInitializer.createData(metaDoc.getId(), "demo", "demo"); versionDataInitializer.createData(TEST_VERSION_INDEX, metaDoc.getId()); commonContentDataInitializer.createData(metaDoc.getId(), TEST_VERSION_INDEX); createdDoc = metaToDocumentDTO.apply(metaDoc); } @Test public void get() throws Exception { final DocumentDTO documentDTO = 
documentService.get(createdDoc.getId()); assertEquals(documentDTO, createdDoc); } @Test public void getDocumentTitle() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String testHeadline = "test_headline"; for (CommonContentDTO commonContentDTO : documentDTO.getCommonContents()) { commonContentDTO.setHeadline(testHeadline); } documentService.save(documentDTO); assertEquals(documentService.getDocumentTitle(createdDoc.getId()), testHeadline); } @Test public void getDocumentTarget() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String testTarget = "_target"; documentDTO.setTarget(testTarget); documentService.save(documentDTO); assertEquals(documentService.getDocumentTarget(createdDoc.getId()), testTarget); } @Test public void getDocumentLink_When_NoAlias_Expect_DocIdInLink() throws Exception { final int docId = createdDoc.getId(); assertEquals(documentService.getDocumentLink(docId), "/" + createdDoc.getId()); } @Test public void getDocumentLink_When_AliasIsSet_Expect_AliasInLink() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String testAlias = "test_alias"; documentDTO.setAlias(testAlias); documentService.save(documentDTO); assertEquals(documentService.getDocumentLink(createdDoc.getId()), "/" + testAlias); } @Test public void save_With_Target_Expected_Saved() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setTarget("test_target"); documentService.save(documentDTO); final DocumentDTO documentDTO1 = documentService.get(documentDTO.getId()); assertEquals(documentDTO1, documentDTO); } @Test public void save_When_CustomCommonContentsSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final List<CommonContentDTO> commonContents = documentDTO.getCommonContents(); for (int i = 0; i < commonContents.size(); i++) { CommonContentDTO commonContentDTO = commonContents.get(i); commonContentDTO.setHeadline("Test headline " + i); commonContentDTO.setMenuText("Test menu text " + i); commonContentDTO.setMenuImageURL("Test menu image url " + i); commonContentDTO.setEnabled((i % 2) == 0); } documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getCommonContents(), commonContents); } @Test public void save_When_TargetAndAliasChanged_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String newTarget = "_blank"; final String newAlias = "test-alias"; documentDTO.setTarget(newTarget); documentDTO.setAlias(newAlias); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getTarget(), newTarget); assertEquals(savedDocumentDTO.getAlias(), newAlias); } @Test public void save_When_DifferentPublicationStatusSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final Meta.PublicationStatus statusApproved = Meta.PublicationStatus.APPROVED; final Meta.PublicationStatus statusDisapproved = Meta.PublicationStatus.DISAPPROVED; final Meta.PublicationStatus statusNew = Meta.PublicationStatus.NEW; // approved documentDTO.setPublicationStatus(statusApproved); documentService.save(documentDTO); DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getPublicationStatus(), 
statusApproved); // disapproved documentDTO.setPublicationStatus(statusDisapproved); documentService.save(documentDTO); savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getPublicationStatus(), statusDisapproved); // new documentDTO.setPublicationStatus(statusNew); documentService.save(documentDTO); savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getPublicationStatus(), statusNew); } @Test public void save_When_CreatedAndModifiedAndArchivedAndPublishedAndDepublishedAttributesSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final User user = userDataInitializer.createData("testUser"); final Supplier<AuditDTO> auditCreator = () -> { final AuditDTO auditDTO = new AuditDTO(); auditDTO.setDateTime(new Date()); auditDTO.setId(user.getId()); auditDTO.setBy(user.getLogin()); return auditDTO; }; final AuditDTO createdAudit = auditCreator.get(); final AuditDTO modifiedAudit = auditCreator.get(); final AuditDTO archivedAudit = auditCreator.get(); final AuditDTO publishedAudit = auditCreator.get(); final AuditDTO depublishedAudit = auditCreator.get(); documentDTO.setCreated(createdAudit); documentDTO.setModified(modifiedAudit); documentDTO.setArchived(archivedAudit); documentDTO.setPublished(publishedAudit); documentDTO.setPublicationEnd(depublishedAudit); documentService.save(documentDTO); DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(createdAudit, savedDocumentDTO.getCreated()); assertEquals(modifiedAudit, savedDocumentDTO.getModified()); assertEquals(archivedAudit, savedDocumentDTO.getArchived()); assertEquals(publishedAudit, savedDocumentDTO.getPublished()); assertEquals(depublishedAudit, savedDocumentDTO.getPublicationEnd()); // only for nullable things final AuditDTO emptyArchivedAudit = new AuditDTO(); final AuditDTO emptyPublishedAudit = new AuditDTO(); final AuditDTO emptyDepublishedAudit = new AuditDTO(); documentDTO.setArchived(emptyArchivedAudit); documentDTO.setPublished(emptyPublishedAudit); documentDTO.setPublicationEnd(emptyDepublishedAudit); documentService.save(documentDTO); savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(emptyArchivedAudit, savedDocumentDTO.getArchived()); assertEquals(emptyPublishedAudit, savedDocumentDTO.getPublished()); assertEquals(emptyDepublishedAudit, savedDocumentDTO.getPublicationEnd()); } @Test public void save_When_CustomMissingLanguagePropertySet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setDisabledLanguageShowMode(SHOW_IN_DEFAULT_LANGUAGE); documentService.save(documentDTO); DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getDisabledLanguageShowMode(), SHOW_IN_DEFAULT_LANGUAGE); documentDTO.setDisabledLanguageShowMode(DO_NOT_SHOW); documentService.save(documentDTO); savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getDisabledLanguageShowMode(), DO_NOT_SHOW); } @Test public void save_When_CustomKeywordsSet_Expect_Saved() { final Set<String> keywords = new HashSet<>(); keywords.add("test keyword 1"); keywords.add("test keyword 2"); keywords.add("test keyword 3"); keywords.add("test keyword 4"); keywords.add("test keyword 5"); keywords.add("test keyword 6"); final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setKeywords(keywords); documentService.save(documentDTO); final DocumentDTO 
savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(keywords, savedDocumentDTO.getKeywords()); final int prevSize = keywords.size(); keywords.remove("test keyword 1"); assertEquals(keywords.size() + 1, prevSize); savedDocumentDTO.setKeywords(keywords); documentService.save(savedDocumentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertEquals(keywords, savedDocumentDTO1.getKeywords()); } @Test public void save_When_SearchEnabledAndDisabled_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setSearchDisabled(true); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertTrue(savedDocumentDTO.isSearchDisabled()); savedDocumentDTO.setSearchDisabled(false); documentService.save(savedDocumentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertFalse(savedDocumentDTO1.isSearchDisabled()); } @Test public void save_When_CategoriesIsSet_Expect_Saved() { categoryDataInitializer.createData(50); final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final Set<CategoryDTO> categories = categoryService.getAll().stream() .filter(categoryDTO -> categoryDTO.getId() % 2 == 0) .collect(Collectors.toSet()); documentDTO.setCategories(categories); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(categories, savedDocumentDTO.getCategories()); final Set<CategoryDTO> categories1 = categoryService.getAll().stream() .filter(categoryDTO -> categoryDTO.getId() % 2 == 1) .collect(Collectors.toSet()); documentDTO.setCategories(categories1); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertEquals(categories1, savedDocumentDTO1.getCategories()); } @Test public void save_When_CustomAccessRulesSet_Expect_Saved() { final Set<RoleDTO> roles = new HashSet<>(); for (PermissionDTO permissionDTO : PermissionDTO.values()) { final RoleDTO roleDTO = roleService.save(new RoleDTO(null, "test_role_" + permissionDTO)); roleDTO.setPermission(permissionDTO); roles.add(roleDTO); } final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setRoles(roles); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertTrue(savedDocumentDTO.getRoles().containsAll(roles)); final Set<RoleDTO> roles1 = new HashSet<>(); savedDocumentDTO.setRoles(roles1); documentService.save(savedDocumentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO1.getRoles(), roles1); } @Test public void save_When_RestrictedPermissionsSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final HashMap<PermissionDTO, RestrictedPermissionDTO> restrictedPermissions = new HashMap<>(); final RestrictedPermissionDTO restricted1 = new RestrictedPermissionDTO(); restricted1.setEditDocumentInfo(true); restricted1.setEditImage(false); restricted1.setEditLoop(true); restricted1.setEditMenu(false); restricted1.setEditText(true); final RestrictedPermissionDTO restricted2 = new RestrictedPermissionDTO(); restricted2.setEditDocumentInfo(false); restricted2.setEditImage(true); restricted2.setEditLoop(false); restricted2.setEditMenu(true); restricted2.setEditText(false); restrictedPermissions.put(PermissionDTO.RESTRICTED_1, 
restricted1); restrictedPermissions.put(PermissionDTO.RESTRICTED_2, restricted2); documentDTO.setRestrictedPermissions(restrictedPermissions); documentService.save(documentDTO); final DocumentDTO documentDTO1 = documentService.get(documentDTO.getId()); assertEquals(restricted1, documentDTO1.getRestrictedPermissions().get(PermissionDTO.RESTRICTED_1)); assertEquals(restricted2, documentDTO1.getRestrictedPermissions().get(PermissionDTO.RESTRICTED_2)); assertEquals(documentDTO1, documentDTO); } @Test public void save_When_CustomTemplateSet_Expect_Saved() throws Exception { final String templateName = "test_" + System.currentTimeMillis(); final int docId = createdDoc.getId(); final TextDocumentTemplateDTO templateDTO = new TextDocumentTemplateDTO(docId, templateName, 0, templateName); final TextDocumentTemplateDTO savedTemplate = templateService.save(templateDTO); assertNotNull(savedTemplate); final DocumentDTO documentDTO = documentService.get(docId); documentDTO.setTemplate(templateDTO); documentService.save(documentDTO); final DocumentDTO savedDoc = documentService.get(documentDTO.getId()); final TextDocumentTemplateDTO savedDocTemplate = savedDoc.getTemplate(); assertEquals(savedDocTemplate, savedTemplate); } }
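A note on the test class above: nearly every test repeats the same save-then-reload-then-assert cycle against documentService, so a small helper could factor that pattern out. The sketch below is illustrative only and not part of the committed file; the saveAndReload name is hypothetical, and it assumes the helper lives inside the same test class so that documentService and createdDoc are in scope.

    private DocumentDTO saveAndReload(DocumentDTO documentDTO) {
        // Persist the DTO, then read it back so assertions run against
        // what the service actually stored, not the in-memory copy.
        documentService.save(documentDTO);
        return documentService.get(createdDoc.getId());
    }

Each test body would then reduce to: mutate the DTO, call saveAndReload, assertEquals.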
src/test/java/com/imcode/imcms/domain/service/api/DocumentServiceTest.java
package com.imcode.imcms.domain.service.api; import com.imcode.imcms.components.datainitializer.*; import com.imcode.imcms.config.TestConfig; import com.imcode.imcms.config.WebTestConfig; import com.imcode.imcms.domain.dto.*; import com.imcode.imcms.mapping.jpa.User; import com.imcode.imcms.persistence.entity.Meta; import com.imcode.imcms.persistence.repository.MetaRepository; import com.imcode.imcms.util.Value; import imcode.server.Imcms; import imcode.server.user.UserDomainObject; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import org.springframework.transaction.annotation.Transactional; import java.util.*; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import static com.imcode.imcms.persistence.entity.Meta.DisabledLanguageShowMode.DO_NOT_SHOW; import static com.imcode.imcms.persistence.entity.Meta.DisabledLanguageShowMode.SHOW_IN_DEFAULT_LANGUAGE; import static org.junit.Assert.*; @Transactional @WebAppConfiguration @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = {TestConfig.class, WebTestConfig.class}) public class DocumentServiceTest { private static final int TEST_VERSION_INDEX = 0; private DocumentDTO createdDoc; @Autowired private DocumentService documentService; @Autowired private MetaRepository metaRepository; @Autowired private Function<Meta, DocumentDTO> metaToDocumentDTO; @Autowired private VersionDataInitializer versionDataInitializer; @Autowired private CommonContentDataInitializer commonContentDataInitializer; @Autowired private UserDataInitializer userDataInitializer; @Autowired private CategoryService categoryService; @Autowired private CategoryDataInitializer categoryDataInitializer; @Autowired private RoleService roleService; @Autowired private TemplateDataInitializer templateDataInitializer; @Before public void setUp() throws Exception { templateDataInitializer.cleanRepositories(); final Meta metaDoc = Value.with(new Meta(), meta -> { meta.setArchivedDatetime(new Date()); meta.setArchiverId(1); meta.setCategoryIds(new HashSet<>()); meta.setCreatedDatetime(new Date()); meta.setCreatorId(1); meta.setModifiedDatetime(new Date()); meta.setModifierId(1); meta.setDefaultVersionNo(0); meta.setDisabledLanguageShowMode(SHOW_IN_DEFAULT_LANGUAGE); meta.setDocumentType(Meta.DocumentType.TEXT); meta.setKeywords(new HashSet<>()); meta.setLinkableByOtherUsers(true); meta.setLinkedForUnauthorizedUsers(true); meta.setPublicationStartDatetime(new Date()); meta.setPublicationStatus(Meta.PublicationStatus.APPROVED); meta.setPublisherId(1); meta.setSearchDisabled(false); meta.setTarget("test"); }); final UserDomainObject user = new UserDomainObject(1); user.setLanguageIso639_2("eng"); Imcms.setUser(user); metaRepository.save(metaDoc); templateDataInitializer.createData(metaDoc.getId(), "demo", "demo"); versionDataInitializer.createData(TEST_VERSION_INDEX, metaDoc.getId()); commonContentDataInitializer.createData(metaDoc.getId(), TEST_VERSION_INDEX); createdDoc = metaToDocumentDTO.apply(metaDoc); } @Test public void get() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); assertEquals(documentDTO, createdDoc); } @Test public void getDocumentTitle() throws Exception { final 
DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String testHeadline = "test_headline"; for (CommonContentDTO commonContentDTO : documentDTO.getCommonContents()) { commonContentDTO.setHeadline(testHeadline); } documentService.save(documentDTO); assertEquals(documentService.getDocumentTitle(createdDoc.getId()), testHeadline); } @Test public void getDocumentTarget() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String testTarget = "_target"; documentDTO.setTarget(testTarget); documentService.save(documentDTO); assertEquals(documentService.getDocumentTarget(createdDoc.getId()), testTarget); } @Test public void getDocumentLink_When_NoAlias_Expect_DocIdInLink() throws Exception { final int docId = createdDoc.getId(); assertEquals(documentService.getDocumentLink(docId), "/" + createdDoc.getId()); } @Test public void getDocumentLink_When_AliasIsSet_Expect_AliasInLink() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String testAlias = "test_alias"; documentDTO.setAlias(testAlias); documentService.save(documentDTO); assertEquals(documentService.getDocumentLink(createdDoc.getId()), "/" + testAlias); } @Test public void save_With_Target_Expected_Saved() throws Exception { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setTarget("test_target"); documentService.save(documentDTO); final DocumentDTO documentDTO1 = documentService.get(documentDTO.getId()); assertEquals(documentDTO1, documentDTO); } @Test public void save_When_CustomCommonContentsSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final List<CommonContentDTO> commonContents = documentDTO.getCommonContents(); for (int i = 0; i < commonContents.size(); i++) { CommonContentDTO commonContentDTO = commonContents.get(i); commonContentDTO.setHeadline("Test headline " + i); commonContentDTO.setMenuText("Test menu text " + i); commonContentDTO.setMenuImageURL("Test menu image url " + i); commonContentDTO.setEnabled((i % 2) == 0); } documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getCommonContents(), commonContents); } @Test public void save_When_TargetAndAliasChanged_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final String newTarget = "_blank"; final String newAlias = "test-alias"; documentDTO.setTarget(newTarget); documentDTO.setAlias(newAlias); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getTarget(), newTarget); assertEquals(savedDocumentDTO.getAlias(), newAlias); } @Test public void save_When_DifferentPublicationStatusSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final Meta.PublicationStatus statusApproved = Meta.PublicationStatus.APPROVED; final Meta.PublicationStatus statusDisapproved = Meta.PublicationStatus.DISAPPROVED; final Meta.PublicationStatus statusNew = Meta.PublicationStatus.NEW; // approved documentDTO.setPublicationStatus(statusApproved); documentService.save(documentDTO); DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getPublicationStatus(), statusApproved); // disapproved documentDTO.setPublicationStatus(statusDisapproved); documentService.save(documentDTO); savedDocumentDTO = 
documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getPublicationStatus(), statusDisapproved); // new documentDTO.setPublicationStatus(statusNew); documentService.save(documentDTO); savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getPublicationStatus(), statusNew); } @Test public void save_When_CreatedAndModifiedAndArchivedAndPublishedAndDepublishedAttributesSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final User user = userDataInitializer.createData("testUser"); final Supplier<AuditDTO> auditCreator = () -> { final AuditDTO auditDTO = new AuditDTO(); auditDTO.setDateTime(new Date()); auditDTO.setId(user.getId()); auditDTO.setBy(user.getLogin()); return auditDTO; }; final AuditDTO createdAudit = auditCreator.get(); final AuditDTO modifiedAudit = auditCreator.get(); final AuditDTO archivedAudit = auditCreator.get(); final AuditDTO publishedAudit = auditCreator.get(); final AuditDTO depublishedAudit = auditCreator.get(); documentDTO.setCreated(createdAudit); documentDTO.setModified(modifiedAudit); documentDTO.setArchived(archivedAudit); documentDTO.setPublished(publishedAudit); documentDTO.setPublicationEnd(depublishedAudit); documentService.save(documentDTO); DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(createdAudit, savedDocumentDTO.getCreated()); assertEquals(modifiedAudit, savedDocumentDTO.getModified()); assertEquals(archivedAudit, savedDocumentDTO.getArchived()); assertEquals(publishedAudit, savedDocumentDTO.getPublished()); assertEquals(depublishedAudit, savedDocumentDTO.getPublicationEnd()); // only for nullable things final AuditDTO emptyArchivedAudit = new AuditDTO(); final AuditDTO emptyPublishedAudit = new AuditDTO(); final AuditDTO emptyDepublishedAudit = new AuditDTO(); documentDTO.setArchived(emptyArchivedAudit); documentDTO.setPublished(emptyPublishedAudit); documentDTO.setPublicationEnd(emptyDepublishedAudit); documentService.save(documentDTO); savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(emptyArchivedAudit, savedDocumentDTO.getArchived()); assertEquals(emptyPublishedAudit, savedDocumentDTO.getPublished()); assertEquals(emptyDepublishedAudit, savedDocumentDTO.getPublicationEnd()); } @Test public void save_When_CustomMissingLanguagePropertySet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setDisabledLanguageShowMode(SHOW_IN_DEFAULT_LANGUAGE); documentService.save(documentDTO); DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getDisabledLanguageShowMode(), SHOW_IN_DEFAULT_LANGUAGE); documentDTO.setDisabledLanguageShowMode(DO_NOT_SHOW); documentService.save(documentDTO); savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO.getDisabledLanguageShowMode(), DO_NOT_SHOW); } @Test public void save_When_CustomKeywordsSet_Expect_Saved() { final Set<String> keywords = new HashSet<>(); keywords.add("test keyword 1"); keywords.add("test keyword 2"); keywords.add("test keyword 3"); keywords.add("test keyword 4"); keywords.add("test keyword 5"); keywords.add("test keyword 6"); final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setKeywords(keywords); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(keywords, savedDocumentDTO.getKeywords()); final int prevSize = 
keywords.size(); keywords.remove("test keyword 1"); assertEquals(keywords.size() + 1, prevSize); savedDocumentDTO.setKeywords(keywords); documentService.save(savedDocumentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertEquals(keywords, savedDocumentDTO1.getKeywords()); } @Test public void save_When_SearchEnabledAndDisabled_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setSearchDisabled(true); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertTrue(savedDocumentDTO.isSearchDisabled()); savedDocumentDTO.setSearchDisabled(false); documentService.save(savedDocumentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertFalse(savedDocumentDTO1.isSearchDisabled()); } @Test public void save_When_CategoriesIsSet_Expect_Saved() { categoryDataInitializer.createData(50); final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final Set<CategoryDTO> categories = categoryService.getAll().stream() .filter(categoryDTO -> categoryDTO.getId() % 2 == 0) .collect(Collectors.toSet()); documentDTO.setCategories(categories); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertEquals(categories, savedDocumentDTO.getCategories()); final Set<CategoryDTO> categories1 = categoryService.getAll().stream() .filter(categoryDTO -> categoryDTO.getId() % 2 == 1) .collect(Collectors.toSet()); documentDTO.setCategories(categories1); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertEquals(categories1, savedDocumentDTO1.getCategories()); } @Test public void save_When_CustomAccessRulesSet_Expect_Saved() { final Set<RoleDTO> roles = new HashSet<>(); for (PermissionDTO permissionDTO : PermissionDTO.values()) { final RoleDTO roleDTO = roleService.save(new RoleDTO(null, "test_role_" + permissionDTO)); roleDTO.setPermission(permissionDTO); roles.add(roleDTO); } final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); documentDTO.setRoles(roles); documentService.save(documentDTO); final DocumentDTO savedDocumentDTO = documentService.get(createdDoc.getId()); assertTrue(savedDocumentDTO.getRoles().containsAll(roles)); final Set<RoleDTO> roles1 = new HashSet<>(); savedDocumentDTO.setRoles(roles1); documentService.save(savedDocumentDTO); final DocumentDTO savedDocumentDTO1 = documentService.get(createdDoc.getId()); assertEquals(savedDocumentDTO1.getRoles(), roles1); } @Test public void save_When_RestrictedPermissionsSet_Expect_Saved() { final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); final HashMap<PermissionDTO, RestrictedPermissionDTO> restrictedPermissions = new HashMap<>(); final RestrictedPermissionDTO restricted1 = new RestrictedPermissionDTO(); restricted1.setEditDocumentInfo(true); restricted1.setEditImage(false); restricted1.setEditLoop(true); restricted1.setEditMenu(false); restricted1.setEditText(true); final RestrictedPermissionDTO restricted2 = new RestrictedPermissionDTO(); restricted2.setEditDocumentInfo(false); restricted2.setEditImage(true); restricted2.setEditLoop(false); restricted2.setEditMenu(true); restricted2.setEditText(false); restrictedPermissions.put(PermissionDTO.RESTRICTED_1, restricted1); restrictedPermissions.put(PermissionDTO.RESTRICTED_2, restricted2); documentDTO.setRestrictedPermissions(restrictedPermissions); 
documentService.save(documentDTO); final DocumentDTO documentDTO1 = documentService.get(documentDTO.getId()); assertEquals(restricted1, documentDTO1.getRestrictedPermissions().get(PermissionDTO.RESTRICTED_1)); assertEquals(restricted2, documentDTO1.getRestrictedPermissions().get(PermissionDTO.RESTRICTED_2)); assertEquals(documentDTO1, documentDTO); } @Test public void save_When_CustomTemplateSet_Expect_Saved() throws Exception { // final String templateName = "test_" + System.currentTimeMillis(); // final File templateFile = new File(TemplateMapper.getTemplateDirectory(), templateName + ".jsp"); // final TemplateDTO templateDTO = new TemplateDTO(null, templateName, false); // // try { // assertTrue(templateFile.createNewFile()); // assertNotNull(templateService.save(templateDTO)); // // final Optional<TemplateDTO> oTemplate = templateService.getTemplate(templateName); // assertTrue(oTemplate.isPresent()); // // final TemplateDTO templateDTO1 = oTemplate.get(); // final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); // documentDTO.setTemplate(templateDTO1); // // documentService.save(documentDTO); // // final DocumentDTO savedDoc = documentService.get(documentDTO.getId()); // final TemplateDTO savedTemplate = savedDoc.getTemplate(); // // assertEquals(savedTemplate, templateDTO1); // // } finally { // assertTrue(FileUtility.forceDelete(templateFile)); // } } }
IMCMS-255 - Upgrade server side to work with new client: - Saving a document with a custom template is now covered by a test.
src/test/java/com/imcode/imcms/domain/service/api/DocumentServiceTest.java
IMCMS-255 - Upgrade server side to work with new client: - Saving a document with a custom template is now covered by a test.
<ide><path>src/test/java/com/imcode/imcms/domain/service/api/DocumentServiceTest.java <ide> import com.imcode.imcms.config.TestConfig; <ide> import com.imcode.imcms.config.WebTestConfig; <ide> import com.imcode.imcms.domain.dto.*; <add>import com.imcode.imcms.domain.service.core.TextDocumentTemplateService; <ide> import com.imcode.imcms.mapping.jpa.User; <ide> import com.imcode.imcms.persistence.entity.Meta; <ide> import com.imcode.imcms.persistence.repository.MetaRepository; <ide> <ide> @Autowired <ide> private TemplateDataInitializer templateDataInitializer; <add> <add> @Autowired <add> private TextDocumentTemplateService templateService; <ide> <ide> @Before <ide> public void setUp() throws Exception { <ide> <ide> @Test <ide> public void save_When_CustomTemplateSet_Expect_Saved() throws Exception { <del>// final String templateName = "test_" + System.currentTimeMillis(); <del>// final File templateFile = new File(TemplateMapper.getTemplateDirectory(), templateName + ".jsp"); <del>// final TemplateDTO templateDTO = new TemplateDTO(null, templateName, false); <del>// <del>// try { <del>// assertTrue(templateFile.createNewFile()); <del>// assertNotNull(templateService.save(templateDTO)); <del>// <del>// final Optional<TemplateDTO> oTemplate = templateService.getTemplate(templateName); <del>// assertTrue(oTemplate.isPresent()); <del>// <del>// final TemplateDTO templateDTO1 = oTemplate.get(); <del>// final DocumentDTO documentDTO = documentService.get(createdDoc.getId()); <del>// documentDTO.setTemplate(templateDTO1); <del>// <del>// documentService.save(documentDTO); <del>// <del>// final DocumentDTO savedDoc = documentService.get(documentDTO.getId()); <del>// final TemplateDTO savedTemplate = savedDoc.getTemplate(); <del>// <del>// assertEquals(savedTemplate, templateDTO1); <del>// <del>// } finally { <del>// assertTrue(FileUtility.forceDelete(templateFile)); <del>// } <add> final String templateName = "test_" + System.currentTimeMillis(); <add> final int docId = createdDoc.getId(); <add> final TextDocumentTemplateDTO templateDTO = new TextDocumentTemplateDTO(docId, templateName, 0, templateName); <add> <add> final TextDocumentTemplateDTO savedTemplate = templateService.save(templateDTO); <add> assertNotNull(savedTemplate); <add> <add> final DocumentDTO documentDTO = documentService.get(docId); <add> documentDTO.setTemplate(templateDTO); <add> <add> documentService.save(documentDTO); <add> <add> final DocumentDTO savedDoc = documentService.get(documentDTO.getId()); <add> final TextDocumentTemplateDTO savedDocTemplate = savedDoc.getTemplate(); <add> <add> assertEquals(savedDocTemplate, savedTemplate); <ide> } <ide> }
Java
apache-2.0
17de8173abcf57deb14e3e0d8c6cef7a0e2a51d7
0
gpolitis/jitsi,bebo/jitsi,martin7890/jitsi,459below/jitsi,level7systems/jitsi,procandi/jitsi,laborautonomo/jitsi,jibaro/jitsi,gpolitis/jitsi,cobratbq/jitsi,jitsi/jitsi,bhatvv/jitsi,damencho/jitsi,ibauersachs/jitsi,dkcreinoso/jitsi,level7systems/jitsi,tuijldert/jitsi,bhatvv/jitsi,jitsi/jitsi,marclaporte/jitsi,iant-gmbh/jitsi,dkcreinoso/jitsi,HelioGuilherme66/jitsi,Metaswitch/jitsi,ibauersachs/jitsi,procandi/jitsi,459below/jitsi,damencho/jitsi,mckayclarey/jitsi,damencho/jitsi,tuijldert/jitsi,jibaro/jitsi,mckayclarey/jitsi,martin7890/jitsi,dkcreinoso/jitsi,marclaporte/jitsi,ibauersachs/jitsi,HelioGuilherme66/jitsi,jitsi/jitsi,pplatek/jitsi,459below/jitsi,bebo/jitsi,ringdna/jitsi,ibauersachs/jitsi,level7systems/jitsi,jitsi/jitsi,procandi/jitsi,ringdna/jitsi,cobratbq/jitsi,Metaswitch/jitsi,laborautonomo/jitsi,pplatek/jitsi,martin7890/jitsi,gpolitis/jitsi,pplatek/jitsi,HelioGuilherme66/jitsi,cobratbq/jitsi,laborautonomo/jitsi,ibauersachs/jitsi,Metaswitch/jitsi,martin7890/jitsi,iant-gmbh/jitsi,pplatek/jitsi,ringdna/jitsi,dkcreinoso/jitsi,bhatvv/jitsi,ringdna/jitsi,procandi/jitsi,gpolitis/jitsi,laborautonomo/jitsi,jibaro/jitsi,procandi/jitsi,459below/jitsi,damencho/jitsi,martin7890/jitsi,jibaro/jitsi,bebo/jitsi,pplatek/jitsi,gpolitis/jitsi,mckayclarey/jitsi,cobratbq/jitsi,459below/jitsi,jibaro/jitsi,Metaswitch/jitsi,ringdna/jitsi,bebo/jitsi,HelioGuilherme66/jitsi,tuijldert/jitsi,bhatvv/jitsi,marclaporte/jitsi,mckayclarey/jitsi,damencho/jitsi,jitsi/jitsi,dkcreinoso/jitsi,bebo/jitsi,level7systems/jitsi,marclaporte/jitsi,mckayclarey/jitsi,tuijldert/jitsi,level7systems/jitsi,iant-gmbh/jitsi,marclaporte/jitsi,HelioGuilherme66/jitsi,tuijldert/jitsi,laborautonomo/jitsi,bhatvv/jitsi,cobratbq/jitsi,iant-gmbh/jitsi,iant-gmbh/jitsi
/* * SIP Communicator, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package net.java.sip.communicator.impl.media.device; import java.util.*; import javax.media.*; import javax.media.format.*; import net.java.sip.communicator.impl.media.*; import net.java.sip.communicator.service.configuration.*; import net.java.sip.communicator.util.*; /** * This class aims to provide a simple configuration interface for JMF. It * retrieves stored configuration when started or listens to ConfigurationEvent * for property changes and configures the JMF accordingly. * * @author Martin Andre * @author Emil Ivov * @author Lubomir Marinov */ @SuppressWarnings("unchecked") public class DeviceConfiguration extends PropertyChangeNotifier { /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for audio * capture. */ public static final String AUDIO_CAPTURE_DEVICE = "AUDIO_CAPTURE_DEVICE"; /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for audio * playback. */ public static final String AUDIO_PLAYBACK_DEVICE = "AUDIO_PLAYBACK_DEVICE"; /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for audio * notify. */ public static final String AUDIO_NOTIFY_DEVICE = "AUDIO_NOTIFY_DEVICE"; /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for video * capture. */ public static final String VIDEO_CAPTURE_DEVICE = "VIDEO_CAPTURE_DEVICE"; /** * When audio is disabled the selected audio system is with name None. */ public static final String AUDIO_SYSTEM_NONE = "None"; /** * JavaSound sound system. */ public static final String AUDIO_SYSTEM_JAVASOUND = "JavaSound"; /** * PortAudio sound system. */ public static final String AUDIO_SYSTEM_PORTAUDIO = "PortAudio"; private static final String PROP_AUDIO_DEVICE = "net.java.sip.communicator.impl.media.audiodev"; private static final String PROP_AUDIO_PLAYBACK_DEVICE = "net.java.sip.communicator.impl.media.audio.playbackdev"; private static final String PROP_AUDIO_NOTIFY_DEVICE = "net.java.sip.communicator.impl.media.audio.notifydev"; private static final String PROP_AUDIO_DEVICE_IS_DISABLED = "net.java.sip.communicator.impl.media.audiodevIsDisabled"; private static final String PROP_VIDEO_DEVICE = "net.java.sip.communicator.impl.media.videodev"; private static final String PROP_VIDEO_DEVICE_IS_DISABLED = "net.java.sip.communicator.impl.media.videodevIsDisabled"; private static final CaptureDeviceInfo[] NO_CAPTURE_DEVICES = new CaptureDeviceInfo[0]; private Logger logger = Logger.getLogger(DeviceConfiguration.class); /** * The device that we'll be using for audio capture. */ private CaptureDeviceInfo audioCaptureDevice = null; private CaptureDeviceInfo audioPlaybackDevice = null; private CaptureDeviceInfo audioNotifyDevice = null; /** * The device that we'll be using for video capture. */ private CaptureDeviceInfo videoCaptureDevice; private static Vector<String> audioSystems = new Vector<String>(); private String audioSystem = null; /** * Default constructor. */ public DeviceConfiguration() { //dummy ... XXX do we really need it though? } /** * Initializes capture devices. 
*/ public void initialize() { // these seem to be throwing exceptions every now and then so we'll // blindly catch them for now try { JmfDeviceDetector.detectAndConfigureCaptureDevices(); extractConfiguredCaptureDevices(); } catch (Exception ex) { logger.error("Failed to initialize media.", ex); } } /** * Detects capture devices configured through JMF and disables audio and/or * video transmission if none were found. Stores found devices in * audioCaptureDevice and videoCaptureDevice. */ private void extractConfiguredCaptureDevices() { ConfigurationService config = MediaActivator.getConfigurationService(); logger.info("Scanning for configured Audio Devices."); CaptureDeviceInfo[] audioCaptureDevices = getAvailableAudioCaptureDevices(); if (config.getBoolean(PROP_AUDIO_DEVICE_IS_DISABLED, false)) { audioCaptureDevice = null; audioSystem = AUDIO_SYSTEM_NONE; } else if (audioCaptureDevices.length < 1) { logger.warn("No Audio Device was found."); audioCaptureDevice = null; audioSystem = AUDIO_SYSTEM_NONE; } else { logger.debug("Found " + audioCaptureDevices.length + " capture devices: " + audioCaptureDevices); String audioDevName = config.getString(PROP_AUDIO_DEVICE); if(audioDevName == null) { // the default behaviour if nothing is set is to use javasound // this will also choose the capture device setAudioSystem(AUDIO_SYSTEM_JAVASOUND, null); } else { for (CaptureDeviceInfo captureDeviceInfo : audioCaptureDevices) { if (audioDevName.equals(captureDeviceInfo.getName())) { setAudioSystem(getAudioSystem(captureDeviceInfo), captureDeviceInfo); break; } } if(getAudioSystem() == null) { logger.warn("Computer sound config changed or " + "there is a problem since last config was saved, " + "will fall back to default javasound"); setAudioPlaybackDevice(null); setAudioNotifyDevice(null); setAudioCaptureDevice(null); setAudioSystem(AUDIO_SYSTEM_JAVASOUND, null); } } if (audioCaptureDevice != null) logger.info("Found " + audioCaptureDevice.getName() + " as an audio capture device."); } if (config.getBoolean(PROP_VIDEO_DEVICE_IS_DISABLED, false)) videoCaptureDevice = null; else { logger.info("Scanning for configured Video Devices."); videoCaptureDevice = extractConfiguredVideoCaptureDevice(VideoFormat.RGB); // no RGB camera found. And what about YUV ? if (videoCaptureDevice == null) { videoCaptureDevice = extractConfiguredVideoCaptureDevice(VideoFormat.YUV); if (videoCaptureDevice == null) logger.info("No Video Device was found."); } } } /** * Returns the configured video capture device with the specified * output format. * @param format the desired video output format. * @return CaptureDeviceInfo for the video device. */ private CaptureDeviceInfo extractConfiguredVideoCaptureDevice(String format) { List<CaptureDeviceInfo> videoCaptureDevices = CaptureDeviceManager.getDeviceList(new VideoFormat(format)); CaptureDeviceInfo videoCaptureDevice = null; if (videoCaptureDevices.size() > 0) { String videoDevName = MediaActivator.getConfigurationService().getString( PROP_VIDEO_DEVICE); if (videoDevName == null) videoCaptureDevice = videoCaptureDevices.get(0); else { for (CaptureDeviceInfo captureDeviceInfo : videoCaptureDevices) { if (videoDevName.equals(captureDeviceInfo.getName())) { videoCaptureDevice = captureDeviceInfo; break; } } } if (videoCaptureDevice != null) logger.info("Found " + videoCaptureDevice.getName() + " as an RGB Video Device."); } return videoCaptureDevice; } /** * Returns a device that we could use for audio capture.
* * @return the CaptureDeviceInfo of a device that we could use for audio * capture. */ public CaptureDeviceInfo getAudioCaptureDevice() { return audioCaptureDevice; } /** * Gets the list of audio capture devices which are available through this * <code>DeviceConfiguration</code>, amongst which is * {@link #getAudioCaptureDevice()} and represent acceptable values * for {@link #setAudioCaptureDevice(CaptureDeviceInfo)} * * @return an array of <code>CaptureDeviceInfo</code> describing the audio * capture devices available through this * <code>DeviceConfiguration</code> */ public CaptureDeviceInfo[] getAvailableAudioCaptureDevices() { Vector<CaptureDeviceInfo> audioCaptureDevices = CaptureDeviceManager.getDeviceList(new AudioFormat( AudioFormat.LINEAR, 44100, 16, 1));// 1 means 1 channel for mono return audioCaptureDevices.toArray(NO_CAPTURE_DEVICES); } /** * Gets the list of audio capture devices which are available through this * <code>DeviceConfiguration</code>, amongst which is * {@link #getAudioCaptureDevice()} and represent acceptable values * for {@link #setAudioCaptureDevice(CaptureDeviceInfo)} * * @param soundSystem * filter capture devices only from the supplied audio system. * * @return an array of <code>CaptureDeviceInfo</code> describing the audio * capture devices available through this * <code>DeviceConfiguration</code> */ public CaptureDeviceInfo[] getAvailableAudioCaptureDevices(String soundSystem) { String protocol = null; if(soundSystem.equals(AUDIO_SYSTEM_JAVASOUND)) protocol = "javasound"; else if(soundSystem.equals(AUDIO_SYSTEM_PORTAUDIO)) protocol = "portaudio"; Vector<CaptureDeviceInfo> res = new Vector<CaptureDeviceInfo>(); if(protocol != null) { CaptureDeviceInfo[] all = getAvailableAudioCaptureDevices(); for(int i = 0; i < all.length; i++) { CaptureDeviceInfo cDeviceInfo = all[i]; if(cDeviceInfo.getLocator().getProtocol().equals(protocol)) { res.add(cDeviceInfo); } } } return res.toArray(NO_CAPTURE_DEVICES); } /** * Lists all the playback devices. These are only portaudio devices * as we can only set particular device for playback when using portaudio. * * @return the devices that can be used for playback. */ public CaptureDeviceInfo[] getAvailableAudioPlaybackDevices() { return PortAudioAuto.playbackDevices; } /** * Gets the list of video capture devices which are available through this * <code>DeviceConfiguration</code>, amongst which is * {@link #getVideoCaptureDevice()} and represent acceptable values * for {@link #setVideoCaptureDevice(CaptureDeviceInfo)} * * @return an array of <code>CaptureDeviceInfo</code> describing the video * capture devices available through this * <code>DeviceConfiguration</code> */ public CaptureDeviceInfo[] getAvailableVideoCaptureDevices() { Set<CaptureDeviceInfo> videoCaptureDevices = new HashSet<CaptureDeviceInfo>(); videoCaptureDevices.addAll(CaptureDeviceManager .getDeviceList(new VideoFormat(VideoFormat.RGB))); videoCaptureDevices.addAll(CaptureDeviceManager .getDeviceList(new VideoFormat(VideoFormat.YUV))); return videoCaptureDevices.toArray(NO_CAPTURE_DEVICES); } /** * Returns a device that we could use for video capture. * * @return the CaptureDeviceInfo of a device that we could use for video * capture. */ public CaptureDeviceInfo getVideoCaptureDevice() { return videoCaptureDevice; } /** * Sets the device which is to be used by this * <code>DeviceConfiguration</code> for video capture. 
* * @param device a <code>CaptureDeviceInfo</code> describing the device to be * used by this <code>DeviceConfiguration</code> for video * capture */ public void setVideoCaptureDevice(CaptureDeviceInfo device) { if (videoCaptureDevice != device) { CaptureDeviceInfo oldDevice = videoCaptureDevice; videoCaptureDevice = device; ConfigurationService config = MediaActivator.getConfigurationService(); config.setProperty(PROP_VIDEO_DEVICE_IS_DISABLED, videoCaptureDevice == null); if (videoCaptureDevice != null) config.setProperty(PROP_VIDEO_DEVICE, videoCaptureDevice .getName()); firePropertyChange(VIDEO_CAPTURE_DEVICE, oldDevice, device); } } /** * Sets the device which is to be used by this * <code>DeviceConfiguration</code> for audio capture. * * @param device a <code>CaptureDeviceInfo</code> describing the device to * be used by this <code>DeviceConfiguration</code> for audio * capture */ public void setAudioCaptureDevice(CaptureDeviceInfo device) { if (audioCaptureDevice != device) { CaptureDeviceInfo oldDevice = audioCaptureDevice; audioCaptureDevice = device; ConfigurationService config = MediaActivator.getConfigurationService(); if (audioCaptureDevice != null) { config.setProperty(PROP_AUDIO_DEVICE, audioCaptureDevice .getName()); } else config.setProperty(PROP_AUDIO_DEVICE, null); firePropertyChange(AUDIO_CAPTURE_DEVICE, oldDevice, device); } } /** * Determines whether audio capture is supported. * * @return true if audio capture is supported and false otherwise. */ public boolean isAudioCaptureSupported() { return this.audioCaptureDevice != null; } /** * Determines whether video capture is supported. * * @return true if video capture is supported and false otherwise. */ public boolean isVideoCaptureSupported() { return this.videoCaptureDevice != null; } /** * Returns the installed audio systems. * @return the audio system names. */ public String[] getAvailableAudioSystems() { return audioSystems.toArray(new String[0]); } /** * Adds an audio system. * @param audioSystemName the name of the audio system. */ public static void addAudioSystem(String audioSystemName) { audioSystems.add(audioSystemName); } /** * The currently selected audio system. * @return the name of the current audio system. */ public String getAudioSystem() { return audioSystem; } /** * Extracts the audio system for the given device info. * @param cdi the device * @return the audio system used by the device. */ private String getAudioSystem(CaptureDeviceInfo cdi) { String res = null; // Here we iterate over the available audio systems // to be sure that the audio system // is available and enabled on the system we are running on if(cdi.getLocator().getProtocol().equals("javasound")) { Iterator<String> iter = audioSystems.iterator(); while (iter.hasNext()) { String asName = iter.next(); if(asName.equals(AUDIO_SYSTEM_JAVASOUND)) res = asName; } } else if(cdi.getLocator().getProtocol().equals("portaudio")) { Iterator<String> iter = audioSystems.iterator(); while (iter.hasNext()) { String asName = iter.next(); if(asName.equals(AUDIO_SYSTEM_PORTAUDIO)) res = asName; } } if(res == null) res = AUDIO_SYSTEM_NONE; return res; } /** * Changes the current audio system. * When javasound is selected we also change the capture device. * * @param name the name of the audio system. * @param captureDevice the selected capture device; if null, a default one * is chosen. Used on first-time initialization when * extracting the stored config.
*/ public void setAudioSystem(String name, CaptureDeviceInfo captureDevice) { ConfigurationService config = MediaActivator.getConfigurationService(); audioSystem = name; if(name.equals(AUDIO_SYSTEM_NONE)) { setAudioCaptureDevice(null); setAudioNotifyDevice(null); setAudioPlaybackDevice(null); } else if(name.equals(AUDIO_SYSTEM_JAVASOUND)) { setAudioNotifyDevice(null); setAudioPlaybackDevice(null); // as there is only one device for javasound // let's search for it if(captureDevice != null) setAudioCaptureDevice(captureDevice); else { CaptureDeviceInfo[] audioCaptureDevices = getAvailableAudioCaptureDevices(); for (CaptureDeviceInfo captureDeviceInfo : audioCaptureDevices) { if(captureDeviceInfo.getLocator().getProtocol(). equals("javasound")) { setAudioCaptureDevice(captureDeviceInfo); break; } } } // if we have initialized the audio capture device, it means // javasound is available and everything is ok if (audioCaptureDevice != null) { removePortAudioRenderer(); initJavaSoundRenderer(); } } else if(name.equals(AUDIO_SYSTEM_PORTAUDIO)) { // changed to portaudio, so let's set the default devices setAudioCaptureDevice(PortAudioAuto.defaultCaptureDevice); setAudioNotifyDevice(PortAudioAuto.defaultPlaybackDevice); setAudioPlaybackDevice(PortAudioAuto.defaultPlaybackDevice); // we don't save anything because it will be saved // when the devices are stored // if nothing is set we consider it as not configured, // so when we restart we will end up with the default config; // until then we will use the latest config // the capture device is not null when we are called for the // first time; we will also extract playback devices here if(captureDevice != null) { setAudioCaptureDevice(captureDevice); String audioDevName = config.getString(PROP_AUDIO_NOTIFY_DEVICE); if(audioDevName != null) { for (CaptureDeviceInfo captureDeviceInfo : PortAudioAuto.playbackDevices) { if (audioDevName.equals(captureDeviceInfo.getName())) { this.audioNotifyDevice = captureDeviceInfo; break; } } } audioDevName = config.getString(PROP_AUDIO_PLAYBACK_DEVICE); if(audioDevName != null) { for (CaptureDeviceInfo captureDeviceInfo : PortAudioAuto.playbackDevices) { if (audioDevName.equals(captureDeviceInfo.getName())) { this.audioPlaybackDevice = captureDeviceInfo; setDeviceToRenderer(audioPlaybackDevice); removeJavaSoundRenderer(); initPortAudioRenderer(); break; } } } } // return here to prevent clearing the last config that was saved return; } else { // not expected behaviour logger.error("Unknown audio system! Name:" + name); audioSystem = null; } config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, audioCaptureDevice == null); } /** * Installs the PortAudio renderer. */ protected static void initPortAudioRenderer() { PlugInManager.addPlugIn( "net.java.sip.communicator.impl.media.renderer.audio.PortAudioRenderer", net.java.sip.communicator.impl.media.renderer.audio. PortAudioRenderer.supportedInputFormats, null, PlugInManager.RENDERER); } /** * Removes the javasound renderer. */ private void removeJavaSoundRenderer() { PlugInManager.removePlugIn( "com.sun.media.renderer.audio.JavaSoundRenderer", PlugInManager.RENDERER); } /** * Removes the portaudio renderer. */ private void removePortAudioRenderer() { PlugInManager.removePlugIn( "net.java.sip.communicator.impl.media.renderer.audio.PortAudioRenderer", PlugInManager.RENDERER); } /** * Registers the javasound renderer.
*/ private void initJavaSoundRenderer() { try { PlugInManager.addPlugIn( "com.sun.media.renderer.audio.JavaSoundRenderer", new com.sun.media.renderer.audio.JavaSoundRenderer() .getSupportedInputFormats(), null, PlugInManager.RENDERER); } catch (Exception e) { // if the class is missing logger.error("Problem initializing javasound renderer", e); } } /** * Sets the device to be used by the portaudio renderer. * @param devInfo the device to pass to the renderer */ private void setDeviceToRenderer(CaptureDeviceInfo devInfo) { // no need to change the renderer's device, it will not be used anyway if(devInfo == null) return; try { net.java.sip.communicator.impl.media.renderer.audio. PortAudioRenderer.setDevice(devInfo.getLocator()); } catch (Exception e) { logger.error("error setting device to renderer", e); } } /** * @return the audioPlaybackDevice */ public CaptureDeviceInfo getAudioPlaybackDevice() { return audioPlaybackDevice; } /** * @return the audioNotifyDevice */ public CaptureDeviceInfo getAudioNotifyDevice() { return audioNotifyDevice; } /** * @param audioPlaybackDevice the audioPlaybackDevice to set */ public void setAudioPlaybackDevice(CaptureDeviceInfo audioPlaybackDevice) { if(this.audioPlaybackDevice != audioPlaybackDevice) { CaptureDeviceInfo oldDev = this.audioPlaybackDevice; this.audioPlaybackDevice = audioPlaybackDevice; setDeviceToRenderer(audioPlaybackDevice); // we changed the playback device, so we are using portaudio; // remove the javasound renderer to be sure // it's not used anymore and install the portaudio one removeJavaSoundRenderer(); initPortAudioRenderer(); ConfigurationService config = MediaActivator.getConfigurationService(); if (audioPlaybackDevice != null) { config.setProperty(PROP_AUDIO_PLAYBACK_DEVICE, audioPlaybackDevice.getName()); config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, false); } else config.setProperty(PROP_AUDIO_PLAYBACK_DEVICE, null); firePropertyChange(AUDIO_PLAYBACK_DEVICE, oldDev, audioPlaybackDevice); } } /** * @param audioNotifyDevice the audioNotifyDevice to set */ public void setAudioNotifyDevice(CaptureDeviceInfo audioNotifyDevice) { if(this.audioNotifyDevice != audioNotifyDevice) { CaptureDeviceInfo oldDev = this.audioNotifyDevice; this.audioNotifyDevice = audioNotifyDevice; ConfigurationService config = MediaActivator.getConfigurationService(); if (audioNotifyDevice != null) { config.setProperty(PROP_AUDIO_NOTIFY_DEVICE, audioNotifyDevice.getName()); // at least notify or playback must be set to consider // portaudio enabled config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, false); } else config.setProperty(PROP_AUDIO_NOTIFY_DEVICE, null); firePropertyChange(AUDIO_NOTIFY_DEVICE, oldDev, audioNotifyDevice); } } }
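DeviceConfiguration extends PropertyChangeNotifier and fires AUDIO_CAPTURE_DEVICE, AUDIO_PLAYBACK_DEVICE, AUDIO_NOTIFY_DEVICE and VIDEO_CAPTURE_DEVICE events from its setters, as seen above. A minimal listener sketch follows; it assumes PropertyChangeNotifier exposes the usual java.beans-style addPropertyChangeListener, and the DeviceChangeLogger class itself is hypothetical.

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

import net.java.sip.communicator.impl.media.device.DeviceConfiguration;

public class DeviceChangeLogger
{
    public static void watch(DeviceConfiguration deviceConfig)
    {
        // Subscribe to all property changes fired by the configuration
        // (addPropertyChangeListener is assumed to come from
        // PropertyChangeNotifier).
        deviceConfig.addPropertyChangeListener(new PropertyChangeListener()
        {
            public void propertyChange(PropertyChangeEvent evt)
            {
                // React only to audio capture device changes.
                if (DeviceConfiguration.AUDIO_CAPTURE_DEVICE
                        .equals(evt.getPropertyName()))
                    System.out.println("Audio capture device: "
                        + evt.getOldValue() + " -> " + evt.getNewValue());
            }
        });
    }
}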
src/net/java/sip/communicator/impl/media/device/DeviceConfiguration.java
/* * SIP Communicator, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package net.java.sip.communicator.impl.media.device; import java.util.*; import javax.media.*; import javax.media.format.*; import net.java.sip.communicator.impl.media.*; import net.java.sip.communicator.service.configuration.*; import net.java.sip.communicator.util.*; /** * This class aims to provide a simple configuration interface for JMF. It * retrieves stored configuration when started or listens to ConfigurationEvent * for property changes and configures the JMF accordingly. * * @author Martin Andre * @author Emil Ivov * @author Lubomir Marinov */ @SuppressWarnings("unchecked") public class DeviceConfiguration extends PropertyChangeNotifier { /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for audio * capture. */ public static final String AUDIO_CAPTURE_DEVICE = "AUDIO_CAPTURE_DEVICE"; /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for audio * playback. */ public static final String AUDIO_PLAYBACK_DEVICE = "AUDIO_PLAYBACK_DEVICE"; /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for audio * notify. */ public static final String AUDIO_NOTIFY_DEVICE = "AUDIO_NOTIFY_DEVICE"; /** * The name of the <code>DeviceConfiguration</code> property which * represents the device used by <code>DeviceConfiguration</code> for video * capture. */ public static final String VIDEO_CAPTURE_DEVICE = "VIDEO_CAPTURE_DEVICE"; /** * When audio is disabled the selected audio system is with name None. */ public static final String AUDIO_SYSTEM_NONE = "None"; /** * JavaSound sound system. */ public static final String AUDIO_SYSTEM_JAVASOUND = "JavaSound"; /** * PortAudio sound system. */ public static final String AUDIO_SYSTEM_PORTAUDIO = "PortAudio"; private static final String PROP_AUDIO_DEVICE = "net.java.sip.communicator.impl.media.audiodev"; private static final String PROP_AUDIO_PLAYBACK_DEVICE = "net.java.sip.communicator.impl.media.audio.playbackdev"; private static final String PROP_AUDIO_NOTIFY_DEVICE = "net.java.sip.communicator.impl.media.audio.notifydev"; private static final String PROP_AUDIO_DEVICE_IS_DISABLED = "net.java.sip.communicator.impl.media.audiodevIsDisabled"; private static final String PROP_VIDEO_DEVICE = "net.java.sip.communicator.impl.media.videodev"; private static final String PROP_VIDEO_DEVICE_IS_DISABLED = "net.java.sip.communicator.impl.media.videodevIsDisabled"; private static final CaptureDeviceInfo[] NO_CAPTURE_DEVICES = new CaptureDeviceInfo[0]; private Logger logger = Logger.getLogger(DeviceConfiguration.class); /** * The device that we'll be using for audio capture. */ private CaptureDeviceInfo audioCaptureDevice = null; private CaptureDeviceInfo audioPlaybackDevice = null; private CaptureDeviceInfo audioNotifyDevice = null; /** * The device that we'll be using for video capture. */ private CaptureDeviceInfo videoCaptureDevice; private static Vector<String> audioSystems = new Vector<String>(); private String audioSystem = null; /** * Default constructor. */ public DeviceConfiguration() { //dummy ... XXX do we really need it though? } /** * Initializes capture devices. 
*/ public void initialize() { // these seem to be throwing exceptions every now and then so we'll // blindly catch them for now try { JmfDeviceDetector.detectAndConfigureCaptureDevices(); extractConfiguredCaptureDevices(); } catch (Exception ex) { logger.error("Failed to initialize media.", ex); } } /** * Detects capture devices configured through JMF and disable audio and/or * video transmission if none were found. Stores found devices in * audioCaptureDevice and videoCaptureDevice. */ private void extractConfiguredCaptureDevices() { ConfigurationService config = MediaActivator.getConfigurationService(); logger.info("Scanning for configured Audio Devices."); CaptureDeviceInfo[] audioCaptureDevices = getAvailableAudioCaptureDevices(); if (config.getBoolean(PROP_AUDIO_DEVICE_IS_DISABLED, false)) { audioCaptureDevice = null; audioSystem = AUDIO_SYSTEM_NONE; } else if (audioCaptureDevices.length < 1) { logger.warn("No Audio Device was found."); audioCaptureDevice = null; audioSystem = AUDIO_SYSTEM_NONE; } else { logger.debug("Found " + audioCaptureDevices.length + " capture devices: " + audioCaptureDevices); String audioDevName = config.getString(PROP_AUDIO_DEVICE); if(audioDevName == null) { // the default behaviour if nothing set is to use javasound // this will also choose the capture device setAudioSystem(AUDIO_SYSTEM_JAVASOUND, null); } else { for (CaptureDeviceInfo captureDeviceInfo : audioCaptureDevices) { if (audioDevName.equals(captureDeviceInfo.getName())) { setAudioSystem(getAudioSystem(captureDeviceInfo), captureDeviceInfo); break; } } if(getAudioSystem() == null) { logger.warn("Computer sound config changed or " + "there is a problem since last config was saved, " + "will back to default javasound"); setAudioPlaybackDevice(null); setAudioNotifyDevice(null); setAudioCaptureDevice(null); setAudioSystem(AUDIO_SYSTEM_JAVASOUND, null); } } if (audioCaptureDevice != null) logger.info("Found " + audioCaptureDevice.getName() + " as an audio capture device."); } if (config.getBoolean(PROP_VIDEO_DEVICE_IS_DISABLED, false)) videoCaptureDevice = null; else { logger.info("Scanning for configured Video Devices."); videoCaptureDevice = extractConfiguredVideoCaptureDevice(VideoFormat.RGB); // no RGB camera found. And what about YUV ? if (videoCaptureDevice == null) { videoCaptureDevice = extractConfiguredVideoCaptureDevice(VideoFormat.YUV); if (videoCaptureDevice == null) logger.info("No Video Device was found."); } } } /** * Returns the configured video capture device with the specified * output format. * @param format the output format of the video format. * @return CaptureDeviceInfo for the video device. */ private CaptureDeviceInfo extractConfiguredVideoCaptureDevice(String format) { List<CaptureDeviceInfo> videoCaptureDevices = CaptureDeviceManager.getDeviceList(new VideoFormat(format)); CaptureDeviceInfo videoCaptureDevice = null; if (videoCaptureDevices.size() > 0) { String videoDevName = MediaActivator.getConfigurationService().getString( PROP_VIDEO_DEVICE); if (videoDevName == null) videoCaptureDevice = videoCaptureDevices.get(0); else { for (CaptureDeviceInfo captureDeviceInfo : videoCaptureDevices) { if (videoDevName.equals(captureDeviceInfo.getName())) { videoCaptureDevice = captureDeviceInfo; break; } } } if (videoCaptureDevice != null) logger.info("Found " + videoCaptureDevice.getName() + " as an RGB Video Device."); } return videoCaptureDevice; } /** * Returns a device that we could use for audio capture. 
* * @return the CaptureDeviceInfo of a device that we could use for audio * capture. */ public CaptureDeviceInfo getAudioCaptureDevice() { return audioCaptureDevice; } // /** // * Returns the default capture device used by the portaudio system. // * @return the default capture device. // */ // public CaptureDeviceInfo getDefaultAudioCaptureDevice() // { // return PortAudioAuto.defaultCaptureDevice; // } /** * Gets the list of audio capture devices which are available through this * <code>DeviceConfiguration</code>, amongst which is * {@link #getAudioCaptureDevice()} and represent acceptable values * for {@link #setAudioCaptureDevice(CaptureDeviceInfo)} * * @return an array of <code>CaptureDeviceInfo</code> describing the audio * capture devices available through this * <code>DeviceConfiguration</code> */ public CaptureDeviceInfo[] getAvailableAudioCaptureDevices() { Vector<CaptureDeviceInfo> audioCaptureDevices = CaptureDeviceManager.getDeviceList(new AudioFormat( AudioFormat.LINEAR, 44100, 16, 1));// 1 means 1 channel for mono return audioCaptureDevices.toArray(NO_CAPTURE_DEVICES); } /** * Gets the list of audio capture devices which are available through this * <code>DeviceConfiguration</code>, amongst which is * {@link #getAudioCaptureDevice()} and represent acceptable values * for {@link #setAudioCaptureDevice(CaptureDeviceInfo)} * * @param soundSystem * filter capture devices only from the supplied audio system. * * @return an array of <code>CaptureDeviceInfo</code> describing the audio * capture devices available through this * <code>DeviceConfiguration</code> */ public CaptureDeviceInfo[] getAvailableAudioCaptureDevices(String soundSystem) { String protocol = null; if(soundSystem.equals(AUDIO_SYSTEM_JAVASOUND)) protocol = "javasound"; else if(soundSystem.equals(AUDIO_SYSTEM_PORTAUDIO)) protocol = "portaudio"; Vector<CaptureDeviceInfo> res = new Vector<CaptureDeviceInfo>(); if(protocol != null) { CaptureDeviceInfo[] all = getAvailableAudioCaptureDevices(); for(int i = 0; i < all.length; i++) { CaptureDeviceInfo cDeviceInfo = all[i]; if(cDeviceInfo.getLocator().getProtocol().equals(protocol)) { res.add(cDeviceInfo); } } } return res.toArray(NO_CAPTURE_DEVICES); } /** * Lists all the playback devices. These are only portaudio devices * as we can only set particular device for playback when using portaudio. * * @return the devices that can be used for playback. */ public CaptureDeviceInfo[] getAvailableAudioPlaybackDevices() { return PortAudioAuto.playbackDevices; } /** * Gets the list of video capture devices which are available through this * <code>DeviceConfiguration</code>, amongst which is * {@link #getVideoCaptureDevice()} and represent acceptable values * for {@link #setVideoCaptureDevice(CaptureDeviceInfo)} * * @return an array of <code>CaptureDeviceInfo</code> describing the video * capture devices available through this * <code>DeviceConfiguration</code> */ public CaptureDeviceInfo[] getAvailableVideoCaptureDevices() { Set<CaptureDeviceInfo> videoCaptureDevices = new HashSet<CaptureDeviceInfo>(); videoCaptureDevices.addAll(CaptureDeviceManager .getDeviceList(new VideoFormat(VideoFormat.RGB))); videoCaptureDevices.addAll(CaptureDeviceManager .getDeviceList(new VideoFormat(VideoFormat.YUV))); return videoCaptureDevices.toArray(NO_CAPTURE_DEVICES); } /** * Returns a device that we could use for video capture. * * @return the CaptureDeviceInfo of a device that we could use for video * capture. 
*/ public CaptureDeviceInfo getVideoCaptureDevice() { return videoCaptureDevice; } /** * Sets the device which is to be used by this * <code>DeviceConfiguration</code> for video capture. * * @param device a <code>CaptureDeviceInfo</code> describing the device to * be used by this <code>DeviceConfiguration</code> for video * capture */ public void setVideoCaptureDevice(CaptureDeviceInfo device) { if (videoCaptureDevice != device) { CaptureDeviceInfo oldDevice = videoCaptureDevice; videoCaptureDevice = device; ConfigurationService config = MediaActivator.getConfigurationService(); config.setProperty(PROP_VIDEO_DEVICE_IS_DISABLED, videoCaptureDevice == null); if (videoCaptureDevice != null) config.setProperty(PROP_VIDEO_DEVICE, videoCaptureDevice .getName()); firePropertyChange(VIDEO_CAPTURE_DEVICE, oldDevice, device); } } /** * Sets the device which is to be used by this * <code>DeviceConfiguration</code> for audio capture. * * @param device a <code>CaptureDeviceInfo</code> describing the device to * be used by this <code>DeviceConfiguration</code> for audio * capture */ public void setAudioCaptureDevice(CaptureDeviceInfo device) { if (audioCaptureDevice != device) { CaptureDeviceInfo oldDevice = audioCaptureDevice; audioCaptureDevice = device; ConfigurationService config = MediaActivator.getConfigurationService(); if (audioCaptureDevice != null) { config.setProperty(PROP_AUDIO_DEVICE, audioCaptureDevice .getName()); } else config.setProperty(PROP_AUDIO_DEVICE, null); firePropertyChange(AUDIO_CAPTURE_DEVICE, oldDevice, device); } } /** * Indicates whether audio capture is supported. * * @return true if audio capture is supported and false otherwise. */ public boolean isAudioCaptureSupported() { return this.audioCaptureDevice != null; } /** * Indicates whether video capture is supported. * * @return true if video capture is supported and false otherwise. */ public boolean isVideoCaptureSupported() { return this.videoCaptureDevice != null; } /** * Returns the installed audio systems. * @return the audio system names. */ public String[] getAvailableAudioSystems() { return audioSystems.toArray(new String[0]); } /** * Adds an audio system. * @param audioSystemName the name of the audio system. */ public static void addAudioSystem(String audioSystemName) { audioSystems.add(audioSystemName); } /** * Returns the currently selected audio system. * @return the name of the current audio system. */ public String getAudioSystem() { return audioSystem; } /** * Extracts the audio system for the given device info. * @param cdi the device * @return the audio system used by the device. */ private String getAudioSystem(CaptureDeviceInfo cdi) { String res = null; // Here we iterate over the available audio systems // to be sure that the audio system // is available and enabled on the system we are running on if(cdi.getLocator().getProtocol().equals("javasound")) { Iterator<String> iter = audioSystems.iterator(); while (iter.hasNext()) { String asName = iter.next(); if(asName.equals(AUDIO_SYSTEM_JAVASOUND)) res = asName; } } else if(cdi.getLocator().getProtocol().equals("portaudio")) { Iterator<String> iter = audioSystems.iterator(); while (iter.hasNext()) { String asName = iter.next(); if(asName.equals(AUDIO_SYSTEM_PORTAUDIO)) res = asName; } } if(res == null) res = AUDIO_SYSTEM_NONE; return res; } /** * Changes the current audio system. * When javasound is selected we also change the capture device. * * @param name the name of the audio system.
 * @param captureDevice the selected capture device; if it is null we will * choose a default one. This parameter is used the first time we * initialize and extract the configuration. */ public void setAudioSystem(String name, CaptureDeviceInfo captureDevice) { ConfigurationService config = MediaActivator.getConfigurationService(); audioSystem = name; if(name.equals(AUDIO_SYSTEM_NONE)) { setAudioCaptureDevice(null); setAudioNotifyDevice(null); setAudioPlaybackDevice(null); } else if(name.equals(AUDIO_SYSTEM_JAVASOUND)) { setAudioNotifyDevice(null); setAudioPlaybackDevice(null); // as there is only one device for javasound // let's search for it if(captureDevice != null) setAudioCaptureDevice(captureDevice); else { CaptureDeviceInfo[] audioCaptureDevices = getAvailableAudioCaptureDevices(); for (CaptureDeviceInfo captureDeviceInfo : audioCaptureDevices) { if(captureDeviceInfo.getLocator().getProtocol(). equals("javasound")) { setAudioCaptureDevice(captureDeviceInfo); break; } } } // if we have initialized the audioCaptureDevice, it means javasound is // available and everything is ok if (audioCaptureDevice != null) { removePortAudioRenderer(); initJavaSoundRenderer(); } } else if(name.equals(AUDIO_SYSTEM_PORTAUDIO)) { // changed to portaudio, so let's set the default devices setAudioCaptureDevice(PortAudioAuto.defaultCaptureDevice); setAudioNotifyDevice(PortAudioAuto.defaultPlaybackDevice); setAudioPlaybackDevice(PortAudioAuto.defaultPlaybackDevice); // we don't save anything because it will be saved // when the devices are stored; // if nothing is set we consider it not configured, // so when we restart we will end up with the default config // and until then the latest config is used // capture device is not null when we are called for the // first time; we will also extract playback devices here if(captureDevice != null) { setAudioCaptureDevice(captureDevice); String audioDevName = config.getString(PROP_AUDIO_NOTIFY_DEVICE); if(audioDevName != null) { for (CaptureDeviceInfo captureDeviceInfo : PortAudioAuto.playbackDevices) { if (audioDevName.equals(captureDeviceInfo.getName())) { this.audioNotifyDevice = captureDeviceInfo; break; } } } audioDevName = config.getString(PROP_AUDIO_PLAYBACK_DEVICE); if(audioDevName != null) { for (CaptureDeviceInfo captureDeviceInfo : PortAudioAuto.playbackDevices) { if (audioDevName.equals(captureDeviceInfo.getName())) { this.audioPlaybackDevice = captureDeviceInfo; setDeviceToRenderer(audioPlaybackDevice); removeJavaSoundRenderer(); initPortAudioRenderer(); break; } } } } // return here to prevent clearing the last config that was saved return; } else { // unexpected behaviour logger.error("Unknown audio system! Name: " + name); audioSystem = null; } config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, audioCaptureDevice == null); } /** * Installs the PortAudio renderer. */ protected static void initPortAudioRenderer() { PlugInManager.addPlugIn( "net.java.sip.communicator.impl.media.renderer.audio.PortAudioRenderer", net.java.sip.communicator.impl.media.renderer.audio. PortAudioRenderer.supportedInputFormats, null, PlugInManager.RENDERER); } /** * Removes the javasound renderer. */ private void removeJavaSoundRenderer() { PlugInManager.removePlugIn( "com.sun.media.renderer.audio.JavaSoundRenderer", PlugInManager.RENDERER); } /** * Removes the portaudio renderer. */ private void removePortAudioRenderer() { PlugInManager.removePlugIn( "net.java.sip.communicator.impl.media.renderer.audio.PortAudioRenderer", PlugInManager.RENDERER); } /** * Registers the javasound renderer.
 */ private void initJavaSoundRenderer() { try { PlugInManager.addPlugIn( "com.sun.media.renderer.audio.JavaSoundRenderer", new com.sun.media.renderer.audio.JavaSoundRenderer() .getSupportedInputFormats(), null, PlugInManager.RENDERER); } catch (Exception e) { // if the class is missing logger.error("Problem initializing javasound renderer", e); } } /** * Sets the device to be used by the portaudio renderer. * @param devInfo the device to be used by the renderer */ private void setDeviceToRenderer(CaptureDeviceInfo devInfo) { // no need to set a device on the renderer, it will not be used anyway if(devInfo == null) return; try { net.java.sip.communicator.impl.media.renderer.audio. PortAudioRenderer.setDevice(devInfo.getLocator()); } catch (Exception e) { logger.error("error setting device to renderer", e); } } /** * @return the audioPlaybackDevice */ public CaptureDeviceInfo getAudioPlaybackDevice() { return audioPlaybackDevice; } // /** // * Returns the default playback device used by the portaudio system. // * @return the default playback device. // */ // public CaptureDeviceInfo getDefaultAudioPlaybackDevice() // { // return PortAudioAuto.defaultCaptureDevice; // } /** * @return the audioNotifyDevice */ public CaptureDeviceInfo getAudioNotifyDevice() { return audioNotifyDevice; } // /** // * Returns the default notify device used by the portaudio system. // * @return the default notify device. // */ // public CaptureDeviceInfo getDefaultAudioNotifyDevice() // { // return PortAudioAuto.defaultPlaybackDevice; // } /** * @param audioPlaybackDevice the audioPlaybackDevice to set */ public void setAudioPlaybackDevice(CaptureDeviceInfo audioPlaybackDevice) { if(this.audioPlaybackDevice != audioPlaybackDevice) { CaptureDeviceInfo oldDev = this.audioPlaybackDevice; this.audioPlaybackDevice = audioPlaybackDevice; setDeviceToRenderer(audioPlaybackDevice); // we changed the playback device, so we are using portaudio; // remove the javasound renderer to be sure // it's not used anymore and install the portaudio one removeJavaSoundRenderer(); initPortAudioRenderer(); ConfigurationService config = MediaActivator.getConfigurationService(); if (audioPlaybackDevice != null) { config.setProperty(PROP_AUDIO_PLAYBACK_DEVICE, audioPlaybackDevice.getName()); config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, false); } else config.setProperty(PROP_AUDIO_PLAYBACK_DEVICE, null); firePropertyChange(AUDIO_PLAYBACK_DEVICE, oldDev, audioPlaybackDevice); } } /** * @param audioNotifyDevice the audioNotifyDevice to set */ public void setAudioNotifyDevice(CaptureDeviceInfo audioNotifyDevice) { if(this.audioNotifyDevice != audioNotifyDevice) { CaptureDeviceInfo oldDev = this.audioNotifyDevice; this.audioNotifyDevice = audioNotifyDevice; ConfigurationService config = MediaActivator.getConfigurationService(); if (audioNotifyDevice != null) { config.setProperty(PROP_AUDIO_NOTIFY_DEVICE, audioNotifyDevice.getName()); // at least notify or playback must be set to consider // portaudio enabled config.setProperty(PROP_AUDIO_DEVICE_IS_DISABLED, false); } else config.setProperty(PROP_AUDIO_NOTIFY_DEVICE, null); firePropertyChange(AUDIO_NOTIFY_DEVICE, oldDev, audioNotifyDevice); } } }
Remove unneeded commented methods.
src/net/java/sip/communicator/impl/media/device/DeviceConfiguration.java
Remove unneeded commented methods.
<ide><path>src/net/java/sip/communicator/impl/media/device/DeviceConfiguration.java <ide> { <ide> return audioCaptureDevice; <ide> } <del> <del>// /** <del>// * Returns the default capture device used by the portaudio system. <del>// * @return the default capture device. <del>// */ <del>// public CaptureDeviceInfo getDefaultAudioCaptureDevice() <del>// { <del>// return PortAudioAuto.defaultCaptureDevice; <del>// } <ide> <ide> /** <ide> * Gets the list of audio capture devices which are available through this <ide> return audioPlaybackDevice; <ide> } <ide> <del>// /** <del>// * Returns the default playback device used by the portaudio system. <del>// * @return the default playback device. <del>// */ <del>// public CaptureDeviceInfo getDefaultAudioPlaybackDevice() <del>// { <del>// return PortAudioAuto.defaultCaptureDevice; <del>// } <del> <ide> /** <ide> * @return the audioNotifyDevice <ide> */ <ide> { <ide> return audioNotifyDevice; <ide> } <del> <del>// /** <del>// * Returns the default notify device used by the portaudio system. <del>// * @return the default notify device. <del>// */ <del>// public CaptureDeviceInfo getDefaultAudioNotifyDevice() <del>// { <del>// return PortAudioAuto.defaultPlaybackDevice; <del>// } <ide> <ide> /** <ide> * @param audioPlaybackDevice the audioPlaybackDevice to set
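For context, the DeviceConfiguration code in the row above selects capture devices by the protocol of their JMF MediaLocator ("javasound" vs. "portaudio"). Below is a minimal, self-contained sketch of that filtering pattern, assuming only that JMF is on the classpath; the class name DeviceFilterSketch and the hard-coded 44.1 kHz mono format are illustrative choices, not part of the original code.

import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import javax.media.CaptureDeviceInfo;
import javax.media.CaptureDeviceManager;
import javax.media.format.AudioFormat;

public class DeviceFilterSketch {
    // Returns the capture devices whose MediaLocator uses the given protocol,
    // mirroring getAvailableAudioCaptureDevices(String soundSystem) above.
    public static List<CaptureDeviceInfo> devicesForProtocol(String protocol) {
        // JMF predates generics, so getDeviceList returns a raw Vector.
        Vector<?> all = CaptureDeviceManager.getDeviceList(
                new AudioFormat(AudioFormat.LINEAR, 44100, 16, 1)); // 44.1 kHz, 16-bit, mono
        List<CaptureDeviceInfo> matching = new ArrayList<>();
        for (Object o : all) {
            CaptureDeviceInfo info = (CaptureDeviceInfo) o;
            if (info.getLocator().getProtocol().equals(protocol)) {
                matching.add(info);
            }
        }
        return matching;
    }

    public static void main(String[] args) {
        // "portaudio" and "javasound" are the two protocols the row above distinguishes.
        for (CaptureDeviceInfo info : devicesForProtocol("portaudio")) {
            System.out.println(info.getName());
        }
    }
}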
Java
lgpl-2.1
6ba4b957a159127146038c3527f6e7480dccdb42
0
DavideD/hibernate-ogm-contrib,DavideD/hibernate-ogm,jhalliday/hibernate-ogm,hferentschik/hibernate-ogm,uugaa/hibernate-ogm,tempbottle/hibernate-ogm,gunnarmorling/hibernate-ogm,DavideD/hibernate-ogm-cassandra,ZJaffee/hibernate-ogm,hibernate/hibernate-ogm,DavideD/hibernate-ogm-cassandra,Sanne/hibernate-ogm,schernolyas/hibernate-ogm,gunnarmorling/hibernate-ogm,gunnarmorling/hibernate-ogm,jhalliday/hibernate-ogm,ZJaffee/hibernate-ogm,Sanne/hibernate-ogm,Sanne/hibernate-ogm,schernolyas/hibernate-ogm,schernolyas/hibernate-ogm,mp911de/hibernate-ogm,DavideD/hibernate-ogm-contrib,tempbottle/hibernate-ogm,hibernate/hibernate-ogm,DavideD/hibernate-ogm,jhalliday/hibernate-ogm,uugaa/hibernate-ogm,mp911de/hibernate-ogm,ZJaffee/hibernate-ogm,DavideD/hibernate-ogm-contrib,DavideD/hibernate-ogm,Sanne/hibernate-ogm,DavideD/hibernate-ogm,mp911de/hibernate-ogm,tempbottle/hibernate-ogm,hibernate/hibernate-ogm,DavideD/hibernate-ogm-cassandra,hibernate/hibernate-ogm,uugaa/hibernate-ogm
/* * Hibernate OGM, Domain model persistence for NoSQL datastores * * License: GNU Lesser General Public License (LGPL), version 2.1 or later * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>. */ package org.hibernate.ogm.datastore.couchdb; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.persistence.OptimisticLockException; import org.hibernate.LockMode; import org.hibernate.dialect.lock.LockingStrategy; import org.hibernate.ogm.datastore.couchdb.dialect.backend.impl.CouchDBDatastore; import org.hibernate.ogm.datastore.couchdb.dialect.backend.json.impl.AssociationDocument; import org.hibernate.ogm.datastore.couchdb.dialect.backend.json.impl.Document; import org.hibernate.ogm.datastore.couchdb.dialect.backend.json.impl.EntityDocument; import org.hibernate.ogm.datastore.couchdb.dialect.model.impl.CouchDBAssociation; import org.hibernate.ogm.datastore.couchdb.dialect.model.impl.CouchDBAssociationSnapshot; import org.hibernate.ogm.datastore.couchdb.dialect.model.impl.CouchDBTupleSnapshot; import org.hibernate.ogm.datastore.couchdb.dialect.type.impl.CouchDBBlobType; import org.hibernate.ogm.datastore.couchdb.dialect.type.impl.CouchDBByteType; import org.hibernate.ogm.datastore.couchdb.dialect.type.impl.CouchDBLongType; import org.hibernate.ogm.datastore.couchdb.impl.CouchDBDatastoreProvider; import org.hibernate.ogm.datastore.couchdb.util.impl.Identifier; import org.hibernate.ogm.datastore.document.options.AssociationStorageType; import org.hibernate.ogm.datastore.document.options.spi.AssociationStorageOption; import org.hibernate.ogm.dialect.spi.AssociationContext; import org.hibernate.ogm.dialect.spi.AssociationTypeContext; import org.hibernate.ogm.dialect.spi.BaseGridDialect; import org.hibernate.ogm.dialect.spi.DuplicateInsertPreventionStrategy; import org.hibernate.ogm.dialect.spi.ModelConsumer; import org.hibernate.ogm.dialect.spi.NextValueRequest; import org.hibernate.ogm.dialect.spi.TupleAlreadyExistsException; import org.hibernate.ogm.dialect.spi.TupleContext; import org.hibernate.ogm.model.key.spi.AssociationKey; import org.hibernate.ogm.model.key.spi.AssociationKeyMetadata; import org.hibernate.ogm.model.key.spi.EntityKey; import org.hibernate.ogm.model.key.spi.EntityKeyMetadata; import org.hibernate.ogm.model.key.spi.RowKey; import org.hibernate.ogm.model.spi.Association; import org.hibernate.ogm.model.spi.AssociationKind; import org.hibernate.ogm.model.spi.Tuple; import org.hibernate.ogm.type.impl.Iso8601StringCalendarType; import org.hibernate.ogm.type.impl.Iso8601StringDateType; import org.hibernate.ogm.type.spi.GridType; import org.hibernate.persister.entity.Lockable; import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.Type; /** * Stores tuples and associations as JSON documents inside CouchDB. * <p> * Tuples are stored in CouchDB documents obtained as a JSON serialization of a {@link EntityDocument} object. * Associations are stored in CouchDB documents obtained as a JSON serialization of a {@link AssociationDocument} object. 
* * @author Andrea Boriero &lt;[email protected]&gt; * @author Gunnar Morling */ public class CouchDBDialect extends BaseGridDialect { private final CouchDBDatastoreProvider provider; public CouchDBDialect(CouchDBDatastoreProvider provider) { this.provider = provider; } @Override public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { return null; } @Override public Tuple getTuple(EntityKey key, TupleContext tupleContext) { EntityDocument entity = getDataStore().getEntity( Identifier.createEntityId( key ) ); if ( entity != null ) { return new Tuple( new CouchDBTupleSnapshot( entity.getProperties() ) ); } return null; } @Override public Tuple createTuple(EntityKey key, TupleContext tupleContext) { return new Tuple( new CouchDBTupleSnapshot( key ) ); } @Override public void insertOrUpdateTuple(EntityKey key, Tuple tuple, TupleContext tupleContext) { CouchDBTupleSnapshot snapshot = (CouchDBTupleSnapshot) tuple.getSnapshot(); String revision = (String) snapshot.get( Document.REVISION_FIELD_NAME ); // load the latest revision for updates without the revision being present; a warning about // this mapping will have been issued at factory start-up if ( revision == null && !snapshot.isCreatedOnInsert() ) { revision = getDataStore().getCurrentRevision( Identifier.createEntityId( key ), false ); } try { // this will raise an optimistic locking exception if the revision is either null or not the current one getDataStore().saveDocument( new EntityDocument( key, revision, tuple ) ); } catch (OptimisticLockException ole) { if ( snapshot.isCreatedOnInsert() ) { throw new TupleAlreadyExistsException( key.getMetadata(), tuple, ole ); } else { throw ole; } } } @Override public void removeTuple(EntityKey key, TupleContext tupleContext) { removeDocumentIfPresent( Identifier.createEntityId( key ) ); } @Override public Association getAssociation(AssociationKey key, AssociationContext associationContext) { CouchDBAssociation couchDBAssociation = null; if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) { EntityDocument owningEntity = getDataStore().getEntity( Identifier.createEntityId( key.getEntityKey() ) ); if ( owningEntity != null && owningEntity.getProperties().containsKey( key.getMetadata().getCollectionRole() ) ) { couchDBAssociation = CouchDBAssociation.fromEmbeddedAssociation( owningEntity, key.getMetadata().getCollectionRole() ); } } else { AssociationDocument association = getDataStore().getAssociation( Identifier.createAssociationId( key ) ); if ( association != null ) { couchDBAssociation = CouchDBAssociation.fromAssociationDocument( association ); } } return couchDBAssociation != null ? 
new Association( new CouchDBAssociationSnapshot( couchDBAssociation, key ) ) : null; } @Override public Association createAssociation(AssociationKey key, AssociationContext associationContext) { CouchDBAssociation couchDBAssociation = null; if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) { EntityDocument owningEntity = getDataStore().getEntity( Identifier.createEntityId( key.getEntityKey() ) ); if ( owningEntity == null ) { owningEntity = (EntityDocument) getDataStore().saveDocument( new EntityDocument( key.getEntityKey() ) ); } couchDBAssociation = CouchDBAssociation.fromEmbeddedAssociation( owningEntity, key.getMetadata().getCollectionRole() ); } else { AssociationDocument association = new AssociationDocument( Identifier.createAssociationId( key ) ); couchDBAssociation = CouchDBAssociation.fromAssociationDocument( association ); } return new Association( new CouchDBAssociationSnapshot( couchDBAssociation, key ) ); } @Override public void insertOrUpdateAssociation(AssociationKey associationKey, Association association, AssociationContext associationContext) { List<Object> rows = getAssociationRows( association, associationKey ); CouchDBAssociation couchDBAssociation = ( (CouchDBAssociationSnapshot) association.getSnapshot() ).getCouchDbAssociation(); couchDBAssociation.setRows( rows ); getDataStore().saveDocument( couchDBAssociation.getOwningDocument() ); } private List<Object> getAssociationRows(Association association, AssociationKey associationKey) { List<Object> rows = new ArrayList<Object>(); for ( RowKey rowKey : association.getKeys() ) { Tuple tuple = association.get( rowKey ); String[] columnsToPersist = associationKey.getMetadata().getColumnsWithoutKeyColumns( tuple.getColumnNames() ); // return value itself if there is only a single column to store if ( columnsToPersist.length == 1 ) { Object row = tuple.get( columnsToPersist[0] ); rows.add( row ); } else { Map<String, Object> row = new HashMap<String, Object>( columnsToPersist.length ); for ( String columnName : columnsToPersist ) { row.put( columnName, tuple.get( columnName ) ); } rows.add( row ); } } return rows; } @Override public void removeAssociation(AssociationKey key, AssociationContext associationContext) { if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) { EntityDocument owningEntity = getDataStore().getEntity( Identifier.createEntityId( key.getEntityKey() ) ); if ( owningEntity != null ) { owningEntity.removeAssociation( key.getMetadata().getCollectionRole() ); getDataStore().saveDocument( owningEntity ); } } else { removeDocumentIfPresent( Identifier.createAssociationId( key ) ); } } @Override public boolean isStoredInEntityStructure(AssociationKeyMetadata associationKeyMetadata, AssociationTypeContext associationTypeContext) { AssociationStorageType associationStorage = associationTypeContext .getOptionsContext() .getUnique( AssociationStorageOption.class ); return associationKeyMetadata.getAssociationKind() == AssociationKind.EMBEDDED_COLLECTION || associationStorage == AssociationStorageType.IN_ENTITY; } @Override public Number nextValue(NextValueRequest request) { return getDataStore().nextValue( request.getKey(), request.getIncrement(), request.getInitialValue() ); } @Override public GridType overrideType(Type type) { if ( type == StandardBasicTypes.MATERIALIZED_BLOB ) { return CouchDBBlobType.INSTANCE; } // persist calendars as ISO8601 strings, including TZ info else if ( type == 
StandardBasicTypes.CALENDAR ) { return Iso8601StringCalendarType.DATE_TIME; } else if ( type == StandardBasicTypes.CALENDAR_DATE ) { return Iso8601StringCalendarType.DATE; } // persist date as ISO8601 strings, in UTC, without TZ info else if ( type == StandardBasicTypes.DATE ) { return Iso8601StringDateType.DATE; } else if ( type == StandardBasicTypes.TIME ) { return Iso8601StringDateType.TIME; } else if ( type == StandardBasicTypes.TIMESTAMP ) { return Iso8601StringDateType.DATE_TIME; } else if ( type == StandardBasicTypes.BYTE ) { return CouchDBByteType.INSTANCE; } else if ( type == StandardBasicTypes.LONG ) { return CouchDBLongType.INSTANCE; } return null; } @Override public void forEachTuple(ModelConsumer consumer, EntityKeyMetadata... entityKeyMetadatas) { for ( EntityKeyMetadata entityKeyMetadata : entityKeyMetadatas ) { forTuple( consumer, entityKeyMetadata ); } } @Override public DuplicateInsertPreventionStrategy getDuplicateInsertPreventionStrategy() { return DuplicateInsertPreventionStrategy.NATIVE; } private void forTuple(ModelConsumer consumer, EntityKeyMetadata entityKeyMetadata) { List<Tuple> tuples = getTuples( entityKeyMetadata ); for ( Tuple tuple : tuples ) { consumer.consume( tuple ); } } private List<Tuple> getTuples(EntityKeyMetadata entityKeyMetadata) { return getDataStore().getTuples( entityKeyMetadata ); } private CouchDBDatastore getDataStore() { return provider.getDataStore(); } private void removeDocumentIfPresent(String id) { String currentRevision = getDataStore().getCurrentRevision( id, false ); if ( currentRevision != null ) { getDataStore().deleteDocument( id, currentRevision ); } } }
couchdb/src/main/java/org/hibernate/ogm/datastore/couchdb/CouchDBDialect.java
/* * Hibernate OGM, Domain model persistence for NoSQL datastores * * License: GNU Lesser General Public License (LGPL), version 2.1 or later * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>. */ package org.hibernate.ogm.datastore.couchdb; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.hibernate.LockMode; import org.hibernate.dialect.lock.LockingStrategy; import org.hibernate.ogm.datastore.couchdb.dialect.backend.impl.CouchDBDatastore; import org.hibernate.ogm.datastore.couchdb.dialect.backend.json.impl.AssociationDocument; import org.hibernate.ogm.datastore.couchdb.dialect.backend.json.impl.Document; import org.hibernate.ogm.datastore.couchdb.dialect.backend.json.impl.EntityDocument; import org.hibernate.ogm.datastore.couchdb.dialect.model.impl.CouchDBAssociation; import org.hibernate.ogm.datastore.couchdb.dialect.model.impl.CouchDBAssociationSnapshot; import org.hibernate.ogm.datastore.couchdb.dialect.model.impl.CouchDBTupleSnapshot; import org.hibernate.ogm.datastore.couchdb.dialect.type.impl.CouchDBBlobType; import org.hibernate.ogm.datastore.couchdb.dialect.type.impl.CouchDBByteType; import org.hibernate.ogm.datastore.couchdb.dialect.type.impl.CouchDBLongType; import org.hibernate.ogm.datastore.couchdb.impl.CouchDBDatastoreProvider; import org.hibernate.ogm.datastore.couchdb.util.impl.Identifier; import org.hibernate.ogm.datastore.document.options.AssociationStorageType; import org.hibernate.ogm.datastore.document.options.spi.AssociationStorageOption; import org.hibernate.ogm.dialect.spi.AssociationContext; import org.hibernate.ogm.dialect.spi.AssociationTypeContext; import org.hibernate.ogm.dialect.spi.BaseGridDialect; import org.hibernate.ogm.dialect.spi.ModelConsumer; import org.hibernate.ogm.dialect.spi.NextValueRequest; import org.hibernate.ogm.dialect.spi.TupleContext; import org.hibernate.ogm.model.key.spi.AssociationKey; import org.hibernate.ogm.model.key.spi.AssociationKeyMetadata; import org.hibernate.ogm.model.key.spi.EntityKey; import org.hibernate.ogm.model.key.spi.EntityKeyMetadata; import org.hibernate.ogm.model.key.spi.RowKey; import org.hibernate.ogm.model.spi.Association; import org.hibernate.ogm.model.spi.AssociationKind; import org.hibernate.ogm.model.spi.Tuple; import org.hibernate.ogm.type.impl.Iso8601StringCalendarType; import org.hibernate.ogm.type.impl.Iso8601StringDateType; import org.hibernate.ogm.type.spi.GridType; import org.hibernate.persister.entity.Lockable; import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.Type; /** * Stores tuples and associations as JSON documents inside CouchDB. * <p> * Tuples are stored in CouchDB documents obtained as a JSON serialization of a {@link EntityDocument} object. * Associations are stored in CouchDB documents obtained as a JSON serialization of a {@link AssociationDocument} object. 
* * @author Andrea Boriero &lt;[email protected]&gt; * @author Gunnar Morling */ public class CouchDBDialect extends BaseGridDialect { private final CouchDBDatastoreProvider provider; public CouchDBDialect(CouchDBDatastoreProvider provider) { this.provider = provider; } @Override public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { return null; } @Override public Tuple getTuple(EntityKey key, TupleContext tupleContext) { EntityDocument entity = getDataStore().getEntity( Identifier.createEntityId( key ) ); if ( entity != null ) { return new Tuple( new CouchDBTupleSnapshot( entity.getProperties() ) ); } return null; } @Override public Tuple createTuple(EntityKey key, TupleContext tupleContext) { return new Tuple( new CouchDBTupleSnapshot( key ) ); } @Override public void insertOrUpdateTuple(EntityKey key, Tuple tuple, TupleContext tupleContext) { CouchDBTupleSnapshot snapshot = (CouchDBTupleSnapshot) tuple.getSnapshot(); String revision = (String) snapshot.get( Document.REVISION_FIELD_NAME ); // load the latest revision for updates without the revision being present; a warning about // this mapping will have been issued at factory start-up if ( revision == null && !snapshot.isCreatedOnInsert() ) { revision = getDataStore().getCurrentRevision( Identifier.createEntityId( key ), false ); } // this will raise an optimistic locking exception if the revision is either null or not the current one getDataStore().saveDocument( new EntityDocument( key, revision, tuple ) ); } @Override public void removeTuple(EntityKey key, TupleContext tupleContext) { removeDocumentIfPresent( Identifier.createEntityId( key ) ); } @Override public Association getAssociation(AssociationKey key, AssociationContext associationContext) { CouchDBAssociation couchDBAssociation = null; if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) { EntityDocument owningEntity = getDataStore().getEntity( Identifier.createEntityId( key.getEntityKey() ) ); if ( owningEntity != null && owningEntity.getProperties().containsKey( key.getMetadata().getCollectionRole() ) ) { couchDBAssociation = CouchDBAssociation.fromEmbeddedAssociation( owningEntity, key.getMetadata().getCollectionRole() ); } } else { AssociationDocument association = getDataStore().getAssociation( Identifier.createAssociationId( key ) ); if ( association != null ) { couchDBAssociation = CouchDBAssociation.fromAssociationDocument( association ); } } return couchDBAssociation != null ? 
new Association( new CouchDBAssociationSnapshot( couchDBAssociation, key ) ) : null; } @Override public Association createAssociation(AssociationKey key, AssociationContext associationContext) { CouchDBAssociation couchDBAssociation = null; if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) { EntityDocument owningEntity = getDataStore().getEntity( Identifier.createEntityId( key.getEntityKey() ) ); if ( owningEntity == null ) { owningEntity = (EntityDocument) getDataStore().saveDocument( new EntityDocument( key.getEntityKey() ) ); } couchDBAssociation = CouchDBAssociation.fromEmbeddedAssociation( owningEntity, key.getMetadata().getCollectionRole() ); } else { AssociationDocument association = new AssociationDocument( Identifier.createAssociationId( key ) ); couchDBAssociation = CouchDBAssociation.fromAssociationDocument( association ); } return new Association( new CouchDBAssociationSnapshot( couchDBAssociation, key ) ); } @Override public void insertOrUpdateAssociation(AssociationKey associationKey, Association association, AssociationContext associationContext) { List<Object> rows = getAssociationRows( association, associationKey ); CouchDBAssociation couchDBAssociation = ( (CouchDBAssociationSnapshot) association.getSnapshot() ).getCouchDbAssociation(); couchDBAssociation.setRows( rows ); getDataStore().saveDocument( couchDBAssociation.getOwningDocument() ); } private List<Object> getAssociationRows(Association association, AssociationKey associationKey) { List<Object> rows = new ArrayList<Object>(); for ( RowKey rowKey : association.getKeys() ) { Tuple tuple = association.get( rowKey ); String[] columnsToPersist = associationKey.getMetadata().getColumnsWithoutKeyColumns( tuple.getColumnNames() ); // return value itself if there is only a single column to store if ( columnsToPersist.length == 1 ) { Object row = tuple.get( columnsToPersist[0] ); rows.add( row ); } else { Map<String, Object> row = new HashMap<String, Object>( columnsToPersist.length ); for ( String columnName : columnsToPersist ) { row.put( columnName, tuple.get( columnName ) ); } rows.add( row ); } } return rows; } @Override public void removeAssociation(AssociationKey key, AssociationContext associationContext) { if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) { EntityDocument owningEntity = getDataStore().getEntity( Identifier.createEntityId( key.getEntityKey() ) ); if ( owningEntity != null ) { owningEntity.removeAssociation( key.getMetadata().getCollectionRole() ); getDataStore().saveDocument( owningEntity ); } } else { removeDocumentIfPresent( Identifier.createAssociationId( key ) ); } } @Override public boolean isStoredInEntityStructure(AssociationKeyMetadata associationKeyMetadata, AssociationTypeContext associationTypeContext) { AssociationStorageType associationStorage = associationTypeContext .getOptionsContext() .getUnique( AssociationStorageOption.class ); return associationKeyMetadata.getAssociationKind() == AssociationKind.EMBEDDED_COLLECTION || associationStorage == AssociationStorageType.IN_ENTITY; } @Override public Number nextValue(NextValueRequest request) { return getDataStore().nextValue( request.getKey(), request.getIncrement(), request.getInitialValue() ); } @Override public GridType overrideType(Type type) { if ( type == StandardBasicTypes.MATERIALIZED_BLOB ) { return CouchDBBlobType.INSTANCE; } // persist calendars as ISO8601 strings, including TZ info else if ( type == 
StandardBasicTypes.CALENDAR ) { return Iso8601StringCalendarType.DATE_TIME; } else if ( type == StandardBasicTypes.CALENDAR_DATE ) { return Iso8601StringCalendarType.DATE; } // persist date as ISO8601 strings, in UTC, without TZ info else if ( type == StandardBasicTypes.DATE ) { return Iso8601StringDateType.DATE; } else if ( type == StandardBasicTypes.TIME ) { return Iso8601StringDateType.TIME; } else if ( type == StandardBasicTypes.TIMESTAMP ) { return Iso8601StringDateType.DATE_TIME; } else if ( type == StandardBasicTypes.BYTE ) { return CouchDBByteType.INSTANCE; } else if ( type == StandardBasicTypes.LONG ) { return CouchDBLongType.INSTANCE; } return null; } @Override public void forEachTuple(ModelConsumer consumer, EntityKeyMetadata... entityKeyMetadatas) { for ( EntityKeyMetadata entityKeyMetadata : entityKeyMetadatas ) { forTuple( consumer, entityKeyMetadata ); } } private void forTuple(ModelConsumer consumer, EntityKeyMetadata entityKeyMetadata) { List<Tuple> tuples = getTuples( entityKeyMetadata ); for ( Tuple tuple : tuples ) { consumer.consume( tuple ); } } private List<Tuple> getTuples(EntityKeyMetadata entityKeyMetadata) { return getDataStore().getTuples( entityKeyMetadata ); } private CouchDBDatastore getDataStore() { return provider.getDataStore(); } private void removeDocumentIfPresent(String id) { String currentRevision = getDataStore().getCurrentRevision( id, false ); if ( currentRevision != null ) { getDataStore().deleteDocument( id, currentRevision ); } } }
OGM-642 Using strategy NATIVE for CouchDB
couchdb/src/main/java/org/hibernate/ogm/datastore/couchdb/CouchDBDialect.java
OGM-642 Using strategy NATIVE for CouchDB
<ide><path>couchdb/src/main/java/org/hibernate/ogm/datastore/couchdb/CouchDBDialect.java <ide> import java.util.HashMap; <ide> import java.util.List; <ide> import java.util.Map; <add> <add>import javax.persistence.OptimisticLockException; <ide> <ide> import org.hibernate.LockMode; <ide> import org.hibernate.dialect.lock.LockingStrategy; <ide> import org.hibernate.ogm.dialect.spi.AssociationContext; <ide> import org.hibernate.ogm.dialect.spi.AssociationTypeContext; <ide> import org.hibernate.ogm.dialect.spi.BaseGridDialect; <add>import org.hibernate.ogm.dialect.spi.DuplicateInsertPreventionStrategy; <ide> import org.hibernate.ogm.dialect.spi.ModelConsumer; <ide> import org.hibernate.ogm.dialect.spi.NextValueRequest; <add>import org.hibernate.ogm.dialect.spi.TupleAlreadyExistsException; <ide> import org.hibernate.ogm.dialect.spi.TupleContext; <ide> import org.hibernate.ogm.model.key.spi.AssociationKey; <ide> import org.hibernate.ogm.model.key.spi.AssociationKeyMetadata; <ide> revision = getDataStore().getCurrentRevision( Identifier.createEntityId( key ), false ); <ide> } <ide> <del> // this will raise an optimistic locking exception if the revision is either null or not the current one <del> getDataStore().saveDocument( new EntityDocument( key, revision, tuple ) ); <add> try { <add> // this will raise an optimistic locking exception if the revision is either null or not the current one <add> getDataStore().saveDocument( new EntityDocument( key, revision, tuple ) ); <add> } <add> catch (OptimisticLockException ole) { <add> if ( snapshot.isCreatedOnInsert() ) { <add> throw new TupleAlreadyExistsException( key.getMetadata(), tuple, ole ); <add> } <add> else { <add> throw ole; <add> } <add> } <ide> } <ide> <ide> @Override <ide> } <ide> } <ide> <add> @Override <add> public DuplicateInsertPreventionStrategy getDuplicateInsertPreventionStrategy() { <add> return DuplicateInsertPreventionStrategy.NATIVE; <add> } <add> <ide> private void forTuple(ModelConsumer consumer, EntityKeyMetadata entityKeyMetadata) { <ide> List<Tuple> tuples = getTuples( entityKeyMetadata ); <ide> for ( Tuple tuple : tuples ) {
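The OGM-642 diff above replaces a pre-insert lookup with the datastore's own conflict detection: the dialect reports DuplicateInsertPreventionStrategy.NATIVE and, when a first-time insert collides, rewrites the resulting OptimisticLockException into TupleAlreadyExistsException. The following self-contained toy condenses that pattern; ConflictException, AlreadyExistsException, and the in-memory map are hypothetical stand-ins for the CouchDB revision check, not OGM API.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NativeDuplicatePreventionSketch {
    static class ConflictException extends RuntimeException { }
    static class AlreadyExistsException extends RuntimeException {
        AlreadyExistsException(Throwable cause) { super(cause); }
    }

    private final Map<String, String> store = new ConcurrentHashMap<>();

    // The "native" check: putIfAbsent fails atomically on a duplicate key,
    // much like CouchDB rejects a write that carries no current revision.
    void insert(String id, String doc) {
        if (store.putIfAbsent(id, doc) != null) {
            throw new ConflictException();
        }
    }

    // Translate the store-level conflict into a semantically precise error,
    // as the dialect does for inserts in insertOrUpdateTuple above.
    void insertTranslatingConflict(String id, String doc) {
        try {
            insert(id, doc);
        }
        catch (ConflictException ce) {
            throw new AlreadyExistsException(ce);
        }
    }

    public static void main(String[] args) {
        NativeDuplicatePreventionSketch sketch = new NativeDuplicatePreventionSketch();
        sketch.insertTranslatingConflict("doc-1", "{}");
        try {
            sketch.insertTranslatingConflict("doc-1", "{}");
        }
        catch (AlreadyExistsException expected) {
            System.out.println("duplicate insert detected natively");
        }
    }
}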
Java
bsd-3-clause
80be52c02e62360bd715883647d64685dc814473
0
NCIP/cadsr-semantic-tools,NCIP/cadsr-semantic-tools
/* * Copyright 2000-2005 Oracle, Inc. This software was developed in conjunction with the National Cancer Institute, and so to the extent government employees are co-authors, any rights in such works shall be subject to Title 17 of the United States Code, section 105. * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the disclaimer of Article 3, below. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * * 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment: * * "This product includes software developed by Oracle, Inc. and the National Cancer Institute." * * If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself, wherever such third-party acknowledgments normally appear. * * 3. The names "The National Cancer Institute", "NCI" and "Oracle" must not be used to endorse or promote products derived from this software. * * 4. This license does not authorize the incorporation of this software into any proprietary programs. This license does not authorize the recipient to use any trademarks owned by either NCI or Oracle, Inc. * * 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, ORACLE, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE */ package gov.nih.nci.ncicb.cadsr.loader.ui; import gov.nih.nci.ncicb.cadsr.loader.*; import gov.nih.nci.ncicb.cadsr.loader.event.ReviewEvent; import gov.nih.nci.ncicb.cadsr.loader.event.ReviewListener; import gov.nih.nci.ncicb.cadsr.loader.parser.ElementWriter; import gov.nih.nci.ncicb.cadsr.loader.parser.ParserException; import gov.nih.nci.ncicb.cadsr.loader.ui.tree.*; import gov.nih.nci.ncicb.cadsr.loader.ui.event.*; import gov.nih.nci.ncicb.cadsr.loader.util.*; import gov.nih.nci.ncicb.cadsr.loader.ui.util.*; import gov.nih.nci.ncicb.cadsr.loader.validator.*; import java.awt.Component; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeEvent; import java.io.File; import javax.swing.*; import java.util.*; import gov.nih.nci.ncicb.cadsr.domain.*; import javax.swing.tree.DefaultMutableTreeNode; import org.apache.log4j.Logger; /** * The main Frame containing other frames * * @author <a href="mailto:[email protected]">Christophe Ludet</a> */ public class MainFrame extends JFrame implements ViewChangeListener, CloseableTabbedPaneListener, PropertyChangeListener { private JMenuBar mainMenuBar = new JMenuBar(); private JMenu fileMenu = new JMenu("File"); 
private JMenuItem saveMenuItem = new JMenuItem("Save"); private JMenuItem saveAsMenuItem = new JMenuItem("Save As"); private JMenuItem exportErrorsMenuItem = new JMenuItem("Export"); private JMenuItem exitMenuItem = new JMenuItem("Exit"); private JMenu editMenu = new JMenu("Edit"); private JMenuItem findMenuItem = new JMenuItem("Find"); private JMenuItem prefMenuItem = new JMenuItem("Preferences"); private JMenu elementMenu = new JMenu("Element"); private JMenuItem applyMenuItem = new JMenuItem("Apply"); private JMenuItem applyToAllMenuItem = new JMenuItem("Apply to All"); private JMenu runMenu = new JMenu("Run"); private JMenuItem validateMenuItem = new JMenuItem("Validate"); private JMenuItem uploadMenuItem = new JMenuItem("Upload"); private JMenuItem defaultsMenuItem = new JMenuItem("Defaults"); private JMenuItem validateConceptsMenuItem = new JMenuItem("Validate Concepts"); private JMenu helpMenu = new JMenu("Help"); private JMenuItem aboutMenuItem = new JMenuItem("About"); private JMenuItem indexMenuItem = new JMenuItem("Index"); private JMenuItem semanticConnectorMenuItem = new JMenuItem("Semantic Connector"); private JSplitPane jSplitPane1 = new JSplitPane(); private JSplitPane jSplitPane2 = new JSplitPane(); private JTabbedPane jTabbedPane1 = new JTabbedPane(); private CloseableTabbedPane viewTabbedPane = new CloseableTabbedPane(); private JPanel jPanel1 = new JPanel(); private NavigationPanel navigationPanel = new NavigationPanel(); private ErrorPanel errorPanel = null; private MainFrame _this = this; private JLabel infoLabel = new JLabel(" "); private Map<String, UMLElementViewPanel> viewPanels = new HashMap(); private AssociationViewPanel associationViewPanel = null; private ReviewTracker reviewTracker = ReviewTracker.getInstance(); private RunMode runMode = null; private String saveFilename = ""; private static Logger logger = Logger.getLogger(MainFrame.class); public MainFrame() { try { UserSelections selections = UserSelections.getInstance(); runMode = (RunMode)(selections.getProperty("MODE")); saveFilename = (String)selections.getProperty("FILENAME"); jbInit(); } catch(Exception e) { e.printStackTrace(); } } public void exit() { if(!ChangeTracker.getInstance().isEmpty()) { int result = JOptionPane.showConfirmDialog((JFrame) null, "Would you like to save your file before quitting?"); switch(result) { case JOptionPane.YES_OPTION: saveMenuItem.doClick(); break; case JOptionPane.NO_OPTION: break; case JOptionPane.CANCEL_OPTION: return; } System.exit(0); } else System.exit(0); } public void propertyChange(PropertyChangeEvent evt) { if(evt.getPropertyName().equals("APPLY")) { applyMenuItem.setEnabled((Boolean)evt.getNewValue()); applyToAllMenuItem.setEnabled((Boolean)evt.getNewValue()); if((Boolean)evt.getNewValue() == true) infoLabel.setText("Unsaved Changes"); else infoLabel.setText("Changes Applied"); } } private void jbInit() throws Exception { this.getContentPane().setLayout(new BorderLayout()); this.setSize(new Dimension(830, 650)); this.setJMenuBar(mainMenuBar); UserSelections selections = UserSelections.getInstance(); String fileName = new File((String)selections.getProperty("FILENAME")).getName(); this.setTitle("Semantic Integration Workbench - " + fileName); jSplitPane2.setOrientation(JSplitPane.VERTICAL_SPLIT); jSplitPane1.setDividerLocation(160); jSplitPane2.setDividerLocation(400); fileMenu.add(saveMenuItem); fileMenu.add(saveAsMenuItem); fileMenu.addSeparator(); fileMenu.add(findMenuItem); fileMenu.add(exportErrorsMenuItem); fileMenu.addSeparator(); 
fileMenu.add(exitMenuItem); mainMenuBar.add(fileMenu); editMenu.add(findMenuItem); editMenu.add(prefMenuItem); mainMenuBar.add(editMenu); applyMenuItem.setEnabled(false); applyToAllMenuItem.setEnabled(false); elementMenu.add(applyMenuItem); elementMenu.add(applyToAllMenuItem); mainMenuBar.add(elementMenu); // runMenu.add(validateMenuItem); if(runMode.equals(RunMode.Reviewer)) { runMenu.add(uploadMenuItem); uploadMenuItem.setEnabled(false); runMenu.addSeparator(); runMenu.add(defaultsMenuItem); runMenu.add(validateConceptsMenuItem); mainMenuBar.add(runMenu); } helpMenu.add(indexMenuItem); helpMenu.addSeparator(); helpMenu.add(aboutMenuItem); mainMenuBar.add(helpMenu); errorPanel = new ErrorPanel(TreeBuilder.getInstance().getRootNode()); jTabbedPane1.addTab("Errors", errorPanel); Icon closeIcon = new ImageIcon(Thread.currentThread().getContextClassLoader().getResource("close-tab.gif")); viewTabbedPane.setCloseIcons(closeIcon, closeIcon, closeIcon); viewTabbedPane.addCloseableTabbedPaneListener(this); jTabbedPane1.addTab("Log", new JPanel()); jSplitPane2.add(jTabbedPane1, JSplitPane.BOTTOM); jSplitPane2.add(viewTabbedPane, JSplitPane.TOP); jSplitPane1.add(jSplitPane2, JSplitPane.RIGHT); jSplitPane1.add(navigationPanel, JSplitPane.LEFT); navigationPanel.addViewChangeListener(this); this.getContentPane().add(jSplitPane1, BorderLayout.CENTER); this.getContentPane().add(infoLabel, BorderLayout.SOUTH); exitMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { _this.exit(); } }); defaultsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { UmlDefaultsPanel dp = new UmlDefaultsPanel(_this); dp.show(); UIUtil.putToCenter(dp); } }); findMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { SearchDialog sd = new SearchDialog(_this); UIUtil.putToCenter(sd); sd.addSearchListener(navigationPanel); sd.setVisible(true); } }); findMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask())); final PreferenceDialog pd = new PreferenceDialog(_this); prefMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { UIUtil.putToCenter(pd); pd.setVisible(true); } }); saveMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { ElementWriter writer = BeansAccessor.getWriter(); writer.setOutput(saveFilename); try { writer.write(ElementsLists.getInstance()); infoLabel.setText("File Saved"); } catch (ParserException e){ JOptionPane.showMessageDialog(_this, "There was an error saving your File. Please contact support.", "Error Saving File", JOptionPane.ERROR_MESSAGE); infoLabel.setText("Save Failed!!"); logger.error(e); e.printStackTrace(); } // end of try-catch } }); saveMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask())); saveAsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { String saveDir = UserPreferences.getInstance().getRecentDir(); JFileChooser chooser = new JFileChooser(saveDir); javax.swing.filechooser.FileFilter filter = new javax.swing.filechooser.FileFilter() { String fileExtension = null; { if(runMode.equals(RunMode.Curator)) fileExtension = "csv"; else if(runMode.equals(RunMode.Reviewer)) fileExtension = "xmi"; } public boolean accept(File f) { if (f.isDirectory()) { return true; } return f.getName().endsWith("." 
+ fileExtension); } public String getDescription() { return fileExtension.toUpperCase() + " Files"; } }; chooser.setFileFilter(filter); int returnVal = chooser.showSaveDialog(null); if(returnVal == JFileChooser.APPROVE_OPTION) { String filePath = chooser.getSelectedFile().getAbsolutePath(); String fileExtension = "xmi"; if(runMode.equals(RunMode.Curator)) fileExtension = "csv"; else if(runMode.equals(RunMode.Reviewer)) fileExtension = "xmi"; if(!filePath.endsWith(fileExtension)) filePath = filePath + "." + fileExtension; UserPreferences.getInstance().setRecentDir(filePath); ElementWriter writer = BeansAccessor.getWriter(); writer.setOutput(filePath); saveFilename = filePath; try { writer.write(ElementsLists.getInstance()); infoLabel.setText("File Saved"); } catch (ParserException e){ JOptionPane.showMessageDialog(_this, "There was an error saving your File. Please contact support.", "Error Saving File", JOptionPane.ERROR_MESSAGE); infoLabel.setText("Save Failed!!"); } // end of try-catch } } }); exportErrorsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE); } }); validateMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { ValidationItems.getInstance().clear(); Validator validator = new UMLValidator(); validator.validate(); ElementsLists elements = ElementsLists.getInstance(); TreeBuilder tb = TreeBuilder.getInstance(); tb.init(); tb.buildTree(elements); errorPanel.update(tb.getRootNode()); // JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE); } }); validateConceptsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { int n = JOptionPane.showConfirmDialog(_this, "This process may take some time. Would you like to continue? ", "Validate Concepts", JOptionPane.YES_NO_OPTION); if(n == JOptionPane.YES_OPTION) { ValidateConceptsDialog vcd = new ValidateConceptsDialog(_this); vcd.addSearchListener(navigationPanel); vcd.setVisible(true); } } }); uploadMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE); } }); applyMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { UMLElementViewPanel viewPanel = (UMLElementViewPanel)viewTabbedPane .getSelectedComponent(); viewPanel.apply(false); } }); applyToAllMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { UMLElementViewPanel viewPanel = (UMLElementViewPanel)viewTabbedPane .getSelectedComponent(); viewPanel.apply(true); } }); aboutMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { new AboutPanel(); } }); } public void viewChanged(ViewChangeEvent event) { if(event.getType() == ViewChangeEvent.VIEW_CONCEPTS) { UMLNode node = (UMLNode)event.getViewObject(); // If the concept is already showing, just bring it to the front if(viewPanels.containsKey(node.getFullPath())) { UMLElementViewPanel pa = viewPanels.get(node.getFullPath()); viewTabbedPane.setSelectedComponent(pa); return; } if((event.getInNewTab() == true) || (viewPanels.size() == 0) || viewTabbedPane.getSelectedComponent() instanceof AssociationViewPanel) { UMLElementViewPanel viewPanel = new UMLElementViewPanel(node); viewPanel.addPropertyChangeListener(this); viewPanel.addReviewListener(navigationPanel); viewPanel.addReviewListener(reviewTracker); viewPanel.addElementChangeListener(ChangeTracker.getInstance()); viewPanel.addNavigationListener(navigationPanel); navigationPanel.addNavigationListener(viewPanel); String tabTitle = node.getDisplay(); if(node instanceof AttributeNode) tabTitle = node.getParent().getDisplay() + "." + tabTitle; viewTabbedPane.addTab(tabTitle, viewPanel); viewTabbedPane.setSelectedComponent(viewPanel); viewPanel.setName(node.getFullPath()); viewPanels.put(viewPanel.getName(), viewPanel); infoLabel.setText(tabTitle); } else { UMLElementViewPanel viewPanel = (UMLElementViewPanel) viewTabbedPane.getSelectedComponent(); viewPanels.remove(viewPanel.getName()); String tabTitle = node.getDisplay(); if(node instanceof AttributeNode) tabTitle = node.getParent().getDisplay() + "." + tabTitle; viewTabbedPane.setTitleAt(viewTabbedPane.getSelectedIndex(), tabTitle); infoLabel.setText(tabTitle); viewPanel.setName(node.getFullPath()); viewPanel.updateNode(node); viewPanels.put(viewPanel.getName(), viewPanel); } } else if(event.getType() == ViewChangeEvent.VIEW_ASSOCIATION) { UMLNode node = (UMLNode)event.getViewObject(); if(associationViewPanel == null) { associationViewPanel = new AssociationViewPanel((ObjectClassRelationship)node.getUserObject()); viewTabbedPane.addTab("Association", associationViewPanel); associationViewPanel.setName("Association"); infoLabel.setText("Association"); } else associationViewPanel.update((ObjectClassRelationship)node.getUserObject()); viewTabbedPane.setSelectedComponent(associationViewPanel); } } public boolean closeTab(int index) { Component c = viewTabbedPane.getComponentAt(index); if(c.equals(associationViewPanel)) associationViewPanel = null; viewPanels.remove(c.getName()); return true; } }
src/gov/nih/nci/ncicb/cadsr/loader/ui/MainFrame.java
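The revised MainFrame above asks about saving only when ChangeTracker.getInstance().isEmpty() is false, whereas the pre-change version of the file below always shows the prompt on exit. A minimal Swing sketch of the guarded exit flow; the class name ExitPromptSketch and the plain dirty flag are hypothetical stand-ins for the real ChangeTracker wiring.

import javax.swing.JFrame;
import javax.swing.JOptionPane;

public class ExitPromptSketch {
    private static boolean dirty = true; // stand-in for !ChangeTracker.getInstance().isEmpty()

    static void exitWithPrompt(JFrame owner) {
        if (dirty) {
            int result = JOptionPane.showConfirmDialog(owner,
                    "Would you like to save your file before quitting?");
            if (result == JOptionPane.CANCEL_OPTION) {
                return; // user backed out: abort the exit
            }
            if (result == JOptionPane.YES_OPTION) {
                save();
            }
        }
        System.exit(0);
    }

    static void save() {
        dirty = false; // the real code clicks saveMenuItem here
    }

    public static void main(String[] args) {
        exitWithPrompt(null);
    }
}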
/* * Copyright 2000-2005 Oracle, Inc. This software was developed in conjunction with the National Cancer Institute, and so to the extent government employees are co-authors, any rights in such works shall be subject to Title 17 of the United States Code, section 105. * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the disclaimer of Article 3, below. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * * 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment: * * "This product includes software developed by Oracle, Inc. and the National Cancer Institute." * * If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself, wherever such third-party acknowledgments normally appear. * * 3. The names "The National Cancer Institute", "NCI" and "Oracle" must not be used to endorse or promote products derived from this software. * * 4. This license does not authorize the incorporation of this software into any proprietary programs. This license does not authorize the recipient to use any trademarks owned by either NCI or Oracle, Inc. * * 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, ORACLE, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE */ package gov.nih.nci.ncicb.cadsr.loader.ui; import gov.nih.nci.ncicb.cadsr.loader.*; import gov.nih.nci.ncicb.cadsr.loader.event.ReviewEvent; import gov.nih.nci.ncicb.cadsr.loader.event.ReviewListener; import gov.nih.nci.ncicb.cadsr.loader.parser.ElementWriter; import gov.nih.nci.ncicb.cadsr.loader.parser.ParserException; import gov.nih.nci.ncicb.cadsr.loader.ui.tree.*; import gov.nih.nci.ncicb.cadsr.loader.ui.event.*; import gov.nih.nci.ncicb.cadsr.loader.util.*; import gov.nih.nci.ncicb.cadsr.loader.ui.util.*; import gov.nih.nci.ncicb.cadsr.loader.validator.*; import java.awt.Component; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeEvent; import java.io.File; import javax.swing.*; import java.util.*; import gov.nih.nci.ncicb.cadsr.domain.*; import javax.swing.tree.DefaultMutableTreeNode; import org.apache.log4j.Logger; /** * The main Frame containing other frames * * @author <a href="mailto:[email protected]">Christophe Ludet</a> */ public class MainFrame extends JFrame implements ViewChangeListener, CloseableTabbedPaneListener, PropertyChangeListener { private JMenuBar mainMenuBar = new JMenuBar(); private JMenu fileMenu = new JMenu("File"); 
private JMenuItem saveMenuItem = new JMenuItem("Save"); private JMenuItem saveAsMenuItem = new JMenuItem("Save As"); private JMenuItem exportErrorsMenuItem = new JMenuItem("Export"); private JMenuItem exitMenuItem = new JMenuItem("Exit"); private JMenu editMenu = new JMenu("Edit"); private JMenuItem findMenuItem = new JMenuItem("Find"); private JMenuItem prefMenuItem = new JMenuItem("Preferences"); private JMenu elementMenu = new JMenu("Element"); private JMenuItem applyMenuItem = new JMenuItem("Apply"); private JMenuItem applyToAllMenuItem = new JMenuItem("Apply to All"); private JMenu runMenu = new JMenu("Run"); private JMenuItem validateMenuItem = new JMenuItem("Validate"); private JMenuItem uploadMenuItem = new JMenuItem("Upload"); private JMenuItem defaultsMenuItem = new JMenuItem("Defaults"); private JMenuItem validateConceptsMenuItem = new JMenuItem("Validate Concepts"); private JMenu helpMenu = new JMenu("Help"); private JMenuItem aboutMenuItem = new JMenuItem("About"); private JMenuItem indexMenuItem = new JMenuItem("Index"); private JMenuItem semanticConnectorMenuItem = new JMenuItem("Semantic Connector"); private JSplitPane jSplitPane1 = new JSplitPane(); private JSplitPane jSplitPane2 = new JSplitPane(); private JTabbedPane jTabbedPane1 = new JTabbedPane(); private CloseableTabbedPane viewTabbedPane = new CloseableTabbedPane(); private JPanel jPanel1 = new JPanel(); private NavigationPanel navigationPanel = new NavigationPanel(); private ErrorPanel errorPanel = null; private MainFrame _this = this; private JLabel infoLabel = new JLabel(" "); private Map<String, UMLElementViewPanel> viewPanels = new HashMap(); private AssociationViewPanel associationViewPanel = null; private ReviewTracker reviewTracker = ReviewTracker.getInstance(); private RunMode runMode = null; private String saveFilename = ""; private static Logger logger = Logger.getLogger(MainFrame.class); public MainFrame() { try { UserSelections selections = UserSelections.getInstance(); runMode = (RunMode)(selections.getProperty("MODE")); saveFilename = (String)selections.getProperty("FILENAME"); jbInit(); } catch(Exception e) { e.printStackTrace(); } } public void exit() { int result = JOptionPane.showConfirmDialog((JFrame) null, "Would you like to save your file before quitting?"); switch(result) { case JOptionPane.YES_OPTION: saveMenuItem.doClick(); break; case JOptionPane.NO_OPTION: break; case JOptionPane.CANCEL_OPTION: return; } System.exit(0); } public void propertyChange(PropertyChangeEvent evt) { if(evt.getPropertyName().equals("APPLY")) { applyMenuItem.setEnabled((Boolean)evt.getNewValue()); applyToAllMenuItem.setEnabled((Boolean)evt.getNewValue()); if((Boolean)evt.getNewValue() == true) infoLabel.setText("Unsaved Changes"); else infoLabel.setText("Changes Applied"); } } private void jbInit() throws Exception { this.getContentPane().setLayout(new BorderLayout()); this.setSize(new Dimension(830, 650)); this.setJMenuBar(mainMenuBar); UserSelections selections = UserSelections.getInstance(); String fileName = new File((String)selections.getProperty("FILENAME")).getName(); this.setTitle("Semantic Integration Workbench - " + fileName); jSplitPane2.setOrientation(JSplitPane.VERTICAL_SPLIT); jSplitPane1.setDividerLocation(160); jSplitPane2.setDividerLocation(400); fileMenu.add(saveMenuItem); fileMenu.add(saveAsMenuItem); fileMenu.addSeparator(); fileMenu.add(findMenuItem); fileMenu.add(exportErrorsMenuItem); fileMenu.addSeparator(); fileMenu.add(exitMenuItem); mainMenuBar.add(fileMenu); editMenu.add(findMenuItem); 
editMenu.add(prefMenuItem); mainMenuBar.add(editMenu); applyMenuItem.setEnabled(false); applyToAllMenuItem.setEnabled(false); elementMenu.add(applyMenuItem); elementMenu.add(applyToAllMenuItem); mainMenuBar.add(elementMenu); // runMenu.add(validateMenuItem); if(runMode.equals(RunMode.Reviewer)) { runMenu.add(uploadMenuItem); uploadMenuItem.setEnabled(false); runMenu.addSeparator(); runMenu.add(defaultsMenuItem); runMenu.add(validateConceptsMenuItem); mainMenuBar.add(runMenu); } helpMenu.add(indexMenuItem); helpMenu.addSeparator(); helpMenu.add(aboutMenuItem); mainMenuBar.add(helpMenu); errorPanel = new ErrorPanel(TreeBuilder.getInstance().getRootNode()); jTabbedPane1.addTab("Errors", errorPanel); Icon closeIcon = new ImageIcon(Thread.currentThread().getContextClassLoader().getResource("close-tab.gif")); viewTabbedPane.setCloseIcons(closeIcon, closeIcon, closeIcon); viewTabbedPane.addCloseableTabbedPaneListener(this); jTabbedPane1.addTab("Log", new JPanel()); jSplitPane2.add(jTabbedPane1, JSplitPane.BOTTOM); jSplitPane2.add(viewTabbedPane, JSplitPane.TOP); jSplitPane1.add(jSplitPane2, JSplitPane.RIGHT); jSplitPane1.add(navigationPanel, JSplitPane.LEFT); navigationPanel.addViewChangeListener(this); this.getContentPane().add(jSplitPane1, BorderLayout.CENTER); this.getContentPane().add(infoLabel, BorderLayout.SOUTH); exitMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { _this.exit(); } }); defaultsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { UmlDefaultsPanel dp = new UmlDefaultsPanel(_this); dp.show(); UIUtil.putToCenter(dp); } }); findMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { SearchDialog sd = new SearchDialog(_this); UIUtil.putToCenter(sd); sd.addSearchListener(navigationPanel); sd.setVisible(true); } }); findMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask())); final PreferenceDialog pd = new PreferenceDialog(_this); prefMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { UIUtil.putToCenter(pd); pd.setVisible(true); } }); saveMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { ElementWriter writer = BeansAccessor.getWriter(); writer.setOutput(saveFilename); try { writer.write(ElementsLists.getInstance()); infoLabel.setText("File Saved"); } catch (ParserException e){ JOptionPane.showMessageDialog(_this, "There was an error saving your File. Please contact support.", "Error Saving File", JOptionPane.ERROR_MESSAGE); infoLabel.setText("Save Failed!!"); logger.error(e); e.printStackTrace(); } // end of try-catch } }); saveMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask())); saveAsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { String saveDir = UserPreferences.getInstance().getRecentDir(); JFileChooser chooser = new JFileChooser(saveDir); javax.swing.filechooser.FileFilter filter = new javax.swing.filechooser.FileFilter() { String fileExtension = null; { if(runMode.equals(RunMode.Curator)) fileExtension = "csv"; else if(runMode.equals(RunMode.Reviewer)) fileExtension = "xmi"; } public boolean accept(File f) { if (f.isDirectory()) { return true; } return f.getName().endsWith("." 
+ fileExtension); } public String getDescription() { return fileExtension.toUpperCase() + " Files"; } }; chooser.setFileFilter(filter); int returnVal = chooser.showSaveDialog(null); if(returnVal == JFileChooser.APPROVE_OPTION) { String filePath = chooser.getSelectedFile().getAbsolutePath(); String fileExtension = "xmi"; if(runMode.equals(RunMode.Curator)) fileExtension = "csv"; else if(runMode.equals(RunMode.Reviewer)) fileExtension = "xmi"; if(!filePath.endsWith(fileExtension)) filePath = filePath + "." + fileExtension; UserPreferences.getInstance().setRecentDir(filePath); ElementWriter writer = BeansAccessor.getWriter(); writer.setOutput(filePath); saveFilename = filePath; try { writer.write(ElementsLists.getInstance()); infoLabel.setText("File Saved"); } catch (ParserException e){ JOptionPane.showMessageDialog(_this, "There was an error saving your File. Please contact support.", "Error Saving File", JOptionPane.ERROR_MESSAGE); infoLabel.setText("Save Failed!!"); } // end of try-catch } } }); exportErrorsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE); } }); validateMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { ValidationItems.getInstance().clear(); Validator validator = new UMLValidator(); validator.validate(); ElementsLists elements = ElementsLists.getInstance(); TreeBuilder tb = TreeBuilder.getInstance(); tb.init(); tb.buildTree(elements); errorPanel.update(tb.getRootNode()); // JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE); } }); validateConceptsMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { int n = JOptionPane.showConfirmDialog(_this, "This process may take some time. Would you like to continue? 
", "Validate Concepts", JOptionPane.YES_NO_OPTION); if(n == JOptionPane.YES_OPTION) { ValidateConceptsDialog vcd = new ValidateConceptsDialog(_this); vcd.addSearchListener(navigationPanel); vcd.setVisible(true); } } }); uploadMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent event) { JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE); } }); applyMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { UMLElementViewPanel viewPanel = (UMLElementViewPanel)viewTabbedPane .getSelectedComponent(); viewPanel.apply(false); } }); applyToAllMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { UMLElementViewPanel viewPanel = (UMLElementViewPanel)viewTabbedPane .getSelectedComponent(); viewPanel.apply(true); } }); aboutMenuItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { new AboutPanel(); } }); } public void viewChanged(ViewChangeEvent event) { if(event.getType() == ViewChangeEvent.VIEW_CONCEPTS) { UMLNode node = (UMLNode)event.getViewObject(); // If concept is already showing, just bring it up front if(viewPanels.containsKey(node.getFullPath())) { UMLElementViewPanel pa = viewPanels.get(node.getFullPath()); viewTabbedPane.setSelectedComponent(pa); return; } if((event.getInNewTab() == true) || (viewPanels.size() == 0) || viewTabbedPane.getSelectedComponent() instanceof AssociationViewPanel) { UMLElementViewPanel viewPanel = new UMLElementViewPanel(node); viewPanel.addPropertyChangeListener(this); viewPanel.addReviewListener(navigationPanel); viewPanel.addReviewListener(reviewTracker); viewPanel.addElementChangeListener(ChangeTracker.getInstance()); viewPanel.addNavigationListener(navigationPanel); navigationPanel.addNavigationListener(viewPanel); String tabTitle = node.getDisplay();; if(node instanceof AttributeNode) tabTitle = node.getParent().getDisplay() + "." + tabTitle; viewTabbedPane.addTab(tabTitle, viewPanel); viewTabbedPane.setSelectedComponent(viewPanel); viewPanel.setName(node.getFullPath()); viewPanels.put(viewPanel.getName(), viewPanel); infoLabel.setText(tabTitle); } else { UMLElementViewPanel viewPanel = (UMLElementViewPanel) viewTabbedPane.getSelectedComponent(); viewPanels.remove(viewPanel.getName()); String tabTitle = node.getDisplay();; if(node instanceof AttributeNode) tabTitle = node.getParent().getDisplay() + "." + tabTitle; viewTabbedPane.setTitleAt(viewTabbedPane.getSelectedIndex(), tabTitle); infoLabel.setText(tabTitle); viewPanel.setName(node.getFullPath()); viewPanel.updateNode(node); viewPanels.put(viewPanel.getName(), viewPanel); } } else if(event.getType() == ViewChangeEvent.VIEW_ASSOCIATION) { UMLNode node = (UMLNode)event.getViewObject(); if(associationViewPanel == null) { associationViewPanel = new AssociationViewPanel((ObjectClassRelationship)node.getUserObject()); viewTabbedPane.addTab("Association", associationViewPanel); associationViewPanel.setName("Association"); infoLabel.setText("Association"); } else associationViewPanel.update((ObjectClassRelationship)node.getUserObject()); viewTabbedPane.setSelectedComponent(associationViewPanel); } } public boolean closeTab(int index) { Component c = viewTabbedPane.getComponentAt(index); if(c.equals(associationViewPanel)) associationViewPanel = null; viewPanels.remove(c.getName()); return true; } }
Only prompt user to save if changeTracker has changed. SVN-Revision: 789
src/gov/nih/nci/ncicb/cadsr/loader/ui/MainFrame.java
Only prompt user to save if changeTracker has changed.
<ide><path>src/gov/nih/nci/ncicb/cadsr/loader/ui/MainFrame.java
<ide> }
<ide>
<ide> public void exit() {
<add> if(!ChangeTracker.getInstance().isEmpty()) {
<ide> int result = JOptionPane.showConfirmDialog((JFrame) null, "Would you like to save your file before quitting?");
<ide> switch(result) {
<ide> case JOptionPane.YES_OPTION:
<ide> return;
<ide> }
<ide> System.exit(0);
<add> }
<add> else
<add> System.exit(0);
<ide> }
<ide>
<ide> public void propertyChange(PropertyChangeEvent evt) {
<ide>
<ide> exitMenuItem.addActionListener(new ActionListener() {
<ide> public void actionPerformed(ActionEvent event) {
<del> _this.exit();
<add> _this.exit();
<ide> }
<ide> });
<ide>
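The change above gates the quit-time save prompt on whether the change tracker has recorded any edits, instead of asking unconditionally. A minimal, self-contained sketch of that pattern, with a boolean dirty flag standing in for ChangeTracker.getInstance().isEmpty() (the flag and save() below are hypothetical, not the project's classes):

    import javax.swing.JOptionPane;

    public class ExitGuardSketch {

        // Hypothetical stand-in for !ChangeTracker.getInstance().isEmpty()
        private boolean unsavedChanges = true;

        public void exit() {
            // Only bother the user when there is actually something to save.
            if (unsavedChanges) {
                int result = JOptionPane.showConfirmDialog(null,
                        "Would you like to save your file before quitting?");
                if (result == JOptionPane.CANCEL_OPTION || result == JOptionPane.CLOSED_OPTION) {
                    return; // user aborted the exit
                }
                if (result == JOptionPane.YES_OPTION) {
                    save();
                }
            }
            System.exit(0);
        }

        private void save() {
            unsavedChanges = false; // placeholder for the real save action
        }

        public static void main(String[] args) {
            new ExitGuardSketch().exit();
        }
    }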
Java
agpl-3.0
9ae8aa6c5f4341edc417acdda54a69ceaa28f1e6
0
exomiser/Exomiser,exomiser/Exomiser
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package de.charite.compbio.exomiser.cli.config;

import java.nio.file.Path;
import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.h2.jdbcx.JdbcConnectionPool;
import org.postgresql.ds.PGPoolingDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;

/**
 * Provides the JDBC datasource from the jdbc.properties file located in the
 * classpath.
 *
 * @author Jules Jacobsen <[email protected]>
 */
@Configuration
@PropertySource({"jdbc.properties"})
public class DataSourceConfig {

    private static final Logger logger = LoggerFactory.getLogger(DataSourceConfig.class);

    @Autowired
    private Environment env;

    @Autowired
    private Path dataPath;

    @Bean
    public DataSource dataSource() {
        if (env.getProperty("usePostgreSQL").equals("true")) {
            return postgreSQLDataSource();
        }
        return h2DataSource();
    }

    private int maxConnections() {
        int maxConnections = 10;
        String userSpecifiedMaxConn = env.getProperty("maxConnections");
        try {
            maxConnections = Integer.parseInt(userSpecifiedMaxConn);
        } catch (NumberFormatException ex) {
            logger.error("{} is not a valid integer value. Returning default value of {}", userSpecifiedMaxConn, maxConnections, ex);
        }
        return maxConnections;
    }

    private DataSource postgreSQLDataSource() {
        int maxConnections = maxConnections();
        PGPoolingDataSource dataSource = new PGPoolingDataSource();
        dataSource.setMaxConnections(maxConnections);
        logger.info("DataSource using maximum of {} database connections", maxConnections);
        dataSource.setInitialConnections(3);
        //resolve the placeholders in the jdbc.properties using the user-supplied data from application.properties
        env.resolvePlaceholders("dbuser");
        env.resolvePlaceholders("password");
        env.resolvePlaceholders("server");
        env.resolvePlaceholders("database");
        env.resolvePlaceholders("port");
        //read in the properties from jdbc.properties
        String user = env.getProperty("pg.username");
        String password = env.getProperty("pg.password");
        String server = env.getProperty("pg.server");
        String db = env.getProperty("pg.database");
        int port = Integer.parseInt(env.getProperty("pg.port"));

        dataSource.setServerName(server);
        dataSource.setDatabaseName(db);
        dataSource.setPortNumber(port);
        dataSource.setUser(user);
        dataSource.setPassword(password);

        String url = String.format("jdbc:postgresql://%s:%d/%s", server, port, db);
        logger.info("Returning a new PostgreSQL DataSource to URL {} user: {}", url, user);
        return dataSource;
    }

    private DataSource h2DataSource() {
        String user = env.getProperty("h2.username");
        String password = env.getProperty("h2.password");
        String url = env.getProperty("h2.url");

        if (env.containsProperty("h2Path") && !env.getProperty("h2Path").isEmpty()) {
            env.resolvePlaceholders("h2Path"); //this comes from the application.properties
        } else {
            //in this case it hasn't been manually set, so we'll use the default location
            //the placeholders are not visible in the url string hence we replace the 'file:'
            String h2Filepath = String.format("h2:%s", dataPath);
            url = env.getProperty("h2.url").replace("h2:", h2Filepath);
        }
        int maxConnections = maxConnections();
        logger.info("DataSource using maximum of {} database connections", maxConnections);
        JdbcConnectionPool dataSource = JdbcConnectionPool.create(url, user, password);
        dataSource.setMaxConnections(maxConnections);
        logger.info("Returning a new H2 DataSource to URL {} user: {}", url, user);
        return dataSource;
    }

    @Bean
    public Connection connection() {
        Connection connection = null;
        try {
            connection = dataSource().getConnection();
        } catch (SQLException ex) {
            logger.error(null, ex);
        }
        return connection;
    }
}
exomiser-cli/src/main/java/de/charite/compbio/exomiser/cli/config/DataSourceConfig.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package de.charite.compbio.exomiser.cli.config;

import java.nio.file.Path;
import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.h2.jdbcx.JdbcConnectionPool;
import org.postgresql.ds.PGPoolingDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;

/**
 * Provides the JDBC datasource from the jdbc.properties file located in the
 * classpath.
 *
 * @author Jules Jacobsen <[email protected]>
 */
@Configuration
@PropertySource({"jdbc.properties"})
public class DataSourceConfig {

    private static final Logger logger = LoggerFactory.getLogger(DataSourceConfig.class);

    @Autowired
    private Environment env;

    @Autowired
    private Path dataPath;

    @Bean
    public DataSource dataSource() {
        if (env.getProperty("usePostgreSQL").equals("true")) {
            return postgreSQLDataSource();
        }
        return h2DataSource();
    }

    private int maxConnections() {
        int maxConnections = 10;
        String userSpecifiedMaxConn = env.getProperty("maxConnections");
        try {
            maxConnections = Integer.parseInt(userSpecifiedMaxConn);
        } catch (NumberFormatException ex) {
            logger.error("{} is not a valid integer value. Returning default value of {}", userSpecifiedMaxConn, maxConnections, ex);
        }
        return maxConnections;
    }

    private DataSource postgreSQLDataSource() {
        int maxConnections = maxConnections();
        PGPoolingDataSource dataSource = new PGPoolingDataSource();
        dataSource.setMaxConnections(maxConnections);
        logger.info("DataSource using maximum of {} database connections", maxConnections);
        dataSource.setInitialConnections(3);
        //resolve the placeholders in the jdbc.properties using the user-supplied data from application.properties
        env.resolvePlaceholders("dbuser");
        env.resolvePlaceholders("password");
        env.resolvePlaceholders("server");
        env.resolvePlaceholders("database");
        env.resolvePlaceholders("port");
        //read in the properties from jdbc.properties
        String user = env.getProperty("pg.username");
        String password = env.getProperty("pg.password");
        String server = env.getProperty("pg.server");
        String db = env.getProperty("pg.database");
        int port = Integer.parseInt(env.getProperty("pg.port"));

        dataSource.setServerName(server);
        dataSource.setDatabaseName(db);
        dataSource.setPortNumber(port);
        dataSource.setUser(user);
        dataSource.setPassword(password);

        String url = String.format("jdbc:postgresql://%s:%d/%s", server, port, db);
        logger.info("Returning a new PostgreSQL DataSource to URL {} user: {}", url, user);
        return dataSource;
    }

    private DataSource h2DataSource() {
        String user = env.getProperty("h2.username");
        String password = env.getProperty("h2.password");
        String url = env.getProperty("h2.url");

        if (env.containsProperty("h2Path") && !env.getProperty("h2Path").isEmpty()) {
            env.resolvePlaceholders("h2Path"); //this comes from the application.properties
        } else {
            //in this case it hasn't been manually set, so we'll use the default location
            //the placeholders are not visible in the url string hence we replace the 'file:'
            String h2Filepath = String.format("file:%s", dataPath);
            url = env.getProperty("h2.url").replace("file:", h2Filepath);
        }
        int maxConnections = maxConnections();
        logger.info("DataSource using maximum of {} database connections", maxConnections);
        JdbcConnectionPool dataSource = JdbcConnectionPool.create(url, user, password);
        dataSource.setMaxConnections(maxConnections);
        logger.info("Returning a new H2 DataSource to URL {} user: {}", url, user);
        return dataSource;
    }

    @Bean
    public Connection connection() {
        Connection connection = null;
        try {
            connection = dataSource().getConnection();
        } catch (SQLException ex) {
            logger.error(null, ex);
        }
        return connection;
    }
}
bug fix to H2 URL when using default location. URLs had changed to h2: rather than file: but had not been updated in this bit of the code
exomiser-cli/src/main/java/de/charite/compbio/exomiser/cli/config/DataSourceConfig.java
bug fix to H2 URL when using default location. URLs had changed to h2: rather than file: but had not been updated in this bit of the code
<ide><path>exomiser-cli/src/main/java/de/charite/compbio/exomiser/cli/config/DataSourceConfig.java
<ide> } else {
<ide> //in this case it hasn't been manually set, so we'll use the default location
<ide> //the placeholders are not visible in the url string hence we replace the 'file:'
<del> String h2Filepath = String.format("file:%s", dataPath);
<del> url = env.getProperty("h2.url").replace("file:", h2Filepath);
<add> String h2Filepath = String.format("h2:%s", dataPath);
<add> url = env.getProperty("h2.url").replace("h2:", h2Filepath);
<ide> }
<ide> int maxConnections = maxConnections();
<ide> logger.info("DataSource using maximum of {} database connections", maxConnections);
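The one-line fix above is easy to miss in the flattened source: when no explicit h2Path is configured, the code splices the default data directory into the JDBC URL by replacing a scheme marker, so that marker must match what the configured URL actually contains. A sketch of both behaviours with a hypothetical jdbc.properties value (the real URL format is not shown in this record):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class H2UrlSketch {
        public static void main(String[] args) {
            // Hypothetical configured value; note it contains "h2:", not "file:".
            String configuredUrl = "jdbc:h2:/exomiser;MODE=PostgreSQL";
            Path dataPath = Paths.get("/opt/exomiser/data"); // hypothetical default data directory

            // Post-fix: replacing the marker that is actually present splices the path in.
            String fixed = configuredUrl.replace("h2:", String.format("h2:%s", dataPath));
            System.out.println(fixed); // jdbc:h2:/opt/exomiser/data/exomiser;MODE=PostgreSQL

            // Pre-fix: "file:" no longer appears, so the URL is returned unchanged
            // and the default data path is silently ignored.
            String broken = configuredUrl.replace("file:", String.format("file:%s", dataPath));
            System.out.println(broken); // jdbc:h2:/exomiser;MODE=PostgreSQL
        }
    }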
Java
mit
1107151634b1fa4f4ebea5cd7d10f3648be53d86
0
QuarkWorks/RealmTypeSafeQuery-Android,QuarkWorks/RealmTypeSafeQuery-Android
package com.quarkworks.android.tests.models;

import com.quarkworks.android.realmtypesafequery.annotations.GenerateRealmFieldNames;
import com.quarkworks.android.realmtypesafequery.annotations.GenerateRealmFields;

import io.realm.RealmModel;
import io.realm.annotations.RealmClass;

@RealmClass
@GenerateRealmFieldNames
@GenerateRealmFields
public class StatData implements RealmModel {
    public static final int SUM_DATA_INDEX;
    public static final int AVG_DATA_INDEX;
    public static final int MAX_DATA_INDEX;
    public static final int MIN_DATA_INDEX;
    public static final Object [] SUM_DATA = {13057L, 130.57f};
    public static final Object [] AVG_DATA = {395, 3.95666666666667d};
    public static final Object [] MAX_DATA = {1089, 10.89f, "1089,"};
    public static final Object [] MIN_DATA = {33, 0.33f, "0033,"};

    public static final Object[][] DATA = new Object[][] {
            {215, 2.15f, "0215,"},
            {189, 1.89f, "0189,"},
            {243, 2.43f, "0243,"},
            {845, 8.45f, "0845,"},
            {453, 4.53f, "0453,"},
            {963, 9.63f, "0963,"},
            {1025, 10.25f, "1025,"},
            {413, 4.13f, "0413,"},
            {105, 1.05f, "0105,"},
            {89, 0.89f, "0089,"},
            {585, 5.85f, "0585,"},
            {33, 0.33f, "0033,"},
            {633, 6.33f, "0633,"},
            {1089, 10.89f, "1089,"},
            {75, 0.75f, "0075,"},
            {495, 4.95f, "0495,"},
            {305, 3.05f, "0305,"},
            {375, 3.75f, "0375,"},
            {165, 1.65f, "0165,"},
            {39, 0.39f, "0039,"},
            {63, 0.63f, "0063,"},
            {683, 6.83f, "0683,"},
            {339, 3.39f, "0339,"},
            {539, 5.39f, "0539,"},
            {903, 9.03f, "0903,"},
            {273, 2.73f, "0273,"},
            {123, 1.23f, "0123,"},
            {143, 1.43f, "0143,"},
            {45, 0.45f, "0045,"},
            {35, 0.35f, "0035,"},
            {53, 0.53f, "0053,"},
            {735, 7.35f, "0735,"},
            {789, 7.89f, "0789,"},
            {395, 3.95666666666667d},
            {13057, 130.57f},
            {1089, 10.89f},
            {33, 0.33f},
    };

    static {
        AVG_DATA_INDEX = DATA.length - 4;
        SUM_DATA_INDEX = DATA.length - 3;
        MAX_DATA_INDEX = DATA.length - 2;
        MIN_DATA_INDEX = DATA.length - 1;
    }

    // @PrimaryKey
    // public String primaryKey;

    public Integer integerField;
    public Float floatField;
    public String stringField;
}
tests/src/androidTest/java/com/quarkworks/android/tests/models/StatData.java
package com.quarkworks.android.tests.models;

import com.quarkworks.android.realmtypesafequery.annotations.GenerateRealmFieldNames;
import com.quarkworks.android.realmtypesafequery.annotations.GenerateRealmFields;

import io.realm.RealmModel;
import io.realm.annotations.RealmClass;

@RealmClass
@GenerateRealmFieldNames
@GenerateRealmFields
public class StatData implements RealmModel {
    public static final int SUM_DATA_INDEX;
    public static final int AVG_DATA_INDEX;
    public static final int MAX_DATA_INDEX;
    public static final int MIN_DATA_INDEX;
    public static final Object [] SUM_DATA = {13057, 130.57f};
    public static final Object [] AVG_DATA = {395, 3.95666666666667d};
    public static final Object [] MAX_DATA = {1089, 10.89f, "1089,"};
    public static final Object [] MIN_DATA = {33, 0.33f, "0033,"};

    public static final Object[][] DATA = new Object[][] {
            {215, 2.15f, "0215,"},
            {189, 1.89f, "0189,"},
            {243, 2.43f, "0243,"},
            {845, 8.45f, "0845,"},
            {453, 4.53f, "0453,"},
            {963, 9.63f, "0963,"},
            {1025, 10.25f, "1025,"},
            {413, 4.13f, "0413,"},
            {105, 1.05f, "0105,"},
            {89, 0.89f, "0089,"},
            {585, 5.85f, "0585,"},
            {33, 0.33f, "0033,"},
            {633, 6.33f, "0633,"},
            {1089, 10.89f, "1089,"},
            {75, 0.75f, "0075,"},
            {495, 4.95f, "0495,"},
            {305, 3.05f, "0305,"},
            {375, 3.75f, "0375,"},
            {165, 1.65f, "0165,"},
            {39, 0.39f, "0039,"},
            {63, 0.63f, "0063,"},
            {683, 6.83f, "0683,"},
            {339, 3.39f, "0339,"},
            {539, 5.39f, "0539,"},
            {903, 9.03f, "0903,"},
            {273, 2.73f, "0273,"},
            {123, 1.23f, "0123,"},
            {143, 1.43f, "0143,"},
            {45, 0.45f, "0045,"},
            {35, 0.35f, "0035,"},
            {53, 0.53f, "0053,"},
            {735, 7.35f, "0735,"},
            {789, 7.89f, "0789,"},
            {395, 3.95666666666667d},
            {13057, 130.57f},
            {1089, 10.89f},
            {33, 0.33f},
    };

    static {
        AVG_DATA_INDEX = DATA.length - 4;
        SUM_DATA_INDEX = DATA.length - 3;
        MAX_DATA_INDEX = DATA.length - 2;
        MIN_DATA_INDEX = DATA.length - 1;
    }

    // @PrimaryKey
    // public String primaryKey;

    public Integer integerField;
    public Float floatField;
    public String stringField;
}
Fix test
tests/src/androidTest/java/com/quarkworks/android/tests/models/StatData.java
Fix test
<ide><path>tests/src/androidTest/java/com/quarkworks/android/tests/models/StatData.java
<ide> public static final int AVG_DATA_INDEX;
<ide> public static final int MAX_DATA_INDEX;
<ide> public static final int MIN_DATA_INDEX;
<del> public static final Object [] SUM_DATA = {13057, 130.57f};
<add> public static final Object [] SUM_DATA = {13057L, 130.57f};
<ide> public static final Object [] AVG_DATA = {395, 3.95666666666667d};
<ide> public static final Object [] MAX_DATA = {1089, 10.89f, "1089,"};
<ide> public static final Object [] MIN_DATA = {33, 0.33f, "0033,"};
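The single character added above (13057 becomes 13057L) is the entire fix, and it matters because of boxing: a sum over an integer column is usually reported as a Long by query engines such as Realm (an assumption about the test's query API, which is not shown in this record), and boxed numeric types only compare equal within the same class. A minimal sketch of the failure mode:

    public class BoxedEqualitySketch {
        public static void main(String[] args) {
            Object expected = 13057;  // int literal autoboxes to Integer
            Object actual = 13057L;   // a long-valued aggregate boxes to Long

            // Integer.equals(Long) is false even for the same numeric value,
            // so an equality assertion on the boxed objects fails.
            System.out.println(expected.equals(actual)); // false

            // Widening both to long compares the values themselves.
            System.out.println(((Number) expected).longValue()
                    == ((Number) actual).longValue()); // true
        }
    }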
Java
apache-2.0
60902dcbbf68a77c5885628bd0015931dc473ab2
0
BernhardBln/jdbi,kentyeh/jdbi,11xor6/jdbi,fengshao0907/jdbi,11xor6/jdbi,kentyeh/jdbi,christophercurrie/jdbi,hgschmie/jdbi,voidifremoved/jdbi,jdbi/jdbi,jdbi/jdbi,zanebenefits/jdbi,HubSpot/jdbi,hgschmie/jdbi,fengshao0907/jdbi,hgschmie/jdbi,voidifremoved/jdbi,stevenschlansker/jdbi,jdbi/jdbi,stevenschlansker/jdbi,omidp/jdbi,pennello/jdbi,john9x/jdbi,omidp/jdbi,christophercurrie/jdbi,pennello/jdbi,zanebenefits/jdbi,BernhardBln/jdbi,john9x/jdbi,HubSpot/jdbi
/*
 * Copyright (C) 2004 - 2014 Brian McCallister
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.skife.jdbi.v2.exceptions;

public class UnableToRestoreAutoCommitStateException extends DBIException {

    private static final long serialVersionUID = 2433069110223543423L;

    public UnableToRestoreAutoCommitStateException(Throwable throwable) {
        super(throwable);
    }
}
src/main/java/org/skife/jdbi/v2/exceptions/UnableToRestoreAutoCommitStateException.java
/*
 * Copyright (C) 2004 - 2014 Brian McCallister
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.skife.jdbi.v2.exceptions;

public class UnableToRestoreAutoCommitStateException extends DBIException {

    public UnableToRestoreAutoCommitStateException(Throwable throwable) {
        super(throwable);
    }
}
Added serialVersionUID.
src/main/java/org/skife/jdbi/v2/exceptions/UnableToRestoreAutoCommitStateException.java
Added serialVersionUID.
<ide><path>src/main/java/org/skife/jdbi/v2/exceptions/UnableToRestoreAutoCommitStateException.java
<ide> */
<ide> package org.skife.jdbi.v2.exceptions;
<ide>
<del>public class UnableToRestoreAutoCommitStateException extends DBIException {
<add>public class UnableToRestoreAutoCommitStateException extends DBIException {
<add>
<add> private static final long serialVersionUID = 2433069110223543423L;
<ide>
<ide> public UnableToRestoreAutoCommitStateException(Throwable throwable) {
<ide> super(throwable);
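The diff above pins a serialVersionUID, a pattern that applies to any Serializable class (all Throwables are Serializable): without an explicit value the JVM derives one from the class's shape, so an otherwise harmless recompile can make previously serialized instances fail to deserialize with an InvalidClassException. A sketch of the pattern with a hypothetical exception type:

    public class PinnedUidSketch {

        // Hypothetical exception; any fixed UID works, including an IDE-generated one.
        static class ExampleException extends RuntimeException {
            private static final long serialVersionUID = 1L;

            ExampleException(Throwable cause) {
                super(cause);
            }
        }

        public static void main(String[] args) {
            // The pinned UID keeps serialized instances readable across recompiles.
            System.out.println(new ExampleException(new IllegalStateException("demo")));
        }
    }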
Java
apache-2.0
d64ae9a089f0f3458ed28d1800e251ec1fca5f49
0
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.hosted.controller.persistence; import com.google.common.util.concurrent.UncheckedTimeoutException; import com.google.inject.Inject; import com.yahoo.component.Version; import com.yahoo.component.Vtag; import com.yahoo.config.provision.ApplicationId; import com.yahoo.config.provision.HostName; import com.yahoo.config.provision.TenantName; import com.yahoo.config.provision.zone.ZoneId; import com.yahoo.path.Path; import com.yahoo.slime.Slime; import com.yahoo.vespa.config.SlimeUtils; import com.yahoo.vespa.curator.Curator; import com.yahoo.vespa.curator.Lock; import com.yahoo.vespa.hosted.controller.Application; import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType; import com.yahoo.vespa.hosted.controller.api.integration.deployment.RunId; import com.yahoo.vespa.hosted.controller.application.RoutingPolicy; import com.yahoo.vespa.hosted.controller.auditlog.AuditLog; import com.yahoo.vespa.hosted.controller.deployment.Run; import com.yahoo.vespa.hosted.controller.deployment.Step; import com.yahoo.vespa.hosted.controller.dns.NameServiceQueue; import com.yahoo.vespa.hosted.controller.tenant.Tenant; import com.yahoo.vespa.hosted.controller.versions.OsVersion; import com.yahoo.vespa.hosted.controller.versions.OsVersionStatus; import com.yahoo.vespa.hosted.controller.versions.VersionStatus; import com.yahoo.vespa.hosted.controller.versions.VespaVersion; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.time.Duration; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeoutException; import java.util.function.Function; import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.LongStream; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.collectingAndThen; /** * Curator backed database for storing the persistence state of controllers. This maps controller specific operations * to general curator operations. 
* * @author bratseth * @author mpolden * @author jonmv */ public class CuratorDb { private static final Logger log = Logger.getLogger(CuratorDb.class.getName()); private static final Duration deployLockTimeout = Duration.ofMinutes(30); private static final Duration defaultLockTimeout = Duration.ofMinutes(5); private static final Duration defaultTryLockTimeout = Duration.ofSeconds(1); private static final Path root = Path.fromString("/controller/v1"); private static final Path lockRoot = root.append("locks"); private static final Path tenantRoot = root.append("tenants"); private static final Path applicationRoot = root.append("applications"); private static final Path jobRoot = root.append("jobs"); private static final Path controllerRoot = root.append("controllers"); private static final Path routingPoliciesRoot = root.append("routingPolicies"); private final StringSetSerializer stringSetSerializer = new StringSetSerializer(); private final VersionStatusSerializer versionStatusSerializer = new VersionStatusSerializer(); private final VersionSerializer versionSerializer = new VersionSerializer(); private final ConfidenceOverrideSerializer confidenceOverrideSerializer = new ConfidenceOverrideSerializer(); private final TenantSerializer tenantSerializer = new TenantSerializer(); private final ApplicationSerializer applicationSerializer = new ApplicationSerializer(); private final RunSerializer runSerializer = new RunSerializer(); private final OsVersionSerializer osVersionSerializer = new OsVersionSerializer(); private final OsVersionStatusSerializer osVersionStatusSerializer = new OsVersionStatusSerializer(osVersionSerializer); private final RoutingPolicySerializer routingPolicySerializer = new RoutingPolicySerializer(); private final AuditLogSerializer auditLogSerializer = new AuditLogSerializer(); private final NameServiceQueueSerializer nameServiceQueueSerializer = new NameServiceQueueSerializer(); private final Curator curator; private final Duration tryLockTimeout; /** * All keys, to allow reentrancy. * This will grow forever, but this should be too slow to be a problem. 
*/ private final ConcurrentHashMap<Path, Lock> locks = new ConcurrentHashMap<>(); @Inject public CuratorDb(Curator curator) { this(curator, defaultTryLockTimeout); } CuratorDb(Curator curator, Duration tryLockTimeout) { this.curator = curator; this.tryLockTimeout = tryLockTimeout; // TODO: Remove after 7.60 curator.delete(root.append("openStackServerPool")); curator.delete(root.append("vespaServerPool")); } /** Returns all hosts configured to be part of this ZooKeeper cluster */ public List<HostName> cluster() { return Arrays.stream(curator.zooKeeperEnsembleConnectionSpec().split(",")) .filter(hostAndPort -> !hostAndPort.isEmpty()) .map(hostAndPort -> hostAndPort.split(":")[0]) .map(HostName::from) .collect(Collectors.toList()); } // -------------- Locks --------------------------------------------------- /** Creates a reentrant lock */ private Lock lock(Path path, Duration timeout) { curator.create(path); Lock lock = locks.computeIfAbsent(path, (pathArg) -> new Lock(pathArg.getAbsolute(), curator)); lock.acquire(timeout); return lock; } public Lock lock(TenantName name) { return lock(lockPath(name), defaultLockTimeout.multipliedBy(2)); } public Lock lock(ApplicationId id) { return lock(lockPath(id), defaultLockTimeout.multipliedBy(2)); } public Lock lockForDeployment(ApplicationId id, ZoneId zone) { return lock(lockPath(id, zone), deployLockTimeout); } public Lock lock(ApplicationId id, JobType type) { return lock(lockPath(id, type), defaultLockTimeout); } public Lock lock(ApplicationId id, JobType type, Step step) throws TimeoutException { return tryLock(lockPath(id, type, step)); } public Lock lockRotations() { return lock(lockRoot.append("rotations"), defaultLockTimeout); } public Lock lockConfidenceOverrides() { return lock(lockRoot.append("confidenceOverrides"), defaultLockTimeout); } public Lock lockInactiveJobs() { return lock(lockRoot.append("inactiveJobsLock"), defaultLockTimeout); } public Lock lockMaintenanceJob(String jobName) throws TimeoutException { return tryLock(lockRoot.append("maintenanceJobLocks").append(jobName)); } @SuppressWarnings("unused") // Called by internal code public Lock lockProvisionState(String provisionStateId) { return lock(lockPath(provisionStateId), Duration.ofSeconds(1)); } public Lock lockOsVersions() { return lock(lockRoot.append("osTargetVersion"), defaultLockTimeout); } public Lock lockOsVersionStatus() { return lock(lockRoot.append("osVersionStatus"), defaultLockTimeout); } public Lock lockRoutingPolicies() { return lock(lockRoot.append("routingPolicies"), defaultLockTimeout); } public Lock lockAuditLog() { return lock(lockRoot.append("auditLog"), defaultLockTimeout); } public Lock lockNameServiceQueue() { return lock(lockRoot.append("nameServiceQueue"), defaultLockTimeout); } // -------------- Helpers ------------------------------------------ /** Try locking with a low timeout, meaning it is OK to fail lock acquisition. * * Useful for maintenance jobs, where there is no point in running the jobs back to back. 
*/ private Lock tryLock(Path path) throws TimeoutException { try { return lock(path, tryLockTimeout); } catch (UncheckedTimeoutException e) { throw new TimeoutException(e.getMessage()); } } private <T> Optional<T> read(Path path, Function<byte[], T> mapper) { return curator.getData(path).filter(data -> data.length > 0).map(mapper); } private Optional<Slime> readSlime(Path path) { return read(path, SlimeUtils::jsonToSlime); } private static byte[] asJson(Slime slime) { try { return SlimeUtils.toJsonBytes(slime); } catch (IOException e) { throw new UncheckedIOException(e); } } // -------------- Deployment orchestration -------------------------------- public Set<String> readInactiveJobs() { try { return readSlime(inactiveJobsPath()).map(stringSetSerializer::fromSlime).orElseGet(HashSet::new); } catch (RuntimeException e) { log.log(Level.WARNING, "Error reading inactive jobs, deleting inactive state"); writeInactiveJobs(Collections.emptySet()); return new HashSet<>(); } } public void writeInactiveJobs(Set<String> inactiveJobs) { curator.set(inactiveJobsPath(), stringSetSerializer.toJson(inactiveJobs)); } public double readUpgradesPerMinute() { return read(upgradesPerMinutePath(), ByteBuffer::wrap).map(ByteBuffer::getDouble).orElse(0.125); } public void writeUpgradesPerMinute(double n) { curator.set(upgradesPerMinutePath(), ByteBuffer.allocate(Double.BYTES).putDouble(n).array()); } public Optional<Integer> readTargetMajorVersion() { return read(targetMajorVersionPath(), ByteBuffer::wrap).map(ByteBuffer::getInt); } public void writeTargetMajorVersion(Optional<Integer> targetMajorVersion) { if (targetMajorVersion.isPresent()) curator.set(targetMajorVersionPath(), ByteBuffer.allocate(Integer.BYTES).putInt(targetMajorVersion.get()).array()); else curator.delete(targetMajorVersionPath()); } public void writeVersionStatus(VersionStatus status) { curator.set(versionStatusPath(), asJson(versionStatusSerializer.toSlime(status))); } public VersionStatus readVersionStatus() { return readSlime(versionStatusPath()).map(versionStatusSerializer::fromSlime).orElseGet(VersionStatus::empty); } public void writeConfidenceOverrides(Map<Version, VespaVersion.Confidence> overrides) { curator.set(confidenceOverridesPath(), asJson(confidenceOverrideSerializer.toSlime(overrides))); } public Map<Version, VespaVersion.Confidence> readConfidenceOverrides() { return readSlime(confidenceOverridesPath()).map(confidenceOverrideSerializer::fromSlime) .orElseGet(Collections::emptyMap); } public void writeControllerVersion(HostName hostname, Version version) { curator.set(controllerPath(hostname.value()), asJson(versionSerializer.toSlime(version))); } public Version readControllerVersion(HostName hostname) { return readSlime(controllerPath(hostname.value())) .map(versionSerializer::fromSlime) .orElse(Vtag.currentVersion); } // Infrastructure upgrades public void writeOsVersions(Set<OsVersion> versions) { curator.set(osTargetVersionPath(), asJson(osVersionSerializer.toSlime(versions))); } public Set<OsVersion> readOsVersions() { return readSlime(osTargetVersionPath()).map(osVersionSerializer::fromSlime).orElseGet(Collections::emptySet); } public void writeOsVersionStatus(OsVersionStatus status) { curator.set(osVersionStatusPath(), asJson(osVersionStatusSerializer.toSlime(status))); } public OsVersionStatus readOsVersionStatus() { return readSlime(osVersionStatusPath()).map(osVersionStatusSerializer::fromSlime).orElse(OsVersionStatus.empty); } // -------------- Tenant -------------------------------------------------- public void 
writeTenant(Tenant tenant) { curator.set(tenantPath(tenant.name()), asJson(tenantSerializer.toSlime(tenant))); } public Optional<Tenant> readTenant(TenantName name) { return readSlime(tenantPath(name)).map(tenantSerializer::tenantFrom); } public List<Tenant> readTenants() { return readTenantNames().stream() .map(this::readTenant) .flatMap(Optional::stream) .collect(collectingAndThen(Collectors.toList(), Collections::unmodifiableList)); } public List<TenantName> readTenantNames() { return curator.getChildren(tenantRoot).stream() .map(TenantName::from) .collect(Collectors.toList()); } public void removeTenant(TenantName name) { curator.delete(tenantPath(name)); } // -------------- Application --------------------------------------------- public void writeApplication(Application application) { curator.set(applicationPath(application.id()), asJson(applicationSerializer.toSlime(application))); } public Optional<Application> readApplication(ApplicationId application) { return readSlime(applicationPath(application)).map(applicationSerializer::fromSlime); } public List<Application> readApplications() { return readApplications(ignored -> true); } public List<Application> readApplications(TenantName name) { return readApplications(application -> application.tenant().equals(name)); } private List<Application> readApplications(Predicate<ApplicationId> applicationFilter) { return curator.getChildren(applicationRoot).stream() .map(ApplicationId::fromSerializedForm) .filter(applicationFilter) .map(this::readApplication) .flatMap(Optional::stream) .collect(collectingAndThen(Collectors.toList(), Collections::unmodifiableList)); } public void removeApplication(ApplicationId application) { curator.delete(applicationPath(application)); } // -------------- Job Runs ------------------------------------------------ public void writeLastRun(Run run) { curator.set(lastRunPath(run.id().application(), run.id().type()), asJson(runSerializer.toSlime(run))); } public void writeHistoricRuns(ApplicationId id, JobType type, Iterable<Run> runs) { curator.set(runsPath(id, type), asJson(runSerializer.toSlime(runs))); } public Optional<Run> readLastRun(ApplicationId id, JobType type) { return readSlime(lastRunPath(id, type)).map(runSerializer::runFromSlime); } public SortedMap<RunId, Run> readHistoricRuns(ApplicationId id, JobType type) { return readSlime(runsPath(id, type)).map(runSerializer::runsFromSlime).orElse(new TreeMap<>(comparing(RunId::number))); } public void deleteRunData(ApplicationId id, JobType type) { curator.delete(runsPath(id, type)); curator.delete(lastRunPath(id, type)); } public void deleteRunData(ApplicationId id) { curator.delete(jobRoot.append(id.serializedForm())); } public List<ApplicationId> applicationsWithJobs() { return curator.getChildren(jobRoot).stream() .map(ApplicationId::fromSerializedForm) .collect(Collectors.toList()); } public Optional<byte[]> readLog(ApplicationId id, JobType type, long chunkId) { return curator.getData(logPath(id, type, chunkId)); } public void writeLog(ApplicationId id, JobType type, long chunkId, byte[] log) { curator.set(logPath(id, type, chunkId), log); } public void deleteLog(ApplicationId id, JobType type) { curator.delete(runsPath(id, type).append("logs")); } public Optional<Long> readLastLogEntryId(ApplicationId id, JobType type) { return curator.getData(lastLogPath(id, type)) .map(String::new).map(Long::parseLong); } public void writeLastLogEntryId(ApplicationId id, JobType type, long lastId) { curator.set(lastLogPath(id, type), 
Long.toString(lastId).getBytes()); } public LongStream getLogChunkIds(ApplicationId id, JobType type) { return curator.getChildren(runsPath(id, type).append("logs")).stream() .mapToLong(Long::parseLong) .sorted(); } // -------------- Audit log ----------------------------------------------- public AuditLog readAuditLog() { return readSlime(auditLogPath()).map(auditLogSerializer::fromSlime) .orElse(AuditLog.empty); } public void writeAuditLog(AuditLog log) { curator.set(auditLogPath(), asJson(auditLogSerializer.toSlime(log))); } // -------------- Name service log ---------------------------------------- public NameServiceQueue readNameServiceQueue() { return readSlime(nameServiceQueuePath()).map(nameServiceQueueSerializer::fromSlime) .orElse(NameServiceQueue.EMPTY); } public void writeNameServiceQueue(NameServiceQueue queue) { curator.set(nameServiceQueuePath(), asJson(nameServiceQueueSerializer.toSlime(queue))); } // -------------- Provisioning (called by internal code) ------------------ @SuppressWarnings("unused") public Optional<byte[]> readProvisionState(String provisionId) { return curator.getData(provisionStatePath(provisionId)); } @SuppressWarnings("unused") public void writeProvisionState(String provisionId, byte[] data) { curator.set(provisionStatePath(provisionId), data); } // -------------- Routing policies ---------------------------------------- public void writeRoutingPolicies(ApplicationId application, Set<RoutingPolicy> policies) { curator.set(routingPolicyPath(application), asJson(routingPolicySerializer.toSlime(policies))); } public Map<ApplicationId, Set<RoutingPolicy>> readRoutingPolicies() { return curator.getChildren(routingPoliciesRoot).stream() .map(ApplicationId::fromSerializedForm) .collect(Collectors.toUnmodifiableMap(Function.identity(), this::readRoutingPolicies)); } public Set<RoutingPolicy> readRoutingPolicies(ApplicationId application) { return readSlime(routingPolicyPath(application)).map(slime -> routingPolicySerializer.fromSlime(application, slime)) .orElseGet(Collections::emptySet); } // -------------- Paths --------------------------------------------------- private Path lockPath(TenantName tenant) { return lockRoot .append(tenant.value()); } private Path lockPath(ApplicationId application) { return lockPath(application.tenant()) .append(application.application().value()) .append(application.instance().value()); } private Path lockPath(ApplicationId application, ZoneId zone) { return lockPath(application) .append(zone.environment().value()) .append(zone.region().value()); } private Path lockPath(ApplicationId application, JobType type) { return lockPath(application) .append(type.jobName()); } private Path lockPath(ApplicationId application, JobType type, Step step) { return lockPath(application, type) .append(step.name()); } private Path lockPath(String provisionId) { return lockRoot .append(provisionStatePath()) .append(provisionId); } private static Path inactiveJobsPath() { return root.append("inactiveJobs"); } private static Path upgradesPerMinutePath() { return root.append("upgrader").append("upgradesPerMinute"); } private static Path targetMajorVersionPath() { return root.append("upgrader").append("targetMajorVersion"); } private static Path confidenceOverridesPath() { return root.append("upgrader").append("confidenceOverrides"); } private static Path osTargetVersionPath() { return root.append("osUpgrader").append("targetVersion"); } private static Path osVersionStatusPath() { return root.append("osVersionStatus"); } private static Path 
versionStatusPath() { return root.append("versionStatus"); } private static Path routingPolicyPath(ApplicationId application) { return routingPoliciesRoot.append(application.serializedForm()); } private static Path nameServiceQueuePath() { return root.append("nameServiceQueue"); } private static Path auditLogPath() { return root.append("auditLog"); } private static Path provisionStatePath() { return root.append("provisioning").append("states"); } private static Path provisionStatePath(String provisionId) { return provisionStatePath().append(provisionId); } private static Path tenantPath(TenantName name) { return tenantRoot.append(name.value()); } private static Path applicationPath(ApplicationId application) { return applicationRoot.append(application.serializedForm()); } private static Path runsPath(ApplicationId id, JobType type) { return jobRoot.append(id.serializedForm()).append(type.jobName()); } private static Path lastRunPath(ApplicationId id, JobType type) { return runsPath(id, type).append("last"); } private static Path logPath(ApplicationId id, JobType type, long first) { return runsPath(id, type).append("logs").append(Long.toString(first)); } private static Path lastLogPath(ApplicationId id, JobType type) { return runsPath(id, type).append("logs"); } private static Path controllerPath(String hostname) { return controllerRoot.append(hostname); } }
controller-server/src/main/java/com/yahoo/vespa/hosted/controller/persistence/CuratorDb.java
// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.hosted.controller.persistence; import com.google.common.util.concurrent.UncheckedTimeoutException; import com.google.inject.Inject; import com.yahoo.component.Version; import com.yahoo.component.Vtag; import com.yahoo.config.provision.ApplicationId; import com.yahoo.config.provision.HostName; import com.yahoo.config.provision.TenantName; import com.yahoo.config.provision.zone.ZoneId; import com.yahoo.path.Path; import com.yahoo.slime.Slime; import com.yahoo.vespa.config.SlimeUtils; import com.yahoo.vespa.curator.Curator; import com.yahoo.vespa.curator.Lock; import com.yahoo.vespa.hosted.controller.Application; import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType; import com.yahoo.vespa.hosted.controller.api.integration.deployment.RunId; import com.yahoo.vespa.hosted.controller.application.RoutingPolicy; import com.yahoo.vespa.hosted.controller.auditlog.AuditLog; import com.yahoo.vespa.hosted.controller.deployment.Run; import com.yahoo.vespa.hosted.controller.deployment.Step; import com.yahoo.vespa.hosted.controller.dns.NameServiceQueue; import com.yahoo.vespa.hosted.controller.tenant.Tenant; import com.yahoo.vespa.hosted.controller.versions.OsVersion; import com.yahoo.vespa.hosted.controller.versions.OsVersionStatus; import com.yahoo.vespa.hosted.controller.versions.VersionStatus; import com.yahoo.vespa.hosted.controller.versions.VespaVersion; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.time.Duration; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeoutException; import java.util.function.Function; import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.LongStream; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.collectingAndThen; /** * Curator backed database for storing the persistence state of controllers. This maps controller specific operations * to general curator operations. 
* * @author bratseth * @author mpolden * @author jonmv */ public class CuratorDb { private static final Logger log = Logger.getLogger(CuratorDb.class.getName()); private static final Duration deployLockTimeout = Duration.ofMinutes(30); private static final Duration defaultLockTimeout = Duration.ofMinutes(5); private static final Duration defaultTryLockTimeout = Duration.ofSeconds(1); private static final Path root = Path.fromString("/controller/v1"); private static final Path lockRoot = root.append("locks"); private static final Path tenantRoot = root.append("tenants"); private static final Path applicationRoot = root.append("applications"); private static final Path jobRoot = root.append("jobs"); private static final Path controllerRoot = root.append("controllers"); private static final Path routingPoliciesRoot = root.append("routingPolicies"); private final StringSetSerializer stringSetSerializer = new StringSetSerializer(); private final VersionStatusSerializer versionStatusSerializer = new VersionStatusSerializer(); private final VersionSerializer versionSerializer = new VersionSerializer(); private final ConfidenceOverrideSerializer confidenceOverrideSerializer = new ConfidenceOverrideSerializer(); private final TenantSerializer tenantSerializer = new TenantSerializer(); private final ApplicationSerializer applicationSerializer = new ApplicationSerializer(); private final RunSerializer runSerializer = new RunSerializer(); private final OsVersionSerializer osVersionSerializer = new OsVersionSerializer(); private final OsVersionStatusSerializer osVersionStatusSerializer = new OsVersionStatusSerializer(osVersionSerializer); private final RoutingPolicySerializer routingPolicySerializer = new RoutingPolicySerializer(); private final AuditLogSerializer auditLogSerializer = new AuditLogSerializer(); private final NameServiceQueueSerializer nameServiceQueueSerializer = new NameServiceQueueSerializer(); private final Curator curator; private final Duration tryLockTimeout; /** * All keys, to allow reentrancy. * This will grow forever, but this should be too slow to be a problem. 
*/ private final ConcurrentHashMap<Path, Lock> locks = new ConcurrentHashMap<>(); @Inject public CuratorDb(Curator curator) { this(curator, defaultTryLockTimeout); } CuratorDb(Curator curator, Duration tryLockTimeout) { this.curator = curator; this.tryLockTimeout = tryLockTimeout; } /** Returns all hosts configured to be part of this ZooKeeper cluster */ public List<HostName> cluster() { return Arrays.stream(curator.zooKeeperEnsembleConnectionSpec().split(",")) .filter(hostAndPort -> !hostAndPort.isEmpty()) .map(hostAndPort -> hostAndPort.split(":")[0]) .map(HostName::from) .collect(Collectors.toList()); } // -------------- Locks --------------------------------------------------- /** Creates a reentrant lock */ private Lock lock(Path path, Duration timeout) { curator.create(path); Lock lock = locks.computeIfAbsent(path, (pathArg) -> new Lock(pathArg.getAbsolute(), curator)); lock.acquire(timeout); return lock; } public Lock lock(TenantName name) { return lock(lockPath(name), defaultLockTimeout.multipliedBy(2)); } public Lock lock(ApplicationId id) { return lock(lockPath(id), defaultLockTimeout.multipliedBy(2)); } public Lock lockForDeployment(ApplicationId id, ZoneId zone) { return lock(lockPath(id, zone), deployLockTimeout); } public Lock lock(ApplicationId id, JobType type) { return lock(lockPath(id, type), defaultLockTimeout); } public Lock lock(ApplicationId id, JobType type, Step step) throws TimeoutException { return tryLock(lockPath(id, type, step)); } public Lock lockRotations() { return lock(lockRoot.append("rotations"), defaultLockTimeout); } public Lock lockConfidenceOverrides() { return lock(lockRoot.append("confidenceOverrides"), defaultLockTimeout); } public Lock lockInactiveJobs() { return lock(lockRoot.append("inactiveJobsLock"), defaultLockTimeout); } public Lock lockMaintenanceJob(String jobName) throws TimeoutException { return tryLock(lockRoot.append("maintenanceJobLocks").append(jobName)); } @SuppressWarnings("unused") // Called by internal code public Lock lockProvisionState(String provisionStateId) { return lock(lockPath(provisionStateId), Duration.ofSeconds(1)); } public Lock lockOsVersions() { return lock(lockRoot.append("osTargetVersion"), defaultLockTimeout); } public Lock lockOsVersionStatus() { return lock(lockRoot.append("osVersionStatus"), defaultLockTimeout); } public Lock lockRoutingPolicies() { return lock(lockRoot.append("routingPolicies"), defaultLockTimeout); } public Lock lockAuditLog() { return lock(lockRoot.append("auditLog"), defaultLockTimeout); } public Lock lockNameServiceQueue() { return lock(lockRoot.append("nameServiceQueue"), defaultLockTimeout); } // -------------- Helpers ------------------------------------------ /** Try locking with a low timeout, meaning it is OK to fail lock acquisition. * * Useful for maintenance jobs, where there is no point in running the jobs back to back. 
*/ private Lock tryLock(Path path) throws TimeoutException { try { return lock(path, tryLockTimeout); } catch (UncheckedTimeoutException e) { throw new TimeoutException(e.getMessage()); } } private <T> Optional<T> read(Path path, Function<byte[], T> mapper) { return curator.getData(path).filter(data -> data.length > 0).map(mapper); } private Optional<Slime> readSlime(Path path) { return read(path, SlimeUtils::jsonToSlime); } private static byte[] asJson(Slime slime) { try { return SlimeUtils.toJsonBytes(slime); } catch (IOException e) { throw new UncheckedIOException(e); } } // -------------- Deployment orchestration -------------------------------- public Set<String> readInactiveJobs() { try { return readSlime(inactiveJobsPath()).map(stringSetSerializer::fromSlime).orElseGet(HashSet::new); } catch (RuntimeException e) { log.log(Level.WARNING, "Error reading inactive jobs, deleting inactive state"); writeInactiveJobs(Collections.emptySet()); return new HashSet<>(); } } public void writeInactiveJobs(Set<String> inactiveJobs) { curator.set(inactiveJobsPath(), stringSetSerializer.toJson(inactiveJobs)); } public double readUpgradesPerMinute() { return read(upgradesPerMinutePath(), ByteBuffer::wrap).map(ByteBuffer::getDouble).orElse(0.125); } public void writeUpgradesPerMinute(double n) { curator.set(upgradesPerMinutePath(), ByteBuffer.allocate(Double.BYTES).putDouble(n).array()); } public Optional<Integer> readTargetMajorVersion() { return read(targetMajorVersionPath(), ByteBuffer::wrap).map(ByteBuffer::getInt); } public void writeTargetMajorVersion(Optional<Integer> targetMajorVersion) { if (targetMajorVersion.isPresent()) curator.set(targetMajorVersionPath(), ByteBuffer.allocate(Integer.BYTES).putInt(targetMajorVersion.get()).array()); else curator.delete(targetMajorVersionPath()); } public void writeVersionStatus(VersionStatus status) { curator.set(versionStatusPath(), asJson(versionStatusSerializer.toSlime(status))); } public VersionStatus readVersionStatus() { return readSlime(versionStatusPath()).map(versionStatusSerializer::fromSlime).orElseGet(VersionStatus::empty); } public void writeConfidenceOverrides(Map<Version, VespaVersion.Confidence> overrides) { curator.set(confidenceOverridesPath(), asJson(confidenceOverrideSerializer.toSlime(overrides))); } public Map<Version, VespaVersion.Confidence> readConfidenceOverrides() { return readSlime(confidenceOverridesPath()).map(confidenceOverrideSerializer::fromSlime) .orElseGet(Collections::emptyMap); } public void writeControllerVersion(HostName hostname, Version version) { curator.set(controllerPath(hostname.value()), asJson(versionSerializer.toSlime(version))); } public Version readControllerVersion(HostName hostname) { return readSlime(controllerPath(hostname.value())) .map(versionSerializer::fromSlime) .orElse(Vtag.currentVersion); } // Infrastructure upgrades public void writeOsVersions(Set<OsVersion> versions) { curator.set(osTargetVersionPath(), asJson(osVersionSerializer.toSlime(versions))); } public Set<OsVersion> readOsVersions() { return readSlime(osTargetVersionPath()).map(osVersionSerializer::fromSlime).orElseGet(Collections::emptySet); } public void writeOsVersionStatus(OsVersionStatus status) { curator.set(osVersionStatusPath(), asJson(osVersionStatusSerializer.toSlime(status))); } public OsVersionStatus readOsVersionStatus() { return readSlime(osVersionStatusPath()).map(osVersionStatusSerializer::fromSlime).orElse(OsVersionStatus.empty); } // -------------- Tenant -------------------------------------------------- public void 
writeTenant(Tenant tenant) { curator.set(tenantPath(tenant.name()), asJson(tenantSerializer.toSlime(tenant))); } public Optional<Tenant> readTenant(TenantName name) { return readSlime(tenantPath(name)).map(tenantSerializer::tenantFrom); } public List<Tenant> readTenants() { return readTenantNames().stream() .map(this::readTenant) .flatMap(Optional::stream) .collect(collectingAndThen(Collectors.toList(), Collections::unmodifiableList)); } public List<TenantName> readTenantNames() { return curator.getChildren(tenantRoot).stream() .map(TenantName::from) .collect(Collectors.toList()); } public void removeTenant(TenantName name) { curator.delete(tenantPath(name)); } // -------------- Application --------------------------------------------- public void writeApplication(Application application) { curator.set(applicationPath(application.id()), asJson(applicationSerializer.toSlime(application))); } public Optional<Application> readApplication(ApplicationId application) { return readSlime(applicationPath(application)).map(applicationSerializer::fromSlime); } public List<Application> readApplications() { return readApplications(ignored -> true); } public List<Application> readApplications(TenantName name) { return readApplications(application -> application.tenant().equals(name)); } private List<Application> readApplications(Predicate<ApplicationId> applicationFilter) { return curator.getChildren(applicationRoot).stream() .map(ApplicationId::fromSerializedForm) .filter(applicationFilter) .map(this::readApplication) .flatMap(Optional::stream) .collect(collectingAndThen(Collectors.toList(), Collections::unmodifiableList)); } public void removeApplication(ApplicationId application) { curator.delete(applicationPath(application)); } // -------------- Job Runs ------------------------------------------------ public void writeLastRun(Run run) { curator.set(lastRunPath(run.id().application(), run.id().type()), asJson(runSerializer.toSlime(run))); } public void writeHistoricRuns(ApplicationId id, JobType type, Iterable<Run> runs) { curator.set(runsPath(id, type), asJson(runSerializer.toSlime(runs))); } public Optional<Run> readLastRun(ApplicationId id, JobType type) { return readSlime(lastRunPath(id, type)).map(runSerializer::runFromSlime); } public SortedMap<RunId, Run> readHistoricRuns(ApplicationId id, JobType type) { return readSlime(runsPath(id, type)).map(runSerializer::runsFromSlime).orElse(new TreeMap<>(comparing(RunId::number))); } public void deleteRunData(ApplicationId id, JobType type) { curator.delete(runsPath(id, type)); curator.delete(lastRunPath(id, type)); } public void deleteRunData(ApplicationId id) { curator.delete(jobRoot.append(id.serializedForm())); } public List<ApplicationId> applicationsWithJobs() { return curator.getChildren(jobRoot).stream() .map(ApplicationId::fromSerializedForm) .collect(Collectors.toList()); } public Optional<byte[]> readLog(ApplicationId id, JobType type, long chunkId) { return curator.getData(logPath(id, type, chunkId)); } public void writeLog(ApplicationId id, JobType type, long chunkId, byte[] log) { curator.set(logPath(id, type, chunkId), log); } public void deleteLog(ApplicationId id, JobType type) { curator.delete(runsPath(id, type).append("logs")); } public Optional<Long> readLastLogEntryId(ApplicationId id, JobType type) { return curator.getData(lastLogPath(id, type)) .map(String::new).map(Long::parseLong); } public void writeLastLogEntryId(ApplicationId id, JobType type, long lastId) { curator.set(lastLogPath(id, type), 
Long.toString(lastId).getBytes()); } public LongStream getLogChunkIds(ApplicationId id, JobType type) { return curator.getChildren(runsPath(id, type).append("logs")).stream() .mapToLong(Long::parseLong) .sorted(); } // -------------- Audit log ----------------------------------------------- public AuditLog readAuditLog() { return readSlime(auditLogPath()).map(auditLogSerializer::fromSlime) .orElse(AuditLog.empty); } public void writeAuditLog(AuditLog log) { curator.set(auditLogPath(), asJson(auditLogSerializer.toSlime(log))); } // -------------- Name service log ---------------------------------------- public NameServiceQueue readNameServiceQueue() { return readSlime(nameServiceQueuePath()).map(nameServiceQueueSerializer::fromSlime) .orElse(NameServiceQueue.EMPTY); } public void writeNameServiceQueue(NameServiceQueue queue) { curator.set(nameServiceQueuePath(), asJson(nameServiceQueueSerializer.toSlime(queue))); } // -------------- Provisioning (called by internal code) ------------------ @SuppressWarnings("unused") public Optional<byte[]> readProvisionState(String provisionId) { return curator.getData(provisionStatePath(provisionId)); } @SuppressWarnings("unused") public void writeProvisionState(String provisionId, byte[] data) { curator.set(provisionStatePath(provisionId), data); } // -------------- Routing policies ---------------------------------------- public void writeRoutingPolicies(ApplicationId application, Set<RoutingPolicy> policies) { curator.set(routingPolicyPath(application), asJson(routingPolicySerializer.toSlime(policies))); } public Map<ApplicationId, Set<RoutingPolicy>> readRoutingPolicies() { return curator.getChildren(routingPoliciesRoot).stream() .map(ApplicationId::fromSerializedForm) .collect(Collectors.toUnmodifiableMap(Function.identity(), this::readRoutingPolicies)); } public Set<RoutingPolicy> readRoutingPolicies(ApplicationId application) { return readSlime(routingPolicyPath(application)).map(slime -> routingPolicySerializer.fromSlime(application, slime)) .orElseGet(Collections::emptySet); } // -------------- Paths --------------------------------------------------- private Path lockPath(TenantName tenant) { return lockRoot .append(tenant.value()); } private Path lockPath(ApplicationId application) { return lockPath(application.tenant()) .append(application.application().value()) .append(application.instance().value()); } private Path lockPath(ApplicationId application, ZoneId zone) { return lockPath(application) .append(zone.environment().value()) .append(zone.region().value()); } private Path lockPath(ApplicationId application, JobType type) { return lockPath(application) .append(type.jobName()); } private Path lockPath(ApplicationId application, JobType type, Step step) { return lockPath(application, type) .append(step.name()); } private Path lockPath(String provisionId) { return lockRoot .append(provisionStatePath()) .append(provisionId); } private static Path inactiveJobsPath() { return root.append("inactiveJobs"); } private static Path upgradesPerMinutePath() { return root.append("upgrader").append("upgradesPerMinute"); } private static Path targetMajorVersionPath() { return root.append("upgrader").append("targetMajorVersion"); } private static Path confidenceOverridesPath() { return root.append("upgrader").append("confidenceOverrides"); } private static Path osTargetVersionPath() { return root.append("osUpgrader").append("targetVersion"); } private static Path osVersionStatusPath() { return root.append("osVersionStatus"); } private static Path 
versionStatusPath() { return root.append("versionStatus"); } private static Path routingPolicyPath(ApplicationId application) { return routingPoliciesRoot.append(application.serializedForm()); } private static Path nameServiceQueuePath() { return root.append("nameServiceQueue"); } private static Path auditLogPath() { return root.append("auditLog"); } private static Path provisionStatePath() { return root.append("provisioning").append("states"); } private static Path provisionStatePath(String provisionId) { return provisionStatePath().append(provisionId); } private static Path tenantPath(TenantName name) { return tenantRoot.append(name.value()); } private static Path applicationPath(ApplicationId application) { return applicationRoot.append(application.serializedForm()); } private static Path runsPath(ApplicationId id, JobType type) { return jobRoot.append(id.serializedForm()).append(type.jobName()); } private static Path lastRunPath(ApplicationId id, JobType type) { return runsPath(id, type).append("last"); } private static Path logPath(ApplicationId id, JobType type, long first) { return runsPath(id, type).append("logs").append(Long.toString(first)); } private static Path lastLogPath(ApplicationId id, JobType type) { return runsPath(id, type).append("logs"); } private static Path controllerPath(String hostname) { return controllerRoot.append(hostname); } }
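The numeric read/write pairs above (readUpgradesPerMinute, readTargetMajorVersion, readLastLogEntryId) rely on two small encodings: fixed-width bytes via ByteBuffer for doubles and ints, and a decimal string for the long log-entry id. A minimal standalone sketch of those round-trips, assuming nothing beyond the JDK (class and variable names below are illustrative, not part of CuratorDb):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

class CuratorDbEncodingSketch {
    public static void main(String[] args) {
        // double <-> byte[], as in writeUpgradesPerMinute()/readUpgradesPerMinute()
        byte[] rate = ByteBuffer.allocate(Double.BYTES).putDouble(0.125).array();
        double upgradesPerMinute = ByteBuffer.wrap(rate).getDouble();

        // int <-> byte[], as in writeTargetMajorVersion()/readTargetMajorVersion()
        byte[] major = ByteBuffer.allocate(Integer.BYTES).putInt(7).array();
        int targetMajorVersion = ByteBuffer.wrap(major).getInt();

        // long <-> decimal string bytes, as in writeLastLogEntryId()/readLastLogEntryId()
        // (the production code uses the platform default charset; UTF-8 is used here explicitly)
        byte[] id = Long.toString(42L).getBytes(StandardCharsets.UTF_8);
        long lastLogEntryId = Long.parseLong(new String(id, StandardCharsets.UTF_8));

        System.out.println(upgradesPerMinute + " " + targetMajorVersion + " " + lastLogEntryId);
    }
}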
Temporarily delete the unused paths
controller-server/src/main/java/com/yahoo/vespa/hosted/controller/persistence/CuratorDb.java
Temporarily delete the unused paths
<ide><path>controller-server/src/main/java/com/yahoo/vespa/hosted/controller/persistence/CuratorDb.java <ide> CuratorDb(Curator curator, Duration tryLockTimeout) { <ide> this.curator = curator; <ide> this.tryLockTimeout = tryLockTimeout; <add> <add> // TODO: Remove after 7.60 <add> curator.delete(root.append("openStackServerPool")); <add> curator.delete(root.append("vespaServerPool")); <ide> } <ide> <ide> /** Returns all hosts configured to be part of this ZooKeeper cluster */
Java
apache-2.0
be58427a9fde0dbd05c4acfa38e0e600d84348d4
0
OpenHFT/Chronicle-Map
/* * Copyright (C) 2015 higherfrequencytrading.com * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package net.openhft.chronicle.map; import net.openhft.chronicle.algo.MemoryUnit; import net.openhft.chronicle.algo.hashing.LongHashFunction; import net.openhft.chronicle.bytes.Byteable; import net.openhft.chronicle.bytes.Bytes; import net.openhft.chronicle.bytes.BytesStore; import net.openhft.chronicle.core.Jvm; import net.openhft.chronicle.core.OS; import net.openhft.chronicle.hash.ChronicleHashBuilder; import net.openhft.chronicle.hash.ChronicleHashRecoveryFailedException; import net.openhft.chronicle.hash.impl.CompactOffHeapLinearHashTable; import net.openhft.chronicle.hash.impl.SizePrefixedBlob; import net.openhft.chronicle.hash.impl.VanillaChronicleHash; import net.openhft.chronicle.hash.impl.stage.entry.ChecksumStrategy; import net.openhft.chronicle.hash.impl.util.math.PoissonDistribution; import net.openhft.chronicle.hash.serialization.*; import net.openhft.chronicle.hash.serialization.impl.SerializationBuilder; import net.openhft.chronicle.map.replication.MapRemoteOperations; import net.openhft.chronicle.set.ChronicleSetBuilder; import net.openhft.chronicle.threads.NamedThreadFactory; import net.openhft.chronicle.values.ValueModel; import net.openhft.chronicle.wire.TextWire; import net.openhft.chronicle.wire.Wire; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static java.lang.Double.isNaN; import static java.lang.Math.round; import static java.nio.ByteOrder.LITTLE_ENDIAN; import static net.openhft.chronicle.bytes.NativeBytesStore.lazyNativeBytesStoreWithFixedCapacity; import static net.openhft.chronicle.core.Maths.*; import static net.openhft.chronicle.hash.impl.CompactOffHeapLinearHashTable.*; import static net.openhft.chronicle.hash.impl.SizePrefixedBlob.*; import static net.openhft.chronicle.hash.impl.util.FileIOUtils.readFully; import static net.openhft.chronicle.hash.impl.util.FileIOUtils.writeFully; import static net.openhft.chronicle.hash.impl.util.Objects.builderEquals; import static net.openhft.chronicle.map.DefaultSpi.mapEntryOperations; import static net.openhft.chronicle.map.DefaultSpi.mapRemoteOperations; import static net.openhft.chronicle.map.VanillaChronicleMap.alignAddr; /** * {@code ChronicleMapBuilder} manages {@link ChronicleMap} configurations; could be used as a * classic builder and/or factory. 
This means that in addition to the standard builder usage * pattern: <pre>{@code * ChronicleMap<Key, Value> map = ChronicleMapBuilder * .of(Key.class, Value.class) * // ... other configurations * .create();}</pre> * it could be prepared and used to create many similar maps: <pre>{@code * ChronicleMapBuilder<Key, Value> builder = ChronicleMapBuilder * .of(Key.class, Value.class) * .entries(..) * // ... other configurations * * ChronicleMap<Key, Value> map1 = builder.create(); * ChronicleMap<Key, Value> map2 = builder.create();}</pre> * i. e. created {@code ChronicleMap} instances don't depend on the builder. * * <p>{@code ChronicleMapBuilder} is mutable, see a note in {@link ChronicleHashBuilder} interface * documentation. * * <p>Later in this documentation, "ChronicleMap" means "ChronicleMaps, created by {@code * ChronicleMapBuilder}", unless specified otherwise, because theoretically someone might provide * {@code ChronicleMap} implementations with completely different properties. * * <p>In addition to the key and value types, you <i>must</i> configure the {@linkplain #entries(long) * number of entries} you are going to insert into the created map <i>at most</i>. See the {@link * #entries(long)} method documentation for more information on this. * * <p>If your key or value type is not constantly sized and known to {@code ChronicleHashBuilder}, i. * e. it is not a boxed primitive, {@linkplain net.openhft.chronicle.values.Values value interface}, * or {@link Byteable}, you <i>must</i> provide the {@code ChronicleHashBuilder} with some * information about your keys or values: if they are constantly-sized, call {@link * #constantKeySizeBySample(Object)}, otherwise call the {@link #averageKey(Object)} or {@link * #averageKeySize(double)} method, and accordingly for values. * * @param <K> key type of the maps, produced by this builder * @param <V> value type of the maps, produced by this builder * @see ChronicleHashBuilder * @see ChronicleMap * @see ChronicleSetBuilder */ public final class ChronicleMapBuilder<K, V> implements ChronicleHashBuilder<K, ChronicleMap<K, V>, ChronicleMapBuilder<K, V>> { private static final int UNDEFINED_ALIGNMENT_CONFIG = -1; private static final int NO_ALIGNMENT = 1; /** * If you want to increase this number, note that {@link OldDeletedEntriesCleanup} uses an array to store * all segment indexes -- so the limit could be the current JVM max array size, not Integer.MAX_VALUE * (which is an obvious limitation, as many APIs and internals use the int type for representing * a segment index). * * Anyway, it is unlikely that anyone will ever need more than 1 billion segments.
*/ private static final int MAX_SEGMENTS = (1 << 30); private static final Logger LOG = LoggerFactory.getLogger(ChronicleMapBuilder.class.getName()); private static final double UNDEFINED_DOUBLE_CONFIG = Double.NaN; private static boolean isDefined(double config) { return !isNaN(config); } private static long toLong(double v) { long l = round(v); if (l != v) throw new IllegalArgumentException("Integer argument expected, given " + v); return l; } private static long roundUp(double v) { return round(Math.ceil(v)); } private static long roundDown(double v) { return (long) v; } private static final int MAX_BOOTSTRAPPING_HEADER_SIZE = (int) MemoryUnit.KILOBYTES.toBytes(16); private static final ConcurrentHashMap<File, ChronicleMap> concurrentPersistedChronicleMapCreationControl = new ConcurrentHashMap<>(128); interface CreateMap<K, V> { ChronicleMap<K, V> createMap() throws IOException; } /** * When Chronicle Maps are created using the {@link #createPersistedTo(File)} or * {@link #recoverPersistedTo(File, boolean)} or {@link #createOrRecoverPersistedTo(File)} * methods, a file lock on the Chronicle Map's file is acquired, which shouldn't be done from * concurrent threads within the same JVM process. So creation of Chronicle Maps * persisted to the same File should be synchronized across the JVM's threads. A simple way would be * to synchronize on some static (lock) object, but that would serialize all Chronicle Map creations * (persisted to any files); ConcurrentHashMap#compute() gives more scalability. * The ConcurrentHashMap is effectively used for lock striping only; the entries are removed * immediately after compute() returns. */ private static <K, V> ChronicleMap<K, V> createMapFileSynchronized( File file, CreateMap<K, V> createMap) throws IOException { ChronicleMap map = concurrentPersistedChronicleMapCreationControl.compute(file, (k, v) -> { try { return createMap.createMap(); } catch (IOException e) { throw Jvm.rethrow(e); } }); concurrentPersistedChronicleMapCreationControl.remove(file); //noinspection unchecked return map; } // not final because of cloning private ChronicleMapBuilderPrivateAPI<K, V> privateAPI = new ChronicleMapBuilderPrivateAPI<>(this); ////////////////////////////// // Configuration fields SerializationBuilder<K> keyBuilder; SerializationBuilder<V> valueBuilder; // used when configuring the number of segments. private int minSegments = -1; private int actualSegments = -1; // used when reading the number of entries per segment private long entriesPerSegment = -1L; private long actualChunksPerSegmentTier = -1L; private double averageKeySize = UNDEFINED_DOUBLE_CONFIG; K averageKey; private K sampleKey; private double averageValueSize = UNDEFINED_DOUBLE_CONFIG; V averageValue; private V sampleValue; private int actualChunkSize = 0; private int worstAlignment = -1; private int maxChunksPerEntry = -1; private int alignment = UNDEFINED_ALIGNMENT_CONFIG; private long entries = -1L; private double maxBloatFactor = 1.0; private boolean allowSegmentTiering = true; private double nonTieredSegmentsPercentile = 0.99999; private boolean aligned64BitMemoryOperationsAtomic = OS.is64Bit(); enum ChecksumEntries {YES, NO, IF_PERSISTED} private ChecksumEntries checksumEntries = ChecksumEntries.IF_PERSISTED; private boolean putReturnsNull = false; private boolean removeReturnsNull = false; /** * The default timeout is 1 minute. Even loopback tests often converge in the course of seconds, * and WAN replication over many nodes might take tens of seconds.
 * * TODO review */ long cleanupTimeout = 1; TimeUnit cleanupTimeoutUnit = TimeUnit.MINUTES; boolean cleanupRemovedEntries = true; DefaultValueProvider<K, V> defaultValueProvider = DefaultSpi.defaultValueProvider(); byte replicationIdentifier = -1; MapMethods<K, V, ?> methods = DefaultSpi.mapMethods(); MapEntryOperations<K, V, ?> entryOperations = mapEntryOperations(); MapRemoteOperations<K, V, ?> remoteOperations = mapRemoteOperations(); ////////////////////////////// // Instance fields private boolean replicated; private boolean persisted; ChronicleMapBuilder(Class<K> keyClass, Class<V> valueClass) { keyBuilder = new SerializationBuilder<>(keyClass); valueBuilder = new SerializationBuilder<>(valueClass); } /** * Returns a new {@code ChronicleMapBuilder} instance which is able to {@linkplain #create() * create} maps with the specified key and value classes. * * @param keyClass class object used to infer the key type and discover its properties via * reflection * @param valueClass class object used to infer the value type and discover its properties via * reflection * @param <K> key type of the maps, created by the returned builder * @param <V> value type of the maps, created by the returned builder * @return a new builder for the given key and value classes */ public static <K, V> ChronicleMapBuilder<K, V> of( @NotNull Class<K> keyClass, @NotNull Class<V> valueClass) { return new ChronicleMapBuilder<>(keyClass, valueClass); } private static void checkSegments(long segments) { if (segments <= 0) { throw new IllegalArgumentException("segments should be positive, " + segments + " given"); } if (segments > MAX_SEGMENTS) { throw new IllegalArgumentException("Max segments is " + MAX_SEGMENTS + ", " + segments + " given"); } } private static String pretty(int value) { return value > 0 ? value + "" : "not configured"; } private static String pretty(Object obj) { return obj != null ? obj + "" : "not configured"; } @Override public ChronicleMapBuilder<K, V> clone() { try { @SuppressWarnings("unchecked") ChronicleMapBuilder<K, V> result = (ChronicleMapBuilder<K, V>) super.clone(); result.keyBuilder = keyBuilder.clone(); result.valueBuilder = valueBuilder.clone(); result.privateAPI = new ChronicleMapBuilderPrivateAPI<>(result); return result; } catch (CloneNotSupportedException e) { throw new AssertionError(e); } } /** * @deprecated don't use the private API in client code */ @Override @Deprecated public Object privateAPI() { return privateAPI; } /** * {@inheritDoc} * * <p>Example: if keys in your map(s) are English words in {@link String} form, and the average English * word length is 5.1, configure an average key size of 6: <pre>{@code * ChronicleMap<String, LongValue> wordFrequencies = ChronicleMapBuilder * .of(String.class, LongValue.class) * .entries(50000) * .averageKeySize(6) * .create();}</pre> * (Note that 6 is chosen as the average key size in bytes even though strings in Java are UTF-16 * encoded (and each character takes 2 bytes on-heap), because the default off-heap {@link String} * encoding is UTF-8 in {@code ChronicleMap}.)
 * * @param averageKeySize the average size of the key * @throws IllegalStateException {@inheritDoc} * @throws IllegalArgumentException {@inheritDoc} * @see #averageKey(Object) * @see #constantKeySizeBySample(Object) * @see #averageValueSize(double) * @see #actualChunkSize(int) */ @Override public ChronicleMapBuilder<K, V> averageKeySize(double averageKeySize) { checkSizeIsStaticallyKnown(keyBuilder, "Key"); checkAverageSize(averageKeySize, "key"); this.averageKeySize = averageKeySize; averageKey = null; sampleKey = null; return this; } /** * {@inheritDoc} * * @param averageKey the average (by footprint in serialized form) key that is going to be put * into the hash containers created by this builder * @throws NullPointerException {@inheritDoc} * @see #averageKeySize(double) * @see #constantKeySizeBySample(Object) * @see #averageValue(Object) * @see #actualChunkSize(int) */ @Override public ChronicleMapBuilder<K, V> averageKey(K averageKey) { Objects.requireNonNull(averageKey); checkSizeIsStaticallyKnown(keyBuilder, "Key"); this.averageKey = averageKey; sampleKey = null; averageKeySize = UNDEFINED_DOUBLE_CONFIG; return this; } /** * {@inheritDoc} * * <p>For example, if your keys are Git commit hashes:<pre>{@code * Map<byte[], String> gitCommitMessagesByHash = * ChronicleMapBuilder.of(byte[].class, String.class) * .constantKeySizeBySample(new byte[20]) * .create();}</pre> * * @see #averageKeySize(double) * @see #averageKey(Object) * @see #constantValueSizeBySample(Object) */ @Override public ChronicleMapBuilder<K, V> constantKeySizeBySample(K sampleKey) { this.sampleKey = sampleKey; averageKey = null; averageKeySize = UNDEFINED_DOUBLE_CONFIG; return this; } private double averageKeySize() { if (!isDefined(averageKeySize)) throw new AssertionError(); return averageKeySize; } /** * Configures the average number of bytes, taken by the serialized form of values, put into maps, * created by this builder. However, in many cases {@link #averageValue(Object)} might be easier * to use and more reliable. If the value size is always the same, call the {@link * #constantValueSizeBySample(Object)} method instead of this one. * * <p>The {@code ChronicleHashBuilder} implementation heuristically chooses {@linkplain * #actualChunkSize(int) the actual chunk size} based on this configuration and the key size, * which, however, might result in quite high internal fragmentation, i. e. losses, because only * an integral number of chunks could be allocated for the entry. If you want to avoid this, you * should manually configure the actual chunk size in addition to this average value size * configuration, which is anyway needed. * * <p>If values are of a boxed primitive type or a {@link Byteable} subclass, i. e. if the value size is * known statically, it is accounted for automatically and shouldn't be specified by the user. * * <p>Calling this method clears any previous {@link #constantValueSizeBySample(Object)} and * {@link #averageValue(Object)} configurations.
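 * * <p>For illustration only (a hypothetical sizing sketch; the numbers below are not prescribed by this javadoc), a map whose values serialize to roughly 50 bytes on average could be configured as: <pre>{@code * ChronicleMap<String, String> map = ChronicleMapBuilder * .of(String.class, String.class) * .entries(10_000) * .averageKeySize(10) * .averageValueSize(50) * .create();}</pre>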
 * * @param averageValueSize number of bytes, taken by the serialized form of values * @return this builder back * @throws IllegalStateException if the value size is known statically and shouldn't be * configured by the user * @throws IllegalArgumentException if the given {@code averageValueSize} is non-positive * @see #averageValue(Object) * @see #constantValueSizeBySample(Object) * @see #averageKeySize(double) * @see #actualChunkSize(int) */ public ChronicleMapBuilder<K, V> averageValueSize(double averageValueSize) { checkSizeIsStaticallyKnown(valueBuilder, "Value"); checkAverageSize(averageValueSize, "value"); this.averageValueSize = averageValueSize; averageValue = null; sampleValue = null; return this; } /** * Configures the average number of bytes, taken by the serialized form of values, put into maps, * created by this builder, by serializing the given {@code averageValue} using the configured * {@link #valueMarshallers(SizedReader, SizedWriter) value marshallers}. In some cases, {@link * #averageValueSize(double)} might be easier to use than constructing the "average value". * If the value size is always the same, call the {@link #constantValueSizeBySample(Object)} method * instead of this one. * * <p>Example: if your values are strings of varying length, you could pass a string of typical * length as the {@code averageValue}. * * <p>The {@code ChronicleHashBuilder} implementation heuristically chooses {@linkplain * #actualChunkSize(int) the actual chunk size} based on this configuration and the key size, * which, however, might result in quite high internal fragmentation, i. e. losses, because only * an integral number of chunks could be allocated for the entry. If you want to avoid this, you * should manually configure the actual chunk size in addition to this average value size * configuration, which is anyway needed. * * <p>If values are of a boxed primitive type or a {@link Byteable} subclass, i. e. if the value size is * known statically, it is accounted for automatically and shouldn't be specified by the user. * * <p>Calling this method clears any previous {@link #constantValueSizeBySample(Object)} * and {@link #averageValueSize(double)} configurations. * * @param averageValue the average (by footprint in serialized form) value that is going to be put * into the maps created by this builder * @return this builder back * @throws NullPointerException if the given {@code averageValue} is {@code null} * @see #averageValueSize(double) * @see #constantValueSizeBySample(Object) * @see #averageKey(Object) * @see #actualChunkSize(int) */ public ChronicleMapBuilder<K, V> averageValue(V averageValue) { Objects.requireNonNull(averageValue); checkSizeIsStaticallyKnown(valueBuilder, "Value"); this.averageValue = averageValue; sampleValue = null; averageValueSize = UNDEFINED_DOUBLE_CONFIG; return this; } private static void checkSizeIsStaticallyKnown(SerializationBuilder builder, String role) { if (builder.sizeIsStaticallyKnown) { throw new IllegalStateException("Size of " + builder.tClass + " instances is constant and statically known, shouldn't be specified via " + "average" + role + "Size() or average" + role + "() methods"); } } private static void checkAverageSize(double averageSize, String role) { if (averageSize <= 0 || isNaN(averageSize) || Double.isInfinite(averageSize)) { throw new IllegalArgumentException("Average " + role + " size must be a positive, " + "finite number"); } } /** * Configures the constant number of bytes, taken by the serialized form of values, put into maps, * created by this builder.
This is done by providing the {@code sampleValue}; all values should * take the same number of bytes in serialized form as this sample object. * * <p>If values are of a boxed primitive type or a {@link Byteable} subclass, i. e. if the value size is * known statically, it is accounted for automatically and this method shouldn't be called. * * <p>If the value size varies, the method {@link #averageValue(Object)} or {@link * #averageValueSize(double)} should be called instead of this one. * * <p>Calling this method clears any previous {@link #averageValue(Object)} and * {@link #averageValueSize(double)} configurations. * * @param sampleValue the sample value * @return this builder back * @see #averageValueSize(double) * @see #averageValue(Object) * @see #constantKeySizeBySample(Object) */ public ChronicleMapBuilder<K, V> constantValueSizeBySample(V sampleValue) { this.sampleValue = sampleValue; averageValue = null; averageValueSize = UNDEFINED_DOUBLE_CONFIG; return this; } double averageValueSize() { if (!isDefined(averageValueSize)) throw new AssertionError(); return averageValueSize; } private <E> double averageKeyOrValueSize( double configuredSize, SerializationBuilder<E> builder, E average) { if (isDefined(configuredSize)) return configuredSize; if (builder.constantSizeMarshaller()) return builder.constantSize(); if (average != null) { return builder.serializationSize(average); } return Double.NaN; } /** * {@inheritDoc} * * @throws IllegalStateException if sizes of both keys and values of maps created by this * builder are constant, hence the chunk size shouldn't be configured by the user * @see #entryAndValueOffsetAlignment(int) * @see #entries(long) * @see #maxChunksPerEntry(int) */ @Override public ChronicleMapBuilder<K, V> actualChunkSize(int actualChunkSize) { if (constantlySizedEntries()) { throw new IllegalStateException("Sizes of key type: " + keyBuilder.tClass + " and " + "value type: " + valueBuilder.tClass + " are both constant, " + "so chunk size shouldn't be specified manually"); } if (actualChunkSize <= 0) throw new IllegalArgumentException("Chunk size must be positive"); this.actualChunkSize = actualChunkSize; return this; } SerializationBuilder<K> keyBuilder() { return keyBuilder; } static class EntrySizeInfo { final double averageEntrySize; final int worstAlignment; public EntrySizeInfo(double averageEntrySize, int worstAlignment) { this.averageEntrySize = averageEntrySize; this.worstAlignment = worstAlignment; } } private EntrySizeInfo entrySizeInfo() { double size = 0; double keySize = averageKeySize(); size += averageSizeStoringLength(keyBuilder, keySize); size += keySize; if (replicated) size += ReplicatedChronicleMap.ADDITIONAL_ENTRY_BYTES; if (checksumEntries()) size += ChecksumStrategy.CHECKSUM_STORED_BYTES; double valueSize = averageValueSize(); size += averageSizeStoringLength(valueBuilder, valueSize); int alignment = valueAlignment(); int worstAlignment; if (worstAlignmentComputationRequiresValueSize(alignment)) { long constantSizeBeforeAlignment = toLong(size); if (constantlySizedValues()) { // see tierEntrySpaceInnerOffset() long totalDataSize = constantSizeBeforeAlignment + constantValueSize(); worstAlignment = (int) (alignAddr(totalDataSize, alignment) - totalDataSize); } else { determineAlignment: if (actualChunkSize > 0) { worstAlignment = worstAlignmentAssumingChunkSize(constantSizeBeforeAlignment, actualChunkSize); } else { int chunkSize = 8; worstAlignment = worstAlignmentAssumingChunkSize( constantSizeBeforeAlignment, chunkSize); if (size + worstAlignment + valueSize >=
maxDefaultChunksPerAverageEntry(replicated) * chunkSize) { break determineAlignment; } chunkSize = 4; worstAlignment = worstAlignmentAssumingChunkSize( constantSizeBeforeAlignment, chunkSize); } } } else { // assume the worst case: we always lose the most possible bytes to alignment worstAlignment = worstAlignmentWithoutValueSize(alignment); } size += worstAlignment; size += valueSize; return new EntrySizeInfo(size, worstAlignment); } private boolean worstAlignmentComputationRequiresValueSize(int alignment) { return alignment != NO_ALIGNMENT && constantlySizedKeys() && valueBuilder.constantStoringLengthSizeMarshaller(); } private int worstAlignmentWithoutValueSize(int alignment) { return alignment - 1; } int segmentEntrySpaceInnerOffset() { // This is needed if chunkSize = constant entry size is not aligned: for the entry alignment // to always be the same, we should _misalign_ the first chunk. if (!constantlySizedEntries()) return 0; return (int) (constantValueSize() % valueAlignment()); } private long constantValueSize() { return valueBuilder.constantSize(); } boolean constantlySizedKeys() { return keyBuilder.constantSizeMarshaller() || sampleKey != null; } private static double averageSizeStoringLength( SerializationBuilder builder, double averageSize) { SizeMarshaller sizeMarshaller = builder.sizeMarshaller(); if (averageSize == round(averageSize)) return sizeMarshaller.storingLength(round(averageSize)); long lower = roundDown(averageSize); long upper = lower + 1; int lowerStoringLength = sizeMarshaller.storingLength(lower); int upperStoringLength = sizeMarshaller.storingLength(upper); if (lowerStoringLength == upperStoringLength) return lowerStoringLength; // interpolate between the storing lengths of the neighbouring integer sizes return lowerStoringLength * (upper - averageSize) + upperStoringLength * (averageSize - lower); } private int worstAlignmentAssumingChunkSize( long constantSizeBeforeAlignment, int chunkSize) { int alignment = valueAlignment(); long firstAlignment = alignAddr(constantSizeBeforeAlignment, alignment) - constantSizeBeforeAlignment; int gcdOfAlignmentAndChunkSize = greatestCommonDivisor(alignment, chunkSize); if (gcdOfAlignmentAndChunkSize == alignment) return (int) firstAlignment; // assume the worst for now, because we cannot predict the alignment in the VanillaCM.entrySize() // method before allocation long worstAlignment = firstAlignment; while (worstAlignment + gcdOfAlignmentAndChunkSize < alignment) worstAlignment += gcdOfAlignmentAndChunkSize; return (int) worstAlignment; } int worstAlignment() { if (worstAlignment >= 0) return worstAlignment; int alignment = valueAlignment(); if (!worstAlignmentComputationRequiresValueSize(alignment)) return worstAlignment = worstAlignmentWithoutValueSize(alignment); return worstAlignment = entrySizeInfo().worstAlignment; } void worstAlignment(int worstAlignment) { assert worstAlignment >= 0; this.worstAlignment = worstAlignment; } static int greatestCommonDivisor(int a, int b) { if (b == 0) return a; return greatestCommonDivisor(b, a % b); } long chunkSize() { if (actualChunkSize > 0) return actualChunkSize; double averageEntrySize = entrySizeInfo().averageEntrySize; if (constantlySizedEntries()) return toLong(averageEntrySize); int maxChunkSize = 1 << 30; for (long chunkSize = 4; chunkSize <= maxChunkSize; chunkSize *= 2L) { if (maxDefaultChunksPerAverageEntry(replicated) * chunkSize > averageEntrySize) return chunkSize; } return maxChunkSize; } boolean constantlySizedEntries() { return constantlySizedKeys() && constantlySizedValues(); } double averageChunksPerEntry() { if (constantlySizedEntries()) return 1.0; long chunkSize = chunkSize();
// assuming we always have the worst internal fragmentation. This affects the total segment // entry space, which is allocated lazily on Linux (the main target platform), // so we can afford this return (entrySizeInfo().averageEntrySize + chunkSize - 1) / chunkSize; } private static int maxDefaultChunksPerAverageEntry(boolean replicated) { // When replicated, having 8 chunks (=> 8 bits in bitsets) per entry seems more wasteful // because when replicated we have bit sets per each remote node, not only the allocation // bit set as when non-replicated return replicated ? 4 : 8; } @Override public ChronicleMapBuilder<K, V> maxChunksPerEntry(int maxChunksPerEntry) { if (maxChunksPerEntry < 1) throw new IllegalArgumentException("maxChunksPerEntry should be >= 1, " + maxChunksPerEntry + " given"); this.maxChunksPerEntry = maxChunksPerEntry; return this; } int maxChunksPerEntry() { if (constantlySizedEntries()) return 1; long actualChunksPerSegmentTier = actualChunksPerSegmentTier(); int result = (int) Math.min(actualChunksPerSegmentTier, (long) Integer.MAX_VALUE); if (this.maxChunksPerEntry > 0) result = Math.min(this.maxChunksPerEntry, result); return result; } boolean constantlySizedValues() { return valueBuilder.constantSizeMarshaller() || sampleValue != null; } /** * Configures the alignment of the address in memory of entries and, independently, of the address in memory * of values within entries (i. e. final addresses in native memory are multiples of the given * alignment) for ChronicleMaps, created by this builder. * * <p>Useful when values of the map are updated intensively, particularly fields with volatile * access, because it doesn't work well if the value crosses cache lines. Also, on some * (nowadays rare) architectures any misaligned memory access is more expensive than an aligned one. * * <p>If values couldn't reference off-heap memory (i. e. the value type is not {@link Byteable} or a value * interface), alignment configuration makes no sense. * * <p>Default is {@link ValueModel#recommendedOffsetAlignment()} if the value type is a value * interface, otherwise 1 (that is effectively no alignment) or chosen heuristically (configure * explicitly for being sure and to compare performance in your case). * * @param alignment the new alignment of the maps constructed by this builder * @return this {@code ChronicleMapBuilder} back * @throws IllegalStateException if values of maps, created by this builder, couldn't reference * off-heap memory */ public ChronicleMapBuilder<K, V> entryAndValueOffsetAlignment(int alignment) { if (alignment <= 0) { throw new IllegalArgumentException("Alignment should be a positive integer, " + alignment + " given"); } if (!isPowerOf2(alignment)) { throw new IllegalArgumentException("Alignment should be a power of 2, " + alignment + " given"); } this.alignment = alignment; return this; } int valueAlignment() { if (alignment != UNDEFINED_ALIGNMENT_CONFIG) return alignment; try { return ValueModel.acquire(valueBuilder.tClass).recommendedOffsetAlignment(); } catch (Exception e) { return NO_ALIGNMENT; } } @Override public ChronicleMapBuilder<K, V> entries(long entries) { if (entries <= 0L) throw new IllegalArgumentException("Entries should be positive, " + entries + " given"); this.entries = entries; return this; } long entries() { if (entries < 0) { throw new IllegalStateException("If in-memory Chronicle Map is created or persisted\n" + "to a file for the first time (i. e.
not accessing existing file),\n" + "ChronicleMapBuilder.entries() must be configured.\n" + "See Chronicle Map 3 tutorial and javadocs for more information"); } return entries; } @Override public ChronicleMapBuilder<K, V> entriesPerSegment(long entriesPerSegment) { if (entriesPerSegment <= 0L) throw new IllegalArgumentException("Entries per segment should be positive, " + entriesPerSegment + " given"); this.entriesPerSegment = entriesPerSegment; return this; } long entriesPerSegment() { long entriesPerSegment; if (this.entriesPerSegment > 0L) { entriesPerSegment = this.entriesPerSegment; } else { int actualSegments = actualSegments(); double averageEntriesPerSegment = entries() * 1.0 / actualSegments; if (actualSegments > 1) { entriesPerSegment = PoissonDistribution.inverseCumulativeProbability( averageEntriesPerSegment, nonTieredSegmentsPercentile); } else { // if there is only 1 segment, there is no source of variance in segment filling entriesPerSegment = roundUp(averageEntriesPerSegment); } } boolean actualChunksDefined = actualChunksPerSegmentTier > 0; if (!actualChunksDefined) { double averageChunksPerEntry = averageChunksPerEntry(); if (entriesPerSegment * averageChunksPerEntry > MAX_TIER_CHUNKS) throw new IllegalStateException("Max chunks per segment tier is " + MAX_TIER_CHUNKS + ", but configured entries() and actualSegments() imply that " + "there should be " + entriesPerSegment + " entries per segment tier, " + "while average chunks per entry is " + averageChunksPerEntry); } if (entriesPerSegment > MAX_TIER_ENTRIES) throw new IllegalStateException("shouldn't be more than " + MAX_TIER_ENTRIES + " entries per segment"); return entriesPerSegment; } @Override public ChronicleMapBuilder<K, V> actualChunksPerSegmentTier(long actualChunksPerSegmentTier) { if (actualChunksPerSegmentTier <= 0 || actualChunksPerSegmentTier > MAX_TIER_CHUNKS) throw new IllegalArgumentException("Actual chunks per segment tier should be in the [1, " + MAX_TIER_CHUNKS + "] range, " + actualChunksPerSegmentTier + " given"); this.actualChunksPerSegmentTier = actualChunksPerSegmentTier; return this; } private void checkActualChunksPerSegmentTierIsConfiguredOnlyIfOtherLowLevelConfigsAreManual() { if (actualChunksPerSegmentTier > 0) { if (entriesPerSegment <= 0 || (actualChunkSize <= 0 && !constantlySizedEntries()) || actualSegments <= 0) throw new IllegalStateException("Actual chunks per segment tier can be " + "configured only if the other three low-level configs are manual: " + "entriesPerSegment(), actualSegments() and actualChunkSize(), unless " + "both keys and value sizes are constant"); } } private void checkActualChunksPerSegmentGreaterOrEqualToEntries() { if (actualChunksPerSegmentTier > 0 && entriesPerSegment > 0 && entriesPerSegment > actualChunksPerSegmentTier) { throw new IllegalStateException("Entries per segment cannot be greater than " + "actual chunks per segment tier.
Entries: " + entriesPerSegment + ", " + "chunks: " + actualChunksPerSegmentTier + " is configured"); } } long actualChunksPerSegmentTier() { if (actualChunksPerSegmentTier > 0) return actualChunksPerSegmentTier; return chunksPerSegmentTier(entriesPerSegment()); } private long chunksPerSegmentTier(long entriesPerSegment) { return roundUp(entriesPerSegment * averageChunksPerEntry()); } @Override public ChronicleMapBuilder<K, V> minSegments(int minSegments) { checkSegments(minSegments); this.minSegments = minSegments; return this; } int minSegments() { return Math.max(estimateSegments(), minSegments); } private int estimateSegments() { return (int) Math.min(nextPower2(entries() / 32, 1), estimateSegmentsBasedOnSize()); } //TODO review because this heuristic doesn't seem to perform well private int estimateSegmentsBasedOnSize() { // the idea is that if values are huge, operations on them (and simply ser/deser) // could take long time, so we want more segment to minimize probablity that // two or more concurrent write ops will go to the same segment, and then all but one of // these threads will wait for long time. int segmentsForEntries = estimateSegmentsForEntries(entries()); double averageValueSize = averageValueSize(); return averageValueSize >= 1000000 ? segmentsForEntries * 16 : averageValueSize >= 100000 ? segmentsForEntries * 8 : averageValueSize >= 10000 ? segmentsForEntries * 4 : averageValueSize >= 1000 ? segmentsForEntries * 2 : segmentsForEntries; } private static int estimateSegmentsForEntries(long size) { if (size > 200 << 20) return 256; if (size >= 1 << 20) return 128; if (size >= 128 << 10) return 64; if (size >= 16 << 10) return 32; if (size >= 4 << 10) return 16; if (size >= 1 << 10) return 8; return 1; } @Override public ChronicleMapBuilder<K, V> actualSegments(int actualSegments) { checkSegments(actualSegments); this.actualSegments = actualSegments; return this; } int actualSegments() { if (actualSegments > 0) return actualSegments; if (entriesPerSegment > 0) { return (int) segmentsGivenEntriesPerSegmentFixed(entriesPerSegment); } // Try to fit 4 bytes per hash lookup slot, then 8. Trying to apply small slot // size (=> segment size, because slot size depends on segment size) not only because // they take less memory per entry (if entries are of KBs or MBs, it doesn't matter), but // also because if segment size is small, slot and free list are likely to lie on a single // memory page, reducing number of memory pages to update, if Chronicle Map is persisted. // Actually small segments are all ways better: many segments => better parallelism, lesser // pauses for per-key operations, if parallel/background operation blocks the segment for // the whole time while it operates on it (like iteration, probably replication background // thread will require some level of full segment lock, however currently if doesn't, in // future durability background thread could update slot states), because smaller segments // contain less entries/slots and are processed faster. // // The only problem with small segments is that due to probability theory, if there are // a lot of segments each of little number of entries, difference between most filled // and least filled segment in the Chronicle Map grows. (Number of entries in a segment is // Poisson-distributed with mean = average number of entries per segment.) It is meaningful, // because segment tiering is exceptional mechanism, only very few segments should be // tiered, if any, normally. 
So, we are required to allocate unnecessarily many entries per // each segment. To compensate for this, at least on Linux, don't accept segment sizes that, with // the given entry sizes, lead to too small total segment sizes in native memory pages; // see the comment in tryHashLookupSlotSize() long segments = tryHashLookupSlotSize(4); if (segments > 0) return (int) segments; int maxHashLookupEntrySize = aligned64BitMemoryOperationsAtomic() ? 8 : 4; long maxEntriesPerSegment = findMaxEntriesPerSegmentToFitHashLookupSlotSize(maxHashLookupEntrySize); long maxSegments = trySegments(maxEntriesPerSegment, MAX_SEGMENTS); if (maxSegments > 0L) return (int) maxSegments; throw new IllegalStateException("Max segments is " + MAX_SEGMENTS + ", so many" + " entries (" + entries() + ") are configured, or average chunks per entry is so high (" + averageChunksPerEntry() + "), that the builder automatically decided to use " + (-maxSegments) + " segments"); } private long tryHashLookupSlotSize(int hashLookupSlotSize) { long entriesPerSegment = findMaxEntriesPerSegmentToFitHashLookupSlotSize( hashLookupSlotSize); long entrySpaceSize = roundUp(entriesPerSegment * entrySizeInfo().averageEntrySize); // Not to lose too much on Linux because of "poor distribution" entry over-allocation. // This condition should filter out cases when we target a very small hash lookup // size and the entry size is small. // * 5 => the segment will lose no more than 20% of memory, 10% on average if (entrySpaceSize < OS.pageSize() * 5L) return -1; return trySegments(entriesPerSegment, MAX_SEGMENTS); } private long findMaxEntriesPerSegmentToFitHashLookupSlotSize( int targetHashLookupSlotSize) { long entriesPerSegment = 1L << 62; long step = entriesPerSegment / 2L; while (step > 0L) { if (hashLookupSlotBytes(entriesPerSegment) > targetHashLookupSlotSize) entriesPerSegment -= step; step /= 2L; } return entriesPerSegment - 1L; } private int hashLookupSlotBytes(long entriesPerSegment) { int valueBits = valueBits(chunksPerSegmentTier(entriesPerSegment)); int keyBits = keyBits(entriesPerSegment, valueBits); return entrySize(keyBits, valueBits); } private long trySegments(long entriesPerSegment, int maxSegments) { long segments = segmentsGivenEntriesPerSegmentFixed(entriesPerSegment); segments = nextPower2(Math.max(segments, minSegments()), 1L); return segments <= maxSegments ? segments : -segments; } private long segmentsGivenEntriesPerSegmentFixed(long entriesPerSegment) { double precision = 1.0 / averageChunksPerEntry(); long entriesPerSegmentShouldBe = roundDown(PoissonDistribution.meanByCumulativeProbabilityAndValue( nonTieredSegmentsPercentile, entriesPerSegment, precision)); long segments = divideRoundUp(entries(), entriesPerSegmentShouldBe); checkSegments(segments); if (minSegments > 0) segments = Math.max(minSegments, segments); return segments; } long tierHashLookupCapacity() { long entriesPerSegment = entriesPerSegment(); long capacity = CompactOffHeapLinearHashTable.capacityFor(entriesPerSegment); if (actualSegments() > 1) { // if there is only 1 segment, there is no source of variance in segment filling long maxEntriesPerTier = PoissonDistribution.inverseCumulativeProbability( entriesPerSegment, nonTieredSegmentsPercentile); while (maxEntriesPerTier > MAX_LOAD_FACTOR * capacity) { capacity *= 2; } } return capacity; } int segmentHeaderSize() { int segments = actualSegments(); long pageSize = 4096; if (segments * (64 * 3) < (2 * pageSize)) // i. e.
<= 42 segments return 64 * 3; // cache line per header, plus one CL to the left, plus one to the right if (segments * (64 * 2) < (3 * pageSize)) // i. e. <= 96 segments return 64 * 2; // reduce false sharing unless we have a lot of segments. return segments <= 16 * 1024 ? 64 : 32; } /** * Configures if the maps created by this {@code ChronicleMapBuilder} should return {@code null} * instead of previous mapped values on {@link ChronicleMap#put(Object, Object) * ChronicleMap.put(key, value)} calls. * * <p>{@link Map#put(Object, Object) Map.put()} returns the previous value, functionality * which is rarely used but fairly cheap for simple in-process, on-heap implementations like * {@link HashMap}. But an off-heap collection has to create a new object and deserialize * the data from off-heap memory. A collection hiding remote queries over the network should * send the value back in addition to that. It's expensive for something you probably don't use. * * <p>By default, of course, {@code ChronicleMap} conforms to the general {@code Map} contract and * returns the previous mapped value on {@code put()} calls. * * @param putReturnsNull {@code true} if you want {@link ChronicleMap#put(Object, Object) * ChronicleMap.put()} to not return the value that was replaced but * instead return {@code null} * @return this builder back * @see #removeReturnsNull(boolean) */ public ChronicleMapBuilder<K, V> putReturnsNull(boolean putReturnsNull) { this.putReturnsNull = putReturnsNull; return this; } boolean putReturnsNull() { return putReturnsNull; } /** * Configures if the maps created by this {@code ChronicleMapBuilder} should return {@code null} * instead of the last mapped value on {@link ChronicleMap#remove(Object) * ChronicleMap.remove(key)} calls. * * <p>{@link Map#remove(Object) Map.remove()} returns the previous value, functionality which is * rarely used but fairly cheap for simple in-process, on-heap implementations like {@link * HashMap}. But an off-heap collection has to create a new object and deserialize the data * from off-heap memory. A collection hiding remote queries over the network should send * the value back in addition to that. It's expensive for something you probably don't use. * * <p>By default, of course, {@code ChronicleMap} conforms to the general {@code Map} contract and * returns the mapped value on {@code remove()} calls.
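 * * <p>An illustrative configuration sketch (the types and sizes below are hypothetical, chosen only to show where these two switches plug into the builder chain): <pre>{@code * ChronicleMap<String, String> cache = ChronicleMapBuilder * .of(String.class, String.class) * .entries(100_000) * .averageKeySize(16) * .averageValueSize(64) * .putReturnsNull(true) * .removeReturnsNull(true) * .create();}</pre>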
 * * @param removeReturnsNull {@code true} if you want {@link ChronicleMap#remove(Object) * ChronicleMap.remove()} to not return the value of the removed entry * but instead return {@code null} * @return this builder back * @see #putReturnsNull(boolean) */ public ChronicleMapBuilder<K, V> removeReturnsNull(boolean removeReturnsNull) { this.removeReturnsNull = removeReturnsNull; return this; } boolean removeReturnsNull() { return removeReturnsNull; } @Override public ChronicleMapBuilder<K, V> maxBloatFactor(double maxBloatFactor) { if (isNaN(maxBloatFactor) || maxBloatFactor < 1.0 || maxBloatFactor > 1_000.0) { throw new IllegalArgumentException("maxBloatFactor should be in [1.0, 1_000.0] " + "bounds, " + maxBloatFactor + " given"); } this.maxBloatFactor = maxBloatFactor; return this; } @Override public ChronicleMapBuilder<K, V> allowSegmentTiering(boolean allowSegmentTiering) { this.allowSegmentTiering = allowSegmentTiering; return this; } @Override public ChronicleMapBuilder<K, V> nonTieredSegmentsPercentile( double nonTieredSegmentsPercentile) { if (isNaN(nonTieredSegmentsPercentile) || nonTieredSegmentsPercentile <= 0.5 || nonTieredSegmentsPercentile >= 1.0) { throw new IllegalArgumentException("nonTieredSegmentsPercentile should be in (0.5, " + "1.0) range, " + nonTieredSegmentsPercentile + " is given"); } this.nonTieredSegmentsPercentile = nonTieredSegmentsPercentile; return this; } long maxExtraTiers() { if (!allowSegmentTiering) return 0; int actualSegments = actualSegments(); // maxBloatFactor is a scale factor, so we subtract 1.0 to compute the _extra_ tiers return round((maxBloatFactor - 1.0) * actualSegments) // but to mitigate slight misconfiguration, and uneven distribution of entries // between segments, add 1.0 x actualSegments + actualSegments; } @Override public String toString() { return "ChronicleMapBuilder{" + "actualSegments=" + pretty(actualSegments) + ", minSegments=" + pretty(minSegments) + ", entriesPerSegment=" + pretty(entriesPerSegment) + ", actualChunksPerSegmentTier=" + pretty(actualChunksPerSegmentTier) + ", averageKeySize=" + pretty(averageKeySize) + ", sampleKeyForConstantSizeComputation=" + pretty(sampleKey) + ", averageValueSize=" + pretty(averageValueSize) + ", sampleValueForConstantSizeComputation=" + pretty(sampleValue) + ", actualChunkSize=" + pretty(actualChunkSize) + ", valueAlignment=" + valueAlignment() + ", entries=" + entries() + ", putReturnsNull=" + putReturnsNull() + ", removeReturnsNull=" + removeReturnsNull() + ", keyBuilder=" + keyBuilder + ", valueBuilder=" + valueBuilder + '}'; } @SuppressWarnings("EqualsWhichDoesntCheckParameterClass") @Override public boolean equals(Object o) { return builderEquals(this, o); } @Override public int hashCode() { return toString().hashCode(); } ChronicleMapBuilder<K, V> removedEntryCleanupTimeout( long removedEntryCleanupTimeout, TimeUnit unit) { if (unit.toMillis(removedEntryCleanupTimeout) < 1) { throw new IllegalArgumentException("timeout should be >= 1 millisecond, " + removedEntryCleanupTimeout + " " + unit + " is given"); } cleanupTimeout = removedEntryCleanupTimeout; cleanupTimeoutUnit = unit; return this; } ChronicleMapBuilder<K, V> cleanupRemovedEntries(boolean cleanupRemovedEntries) { this.cleanupRemovedEntries = cleanupRemovedEntries; return this; } @Override public ChronicleMapBuilder<K, V> keyReaderAndDataAccess( SizedReader<K> keyReader, @NotNull DataAccess<K> keyDataAccess) { keyBuilder.reader(keyReader); keyBuilder.dataAccess(keyDataAccess); return this; } @Override public ChronicleMapBuilder<K, V>
keyMarshallers( @NotNull SizedReader<K> keyReader, @NotNull SizedWriter<? super K> keyWriter) { keyBuilder.reader(keyReader); keyBuilder.writer(keyWriter); return this; } @Override public <M extends SizedReader<K> & SizedWriter<? super K>> ChronicleMapBuilder<K, V> keyMarshaller(@NotNull M sizedMarshaller) { return keyMarshallers(sizedMarshaller, sizedMarshaller); } @Override public ChronicleMapBuilder<K, V> keyMarshallers( @NotNull BytesReader<K> keyReader, @NotNull BytesWriter<? super K> keyWriter) { keyBuilder.reader(keyReader); keyBuilder.writer(keyWriter); return this; } @Override public <M extends BytesReader<K> & BytesWriter<? super K>> ChronicleMapBuilder<K, V> keyMarshaller(@NotNull M marshaller) { return keyMarshallers(marshaller, marshaller); } @Override public ChronicleMapBuilder<K, V> keySizeMarshaller(@NotNull SizeMarshaller keySizeMarshaller) { keyBuilder.sizeMarshaller(keySizeMarshaller); return this; } @Override public ChronicleMapBuilder<K, V> aligned64BitMemoryOperationsAtomic( boolean aligned64BitMemoryOperationsAtomic) { this.aligned64BitMemoryOperationsAtomic = aligned64BitMemoryOperationsAtomic; return this; } @Override public ChronicleMapBuilder<K, V> checksumEntries(boolean checksumEntries) { this.checksumEntries = checksumEntries ? ChecksumEntries.YES : ChecksumEntries.NO; return this; } boolean checksumEntries() { switch (checksumEntries) { case NO: return false; case YES: return true; case IF_PERSISTED: return persisted; default: throw new AssertionError(); } } boolean aligned64BitMemoryOperationsAtomic() { return aligned64BitMemoryOperationsAtomic; } /** * Configures the {@code DataAccess} and {@code SizedReader} used to serialize and deserialize * values to and from off-heap memory in maps, created by this builder. * * @param valueReader the new bytes &rarr; value object reader strategy * @param valueDataAccess the new strategy of accessing the values' bytes for writing * @return this builder back * @see #valueMarshallers(SizedReader, SizedWriter) * @see ChronicleHashBuilder#keyReaderAndDataAccess(SizedReader, DataAccess) */ public ChronicleMapBuilder<K, V> valueReaderAndDataAccess( SizedReader<V> valueReader, @NotNull DataAccess<V> valueDataAccess) { valueBuilder.reader(valueReader); valueBuilder.dataAccess(valueDataAccess); return this; } /** * Configures the marshallers, used to serialize/deserialize values to/from off-heap memory in * maps, created by this builder. * * @param valueReader the new bytes &rarr; value object reader strategy * @param valueWriter the new value object &rarr; bytes writer strategy * @return this builder back * @see #valueReaderAndDataAccess(SizedReader, DataAccess) * @see #valueSizeMarshaller(SizeMarshaller) * @see ChronicleHashBuilder#keyMarshallers(SizedReader, SizedWriter) */ public ChronicleMapBuilder<K, V> valueMarshallers( @NotNull SizedReader<V> valueReader, @NotNull SizedWriter<? super V> valueWriter) { valueBuilder.reader(valueReader); valueBuilder.writer(valueWriter); return this; } /** * Shortcut for {@link #valueMarshallers(SizedReader, SizedWriter) * valueMarshallers(sizedMarshaller, sizedMarshaller)}. */ public <M extends SizedReader<V> & SizedWriter<? super V>> ChronicleMapBuilder<K, V> valueMarshaller(@NotNull M sizedMarshaller) { return valueMarshallers(sizedMarshaller, sizedMarshaller); } /** * Configures the marshallers, used to serialize/deserialize values to/from off-heap memory in * maps, created by this builder. 
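 * * <p>A hedged usage sketch: assuming a hypothetical {@code PointMarshaller} class (not part of this library) that implements both {@code BytesReader<Point>} and {@code BytesWriter<Point>}, the pair would be registered as <pre>{@code * PointMarshaller m = new PointMarshaller(); // hypothetical marshaller * ChronicleMapBuilder.of(String.class, Point.class) * .valueMarshallers(m, m) * // ... other configurations * .create();}</pre>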
 * * @param valueReader the new bytes &rarr; value object reader strategy * @param valueWriter the new value object &rarr; bytes writer strategy * @return this builder back * @see #valueReaderAndDataAccess(SizedReader, DataAccess) * @see #valueSizeMarshaller(SizeMarshaller) * @see ChronicleHashBuilder#keyMarshallers(BytesReader, BytesWriter) */ public ChronicleMapBuilder<K, V> valueMarshallers( @NotNull BytesReader<V> valueReader, @NotNull BytesWriter<? super V> valueWriter) { valueBuilder.reader(valueReader); valueBuilder.writer(valueWriter); return this; } /** * Shortcut for {@link #valueMarshallers(BytesReader, BytesWriter) * valueMarshallers(marshaller, marshaller)}. */ public <M extends BytesReader<V> & BytesWriter<? super V>> ChronicleMapBuilder<K, V> valueMarshaller(@NotNull M marshaller) { return valueMarshallers(marshaller, marshaller); } /** * Configures the marshaller used to serialize actual value sizes to off-heap memory in maps, * created by this builder. * * <p>The default value size marshaller is so-called "stop bit encoding" marshalling, unless {@link * #constantValueSizeBySample(Object)} is configured or the builder statically knows that the value size is * constant -- special constant size marshalling is used by default in these cases. * * @param valueSizeMarshaller the new marshaller, used to serialize actual value sizes to * off-heap memory * @return this builder back * @see #keySizeMarshaller(SizeMarshaller) */ public ChronicleMapBuilder<K, V> valueSizeMarshaller( @NotNull SizeMarshaller valueSizeMarshaller) { valueBuilder.sizeMarshaller(valueSizeMarshaller); return this; } /** * Specifies the function to obtain a value for the key during {@link ChronicleMap#acquireUsing * acquireUsing()} calls, if the key is absent in the map created by this builder. * * @param defaultValueProvider the strategy to obtain a default value by the absent key * @return this builder object back */ public ChronicleMapBuilder<K, V> defaultValueProvider( @NotNull DefaultValueProvider<K, V> defaultValueProvider) { Objects.requireNonNull(defaultValueProvider); this.defaultValueProvider = defaultValueProvider; return this; } ChronicleMapBuilder<K, V> replication(byte identifier) { if (identifier <= 0) throw new IllegalArgumentException("Identifier must be positive, " + identifier + " given"); this.replicationIdentifier = identifier; return this; } @Override public ChronicleMap<K, V> createPersistedTo(File file) throws IOException { // clone() to make this builder instance thread-safe, because the createWithFile() method // computes some state based on configurations, but doesn't synchronize on configuration // changes. return createMapFileSynchronized(file, () -> clone().createWithFile(file, false, false)); } @Override public ChronicleMap<K, V> createOrRecoverPersistedTo(File file) throws IOException { return file.exists() ? recoverPersistedTo(file, true) : createPersistedTo(file); } @Override public ChronicleMap<K, V> recoverPersistedTo(File file, boolean sameBuilderConfig) throws IOException { return createMapFileSynchronized(file, () -> clone().createWithFile(file, true, sameBuilderConfig)); } @Override public ChronicleMap<K, V> create() { // clone() to make this builder instance thread-safe, because the createWithoutFile() method // computes some state based on configurations, but doesn't synchronize on configuration // changes.
return clone().createWithoutFile(); } ChronicleMap<K, V> createWithFile( File file, boolean recover, boolean overrideBuilderConfig) throws IOException { if (overrideBuilderConfig && !recover) throw new AssertionError("recover -> overrideBuilderConfig"); replicated = replicationIdentifier != -1; persisted = true; if (!file.exists()) { if (recover) throw new FileNotFoundException("file " + file + " should exist for recovery"); //noinspection ResultOfMethodCallIgnored file.createNewFile(); } try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) { if (raf.length() > 0) return openWithExistingFile(file, raf, recover, overrideBuilderConfig); VanillaChronicleMap<K, V, ?> map = null; ByteBuffer headerBuffer = null; boolean newFile; FileChannel fileChannel = raf.getChannel(); try (FileLock ignored = fileChannel.lock()) { if (raf.length() == 0) { map = newMap(); headerBuffer = writeHeader(fileChannel, map); newFile = true; } else { newFile = false; } } if (newFile) { int headerSize = headerBuffer.remaining(); return createWithNewFile(map, file, raf, headerBuffer, headerSize); } else { return openWithExistingFile(file, raf, recover, overrideBuilderConfig); } } } /** * @return size of the self bootstrapping header */ private int waitUntilReady(RandomAccessFile raf, boolean recover) throws IOException { FileChannel fileChannel = raf.getChannel(); ByteBuffer sizeWordBuffer = ByteBuffer.allocate(4); sizeWordBuffer.order(LITTLE_ENDIAN); // 60 * 10, 100 ms wait = 1 minute total wait int attempts = 60 * 10; int lastReadHeaderSize = -1; for (int attempt = 0; attempt < attempts; attempt++) { if (raf.length() >= SELF_BOOTSTRAPPING_HEADER_OFFSET) { sizeWordBuffer.clear(); readFully(fileChannel, SIZE_WORD_OFFSET, sizeWordBuffer); if (sizeWordBuffer.remaining() == 0) { int sizeWord = sizeWordBuffer.getInt(0); lastReadHeaderSize = SizePrefixedBlob.extractSize(sizeWord); if (SizePrefixedBlob.isReady(sizeWord)) return lastReadHeaderSize; } // The only possible reason why not 4 bytes are read, is that the file is // truncated between length() and read() calls, then continue to wait } try { Thread.sleep(100); } catch (InterruptedException e) { if (recover) { break; } else { throw new IOException(e); } } } if (recover) { if (lastReadHeaderSize == -1) { throw new ChronicleHashRecoveryFailedException("File header is not recoverable"); } else { return lastReadHeaderSize; } } else { throw new IOException("Unable to wait until the file is ready, likely the process " + "which created the file crashed or hung for more than 1 minute"); } } /** * @return ByteBuffer, in [position, limit) range the self bootstrapping header is read */ private ByteBuffer checkSumSelfBootstrappingHeader( RandomAccessFile raf, int headerSize, boolean recover) throws IOException { if (raf.length() < headerSize + SELF_BOOTSTRAPPING_HEADER_OFFSET) { throw throwRecoveryOrReturnIOException("The file is shorter than the header size: " + headerSize + ", file size: " + raf.length(), recover); } FileChannel fileChannel = raf.getChannel(); ByteBuffer headerBuffer = ByteBuffer.allocate( SELF_BOOTSTRAPPING_HEADER_OFFSET + headerSize); headerBuffer.order(LITTLE_ENDIAN); readFully(fileChannel, 0, headerBuffer); if (headerBuffer.remaining() > 0) { throw throwRecoveryOrReturnIOException("Unable to read the header fully, " + headerBuffer.remaining() + " is remaining to read, likely the file was " + "truncated", recover); } int sizeWord = headerBuffer.getInt(SIZE_WORD_OFFSET); if (!SizePrefixedBlob.isReady(sizeWord)) { if (recover) { LOG.error("size-prefixed 
blob readiness bit is set to NOT_READY"); // the bit will be overwritten to READY in the end of recovery procedure, so nothing // to fix right here } else { throw new IOException("sizeWord is not ready: " + sizeWord); } } long checkSum = headerChecksum(headerBuffer, headerSize); long storedChecksum = headerBuffer.getLong(HEADER_OFFSET); if (storedChecksum != checkSum) { throw throwRecoveryOrReturnIOException("Self Bootstrapping Header checksum doesn't " + "match the stored checksum: " + storedChecksum + ", computed: " + checkSum, recover); } headerBuffer.position(SELF_BOOTSTRAPPING_HEADER_OFFSET); return headerBuffer; } private IOException throwRecoveryOrReturnIOException(String message, boolean recover) { if (recover) { throw new ChronicleHashRecoveryFailedException(message); } else { return new IOException(message); } } private static long headerChecksum(ByteBuffer headerBuffer, int headerSize) { return LongHashFunction.xx_r39().hashBytes(headerBuffer, SIZE_WORD_OFFSET, headerSize + 4); } private ChronicleMap<K, V> createWithNewFile( VanillaChronicleMap<K, V, ?> map, File file, RandomAccessFile raf, ByteBuffer headerBuffer, int headerSize) throws IOException { FileChannel fileChannel = raf.getChannel(); map.initBeforeMapping(file, fileChannel, headerBuffer.limit()); map.createMappedStoreAndSegments(file, raf); establishReplication(map); commitChronicleMapReady(map, raf, headerBuffer, headerSize); return map; } private ChronicleMap<K, V> openWithExistingFile( File file, RandomAccessFile raf, boolean recover, boolean overrideBuilderConfig) throws IOException { try { int headerSize = waitUntilReady(raf, recover); FileChannel fileChannel = raf.getChannel(); ByteBuffer headerBuffer; if (overrideBuilderConfig) { VanillaChronicleMap<K, V, ?> mapObjectForHeaderOverwrite = newMap(); headerBuffer = writeHeader(fileChannel, mapObjectForHeaderOverwrite); headerSize = headerBuffer.remaining(); } else { headerBuffer = checkSumSelfBootstrappingHeader(raf, headerSize, recover); assert headerSize == headerBuffer.remaining(); } Bytes<ByteBuffer> headerBytes = Bytes.wrapForRead(headerBuffer); headerBytes.readPosition(headerBuffer.position()); headerBytes.readLimit(headerBuffer.limit()); Wire wire = new TextWire(headerBytes); VanillaChronicleMap<K, V, ?> map = wire.getValueIn().typedMarshallable(); assert map != null; map.initBeforeMapping(file, fileChannel, headerBuffer.limit()); long expectedFileLength = map.expectedFileSize(); if (!recover && expectedFileLength != file.length()) { throw new IOException("The file " + file + " the map is serialized from " + "has unexpected length " + file.length() + ", probably corrupted. 
" + "Expected length is " + expectedFileLength); } map.initTransientsFromBuilder(this); if (!recover) { map.createMappedStoreAndSegments(file, raf); } else { if (!overrideBuilderConfig) writeNotReady(fileChannel, headerBuffer, headerSize); // if overrideBuilderConfig = true, readiness bit is already set // in writeHeader() call map.recover(file, raf); } establishReplication(map); commitChronicleMapReady(map, raf, headerBuffer, headerSize); return map; } catch (Exception e) { if (recover && !(e instanceof IOException) && !(e instanceof ChronicleHashRecoveryFailedException)) throw new ChronicleHashRecoveryFailedException(e); throw e; } } private static void writeNotReady( FileChannel fileChannel, ByteBuffer headerBuffer, int headerSize) throws IOException { //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, NOT_READY | DATA | headerSize); headerBuffer.clear().position(SIZE_WORD_OFFSET).limit(SIZE_WORD_OFFSET + 4); writeFully(fileChannel, SIZE_WORD_OFFSET, headerBuffer); } /** * @return ByteBuffer, with self bootstrapping header in [position, limit) range */ private static <K, V> ByteBuffer writeHeader( FileChannel fileChannel, VanillaChronicleMap<K, V, ?> map) throws IOException { ByteBuffer headerBuffer = ByteBuffer.allocate( SELF_BOOTSTRAPPING_HEADER_OFFSET + MAX_BOOTSTRAPPING_HEADER_SIZE); headerBuffer.order(LITTLE_ENDIAN); Bytes<ByteBuffer> headerBytes = Bytes.wrapForWrite(headerBuffer); headerBytes.writePosition(SELF_BOOTSTRAPPING_HEADER_OFFSET); Wire wire = new TextWire(headerBytes); wire.getValueOut().typedMarshallable(map); int headerLimit = (int) headerBytes.writePosition(); int headerSize = headerLimit - SELF_BOOTSTRAPPING_HEADER_OFFSET; // First set readiness bit to READY, to compute checksum correctly //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, READY | DATA | headerSize); long checksum = headerChecksum(headerBuffer, headerSize); headerBuffer.putLong(HEADER_OFFSET, checksum); // Set readiness bit to NOT_READY, because the Chronicle Map instance is not actually // ready yet //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, NOT_READY | DATA | headerSize); // Write the size-prefixed blob to the file headerBuffer.position(0).limit(headerLimit); writeFully(fileChannel, 0, headerBuffer); headerBuffer.position(SELF_BOOTSTRAPPING_HEADER_OFFSET); return headerBuffer; } private static void commitChronicleMapReady( VanillaChronicleHash map, RandomAccessFile raf, ByteBuffer headerBuffer, int headerSize) throws IOException { FileChannel fileChannel = raf.getChannel(); // see HCOLL-396 map.msync(raf); //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, READY | DATA | headerSize); headerBuffer.clear().position(SIZE_WORD_OFFSET).limit(SIZE_WORD_OFFSET + 4); writeFully(fileChannel, SIZE_WORD_OFFSET, headerBuffer); } ChronicleMap<K, V> createWithoutFile() { replicated = replicationIdentifier != -1; persisted = false; try { VanillaChronicleMap<K, V, ?> map = newMap(); // TODO this method had been moved // if(OS.warnOnWindows(map.sizeInBytesWithoutTiers())){ // throw new IllegalStateException("Windows cannot support this configuration"); // } BytesStore bytesStore = lazyNativeBytesStoreWithFixedCapacity(map.sizeInBytesWithoutTiers()); map.createMappedStoreAndSegments(bytesStore); establishReplication(map); return map; } catch (IOException e) { // file-less version should never trigger an IOException. 
throw new AssertionError(e); } } private VanillaChronicleMap<K, V, ?> newMap() throws IOException { preMapConstruction(); if (replicated) { return new ReplicatedChronicleMap<>(this); } else { return new VanillaChronicleMap<>(this); } } void preMapConstruction() { averageKeySize = preMapConstruction( keyBuilder, averageKeySize, averageKey, sampleKey, "Key"); averageValueSize = preMapConstruction( valueBuilder, averageValueSize, averageValue, sampleValue, "Value"); stateChecks(); } private <E> double preMapConstruction( SerializationBuilder<E> builder, double configuredAverageSize, E average, E sample, String dim) { if (sample != null) { return builder.constantSizeBySample(sample); } else { double result = averageKeyOrValueSize(configuredAverageSize, builder, average); if (!isNaN(result) || allLowLevelConfigurationsAreManual()) { return result; } else { throw new IllegalStateException(dim + " size in serialized form must " + "be configured in ChronicleMap, at least approximately.\nUse builder" + ".average" + dim + "()/.constant" + dim + "SizeBySample()/" + ".average" + dim + "Size() methods to configure the size"); } } } private void stateChecks() { checkActualChunksPerSegmentTierIsConfiguredOnlyIfOtherLowLevelConfigsAreManual(); checkActualChunksPerSegmentGreaterOrEqualToEntries(); } private boolean allLowLevelConfigurationsAreManual() { return actualSegments > 0 && entriesPerSegment > 0 && actualChunksPerSegmentTier > 0 && actualChunkSize > 0; } private void establishReplication( VanillaChronicleMap<K, V, ?> map) throws IOException { if (map instanceof ReplicatedChronicleMap) { ReplicatedChronicleMap result = (ReplicatedChronicleMap) map; if (cleanupRemovedEntries) establishCleanupThread(result); } } private void establishCleanupThread(ReplicatedChronicleMap map) { OldDeletedEntriesCleanup cleanup = new OldDeletedEntriesCleanup(map); NamedThreadFactory threadFactory = new NamedThreadFactory("cleanup thread for map persisted at " + map.file()); ExecutorService executor = Executors.newSingleThreadExecutor(threadFactory); executor.submit(cleanup); map.addCloseable(cleanup); // WARNING this relies on the fact that ReplicatedChronicleMap closes closeables in the same // order as they are added, i. e. OldDeletedEntriesCleanup instance close()d before the // following closeable map.addCloseable(() -> { executor.shutdown(); try { executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { LOG.error("", e); } }); } /** * Inject your SPI code around basic {@code ChronicleMap}'s operations with entries: * removing entries, replacing the entries' value and inserting the new entry. * * <p>This affects behaviour of ordinary map.put(), map.remove(), etc. calls, as well as removes * and replacing values <i>during iterations</i>, <i>remote map calls</i> and * <i>internal replication operations</i>. */ public ChronicleMapBuilder<K, V> entryOperations(MapEntryOperations<K, V, ?> entryOperations) { Objects.requireNonNull(entryOperations); this.entryOperations = entryOperations; return this; } /** * Inject your SPI around logic of all {@code ChronicleMap}'s operations with individual keys: * from {@link ChronicleMap#containsKey} to {@link ChronicleMap#acquireUsing} and * {@link ChronicleMap#merge}. * * <p>This affects behaviour of ordinary map calls, as well as <i>remote calls</i>. 
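     *
     * <p>A minimal usage sketch ({@code CountingMapMethods} is a hypothetical user-supplied
     * {@code MapMethods} implementation, e.g. one counting how often keys are queried; it is not
     * part of this library): <pre>{@code
     * ChronicleMap<String, String> map = ChronicleMapBuilder
     *     .of(String.class, String.class)
     *     .entries(1000)
     *     .averageKey("key").averageValue("value")
     *     .mapMethods(new CountingMapMethods())
     *     .create();}</pre>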
*/ public ChronicleMapBuilder<K, V> mapMethods(MapMethods<K, V, ?> mapMethods) { Objects.requireNonNull(mapMethods); this.methods = mapMethods; return this; } ChronicleMapBuilder<K, V> remoteOperations( MapRemoteOperations<K, V, ?> remoteOperations) { Objects.requireNonNull(remoteOperations); this.remoteOperations = remoteOperations; return this; } }
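/*
 * End-to-end persistence sketch (illustrative only; the file path and sizes below are
 * hypothetical, not part of the API):
 *
 *   File file = new File("/tmp/example-map.dat");
 *   ChronicleMap<Integer, String> map = ChronicleMapBuilder
 *           .of(Integer.class, String.class)
 *           .entries(100000)
 *           .averageValue("an example value")
 *           .createOrRecoverPersistedTo(file);
 */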
src/main/java/net/openhft/chronicle/map/ChronicleMapBuilder.java
/* * Copyright (C) 2015 higherfrequencytrading.com * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package net.openhft.chronicle.map; import net.openhft.chronicle.algo.MemoryUnit; import net.openhft.chronicle.algo.hashing.LongHashFunction; import net.openhft.chronicle.bytes.Byteable; import net.openhft.chronicle.bytes.Bytes; import net.openhft.chronicle.bytes.BytesStore; import net.openhft.chronicle.core.OS; import net.openhft.chronicle.hash.ChronicleHashBuilder; import net.openhft.chronicle.hash.ChronicleHashRecoveryFailedException; import net.openhft.chronicle.hash.impl.CompactOffHeapLinearHashTable; import net.openhft.chronicle.hash.impl.SizePrefixedBlob; import net.openhft.chronicle.hash.impl.VanillaChronicleHash; import net.openhft.chronicle.hash.impl.stage.entry.ChecksumStrategy; import net.openhft.chronicle.hash.impl.util.math.PoissonDistribution; import net.openhft.chronicle.hash.serialization.*; import net.openhft.chronicle.hash.serialization.impl.SerializationBuilder; import net.openhft.chronicle.map.replication.MapRemoteOperations; import net.openhft.chronicle.set.ChronicleSetBuilder; import net.openhft.chronicle.threads.NamedThreadFactory; import net.openhft.chronicle.values.ValueModel; import net.openhft.chronicle.wire.TextWire; import net.openhft.chronicle.wire.Wire; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static java.lang.Double.isNaN; import static java.lang.Math.round; import static java.nio.ByteOrder.LITTLE_ENDIAN; import static net.openhft.chronicle.bytes.NativeBytesStore.lazyNativeBytesStoreWithFixedCapacity; import static net.openhft.chronicle.core.Maths.*; import static net.openhft.chronicle.hash.impl.CompactOffHeapLinearHashTable.*; import static net.openhft.chronicle.hash.impl.SizePrefixedBlob.*; import static net.openhft.chronicle.hash.impl.util.FileIOUtils.readFully; import static net.openhft.chronicle.hash.impl.util.FileIOUtils.writeFully; import static net.openhft.chronicle.hash.impl.util.Objects.builderEquals; import static net.openhft.chronicle.map.DefaultSpi.mapEntryOperations; import static net.openhft.chronicle.map.DefaultSpi.mapRemoteOperations; import static net.openhft.chronicle.map.VanillaChronicleMap.alignAddr; /** * {@code ChronicleMapBuilder} manages {@link ChronicleMap} configurations; could be used as a * classic builder and/or factory. 
This means that in addition to the standard builder usage
 * pattern: <pre>{@code
 * ChronicleMap<Key, Value> map = ChronicleMapOnHeapUpdatableBuilder
 *     .of(Key.class, Value.class)
 *     // ... other configurations
 *     .create();}</pre>
 * it could be prepared and used to create many similar maps: <pre>{@code
 * ChronicleMapBuilder<Key, Value> builder = ChronicleMapBuilder
 *     .of(Key.class, Value.class)
 *     .entries(..)
 *     // ... other configurations
 *
 * ChronicleMap<Key, Value> map1 = builder.create();
 * ChronicleMap<Key, Value> map2 = builder.create();}</pre>
 * i. e. created {@code ChronicleMap} instances don't depend on the builder.
 *
 * <p>{@code ChronicleMapBuilder} is mutable, see a note in the {@link ChronicleHashBuilder}
 * interface documentation.
 *
 * <p>Later in this documentation, "ChronicleMap" means "ChronicleMaps, created by {@code
 * ChronicleMapBuilder}", unless specified otherwise, because theoretically someone might provide
 * {@code ChronicleMap} implementations with completely different properties.
 *
 * <p>In addition to the key and value types, you <i>must</i> configure the {@linkplain #entries(long)
 * number of entries} you are going to insert into the created map <i>at most</i>. See the {@link
 * #entries(long)} method documentation for more information on this.
 *
 * <p>If your key or value type is not constantly sized and known to {@code ChronicleHashBuilder}, i.
 * e. it is not a boxed primitive, a {@linkplain net.openhft.chronicle.values.Values value interface},
 * or {@link Byteable}, you <i>must</i> provide the {@code ChronicleHashBuilder} with some
 * information about your keys or values: if they are constantly-sized, call {@link
 * #constantKeySizeBySample(Object)}, otherwise the {@link #averageKey(Object)} or {@link
 * #averageKeySize(double)} method, and accordingly for values.
 *
 * @param <K> key type of the maps, produced by this builder
 * @param <V> value type of the maps, produced by this builder
 * @see ChronicleHashBuilder
 * @see ChronicleMap
 * @see ChronicleSetBuilder
 */
public final class ChronicleMapBuilder<K, V> implements
        ChronicleHashBuilder<K, ChronicleMap<K, V>, ChronicleMapBuilder<K, V>> {

    private static final int UNDEFINED_ALIGNMENT_CONFIG = -1;
    private static final int NO_ALIGNMENT = 1;

    /**
     * If you want to increase this number, note that {@link OldDeletedEntriesCleanup} uses an
     * array to store all segment indexes -- so the limit could be the current JVM max array size,
     * not Integer.MAX_VALUE (which is an obvious limitation, as many APIs and internals use the
     * int type for representing a segment index).
     *
     * Anyway, it is unlikely anyone will ever need more than 1 billion segments.
     */
    private static final int MAX_SEGMENTS = (1 << 30);
    private static final Logger LOG =
            LoggerFactory.getLogger(ChronicleMapBuilder.class.getName());

    private static final double UNDEFINED_DOUBLE_CONFIG = Double.NaN;

    private static boolean isDefined(double config) {
        return !isNaN(config);
    }

    private static long toLong(double v) {
        long l = round(v);
        if (l != v)
            throw new IllegalArgumentException("Integer argument expected, given " + v);
        return l;
    }

    private static long roundUp(double v) {
        return round(Math.ceil(v));
    }

    private static long roundDown(double v) {
        return (long) v;
    }

    private static final int MAX_BOOTSTRAPPING_HEADER_SIZE =
            (int) MemoryUnit.KILOBYTES.toBytes(16);

    // not final because of cloning
    private ChronicleMapBuilderPrivateAPI<K, V> privateAPI =
            new ChronicleMapBuilderPrivateAPI<>(this);

    //////////////////////////////
    // Configuration fields
    SerializationBuilder<K> keyBuilder;
    SerializationBuilder<V> valueBuilder;

    // used when configuring the number of segments.
    private int minSegments = -1;
    private int actualSegments = -1;
    // used when configuring the number of entries per segment.
    private long entriesPerSegment = -1L;
    private long actualChunksPerSegmentTier = -1L;
    private double averageKeySize = UNDEFINED_DOUBLE_CONFIG;
    K averageKey;
    private K sampleKey;
    private double averageValueSize = UNDEFINED_DOUBLE_CONFIG;
    V averageValue;
    private V sampleValue;
    private int actualChunkSize = 0;
    private int worstAlignment = -1;
    private int maxChunksPerEntry = -1;
    private int alignment = UNDEFINED_ALIGNMENT_CONFIG;
    private long entries = -1L;
    private double maxBloatFactor = 1.0;
    private boolean allowSegmentTiering = true;
    private double nonTieredSegmentsPercentile = 0.99999;
    private boolean aligned64BitMemoryOperationsAtomic = OS.is64Bit();

    enum ChecksumEntries {YES, NO, IF_PERSISTED}

    private ChecksumEntries checksumEntries = ChecksumEntries.IF_PERSISTED;

    private boolean putReturnsNull = false;
    private boolean removeReturnsNull = false;

    /**
     * Default timeout is 1 minute. Even loopback tests often converge in the course of seconds,
     * while WAN replication over many nodes might take tens of seconds.
     *
     * TODO review
     */
    long cleanupTimeout = 1;
    TimeUnit cleanupTimeoutUnit = TimeUnit.MINUTES;
    boolean cleanupRemovedEntries = true;

    DefaultValueProvider<K, V> defaultValueProvider = DefaultSpi.defaultValueProvider();

    byte replicationIdentifier = -1;

    MapMethods<K, V, ?> methods = DefaultSpi.mapMethods();
    MapEntryOperations<K, V, ?> entryOperations = mapEntryOperations();
    MapRemoteOperations<K, V, ?> remoteOperations = mapRemoteOperations();

    //////////////////////////////
    // Instance fields
    private boolean replicated;
    private boolean persisted;

    ChronicleMapBuilder(Class<K> keyClass, Class<V> valueClass) {
        keyBuilder = new SerializationBuilder<>(keyClass);
        valueBuilder = new SerializationBuilder<>(valueClass);
    }

    /**
     * Returns a new {@code ChronicleMapBuilder} instance which is able to {@linkplain #create()
     * create} maps with the specified key and value classes.
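     *
     * <p>For example (a minimal sketch; the entry count is illustrative): <pre>{@code
     * ChronicleMap<Long, String> map = ChronicleMapBuilder
     *     .of(Long.class, String.class)
     *     .entries(1000)
     *     .averageValue("an example value")
     *     .create();}</pre>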
     *
     * @param keyClass   class object used to infer the key type and discover its properties via
     *                   reflection
     * @param valueClass class object used to infer the value type and discover its properties via
     *                   reflection
     * @param <K>        key type of the maps, created by the returned builder
     * @param <V>        value type of the maps, created by the returned builder
     * @return a new builder for the given key and value classes
     */
    public static <K, V> ChronicleMapBuilder<K, V> of(
            @NotNull Class<K> keyClass, @NotNull Class<V> valueClass) {
        return new ChronicleMapBuilder<>(keyClass, valueClass);
    }

    private static void checkSegments(long segments) {
        if (segments <= 0) {
            throw new IllegalArgumentException("segments should be positive, " +
                    segments + " given");
        }
        if (segments > MAX_SEGMENTS) {
            throw new IllegalArgumentException("Max segments is " + MAX_SEGMENTS + ", " +
                    segments + " given");
        }
    }

    private static String pretty(int value) {
        return value > 0 ? value + "" : "not configured";
    }

    private static String pretty(Object obj) {
        return obj != null ? obj + "" : "not configured";
    }

    @Override
    public ChronicleMapBuilder<K, V> clone() {
        try {
            @SuppressWarnings("unchecked")
            ChronicleMapBuilder<K, V> result = (ChronicleMapBuilder<K, V>) super.clone();
            result.keyBuilder = keyBuilder.clone();
            result.valueBuilder = valueBuilder.clone();
            result.privateAPI = new ChronicleMapBuilderPrivateAPI<>(result);
            return result;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * @deprecated don't use the private API in client code
     */
    @Override
    @Deprecated
    public Object privateAPI() {
        return privateAPI;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Example: if keys in your map(s) are English words in {@link String} form, and the average
     * English word length is 5.1, configure an average key size of 6: <pre>{@code
     * ChronicleMap<String, LongValue> wordFrequencies = ChronicleMapBuilder
     *     .of(String.class, LongValue.class)
     *     .entries(50000)
     *     .averageKeySize(6)
     *     .create();}</pre>
     * (Note that 6 is chosen as the average key size in bytes even though strings in Java are
     * UTF-16 encoded (and each character takes 2 bytes on-heap), because the default off-heap
     * {@link String} encoding in {@code ChronicleMap} is UTF-8.)
     *
     * @param averageKeySize the average size of the key
     * @throws IllegalStateException {@inheritDoc}
     * @throws IllegalArgumentException {@inheritDoc}
     * @see #averageKey(Object)
     * @see #constantKeySizeBySample(Object)
     * @see #averageValueSize(double)
     * @see #actualChunkSize(int)
     */
    @Override
    public ChronicleMapBuilder<K, V> averageKeySize(double averageKeySize) {
        checkSizeIsStaticallyKnown(keyBuilder, "Key");
        checkAverageSize(averageKeySize, "key");
        this.averageKeySize = averageKeySize;
        averageKey = null;
        sampleKey = null;
        return this;
    }

    /**
     * {@inheritDoc}
     *
     * @param averageKey the average (by footprint in serialized form) key, is going to be put
     *                   into the hash containers, created by this builder
     * @throws NullPointerException {@inheritDoc}
     * @see #averageKeySize(double)
     * @see #constantKeySizeBySample(Object)
     * @see #averageValue(Object)
     * @see #actualChunkSize(int)
     */
    @Override
    public ChronicleMapBuilder<K, V> averageKey(K averageKey) {
        Objects.requireNonNull(averageKey);
        checkSizeIsStaticallyKnown(keyBuilder, "Key");
        this.averageKey = averageKey;
        sampleKey = null;
        averageKeySize = UNDEFINED_DOUBLE_CONFIG;
        return this;
    }

    /**
     * {@inheritDoc}
     *
     * <p>For example, if your keys are Git commit hashes:<pre>{@code
     * Map<byte[], String> gitCommitMessagesByHash =
     *     ChronicleMapBuilder.of(byte[].class, String.class)
     *         .constantKeySizeBySample(new byte[20])
     *         .create();}</pre>
     *
     * @see #averageKeySize(double)
     * @see #averageKey(Object)
     * @see #constantValueSizeBySample(Object)
     */
    @Override
    public ChronicleMapBuilder<K, V> constantKeySizeBySample(K sampleKey) {
        this.sampleKey = sampleKey;
        averageKey = null;
        averageKeySize = UNDEFINED_DOUBLE_CONFIG;
        return this;
    }

    private double averageKeySize() {
        if (!isDefined(averageKeySize))
            throw new AssertionError();
        return averageKeySize;
    }

    /**
     * Configures the average number of bytes, taken by the serialized form of values put into
     * maps created by this builder. However, in many cases {@link #averageValue(Object)} might be
     * easier to use and more reliable. If the value size is always the same, call {@link
     * #constantValueSizeBySample(Object)} instead of this method.
     *
     * <p>The {@code ChronicleHashBuilder} implementation heuristically chooses {@linkplain
     * #actualChunkSize(int) the actual chunk size} based on this configuration and the key size,
     * which, however, might result in quite high internal fragmentation, i. e. losses because only
     * an integral number of chunks can be allocated for each entry. If you want to avoid this, you
     * should manually configure the actual chunk size in addition to this average value size
     * configuration, which is needed anyway.
     *
     * <p>If values are of a boxed primitive type or a {@link Byteable} subclass, i. e. if the
     * value size is known statically, it is automatically accounted for and shouldn't be specified
     * by the user.
     *
     * <p>Calling this method clears any previous {@link #constantValueSizeBySample(Object)} and
     * {@link #averageValue(Object)} configurations.
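     *
     * <p>For example, if values are city names averaging about 10 bytes in UTF-8 encoded
     * serialized form (an illustrative figure): <pre>{@code
     * ChronicleMap<Integer, CharSequence> cities = ChronicleMapBuilder
     *     .of(Integer.class, CharSequence.class)
     *     .entries(50000)
     *     .averageValueSize(10)
     *     .create();}</pre>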
     *
     * @param averageValueSize number of bytes, taken by serialized form of values
     * @return this builder back
     * @throws IllegalStateException if value size is known statically and shouldn't be
     *                               configured by the user
     * @throws IllegalArgumentException if the given {@code averageValueSize} is non-positive
     * @see #averageValue(Object)
     * @see #constantValueSizeBySample(Object)
     * @see #averageKeySize(double)
     * @see #actualChunkSize(int)
     */
    public ChronicleMapBuilder<K, V> averageValueSize(double averageValueSize) {
        checkSizeIsStaticallyKnown(valueBuilder, "Value");
        checkAverageSize(averageValueSize, "value");
        this.averageValueSize = averageValueSize;
        averageValue = null;
        sampleValue = null;
        return this;
    }

    /**
     * Configures the average number of bytes, taken by the serialized form of values put into
     * maps created by this builder, by serializing the given {@code averageValue} using the
     * configured {@link #valueMarshallers(SizedReader, SizedWriter) value marshallers}. In some
     * cases, {@link #averageValueSize(double)} might be easier to use than constructing the
     * "average value". If the value size is always the same, call {@link
     * #constantValueSizeBySample(Object)} instead of this method.
     *
     * <p>The {@code ChronicleHashBuilder} implementation heuristically chooses {@linkplain
     * #actualChunkSize(int) the actual chunk size} based on this configuration and the key size,
     * which, however, might result in quite high internal fragmentation, i. e. losses because only
     * an integral number of chunks can be allocated for each entry. If you want to avoid this, you
     * should manually configure the actual chunk size in addition to this average value size
     * configuration, which is needed anyway.
     *
     * <p>If values are of a boxed primitive type or a {@link Byteable} subclass, i. e. if the
     * value size is known statically, it is automatically accounted for and shouldn't be specified
     * by the user.
     *
     * <p>Calling this method clears any previous {@link #constantValueSizeBySample(Object)}
     * and {@link #averageValueSize(double)} configurations.
     *
     * @param averageValue the average (by footprint in serialized form) value, is going to be put
     *                     into the maps, created by this builder
     * @return this builder back
     * @throws NullPointerException if the given {@code averageValue} is {@code null}
     * @see #averageValueSize(double)
     * @see #constantValueSizeBySample(Object)
     * @see #averageKey(Object)
     * @see #actualChunkSize(int)
     */
    public ChronicleMapBuilder<K, V> averageValue(V averageValue) {
        Objects.requireNonNull(averageValue);
        checkSizeIsStaticallyKnown(valueBuilder, "Value");
        this.averageValue = averageValue;
        sampleValue = null;
        averageValueSize = UNDEFINED_DOUBLE_CONFIG;
        return this;
    }

    private static void checkSizeIsStaticallyKnown(SerializationBuilder builder, String role) {
        if (builder.sizeIsStaticallyKnown) {
            throw new IllegalStateException("Size of " + builder.tClass +
                    " instances is constant and statically known, shouldn't be specified via " +
                    "average" + role + "Size() or average" + role + "() methods");
        }
    }

    private static void checkAverageSize(double averageSize, String role) {
        if (averageSize <= 0 || isNaN(averageSize) ||
                Double.isInfinite(averageSize)) {
            throw new IllegalArgumentException("Average " + role + " size must be a positive, " +
                    "finite number");
        }
    }

    /**
     * Configures the constant number of bytes, taken by the serialized form of values put into
     * maps, created by this builder.
This is done by providing the {@code sampleValue};
     * all values should take the same number of bytes in serialized form as this sample object.
     *
     * <p>If values are of a boxed primitive type or a {@link Byteable} subclass, i. e. if the
     * value size is known statically, it is automatically accounted for and this method shouldn't
     * be called.
     *
     * <p>If the value size varies, the {@link #averageValue(Object)} or {@link
     * #averageValueSize(double)} method should be called instead of this one.
     *
     * <p>Calling this method clears any previous {@link #averageValue(Object)} and
     * {@link #averageValueSize(double)} configurations.
     *
     * @param sampleValue the sample value
     * @return this builder back
     * @see #averageValueSize(double)
     * @see #averageValue(Object)
     * @see #constantKeySizeBySample(Object)
     */
    public ChronicleMapBuilder<K, V> constantValueSizeBySample(V sampleValue) {
        this.sampleValue = sampleValue;
        averageValue = null;
        averageValueSize = UNDEFINED_DOUBLE_CONFIG;
        return this;
    }

    double averageValueSize() {
        if (!isDefined(averageValueSize))
            throw new AssertionError();
        return averageValueSize;
    }

    private <E> double averageKeyOrValueSize(
            double configuredSize, SerializationBuilder<E> builder, E average) {
        if (isDefined(configuredSize))
            return configuredSize;
        if (builder.constantSizeMarshaller())
            return builder.constantSize();
        if (average != null) {
            return builder.serializationSize(average);
        }
        return Double.NaN;
    }

    /**
     * {@inheritDoc}
     *
     * @throws IllegalStateException if sizes of both keys and values of maps created by this
     *                               builder are constant, hence the chunk size shouldn't be
     *                               configured by the user
     * @see #entryAndValueOffsetAlignment(int)
     * @see #entries(long)
     * @see #maxChunksPerEntry(int)
     */
    @Override
    public ChronicleMapBuilder<K, V> actualChunkSize(int actualChunkSize) {
        if (constantlySizedEntries()) {
            throw new IllegalStateException("Sizes of key type: " + keyBuilder.tClass + " and " +
                    "value type: " + valueBuilder.tClass + " are both constant, " +
                    "so chunk size shouldn't be specified manually");
        }
        if (actualChunkSize <= 0)
            throw new IllegalArgumentException("Chunk size must be positive");
        this.actualChunkSize = actualChunkSize;
        return this;
    }

    SerializationBuilder<K> keyBuilder() {
        return keyBuilder;
    }

    static class EntrySizeInfo {
        final double averageEntrySize;
        final int worstAlignment;

        public EntrySizeInfo(double averageEntrySize, int worstAlignment) {
            this.averageEntrySize = averageEntrySize;
            this.worstAlignment = worstAlignment;
        }
    }

    private EntrySizeInfo entrySizeInfo() {
        double size = 0;
        double keySize = averageKeySize();
        size += averageSizeStoringLength(keyBuilder, keySize);
        size += keySize;
        if (replicated)
            size += ReplicatedChronicleMap.ADDITIONAL_ENTRY_BYTES;
        if (checksumEntries())
            size += ChecksumStrategy.CHECKSUM_STORED_BYTES;
        double valueSize = averageValueSize();
        size += averageSizeStoringLength(valueBuilder, valueSize);
        int alignment = valueAlignment();
        int worstAlignment;
        if (worstAlignmentComputationRequiresValueSize(alignment)) {
            long constantSizeBeforeAlignment = toLong(size);
            if (constantlySizedValues()) {
                // see tierEntrySpaceInnerOffset()
                long totalDataSize = constantSizeBeforeAlignment + constantValueSize();
                worstAlignment = (int) (alignAddr(totalDataSize, alignment) - totalDataSize);
            } else {
                determineAlignment:
                if (actualChunkSize > 0) {
                    worstAlignment = worstAlignmentAssumingChunkSize(constantSizeBeforeAlignment,
                            actualChunkSize);
                } else {
                    int chunkSize = 8;
                    worstAlignment = worstAlignmentAssumingChunkSize(
                            constantSizeBeforeAlignment, chunkSize);
                    if (size + worstAlignment + valueSize >=
maxDefaultChunksPerAverageEntry(replicated) * chunkSize) { break determineAlignment; } chunkSize = 4; worstAlignment = worstAlignmentAssumingChunkSize( constantSizeBeforeAlignment, chunkSize); } } } else { // assume worst case, we always lose most possible bytes for alignment worstAlignment = worstAlignmentWithoutValueSize(alignment); } size += worstAlignment; size += valueSize; return new EntrySizeInfo(size, worstAlignment); } private boolean worstAlignmentComputationRequiresValueSize(int alignment) { return alignment != NO_ALIGNMENT && constantlySizedKeys() && valueBuilder.constantStoringLengthSizeMarshaller(); } private int worstAlignmentWithoutValueSize(int alignment) { return alignment - 1; } int segmentEntrySpaceInnerOffset() { // This is needed, if chunkSize = constant entry size is not aligned, for entry alignment // to be always the same, we should _misalign_ the first chunk. if (!constantlySizedEntries()) return 0; return (int) (constantValueSize() % valueAlignment()); } private long constantValueSize() { return valueBuilder.constantSize(); } boolean constantlySizedKeys() { return keyBuilder.constantSizeMarshaller() || sampleKey != null; } private static double averageSizeStoringLength( SerializationBuilder builder, double averageSize) { SizeMarshaller sizeMarshaller = builder.sizeMarshaller(); if (averageSize == round(averageSize)) return sizeMarshaller.storingLength(round(averageSize)); long lower = roundDown(averageSize); long upper = lower + 1; int lowerStoringLength = sizeMarshaller.storingLength(lower); int upperStoringLength = sizeMarshaller.storingLength(upper); if (lowerStoringLength == upperStoringLength) return lowerStoringLength; return lower * (upper - averageSize) + upper * (averageSize - lower); } private int worstAlignmentAssumingChunkSize( long constantSizeBeforeAlignment, int chunkSize) { int alignment = valueAlignment(); long firstAlignment = alignAddr(constantSizeBeforeAlignment, alignment) - constantSizeBeforeAlignment; int gcdOfAlignmentAndChunkSize = greatestCommonDivisor(alignment, chunkSize); if (gcdOfAlignmentAndChunkSize == alignment) return (int) firstAlignment; // assume worst by now because we cannot predict alignment in VanillaCM.entrySize() method // before allocation long worstAlignment = firstAlignment; while (worstAlignment + gcdOfAlignmentAndChunkSize < alignment) worstAlignment += gcdOfAlignmentAndChunkSize; return (int) worstAlignment; } int worstAlignment() { if (worstAlignment >= 0) return worstAlignment; int alignment = valueAlignment(); if (!worstAlignmentComputationRequiresValueSize(alignment)) return worstAlignment = worstAlignmentWithoutValueSize(alignment); return worstAlignment = entrySizeInfo().worstAlignment; } void worstAlignment(int worstAlignment) { assert worstAlignment >= 0; this.worstAlignment = worstAlignment; } static int greatestCommonDivisor(int a, int b) { if (b == 0) return a; return greatestCommonDivisor(b, a % b); } long chunkSize() { if (actualChunkSize > 0) return actualChunkSize; double averageEntrySize = entrySizeInfo().averageEntrySize; if (constantlySizedEntries()) return toLong(averageEntrySize); int maxChunkSize = 1 << 30; for (long chunkSize = 4; chunkSize <= maxChunkSize; chunkSize *= 2L) { if (maxDefaultChunksPerAverageEntry(replicated) * chunkSize > averageEntrySize) return chunkSize; } return maxChunkSize; } boolean constantlySizedEntries() { return constantlySizedKeys() && constantlySizedValues(); } double averageChunksPerEntry() { if (constantlySizedEntries()) return 1.0; long chunkSize = chunkSize(); 
// assuming we always have the worst internal fragmentation. This affects total segment
        // entry space which is allocated lazily on Linux (the main target platform),
        // so we can afford this
        return (entrySizeInfo().averageEntrySize + chunkSize - 1) / chunkSize;
    }

    private static int maxDefaultChunksPerAverageEntry(boolean replicated) {
        // When replicated, having 8 chunks (=> 8 bits in bitsets) per entry seems more wasteful
        // because when replicated we have bit sets per each remote node, not only the allocation
        // bit set as when non-replicated
        return replicated ? 4 : 8;
    }

    @Override
    public ChronicleMapBuilder<K, V> maxChunksPerEntry(int maxChunksPerEntry) {
        if (maxChunksPerEntry < 1)
            throw new IllegalArgumentException("maxChunksPerEntry should be >= 1, " +
                    maxChunksPerEntry + " given");
        this.maxChunksPerEntry = maxChunksPerEntry;
        return this;
    }

    int maxChunksPerEntry() {
        if (constantlySizedEntries())
            return 1;
        long actualChunksPerSegmentTier = actualChunksPerSegmentTier();
        int result = (int) Math.min(actualChunksPerSegmentTier, (long) Integer.MAX_VALUE);
        if (this.maxChunksPerEntry > 0)
            result = Math.min(this.maxChunksPerEntry, result);
        return result;
    }

    boolean constantlySizedValues() {
        return valueBuilder.constantSizeMarshaller() || sampleValue != null;
    }

    /**
     * Configures the alignment of the address in memory of entries and, independently, of the
     * address in memory of values within entries (i. e. final addresses in native memory are
     * multiples of the given alignment) for ChronicleMaps, created by this builder.
     *
     * <p>Useful when values of the map are updated intensively, particularly fields with volatile
     * access, because it doesn't work well if the value crosses cache lines. Also, on some
     * (nowadays rare) architectures any misaligned memory access is more expensive than an
     * aligned one.
     *
     * <p>If values cannot reference off-heap memory (i. e. the value type is not {@link Byteable}
     * or a value interface), alignment configuration makes no sense.
     *
     * <p>Default is {@link ValueModel#recommendedOffsetAlignment()} if the value type is a value
     * interface, otherwise 1 (that is effectively no alignment) or chosen heuristically (configure
     * explicitly for being sure and to compare performance in your case).
     *
     * @param alignment the new alignment of the maps constructed by this builder
     * @return this builder back
     * @throws IllegalStateException if values of maps, created by this builder, couldn't reference
     *                               off-heap memory
     */
    public ChronicleMapBuilder<K, V> entryAndValueOffsetAlignment(int alignment) {
        if (alignment <= 0) {
            throw new IllegalArgumentException("Alignment should be positive integer, " +
                    alignment + " given");
        }
        if (!isPowerOf2(alignment)) {
            throw new IllegalArgumentException("Alignment should be a power of 2, " +
                    alignment + " given");
        }
        this.alignment = alignment;
        return this;
    }

    int valueAlignment() {
        if (alignment != UNDEFINED_ALIGNMENT_CONFIG)
            return alignment;
        try {
            return ValueModel.acquire(valueBuilder.tClass).recommendedOffsetAlignment();
        } catch (Exception e) {
            return NO_ALIGNMENT;
        }
    }

    @Override
    public ChronicleMapBuilder<K, V> entries(long entries) {
        if (entries <= 0L)
            throw new IllegalArgumentException("Entries should be positive, " + entries +
                    " given");
        this.entries = entries;
        return this;
    }

    long entries() {
        if (entries < 0) {
            throw new IllegalStateException("If in-memory Chronicle Map is created or persisted\n" +
                    "to a file for the first time (i. e.
not accessing existing file),\n" + "ChronicleMapBuilder.entries() must be configured.\n" + "See Chronicle Map 3 tutorial and javadocs for more information"); } return entries; } @Override public ChronicleMapBuilder<K, V> entriesPerSegment(long entriesPerSegment) { if (entriesPerSegment <= 0L) throw new IllegalArgumentException("Entries per segment should be positive, " + entriesPerSegment + " given"); this.entriesPerSegment = entriesPerSegment; return this; } long entriesPerSegment() { long entriesPerSegment; if (this.entriesPerSegment > 0L) { entriesPerSegment = this.entriesPerSegment; } else { int actualSegments = actualSegments(); double averageEntriesPerSegment = entries() * 1.0 / actualSegments; if (actualSegments > 1) { entriesPerSegment = PoissonDistribution.inverseCumulativeProbability( averageEntriesPerSegment, nonTieredSegmentsPercentile); } else { // if there is only 1 segment, there is no source of variance in segments filling entriesPerSegment = roundUp(averageEntriesPerSegment); } } boolean actualChunksDefined = actualChunksPerSegmentTier > 0; if (!actualChunksDefined) { double averageChunksPerEntry = averageChunksPerEntry(); if (entriesPerSegment * averageChunksPerEntry > MAX_TIER_CHUNKS) throw new IllegalStateException("Max chunks per segment tier is " + MAX_TIER_CHUNKS + " configured entries() and actualSegments() so that " + "there should be " + entriesPerSegment + " entries per segment tier, " + "while average chunks per entry is " + averageChunksPerEntry); } if (entriesPerSegment > MAX_TIER_ENTRIES) throw new IllegalStateException("shouldn't be more than " + MAX_TIER_ENTRIES + " entries per segment"); return entriesPerSegment; } @Override public ChronicleMapBuilder<K, V> actualChunksPerSegmentTier(long actualChunksPerSegmentTier) { if (actualChunksPerSegmentTier <= 0 || actualChunksPerSegmentTier > MAX_TIER_CHUNKS) throw new IllegalArgumentException("Actual chunks per segment tier should be in [1, " + MAX_TIER_CHUNKS + "], range, " + actualChunksPerSegmentTier + " given"); this.actualChunksPerSegmentTier = actualChunksPerSegmentTier; return this; } private void checkActualChunksPerSegmentTierIsConfiguredOnlyIfOtherLowLevelConfigsAreManual() { if (actualChunksPerSegmentTier > 0) { if (entriesPerSegment <= 0 || (actualChunkSize <= 0 && !constantlySizedEntries()) || actualSegments <= 0) throw new IllegalStateException("Actual chunks per segment tier could be " + "configured only if other three low level configs are manual: " + "entriesPerSegment(), actualSegments() and actualChunkSize(), unless " + "both keys and value sizes are constant"); } } private void checkActualChunksPerSegmentGreaterOrEqualToEntries() { if (actualChunksPerSegmentTier > 0 && entriesPerSegment > 0 && entriesPerSegment > actualChunksPerSegmentTier) { throw new IllegalStateException("Entries per segment couldn't be greater than " + "actual chunks per segment tier. 
Entries: " + entriesPerSegment + ", " + "chunks: " + actualChunksPerSegmentTier + " is configured"); } } long actualChunksPerSegmentTier() { if (actualChunksPerSegmentTier > 0) return actualChunksPerSegmentTier; return chunksPerSegmentTier(entriesPerSegment()); } private long chunksPerSegmentTier(long entriesPerSegment) { return roundUp(entriesPerSegment * averageChunksPerEntry()); } @Override public ChronicleMapBuilder<K, V> minSegments(int minSegments) { checkSegments(minSegments); this.minSegments = minSegments; return this; } int minSegments() { return Math.max(estimateSegments(), minSegments); } private int estimateSegments() { return (int) Math.min(nextPower2(entries() / 32, 1), estimateSegmentsBasedOnSize()); } //TODO review because this heuristic doesn't seem to perform well private int estimateSegmentsBasedOnSize() { // the idea is that if values are huge, operations on them (and simply ser/deser) // could take long time, so we want more segment to minimize probablity that // two or more concurrent write ops will go to the same segment, and then all but one of // these threads will wait for long time. int segmentsForEntries = estimateSegmentsForEntries(entries()); double averageValueSize = averageValueSize(); return averageValueSize >= 1000000 ? segmentsForEntries * 16 : averageValueSize >= 100000 ? segmentsForEntries * 8 : averageValueSize >= 10000 ? segmentsForEntries * 4 : averageValueSize >= 1000 ? segmentsForEntries * 2 : segmentsForEntries; } private static int estimateSegmentsForEntries(long size) { if (size > 200 << 20) return 256; if (size >= 1 << 20) return 128; if (size >= 128 << 10) return 64; if (size >= 16 << 10) return 32; if (size >= 4 << 10) return 16; if (size >= 1 << 10) return 8; return 1; } @Override public ChronicleMapBuilder<K, V> actualSegments(int actualSegments) { checkSegments(actualSegments); this.actualSegments = actualSegments; return this; } int actualSegments() { if (actualSegments > 0) return actualSegments; if (entriesPerSegment > 0) { return (int) segmentsGivenEntriesPerSegmentFixed(entriesPerSegment); } // Try to fit 4 bytes per hash lookup slot, then 8. Trying to apply small slot // size (=> segment size, because slot size depends on segment size) not only because // they take less memory per entry (if entries are of KBs or MBs, it doesn't matter), but // also because if segment size is small, slot and free list are likely to lie on a single // memory page, reducing number of memory pages to update, if Chronicle Map is persisted. // Actually small segments are all ways better: many segments => better parallelism, lesser // pauses for per-key operations, if parallel/background operation blocks the segment for // the whole time while it operates on it (like iteration, probably replication background // thread will require some level of full segment lock, however currently if doesn't, in // future durability background thread could update slot states), because smaller segments // contain less entries/slots and are processed faster. // // The only problem with small segments is that due to probability theory, if there are // a lot of segments each of little number of entries, difference between most filled // and least filled segment in the Chronicle Map grows. (Number of entries in a segment is // Poisson-distributed with mean = average number of entries per segment.) It is meaningful, // because segment tiering is exceptional mechanism, only very few segments should be // tiered, if any, normally. 
So, we are required to allocate unnecessarily many entries per
        // segment. To compensate for this, at least on Linux, don't accept segment sizes that,
        // with the given entry sizes, lead to too-small total segment sizes in native memory
        // pages, see the comment in tryHashLookupSlotSize()
        long segments = tryHashLookupSlotSize(4);
        if (segments > 0)
            return (int) segments;
        int maxHashLookupEntrySize = aligned64BitMemoryOperationsAtomic() ? 8 : 4;
        long maxEntriesPerSegment =
                findMaxEntriesPerSegmentToFitHashLookupSlotSize(maxHashLookupEntrySize);
        long maxSegments = trySegments(maxEntriesPerSegment, MAX_SEGMENTS);
        if (maxSegments > 0L)
            return (int) maxSegments;
        throw new IllegalStateException("Max segments is " + MAX_SEGMENTS + ", configured so " +
                "many entries (" + entries() + ") or average chunks per entry is too high (" +
                averageChunksPerEntry() + ") that builder automatically decided to use " +
                (-maxSegments) + " segments");
    }

    private long tryHashLookupSlotSize(int hashLookupSlotSize) {
        long entriesPerSegment = findMaxEntriesPerSegmentToFitHashLookupSlotSize(
                hashLookupSlotSize);
        long entrySpaceSize = roundUp(entriesPerSegment * entrySizeInfo().averageEntrySize);
        // Not to lose too much on Linux because of "poor distribution" entry over-allocation.
        // This condition should likely filter cases when we target a very small hash lookup
        // size and the entry size is small.
        // * 5 => segment will lose not more than 20% of memory, 10% on average
        if (entrySpaceSize < OS.pageSize() * 5L)
            return -1;
        return trySegments(entriesPerSegment, MAX_SEGMENTS);
    }

    private long findMaxEntriesPerSegmentToFitHashLookupSlotSize(
            int targetHashLookupSlotSize) {
        long entriesPerSegment = 1L << 62;
        long step = entriesPerSegment / 2L;
        while (step > 0L) {
            if (hashLookupSlotBytes(entriesPerSegment) > targetHashLookupSlotSize)
                entriesPerSegment -= step;
            step /= 2L;
        }
        return entriesPerSegment - 1L;
    }

    private int hashLookupSlotBytes(long entriesPerSegment) {
        int valueBits = valueBits(chunksPerSegmentTier(entriesPerSegment));
        int keyBits = keyBits(entriesPerSegment, valueBits);
        return entrySize(keyBits, valueBits);
    }

    private long trySegments(long entriesPerSegment, int maxSegments) {
        long segments = segmentsGivenEntriesPerSegmentFixed(entriesPerSegment);
        segments = nextPower2(Math.max(segments, minSegments()), 1L);
        return segments <= maxSegments ? segments : -segments;
    }

    private long segmentsGivenEntriesPerSegmentFixed(long entriesPerSegment) {
        double precision = 1.0 / averageChunksPerEntry();
        long entriesPerSegmentShouldBe =
                roundDown(PoissonDistribution.meanByCumulativeProbabilityAndValue(
                        nonTieredSegmentsPercentile, entriesPerSegment, precision));
        long segments = divideRoundUp(entries(), entriesPerSegmentShouldBe);
        checkSegments(segments);
        if (minSegments > 0)
            segments = Math.max(minSegments, segments);
        return segments;
    }

    long tierHashLookupCapacity() {
        long entriesPerSegment = entriesPerSegment();
        long capacity = CompactOffHeapLinearHashTable.capacityFor(entriesPerSegment);
        if (actualSegments() > 1) {
            // if there is only 1 segment, there is no source of variance in segments filling
            long maxEntriesPerTier = PoissonDistribution.inverseCumulativeProbability(
                    entriesPerSegment, nonTieredSegmentsPercentile);
            while (maxEntriesPerTier > MAX_LOAD_FACTOR * capacity) {
                capacity *= 2;
            }
        }
        return capacity;
    }

    int segmentHeaderSize() {
        int segments = actualSegments();
        long pageSize = 4096;
        if (segments * (64 * 3) < (2 * pageSize)) // i. e.
<= 42 segments
            return 64 * 3; // cache line per header, plus one CL to the left, plus one to the right
        if (segments * (64 * 2) < (3 * pageSize)) // i. e. <= 96 segments
            return 64 * 2;
        // reduce false sharing unless we have a lot of segments.
        return segments <= 16 * 1024 ? 64 : 32;
    }

    /**
     * Configures if the maps created by this {@code ChronicleMapBuilder} should return {@code
     * null} instead of previous mapped values on {@link ChronicleMap#put(Object, Object)
     * ChronicleMap.put(key, value)} calls.
     *
     * <p>{@link Map#put(Object, Object) Map.put()} returns the previous value, functionality
     * which is rarely used but fairly cheap for simple in-process, on-heap implementations like
     * {@link HashMap}. But an off-heap collection has to create a new object and deserialize
     * the data from off-heap memory. A collection hiding remote queries over the network should
     * send the value back in addition to that. It's expensive for something you probably don't
     * use.
     *
     * <p>By default, of course, {@code ChronicleMap} conforms to the general {@code Map} contract
     * and returns the previous mapped value on {@code put()} calls.
     *
     * @param putReturnsNull {@code true} if you want {@link ChronicleMap#put(Object, Object)
     *                       ChronicleMap.put()} to not return the value that was replaced but
     *                       instead return {@code null}
     * @return this builder back
     * @see #removeReturnsNull(boolean)
     */
    public ChronicleMapBuilder<K, V> putReturnsNull(boolean putReturnsNull) {
        this.putReturnsNull = putReturnsNull;
        return this;
    }

    boolean putReturnsNull() {
        return putReturnsNull;
    }

    /**
     * Configures if the maps created by this {@code ChronicleMapBuilder} should return {@code
     * null} instead of the last mapped value on {@link ChronicleMap#remove(Object)
     * ChronicleMap.remove(key)} calls.
     *
     * <p>{@link Map#remove(Object) Map.remove()} returns the previous value, functionality which
     * is rarely used but fairly cheap for simple in-process, on-heap implementations like {@link
     * HashMap}. But an off-heap collection has to create a new object and deserialize the data
     * from off-heap memory. A collection hiding remote queries over the network should send
     * the value back in addition to that. It's expensive for something you probably don't use.
     *
     * <p>By default, of course, {@code ChronicleMap} conforms to the general {@code Map} contract
     * and returns the mapped value on {@code remove()} calls.
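     *
     * <p>For example (a minimal sketch; sizes are illustrative): <pre>{@code
     * ChronicleMap<String, String> map = ChronicleMapBuilder
     *     .of(String.class, String.class)
     *     .entries(1000)
     *     .averageKey("key").averageValue("value")
     *     .removeReturnsNull(true)
     *     .create();
     * map.put("k", "v");
     * assert map.remove("k") == null; // the removed value is not deserialized and returned}</pre>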
     *
     * @param removeReturnsNull {@code true} if you want {@link ChronicleMap#remove(Object)
     *                          ChronicleMap.remove()} to not return the value of the removed entry
     *                          but instead return {@code null}
     * @return this builder back
     * @see #putReturnsNull(boolean)
     */
    public ChronicleMapBuilder<K, V> removeReturnsNull(boolean removeReturnsNull) {
        this.removeReturnsNull = removeReturnsNull;
        return this;
    }

    boolean removeReturnsNull() {
        return removeReturnsNull;
    }

    @Override
    public ChronicleMapBuilder<K, V> maxBloatFactor(double maxBloatFactor) {
        if (isNaN(maxBloatFactor) || maxBloatFactor < 1.0 || maxBloatFactor > 1_000.0) {
            throw new IllegalArgumentException("maxBloatFactor should be in [1.0, 1_000.0] " +
                    "bounds, " + maxBloatFactor + " given");
        }
        this.maxBloatFactor = maxBloatFactor;
        return this;
    }

    @Override
    public ChronicleMapBuilder<K, V> allowSegmentTiering(boolean allowSegmentTiering) {
        this.allowSegmentTiering = allowSegmentTiering;
        return this;
    }

    @Override
    public ChronicleMapBuilder<K, V> nonTieredSegmentsPercentile(
            double nonTieredSegmentsPercentile) {
        if (isNaN(nonTieredSegmentsPercentile) ||
                nonTieredSegmentsPercentile <= 0.5 || nonTieredSegmentsPercentile >= 1.0) {
            throw new IllegalArgumentException("nonTieredSegmentsPercentile should be in (0.5, " +
                    "1.0) range, " + nonTieredSegmentsPercentile + " is given");
        }
        this.nonTieredSegmentsPercentile = nonTieredSegmentsPercentile;
        return this;
    }

    long maxExtraTiers() {
        if (!allowSegmentTiering)
            return 0;
        int actualSegments = actualSegments();
        // maxBloatFactor is a scale, so we do (- 1.0) to compute the _extra_ tiers
        return round((maxBloatFactor - 1.0) * actualSegments)
                // but to mitigate slight misconfiguration, and uneven distribution of entries
                // between segments, add 1.0 x actualSegments
                + actualSegments;
    }

    @Override
    public String toString() {
        return "ChronicleMapBuilder{" +
                "actualSegments=" + pretty(actualSegments) +
                ", minSegments=" + pretty(minSegments) +
                ", entriesPerSegment=" + pretty(entriesPerSegment) +
                ", actualChunksPerSegmentTier=" + pretty(actualChunksPerSegmentTier) +
                ", averageKeySize=" + pretty(averageKeySize) +
                ", sampleKeyForConstantSizeComputation=" + pretty(sampleKey) +
                ", averageValueSize=" + pretty(averageValueSize) +
                ", sampleValueForConstantSizeComputation=" + pretty(sampleValue) +
                ", actualChunkSize=" + pretty(actualChunkSize) +
                ", valueAlignment=" + valueAlignment() +
                ", entries=" + entries() +
                ", putReturnsNull=" + putReturnsNull() +
                ", removeReturnsNull=" + removeReturnsNull() +
                ", keyBuilder=" + keyBuilder +
                ", valueBuilder=" + valueBuilder +
                '}';
    }

    @SuppressWarnings("EqualsWhichDoesntCheckParameterClass")
    @Override
    public boolean equals(Object o) {
        return builderEquals(this, o);
    }

    @Override
    public int hashCode() {
        return toString().hashCode();
    }

    ChronicleMapBuilder<K, V> removedEntryCleanupTimeout(
            long removedEntryCleanupTimeout, TimeUnit unit) {
        if (unit.toMillis(removedEntryCleanupTimeout) < 1) {
            throw new IllegalArgumentException("timeout should be >= 1 millisecond, " +
                    removedEntryCleanupTimeout + " " + unit + " is given");
        }
        cleanupTimeout = removedEntryCleanupTimeout;
        cleanupTimeoutUnit = unit;
        return this;
    }

    ChronicleMapBuilder<K, V> cleanupRemovedEntries(boolean cleanupRemovedEntries) {
        this.cleanupRemovedEntries = cleanupRemovedEntries;
        return this;
    }

    @Override
    public ChronicleMapBuilder<K, V> keyReaderAndDataAccess(
            SizedReader<K> keyReader, @NotNull DataAccess<K> keyDataAccess) {
        keyBuilder.reader(keyReader);
        keyBuilder.dataAccess(keyDataAccess);
        return this;
    }

    @Override
    public ChronicleMapBuilder<K, V>
keyMarshallers( @NotNull SizedReader<K> keyReader, @NotNull SizedWriter<? super K> keyWriter) { keyBuilder.reader(keyReader); keyBuilder.writer(keyWriter); return this; } @Override public <M extends SizedReader<K> & SizedWriter<? super K>> ChronicleMapBuilder<K, V> keyMarshaller(@NotNull M sizedMarshaller) { return keyMarshallers(sizedMarshaller, sizedMarshaller); } @Override public ChronicleMapBuilder<K, V> keyMarshallers( @NotNull BytesReader<K> keyReader, @NotNull BytesWriter<? super K> keyWriter) { keyBuilder.reader(keyReader); keyBuilder.writer(keyWriter); return this; } @Override public <M extends BytesReader<K> & BytesWriter<? super K>> ChronicleMapBuilder<K, V> keyMarshaller(@NotNull M marshaller) { return keyMarshallers(marshaller, marshaller); } @Override public ChronicleMapBuilder<K, V> keySizeMarshaller(@NotNull SizeMarshaller keySizeMarshaller) { keyBuilder.sizeMarshaller(keySizeMarshaller); return this; } @Override public ChronicleMapBuilder<K, V> aligned64BitMemoryOperationsAtomic( boolean aligned64BitMemoryOperationsAtomic) { this.aligned64BitMemoryOperationsAtomic = aligned64BitMemoryOperationsAtomic; return this; } @Override public ChronicleMapBuilder<K, V> checksumEntries(boolean checksumEntries) { this.checksumEntries = checksumEntries ? ChecksumEntries.YES : ChecksumEntries.NO; return this; } boolean checksumEntries() { switch (checksumEntries) { case NO: return false; case YES: return true; case IF_PERSISTED: return persisted; default: throw new AssertionError(); } } boolean aligned64BitMemoryOperationsAtomic() { return aligned64BitMemoryOperationsAtomic; } /** * Configures the {@code DataAccess} and {@code SizedReader} used to serialize and deserialize * values to and from off-heap memory in maps, created by this builder. * * @param valueReader the new bytes &rarr; value object reader strategy * @param valueDataAccess the new strategy of accessing the values' bytes for writing * @return this builder back * @see #valueMarshallers(SizedReader, SizedWriter) * @see ChronicleHashBuilder#keyReaderAndDataAccess(SizedReader, DataAccess) */ public ChronicleMapBuilder<K, V> valueReaderAndDataAccess( SizedReader<V> valueReader, @NotNull DataAccess<V> valueDataAccess) { valueBuilder.reader(valueReader); valueBuilder.dataAccess(valueDataAccess); return this; } /** * Configures the marshallers, used to serialize/deserialize values to/from off-heap memory in * maps, created by this builder. * * @param valueReader the new bytes &rarr; value object reader strategy * @param valueWriter the new value object &rarr; bytes writer strategy * @return this builder back * @see #valueReaderAndDataAccess(SizedReader, DataAccess) * @see #valueSizeMarshaller(SizeMarshaller) * @see ChronicleHashBuilder#keyMarshallers(SizedReader, SizedWriter) */ public ChronicleMapBuilder<K, V> valueMarshallers( @NotNull SizedReader<V> valueReader, @NotNull SizedWriter<? super V> valueWriter) { valueBuilder.reader(valueReader); valueBuilder.writer(valueWriter); return this; } /** * Shortcut for {@link #valueMarshallers(SizedReader, SizedWriter) * valueMarshallers(sizedMarshaller, sizedMarshaller)}. */ public <M extends SizedReader<V> & SizedWriter<? super V>> ChronicleMapBuilder<K, V> valueMarshaller(@NotNull M sizedMarshaller) { return valueMarshallers(sizedMarshaller, sizedMarshaller); } /** * Configures the marshallers, used to serialize/deserialize values to/from off-heap memory in * maps, created by this builder. 
* * @param valueReader the new bytes &rarr; value object reader strategy * @param valueWriter the new value object &rarr; bytes writer strategy * @return this builder back * @see #valueReaderAndDataAccess(SizedReader, DataAccess) * @see #valueSizeMarshaller(SizeMarshaller) * @see ChronicleHashBuilder#keyMarshallers(BytesReader, BytesWriter) */ public ChronicleMapBuilder<K, V> valueMarshallers( @NotNull BytesReader<V> valueReader, @NotNull BytesWriter<? super V> valueWriter) { valueBuilder.reader(valueReader); valueBuilder.writer(valueWriter); return this; } /** * Shortcut for {@link #valueMarshallers(BytesReader, BytesWriter) * valueMarshallers(marshaller, marshaller)}. */ public <M extends BytesReader<V> & BytesWriter<? super V>> ChronicleMapBuilder<K, V> valueMarshaller(@NotNull M marshaller) { return valueMarshallers(marshaller, marshaller); } /** * Configures the marshaller used to serialize actual value sizes to off-heap memory in maps, * created by this builder. * * <p>Default value size marshaller is so-called "stop bit encoding" marshalling, unless {@link * #constantValueSizeBySample(Object)} or the builder statically knows the value size is * constant -- special constant size marshalling is used by default in these cases. * * @param valueSizeMarshaller the new marshaller, used to serialize actual value sizes to * off-heap memory * @return this builder back * @see #keySizeMarshaller(SizeMarshaller) */ public ChronicleMapBuilder<K, V> valueSizeMarshaller( @NotNull SizeMarshaller valueSizeMarshaller) { valueBuilder.sizeMarshaller(valueSizeMarshaller); return this; } /** * Specifies the function to obtain a value for the key during {@link ChronicleMap#acquireUsing * acquireUsing()} calls, if the key is absent in the map, created by this builder. * * @param defaultValueProvider the strategy to obtain a default value by the absent key * @return this builder object back */ public ChronicleMapBuilder<K, V> defaultValueProvider( @NotNull DefaultValueProvider<K, V> defaultValueProvider) { Objects.requireNonNull(defaultValueProvider); this.defaultValueProvider = defaultValueProvider; return this; } ChronicleMapBuilder<K, V> replication(byte identifier) { if (identifier <= 0) throw new IllegalArgumentException("Identifier must be positive, " + identifier + " given"); this.replicationIdentifier = identifier; return this; } @Override public ChronicleMap<K, V> createPersistedTo(File file) throws IOException { // clone() to make this builder instance thread-safe, because createWithFile() method // computes some state based on configurations, but doesn't synchronize on configuration // changes. return clone().createWithFile(file, false, false); } @Override public ChronicleMap<K, V> createOrRecoverPersistedTo(File file) throws IOException { return file.exists() ? recoverPersistedTo(file, true) : createPersistedTo(file); } @Override public ChronicleMap<K, V> recoverPersistedTo(File file, boolean sameBuilderConfig) throws IOException { return clone().createWithFile(file, true, sameBuilderConfig); } @Override public ChronicleMap<K, V> create() { // clone() to make this builder instance thread-safe, because createWithoutFile() method // computes some state based on configurations, but doesn't synchronize on configuration // changes. 
        return clone().createWithoutFile();
    }

    ChronicleMap<K, V> createWithFile(
            File file, boolean recover, boolean overrideBuilderConfig) throws IOException {
        // overrideBuilderConfig is only allowed together with recover
        if (overrideBuilderConfig && !recover)
            throw new AssertionError("overrideBuilderConfig -> recover");
        replicated = replicationIdentifier != -1;
        persisted = true;
        if (!file.exists()) {
            if (recover)
                throw new FileNotFoundException("file " + file + " should exist for recovery");
            //noinspection ResultOfMethodCallIgnored
            file.createNewFile();
        }
        try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) {
            if (raf.length() > 0)
                return openWithExistingFile(file, raf, recover, overrideBuilderConfig);
            VanillaChronicleMap<K, V, ?> map = null;
            ByteBuffer headerBuffer = null;
            boolean newFile;
            FileChannel fileChannel = raf.getChannel();
            try (FileLock ignored = fileChannel.lock()) {
                if (raf.length() == 0) {
                    map = newMap();
                    headerBuffer = writeHeader(fileChannel, map);
                    newFile = true;
                } else {
                    newFile = false;
                }
            }
            if (newFile) {
                int headerSize = headerBuffer.remaining();
                return createWithNewFile(map, file, raf, headerBuffer, headerSize);
            } else {
                return openWithExistingFile(file, raf, recover, overrideBuilderConfig);
            }
        }
    }

    /**
     * @return size of the self bootstrapping header
     */
    private int waitUntilReady(RandomAccessFile raf, boolean recover) throws IOException {
        FileChannel fileChannel = raf.getChannel();

        ByteBuffer sizeWordBuffer = ByteBuffer.allocate(4);
        sizeWordBuffer.order(LITTLE_ENDIAN);

        // 60 * 10 attempts with a 100 ms wait each = 1 minute total wait
        int attempts = 60 * 10;
        int lastReadHeaderSize = -1;
        for (int attempt = 0; attempt < attempts; attempt++) {
            if (raf.length() >= SELF_BOOTSTRAPPING_HEADER_OFFSET) {
                sizeWordBuffer.clear();
                readFully(fileChannel, SIZE_WORD_OFFSET, sizeWordBuffer);
                if (sizeWordBuffer.remaining() == 0) {
                    int sizeWord = sizeWordBuffer.getInt(0);
                    lastReadHeaderSize = SizePrefixedBlob.extractSize(sizeWord);
                    if (SizePrefixedBlob.isReady(sizeWord))
                        return lastReadHeaderSize;
                }
                // The only possible reason why fewer than 4 bytes are read is that the file is
                // truncated between the length() and read() calls, so continue to wait
            }
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                if (recover) {
                    break;
                } else {
                    throw new IOException(e);
                }
            }
        }
        if (recover) {
            if (lastReadHeaderSize == -1) {
                throw new ChronicleHashRecoveryFailedException("File header is not recoverable");
            } else {
                return lastReadHeaderSize;
            }
        } else {
            throw new IOException("Unable to wait until the file is ready, likely the process " +
                    "which created the file crashed or hung for more than 1 minute");
        }
    }

    /**
     * @return a ByteBuffer whose [position, limit) range contains the self bootstrapping header
     */
    private ByteBuffer checkSumSelfBootstrappingHeader(
            RandomAccessFile raf, int headerSize, boolean recover) throws IOException {
        if (raf.length() < headerSize + SELF_BOOTSTRAPPING_HEADER_OFFSET) {
            throw throwRecoveryOrReturnIOException("The file is shorter than the header size: " +
                    headerSize + ", file size: " + raf.length(), recover);
        }
        FileChannel fileChannel = raf.getChannel();
        ByteBuffer headerBuffer = ByteBuffer.allocate(
                SELF_BOOTSTRAPPING_HEADER_OFFSET + headerSize);
        headerBuffer.order(LITTLE_ENDIAN);
        readFully(fileChannel, 0, headerBuffer);
        if (headerBuffer.remaining() > 0) {
            throw throwRecoveryOrReturnIOException("Unable to read the header fully, " +
                    headerBuffer.remaining() + " is remaining to read, likely the file was " +
                    "truncated", recover);
        }
        int sizeWord = headerBuffer.getInt(SIZE_WORD_OFFSET);
        if (!SizePrefixedBlob.isReady(sizeWord)) {
            if (recover) {
                LOG.error("size-prefixed
blob readiness bit is set to NOT_READY"); // the bit will be overwritten to READY in the end of recovery procedure, so nothing // to fix right here } else { throw new IOException("sizeWord is not ready: " + sizeWord); } } long checkSum = headerChecksum(headerBuffer, headerSize); long storedChecksum = headerBuffer.getLong(HEADER_OFFSET); if (storedChecksum != checkSum) { throw throwRecoveryOrReturnIOException("Self Bootstrapping Header checksum doesn't " + "match the stored checksum: " + storedChecksum + ", computed: " + checkSum, recover); } headerBuffer.position(SELF_BOOTSTRAPPING_HEADER_OFFSET); return headerBuffer; } private IOException throwRecoveryOrReturnIOException(String message, boolean recover) { if (recover) { throw new ChronicleHashRecoveryFailedException(message); } else { return new IOException(message); } } private static long headerChecksum(ByteBuffer headerBuffer, int headerSize) { return LongHashFunction.xx_r39().hashBytes(headerBuffer, SIZE_WORD_OFFSET, headerSize + 4); } private ChronicleMap<K, V> createWithNewFile( VanillaChronicleMap<K, V, ?> map, File file, RandomAccessFile raf, ByteBuffer headerBuffer, int headerSize) throws IOException { FileChannel fileChannel = raf.getChannel(); map.initBeforeMapping(file, fileChannel, headerBuffer.limit()); map.createMappedStoreAndSegments(file, raf); establishReplication(map); commitChronicleMapReady(map, raf, headerBuffer, headerSize); return map; } private ChronicleMap<K, V> openWithExistingFile( File file, RandomAccessFile raf, boolean recover, boolean overrideBuilderConfig) throws IOException { try { int headerSize = waitUntilReady(raf, recover); FileChannel fileChannel = raf.getChannel(); ByteBuffer headerBuffer; if (overrideBuilderConfig) { VanillaChronicleMap<K, V, ?> mapObjectForHeaderOverwrite = newMap(); headerBuffer = writeHeader(fileChannel, mapObjectForHeaderOverwrite); headerSize = headerBuffer.remaining(); } else { headerBuffer = checkSumSelfBootstrappingHeader(raf, headerSize, recover); assert headerSize == headerBuffer.remaining(); } Bytes<ByteBuffer> headerBytes = Bytes.wrapForRead(headerBuffer); headerBytes.readPosition(headerBuffer.position()); headerBytes.readLimit(headerBuffer.limit()); Wire wire = new TextWire(headerBytes); VanillaChronicleMap<K, V, ?> map = wire.getValueIn().typedMarshallable(); assert map != null; map.initBeforeMapping(file, fileChannel, headerBuffer.limit()); long expectedFileLength = map.expectedFileSize(); if (!recover && expectedFileLength != file.length()) { throw new IOException("The file " + file + " the map is serialized from " + "has unexpected length " + file.length() + ", probably corrupted. 
" + "Expected length is " + expectedFileLength); } map.initTransientsFromBuilder(this); if (!recover) { map.createMappedStoreAndSegments(file, raf); } else { if (!overrideBuilderConfig) writeNotReady(fileChannel, headerBuffer, headerSize); // if overrideBuilderConfig = true, readiness bit is already set // in writeHeader() call map.recover(file, raf); } establishReplication(map); commitChronicleMapReady(map, raf, headerBuffer, headerSize); return map; } catch (Exception e) { if (recover && !(e instanceof IOException) && !(e instanceof ChronicleHashRecoveryFailedException)) throw new ChronicleHashRecoveryFailedException(e); throw e; } } private static void writeNotReady( FileChannel fileChannel, ByteBuffer headerBuffer, int headerSize) throws IOException { //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, NOT_READY | DATA | headerSize); headerBuffer.clear().position(SIZE_WORD_OFFSET).limit(SIZE_WORD_OFFSET + 4); writeFully(fileChannel, SIZE_WORD_OFFSET, headerBuffer); } /** * @return ByteBuffer, with self bootstrapping header in [position, limit) range */ private static <K, V> ByteBuffer writeHeader( FileChannel fileChannel, VanillaChronicleMap<K, V, ?> map) throws IOException { ByteBuffer headerBuffer = ByteBuffer.allocate( SELF_BOOTSTRAPPING_HEADER_OFFSET + MAX_BOOTSTRAPPING_HEADER_SIZE); headerBuffer.order(LITTLE_ENDIAN); Bytes<ByteBuffer> headerBytes = Bytes.wrapForWrite(headerBuffer); headerBytes.writePosition(SELF_BOOTSTRAPPING_HEADER_OFFSET); Wire wire = new TextWire(headerBytes); wire.getValueOut().typedMarshallable(map); int headerLimit = (int) headerBytes.writePosition(); int headerSize = headerLimit - SELF_BOOTSTRAPPING_HEADER_OFFSET; // First set readiness bit to READY, to compute checksum correctly //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, READY | DATA | headerSize); long checksum = headerChecksum(headerBuffer, headerSize); headerBuffer.putLong(HEADER_OFFSET, checksum); // Set readiness bit to NOT_READY, because the Chronicle Map instance is not actually // ready yet //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, NOT_READY | DATA | headerSize); // Write the size-prefixed blob to the file headerBuffer.position(0).limit(headerLimit); writeFully(fileChannel, 0, headerBuffer); headerBuffer.position(SELF_BOOTSTRAPPING_HEADER_OFFSET); return headerBuffer; } private static void commitChronicleMapReady( VanillaChronicleHash map, RandomAccessFile raf, ByteBuffer headerBuffer, int headerSize) throws IOException { FileChannel fileChannel = raf.getChannel(); // see HCOLL-396 map.msync(raf); //noinspection PointlessBitwiseExpression headerBuffer.putInt(SIZE_WORD_OFFSET, READY | DATA | headerSize); headerBuffer.clear().position(SIZE_WORD_OFFSET).limit(SIZE_WORD_OFFSET + 4); writeFully(fileChannel, SIZE_WORD_OFFSET, headerBuffer); } ChronicleMap<K, V> createWithoutFile() { replicated = replicationIdentifier != -1; persisted = false; try { VanillaChronicleMap<K, V, ?> map = newMap(); // TODO this method had been moved // if(OS.warnOnWindows(map.sizeInBytesWithoutTiers())){ // throw new IllegalStateException("Windows cannot support this configuration"); // } BytesStore bytesStore = lazyNativeBytesStoreWithFixedCapacity(map.sizeInBytesWithoutTiers()); map.createMappedStoreAndSegments(bytesStore); establishReplication(map); return map; } catch (IOException e) { // file-less version should never trigger an IOException. 
throw new AssertionError(e); } } private VanillaChronicleMap<K, V, ?> newMap() throws IOException { preMapConstruction(); if (replicated) { return new ReplicatedChronicleMap<>(this); } else { return new VanillaChronicleMap<>(this); } } void preMapConstruction() { averageKeySize = preMapConstruction( keyBuilder, averageKeySize, averageKey, sampleKey, "Key"); averageValueSize = preMapConstruction( valueBuilder, averageValueSize, averageValue, sampleValue, "Value"); stateChecks(); } private <E> double preMapConstruction( SerializationBuilder<E> builder, double configuredAverageSize, E average, E sample, String dim) { if (sample != null) { return builder.constantSizeBySample(sample); } else { double result = averageKeyOrValueSize(configuredAverageSize, builder, average); if (!isNaN(result) || allLowLevelConfigurationsAreManual()) { return result; } else { throw new IllegalStateException(dim + " size in serialized form must " + "be configured in ChronicleMap, at least approximately.\nUse builder" + ".average" + dim + "()/.constant" + dim + "SizeBySample()/" + ".average" + dim + "Size() methods to configure the size"); } } } private void stateChecks() { checkActualChunksPerSegmentTierIsConfiguredOnlyIfOtherLowLevelConfigsAreManual(); checkActualChunksPerSegmentGreaterOrEqualToEntries(); } private boolean allLowLevelConfigurationsAreManual() { return actualSegments > 0 && entriesPerSegment > 0 && actualChunksPerSegmentTier > 0 && actualChunkSize > 0; } private void establishReplication( VanillaChronicleMap<K, V, ?> map) throws IOException { if (map instanceof ReplicatedChronicleMap) { ReplicatedChronicleMap result = (ReplicatedChronicleMap) map; if (cleanupRemovedEntries) establishCleanupThread(result); } } private void establishCleanupThread(ReplicatedChronicleMap map) { OldDeletedEntriesCleanup cleanup = new OldDeletedEntriesCleanup(map); NamedThreadFactory threadFactory = new NamedThreadFactory("cleanup thread for map persisted at " + map.file()); ExecutorService executor = Executors.newSingleThreadExecutor(threadFactory); executor.submit(cleanup); map.addCloseable(cleanup); // WARNING this relies on the fact that ReplicatedChronicleMap closes closeables in the same // order as they are added, i. e. OldDeletedEntriesCleanup instance close()d before the // following closeable map.addCloseable(() -> { executor.shutdown(); try { executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { LOG.error("", e); } }); } /** * Inject your SPI code around basic {@code ChronicleMap}'s operations with entries: * removing entries, replacing the entries' value and inserting the new entry. * * <p>This affects behaviour of ordinary map.put(), map.remove(), etc. calls, as well as removes * and replacing values <i>during iterations</i>, <i>remote map calls</i> and * <i>internal replication operations</i>. */ public ChronicleMapBuilder<K, V> entryOperations(MapEntryOperations<K, V, ?> entryOperations) { Objects.requireNonNull(entryOperations); this.entryOperations = entryOperations; return this; } /** * Inject your SPI around logic of all {@code ChronicleMap}'s operations with individual keys: * from {@link ChronicleMap#containsKey} to {@link ChronicleMap#acquireUsing} and * {@link ChronicleMap#merge}. * * <p>This affects behaviour of ordinary map calls, as well as <i>remote calls</i>. 
*/ public ChronicleMapBuilder<K, V> mapMethods(MapMethods<K, V, ?> mapMethods) { Objects.requireNonNull(mapMethods); this.methods = mapMethods; return this; } ChronicleMapBuilder<K, V> remoteOperations( MapRemoteOperations<K, V, ?> remoteOperations) { Objects.requireNonNull(remoteOperations); this.remoteOperations = remoteOperations; return this; } }
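A minimal usage sketch of the persisted-creation API shown above (illustrative only: the file path, key/value types and sizes are assumptions, not taken from the original source):

import java.io.File;
import java.io.IOException;
import net.openhft.chronicle.map.ChronicleMap;
import net.openhft.chronicle.map.ChronicleMapBuilder;

public class PersistedMapSketch {
    public static void main(String[] args) throws IOException {
        File file = new File("example-map.dat"); // hypothetical path
        // createOrRecoverPersistedTo() creates the file on the first run and
        // recovers from the existing file on subsequent runs, as documented above
        try (ChronicleMap<Integer, CharSequence> map = ChronicleMapBuilder
                .of(Integer.class, CharSequence.class)
                .averageValue("sample-value")
                .entries(1_000)
                .createOrRecoverPersistedTo(file)) {
            map.put(1, "one");
        }
    }
}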
Synchronize ChronicleMap creation persisted to some file within JVM
src/main/java/net/openhft/chronicle/map/ChronicleMapBuilder.java
Synchronize ChronicleMap creation persisted to some file within JVM
<ide><path>src/main/java/net/openhft/chronicle/map/ChronicleMapBuilder.java
<ide> import net.openhft.chronicle.bytes.Byteable;
<ide> import net.openhft.chronicle.bytes.Bytes;
<ide> import net.openhft.chronicle.bytes.BytesStore;
<add>import net.openhft.chronicle.core.Jvm;
<ide> import net.openhft.chronicle.core.OS;
<ide> import net.openhft.chronicle.hash.ChronicleHashBuilder;
<ide> import net.openhft.chronicle.hash.ChronicleHashRecoveryFailedException;
<ide> import java.util.HashMap;
<ide> import java.util.Map;
<ide> import java.util.Objects;
<add>import java.util.concurrent.ConcurrentHashMap;
<ide> import java.util.concurrent.ExecutorService;
<ide> import java.util.concurrent.Executors;
<ide> import java.util.concurrent.TimeUnit;
<ide>
<ide>     private static int MAX_BOOTSTRAPPING_HEADER_SIZE = (int) MemoryUnit.KILOBYTES.toBytes(16);
<ide>
<add>    private static final ConcurrentHashMap<File, ChronicleMap>
<add>            concurrentPersistedChronicleMapCreationControl = new ConcurrentHashMap<>(128);
<add>
<add>    interface CreateMap<K, V> {
<add>        ChronicleMap<K, V> createMap() throws IOException;
<add>    }
<add>
<add>    /**
<add>     * When Chronicle Maps are created using the {@link #createPersistedTo(File)},
<add>     * {@link #recoverPersistedTo(File, boolean)} or {@link #createOrRecoverPersistedTo(File)}
<add>     * methods, a file lock on the Chronicle Map's file is acquired, which shouldn't be done
<add>     * from concurrent threads within the same JVM process. So creation of Chronicle Maps
<add>     * persisted to the same File should be synchronized across the JVM's threads. A simple way
<add>     * would be to synchronize on some static lock object, but that would serialize all
<add>     * Chronicle Map creations (persisted to any files); ConcurrentHashMap#compute() gives more
<add>     * scalability. The ConcurrentHashMap is effectively used for lock striping only; the
<add>     * entries are removed immediately after compute() returns.
<add>     */
<add>    private static <K, V> ChronicleMap<K, V> createMapFileSynchronized(
<add>            File file, CreateMap<K, V> createMap) throws IOException {
<add>        ChronicleMap map = concurrentPersistedChronicleMapCreationControl.compute(file, (k, v) -> {
<add>            try {
<add>                return createMap.createMap();
<add>            } catch (IOException e) {
<add>                throw Jvm.rethrow(e);
<add>            }
<add>        });
<add>        concurrentPersistedChronicleMapCreationControl.remove(file);
<add>        //noinspection unchecked
<add>        return map;
<add>    }
<add>
<ide>     // not final because of cloning
<ide>     private ChronicleMapBuilderPrivateAPI<K, V> privateAPI =
<ide>             new ChronicleMapBuilderPrivateAPI<>(this);
<ide>         // clone() to make this builder instance thread-safe, because createWithFile() method
<ide>         // computes some state based on configurations, but doesn't synchronize on configuration
<ide>         // changes.
<del>        return clone().createWithFile(file, false, false);
<add>        return createMapFileSynchronized(file, () -> clone().createWithFile(file, false, false));
<ide>     }
<ide>
<ide>     @Override
<ide>     @Override
<ide>     public ChronicleMap<K, V> recoverPersistedTo(File file, boolean sameBuilderConfig)
<ide>             throws IOException {
<del>        return clone().createWithFile(file, true, sameBuilderConfig);
<add>        return createMapFileSynchronized(file,
<add>                () -> clone().createWithFile(file, true, sameBuilderConfig));
<ide>     }
<ide>
<ide>     @Override
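The pattern added in the diff above generalizes; here is a minimal standalone sketch (class and method names are illustrative, not part of the commit) of using ConcurrentHashMap.compute() for per-key mutual exclusion while keeping creations for different files parallel:

import java.io.File;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

public class PerFileCreationControl {
    private static final ConcurrentHashMap<File, Object> CONTROL = new ConcurrentHashMap<>(128);

    // compute() holds the lock for this key's bin, so two threads passing the same File
    // run the factory one after another; different files usually proceed in parallel.
    static <T> T createSynchronized(File file, Supplier<T> factory) {
        @SuppressWarnings("unchecked")
        T result = (T) CONTROL.compute(file, (k, v) -> factory.get());
        CONTROL.remove(file); // the map serves for lock striping only, so drop the entry
        return result;
    }
}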
Java
bsd-2-clause
fc74a3b0b23ed30b21633a16319054f1aaf17c64
0
scifio/scifio
// // FileStitcher.java // /* LOCI Bio-Formats package for reading and converting biological file formats. Copyright (C) 2005-@year@ Melissa Linkert, Curtis Rueden, Chris Allan, Eric Kjellman and Brian Loranger. This program is free software; you can redistribute it and/or modify it under the terms of the GNU Library General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public License for more details. You should have received a copy of the GNU Library General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package loci.formats; import java.awt.image.BufferedImage; import java.io.*; import java.lang.reflect.InvocationTargetException; import java.math.BigInteger; import java.util.*; /** * Logic to stitch together files with similar names. * Assumes that all files have the same dimensions. * * <dl><dt><b>Source code:</b></dt> * <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/loci/formats/FileStitcher.java">Trac</a>, * <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/loci/formats/FileStitcher.java">SVN</a></dd></dl> */ public class FileStitcher implements IFormatReader { // -- Fields -- /** FormatReader to use as a template for constituent readers. */ private IFormatReader reader; /** * Whether string ids given should be treated * as file patterns rather than single file paths. */ private boolean patternIds = false; /** Current file pattern string. */ private String currentId; /** File pattern object used to build the list of files. */ private FilePattern fp; /** Axis guesser object used to guess which dimensional axes are which. */ private AxisGuesser[] ag; /** The matching files. */ private String[][] files; /** Used files list. */ private String[] usedFiles; /** Reader used for each file. */ private IFormatReader[][] readers; /** Blank buffered image, for use when image counts vary between files. */ private BufferedImage[] blankImage; /** Blank image bytes, for use when image counts vary between files. */ private byte[][] blankBytes; /** Blank buffered thumbnail, for use when image counts vary between files. */ private BufferedImage[] blankThumb; /** Blank thumbnail bytes, for use when image counts vary between files. */ private byte[][] blankThumbBytes; /** Number of images per file. */ private int[] imagesPerFile; /** Dimensional axis lengths per file. */ private int[] sizeZ, sizeC, sizeT; /** Component lengths for each axis type. */ private int[][] lenZ, lenC, lenT; /** Core metadata. */ private CoreMetadata core; /** Current series number. */ private int series; private String[] seriesBlocks; private Vector fileVector; private Vector seriesNames; // -- Constructors -- /** Constructs a FileStitcher around a new image reader. */ public FileStitcher() { this(new ImageReader()); } /** * Constructs a FileStitcher around a new image reader. * @param patternIds Whether string ids given should be treated as file * patterns rather than single file paths. */ public FileStitcher(boolean patternIds) { this(new ImageReader(), patternIds); } /** * Constructs a FileStitcher with the given reader. * @param r The reader to use for reading stitched files. 
*/ public FileStitcher(IFormatReader r) { this(r, false); } /** * Constructs a FileStitcher with the given reader. * @param r The reader to use for reading stitched files. * @param patternIds Whether string ids given should be treated as file * patterns rather than single file paths. */ public FileStitcher(IFormatReader r, boolean patternIds) { reader = r; this.patternIds = patternIds; } // -- FileStitcher API methods -- /** Gets the wrapped reader prototype. */ public IFormatReader getReader() { return reader; } /** * Gets the axis type for each dimensional block. * @return An array containing values from the enumeration: * <ul> * <li>AxisGuesser.Z_AXIS: focal planes</li> * <li>AxisGuesser.T_AXIS: time points</li> * <li>AxisGuesser.C_AXIS: channels</li> * <li>AxisGuesser.S_AXIS: series</li> * </ul> */ public int[] getAxisTypes() { FormatTools.assertId(currentId, true, 2); return ag[getSeries()].getAxisTypes(); } /** * Sets the axis type for each dimensional block. * @param axes An array containing values from the enumeration: * <ul> * <li>AxisGuesser.Z_AXIS: focal planes</li> * <li>AxisGuesser.T_AXIS: time points</li> * <li>AxisGuesser.C_AXIS: channels</li> * <li>AxisGuesser.S_AXIS: series</li> * </ul> */ public void setAxisTypes(int[] axes) throws FormatException { FormatTools.assertId(currentId, true, 2); ag[getSeries()].setAxisTypes(axes); computeAxisLengths(); } /** Gets the file pattern object used to build the list of files. */ public FilePattern getFilePattern() { FormatTools.assertId(currentId, true, 2); return fp; } /** * Gets the axis guesser object used to guess * which dimensional axes are which. */ public AxisGuesser getAxisGuesser() { FormatTools.assertId(currentId, true, 2); return ag[getSeries()]; } /** * Finds the file pattern for the given ID, based on the state of the file * stitcher. Takes both ID map entries and the patternIds flag into account. 
*/ public FilePattern findPattern(String id) { FormatTools.assertId(currentId, true, 2); if (!patternIds) { // find the containing pattern Hashtable map = Location.getIdMap(); String pattern = null; if (map.containsKey(id)) { // search ID map for pattern, rather than files on disk String[] idList = new String[map.size()]; Enumeration en = map.keys(); for (int i=0; i<idList.length; i++) { idList[i] = (String) en.nextElement(); } pattern = FilePattern.findPattern(id, null, idList); } else { // id is an unmapped file path; look to similar files on disk pattern = FilePattern.findPattern(new Location(id)); } if (pattern != null) id = pattern; } return new FilePattern(id); } // -- IFormatReader API methods -- /* @see IFormatReader#isThisType(byte[]) */ public boolean isThisType(byte[] block) { return reader.isThisType(block); } /* @see IFormatReader#setId(String) */ public void setId(String id) throws FormatException, IOException { if (!id.equals(currentId)) initFile(id); } /* @see IFormatReader#setId(String, boolean) */ public void setId(String id, boolean force) throws FormatException, IOException { if (!id.equals(currentId) || force) initFile(id); } /* @see IFormatReader#getImageCount() */ public int getImageCount() { FormatTools.assertId(currentId, true, 2); return core.imageCount[getSeries()]; } /* @see IFormatReader#isRGB() */ public boolean isRGB() { FormatTools.assertId(currentId, true, 2); return core.rgb[getSeries()]; } /* @see IFormatReader#getSizeX() */ public int getSizeX() { FormatTools.assertId(currentId, true, 2); return core.sizeX[getSeries()]; } /* @see IFormatReader#getSizeY() */ public int getSizeY() { FormatTools.assertId(currentId, true, 2); return core.sizeY[getSeries()]; } /* @see IFormatReader#getSizeZ() */ public int getSizeZ() { FormatTools.assertId(currentId, true, 2); return core.sizeZ[getSeries()]; } /* @see IFormatReader#getSizeC() */ public int getSizeC() { FormatTools.assertId(currentId, true, 2); return core.sizeC[getSeries()]; } /* @see IFormatReader#getSizeT() */ public int getSizeT() { FormatTools.assertId(currentId, true, 2); return core.sizeT[getSeries()]; } /* @see IFormatReader#getPixelType() */ public int getPixelType() { FormatTools.assertId(currentId, true, 2); return core.pixelType[getSeries()]; } /* @see IFormatReader#getEffectiveSizeC() */ public int getEffectiveSizeC() { FormatTools.assertId(currentId, true, 2); return getImageCount() / (getSizeZ() * getSizeT()); } /* @see IFormatReader#getRGBChannelCount() */ public int getRGBChannelCount() { FormatTools.assertId(currentId, true, 2); return getSizeC() / getEffectiveSizeC(); } /* @see IFormatReader#isIndexed() */ public boolean isIndexed() { FormatTools.assertId(currentId, true, 2); return reader.isIndexed(); } /* @see IFormatReader#isFalseColor() */ public boolean isFalseColor() { FormatTools.assertId(currentId, true, 2); return reader.isFalseColor(); } /* @see IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); return reader.get8BitLookupTable(); } /* @see IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); return reader.get16BitLookupTable(); } /* @see IFormatReader#getChannelDimLengths() */ public int[] getChannelDimLengths() { FormatTools.assertId(currentId, true, 1); return core.cLengths[getSeries()]; } /* @see IFormatReader#getChannelDimTypes() */ public String[] getChannelDimTypes() { 
FormatTools.assertId(currentId, true, 1); return core.cTypes[getSeries()]; } /* @see IFormatReader#getThumbSizeX() */ public int getThumbSizeX() { FormatTools.assertId(currentId, true, 2); return reader.getThumbSizeX(); } /* @see IFormatReader#getThumbSizeY() */ public int getThumbSizeY() { FormatTools.assertId(currentId, true, 2); return reader.getThumbSizeY(); } /* @see IFormatReader#isLittleEndian() */ public boolean isLittleEndian() { FormatTools.assertId(currentId, true, 2); return reader.isLittleEndian(); } /* @see IFormatReader#getDimensionOrder() */ public String getDimensionOrder() { FormatTools.assertId(currentId, true, 2); return core.currentOrder[getSeries()]; } /* @see IFormatReader#isOrderCertain() */ public boolean isOrderCertain() { FormatTools.assertId(currentId, true, 2); return core.orderCertain[getSeries()]; } /* @see IFormatReader#isInterleaved() */ public boolean isInterleaved() { FormatTools.assertId(currentId, true, 2); return reader.isInterleaved(); } /* @see IFormatReader#isInterleaved(int) */ public boolean isInterleaved(int subC) { FormatTools.assertId(currentId, true, 2); return reader.isInterleaved(subC); } /* @see IFormatReader#openImage(int) */ public BufferedImage openImage(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openImage(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankImage[sno] == null) { blankImage[sno] = ImageTools.blankImage(core.sizeX[sno], core.sizeY[sno], sizeC[sno], getPixelType()); } return blankImage[sno]; } /* @see IFormatReader#openImage(int, int, int, int, int) */ public BufferedImage openImage(int no, int x, int y, int w, int h) throws FormatException, IOException { return openImage(no).getSubimage(x, y, w, h); } /* @see IFormatReader#openBytes(int) */ public byte[] openBytes(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openBytes(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankBytes[sno] == null) { int bytes = FormatTools.getBytesPerPixel(getPixelType()); blankBytes[sno] = new byte[core.sizeX[sno] * core.sizeY[sno] * bytes * getRGBChannelCount()]; } return blankBytes[sno]; } /* @see IFormatReader#openBytes(int, int, int, int, int) */ public byte[] openBytes(int no, int x, int y, int w, int h) throws FormatException, IOException { byte[] buffer = new byte[w * h * FormatTools.getBytesPerPixel(getPixelType()) * getRGBChannelCount()]; return openBytes(no, buffer, x, y, w, h); } /* @see IFormatReader#openBytes(int, byte[]) */ public byte[] openBytes(int no, byte[] buf) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openBytes(ino, buf); } // return a blank image to cover for the fact that // this file does not contain enough image planes Arrays.fill(buf, (byte) 0); return buf; } /* @see IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws 
FormatException, IOException { byte[] bytes = openBytes(no); int bpp = FormatTools.getBytesPerPixel(getPixelType()); int ch = getRGBChannelCount(); if (buf.length < w * h * bpp * ch) { throw new FormatException("Buffer too small."); } for (int yy=y; yy<y + h; yy++) { for (int xx=x; xx<x + w; xx++) { for (int cc=0; cc<ch; cc++) { int oldNdx = -1, newNdx = -1; if (isInterleaved()) { oldNdx = yy*getSizeX()*bpp*ch + xx*bpp*ch + cc*bpp; newNdx = (yy - y)*w*bpp*ch + (xx - x)*bpp*ch + cc*bpp; } else { oldNdx = cc*getSizeX()*getSizeY()*bpp + yy*getSizeX()*bpp + xx*bpp; newNdx = cc*w*h*bpp + (yy - y)*w*bpp + (xx - x)*bpp; } System.arraycopy(bytes, oldNdx, buf, newNdx, bpp); } } } return buf; } /* @see IFormatReader#openThumbImage(int) */ public BufferedImage openThumbImage(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openThumbImage(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankThumb[sno] == null) { blankThumb[sno] = ImageTools.blankImage(getThumbSizeX(), getThumbSizeY(), sizeC[sno], getPixelType()); } return blankThumb[sno]; } /* @see IFormatReader#openThumbBytes(int) */ public byte[] openThumbBytes(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openThumbBytes(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankThumbBytes[sno] == null) { int bytes = FormatTools.getBytesPerPixel(getPixelType()); blankThumbBytes[sno] = new byte[getThumbSizeX() * getThumbSizeY() * bytes * getRGBChannelCount()]; } return blankThumbBytes[sno]; } /* @see IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { if (readers == null) reader.close(fileOnly); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].close(fileOnly); } } } if (!fileOnly) { readers = null; blankImage = null; blankBytes = null; currentId = null; } } /* @see IFormatReader#close() */ public void close() throws IOException { if (readers == null) reader.close(); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].close(); } } } readers = null; blankImage = null; blankBytes = null; currentId = null; } /* @see IFormatReader#getSeriesCount() */ public int getSeriesCount() { FormatTools.assertId(currentId, true, 2); return core.sizeX.length; } /* @see IFormatReader#setSeries(int) */ public void setSeries(int no) { FormatTools.assertId(currentId, true, 2); int n = reader.getSeriesCount(); if (n > 1) reader.setSeries(no); else series = no; } /* @see IFormatReader#getSeries() */ public int getSeries() { FormatTools.assertId(currentId, true, 2); return series == 0 ? 
reader.getSeries() : series;
  }

  /* @see IFormatReader#setGroupFiles(boolean) */
  public void setGroupFiles(boolean group) {
    for (int i=0; i<readers.length; i++) {
      for (int j=0; j<readers[i].length; j++) {
        readers[i][j].setGroupFiles(group);
      }
    }
  }

  /* @see IFormatReader#isGroupFiles() */
  public boolean isGroupFiles() {
    return readers[0][0].isGroupFiles();
  }

  /* @see IFormatReader#fileGroupOption(String) */
  public int fileGroupOption(String id) throws FormatException, IOException {
    return readers[0][0].fileGroupOption(id);
  }

  /* @see IFormatReader#isMetadataComplete() */
  public boolean isMetadataComplete() {
    return readers[0][0].isMetadataComplete();
  }

  /* @see IFormatReader#setNormalized(boolean) */
  public void setNormalized(boolean normalize) {
    FormatTools.assertId(currentId, false, 2);
    if (readers == null) reader.setNormalized(normalize);
    else {
      for (int i=0; i<readers.length; i++) {
        for (int j=0; j<readers[i].length; j++) {
          readers[i][j].setNormalized(normalize);
        }
      }
    }
  }

  /* @see IFormatReader#isNormalized() */
  public boolean isNormalized() { return reader.isNormalized(); }

  /* @see IFormatReader#setMetadataCollected(boolean) */
  public void setMetadataCollected(boolean collect) {
    FormatTools.assertId(currentId, false, 2);
    if (readers == null) reader.setMetadataCollected(collect);
    else {
      for (int i=0; i<readers.length; i++) {
        for (int j=0; j<readers[i].length; j++) {
          readers[i][j].setMetadataCollected(collect);
        }
      }
    }
  }

  /* @see IFormatReader#isMetadataCollected() */
  public boolean isMetadataCollected() { return reader.isMetadataCollected(); }

  /* @see IFormatReader#setOriginalMetadataPopulated(boolean) */
  public void setOriginalMetadataPopulated(boolean populate) {
    FormatTools.assertId(currentId, false, 1);
    if (readers == null) reader.setOriginalMetadataPopulated(populate);
    else {
      for (int i=0; i<readers.length; i++) {
        for (int j=0; j<readers[i].length; j++) {
          readers[i][j].setOriginalMetadataPopulated(populate);
        }
      }
    }
  }

  /* @see IFormatReader#isOriginalMetadataPopulated() */
  public boolean isOriginalMetadataPopulated() {
    return reader.isOriginalMetadataPopulated();
  }

  /* @see IFormatReader#getUsedFiles() */
  public String[] getUsedFiles() {
    FormatTools.assertId(currentId, true, 2);

    // returning the files list directly here is fast, since we do not
    // have to call initFile on each constituent file; but we can only do so
    // when each constituent file does not itself have multiple used files
    if (reader.getUsedFiles().length > 1) {
      // each constituent file has multiple used files; we must build the list
      // this could happen with, e.g., a stitched collection of ICS/IDS pairs
      // we have no datasets structured this way, so this logic is untested
      if (usedFiles == null) {
        String[][][] used = new String[files.length][][];
        int total = 0;
        for (int i=0; i<files.length; i++) {
          used[i] = new String[files[i].length][]; // allocate the second dimension before use
          for (int j=0; j<files[i].length; j++) {
            try {
              readers[i][j].setId(files[i][j]);
            }
            catch (FormatException exc) {
              LogTools.trace(exc);
              return null;
            }
            catch (IOException exc) {
              LogTools.trace(exc);
              return null;
            }
            used[i][j] = readers[i][j].getUsedFiles();
            total += used[i][j].length;
          }
        }
        usedFiles = new String[total];
        for (int i=0, off=0; i<used.length; i++) {
          for (int j=0; j<used[i].length; j++) {
            System.arraycopy(used[i][j], 0, usedFiles, off, used[i][j].length);
            off += used[i][j].length;
          }
        }
      }
      return usedFiles;
    }
    // assume every constituent file has no other used files
    // this logic could fail if the first constituent has no extra used files,
    // but later constituents do; in practice, this scenario seems unlikely
    Vector v = new Vector();
    for
(int i=0; i<files.length; i++) { for (int j=0; j<files[i].length; j++) { v.add(files[i][j]); } } return (String[]) v.toArray(new String[0]); } /* @see IFormatReader#getCurrentFile() */ public String getCurrentFile() { return currentId; } /* @see IFormatReader#getIndex(int, int, int) */ public int getIndex(int z, int c, int t) { return FormatTools.getIndex(this, z, c, t); } /* @see IFormatReader#getZCTCoords(int) */ public int[] getZCTCoords(int index) { return FormatTools.getZCTCoords(this, index); } /* @see IFormatReader#getMetadataValue(String) */ public Object getMetadataValue(String field) { FormatTools.assertId(currentId, true, 2); return reader.getMetadataValue(field); } /* @see IFormatReader#getMetadata() */ public Hashtable getMetadata() { FormatTools.assertId(currentId, true, 2); return reader.getMetadata(); } /* @see IFormatReader#getCoreMetadata() */ public CoreMetadata getCoreMetadata() { FormatTools.assertId(currentId, true, 2); return core; } /* @see IFormatReader#setMetadataFiltered(boolean) */ public void setMetadataFiltered(boolean filter) { FormatTools.assertId(currentId, false, 2); reader.setMetadataFiltered(filter); } /* @see IFormatReader#isMetadataFiltered() */ public boolean isMetadataFiltered() { return reader.isMetadataFiltered(); } /* @see IFormatReader#setMetadataStore(MetadataStore) */ public void setMetadataStore(MetadataStore store) { FormatTools.assertId(currentId, false, 2); reader.setMetadataStore(store); } /* @see IFormatReader#getMetadataStore() */ public MetadataStore getMetadataStore() { FormatTools.assertId(currentId, true, 2); return reader.getMetadataStore(); } /* @see IFormatReader#getMetadataStoreRoot() */ public Object getMetadataStoreRoot() { FormatTools.assertId(currentId, true, 2); return reader.getMetadataStoreRoot(); } /* @see IFormatReader#getUnderlyingReaders() */ public IFormatReader[] getUnderlyingReaders() { Vector v = new Vector(); for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { v.add(readers[i][j]); } } return (IFormatReader[]) v.toArray(new IFormatReader[0]); } // -- IFormatHandler API methods -- /* @see IFormatHandler#isThisType(String) */ public boolean isThisType(String name) { return reader.isThisType(name); } /* @see IFormatHandler#isThisType(String, boolean) */ public boolean isThisType(String name, boolean open) { return reader.isThisType(name, open); } /* @see IFormatHandler#getFormat() */ public String getFormat() { FormatTools.assertId(currentId, true, 2); return reader.getFormat(); } /* @see IFormatHandler#getSuffixes() */ public String[] getSuffixes() { return reader.getSuffixes(); } // -- StatusReporter API methods -- /* @see IFormatHandler#addStatusListener(StatusListener) */ public void addStatusListener(StatusListener l) { if (readers == null) reader.addStatusListener(l); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].addStatusListener(l); } } } } /* @see IFormatHandler#removeStatusListener(StatusListener) */ public void removeStatusListener(StatusListener l) { if (readers == null) reader.removeStatusListener(l); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].removeStatusListener(l); } } } } /* @see IFormatHandler#getStatusListeners() */ public StatusListener[] getStatusListeners() { return reader.getStatusListeners(); } // -- Internal FormatReader API methods -- /** Initializes the given file. 
   */
  protected void initFile(String id) throws FormatException, IOException {
    if (FormatHandler.debug) {
      LogTools.println("calling FileStitcher.initFile(" + id + ")");
    }

    currentId = id;
    fp = findPattern(id);

    reader.setId(fp.getFiles()[0]);

    // if this is a multi-series dataset, we need some special logic
    AxisGuesser guesser = new AxisGuesser(fp, reader.getDimensionOrder(),
      reader.getSizeZ(), reader.getSizeT(), reader.getEffectiveSizeC(),
      reader.isOrderCertain());

    int seriesCount = reader.getSeriesCount();
    boolean seriesInFile = true;
    if (guesser.getAxisCountS() > 0) {
      int[] count = fp.getCount();
      int[] axes = guesser.getAxisTypes();

      seriesInFile = false;

      String[] blockPrefixes = fp.getPrefixes();
      Vector sBlock = new Vector();

      for (int i=0; i<axes.length; i++) {
        if (axes[i] == AxisGuesser.S_AXIS) {
          sBlock.add(blockPrefixes[i]);
        }
      }

      seriesBlocks = (String[]) sBlock.toArray(new String[0]);
      fileVector = new Vector();
      seriesNames = new Vector();

      String file = fp.getFiles()[0];
      Location dir = new Location(file).getAbsoluteFile().getParentFile();
      String dpath = dir.getAbsolutePath();
      String[] fs = dir.list();

      setFiles(fs, seriesBlocks[0], fp.getFirst()[0], fp.getLast()[0],
        fp.getStep()[0], dpath, 0);

      seriesCount = fileVector.size();
      files = new String[seriesCount][];

      for (int i=0; i<seriesCount; i++) {
        files[i] = (String[]) fileVector.get(i);
      }
    }

    // verify that file pattern is valid and matches existing files
    String msg = " Please rename your files or disable file stitching.";
    if (!fp.isValid()) {
      throw new FormatException("Invalid " +
        (patternIds ? "file pattern" : "filename") +
        " (" + currentId + "): " + fp.getErrorMessage() + msg);
    }
    if (files == null) {
      files = new String[1][];
      files[0] = fp.getFiles();
    }
    // fp.getFiles() may have returned null above, so check the first entry
    if (files[0] == null) {
      throw new FormatException("No files matching pattern (" +
        fp.getPattern() + ").
" + msg); } for (int i=0; i<files.length; i++) { for (int j=0; j<files[i].length; j++) { if (!new Location(files[i][j]).exists()) { throw new FormatException("File #" + i + " (" + files[i][j] + ") does not exist."); } } } // determine reader type for these files; assume all are the same type Vector classes = new Vector(); IFormatReader r = reader; while (r instanceof ReaderWrapper) { classes.add(r.getClass()); r = ((ReaderWrapper) r).getReader(); } if (r instanceof ImageReader) r = ((ImageReader) r).getReader(files[0][0]); classes.add(r.getClass()); // construct list of readers for all files readers = new IFormatReader[files.length][]; for (int i=0; i<readers.length; i++) { readers[i] = new IFormatReader[files[i].length]; } readers[0][0] = reader; for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { // use crazy reflection to instantiate a reader of the proper type try { r = null; for (int k=classes.size()-1; k>=0; k--) { Class c = (Class) classes.elementAt(k); if (r == null) r = (IFormatReader) c.newInstance(); else { r = (IFormatReader) c.getConstructor(new Class[] {IFormatReader.class}).newInstance(new Object[] {r}); } } readers[i][j] = (IFormatReader) r; } catch (InstantiationException exc) { LogTools.trace(exc); } catch (IllegalAccessException exc) { LogTools.trace(exc); } catch (NoSuchMethodException exc) { LogTools.trace(exc); } catch (InvocationTargetException exc) { LogTools.trace(exc); } } } // sync reader configurations with original reader boolean normalized = reader.isNormalized(); boolean metadataFiltered = reader.isMetadataFiltered(); boolean metadataCollected = reader.isMetadataCollected(); StatusListener[] statusListeners = reader.getStatusListeners(); for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].setNormalized(normalized); readers[i][j].setMetadataFiltered(metadataFiltered); readers[i][j].setMetadataCollected(metadataCollected); for (int k=0; k<statusListeners.length; k++) { readers[i][j].addStatusListener(statusListeners[k]); } } } String[] originalUsedFiles = reader.getUsedFiles(); boolean doNotStitch = true; for (int i=0; i<files.length; i++) { for (int k=0; k<files[i].length; k++) { boolean found = false; for (int j=0; j<originalUsedFiles.length; j++) { if (originalUsedFiles[j].endsWith(files[i][k])) { found = true; break; } } if (!found) { doNotStitch = false; break; } } } if (doNotStitch) { // the reader for this file uses its own stitching logic that is probably // smarter than FileStitcher readers = new IFormatReader[1][1]; readers[0][0] = reader; String f = files[0][0]; files = new String[1][1]; files[0][0] = f; fp = new FilePattern(files[0][0]); } ag = new AxisGuesser[seriesCount]; blankImage = new BufferedImage[seriesCount]; blankBytes = new byte[seriesCount][]; blankThumb = new BufferedImage[seriesCount]; blankThumbBytes = new byte[seriesCount][]; imagesPerFile = new int[seriesCount]; sizeZ = new int[seriesCount]; sizeC = new int[seriesCount]; sizeT = new int[seriesCount]; boolean[] certain = new boolean[seriesCount]; lenZ = new int[seriesCount][]; lenC = new int[seriesCount][]; lenT = new int[seriesCount][]; // analyze first file; assume each file has the same parameters core = new CoreMetadata(seriesCount); int oldSeries = reader.getSeries(); IFormatReader rr = reader; for (int i=0; i<seriesCount; i++) { if (seriesInFile) rr.setSeries(i); else { rr = readers[i][0]; rr.setId(files[i][0]); } core.sizeX[i] = rr.getSizeX(); core.sizeY[i] = rr.getSizeY(); // NB: core.sizeZ populated in 
computeAxisLengths below // NB: core.sizeC populated in computeAxisLengths below // NB: core.sizeT populated in computeAxisLengths below core.pixelType[i] = rr.getPixelType(); imagesPerFile[i] = rr.getImageCount(); core.imageCount[i] = files[i].length * imagesPerFile[i]; core.thumbSizeX[i] = rr.getThumbSizeX(); core.thumbSizeY[i] = rr.getThumbSizeY(); // NB: core.cLengths[i] populated in computeAxisLengths below // NB: core.cTypes[i] populated in computeAxisLengths below core.currentOrder[i] = rr.getDimensionOrder(); // NB: core.orderCertain[i] populated below core.rgb[i] = rr.isRGB(); core.littleEndian[i] = rr.isLittleEndian(); core.interleaved[i] = rr.isInterleaved(); core.seriesMetadata[i] = rr.getMetadata(); sizeZ[i] = rr.getSizeZ(); sizeC[i] = rr.getSizeC(); sizeT[i] = rr.getSizeT(); certain[i] = rr.isOrderCertain(); } reader.setSeries(oldSeries); // guess at dimensions corresponding to file numbering for (int i=0; i<seriesCount; i++) { ag[i] = new AxisGuesser(fp, core.currentOrder[i], sizeZ[i], sizeT[i], sizeC[i], certain[i]); } // order may need to be adjusted for (int i=0; i<seriesCount; i++) { setSeries(i); core.currentOrder[i] = ag[i].getAdjustedOrder(); core.orderCertain[i] = ag[i].isCertain(); computeAxisLengths(); } setSeries(oldSeries); // initialize used files list only when requested usedFiles = null; } // -- Helper methods -- /** Computes axis length arrays, and total axis lengths. */ protected void computeAxisLengths() throws FormatException { int sno = getSeries(); FilePattern p = new FilePattern(FilePattern.findPattern(files[sno][0], new Location(files[sno][0]).getAbsoluteFile().getParentFile().getPath(), files[sno])); int[] count = p.getCount(); try { readers[sno][0].setId(files[sno][0]); } catch (IOException e) { throw new FormatException(e); } ag[sno] = new AxisGuesser(p, readers[sno][0].getDimensionOrder(), readers[sno][0].getSizeZ(), readers[sno][0].getSizeT(), readers[sno][0].getSizeC(), readers[sno][0].isOrderCertain()); int[] axes = ag[sno].getAxisTypes(); int numZ = ag[sno].getAxisCountZ(); int numC = ag[sno].getAxisCountC(); int numT = ag[sno].getAxisCountT(); core.sizeZ[sno] = sizeZ[sno]; core.sizeC[sno] = sizeC[sno]; core.sizeT[sno] = sizeT[sno]; lenZ[sno] = new int[numZ + 1]; lenC[sno] = new int[numC + 1]; lenT[sno] = new int[numT + 1]; lenZ[sno][0] = sizeZ[sno]; lenC[sno][0] = sizeC[sno]; lenT[sno][0] = sizeT[sno]; for (int i=0, z=1, c=1, t=1; i<count.length; i++) { switch (axes[i]) { case AxisGuesser.Z_AXIS: core.sizeZ[sno] *= count[i]; lenZ[sno][z++] = count[i]; break; case AxisGuesser.C_AXIS: core.sizeC[sno] *= count[i]; lenC[sno][c++] = count[i]; break; case AxisGuesser.T_AXIS: core.sizeT[sno] *= count[i]; lenT[sno][t++] = count[i]; break; case AxisGuesser.S_AXIS: break; default: throw new FormatException("Unknown axis type for axis #" + i + ": " + axes[i]); } } int[] cLengths = reader.getChannelDimLengths(); String[] cTypes = reader.getChannelDimTypes(); int cCount = 0; for (int i=0; i<cLengths.length; i++) { if (cLengths[i] > 1) cCount++; } for (int i=1; i<lenC[sno].length; i++) { if (lenC[sno][i] > 1) cCount++; } if (cCount == 0) { core.cLengths[sno] = new int[] {1}; core.cTypes[sno] = new String[] {FormatTools.CHANNEL}; } else { core.cLengths[sno] = new int[cCount]; core.cTypes[sno] = new String[cCount]; } int c = 0; for (int i=0; i<cLengths.length; i++) { if (cLengths[i] == 1) continue; core.cLengths[sno][c] = cLengths[i]; core.cTypes[sno][c] = cTypes[i]; c++; } for (int i=1; i<lenC[sno].length; i++) { if (lenC[sno][i] == 1) continue; 
core.cLengths[sno][c] = lenC[sno][i];
      core.cTypes[sno][c] = FormatTools.CHANNEL;
      c++; // fill the next channel dimension slot
    }

    // populate metadata store
    int pixelType = getPixelType();
    boolean little = reader.isLittleEndian();
    MetadataStore s = reader.getMetadataStore();
    for (int i=0; i<core.sizeX.length; i++) {
      s.setImage((String) seriesNames.get(i), null, null, new Integer(i));
    }
    FormatTools.populatePixels(s, this);
  }

  /**
   * Gets the file index, and image index into that file,
   * corresponding to the given global image index.
   *
   * @return An array of size 2, dimensioned {file index, image index}.
   */
  protected int[] computeIndices(int no) throws FormatException, IOException {
    int sno = getSeries();

    int[] axes = ag[sno].getAxisTypes();
    int[] count = fp.getCount();

    // get Z, C and T positions
    int[] zct = getZCTCoords(no);
    zct[1] *= getRGBChannelCount();
    int[] posZ = FormatTools.rasterToPosition(lenZ[sno], zct[0]);
    int[] posC = FormatTools.rasterToPosition(lenC[sno], zct[1]);
    int[] posT = FormatTools.rasterToPosition(lenT[sno], zct[2]);

    // convert Z, C and T position lists into file index and image index
    int[] pos = new int[axes.length];
    int z = 1, c = 1, t = 1;
    for (int i=0; i<axes.length; i++) {
      if (axes[i] == AxisGuesser.Z_AXIS) pos[i] = posZ[z++];
      else if (axes[i] == AxisGuesser.C_AXIS) pos[i] = posC[c++];
      else if (axes[i] == AxisGuesser.T_AXIS) pos[i] = posT[t++];
      else {
        throw new FormatException("Unknown axis type for axis #" +
          i + ": " + axes[i]);
      }
    }

    int fno = FormatTools.positionToRaster(count, pos);

    // configure the reader, in case we haven't done this one yet
    readers[sno][fno].setId(files[sno][fno]);
    readers[sno][fno].setSeries(reader.getSeries());

    int ino;
    if (posZ[0] < readers[sno][fno].getSizeZ() &&
      posC[0] < readers[sno][fno].getSizeC() &&
      posT[0] < readers[sno][fno].getSizeT())
    {
      ino = FormatTools.getIndex(readers[sno][fno], posZ[0], posC[0], posT[0]);
    }
    else ino = Integer.MAX_VALUE; // coordinates out of range

    return new int[] {fno, ino};
  }

  /**
   * Gets a list of readers to include in relation to the given C position.
   * @return Array with indices corresponding to the list of readers, and
   *   values indicating the internal channel index to use for that reader.
*/ protected int[] getIncludeList(int theC) throws FormatException, IOException { int[] include = new int[readers.length]; Arrays.fill(include, -1); for (int t=0; t<sizeT[getSeries()]; t++) { for (int z=0; z<sizeZ[getSeries()]; z++) { int no = getIndex(z, theC, t); int[] q = computeIndices(no); int fno = q[0], ino = q[1]; include[fno] = ino; } } return include; } private FilePattern getPattern(String[] f, String dir, String block) { Vector v = new Vector(); for (int i=0; i<f.length; i++) { if (f[i].indexOf(File.separator) != -1) { f[i] = f[i].substring(f[i].lastIndexOf(File.separator) + 1); } if (dir.endsWith(File.separator)) f[i] = dir + f[i]; else f[i] = dir + File.separator + f[i]; if (f[i].indexOf(block) != -1 && new Location(f[i]).exists()) { v.add(f[i].substring(f[i].lastIndexOf(File.separator) + 1)); } } f = (String[]) v.toArray(new String[0]); return new FilePattern(FilePattern.findPattern(f[0], dir, f)); } private void setFiles(String[] list, String prefix, BigInteger first, BigInteger last, BigInteger step, String dir, int blockNum) { long f = first.longValue(); long l = last.longValue(); long s = step.longValue(); for (long i=f; i<=l; i+=s) { FilePattern newPattern = getPattern(list, dir, prefix + i); if (blockNum == seriesBlocks.length - 1) { fileVector.add(newPattern.getFiles()); String name = newPattern.getPattern(); if (name.indexOf(File.separator) != -1) { name = name.substring(name.lastIndexOf(File.separator) + 1); } seriesNames.add(name); } else { String next = seriesBlocks[blockNum + 1]; String[] blocks = newPattern.getPrefixes(); BigInteger fi = null; BigInteger la = null; BigInteger st = null; for (int q=0; q<blocks.length; q++) { if (blocks[q].indexOf(next) != -1) { fi = newPattern.getFirst()[q]; la = newPattern.getLast()[q]; st = newPattern.getStep()[q]; break; } } setFiles(newPattern.getFiles(), next, fi, la, st, dir, blockNum + 1); } } } // -- Deprecated FileStitcher API methods -- /** @deprecated Replaced by {@link #getAxisTypes()} */ public int[] getAxisTypes(String id) throws FormatException, IOException { setId(id); return getAxisTypes(); } /** @deprecated Replaced by {@link #setAxisTypes(int[])} */ public void setAxisTypes(String id, int[] axes) throws FormatException, IOException { setId(id); setAxisTypes(axes); } /** @deprecated Replaced by {@link #getFilePattern()} */ public FilePattern getFilePattern(String id) throws FormatException, IOException { setId(id); return getFilePattern(); } /** @deprecated Replaced by {@link #getAxisGuesser()} */ public AxisGuesser getAxisGuesser(String id) throws FormatException, IOException { setId(id); return getAxisGuesser(); } // -- Deprecated IFormatReader API methods -- /** @deprecated Replaced by {@link #getImageCount()} */ public int getImageCount(String id) throws FormatException, IOException { setId(id); return getImageCount(); } /** @deprecated Replaced by {@link #isRGB()} */ public boolean isRGB(String id) throws FormatException, IOException { setId(id); return isRGB(); } /** @deprecated Replaced by {@link #getSizeX()} */ public int getSizeX(String id) throws FormatException, IOException { setId(id); return getSizeX(); } /** @deprecated Replaced by {@link #getSizeY()} */ public int getSizeY(String id) throws FormatException, IOException { setId(id); return getSizeY(); } /** @deprecated Replaced by {@link #getSizeZ()} */ public int getSizeZ(String id) throws FormatException, IOException { setId(id); return getSizeZ(); } /** @deprecated Replaced by {@link #getSizeC()} */ public int getSizeC(String id) throws 
FormatException, IOException { setId(id); return getSizeC(); } /** @deprecated Replaced by {@link #getSizeT()} */ public int getSizeT(String id) throws FormatException, IOException { setId(id); return getSizeT(); } /** @deprecated Replaced by {@link #getPixelType()} */ public int getPixelType(String id) throws FormatException, IOException { setId(id); return getPixelType(); } /** @deprecated Replaced by {@link #getEffectiveSizeC()} */ public int getEffectiveSizeC(String id) throws FormatException, IOException { setId(id); return getEffectiveSizeC(); } /** @deprecated Replaced by {@link #getRGBChannelCount()} */ public int getRGBChannelCount(String id) throws FormatException, IOException { setId(id); return getSizeC() / getEffectiveSizeC(); } /** @deprecated Replaced by {@link #getChannelDimLengths()} */ public int[] getChannelDimLengths(String id) throws FormatException, IOException { setId(id); return getChannelDimLengths(); } /** @deprecated Replaced by {@link #getChannelDimTypes()} */ public String[] getChannelDimTypes(String id) throws FormatException, IOException { setId(id); return getChannelDimTypes(); } /** @deprecated Replaced by {@link #getThumbSizeX()} */ public int getThumbSizeX(String id) throws FormatException, IOException { setId(id); return getThumbSizeX(); } /** @deprecated Replaced by {@link #getThumbSizeY()} */ public int getThumbSizeY(String id) throws FormatException, IOException { setId(id); return getThumbSizeY(); } /** @deprecated Replaced by {@link #isLittleEndian()} */ public boolean isLittleEndian(String id) throws FormatException, IOException { setId(id); return isLittleEndian(); } /** @deprecated Replaced by {@link #getDimensionOrder()} */ public String getDimensionOrder(String id) throws FormatException, IOException { setId(id); return getDimensionOrder(); } /** @deprecated Replaced by {@link #isOrderCertain()} */ public boolean isOrderCertain(String id) throws FormatException, IOException { setId(id); return isOrderCertain(); } /** @deprecated Replaced by {@link #isInterleaved()} */ public boolean isInterleaved(String id) throws FormatException, IOException { setId(id); return isInterleaved(); } /** @deprecated Replaced by {@link #isInterleaved(int)} */ public boolean isInterleaved(String id, int subC) throws FormatException, IOException { setId(id); return isInterleaved(subC); } /** @deprecated Replaced by {@link #openImage(int)} */ public BufferedImage openImage(String id, int no) throws FormatException, IOException { setId(id); return openImage(no); } /** @deprecated Replaced by {@link #openBytes(int)} */ public byte[] openBytes(String id, int no) throws FormatException, IOException { setId(id); return openBytes(no); } /** @deprecated Replaced by {@link #openBytes(int, byte[])} */ public byte[] openBytes(String id, int no, byte[] buf) throws FormatException, IOException { setId(id); return openBytes(no, buf); } /** @deprecated Replaced by {@link #openThumbImage(int)} */ public BufferedImage openThumbImage(String id, int no) throws FormatException, IOException { setId(id); return openThumbImage(no); } /** @deprecated Replaced by {@link #openThumbImage(int)} */ public byte[] openThumbBytes(String id, int no) throws FormatException, IOException { setId(id); return openThumbBytes(no); } /** @deprecated Replaced by {@link #getSeriesCount()} */ public int getSeriesCount(String id) throws FormatException, IOException { setId(id); return getSeriesCount(); } /** @deprecated Replaced by {@link #setSeries(int)} */ public void setSeries(String id, int no) throws 
FormatException, IOException { setId(id); setSeries(no); } /** @deprecated Replaced by {@link #getSeries()} */ public int getSeries(String id) throws FormatException, IOException { setId(id); return getSeries(); } /** @deprecated Replaced by {@link #getUsedFiles()} */ public String[] getUsedFiles(String id) throws FormatException, IOException { setId(id); return getUsedFiles(); } /** @deprecated Replaced by {@link #getIndex(int, int, int)} */ public int getIndex(String id, int z, int c, int t) throws FormatException, IOException { setId(id); return getIndex(z, c, t); } /** @deprecated Replaced by {@link #getZCTCoords(int)} */ public int[] getZCTCoords(String id, int index) throws FormatException, IOException { setId(id); return getZCTCoords(index); } /** @deprecated Replaced by {@link #getMetadataValue(String)} */ public Object getMetadataValue(String id, String field) throws FormatException, IOException { setId(id); return getMetadataValue(field); } /** @deprecated Replaced by {@link #getMetadata()} */ public Hashtable getMetadata(String id) throws FormatException, IOException { setId(id); return getMetadata(); } /** @deprecated Replaced by {@link #getCoreMetadata()} */ public CoreMetadata getCoreMetadata(String id) throws FormatException, IOException { setId(id); return getCoreMetadata(); } /** @deprecated Replaced by {@link #getMetadataStore()} */ public MetadataStore getMetadataStore(String id) throws FormatException, IOException { setId(id); return getMetadataStore(); } /** @deprecated Replaced by {@link #getMetadataStoreRoot()} */ public Object getMetadataStoreRoot(String id) throws FormatException, IOException { setId(id); return getMetadataStoreRoot(); } }
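A note on the index arithmetic in the computeIndices method above: a global plane number is treated as a mixed-radix value whose lowest digit spans the intra-file axis length and whose higher digits come from the file-pattern blocks (the lenZ/lenC/lenT arrays). The standalone sketch below mirrors what FormatTools.rasterToPosition and FormatTools.positionToRaster are used for here; the class name and example lengths are hypothetical, not Bio-Formats code, and it assumes the first axis varies fastest, as the surrounding code does.

// Minimal sketch of the mixed-radix conversion behind computeIndices.
// Hypothetical demo class; not part of Bio-Formats.
public class RasterDemo {

  // Decomposes a flat index into per-axis positions; axis 0 varies fastest.
  static int[] rasterToPosition(int[] lengths, int raster) {
    int[] pos = new int[lengths.length];
    for (int i = 0; i < lengths.length; i++) {
      pos[i] = raster % lengths[i];
      raster /= lengths[i];
    }
    return pos;
  }

  // Inverse operation: recombines per-axis positions into a flat index.
  static int positionToRaster(int[] lengths, int[] pos) {
    int offset = 1, raster = 0;
    for (int i = 0; i < lengths.length; i++) {
      raster += offset * pos[i];
      offset *= lengths[i];
    }
    return raster;
  }

  public static void main(String[] args) {
    int[] lenZ = {5, 3}; // e.g. 5 focal planes per file, 3 Z-numbered files
    int[] pos = rasterToPosition(lenZ, 12);
    System.out.println(pos[0] + ", " + pos[1]);      // "2, 2": plane 2 of file block 2
    System.out.println(positionToRaster(lenZ, pos)); // "12": round-trips
  }
}

With lenZ = {5, 3}, global plane 12 splits into intra-file plane 2 within the third Z file, which is the same split computeIndices feeds into positionToRaster to choose a file number.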
loci/formats/FileStitcher.java
// // FileStitcher.java // /* LOCI Bio-Formats package for reading and converting biological file formats. Copyright (C) 2005-@year@ Melissa Linkert, Curtis Rueden, Chris Allan, Eric Kjellman and Brian Loranger. This program is free software; you can redistribute it and/or modify it under the terms of the GNU Library General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public License for more details. You should have received a copy of the GNU Library General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package loci.formats; import java.awt.image.BufferedImage; import java.io.*; import java.lang.reflect.InvocationTargetException; import java.math.BigInteger; import java.util.*; /** * Logic to stitch together files with similar names. * Assumes that all files have the same dimensions. * * <dl><dt><b>Source code:</b></dt> * <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/loci/formats/FileStitcher.java">Trac</a>, * <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/loci/formats/FileStitcher.java">SVN</a></dd></dl> */ public class FileStitcher implements IFormatReader { // -- Fields -- /** FormatReader to use as a template for constituent readers. */ private IFormatReader reader; /** * Whether string ids given should be treated * as file patterns rather than single file paths. */ private boolean patternIds = false; /** Current file pattern string. */ private String currentId; /** File pattern object used to build the list of files. */ private FilePattern fp; /** Axis guesser object used to guess which dimensional axes are which. */ private AxisGuesser[] ag; /** The matching files. */ private String[][] files; /** Used files list. */ private String[] usedFiles; /** Reader used for each file. */ private IFormatReader[][] readers; /** Blank buffered image, for use when image counts vary between files. */ private BufferedImage[] blankImage; /** Blank image bytes, for use when image counts vary between files. */ private byte[][] blankBytes; /** Blank buffered thumbnail, for use when image counts vary between files. */ private BufferedImage[] blankThumb; /** Blank thumbnail bytes, for use when image counts vary between files. */ private byte[][] blankThumbBytes; /** Number of images per file. */ private int[] imagesPerFile; /** Dimensional axis lengths per file. */ private int[] sizeZ, sizeC, sizeT; /** Component lengths for each axis type. */ private int[][] lenZ, lenC, lenT; /** Core metadata. */ private CoreMetadata core; /** Current series number. */ private int series; private String[] seriesBlocks; private Vector fileVector; // -- Constructors -- /** Constructs a FileStitcher around a new image reader. */ public FileStitcher() { this(new ImageReader()); } /** * Constructs a FileStitcher around a new image reader. * @param patternIds Whether string ids given should be treated as file * patterns rather than single file paths. */ public FileStitcher(boolean patternIds) { this(new ImageReader(), patternIds); } /** * Constructs a FileStitcher with the given reader. * @param r The reader to use for reading stitched files. 
*/ public FileStitcher(IFormatReader r) { this(r, false); } /** * Constructs a FileStitcher with the given reader. * @param r The reader to use for reading stitched files. * @param patternIds Whether string ids given should be treated as file * patterns rather than single file paths. */ public FileStitcher(IFormatReader r, boolean patternIds) { reader = r; this.patternIds = patternIds; } // -- FileStitcher API methods -- /** Gets the wrapped reader prototype. */ public IFormatReader getReader() { return reader; } /** * Gets the axis type for each dimensional block. * @return An array containing values from the enumeration: * <ul> * <li>AxisGuesser.Z_AXIS: focal planes</li> * <li>AxisGuesser.T_AXIS: time points</li> * <li>AxisGuesser.C_AXIS: channels</li> * <li>AxisGuesser.S_AXIS: series</li> * </ul> */ public int[] getAxisTypes() { FormatTools.assertId(currentId, true, 2); return ag[getSeries()].getAxisTypes(); } /** * Sets the axis type for each dimensional block. * @param axes An array containing values from the enumeration: * <ul> * <li>AxisGuesser.Z_AXIS: focal planes</li> * <li>AxisGuesser.T_AXIS: time points</li> * <li>AxisGuesser.C_AXIS: channels</li> * <li>AxisGuesser.S_AXIS: series</li> * </ul> */ public void setAxisTypes(int[] axes) throws FormatException { FormatTools.assertId(currentId, true, 2); ag[getSeries()].setAxisTypes(axes); computeAxisLengths(); } /** Gets the file pattern object used to build the list of files. */ public FilePattern getFilePattern() { FormatTools.assertId(currentId, true, 2); return fp; } /** * Gets the axis guesser object used to guess * which dimensional axes are which. */ public AxisGuesser getAxisGuesser() { FormatTools.assertId(currentId, true, 2); return ag[getSeries()]; } /** * Finds the file pattern for the given ID, based on the state of the file * stitcher. Takes both ID map entries and the patternIds flag into account. 
*/ public FilePattern findPattern(String id) { FormatTools.assertId(currentId, true, 2); if (!patternIds) { // find the containing pattern Hashtable map = Location.getIdMap(); String pattern = null; if (map.containsKey(id)) { // search ID map for pattern, rather than files on disk String[] idList = new String[map.size()]; Enumeration en = map.keys(); for (int i=0; i<idList.length; i++) { idList[i] = (String) en.nextElement(); } pattern = FilePattern.findPattern(id, null, idList); } else { // id is an unmapped file path; look to similar files on disk pattern = FilePattern.findPattern(new Location(id)); } if (pattern != null) id = pattern; } return new FilePattern(id); } // -- IFormatReader API methods -- /* @see IFormatReader#isThisType(byte[]) */ public boolean isThisType(byte[] block) { return reader.isThisType(block); } /* @see IFormatReader#setId(String) */ public void setId(String id) throws FormatException, IOException { if (!id.equals(currentId)) initFile(id); } /* @see IFormatReader#setId(String, boolean) */ public void setId(String id, boolean force) throws FormatException, IOException { if (!id.equals(currentId) || force) initFile(id); } /* @see IFormatReader#getImageCount() */ public int getImageCount() { FormatTools.assertId(currentId, true, 2); return core.imageCount[getSeries()]; } /* @see IFormatReader#isRGB() */ public boolean isRGB() { FormatTools.assertId(currentId, true, 2); return core.rgb[getSeries()]; } /* @see IFormatReader#getSizeX() */ public int getSizeX() { FormatTools.assertId(currentId, true, 2); return core.sizeX[getSeries()]; } /* @see IFormatReader#getSizeY() */ public int getSizeY() { FormatTools.assertId(currentId, true, 2); return core.sizeY[getSeries()]; } /* @see IFormatReader#getSizeZ() */ public int getSizeZ() { FormatTools.assertId(currentId, true, 2); return core.sizeZ[getSeries()]; } /* @see IFormatReader#getSizeC() */ public int getSizeC() { FormatTools.assertId(currentId, true, 2); return core.sizeC[getSeries()]; } /* @see IFormatReader#getSizeT() */ public int getSizeT() { FormatTools.assertId(currentId, true, 2); return core.sizeT[getSeries()]; } /* @see IFormatReader#getPixelType() */ public int getPixelType() { FormatTools.assertId(currentId, true, 2); return core.pixelType[getSeries()]; } /* @see IFormatReader#getEffectiveSizeC() */ public int getEffectiveSizeC() { FormatTools.assertId(currentId, true, 2); return getImageCount() / (getSizeZ() * getSizeT()); } /* @see IFormatReader#getRGBChannelCount() */ public int getRGBChannelCount() { FormatTools.assertId(currentId, true, 2); return getSizeC() / getEffectiveSizeC(); } /* @see IFormatReader#isIndexed() */ public boolean isIndexed() { FormatTools.assertId(currentId, true, 2); return reader.isIndexed(); } /* @see IFormatReader#isFalseColor() */ public boolean isFalseColor() { FormatTools.assertId(currentId, true, 2); return reader.isFalseColor(); } /* @see IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); return reader.get8BitLookupTable(); } /* @see IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); return reader.get16BitLookupTable(); } /* @see IFormatReader#getChannelDimLengths() */ public int[] getChannelDimLengths() { FormatTools.assertId(currentId, true, 1); return core.cLengths[getSeries()]; } /* @see IFormatReader#getChannelDimTypes() */ public String[] getChannelDimTypes() { 
FormatTools.assertId(currentId, true, 1); return core.cTypes[getSeries()]; } /* @see IFormatReader#getThumbSizeX() */ public int getThumbSizeX() { FormatTools.assertId(currentId, true, 2); return reader.getThumbSizeX(); } /* @see IFormatReader#getThumbSizeY() */ public int getThumbSizeY() { FormatTools.assertId(currentId, true, 2); return reader.getThumbSizeY(); } /* @see IFormatReader#isLittleEndian() */ public boolean isLittleEndian() { FormatTools.assertId(currentId, true, 2); return reader.isLittleEndian(); } /* @see IFormatReader#getDimensionOrder() */ public String getDimensionOrder() { FormatTools.assertId(currentId, true, 2); return core.currentOrder[getSeries()]; } /* @see IFormatReader#isOrderCertain() */ public boolean isOrderCertain() { FormatTools.assertId(currentId, true, 2); return core.orderCertain[getSeries()]; } /* @see IFormatReader#isInterleaved() */ public boolean isInterleaved() { FormatTools.assertId(currentId, true, 2); return reader.isInterleaved(); } /* @see IFormatReader#isInterleaved(int) */ public boolean isInterleaved(int subC) { FormatTools.assertId(currentId, true, 2); return reader.isInterleaved(subC); } /* @see IFormatReader#openImage(int) */ public BufferedImage openImage(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openImage(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankImage[sno] == null) { blankImage[sno] = ImageTools.blankImage(core.sizeX[sno], core.sizeY[sno], sizeC[sno], getPixelType()); } return blankImage[sno]; } /* @see IFormatReader#openImage(int, int, int, int, int) */ public BufferedImage openImage(int no, int x, int y, int w, int h) throws FormatException, IOException { return openImage(no).getSubimage(x, y, w, h); } /* @see IFormatReader#openBytes(int) */ public byte[] openBytes(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openBytes(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankBytes[sno] == null) { int bytes = FormatTools.getBytesPerPixel(getPixelType()); blankBytes[sno] = new byte[core.sizeX[sno] * core.sizeY[sno] * bytes * getRGBChannelCount()]; } return blankBytes[sno]; } /* @see IFormatReader#openBytes(int, int, int, int, int) */ public byte[] openBytes(int no, int x, int y, int w, int h) throws FormatException, IOException { byte[] buffer = new byte[w * h * FormatTools.getBytesPerPixel(getPixelType()) * getRGBChannelCount()]; return openBytes(no, buffer, x, y, w, h); } /* @see IFormatReader#openBytes(int, byte[]) */ public byte[] openBytes(int no, byte[] buf) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openBytes(ino, buf); } // return a blank image to cover for the fact that // this file does not contain enough image planes Arrays.fill(buf, (byte) 0); return buf; } /* @see IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws 
FormatException, IOException { byte[] bytes = openBytes(no); int bpp = FormatTools.getBytesPerPixel(getPixelType()); int ch = getRGBChannelCount(); if (buf.length < w * h * bpp * ch) { throw new FormatException("Buffer too small."); } for (int yy=y; yy<y + h; yy++) { for (int xx=x; xx<x + w; xx++) { for (int cc=0; cc<ch; cc++) { int oldNdx = -1, newNdx = -1; if (isInterleaved()) { oldNdx = yy*getSizeX()*bpp*ch + xx*bpp*ch + cc*bpp; newNdx = (yy - y)*w*bpp*ch + (xx - x)*bpp*ch + cc*bpp; } else { oldNdx = cc*getSizeX()*getSizeY()*bpp + yy*getSizeX()*bpp + xx*bpp; newNdx = cc*w*h*bpp + (yy - y)*w*bpp + (xx - x)*bpp; } System.arraycopy(bytes, oldNdx, buf, newNdx, bpp); } } } return buf; } /* @see IFormatReader#openThumbImage(int) */ public BufferedImage openThumbImage(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openThumbImage(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankThumb[sno] == null) { blankThumb[sno] = ImageTools.blankImage(getThumbSizeX(), getThumbSizeY(), sizeC[sno], getPixelType()); } return blankThumb[sno]; } /* @see IFormatReader#openThumbBytes(int) */ public byte[] openThumbBytes(int no) throws FormatException, IOException { FormatTools.assertId(currentId, true, 2); int[] q = computeIndices(no); int sno = getSeries(); int fno = q[0], ino = q[1]; if (ino < readers[sno][fno].getImageCount()) { return readers[sno][fno].openThumbBytes(ino); } // return a blank image to cover for the fact that // this file does not contain enough image planes if (blankThumbBytes[sno] == null) { int bytes = FormatTools.getBytesPerPixel(getPixelType()); blankThumbBytes[sno] = new byte[getThumbSizeX() * getThumbSizeY() * bytes * getRGBChannelCount()]; } return blankThumbBytes[sno]; } /* @see IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { if (readers == null) reader.close(fileOnly); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].close(fileOnly); } } } if (!fileOnly) { readers = null; blankImage = null; blankBytes = null; currentId = null; } } /* @see IFormatReader#close() */ public void close() throws IOException { if (readers == null) reader.close(); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].close(); } } } readers = null; blankImage = null; blankBytes = null; currentId = null; } /* @see IFormatReader#getSeriesCount() */ public int getSeriesCount() { FormatTools.assertId(currentId, true, 2); return core.sizeX.length; } /* @see IFormatReader#setSeries(int) */ public void setSeries(int no) { FormatTools.assertId(currentId, true, 2); int n = reader.getSeriesCount(); if (n > 1) reader.setSeries(no); else series = no; } /* @see IFormatReader#getSeries() */ public int getSeries() { FormatTools.assertId(currentId, true, 2); return series == 0 ? 
reader.getSeries() : series; } /* @see IFormatReader#setGroupFiles(boolean) */ public void setGroupFiles(boolean group) { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].setGroupFiles(group); } } } /* @see IFormatReader#isGroupFiles() */ public boolean isGroupFiles() { return readers[0][0].isGroupFiles(); } /* @see IFormatReader#fileGroupOption(String) */ public int fileGroupOption(String id) throws FormatException, IOException { return readers[0][0].fileGroupOption(id); } /* @see IFormatReader#isMetadataComplete() */ public boolean isMetadataComplete() { return readers[0][0].isMetadataComplete(); } /* @see IFormatReader#setNormalized(boolean) */ public void setNormalized(boolean normalize) { FormatTools.assertId(currentId, false, 2); if (readers == null) reader.setNormalized(normalize); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].setNormalized(normalize); } } } } /* @see IFormatReader#isNormalized() */ public boolean isNormalized() { return reader.isNormalized(); } /* @see IFormatReader#setMetadataCollected(boolean) */ public void setMetadataCollected(boolean collect) { FormatTools.assertId(currentId, false, 2); if (readers == null) reader.setMetadataCollected(collect); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].setMetadataCollected(collect); } } } } /* @see IFormatReader#isMetadataCollected() */ public boolean isMetadataCollected() { return reader.isMetadataCollected(); } /* @see IFormatReader#setOriginalMetadataPopulated(boolean) */ public void setOriginalMetadataPopulated(boolean populate) { FormatTools.assertId(currentId, false, 1); if (readers == null) reader.setOriginalMetadataPopulated(populate); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].setOriginalMetadataPopulated(populate); } } } } /* @see IFormatReader#isOriginalMetadataPopulated() */ public boolean isOriginalMetadataPopulated() { return reader.isOriginalMetadataPopulated(); } /* @see IFormatReader#getUsedFiles() */ public String[] getUsedFiles() { FormatTools.assertId(currentId, true, 2); // returning the files list directly here is fast, since we do not // have to call initFile on each constituent file; but we can only do so // when each constituent file does not itself have multiple used files if (reader.getUsedFiles().length > 1) { // each constituent file has multiple used files; we must build the list // this could happen with, e.g., a stitched collection of ICS/IDS pairs // we have no datasets structured this way, so this logic is untested if (usedFiles == null) { String[][][] used = new String[files.length][][]; int total = 0; for (int i=0; i<files.length; i++) { // allocate the per-file slots for this series before filling them in used[i] = new String[files[i].length][]; for (int j=0; j<files[i].length; j++) { try { readers[i][j].setId(files[i][j]); } catch (FormatException exc) { LogTools.trace(exc); return null; } catch (IOException exc) { LogTools.trace(exc); return null; } used[i][j] = readers[i][j].getUsedFiles(); total += used[i][j].length; } } usedFiles = new String[total]; for (int i=0, off=0; i<used.length; i++) { for (int j=0; j<used[i].length; j++) { System.arraycopy(used[i][j], 0, usedFiles, off, used[i][j].length); off += used[i][j].length; } } } return usedFiles; } // assume every constituent file has no other used files // this logic could fail if the first constituent has no extra used files, // but later constituents do; in practice, this scenario seems unlikely Vector v = new Vector(); for
(int i=0; i<files.length; i++) { for (int j=0; j<files[i].length; j++) { v.add(files[i][j]); } } return (String[]) v.toArray(new String[0]); } /* @see IFormatReader#getCurrentFile() */ public String getCurrentFile() { return currentId; } /* @see IFormatReader#getIndex(int, int, int) */ public int getIndex(int z, int c, int t) { return FormatTools.getIndex(this, z, c, t); } /* @see IFormatReader#getZCTCoords(int) */ public int[] getZCTCoords(int index) { return FormatTools.getZCTCoords(this, index); } /* @see IFormatReader#getMetadataValue(String) */ public Object getMetadataValue(String field) { FormatTools.assertId(currentId, true, 2); return reader.getMetadataValue(field); } /* @see IFormatReader#getMetadata() */ public Hashtable getMetadata() { FormatTools.assertId(currentId, true, 2); return reader.getMetadata(); } /* @see IFormatReader#getCoreMetadata() */ public CoreMetadata getCoreMetadata() { FormatTools.assertId(currentId, true, 2); return core; } /* @see IFormatReader#setMetadataFiltered(boolean) */ public void setMetadataFiltered(boolean filter) { FormatTools.assertId(currentId, false, 2); reader.setMetadataFiltered(filter); } /* @see IFormatReader#isMetadataFiltered() */ public boolean isMetadataFiltered() { return reader.isMetadataFiltered(); } /* @see IFormatReader#setMetadataStore(MetadataStore) */ public void setMetadataStore(MetadataStore store) { FormatTools.assertId(currentId, false, 2); reader.setMetadataStore(store); } /* @see IFormatReader#getMetadataStore() */ public MetadataStore getMetadataStore() { FormatTools.assertId(currentId, true, 2); return reader.getMetadataStore(); } /* @see IFormatReader#getMetadataStoreRoot() */ public Object getMetadataStoreRoot() { FormatTools.assertId(currentId, true, 2); return reader.getMetadataStoreRoot(); } /* @see IFormatReader#getUnderlyingReaders() */ public IFormatReader[] getUnderlyingReaders() { Vector v = new Vector(); for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { v.add(readers[i][j]); } } return (IFormatReader[]) v.toArray(new IFormatReader[0]); } // -- IFormatHandler API methods -- /* @see IFormatHandler#isThisType(String) */ public boolean isThisType(String name) { return reader.isThisType(name); } /* @see IFormatHandler#isThisType(String, boolean) */ public boolean isThisType(String name, boolean open) { return reader.isThisType(name, open); } /* @see IFormatHandler#getFormat() */ public String getFormat() { FormatTools.assertId(currentId, true, 2); return reader.getFormat(); } /* @see IFormatHandler#getSuffixes() */ public String[] getSuffixes() { return reader.getSuffixes(); } // -- StatusReporter API methods -- /* @see IFormatHandler#addStatusListener(StatusListener) */ public void addStatusListener(StatusListener l) { if (readers == null) reader.addStatusListener(l); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].addStatusListener(l); } } } } /* @see IFormatHandler#removeStatusListener(StatusListener) */ public void removeStatusListener(StatusListener l) { if (readers == null) reader.removeStatusListener(l); else { for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].removeStatusListener(l); } } } } /* @see IFormatHandler#getStatusListeners() */ public StatusListener[] getStatusListeners() { return reader.getStatusListeners(); } // -- Internal FormatReader API methods -- /** Initializes the given file. 
*/ protected void initFile(String id) throws FormatException, IOException { if (FormatHandler.debug) { LogTools.println("calling FileStitcher.initFile(" + id + ")"); } currentId = id; fp = findPattern(id); reader.setId(fp.getFiles()[0]); // if this is a multi-series dataset, we need some special logic AxisGuesser guesser = new AxisGuesser(fp, reader.getDimensionOrder(), reader.getSizeZ(), reader.getSizeT(), reader.getEffectiveSizeC(), reader.isOrderCertain()); int seriesCount = reader.getSeriesCount(); boolean seriesInFile = true; if (guesser.getAxisCountS() > 0) { int[] count = fp.getCount(); int[] axes = guesser.getAxisTypes(); seriesInFile = false; String[] blockPrefixes = fp.getPrefixes(); Vector sBlock = new Vector(); for (int i=0; i<axes.length; i++) { if (axes[i] == AxisGuesser.S_AXIS) { sBlock.add(blockPrefixes[i]); } } seriesBlocks = (String[]) sBlock.toArray(new String[0]); fileVector = new Vector(); String file = fp.getFiles()[0]; Location dir = new Location(file).getAbsoluteFile().getParentFile(); String dpath = dir.getAbsolutePath(); String[] fs = dir.list(); setFiles(fs, seriesBlocks[0], fp.getFirst()[0], fp.getLast()[0], fp.getStep()[0], dpath, 0); seriesCount = fileVector.size(); files = new String[seriesCount][]; for (int i=0; i<seriesCount; i++) { files[i] = (String[]) fileVector.get(i); } } // verify that file pattern is valid and matches existing files String msg = " Please rename your files or disable file stitching."; if (!fp.isValid()) { throw new FormatException("Invalid " + (patternIds ? "file pattern" : "filename") + " (" + currentId + "): " + fp.getErrorMessage() + msg); } if (files == null) { files = new String[1][]; files[0] = fp.getFiles(); } if (files[0] == null) { throw new FormatException("No files matching pattern (" + fp.getPattern() + "). 
" + msg); } for (int i=0; i<files.length; i++) { for (int j=0; j<files[i].length; j++) { if (!new Location(files[i][j]).exists()) { throw new FormatException("File #" + i + " (" + files[i][j] + ") does not exist."); } } } // determine reader type for these files; assume all are the same type Vector classes = new Vector(); IFormatReader r = reader; while (r instanceof ReaderWrapper) { classes.add(r.getClass()); r = ((ReaderWrapper) r).getReader(); } if (r instanceof ImageReader) r = ((ImageReader) r).getReader(files[0][0]); classes.add(r.getClass()); // construct list of readers for all files readers = new IFormatReader[files.length][]; for (int i=0; i<readers.length; i++) { readers[i] = new IFormatReader[files[i].length]; } readers[0][0] = reader; for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { // use crazy reflection to instantiate a reader of the proper type try { r = null; for (int k=classes.size()-1; k>=0; k--) { Class c = (Class) classes.elementAt(k); if (r == null) r = (IFormatReader) c.newInstance(); else { r = (IFormatReader) c.getConstructor(new Class[] {IFormatReader.class}).newInstance(new Object[] {r}); } } readers[i][j] = (IFormatReader) r; } catch (InstantiationException exc) { LogTools.trace(exc); } catch (IllegalAccessException exc) { LogTools.trace(exc); } catch (NoSuchMethodException exc) { LogTools.trace(exc); } catch (InvocationTargetException exc) { LogTools.trace(exc); } } } // sync reader configurations with original reader boolean normalized = reader.isNormalized(); boolean metadataFiltered = reader.isMetadataFiltered(); boolean metadataCollected = reader.isMetadataCollected(); StatusListener[] statusListeners = reader.getStatusListeners(); for (int i=0; i<readers.length; i++) { for (int j=0; j<readers[i].length; j++) { readers[i][j].setNormalized(normalized); readers[i][j].setMetadataFiltered(metadataFiltered); readers[i][j].setMetadataCollected(metadataCollected); for (int k=0; k<statusListeners.length; k++) { readers[i][j].addStatusListener(statusListeners[k]); } } } String[] originalUsedFiles = reader.getUsedFiles(); boolean doNotStitch = true; for (int i=0; i<files.length; i++) { for (int k=0; k<files[i].length; k++) { boolean found = false; for (int j=0; j<originalUsedFiles.length; j++) { if (originalUsedFiles[j].endsWith(files[i][k])) { found = true; break; } } if (!found) { doNotStitch = false; break; } } } if (doNotStitch) { // the reader for this file uses its own stitching logic that is probably // smarter than FileStitcher readers = new IFormatReader[1][1]; readers[0][0] = reader; String f = files[0][0]; files = new String[1][1]; files[0][0] = f; fp = new FilePattern(files[0][0]); } ag = new AxisGuesser[seriesCount]; blankImage = new BufferedImage[seriesCount]; blankBytes = new byte[seriesCount][]; blankThumb = new BufferedImage[seriesCount]; blankThumbBytes = new byte[seriesCount][]; imagesPerFile = new int[seriesCount]; sizeZ = new int[seriesCount]; sizeC = new int[seriesCount]; sizeT = new int[seriesCount]; boolean[] certain = new boolean[seriesCount]; lenZ = new int[seriesCount][]; lenC = new int[seriesCount][]; lenT = new int[seriesCount][]; // analyze first file; assume each file has the same parameters core = new CoreMetadata(seriesCount); int oldSeries = reader.getSeries(); IFormatReader rr = reader; for (int i=0; i<seriesCount; i++) { if (seriesInFile) rr.setSeries(i); else { rr = readers[i][0]; rr.setId(files[i][0]); } core.sizeX[i] = rr.getSizeX(); core.sizeY[i] = rr.getSizeY(); // NB: core.sizeZ populated in 
computeAxisLengths below // NB: core.sizeC populated in computeAxisLengths below // NB: core.sizeT populated in computeAxisLengths below core.pixelType[i] = rr.getPixelType(); imagesPerFile[i] = rr.getImageCount(); core.imageCount[i] = files[i].length * imagesPerFile[i]; core.thumbSizeX[i] = rr.getThumbSizeX(); core.thumbSizeY[i] = rr.getThumbSizeY(); // NB: core.cLengths[i] populated in computeAxisLengths below // NB: core.cTypes[i] populated in computeAxisLengths below core.currentOrder[i] = rr.getDimensionOrder(); // NB: core.orderCertain[i] populated below core.rgb[i] = rr.isRGB(); core.littleEndian[i] = rr.isLittleEndian(); core.interleaved[i] = rr.isInterleaved(); core.seriesMetadata[i] = rr.getMetadata(); sizeZ[i] = rr.getSizeZ(); sizeC[i] = rr.getSizeC(); sizeT[i] = rr.getSizeT(); certain[i] = rr.isOrderCertain(); } reader.setSeries(oldSeries); // guess at dimensions corresponding to file numbering for (int i=0; i<seriesCount; i++) { ag[i] = new AxisGuesser(fp, core.currentOrder[i], sizeZ[i], sizeT[i], sizeC[i], certain[i]); } // order may need to be adjusted for (int i=0; i<seriesCount; i++) { setSeries(i); core.currentOrder[i] = ag[i].getAdjustedOrder(); core.orderCertain[i] = ag[i].isCertain(); computeAxisLengths(); } setSeries(oldSeries); // initialize used files list only when requested usedFiles = null; } // -- Helper methods -- /** Computes axis length arrays, and total axis lengths. */ protected void computeAxisLengths() throws FormatException { int sno = getSeries(); FilePattern p = new FilePattern(FilePattern.findPattern(files[sno][0], new Location(files[sno][0]).getAbsoluteFile().getParentFile().getPath(), files[sno])); int[] count = p.getCount(); try { readers[sno][0].setId(files[sno][0]); } catch (IOException e) { throw new FormatException(e); } ag[sno] = new AxisGuesser(p, readers[sno][0].getDimensionOrder(), readers[sno][0].getSizeZ(), readers[sno][0].getSizeT(), readers[sno][0].getSizeC(), readers[sno][0].isOrderCertain()); int[] axes = ag[sno].getAxisTypes(); int numZ = ag[sno].getAxisCountZ(); int numC = ag[sno].getAxisCountC(); int numT = ag[sno].getAxisCountT(); core.sizeZ[sno] = sizeZ[sno]; core.sizeC[sno] = sizeC[sno]; core.sizeT[sno] = sizeT[sno]; lenZ[sno] = new int[numZ + 1]; lenC[sno] = new int[numC + 1]; lenT[sno] = new int[numT + 1]; lenZ[sno][0] = sizeZ[sno]; lenC[sno][0] = sizeC[sno]; lenT[sno][0] = sizeT[sno]; for (int i=0, z=1, c=1, t=1; i<count.length; i++) { switch (axes[i]) { case AxisGuesser.Z_AXIS: core.sizeZ[sno] *= count[i]; lenZ[sno][z++] = count[i]; break; case AxisGuesser.C_AXIS: core.sizeC[sno] *= count[i]; lenC[sno][c++] = count[i]; break; case AxisGuesser.T_AXIS: core.sizeT[sno] *= count[i]; lenT[sno][t++] = count[i]; break; case AxisGuesser.S_AXIS: break; default: throw new FormatException("Unknown axis type for axis #" + i + ": " + axes[i]); } } int[] cLengths = reader.getChannelDimLengths(); String[] cTypes = reader.getChannelDimTypes(); int cCount = 0; for (int i=0; i<cLengths.length; i++) { if (cLengths[i] > 1) cCount++; } for (int i=1; i<lenC[sno].length; i++) { if (lenC[sno][i] > 1) cCount++; } if (cCount == 0) { core.cLengths[sno] = new int[] {1}; core.cTypes[sno] = new String[] {FormatTools.CHANNEL}; } else { core.cLengths[sno] = new int[cCount]; core.cTypes[sno] = new String[cCount]; } int c = 0; for (int i=0; i<cLengths.length; i++) { if (cLengths[i] == 1) continue; core.cLengths[sno][c] = cLengths[i]; core.cTypes[sno][c] = cTypes[i]; c++; } for (int i=1; i<lenC[sno].length; i++) { if (lenC[sno][i] == 1) continue; 
core.cLengths[sno][c] = lenC[sno][i]; core.cTypes[sno][c] = FormatTools.CHANNEL; } // populate metadata store int pixelType = getPixelType(); boolean little = reader.isLittleEndian(); MetadataStore s = reader.getMetadataStore(); s.setPixels(new Integer(core.sizeX[sno]), new Integer(core.sizeY[sno]), new Integer(core.sizeZ[sno]), new Integer(core.sizeC[sno]), new Integer(core.sizeT[sno]), new Integer(pixelType), new Boolean(!little), core.currentOrder[sno], new Integer(sno), null); } /** * Gets the file index, and image index into that file, * corresponding to the given global image index. * * @return An array of size 2, dimensioned {file index, image index}. */ protected int[] computeIndices(int no) throws FormatException, IOException { int sno = getSeries(); int[] axes = ag[sno].getAxisTypes(); int[] count = fp.getCount(); // get Z, C and T positions int[] zct = getZCTCoords(no); zct[1] *= getRGBChannelCount(); int[] posZ = FormatTools.rasterToPosition(lenZ[sno], zct[0]); int[] posC = FormatTools.rasterToPosition(lenC[sno], zct[1]); int[] posT = FormatTools.rasterToPosition(lenT[sno], zct[2]); // convert Z, C and T position lists into file index and image index int[] pos = new int[axes.length]; int z = 1, c = 1, t = 1; for (int i=0; i<axes.length; i++) { if (axes[i] == AxisGuesser.Z_AXIS) pos[i] = posZ[z++]; else if (axes[i] == AxisGuesser.C_AXIS) pos[i] = posC[c++]; else if (axes[i] == AxisGuesser.T_AXIS) pos[i] = posT[t++]; else { throw new FormatException("Unknown axis type for axis #" + i + ": " + axes[i]); } } int fno = FormatTools.positionToRaster(count, pos); // configure the reader, in case we haven't done this one yet readers[sno][fno].setId(files[sno][fno]); readers[sno][fno].setSeries(reader.getSeries()); int ino; if (posZ[0] < readers[sno][fno].getSizeZ() && posC[0] < readers[sno][fno].getSizeC() && posT[0] < readers[sno][fno].getSizeT()) { ino = FormatTools.getIndex(readers[sno][fno], posZ[0], posC[0], posT[0]); } else ino = Integer.MAX_VALUE; // coordinates out of range return new int[] {fno, ino}; } /** * Gets a list of readers to include in relation to the given C position. * @return Array with indices corresponding to the list of readers, and * values indicating the internal channel index to use for that reader. 
*/ protected int[] getIncludeList(int theC) throws FormatException, IOException { int[] include = new int[readers.length]; Arrays.fill(include, -1); for (int t=0; t<sizeT[getSeries()]; t++) { for (int z=0; z<sizeZ[getSeries()]; z++) { int no = getIndex(z, theC, t); int[] q = computeIndices(no); int fno = q[0], ino = q[1]; include[fno] = ino; } } return include; } private FilePattern getPattern(String[] f, String dir, String block) { Vector v = new Vector(); for (int i=0; i<f.length; i++) { if (f[i].indexOf(File.separator) != -1) { f[i] = f[i].substring(f[i].lastIndexOf(File.separator) + 1); } if (dir.endsWith(File.separator)) f[i] = dir + f[i]; else f[i] = dir + File.separator + f[i]; if (f[i].indexOf(block) != -1 && new Location(f[i]).exists()) { v.add(f[i].substring(f[i].lastIndexOf(File.separator) + 1)); } } f = (String[]) v.toArray(new String[0]); return new FilePattern(FilePattern.findPattern(f[0], dir, f)); } private void setFiles(String[] list, String prefix, BigInteger first, BigInteger last, BigInteger step, String dir, int blockNum) { long f = first.longValue(); long l = last.longValue(); long s = step.longValue(); for (long i=f; i<=l; i+=s) { FilePattern newPattern = getPattern(list, dir, prefix + i); if (blockNum == seriesBlocks.length - 1) { fileVector.add(newPattern.getFiles()); } else { String next = seriesBlocks[blockNum + 1]; String[] blocks = newPattern.getPrefixes(); BigInteger fi = null; BigInteger la = null; BigInteger st = null; for (int q=0; q<blocks.length; q++) { if (blocks[q].indexOf(next) != -1) { fi = newPattern.getFirst()[q]; la = newPattern.getLast()[q]; st = newPattern.getStep()[q]; break; } } setFiles(newPattern.getFiles(), next, fi, la, st, dir, blockNum + 1); } } } // -- Deprecated FileStitcher API methods -- /** @deprecated Replaced by {@link #getAxisTypes()} */ public int[] getAxisTypes(String id) throws FormatException, IOException { setId(id); return getAxisTypes(); } /** @deprecated Replaced by {@link #setAxisTypes(int[])} */ public void setAxisTypes(String id, int[] axes) throws FormatException, IOException { setId(id); setAxisTypes(axes); } /** @deprecated Replaced by {@link #getFilePattern()} */ public FilePattern getFilePattern(String id) throws FormatException, IOException { setId(id); return getFilePattern(); } /** @deprecated Replaced by {@link #getAxisGuesser()} */ public AxisGuesser getAxisGuesser(String id) throws FormatException, IOException { setId(id); return getAxisGuesser(); } // -- Deprecated IFormatReader API methods -- /** @deprecated Replaced by {@link #getImageCount()} */ public int getImageCount(String id) throws FormatException, IOException { setId(id); return getImageCount(); } /** @deprecated Replaced by {@link #isRGB()} */ public boolean isRGB(String id) throws FormatException, IOException { setId(id); return isRGB(); } /** @deprecated Replaced by {@link #getSizeX()} */ public int getSizeX(String id) throws FormatException, IOException { setId(id); return getSizeX(); } /** @deprecated Replaced by {@link #getSizeY()} */ public int getSizeY(String id) throws FormatException, IOException { setId(id); return getSizeY(); } /** @deprecated Replaced by {@link #getSizeZ()} */ public int getSizeZ(String id) throws FormatException, IOException { setId(id); return getSizeZ(); } /** @deprecated Replaced by {@link #getSizeC()} */ public int getSizeC(String id) throws FormatException, IOException { setId(id); return getSizeC(); } /** @deprecated Replaced by {@link #getSizeT()} */ public int getSizeT(String id) throws FormatException, 
IOException { setId(id); return getSizeT(); } /** @deprecated Replaced by {@link #getPixelType()} */ public int getPixelType(String id) throws FormatException, IOException { setId(id); return getPixelType(); } /** @deprecated Replaced by {@link #getEffectiveSizeC()} */ public int getEffectiveSizeC(String id) throws FormatException, IOException { setId(id); return getEffectiveSizeC(); } /** @deprecated Replaced by {@link #getRGBChannelCount()} */ public int getRGBChannelCount(String id) throws FormatException, IOException { setId(id); return getSizeC() / getEffectiveSizeC(); } /** @deprecated Replaced by {@link #getChannelDimLengths()} */ public int[] getChannelDimLengths(String id) throws FormatException, IOException { setId(id); return getChannelDimLengths(); } /** @deprecated Replaced by {@link #getChannelDimTypes()} */ public String[] getChannelDimTypes(String id) throws FormatException, IOException { setId(id); return getChannelDimTypes(); } /** @deprecated Replaced by {@link #getThumbSizeX()} */ public int getThumbSizeX(String id) throws FormatException, IOException { setId(id); return getThumbSizeX(); } /** @deprecated Replaced by {@link #getThumbSizeY()} */ public int getThumbSizeY(String id) throws FormatException, IOException { setId(id); return getThumbSizeY(); } /** @deprecated Replaced by {@link #isLittleEndian()} */ public boolean isLittleEndian(String id) throws FormatException, IOException { setId(id); return isLittleEndian(); } /** @deprecated Replaced by {@link #getDimensionOrder()} */ public String getDimensionOrder(String id) throws FormatException, IOException { setId(id); return getDimensionOrder(); } /** @deprecated Replaced by {@link #isOrderCertain()} */ public boolean isOrderCertain(String id) throws FormatException, IOException { setId(id); return isOrderCertain(); } /** @deprecated Replaced by {@link #isInterleaved()} */ public boolean isInterleaved(String id) throws FormatException, IOException { setId(id); return isInterleaved(); } /** @deprecated Replaced by {@link #isInterleaved(int)} */ public boolean isInterleaved(String id, int subC) throws FormatException, IOException { setId(id); return isInterleaved(subC); } /** @deprecated Replaced by {@link #openImage(int)} */ public BufferedImage openImage(String id, int no) throws FormatException, IOException { setId(id); return openImage(no); } /** @deprecated Replaced by {@link #openBytes(int)} */ public byte[] openBytes(String id, int no) throws FormatException, IOException { setId(id); return openBytes(no); } /** @deprecated Replaced by {@link #openBytes(int, byte[])} */ public byte[] openBytes(String id, int no, byte[] buf) throws FormatException, IOException { setId(id); return openBytes(no, buf); } /** @deprecated Replaced by {@link #openThumbImage(int)} */ public BufferedImage openThumbImage(String id, int no) throws FormatException, IOException { setId(id); return openThumbImage(no); } /** @deprecated Replaced by {@link #openThumbImage(int)} */ public byte[] openThumbBytes(String id, int no) throws FormatException, IOException { setId(id); return openThumbBytes(no); } /** @deprecated Replaced by {@link #getSeriesCount()} */ public int getSeriesCount(String id) throws FormatException, IOException { setId(id); return getSeriesCount(); } /** @deprecated Replaced by {@link #setSeries(int)} */ public void setSeries(String id, int no) throws FormatException, IOException { setId(id); setSeries(no); } /** @deprecated Replaced by {@link #getSeries()} */ public int getSeries(String id) throws FormatException, 
IOException { setId(id); return getSeries(); } /** @deprecated Replaced by {@link #getUsedFiles()} */ public String[] getUsedFiles(String id) throws FormatException, IOException { setId(id); return getUsedFiles(); } /** @deprecated Replaced by {@link #getIndex(int, int, int)} */ public int getIndex(String id, int z, int c, int t) throws FormatException, IOException { setId(id); return getIndex(z, c, t); } /** @deprecated Replaced by {@link #getZCTCoords(int)} */ public int[] getZCTCoords(String id, int index) throws FormatException, IOException { setId(id); return getZCTCoords(index); } /** @deprecated Replaced by {@link #getMetadataValue(String)} */ public Object getMetadataValue(String id, String field) throws FormatException, IOException { setId(id); return getMetadataValue(field); } /** @deprecated Replaced by {@link #getMetadata()} */ public Hashtable getMetadata(String id) throws FormatException, IOException { setId(id); return getMetadata(); } /** @deprecated Replaced by {@link #getCoreMetadata()} */ public CoreMetadata getCoreMetadata(String id) throws FormatException, IOException { setId(id); return getCoreMetadata(); } /** @deprecated Replaced by {@link #getMetadataStore()} */ public MetadataStore getMetadataStore(String id) throws FormatException, IOException { setId(id); return getMetadataStore(); } /** @deprecated Replaced by {@link #getMetadataStoreRoot()} */ public Object getMetadataStoreRoot(String id) throws FormatException, IOException { setId(id); return getMetadataStoreRoot(); } }
Updated MetadataStore population logic to handle multiple series.
loci/formats/FileStitcher.java
Updated MetadataStore population logic to handle multiple series.
<ide><path>loci/formats/FileStitcher.java <ide> <ide> private String[] seriesBlocks; <ide> private Vector fileVector; <add> private Vector seriesNames; <ide> <ide> // -- Constructors -- <ide> <ide> <ide> seriesBlocks = (String[]) sBlock.toArray(new String[0]); <ide> fileVector = new Vector(); <add> seriesNames = new Vector(); <ide> <ide> String file = fp.getFiles()[0]; <ide> Location dir = new Location(file).getAbsoluteFile().getParentFile(); <ide> int pixelType = getPixelType(); <ide> boolean little = reader.isLittleEndian(); <ide> MetadataStore s = reader.getMetadataStore(); <del> s.setPixels(new Integer(core.sizeX[sno]), new Integer(core.sizeY[sno]), <del> new Integer(core.sizeZ[sno]), new Integer(core.sizeC[sno]), <del> new Integer(core.sizeT[sno]), new Integer(pixelType), <del> new Boolean(!little), core.currentOrder[sno], new Integer(sno), null); <add> for (int i=0; i<core.sizeX.length; i++) { <add> s.setImage((String) seriesNames.get(i), null, null, new Integer(i)); <add> } <add> FormatTools.populatePixels(s, this); <ide> } <ide> <ide> /** <ide> FilePattern newPattern = getPattern(list, dir, prefix + i); <ide> if (blockNum == seriesBlocks.length - 1) { <ide> fileVector.add(newPattern.getFiles()); <add> String name = newPattern.getPattern(); <add> if (name.indexOf(File.separator) != -1) { <add> name = name.substring(name.lastIndexOf(File.separator) + 1); <add> } <add> seriesNames.add(name); <ide> } <ide> else { <ide> String next = seriesBlocks[blockNum + 1];
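The seriesNames hunk in the diff above keeps only the basename of each sub-pattern as the series name. A minimal sketch of that substring step, using an invented pattern string (the separator handling follows the diff; the value itself is hypothetical):

import java.io.File;

// Demonstrates the basename-stripping added in the diff above.
// The pattern string is invented for illustration.
public class SeriesNameDemo {
  public static void main(String[] args) {
    String name = "/data/experiment/tubule_C<0-2>_Z<0-4>.tif";
    if (name.indexOf(File.separator) != -1) {
      name = name.substring(name.lastIndexOf(File.separator) + 1);
    }
    System.out.println(name); // "tubule_C<0-2>_Z<0-4>.tif" when the separator is '/'
  }
}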
Java
bsd-3-clause
1b09652f33b1b55a552348b10d73b4e91e193b57
0
rajsarkapally-sfdc/Argus,xizi-xu/Argus,dilipdevaraj-sfdc/Argus-1,dilipdevaraj-sfdc/Argus-1,salesforce/Argus,SalesforceEng/Argus,rajsarkapally/Argus,SalesforceEng/Argus,xizi-xu/Argus,rajsarkapally/Argus,dilipdevaraj-sfdc/Argus-1,salesforce/Argus,rajsarkapally/Argus,rajsarkapally-sfdc/Argus,xizi-xu/Argus,xizi-xu/Argus,salesforce/Argus,dilipdevaraj-sfdc/Argus-1,rajsarkapally-sfdc/Argus,salesforce/Argus,rajsarkapally/Argus,SalesforceEng/Argus,rajsarkapally/Argus,rajsarkapally-sfdc/Argus,SalesforceEng/Argus
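The next record implements Argus's FederatedTSDBService, whose getMetrics rewrites each AVG query into a ZIMSUM and a COUNT sub-query before federating across endpoints, then divides the joined results. The toy sketch below (all numbers invented, not Argus code) shows why that is necessary: averaging per-endpoint averages is wrong whenever endpoints hold different numbers of points.

// Invented numbers illustrating the federated-average pitfall that the
// ZIMSUM/COUNT decomposition in the record below avoids.
public class FederatedAvgDemo {

  public static void main(String[] args) {
    double[] endpointA = {10, 10, 10, 10}; // 4 points, average 10
    double[] endpointB = {40};             // 1 point, average 40

    double avgOfAvgs = (avg(endpointA) + avg(endpointB)) / 2;
    double sum = sum(endpointA) + sum(endpointB);        // ZIMSUM analogue
    double count = endpointA.length + endpointB.length;  // COUNT analogue

    System.out.println(avgOfAvgs);   // 25.0 -- not the average of all points
    System.out.println(sum / count); // 16.0 -- the true average of all 5 points
  }

  static double sum(double[] xs) { double s = 0; for (double x : xs) s += x; return s; }
  static double avg(double[] xs) { return sum(xs) / xs.length; }
}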
/* * Copyright (c) 2016, Salesforce.com, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of Salesforce.com nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package com.salesforce.dva.argus.service.tsdb; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.http.HttpResponse; import org.apache.http.impl.client.CloseableHttpClient; import com.fasterxml.jackson.core.type.TypeReference; import com.google.inject.Inject; import com.google.inject.Singleton; import com.salesforce.dva.argus.entity.Annotation; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.TSDBService; import com.salesforce.dva.argus.service.metric.transform.InterpolateTransform; import com.salesforce.dva.argus.service.metric.transform.Transform; import com.salesforce.dva.argus.service.metric.transform.TransformFactory; import com.salesforce.dva.argus.service.tsdb.MetricQuery.Aggregator; import com.salesforce.dva.argus.service.tsdb.MetricQuery.MetricQueryContext; import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; /** * The federated implementation of the TSDBService. 
* * @author Dilip Devaraj ([email protected]) */ @Singleton public class FederatedTSDBService extends AbstractTSDBService { //~ Instance fields ****************************************************************************************************************************** private final TransformFactory _transformFactory; //~ Constructors ********************************************************************************************************************************* /** * Creates a new federated TSDB service having an equal number of read and write routes. * * @param config The system configuration used to configure the service. * @param monitorService The monitor service used to collect query time window counters. Cannot be null. * @param transformFactory Transform Factory * * @throws SystemException If an error occurs configuring the service. */ @Inject public FederatedTSDBService(SystemConfiguration config, MonitorService monitorService, TransformFactory transformFactory) { super(config, monitorService); _transformFactory = transformFactory; } //~ Methods ************************************************************************************************************************************** /** @see TSDBService#dispose() */ @Override public void dispose() { super.dispose(); List<CloseableHttpClient> clients = new ArrayList<>(); clients.addAll(_readPortMap.values()); clients.add(_writeHttpClient); for (CloseableHttpClient client : clients) { try { client.close(); } catch (Exception ex) { _logger.warn("A TSDB HTTP client failed to shutdown properly.", ex); } } _executorService.shutdownNow(); try { _executorService.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { _logger.debug("Await Termination Interrupted", e); } } /** @see TSDBService#getMetrics(java.util.List) */ @Override public Map<MetricQuery, List<Metric>> getMetrics(List<MetricQuery> queries) { List<MetricQuery> additionalQueries = new ArrayList<>(); List<MetricQuery> removeQueries = new ArrayList<>(); Map<MetricQuery, List<MetricQuery>> removeAdditionalQueryMap = new HashMap<>(); for(MetricQuery query: queries){ if(query.getAggregator().equals(Aggregator.AVG)){ MetricQuery mq1 = new MetricQuery(query); mq1.setAggregator(Aggregator.ZIMSUM); additionalQueries.add(mq1); MetricQuery mq2 = new MetricQuery(query); mq2.setAggregator(Aggregator.COUNT); additionalQueries.add(mq2); removeQueries.add(query); removeAdditionalQueryMap.put(query, additionalQueries); } } queries.removeAll(removeQueries); queries.addAll(additionalQueries); Map<MetricQuery, List<Metric>> queryMetricsMap = federateJoinMetrics(queries); for(Map.Entry<MetricQuery, List<MetricQuery>> entry : removeAdditionalQueryMap.entrySet()){ additionalQueries = entry.getValue(); List<Metric> averageMetrics = queryMetricsMap.get(additionalQueries.get(0)); averageMetrics.addAll(queryMetricsMap.get(additionalQueries.get(1))); Transform divideTransform = _transformFactory.getTransform(TransformFactory.Function.DIVIDE_V.getName()); List<Metric> result = divideTransform.transform(averageMetrics); queryMetricsMap.put(entry.getKey(), result); queryMetricsMap.remove(additionalQueries.get(0)); queryMetricsMap.remove(additionalQueries.get(1)); } return queryMetricsMap; } public Map<MetricQuery, List<Metric>> federateJoinMetrics(List<MetricQuery> queries) { Map<MetricQuery, Long> queryStartExecutionTime = new HashMap<>(); for (MetricQuery query : queries) { queryStartExecutionTime.put(query, System.currentTimeMillis()); } QueryFederation queryFederation = new
EndPointQueryFederation(_readEndPoints); Map<MetricQuery, List<MetricQuery>> mapQueryEndPointSubQueries = queryFederation.federateQueries(queries); List<MetricQuery> queriesSplit = new ArrayList<>(); for(List<MetricQuery> subQueries : mapQueryEndPointSubQueries.values()){ queriesSplit.addAll(subQueries); } long beforeTime = System.currentTimeMillis(); Map<MetricQuery, List<Metric>> subQueryMetricsMap = getSubQueryMetrics(queriesSplit); long afterTime = System.currentTimeMillis(); _logger.info("Time spent in waiting for all sub query results: {}", afterTime - beforeTime); beforeTime = System.currentTimeMillis(); Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQueryEndPointSubQueries, subQueryMetricsMap); afterTime = System.currentTimeMillis(); _logger.info("Time spent in joining results: {}", afterTime - beforeTime); for (MetricQuery query : queries) { instrumentQueryLatency(_monitorService, query, queryStartExecutionTime.get(query), "metrics"); } return queryMetricsMap; } /** @see TSDBService#getAnnotations(java.util.List) */ @Override public List<Annotation> getAnnotations(List<AnnotationQuery> queries) { requireNotDisposed(); requireArgument(queries != null, "Annotation queries cannot be null."); List<Annotation> annotations = new ArrayList<>(); for (AnnotationQuery query : queries) { long start = System.currentTimeMillis(); for (String readEndPoint : _readEndPoints) { String pattern = readEndPoint + "/api/query?{0}"; String requestUrl = MessageFormat.format(pattern, query.toString()); List<AnnotationWrapper> wrappers = null; try { HttpResponse response = executeHttpRequest(HttpMethod.GET, requestUrl, _readPortMap.get(readEndPoint), null); wrappers = toEntity(extractResponse(response), new TypeReference<AnnotationWrappers>() { }); } catch (Exception ex) { _logger.warn("Failed to get annotations from TSDB. Reason: " + ex.getMessage()); try { if (!_readBackupEndPointsMap.get(readEndPoint).isEmpty()) { _logger.warn("Trying to read from Backup endpoint"); pattern = _readBackupEndPointsMap.get(readEndPoint) + "/api/query?{0}"; requestUrl = MessageFormat.format(pattern, query.toString()); HttpResponse response = executeHttpRequest(HttpMethod.GET, requestUrl, _readPortMap.get( _readBackupEndPointsMap.get(readEndPoint)), null); wrappers = toEntity(extractResponse(response), new TypeReference<AnnotationWrappers>() { }); } } catch (Exception e) { _logger.warn("Failed to get annotations from Backup TSDB. 
Reason: " + e.getMessage()); continue; } } if (wrappers != null) { for (AnnotationWrapper wrapper : wrappers) { for (Annotation existing : wrapper.getAnnotations()) { String source = existing.getSource(); String id = existing.getId(); String type = query.getType(); String scope = query.getScope(); String metric = query.getMetric(); Long timestamp = existing.getTimestamp(); Annotation updated = new Annotation(source, id, type, scope, metric, timestamp); updated.setFields(existing.getFields()); updated.setTags(query.getTags()); annotations.add(updated); } } } } instrumentQueryLatency(_monitorService, query, start, "annotations"); } return annotations; } /* Gets metrics for a list of queries */ private Map<MetricQuery, List<Metric>> getSubQueryMetrics(List<MetricQuery> queries) { Map<MetricQuery, Future<List<Metric>>> queryFutureMap = new HashMap<>(); for (MetricQuery query : queries) { MetricQuery querySubstitutedAgg = new MetricQuery(query); querySubstitutedAgg.setAggregator(getSubstitutedAggregator(query.getAggregator())); if(query.getDownsampler() !=null){ querySubstitutedAgg.setDownsampler(getSubstitutedDownsampler(query.getDownsampler())); } String requestBody = fromEntity(querySubstitutedAgg); String requestUrl = query.getMetricQueryContext().getReadEndPoint() + "/api/query"; queryFutureMap.put(query, _executorService.submit(new QueryWorker(requestUrl, query.getMetricQueryContext().getReadEndPoint(), requestBody))); } Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<>(); for (Entry<MetricQuery, Future<List<Metric>>> entry : queryFutureMap.entrySet()) { List<Metric> metrics = new ArrayList<>(); List<Metric> m = null; try { m = entry.getValue().get(); } catch (InterruptedException | ExecutionException e) { _logger.warn("Failed to get metrics from TSDB. Reason: " + e.getMessage()); try { String readBackupEndPoint = _readBackupEndPointsMap.get(entry.getKey().getMetricQueryContext().getReadEndPoint()); if (!readBackupEndPoint.isEmpty()) { _logger.warn("Trying to read from Backup endpoint"); MetricQuery querySubstitutedAgg = new MetricQuery(entry.getKey()); querySubstitutedAgg.setAggregator(getSubstitutedAggregator(entry.getKey().getAggregator())); if(entry.getKey().getDownsampler() !=null){ querySubstitutedAgg.setDownsampler(getSubstitutedDownsampler(entry.getKey().getDownsampler())); } m = new QueryWorker(readBackupEndPoint + "/api/query", readBackupEndPoint, fromEntity(querySubstitutedAgg)).call(); } } catch (Exception ex) { _logger.warn("Failed to get metrics from Backup TSDB. 
Reason: " + ex.getMessage()); continue; } } if (m != null) { for (Metric metric : m) { if (metric != null) { metric.setQuery(entry.getKey()); metrics.add(metric); } } } subQueryMetricsMap.put(entry.getKey(), metrics); } return subQueryMetricsMap; } private Aggregator getSubstitutedAggregator(Aggregator aggregator){ switch(aggregator){ case SUM: return Aggregator.ZIMSUM; case MIN: return Aggregator.MIMMIN; case MAX: return Aggregator.MIMMAX; case ZIMSUM: return Aggregator.ZIMSUM; case COUNT: return Aggregator.COUNT; default: throw new UnsupportedOperationException("Unsupported aggregator specified"); } } private Aggregator getSubstitutedDownsampler(Aggregator aggregator){ switch(aggregator){ case SUM: return Aggregator.SUM; case MIN: return Aggregator.MIN; case MAX: return Aggregator.MAX; case ZIMSUM: return Aggregator.ZIMSUM; case COUNT: return Aggregator.COUNT; case AVG: return Aggregator.AVG; default: throw new UnsupportedOperationException("Unsupported aggregator specified"); } } @Override public Properties getServiceProperties() { Properties serviceProps= new Properties(); for(Property property:Property.values()){ serviceProps.put(property.getName(), property.getDefaultValue()); } return serviceProps; } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/FederatedTSDBService.java
/* * Copyright (c) 2016, Salesforce.com, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of Salesforce.com nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package com.salesforce.dva.argus.service.tsdb; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.http.HttpResponse; import org.apache.http.impl.client.CloseableHttpClient; import com.fasterxml.jackson.core.type.TypeReference; import com.google.inject.Inject; import com.google.inject.Singleton; import com.salesforce.dva.argus.entity.Annotation; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.TSDBService; import com.salesforce.dva.argus.service.metric.transform.InterpolateTransform; import com.salesforce.dva.argus.service.metric.transform.Transform; import com.salesforce.dva.argus.service.metric.transform.TransformFactory; import com.salesforce.dva.argus.service.tsdb.MetricQuery.Aggregator; import com.salesforce.dva.argus.service.tsdb.MetricQuery.MetricQueryContext; import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; /** * The federated implementation of the TSDBService. 
* * @author Dilip Devaraj ([email protected]) */ @Singleton public class FederatedTSDBService extends AbstractTSDBService{ //~ Instance fields ****************************************************************************************************************************** private final TransformFactory _transformFactory; //~ Constructors ********************************************************************************************************************************* /** * Creates a new Default TSDB Service having an equal number of read and write routes. * * @param config The system _configuration used to configure the service. * @param monitorService The monitor service used to collect query time window counters. Cannot be null. * @param transformFactory Transform Factory * * @throws SystemException If an error occurs configuring the service. */ @Inject public FederatedTSDBService(SystemConfiguration config, MonitorService monitorService, TransformFactory transformFactory) { super(config, monitorService); _transformFactory = transformFactory; } //~ Methods ************************************************************************************************************************************** /** @see TSDBService#dispose() */ @Override public void dispose() { super.dispose(); List<CloseableHttpClient> clients = new ArrayList<>(); clients.addAll(_readPortMap.values()); clients.add(_writeHttpClient); for (CloseableHttpClient client : clients) { try { client.close(); } catch (Exception ex) { _logger.warn("A TSDB HTTP client failed to shutdown properly.", ex); } } _executorService.shutdownNow(); try { _executorService.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { _logger.debug("Await Termination Interrupted", e); } } /** @see TSDBService#getMetrics(java.util.List) */ @Override public Map<MetricQuery, List<Metric>> getMetrics(List<MetricQuery> queries) { List<MetricQuery> additionalQueries = new ArrayList<>(); List<MetricQuery> removeQueries = new ArrayList<>(); Map<MetricQuery, List<MetricQuery>> removeAdditionalQueryMap = new HashMap<>(); for(MetricQuery query: queries){ if(query.getAggregator().equals(Aggregator.AVG)){ MetricQuery mq1 = new MetricQuery(query); mq1.setAggregator(Aggregator.ZIMSUM); additionalQueries.add(mq1); MetricQuery mq2 = new MetricQuery(query); mq2.setAggregator(Aggregator.COUNT); additionalQueries.add(mq2); removeQueries.add(query); removeAdditionalQueryMap.put(query, additionalQueries); } } queries.removeAll(removeQueries); queries.addAll(additionalQueries); Map<MetricQuery, List<Metric>> queryMetricsMap = federateJoinMetrics(queries); for(Map.Entry<MetricQuery, List<MetricQuery>> entry : removeAdditionalQueryMap.entrySet()){ additionalQueries = entry.getValue(); List<Metric> averageMetrics = queryMetricsMap.get(additionalQueries.get(0)); averageMetrics.addAll(queryMetricsMap.get(additionalQueries.get(1))); Transform divideTransform = _transformFactory.getTransform(TransformFactory.Function.DIVIDE_V.getName()); List<Metric> result = divideTransform.transform(averageMetrics); queryMetricsMap.put(entry.getKey(), result); queryMetricsMap.remove(additionalQueries.get(0)); queryMetricsMap.remove(additionalQueries.get(1)); } return queryMetricsMap; } public Map<MetricQuery, List<Metric>> federateJoinMetrics(List<MetricQuery> queries) { Map<MetricQuery, Long> queryStartExecutionTime = new HashMap<>(); for (MetricQuery query : queries) { queryStartExecutionTime.put(query, System.currentTimeMillis()); } QueryFederation queryFederation = new 
EndPointQueryFederation(_readEndPoints); Map<MetricQuery, List<MetricQuery>> mapQueryEndPointSubQueries = queryFederation.federateQueries(queries); List<MetricQuery> queriesSplit = new ArrayList<>(); for(List<MetricQuery> subQueries : mapQueryEndPointSubQueries.values()){ queriesSplit.addAll(subQueries); } long beforeTime = System.currentTimeMillis(); Map<MetricQuery, List<Metric>> subQueryMetricsMap = getSubQueryMetrics(queriesSplit); long afterTime = System.currentTimeMillis(); _logger.info("Time spent in waiting for all sub query results: {}", afterTime - beforeTime); beforeTime = System.currentTimeMillis(); Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQueryEndPointSubQueries, subQueryMetricsMap); afterTime = System.currentTimeMillis(); _logger.info("Time spent in joining results: {}", afterTime - beforeTime); for (MetricQuery query : queries) { instrumentQueryLatency(_monitorService, query, queryStartExecutionTime.get(query), "metrics"); } return queryMetricsMap; } /** @see TSDBService#getAnnotations(java.util.List) */ @Override public List<Annotation> getAnnotations(List<AnnotationQuery> queries) { requireNotDisposed(); requireArgument(queries != null, "Annotation queries cannot be null."); List<Annotation> annotations = new ArrayList<>(); for (AnnotationQuery query : queries) { long start = System.currentTimeMillis(); for (String readEndPoint : _readEndPoints) { String pattern = readEndPoint + "/api/query?{0}"; String requestUrl = MessageFormat.format(pattern, query.toString()); List<AnnotationWrapper> wrappers = null; try { HttpResponse response = executeHttpRequest(HttpMethod.GET, requestUrl, _readPortMap.get(readEndPoint), null); wrappers = toEntity(extractResponse(response), new TypeReference<AnnotationWrappers>() { }); } catch (Exception ex) { _logger.warn("Failed to get annotations from TSDB. Reason: " + ex.getMessage()); try { if (!_readBackupEndPointsMap.get(readEndPoint).isEmpty()) { _logger.warn("Trying to read from Backup endpoint"); pattern = _readBackupEndPointsMap.get(readEndPoint) + "/api/query?{0}"; requestUrl = MessageFormat.format(pattern, query.toString()); HttpResponse response = executeHttpRequest(HttpMethod.GET, requestUrl, _readPortMap.get( _readBackupEndPointsMap.get(readEndPoint)), null); wrappers = toEntity(extractResponse(response), new TypeReference<AnnotationWrappers>() { }); } } catch (Exception e) { _logger.warn("Failed to get annotations from Backup TSDB. 
Reason: " + e.getMessage()); continue; } } if (wrappers != null) { for (AnnotationWrapper wrapper : wrappers) { for (Annotation existing : wrapper.getAnnotations()) { String source = existing.getSource(); String id = existing.getId(); String type = query.getType(); String scope = query.getScope(); String metric = query.getMetric(); Long timestamp = existing.getTimestamp(); Annotation updated = new Annotation(source, id, type, scope, metric, timestamp); updated.setFields(existing.getFields()); updated.setTags(query.getTags()); annotations.add(updated); } } } } instrumentQueryLatency(_monitorService, query, start, "annotations"); } return annotations; } /* Gets metrics for a list of queries */ private Map<MetricQuery, List<Metric>> getSubQueryMetrics(List<MetricQuery> queries) { Map<MetricQuery, Future<List<Metric>>> queryFutureMap = new HashMap<>(); for (MetricQuery query : queries) { MetricQuery querySubstitutedAgg = new MetricQuery(query); querySubstitutedAgg.setAggregator(getSubstitutedAggregator(query.getAggregator())); if(query.getDownsampler() !=null){ querySubstitutedAgg.setDownsampler(getSubstitutedDownsampler(query.getDownsampler())); } String requestBody = fromEntity(querySubstitutedAgg); String requestUrl = query.getMetricQueryContext().getReadEndPoint() + "/api/query"; queryFutureMap.put(query, _executorService.submit(new QueryWorker(requestUrl, query.getMetricQueryContext().getReadEndPoint(), requestBody))); } Map<MetricQuery, List<Metric>> subQueryMetricsMap = new HashMap<>(); for (Entry<MetricQuery, Future<List<Metric>>> entry : queryFutureMap.entrySet()) { List<Metric> metrics = new ArrayList<>(); List<Metric> m = null; try { m = entry.getValue().get(); } catch (InterruptedException | ExecutionException e) { _logger.warn("Failed to get metrics from TSDB. Reason: " + e.getMessage()); try { String readBackupEndPoint = _readBackupEndPointsMap.get(entry.getKey().getMetricQueryContext().getReadEndPoint()); if (!readBackupEndPoint.isEmpty()) { _logger.warn("Trying to read from Backup endpoint"); MetricQuery querySubstitutedAgg = new MetricQuery(entry.getKey()); querySubstitutedAgg.setAggregator(getSubstitutedAggregator(entry.getKey().getAggregator())); if(entry.getKey().getDownsampler() !=null){ querySubstitutedAgg.setDownsampler(getSubstitutedDownsampler(entry.getKey().getDownsampler())); } m = new QueryWorker(readBackupEndPoint + "/api/query", readBackupEndPoint, fromEntity(querySubstitutedAgg)).call(); } } catch (Exception ex) { _logger.warn("Failed to get metrics from Backup TSDB. 
Reason: " + ex.getMessage()); continue; } } if (m != null) { for (Metric metric : m) { if (metric != null) { metric.setQuery(entry.getKey()); metrics.add(metric); } } } subQueryMetricsMap.put(entry.getKey(), metrics); } return subQueryMetricsMap; } private Aggregator getSubstitutedAggregator(Aggregator aggregator){ switch(aggregator){ case SUM: return Aggregator.ZIMSUM; case MIN: return Aggregator.MIMMIN; case MAX: return Aggregator.MIMMAX; case ZIMSUM: return Aggregator.ZIMSUM; case COUNT: return Aggregator.COUNT; default: throw new UnsupportedOperationException("Unsupported aggregator specified"); } } private Aggregator getSubstitutedDownsampler(Aggregator aggregator){ switch(aggregator){ case SUM: return Aggregator.SUM; case MIN: return Aggregator.MIN; case MAX: return Aggregator.MAX; default: throw new UnsupportedOperationException("Unsupported aggregator specified"); } } @Override public Properties getServiceProperties() { Properties serviceProps= new Properties(); for(Property property:Property.values()){ serviceProps.put(property.getName(), property.getDefaultValue()); } return serviceProps; } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
Add ZIMSUM, COUNT, and AVG cases to the downsampler substitution in FederatedTSDBService
ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/FederatedTSDBService.java
Add ZIMSUM, COUNT, and AVG cases to the downsampler substitution in FederatedTSDBService
<ide><path>ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/FederatedTSDBService.java <ide> case MIN: <ide> return Aggregator.MIN; <ide> case MAX: <del> return Aggregator.MAX; <add> return Aggregator.MAX; <add> case ZIMSUM: <add> return Aggregator.ZIMSUM; <add> case COUNT: <add> return Aggregator.COUNT; <add> case AVG: <add> return Aggregator.AVG; <ide> default: <ide> throw new UnsupportedOperationException("Unsupported aggregator specified"); <ide> }
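Why this hunk matters: getMetrics() above rewrites AVG queries into ZIMSUM and COUNT sub-queries, and getSubQueryMetrics() routes every sub-query's downsampler through getSubstitutedDownsampler(). Before this commit that switch only knew SUM, MIN, and MAX, so any query downsampled by AVG, ZIMSUM, or COUNT fell through to the default branch and threw UnsupportedOperationException. A compact standalone contrast of the two versions (the Agg enum is a local stand-in for MetricQuery.Aggregator):

// Pre- vs post-commit behavior of the downsampler substitution.
enum Agg { SUM, MIN, MAX, ZIMSUM, COUNT, AVG }

class DownsamplerSwitchSketch {
    static Agg before(Agg a) { // as in old_contents
        switch (a) {
            case SUM: return Agg.SUM;
            case MIN: return Agg.MIN;
            case MAX: return Agg.MAX;
            default: throw new UnsupportedOperationException("Unsupported aggregator specified");
        }
    }

    static Agg after(Agg a) { // as in new_contents
        switch (a) {
            case SUM: return Agg.SUM;
            case MIN: return Agg.MIN;
            case MAX: return Agg.MAX;
            case ZIMSUM: return Agg.ZIMSUM; // added by this commit
            case COUNT: return Agg.COUNT;   // added by this commit
            case AVG: return Agg.AVG;       // added by this commit
            default: throw new UnsupportedOperationException("Unsupported aggregator specified");
        }
    }

    public static void main(String[] args) {
        System.out.println(after(Agg.AVG)); // AVG now passes through
        try {
            before(Agg.AVG);
        } catch (UnsupportedOperationException e) {
            System.out.println("pre-commit: " + e.getMessage());
        }
    }
}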
Java
apache-2.0
899e5a04a6dc3df344a62028eaff129de3b45ef7
0
DataSketches/sketches-core
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.datasketches.req; import static java.lang.Math.max; import static java.lang.Math.sqrt; import static org.apache.datasketches.Criteria.GE; import static org.apache.datasketches.Criteria.GT; import static org.apache.datasketches.Criteria.LE; import static org.apache.datasketches.Criteria.LT; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.datasketches.Criteria; import org.apache.datasketches.SketchesArgumentException; import org.apache.datasketches.memory.Buffer; import org.apache.datasketches.memory.Memory; import org.apache.datasketches.memory.WritableBuffer; import org.apache.datasketches.memory.WritableMemory; /** * This Relative Error Quantiles Sketch is the Java implementation based on the paper * "Relative Error Streaming Quantiles", https://arxiv.org/abs/2004.01668, and loosely derived from * a Python prototype written by Pavel Vesely. * * <p>This implementation differs from the algorithm described in the paper in the following:</p> * * <ul> * <li>The algorithm requires no upper bound on the stream length. * Instead, each relative-compactor counts the number of compaction operations performed * so far (variable numCompactions). Initially, the relative-compactor starts with 3 sections. * Each time the numCompactions exceeds 2^{numSections - 1}, we double numSections.</li> * * <li>The size of each section (variable k and sectionSize in the code and parameter k in * the paper) is initialized with a value set by the user via variable k. * When the number of sections doubles, we decrease sectionSize by a factor of sqrt(2). * This is applied at each level separately. Thus, when we double the number of sections, the * nominal compactor size increases by a factor of sqrt(2) (up to +-1 after rounding).</li> * * <li>The merge operation here does not perform "special compactions", which are used in the paper * to allow for a tight mathematical analysis of the sketch.</li> * </ul> * * <p>This implementation provides a number of capabilities not discussed in the paper or provided * in the Python prototype.</p> * <ul><li>The Python prototype only implemented high accuracy for low ranks. This implementation * provides the user with the ability to choose either high rank accuracy or low rank accuracy at * the time of construction.</li> * <li>The Python prototype only implemented a comparison criterion of "&le;". This implementation * allows the user to switch back and forth between the "&le;" criterion and the "&lt;" criterion.</li> * <li>This implementation provides extensive debug visibility into the operation of the sketch with * two levels of detail output. 
This is not only useful for debugging, but is a powerful tool to * help users understand how the sketch works.</li> * </ul> * * @author Edo Liberty * @author Pavel Vesely * @author Lee Rhodes */ public class ReqSketch extends BaseReqSketch { //static finals private static final String LS = System.getProperty("line.separator"); static final int INIT_NUMBER_OF_SECTIONS = 3; static final int MIN_K = 4; private static final double relRseFactor = sqrt(0.0512 / INIT_NUMBER_OF_SECTIONS); private static final double fixRseFactor = .06; //finals private final int k; //user config, default is 12 (1% @ 95% Conf) final boolean hra; //user config, default is true //state variables private boolean compatible = true; //user config, default: true, can be set after construction private Criteria criterion = LT; //user config, default: LT, can be set after construction private long totalN; private float minValue = Float.NaN; private float maxValue = Float.NaN; //computed from compactors private int retItems = 0; //number of retained items in the sketch private int maxNomSize = 0; //sum of nominal capacities of all compactors //Objects private ReqAuxiliary aux = null; private List<ReqCompactor> compactors = new ArrayList<>(); ReqDebug reqDebug = null; //user config, default: null, can be set after construction. /** * Public Constructor. * @param k Controls the size and error of the sketch. It must be even and larger than or equal * to 4. If odd, it will be rounded down by one. * The default value of 12 roughly corresponds to 1% relative error guarantee at 95% confidence. * @param highRankAccuracy if true, the default, the high ranks are prioritized for better * accuracy. Otherwise the low ranks are prioritized for better accuracy. */ public ReqSketch(final int k, final boolean highRankAccuracy) { this.k = max(k & -2, MIN_K); //rounds down one if odd hra = highRankAccuracy; retItems = 0; maxNomSize = 0; totalN = 0; grow(); } /** * Copy Constructor * @param other the other sketch to be deep copied into this one. */ ReqSketch(final ReqSketch other) { k = other.k; hra = other.hra; totalN = other.totalN; retItems = other.retItems; maxNomSize = other.maxNomSize; minValue = other.minValue; maxValue = other.maxValue; compatible = other.compatible; criterion = other.criterion; reqDebug = other.reqDebug; //aux does not need to be copied for (int i = 0; i < other.getNumLevels(); i++) { compactors.add(new ReqCompactor(other.compactors.get(i))); } aux = null; } /** * Construct from elements. After sketch is constructed, retItems and maxNomSize must be computed. */ ReqSketch(final int k, final boolean hra, final long totalN, final float minValue, final float maxValue, final List<ReqCompactor> compactors) { this.k = k; this.hra = hra; this.totalN = totalN; this.minValue = minValue; this.maxValue = maxValue; this.compactors = compactors; } /** * Deserializes the byte stream of the given Memory object and places it on the Java heap. * @param mem the given Memory object * @return a ReqSketch on the Java heap. 
*/ public static ReqSketch heapify(final Memory mem) { final Buffer buff = mem.asBuffer(); final byte preLongs = buff.getByte(); assert preLongs == (byte)1; final byte serVer = buff.getByte(); assert serVer == (byte)1; final byte familyId = buff.getByte(); assert familyId == 17; final int flags = buff.getByte() & 0xFFFF; final boolean hra = (flags & 8) > 0; final boolean compatible = (flags & 16) > 0; final boolean ltEq = (flags & 32) > 0; final int k = buff.getInt(); final long totalN = buff.getLong(); final float minValue = buff.getFloat(); final float maxValue = buff.getFloat(); final List<ReqCompactor> compactors = new ArrayList<>(); final int numCompactors = buff.getInt(); for (int i = 0; i < numCompactors; i++) { final int cBytes = buff.getInt(); final long pos = buff.getPosition(); final long end = pos + cBytes; buff.setStartPositionEnd(0, pos, end); compactors.add(ReqCompactor.heapify(buff.region())); buff.setStartPositionEnd(0, pos + cBytes, buff.getCapacity()); } final ReqSketch sk = new ReqSketch(k, hra, totalN, minValue, maxValue, compactors); sk.updateMaxNomSize(); sk.updateRetainedItems(); sk.setCompatible(compatible); sk.setLessThanOrEqual(ltEq); return sk; } /** * Returns a new ReqSketchBuilder * @return a new ReqSketchBuilder */ public static final ReqSketchBuilder builder() { return new ReqSketchBuilder(); } private void compress() { if (reqDebug != null) { reqDebug.emitStartCompress(); } for (int h = 0; h < compactors.size(); h++) { final ReqCompactor c = compactors.get(h); final int retCompItems = c.getBuffer().getLength(); final int nomCap = c.getNomCapacity(); if (retCompItems >= nomCap) { if (h + 1 >= getNumLevels()) { //at the top? if (reqDebug != null) { reqDebug.emitMustAddCompactor(); } grow(); //add a level, increases maxNomSize } final FloatBuffer promoted = c.compact(); compactors.get(h + 1).getBuffer().mergeSortIn(promoted); updateRetainedItems(); if (retItems < maxNomSize) { break; } } } updateMaxNomSize(); aux = null; if (reqDebug != null) { reqDebug.emitCompressDone(); } } ReqAuxiliary getAux() { return aux; } @Override public double[] getCDF(final float[] splitPoints) { if (isEmpty()) { return new double[0]; } final long[] buckets = getPMForCDF(splitPoints); final int numBkts = buckets.length; final double[] outArr = new double[numBkts]; for (int j = 0; j < numBkts; j++) { outArr[j] = (double)buckets[j] / getN(); } return outArr; } List<ReqCompactor> getCompactors() { return compactors; } private long getCount(final float value) { final int numComp = compactors.size(); long cumNnr = 0; for (int i = 0; i < numComp; i++) { //cycle through compactors final ReqCompactor c = compactors.get(i); final long wt = 1L << c.getLgWeight(); final FloatBuffer buf = c.getBuffer(); cumNnr += buf.getCountWithCriterion(value, criterion) * wt; } if (criterion == GT || criterion == GE) { cumNnr = totalN - cumNnr; } return cumNnr; } private long[] getCounts(final float[] values) { final int numValues = values.length; final int numComp = compactors.size(); final long[] cumNnrArr = new long[numValues]; for (int i = 0; i < numComp; i++) { //cycle through compactors final ReqCompactor c = compactors.get(i); final long wt = 1L << c.getLgWeight(); final FloatBuffer buf = c.getBuffer(); for (int j = 0; j < numValues; j++) { cumNnrArr[j] += buf.getCountWithCriterion(values[j], criterion) * wt; } } if (criterion == GT || criterion == GE) { for (int j = 0; j < numValues; j++) { cumNnrArr[j] = totalN - cumNnrArr[j]; } } return cumNnrArr; } Criteria getCriterion() { return criterion; } 
@Override public boolean getHighRankAccuracy() { return hra; } int getK() { return k; } int getMaxNomSize() { return maxNomSize; } @Override public float getMaxValue() { return maxValue; } @Override public float getMinValue() { return minValue; } @Override public long getN() { return totalN; } /** * Gets the number of levels of compactors in the sketch. * @return the number of levels of compactors in the sketch. */ int getNumLevels() { return compactors.size(); } @Override public double[] getPMF(final float[] splitPoints) { if (isEmpty()) { return new double[0]; } final long[] buckets = getPMForCDF(splitPoints); final int numBkts = buckets.length; final double[] outArr = new double[numBkts]; outArr[0] = (double)buckets[0] / getN(); for (int j = 1; j < numBkts; j++) { outArr[j] = (double)(buckets[j] - buckets[j - 1]) / getN(); } return outArr; } /** * Gets a CDF in raw counts, which can be easily converted into a CDF or PMF. * @param splits the splitPoints array * @return a CDF in raw counts */ private long[] getPMForCDF(final float[] splits) { validateSplits(splits); final int numSplits = splits.length; final long[] splitCounts = getCounts(splits); final int numBkts = numSplits + 1; final long[] bkts = Arrays.copyOf(splitCounts, numBkts); bkts[numBkts - 1] = getN(); return bkts; } @Override public float getQuantile(final double normRank) { if (isEmpty()) { throw new SketchesArgumentException( "Sketch is empty."); } if (normRank < 0 || normRank > 1.0) { throw new SketchesArgumentException( "Normalized rank must be in the range [0.0, 1.0]: " + normRank); } if (aux == null) { aux = new ReqAuxiliary(this); } final float q = aux.getQuantile(normRank); if (Float.isNaN(q)) { //possible result from aux.getQuantile() if (compatible) { if (criterion == LT || criterion == LE) { return minValue; } else { return maxValue; } } } return q; } @Override public float[] getQuantiles(final double[] normRanks) { final int len = normRanks.length; final float[] qArr = new float[len]; for (int i = 0; i < len; i++) { qArr[i] = getQuantile(normRanks[i]); } return qArr; } @Override public double getRank(final float value) { final long nnCount = getCount(value); return (double)nnCount / totalN; } @Override public double[] getRanks(final float[] values) { final long[] cumNnrArr = getCounts(values); final int numValues = values.length; final double[] rArr = new double[numValues]; for (int i = 0; i < numValues; i++) { rArr[i] = (double)cumNnrArr[i] / totalN; } return rArr; } private static double getRankLB(final int k, final int levels, final double rank, final int numStdDev, final boolean hra, final long totalN) { if (levels == 1) { return rank; } final double thresh = (double)k * INIT_NUMBER_OF_SECTIONS / totalN; if ( hra && rank >= 1.0 - thresh) { return rank; } if (!hra && rank <= thresh) { return rank; } final double relative = relRseFactor / k * (hra ? 
1.0 - rank : rank); final double fixed = fixRseFactor / k; final double lbRel = rank - numStdDev * relative; final double lbFix = rank - numStdDev * fixed; return Math.max(lbRel, lbFix); } @Override public double getRankLowerBound(final double rank, final int numStdDev) { return getRankLB(k, getNumLevels(), rank, numStdDev, hra, getN()); } private static double getRankUB(final int k, final int levels, final double rank, final int numStdDev, final boolean hra, final long totalN) { if (levels == 1) { return rank; } final double thresh = (double)k * INIT_NUMBER_OF_SECTIONS / totalN; if ( hra && rank >= 1.0 - thresh) { return rank; } if (!hra && rank <= thresh) { return rank; } final double relative = relRseFactor / k * (hra ? 1.0 - rank : rank); final double fixed = fixRseFactor / k; final double ubRel = rank + numStdDev * relative; final double ubFix = rank + numStdDev * fixed; return Math.min(ubRel, ubFix); } @Override public double getRankUpperBound(final double rank, final int numStdDev) { return getRankUB(k, getNumLevels(), rank, numStdDev, hra, getN()); } @Override public int getRetainedItems() { return retItems; } @Override public double getRSE(final int k, final double rank, final boolean hra, final long totalN) { return getRankUB(k, 2, rank, 1, hra, totalN); //more conservative to assume > 1 level } @Override //Serialize totalN, k, minValue, maxValue = 20 // In preamble Flags keep: hra-bit, compatible-bit, criterion-bit // plus compactors. public int getSerializationBytes() { int cBytes = 0; for (int i = 0; i < compactors.size(); i++) { cBytes += compactors.get(i).getSerializationBytes() + 4; //int length before each one } final int members = 20; //totalN(8), minValue(4), maxValue(4), numCompactors(4) final int preamble = 8; //includes k(4) return cBytes + members + preamble; } private void grow() { final byte lgWeight = (byte)getNumLevels(); if (lgWeight == 0 && reqDebug != null) { reqDebug.emitStart(this); } compactors.add(new ReqCompactor(lgWeight, hra, k, reqDebug)); updateMaxNomSize(); if (reqDebug != null) { reqDebug.emitNewCompactor(lgWeight); } } @Override public boolean isCompatible() { return compatible; } @Override public boolean isEmpty() { return totalN == 0; } @Override public boolean isEstimationMode() { return getNumLevels() > 1; } @Override public boolean isLessThanOrEqual() { return criterion == LE; } @Override public ReqIterator iterator() { return new ReqIterator(this); } @Override public ReqSketch merge(final ReqSketch other) { if (other == null || other.isEmpty()) { return this; } totalN += other.totalN; //update min, max values, n if (Float.isNaN(minValue) || other.minValue < minValue) { minValue = other.minValue; } if (Float.isNaN(maxValue) || other.maxValue > maxValue) { maxValue = other.maxValue; } //Grow until self has at least as many compactors as other while (getNumLevels() < other.getNumLevels()) { grow(); } //Merge the items in all height compactors for (int i = 0; i < getNumLevels(); i++) { compactors.get(i).merge(other.compactors.get(i)); } updateMaxNomSize(); updateRetainedItems(); if (retItems >= maxNomSize) { compress(); } assert retItems < maxNomSize; aux = null; return this; } @Override public ReqSketch reset() { totalN = 0; retItems = 0; maxNomSize = 0; minValue = Float.NaN; maxValue = Float.NaN; aux = null; compactors = new ArrayList<>(); grow(); return this; } @Override public ReqSketch setCompatible(final boolean compatible) { this.compatible = compatible; return this; } /** * <b>NOTE:</b> This is public only to allow testing from another * 
package and is not intended for use by normal users of this class. * @param criterion one of LT, LE, GT, GE. * @return this */ public ReqSketch setCriterion(final Criteria criterion) { this.criterion = criterion; return this; } /** * <b>NOTE:</b> This is public only to allow testing from another * package and is not intended for use by normal users of this class. * @param reqDebug the ReqDebug implementation * @return this */ public ReqSketch setReqDebug(final ReqDebug reqDebug) { this.reqDebug = reqDebug; return this; } @Override public ReqSketch setLessThanOrEqual(final boolean ltEq) { if (ltEq) { setCriterion(LE); } else { setCriterion(LT); } return this; } @Override public byte[] toByteArray() { final int bytes = getSerializationBytes(); final byte[] arr = new byte[bytes]; final WritableBuffer wbuf = WritableMemory.wrap(arr).asWritableBuffer(); final int flags = (isEmpty() ? 4 : 0) | (hra ? 8 : 0) | (compatible ? 16 : 0) | (criterion == LE ? 32 : 0); wbuf.putByte((byte)1); //PreLongs wbuf.putByte((byte)1); //SerVer wbuf.putByte((byte)17); //Family ID wbuf.putByte((byte)flags); wbuf.putInt(k); //end of 8 byte preamble wbuf.putLong(totalN); //16 wbuf.putFloat(minValue); wbuf.putFloat(maxValue); wbuf.putInt(compactors.size()); //28 for (int i = 0; i < compactors.size(); i++) { final ReqCompactor c = compactors.get(i); final byte[] cArr = c.toByteArray(); //+320 wbuf.putInt(cArr.length); //32 wbuf.putByteArray(cArr, 0, cArr.length); //352 } assert wbuf.getPosition() == bytes; return arr; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("**********Relative Error Quantiles Sketch Summary**********").append(LS); sb.append(" N : " + totalN).append(LS); sb.append(" Retained Items : " + retItems).append(LS); sb.append(" Max Nominal Size: " + maxNomSize).append(LS); sb.append(" Min Value : " + minValue).append(LS); sb.append(" Max Value : " + maxValue).append(LS); sb.append(" Estimation Mode : " + isEstimationMode()).append(LS); sb.append(" Criterion : " + criterion).append(LS); sb.append(" High Rank Acc : " + hra).append(LS); sb.append(" Levels : " + compactors.size()).append(LS); sb.append("************************End Summary************************").append(LS); return sb.toString(); } @Override public void update(final float item) { if (Float.isNaN(item)) { return; } if (isEmpty()) { minValue = item; maxValue = item; } else { if (item < minValue) { minValue = item; } if (item > maxValue) { maxValue = item; } } final FloatBuffer buf = compactors.get(0).getBuffer(); buf.append(item); retItems++; totalN++; if (retItems >= maxNomSize) { buf.sort(); compress(); } aux = null; } /** * Computes a new bound for determining when to compress the sketch. */ void updateMaxNomSize() { int cap = 0; for (ReqCompactor c : compactors) { cap += c.getNomCapacity(); } maxNomSize = cap; } /** * Computes the retItems for the sketch. */ private void updateRetainedItems() { int count = 0; for (ReqCompactor c : compactors) { count += c.getBuffer().getLength(); } retItems = count; } /** * This checks the given float array to make sure that it contains only finite values * and is monotonically increasing in value. 
* @param splits the given array */ static void validateSplits(final float[] splits) { final int len = splits.length; for (int i = 0; i < len; i++) { final float v = splits[i]; if (!Float.isFinite(v)) { throw new SketchesArgumentException("Values must be finite"); } if (i < len - 1 && v >= splits[i + 1]) { throw new SketchesArgumentException( "Values must be unique and monotonically increasing"); } } } @Override public String viewCompactorDetail(final String fmt, final boolean allData) { final StringBuilder sb = new StringBuilder(); sb.append("*********Relative Error Quantiles Compactor Detail*********").append(LS); sb.append("Compactor Detail: Ret Items: ").append(getRetainedItems()) .append(" N: ").append(getN()); sb.append(LS); for (int i = 0; i < getNumLevels(); i++) { final ReqCompactor c = compactors.get(i); sb.append(c.toListPrefix()).append(LS); if (allData) { sb.append(c.getBuffer().toHorizList(fmt, 20)).append(LS); } } sb.append("************************End Detail*************************").append(LS); return sb.toString(); } }
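A short usage sketch for the class above (not part of the source file; it only calls the public constructor and query methods defined in ReqSketch itself):

import org.apache.datasketches.req.ReqSketch;

class ReqSketchUsage {
    public static void main(String[] args) {
        // k = 12 gives roughly 1% relative error at 95% confidence;
        // true = prioritize high-rank accuracy.
        ReqSketch sk = new ReqSketch(12, true);
        for (int i = 1; i <= 100_000; i++) {
            sk.update(i); // int widens to float
        }
        System.out.println("median       ~ " + sk.getQuantile(0.5));
        System.out.println("rank(75000f) ~ " + sk.getRank(75000f));
        // One-sided rank error bounds at 1 standard deviation:
        System.out.println("LB(0.5) = " + sk.getRankLowerBound(0.5, 1));
        System.out.println("UB(0.5) = " + sk.getRankUpperBound(0.5, 1));
    }
}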
src/main/java/org/apache/datasketches/req/ReqSketch.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.datasketches.req; import static java.lang.Math.max; import static java.lang.Math.sqrt; import static org.apache.datasketches.Criteria.GE; import static org.apache.datasketches.Criteria.GT; import static org.apache.datasketches.Criteria.LE; import static org.apache.datasketches.Criteria.LT; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.datasketches.Criteria; import org.apache.datasketches.SketchesArgumentException; import org.apache.datasketches.memory.Buffer; import org.apache.datasketches.memory.Memory; import org.apache.datasketches.memory.WritableBuffer; import org.apache.datasketches.memory.WritableMemory; /** * This Relative Error Quantiles Sketch is the Java implementation based on the paper * "Relative Error Streaming Quantiles", https://arxiv.org/abs/2004.01668, and loosely derived from * a Python prototype written by Pavel Vesely. * * <p>This implementation differs from the algorithm described in the paper in the following:</p> * * <ul> * <li>The algorithm requires no upper bound on the stream length. * Instead, each relative-compactor counts the number of compaction operations performed * so far (variable numCompactions). Initially, the relative-compactor starts with 3 sections. * Each time the numCompactions exceeds 2^{numSections - 1}, we double numSections.</li> * * <li>The size of each section (variable k and sectionSize in the code and parameter k in * the paper) is initialized with a value set by the user via variable k. * When the number of sections doubles, we decrease sectionSize by a factor of sqrt(2). * This is applied at each level separately. Thus, when we double the number of sections, the * nominal compactor size increases by a factor of sqrt(2) (up to +-1 after rounding).</li> * * <li>The merge operation here does not perform "special compactions", which are used in the paper * to allow for a tight mathematical analysis of the sketch.</li> * </ul> * * <p>This implementation provides a number of capabilities not discussed in the paper or provided * in the Python prototype.</p> * <ul><li>The Python prototype only implemented high accuracy for low ranks. This implementation * provides the user with the ability to choose either high rank accuracy or low rank accuracy at * the time of construction.</li> * <li>The Python prototype only implemented a comparison criterion of "&le;". This implementation * allows the user to switch back and forth between the "&le;" criterion and the "&lt;" criterion.</li> * <li>This implementation provides extensive debug visibility into the operation of the sketch with * two levels of detail output. 
This is not only useful for debugging, but is a powerful tool to * help users understand how the sketch works.</li> * </ul> * * @author Edo Liberty * @author Pavel Vesely * @author Lee Rhodes */ public class ReqSketch extends BaseReqSketch { //static finals private static final String LS = System.getProperty("line.separator"); static final int INIT_NUMBER_OF_SECTIONS = 3; static final int MIN_K = 4; private static final double relRseFactor = sqrt(0.0512 / INIT_NUMBER_OF_SECTIONS); private static final double fixRseFactor = .06; //finals private final int k; //user config, default is 12 (1% @ 95% Conf) final boolean hra; //user config, default is true //state variables private boolean compatible = true; //user config, default: true, can be set after construction private Criteria criterion = LT; //user config, default: LT, can be set after construction private long totalN; private float minValue = Float.NaN; private float maxValue = Float.NaN; //computed from compactors private int retItems = 0; //number of retained items in the sketch private int maxNomSize = 0; //sum of nominal capacities of all compactors //Objects private ReqAuxiliary aux = null; private List<ReqCompactor> compactors = new ArrayList<>(); ReqDebug reqDebug = null; //user config, default: null, can be set after construction. /** * Public Constructor. * @param k Controls the size and error of the sketch. It must be even and larger than or equal * to 4. If odd, it will be rounded down by one. * The default value of 12 roughly corresponds to 1% relative error guarantee at 95% confidence. * @param highRankAccuracy if true, the default, the high ranks are prioritized for better * accuracy. Otherwise the low ranks are prioritized for better accuracy. */ public ReqSketch(final int k, final boolean highRankAccuracy) { this.k = max(k & -2, MIN_K); //rounds down one if odd hra = highRankAccuracy; retItems = 0; maxNomSize = 0; totalN = 0; grow(); } /** * Copy Constructor * @param other the other sketch to be deep copied into this one. */ ReqSketch(final ReqSketch other) { k = other.k; hra = other.hra; totalN = other.totalN; retItems = other.retItems; maxNomSize = other.maxNomSize; minValue = other.minValue; maxValue = other.maxValue; compatible = other.compatible; criterion = other.criterion; reqDebug = other.reqDebug; //aux does not need to be copied for (int i = 0; i < other.getNumLevels(); i++) { compactors.add(new ReqCompactor(other.compactors.get(i))); } aux = null; } /** * Construct from elements. After sketch is constructed, retItems and maxNomSize must be computed. 
*/ ReqSketch(final int k, final boolean hra, final long totalN, final float minValue, final float maxValue, final List<ReqCompactor> compactors) { this.k = k; this.hra = hra; this.totalN = totalN; this.minValue = minValue; this.maxValue = maxValue; this.compactors = compactors; } static ReqSketch heapify(final Memory mem) { //TODO PUBLIC final Buffer buff = mem.asBuffer(); final byte preLongs = buff.getByte(); assert preLongs == (byte)1; final byte serVer = buff.getByte(); assert serVer == (byte)1; final byte familyId = buff.getByte(); assert familyId == 17; final int flags = buff.getByte() & 0xFFFF; final boolean hra = (flags & 8) > 0; final boolean compatible = (flags & 16) > 0; final boolean ltEq = (flags & 32) > 0; final int k = buff.getInt(); final long totalN = buff.getLong(); final float minValue = buff.getFloat(); final float maxValue = buff.getFloat(); final List<ReqCompactor> compactors = new ArrayList<>(); final int numCompactors = buff.getInt(); for (int i = 0; i < numCompactors; i++) { final int cBytes = buff.getInt(); final long pos = buff.getPosition(); final long end = pos + cBytes; buff.setStartPositionEnd(0, pos, end); compactors.add(ReqCompactor.heapify(buff.region())); buff.setStartPositionEnd(0, pos + cBytes, buff.getCapacity()); } final ReqSketch sk = new ReqSketch(k, hra, totalN, minValue, maxValue, compactors); sk.updateMaxNomSize(); sk.updateRetainedItems(); sk.setCompatible(compatible); sk.setLessThanOrEqual(ltEq); return sk; } /** * Returns a new ReqSketchBuilder * @return a new ReqSketchBuilder */ public static final ReqSketchBuilder builder() { return new ReqSketchBuilder(); } private void compress() { if (reqDebug != null) { reqDebug.emitStartCompress(); } for (int h = 0; h < compactors.size(); h++) { final ReqCompactor c = compactors.get(h); final int retCompItems = c.getBuffer().getLength(); final int nomCap = c.getNomCapacity(); if (retCompItems >= nomCap) { if (h + 1 >= getNumLevels()) { //at the top? 
if (reqDebug != null) { reqDebug.emitMustAddCompactor(); } grow(); //add a level, increases maxNomSize } final FloatBuffer promoted = c.compact(); compactors.get(h + 1).getBuffer().mergeSortIn(promoted); updateRetainedItems(); if (retItems < maxNomSize) { break; } } } updateMaxNomSize(); aux = null; if (reqDebug != null) { reqDebug.emitCompressDone(); } } ReqAuxiliary getAux() { return aux; } @Override public double[] getCDF(final float[] splitPoints) { if (isEmpty()) { return new double[0]; } final long[] buckets = getPMForCDF(splitPoints); final int numBkts = buckets.length; final double[] outArr = new double[numBkts]; for (int j = 0; j < numBkts; j++) { outArr[j] = (double)buckets[j] / getN(); } return outArr; } List<ReqCompactor> getCompactors() { return compactors; } private long getCount(final float value) { final int numComp = compactors.size(); long cumNnr = 0; for (int i = 0; i < numComp; i++) { //cycle through compactors final ReqCompactor c = compactors.get(i); final long wt = 1L << c.getLgWeight(); final FloatBuffer buf = c.getBuffer(); cumNnr += buf.getCountWithCriterion(value, criterion) * wt; } if (criterion == GT || criterion == GE) { cumNnr = totalN - cumNnr; } return cumNnr; } private long[] getCounts(final float[] values) { final int numValues = values.length; final int numComp = compactors.size(); final long[] cumNnrArr = new long[numValues]; for (int i = 0; i < numComp; i++) { //cycle through compactors final ReqCompactor c = compactors.get(i); final long wt = 1L << c.getLgWeight(); final FloatBuffer buf = c.getBuffer(); for (int j = 0; j < numValues; j++) { cumNnrArr[j] += buf.getCountWithCriterion(values[j], criterion) * wt; } } if (criterion == GT || criterion == GE) { for (int j = 0; j < numValues; j++) { cumNnrArr[j] = totalN - cumNnrArr[j]; } } return cumNnrArr; } Criteria getCriterion() { return criterion; } @Override public boolean getHighRankAccuracy() { return hra; } int getK() { return k; } int getMaxNomSize() { return maxNomSize; } @Override public float getMaxValue() { return maxValue; } @Override public float getMinValue() { return minValue; } @Override public long getN() { return totalN; } /** * Gets the number of levels of compactors in the sketch. * @return the number of levels of compactors in the sketch. */ int getNumLevels() { return compactors.size(); } @Override public double[] getPMF(final float[] splitPoints) { if (isEmpty()) { return new double[0]; } final long[] buckets = getPMForCDF(splitPoints); final int numBkts = buckets.length; final double[] outArr = new double[numBkts]; outArr[0] = (double)buckets[0] / getN(); for (int j = 1; j < numBkts; j++) { outArr[j] = (double)(buckets[j] - buckets[j - 1]) / getN(); } return outArr; } /** * Gets a CDF in raw counts, which can be easily converted into a CDF or PMF. 
* @param splits the splitPoints array * @return a CDF in raw counts */ private long[] getPMForCDF(final float[] splits) { validateSplits(splits); final int numSplits = splits.length; final long[] splitCounts = getCounts(splits); final int numBkts = numSplits + 1; final long[] bkts = Arrays.copyOf(splitCounts, numBkts); bkts[numBkts - 1] = getN(); return bkts; } @Override public float getQuantile(final double normRank) { if (isEmpty()) { throw new SketchesArgumentException( "Sketch is empty."); } if (normRank < 0 || normRank > 1.0) { throw new SketchesArgumentException( "Normalized rank must be in the range [0.0, 1.0]: " + normRank); } if (aux == null) { aux = new ReqAuxiliary(this); } final float q = aux.getQuantile(normRank); if (Float.isNaN(q)) { //possible result from aux.getQuantile() if (compatible) { if (criterion == LT || criterion == LE) { return minValue; } else { return maxValue; } } } return q; } @Override public float[] getQuantiles(final double[] normRanks) { final int len = normRanks.length; final float[] qArr = new float[len]; for (int i = 0; i < len; i++) { qArr[i] = getQuantile(normRanks[i]); } return qArr; } @Override public double getRank(final float value) { final long nnCount = getCount(value); return (double)nnCount / totalN; } @Override public double[] getRanks(final float[] values) { final long[] cumNnrArr = getCounts(values); final int numValues = values.length; final double[] rArr = new double[numValues]; for (int i = 0; i < numValues; i++) { rArr[i] = (double)cumNnrArr[i] / totalN; } return rArr; } private static double getRankLB(final int k, final int levels, final double rank, final int numStdDev, final boolean hra, final long totalN) { if (levels == 1) { return rank; } final double thresh = (double)k * INIT_NUMBER_OF_SECTIONS / totalN; if ( hra && rank >= 1.0 - thresh) { return rank; } if (!hra && rank <= thresh) { return rank; } final double relative = relRseFactor / k * (hra ? 1.0 - rank : rank); final double fixed = fixRseFactor / k; final double lbRel = rank - numStdDev * relative; final double lbFix = rank - numStdDev * fixed; return Math.max(lbRel, lbFix); } @Override public double getRankLowerBound(final double rank, final int numStdDev) { return getRankLB(k, getNumLevels(), rank, numStdDev, hra, getN()); } private static double getRankUB(final int k, final int levels, final double rank, final int numStdDev, final boolean hra, final long totalN) { if (levels == 1) { return rank; } final double thresh = (double)k * INIT_NUMBER_OF_SECTIONS / totalN; if ( hra && rank >= 1.0 - thresh) { return rank; } if (!hra && rank <= thresh) { return rank; } final double relative = relRseFactor / k * (hra ? 1.0 - rank : rank); final double fixed = fixRseFactor / k; final double ubRel = rank + numStdDev * relative; final double ubFix = rank + numStdDev * fixed; return Math.min(ubRel, ubFix); } @Override public double getRankUpperBound(final double rank, final int numStdDev) { return getRankUB(k, getNumLevels(), rank, numStdDev, hra, getN()); } @Override public int getRetainedItems() { return retItems; } @Override public double getRSE(final int k, final double rank, final boolean hra, final long totalN) { return getRankUB(k, 2, rank, 1, hra, totalN); //more conservative to assume > 1 level } @Override //Serialize totalN, k, minValue, maxValue = 20 // In preamble Flags keep: hra-bit, compatible-bit, criterion-bit // plus compactors. 
public int getSerializationBytes() { int cBytes = 0; for (int i = 0; i < compactors.size(); i++) { cBytes += compactors.get(i).getSerializationBytes() + 4; //int length before each one } final int members = 20; //totalN(8), minValue(4), maxValue(4), numCompactors(4) final int preamble = 8; //includes k(4) return cBytes + members + preamble; } private void grow() { final byte lgWeight = (byte)getNumLevels(); if (lgWeight == 0 && reqDebug != null) { reqDebug.emitStart(this); } compactors.add(new ReqCompactor(lgWeight, hra, k, reqDebug)); updateMaxNomSize(); if (reqDebug != null) { reqDebug.emitNewCompactor(lgWeight); } } @Override public boolean isCompatible() { return compatible; } @Override public boolean isEmpty() { return totalN == 0; } @Override public boolean isEstimationMode() { return getNumLevels() > 1; } @Override public boolean isLessThanOrEqual() { return criterion == LE; } @Override public ReqIterator iterator() { return new ReqIterator(this); } @Override public ReqSketch merge(final ReqSketch other) { if (other == null || other.isEmpty()) { return this; } totalN += other.totalN; //update min, max values, n if (Float.isNaN(minValue) || other.minValue < minValue) { minValue = other.minValue; } if (Float.isNaN(maxValue) || other.maxValue > maxValue) { maxValue = other.maxValue; } //Grow until self has at least as many compactors as other while (getNumLevels() < other.getNumLevels()) { grow(); } //Merge the items in all height compactors for (int i = 0; i < getNumLevels(); i++) { compactors.get(i).merge(other.compactors.get(i)); } updateMaxNomSize(); updateRetainedItems(); if (retItems >= maxNomSize) { compress(); } assert retItems < maxNomSize; aux = null; return this; } @Override public ReqSketch reset() { totalN = 0; retItems = 0; maxNomSize = 0; minValue = Float.NaN; maxValue = Float.NaN; aux = null; compactors = new ArrayList<>(); grow(); return this; } @Override public ReqSketch setCompatible(final boolean compatible) { this.compatible = compatible; return this; } /** * <b>NOTE:</b> This is public only to allow testing from another * package and is not intended for use by normal users of this class. * @param criterion one of LT, LE, GT, GE. * @return this */ public ReqSketch setCriterion(final Criteria criterion) { this.criterion = criterion; return this; } /** * <b>NOTE:</b> This is public only to allow testing from another * package and is not intended for use by normal users of this class. * @param reqDebug the ReqDebug implementation * @return this */ public ReqSketch setReqDebug(final ReqDebug reqDebug) { this.reqDebug = reqDebug; return this; } @Override public ReqSketch setLessThanOrEqual(final boolean ltEq) { if (ltEq) { setCriterion(LE); } else { setCriterion(LT); } return this; } @Override public byte[] toByteArray() { final int bytes = getSerializationBytes(); final byte[] arr = new byte[bytes]; final WritableBuffer wbuf = WritableMemory.wrap(arr).asWritableBuffer(); final int flags = (isEmpty() ? 4 : 0) | (hra ? 8 : 0) | (compatible ? 16 : 0) | (criterion == LE ? 
32 : 0); wbuf.putByte((byte)1); //PreLongs wbuf.putByte((byte)1); //SerVer wbuf.putByte((byte)17); //Family ID wbuf.putByte((byte)flags); wbuf.putInt(k); //end of 8 byte preamble wbuf.putLong(totalN); //16 wbuf.putFloat(minValue); wbuf.putFloat(maxValue); wbuf.putInt(compactors.size()); //28 for (int i = 0; i < compactors.size(); i++) { final ReqCompactor c = compactors.get(i); final byte[] cArr = c.toByteArray(); //+320 wbuf.putInt(cArr.length); //32 wbuf.putByteArray(cArr, 0, cArr.length); //352 } assert wbuf.getPosition() == bytes; return arr; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("**********Relative Error Quantiles Sketch Summary**********").append(LS); sb.append(" N : " + totalN).append(LS); sb.append(" Retained Items : " + retItems).append(LS); sb.append(" Max Nominal Size: " + maxNomSize).append(LS); sb.append(" Min Value : " + minValue).append(LS); sb.append(" Max Value : " + maxValue).append(LS); sb.append(" Estimation Mode : " + isEstimationMode()).append(LS); sb.append(" Criterion : " + criterion).append(LS); sb.append(" High Rank Acc : " + hra).append(LS); sb.append(" Levels : " + compactors.size()).append(LS); sb.append("************************End Summary************************").append(LS); return sb.toString(); } @Override public void update(final float item) { if (Float.isNaN(item)) { return; } if (isEmpty()) { minValue = item; maxValue = item; } else { if (item < minValue) { minValue = item; } if (item > maxValue) { maxValue = item; } } final FloatBuffer buf = compactors.get(0).getBuffer(); buf.append(item); retItems++; totalN++; if (retItems >= maxNomSize) { buf.sort(); compress(); } aux = null; } /** * Computes a new bound for determining when to compress the sketch. */ void updateMaxNomSize() { int cap = 0; for (ReqCompactor c : compactors) { cap += c.getNomCapacity(); } maxNomSize = cap; } /** * Computes the retItems for the sketch. */ private void updateRetainedItems() { int count = 0; for (ReqCompactor c : compactors) { count += c.getBuffer().getLength(); } retItems = count; } /** * This checks the given float array to make sure that it contains only finite values * and is monotonically increasing in value. * @param splits the given array */ static void validateSplits(final float[] splits) { final int len = splits.length; for (int i = 0; i < len; i++) { final float v = splits[i]; if (!Float.isFinite(v)) { throw new SketchesArgumentException("Values must be finite"); } if (i < len - 1 && v >= splits[i + 1]) { throw new SketchesArgumentException( "Values must be unique and monotonically increasing"); } } } @Override public String viewCompactorDetail(final String fmt, final boolean allData) { final StringBuilder sb = new StringBuilder(); sb.append("*********Relative Error Quantiles Compactor Detail*********").append(LS); sb.append("Compactor Detail: Ret Items: ").append(getRetainedItems()) .append(" N: ").append(getN()); sb.append(LS); for (int i = 0; i < getNumLevels(); i++) { final ReqCompactor c = compactors.get(i); sb.append(c.toListPrefix()).append(LS); if (allData) { sb.append(c.getBuffer().toHorizList(fmt, 20)).append(LS); } } sb.append("************************End Detail*************************").append(LS); return sb.toString(); } }
Make heapify() public.
src/main/java/org/apache/datasketches/req/ReqSketch.java
Make heapify() public.
<ide><path>src/main/java/org/apache/datasketches/req/ReqSketch.java <ide> this.compactors = compactors; <ide> } <ide> <del> static ReqSketch heapify(final Memory mem) { //TODO PUBLIC <add> /** <add> * Deserializes the byte stream of the given Memory object and places it on the Java heap. <add> * @param mem the given Memory object <add> * @return a ReqSketch on the Java heap. <add> */ <add> public static ReqSketch heapify(final Memory mem) { <ide> final Buffer buff = mem.asBuffer(); <ide> final byte preLongs = buff.getByte(); <ide> assert preLongs == (byte)1;
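For context, the now-public heapify() is the deserialization counterpart of the toByteArray() shown in this record. A minimal round-trip sketch follows; ReqSketch.builder() is assumed as the construction entry point (it is not shown in this record), while update(), toByteArray(), getQuantile(), and heapify(Memory) all appear above.

import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.req.ReqSketch;

public class ReqSketchHeapifyDemo {
    public static void main(String[] args) {
        // Assumption: builder() is the usual way to construct a ReqSketch.
        ReqSketch sketch = ReqSketch.builder().build();
        for (int i = 1; i <= 1000; i++) {
            sketch.update(i); // update(float); the int argument widens to float
        }
        // Serialize with toByteArray(), then rebuild on-heap with the now-public heapify().
        byte[] bytes = sketch.toByteArray();
        ReqSketch copy = ReqSketch.heapify(Memory.wrap(bytes));
        System.out.println(copy.getQuantile(0.5)); // median estimate survives the round trip
    }
}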
Java
epl-1.0
8ed24db562cd68f29800655e8d4ff17d5894dfaa
0
DavidGutknecht/elexis-3-base
package ch.elexis.base.ch.arzttarife.pandemie.model; import java.time.LocalDate; import java.util.Optional; import ch.elexis.base.ch.arzttarife.model.service.CoreModelServiceHolder; import ch.elexis.base.ch.arzttarife.pandemie.IPandemieLeistung; import ch.elexis.core.jpa.model.adapter.AbstractIdDeleteModelAdapter; import ch.elexis.core.model.IBillableOptifier; import ch.elexis.core.model.IBillableVerifier; import ch.elexis.core.model.IBilled; import ch.elexis.core.model.IBillingSystemFactor; import ch.elexis.core.model.IXid; import ch.elexis.core.model.Identifiable; import ch.elexis.core.model.billable.AbstractOptifier; import ch.elexis.core.model.billable.DefaultVerifier; import ch.elexis.core.services.holder.BillingServiceHolder; import ch.elexis.core.services.holder.XidServiceHolder; public class PandemieLeistung extends AbstractIdDeleteModelAdapter<ch.elexis.core.jpa.entities.PandemieLeistung> implements Identifiable, IPandemieLeistung { public static final String STS_CLASS = "ch.elexis.data.PandemieLeistung"; private static IBillableOptifier<PandemieLeistung> optifier; private IBillableVerifier verifier; public PandemieLeistung(ch.elexis.core.jpa.entities.PandemieLeistung entity){ super(entity); verifier = new DefaultVerifier(); } @Override public IBillableOptifier<PandemieLeistung> getOptifier(){ if (optifier == null) { optifier = new AbstractOptifier<PandemieLeistung>(CoreModelServiceHolder.get()) { @Override protected void setPrice(PandemieLeistung billable, IBilled billed){ Optional<IBillingSystemFactor> billingFactor = BillingServiceHolder.get().getBillingSystemFactor(getCodeSystemName(), billed.getEncounter().getDate()); if (billingFactor.isPresent()) { billed.setFactor(billingFactor.get().getFactor()); } else { billed.setFactor(1.0); } int points = 0; // use cents if set if (billable.getCents() > 0) { points = billable.getCents(); } else { points = billable.getTaxpoints(); } billed.setPoints(points); } }; } return optifier; } @Override public IBillableVerifier getVerifier(){ return verifier; } @Override public String getCodeSystemName(){ return ch.elexis.core.jpa.entities.PandemieLeistung.CODESYSTEM_NAME; } @Override public String getCodeSystemCode(){ return "351"; } @Override public String getCode(){ return getEntity().getCode(); } @Override public void setCode(String value){ getEntity().setCode(value); } @Override public String getText(){ return getEntity().getTitle(); } @Override public void setText(String value){ getEntity().setTitle(value); } @Override public String getDescription(){ return getEntity().getDescription(); } @Override public void setDescription(String value){ getEntity().setDescription(value); } @Override public String getChapter(){ return getEntity().getChapter(); } @Override public void setChapter(String value){ getEntity().setChapter(value); } @Override public int getCents(){ return getEntity().getCents(); } @Override public void setCents(int value){ getEntity().setCents(value); } @Override public String getLabel(){ return "(" + getCode() + ") " + getText(); } @Override public boolean addXid(String domain, String id, boolean updateIfExists){ return XidServiceHolder.get().addXid(this, domain, id, updateIfExists); } @Override public IXid getXid(String domain){ return XidServiceHolder.get().getXid(this, domain); } @Override public void setId(String id){ getEntityMarkDirty().setId(id); } @Override public LocalDate getValidFrom(){ return getEntity().getValidFrom(); } @Override public LocalDate getValidTo(){ return getEntity().getValidTo(); } @Override public 
void setValidFrom(LocalDate value){ getEntityMarkDirty().setValidFrom(value); } @Override public void setValidTo(LocalDate value){ getEntityMarkDirty().setValidTo(value); } @Override public int getTaxpoints(){ return getEntity().getTaxpoints(); } @Override public void setTaxpoints(int value){ getEntityMarkDirty().setTaxpoints(value); } }
bundles/ch.elexis.base.ch.arzttarife.model/src/ch/elexis/base/ch/arzttarife/pandemie/model/PandemieLeistung.java
package ch.elexis.base.ch.arzttarife.pandemie.model; import java.time.LocalDate; import java.util.Optional; import ch.elexis.base.ch.arzttarife.model.service.CoreModelServiceHolder; import ch.elexis.base.ch.arzttarife.pandemie.IPandemieLeistung; import ch.elexis.core.jpa.model.adapter.AbstractIdDeleteModelAdapter; import ch.elexis.core.model.IBillableOptifier; import ch.elexis.core.model.IBillableVerifier; import ch.elexis.core.model.IBilled; import ch.elexis.core.model.IBillingSystemFactor; import ch.elexis.core.model.IXid; import ch.elexis.core.model.Identifiable; import ch.elexis.core.model.billable.AbstractOptifier; import ch.elexis.core.model.billable.DefaultVerifier; import ch.elexis.core.services.holder.BillingServiceHolder; import ch.elexis.core.services.holder.XidServiceHolder; public class PandemieLeistung extends AbstractIdDeleteModelAdapter<ch.elexis.core.jpa.entities.PandemieLeistung> implements Identifiable, IPandemieLeistung { public static final String STS_CLASS = "ch.elexis.data.PandemieLeistung"; private static IBillableOptifier<PandemieLeistung> optifier; private IBillableVerifier verifier; public PandemieLeistung(ch.elexis.core.jpa.entities.PandemieLeistung entity){ super(entity); verifier = new DefaultVerifier(); } @Override public IBillableOptifier<PandemieLeistung> getOptifier(){ if (optifier == null) { optifier = new AbstractOptifier<PandemieLeistung>(CoreModelServiceHolder.get()) { @Override protected void setPrice(PandemieLeistung billable, IBilled billed){ Optional<IBillingSystemFactor> billingFactor = BillingServiceHolder.get().getBillingSystemFactor(getCodeSystemName(), billed.getEncounter().getDate()); if (billingFactor.isPresent()) { billed.setFactor(billingFactor.get().getFactor()); } else { billed.setFactor(1.0); } int points = 0; // use cents if set if (billable.getCents() > 0) { points = billable.getCents(); } else { points = billable.getTaxpoints(); } billed.setPoints(points); } }; } return optifier; } @Override public IBillableVerifier getVerifier(){ return verifier; } @Override public String getCodeSystemName(){ return ch.elexis.core.jpa.entities.PandemieLeistung.CODESYSTEM_NAME; } @Override public String getCode(){ return getEntity().getCode(); } @Override public void setCode(String value){ getEntity().setCode(value); } @Override public String getText(){ return getEntity().getTitle(); } @Override public void setText(String value){ getEntity().setTitle(value); } @Override public String getDescription(){ return getEntity().getDescription(); } @Override public void setDescription(String value){ getEntity().setDescription(value); } @Override public String getChapter(){ return getEntity().getChapter(); } @Override public void setChapter(String value){ getEntity().setChapter(value); } @Override public int getCents(){ return getEntity().getCents(); } @Override public void setCents(int value){ getEntity().setCents(value); } @Override public String getLabel(){ return "(" + getCode() + ") " + getText(); } @Override public boolean addXid(String domain, String id, boolean updateIfExists){ return XidServiceHolder.get().addXid(this, domain, id, updateIfExists); } @Override public IXid getXid(String domain){ return XidServiceHolder.get().getXid(this, domain); } @Override public void setId(String id){ getEntityMarkDirty().setId(id); } @Override public LocalDate getValidFrom(){ return getEntity().getValidFrom(); } @Override public LocalDate getValidTo(){ return getEntity().getValidTo(); } @Override public void setValidFrom(LocalDate value){ 
getEntityMarkDirty().setValidFrom(value); } @Override public void setValidTo(LocalDate value){ getEntityMarkDirty().setValidTo(value); } @Override public int getTaxpoints(){ return getEntity().getTaxpoints(); } @Override public void setTaxpoints(int value){ getEntityMarkDirty().setTaxpoints(value); } }
[21303] fix code system code of swiss pandemie tarif
bundles/ch.elexis.base.ch.arzttarife.model/src/ch/elexis/base/ch/arzttarife/pandemie/model/PandemieLeistung.java
[21303] fix code system code of swiss pandemie tarif
<ide><path>bundles/ch.elexis.base.ch.arzttarife.model/src/ch/elexis/base/ch/arzttarife/pandemie/model/PandemieLeistung.java <ide> @Override <ide> public String getCodeSystemName(){ <ide> return ch.elexis.core.jpa.entities.PandemieLeistung.CODESYSTEM_NAME; <add> } <add> <add> @Override <add> public String getCodeSystemCode(){ <add> return "351"; <ide> } <ide> <ide> @Override
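The optifier's setPrice() in this record prefers an explicit cents value and only falls back to tax points when cents is unset. Below is a self-contained sketch of that fallback rule in plain Java; the class and method names are hypothetical, not part of the Elexis API.

public class PriceFallbackSketch {
    // Mirrors the record's rule: use cents when set (> 0), otherwise tax points.
    static int effectivePoints(int cents, int taxpoints) {
        return cents > 0 ? cents : taxpoints;
    }

    public static void main(String[] args) {
        System.out.println(effectivePoints(1550, 0)); // explicit cents win: 1550
        System.out.println(effectivePoints(0, 82));   // fallback to tax points: 82
    }
}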
Java
apache-2.0
445f3fe1dc0fb61ec3ac67ed9793f755293eb4f4
0
mikewalch/accumulo,ctubbsii/accumulo,apache/accumulo,mjwall/accumulo,phrocker/accumulo-1,milleruntime/accumulo,lstav/accumulo,dhutchis/accumulo,ivakegg/accumulo,keith-turner/accumulo
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.server.replication; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.IteratorSetting.Column; import org.apache.accumulo.core.client.impl.BaseIteratorEnvironment; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.iterators.Combiner; import org.apache.accumulo.core.iterators.DevNull; import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope; import org.apache.accumulo.core.replication.ReplicationSchema.StatusSection; import org.apache.accumulo.server.replication.proto.Replication.Status; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class StatusCombinerTest { private StatusCombiner combiner; private Key key; private Status.Builder builder; private static class TestIE extends BaseIteratorEnvironment { @Override public IteratorScope getIteratorScope() { return IteratorScope.scan; } } @Before public void initCombiner() throws IOException { key = new Key(); combiner = new StatusCombiner(); builder = Status.newBuilder(); IteratorSetting cfg = new IteratorSetting(50, StatusCombiner.class); Combiner.setColumns(cfg, Collections.singletonList(new Column(StatusSection.NAME))); combiner.init(new DevNull(), cfg.getOptions(), new TestIE()); } @Test public void returnsSameObject() { Status status = StatusUtil.ingestedUntil(10); // When combining only one message, we should get back the same instance Status ret = combiner.typedReduce(key, Collections.singleton(status).iterator()); Assert.assertEquals(status, ret); Assert.assertTrue(status == ret); } @Test public void newStatusWithNewIngest() { Status orig = StatusUtil.fileCreated(100); Status status = StatusUtil.replicatedAndIngested(10, 20); Status ret = combiner.typedReduce(key, Arrays.asList(orig, status).iterator()); Assert.assertEquals(10l, ret.getBegin()); Assert.assertEquals(20l, ret.getEnd()); Assert.assertEquals(100l, ret.getCreatedTime()); Assert.assertEquals(false, ret.getClosed()); } @Test public void newStatusWithNewIngestSingleBuilder() { Status orig = StatusUtil.fileCreated(100); Status status = StatusUtil.replicatedAndIngested(builder, 10, 20); Status ret = combiner.typedReduce(key, Arrays.asList(orig, status).iterator()); Assert.assertEquals(10l, ret.getBegin()); Assert.assertEquals(20l, ret.getEnd()); Assert.assertEquals(100l, ret.getCreatedTime()); Assert.assertEquals(false, ret.getClosed()); } @Test public void commutativeNewFile() { Status newFile = StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(100), secondSync = StatusUtil.ingestedUntil(200); Status order1 = 
combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(secondSync, firstSync, newFile).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeNewFileSingleBuilder() { Status newFile = StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(builder, 100), secondSync = StatusUtil.ingestedUntil(builder, 200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(secondSync, firstSync, newFile).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeNewUpdates() { Status newFile = StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(100), secondSync = StatusUtil.ingestedUntil(200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, firstSync).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeNewUpdatesSingleBuilder() { Status newFile = StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(builder, 100), secondSync = StatusUtil.ingestedUntil(builder, 200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, firstSync).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeWithClose() { Status newFile = StatusUtil.fileCreated(100), closed = StatusUtil.fileClosed(), secondSync = StatusUtil.ingestedUntil(200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, closed, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, closed).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeWithCloseSingleBuilder() { Status newFile = StatusUtil.fileCreated(100), closed = StatusUtil.fileClosed(), secondSync = StatusUtil.ingestedUntil(builder, 200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, closed, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, closed).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeWithMultipleUpdates() { Status newFile = StatusUtil.fileCreated(100), update1 = StatusUtil.ingestedUntil(100), update2 = StatusUtil.ingestedUntil(200), repl1 = StatusUtil .replicated(50), repl2 = StatusUtil.replicated(150); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, update1, repl1, update2, repl2).iterator()); // Got all replication updates before ingest updates Status permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, update1, repl2, update2).iterator()); Assert.assertEquals(order1, permutation); // All replications before updates permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, repl2, update1, update2).iterator()); Assert.assertEquals(order1, permutation); // All updates before replications permutation = combiner.typedReduce(key, Arrays.asList(newFile, update1, update2, repl1, repl2, update1, update2).iterator()); Assert.assertEquals(order1, permutation); } @Test public void commutativeWithMultipleUpdatesSingleBuilder() { Status newFile = StatusUtil.fileCreated(100), update1 = StatusUtil.ingestedUntil(builder, 100), update2 = StatusUtil.ingestedUntil(builder, 200), repl1 = StatusUtil .replicated(builder, 50), repl2 = 
StatusUtil.replicated(builder, 150); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, update1, repl1, update2, repl2).iterator()); // Got all replication updates before ingest updates Status permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, update1, repl2, update2).iterator()); Assert.assertEquals(order1, permutation); // All replications before updates permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, repl2, update1, update2).iterator()); Assert.assertEquals(order1, permutation); // All updates before replications permutation = combiner.typedReduce(key, Arrays.asList(newFile, update1, update2, repl1, repl2).iterator()); Assert.assertEquals(order1, permutation); } @Test public void duplicateStatuses() { Status newFile = StatusUtil.fileCreated(100), update1 = StatusUtil.ingestedUntil(builder, 100), update2 = StatusUtil.ingestedUntil(builder, 200), repl1 = StatusUtil .replicated(builder, 50), repl2 = StatusUtil.replicated(builder, 150); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, update1, repl1, update2, repl2).iterator()); // Repeat the same thing more than once Status permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, update1, update1, repl2, update2, update2).iterator()); Assert.assertEquals(order1, permutation); } @Test public void fileClosedTimePropagated() { Status stat1 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(50).build(); Status stat2 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).build(); Status combined = combiner.typedReduce(key, Arrays.asList(stat1, stat2).iterator()); Assert.assertEquals(stat1, combined); } @Test public void fileClosedTimeChoosesEarliestIgnoringDefault() { Status stat1 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(50).build(); Status stat2 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(100).build(); Status combined = combiner.typedReduce(key, Arrays.asList(stat1, stat2).iterator()); Assert.assertEquals(stat1, combined); Status stat3 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(100).build(); Status combined2 = combiner.typedReduce(key, Arrays.asList(combined, stat3).iterator()); Assert.assertEquals(combined, combined2); } @Test public void testCombination() { List<Status> status = new ArrayList<>(); long time = System.currentTimeMillis(); status.add(StatusUtil.fileCreated(time)); status.add(StatusUtil.openWithUnknownLength()); status.add(StatusUtil.fileClosed()); Status combined = combiner.typedReduce(new Key("row"), status.iterator()); Assert.assertEquals(time, combined.getCreatedTime()); Assert.assertTrue(combined.getInfiniteEnd()); Assert.assertTrue(combined.getClosed()); } }
server/base/src/test/java/org/apache/accumulo/server/replication/StatusCombinerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.server.replication; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.IteratorSetting.Column; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.iterators.Combiner; import org.apache.accumulo.core.iterators.DevNull; import org.apache.accumulo.core.replication.ReplicationSchema.StatusSection; import org.apache.accumulo.server.replication.proto.Replication.Status; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class StatusCombinerTest { private StatusCombiner combiner; private Key key; private Status.Builder builder; @Before public void initCombiner() throws IOException { key = new Key(); combiner = new StatusCombiner(); builder = Status.newBuilder(); IteratorSetting cfg = new IteratorSetting(50, StatusCombiner.class); Combiner.setColumns(cfg, Collections.singletonList(new Column(StatusSection.NAME))); combiner.init(new DevNull(), cfg.getOptions(), null); } @Test public void returnsSameObject() { Status status = StatusUtil.ingestedUntil(10); // When combining only one message, we should get back the same instance Status ret = combiner.typedReduce(key, Collections.singleton(status).iterator()); Assert.assertEquals(status, ret); Assert.assertTrue(status == ret); } @Test public void newStatusWithNewIngest() { Status orig = StatusUtil.fileCreated(100); Status status = StatusUtil.replicatedAndIngested(10, 20); Status ret = combiner.typedReduce(key, Arrays.asList(orig, status).iterator()); Assert.assertEquals(10l, ret.getBegin()); Assert.assertEquals(20l, ret.getEnd()); Assert.assertEquals(100l, ret.getCreatedTime()); Assert.assertEquals(false, ret.getClosed()); } @Test public void newStatusWithNewIngestSingleBuilder() { Status orig = StatusUtil.fileCreated(100); Status status = StatusUtil.replicatedAndIngested(builder, 10, 20); Status ret = combiner.typedReduce(key, Arrays.asList(orig, status).iterator()); Assert.assertEquals(10l, ret.getBegin()); Assert.assertEquals(20l, ret.getEnd()); Assert.assertEquals(100l, ret.getCreatedTime()); Assert.assertEquals(false, ret.getClosed()); } @Test public void commutativeNewFile() { Status newFile = StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(100), secondSync = StatusUtil.ingestedUntil(200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(secondSync, firstSync, newFile).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeNewFileSingleBuilder() { Status newFile = 
StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(builder, 100), secondSync = StatusUtil.ingestedUntil(builder, 200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(secondSync, firstSync, newFile).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeNewUpdates() { Status newFile = StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(100), secondSync = StatusUtil.ingestedUntil(200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, firstSync).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeNewUpdatesSingleBuilder() { Status newFile = StatusUtil.fileCreated(100), firstSync = StatusUtil.ingestedUntil(builder, 100), secondSync = StatusUtil.ingestedUntil(builder, 200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, firstSync, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, firstSync).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeWithClose() { Status newFile = StatusUtil.fileCreated(100), closed = StatusUtil.fileClosed(), secondSync = StatusUtil.ingestedUntil(200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, closed, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, closed).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeWithCloseSingleBuilder() { Status newFile = StatusUtil.fileCreated(100), closed = StatusUtil.fileClosed(), secondSync = StatusUtil.ingestedUntil(builder, 200); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, closed, secondSync).iterator()), order2 = combiner.typedReduce(key, Arrays.asList(newFile, secondSync, closed).iterator()); Assert.assertEquals(order1, order2); } @Test public void commutativeWithMultipleUpdates() { Status newFile = StatusUtil.fileCreated(100), update1 = StatusUtil.ingestedUntil(100), update2 = StatusUtil.ingestedUntil(200), repl1 = StatusUtil .replicated(50), repl2 = StatusUtil.replicated(150); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, update1, repl1, update2, repl2).iterator()); // Got all replication updates before ingest updates Status permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, update1, repl2, update2).iterator()); Assert.assertEquals(order1, permutation); // All replications before updates permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, repl2, update1, update2).iterator()); Assert.assertEquals(order1, permutation); // All updates before replications permutation = combiner.typedReduce(key, Arrays.asList(newFile, update1, update2, repl1, repl2, update1, update2).iterator()); Assert.assertEquals(order1, permutation); } @Test public void commutativeWithMultipleUpdatesSingleBuilder() { Status newFile = StatusUtil.fileCreated(100), update1 = StatusUtil.ingestedUntil(builder, 100), update2 = StatusUtil.ingestedUntil(builder, 200), repl1 = StatusUtil .replicated(builder, 50), repl2 = StatusUtil.replicated(builder, 150); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, update1, repl1, update2, repl2).iterator()); // Got all replication updates before ingest updates Status permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, update1, repl2, 
update2).iterator()); Assert.assertEquals(order1, permutation); // All replications before updates permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, repl2, update1, update2).iterator()); Assert.assertEquals(order1, permutation); // All updates before replications permutation = combiner.typedReduce(key, Arrays.asList(newFile, update1, update2, repl1, repl2).iterator()); Assert.assertEquals(order1, permutation); } @Test public void duplicateStatuses() { Status newFile = StatusUtil.fileCreated(100), update1 = StatusUtil.ingestedUntil(builder, 100), update2 = StatusUtil.ingestedUntil(builder, 200), repl1 = StatusUtil .replicated(builder, 50), repl2 = StatusUtil.replicated(builder, 150); Status order1 = combiner.typedReduce(key, Arrays.asList(newFile, update1, repl1, update2, repl2).iterator()); // Repeat the same thing more than once Status permutation = combiner.typedReduce(key, Arrays.asList(newFile, repl1, update1, update1, repl2, update2, update2).iterator()); Assert.assertEquals(order1, permutation); } @Test public void fileClosedTimePropagated() { Status stat1 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(50).build(); Status stat2 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).build(); Status combined = combiner.typedReduce(key, Arrays.asList(stat1, stat2).iterator()); Assert.assertEquals(stat1, combined); } @Test public void fileClosedTimeChoosesEarliestIgnoringDefault() { Status stat1 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(50).build(); Status stat2 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(100).build(); Status combined = combiner.typedReduce(key, Arrays.asList(stat1, stat2).iterator()); Assert.assertEquals(stat1, combined); Status stat3 = Status.newBuilder().setBegin(10).setEnd(20).setClosed(true).setInfiniteEnd(false).setCreatedTime(100).build(); Status combined2 = combiner.typedReduce(key, Arrays.asList(combined, stat3).iterator()); Assert.assertEquals(combined, combined2); } @Test public void testCombination() { List<Status> status = new ArrayList<>(); long time = System.currentTimeMillis(); status.add(StatusUtil.fileCreated(time)); status.add(StatusUtil.openWithUnknownLength()); status.add(StatusUtil.fileClosed()); Status combined = combiner.typedReduce(new Key("row"), status.iterator()); Assert.assertEquals(time, combined.getCreatedTime()); Assert.assertTrue(combined.getInfiniteEnd()); Assert.assertTrue(combined.getClosed()); } }
ACCUMULO-2232 Fix test
server/base/src/test/java/org/apache/accumulo/server/replication/StatusCombinerTest.java
ACCUMULO-2232 Fix test
<ide><path>server/base/src/test/java/org/apache/accumulo/server/replication/StatusCombinerTest.java <ide> <ide> import org.apache.accumulo.core.client.IteratorSetting; <ide> import org.apache.accumulo.core.client.IteratorSetting.Column; <add>import org.apache.accumulo.core.client.impl.BaseIteratorEnvironment; <ide> import org.apache.accumulo.core.data.Key; <ide> import org.apache.accumulo.core.iterators.Combiner; <ide> import org.apache.accumulo.core.iterators.DevNull; <add>import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope; <ide> import org.apache.accumulo.core.replication.ReplicationSchema.StatusSection; <ide> import org.apache.accumulo.server.replication.proto.Replication.Status; <ide> import org.junit.Assert; <ide> private StatusCombiner combiner; <ide> private Key key; <ide> private Status.Builder builder; <add> <add> private static class TestIE extends BaseIteratorEnvironment { <add> @Override <add> public IteratorScope getIteratorScope() { <add> return IteratorScope.scan; <add> } <add> } <ide> <ide> @Before <ide> public void initCombiner() throws IOException { <ide> builder = Status.newBuilder(); <ide> IteratorSetting cfg = new IteratorSetting(50, StatusCombiner.class); <ide> Combiner.setColumns(cfg, Collections.singletonList(new Column(StatusSection.NAME))); <del> combiner.init(new DevNull(), cfg.getOptions(), null); <add> combiner.init(new DevNull(), cfg.getOptions(), new TestIE()); <ide> } <ide> <ide> @Test
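The fix above swaps a null IteratorEnvironment for a small stub so that init() can ask for the iterator scope without a NullPointerException. Here is the pattern in isolation, using only types already imported in the record (the Accumulo core jar is assumed on the classpath; the demo class name is hypothetical):

import org.apache.accumulo.core.client.impl.BaseIteratorEnvironment;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;

public class StubEnvironmentDemo {
    // Same shape as the record's TestIE: override only what the code under test touches.
    static class ScanScopeEnv extends BaseIteratorEnvironment {
        @Override
        public IteratorScope getIteratorScope() {
            return IteratorScope.scan;
        }
    }

    public static void main(String[] args) {
        System.out.println(new ScanScopeEnv().getIteratorScope()); // prints "scan"
    }
}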
Java
apache-2.0
bc15911fbf8e30fe5b40da3ad27ee7234b31de50
0
yapengsong/ovirt-engine,yingyun001/ovirt-engine,yapengsong/ovirt-engine,yapengsong/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,halober/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,halober/ovirt-engine,walteryang47/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine,halober/ovirt-engine,eayun/ovirt-engine,halober/ovirt-engine,zerodengxinchao/ovirt-engine,walteryang47/ovirt-engine,eayun/ovirt-engine,eayun/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,eayun/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine
package org.ovirt.engine.core.bll; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; import org.ovirt.engine.core.common.action.TagsOperationParameters; import org.ovirt.engine.core.common.action.VdcActionType; import org.ovirt.engine.core.common.businessentities.Tags; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.compat.Regex; import org.ovirt.engine.core.dal.dbbroker.DbFacade; import org.ovirt.engine.core.dao.TagDAO; import org.ovirt.engine.core.utils.collections.CopyOnAccessMap; import org.ovirt.engine.core.utils.log.Log; import org.ovirt.engine.core.utils.log.LogFactory; /** * This class is responsible for in-memory tags handling. When the engine (VDC) starts, the in-memory tags tree is initialized. All tag-changing * operations go through this class */ public class TagsDirector { /** * This pattern is used to replace '\\' in the expression that may be added by handling a '_' character with an * empty string. Since we use both String and RegExp, each backslash char is represented by four backslash * characters, so for matching two backslashes we will need eight. */ private static final Pattern BACKSLASH_REMOVER = Pattern.compile("\\\\\\\\"); private enum TagReturnValueIndicator { ID, NAME } private static Log log = LogFactory.getLog(TagsDirector.class); protected static final Guid ROOT_TAG_ID = Guid.Empty; /** * In-memory nodes cache for quicker access to each node by ID: O(1) instead of the tree's O(ln N) */ private final Map<Guid, Tags> tagsMapByID = new CopyOnAccessMap<>(new HashMap<Guid, Tags>()); /** * In-memory nodes cache for quicker access to each node by name */ private final Map<String, Tags> tagsMapByName = new CopyOnAccessMap<>(new HashMap<String, Tags>()); private static TagsDirector instance = new TagsDirector(); private TagsDirector() { } /** * The in-memory tree is initialized during startup */ protected void init() { log.info("Start initializing " + getClass().getSimpleName()); tagsMapByID.clear(); tagsMapByName.clear(); Tags root = new Tags("root", null, true, ROOT_TAG_ID, "root"); AddTagToHash(root); AddChildren(root); log.info("Finished initializing " + getClass().getSimpleName()); } private void AddTagToHash(Tags tag) { tagsMapByID.put(tag.gettag_id(), tag); tagsMapByName.put(tag.gettag_name(), tag); if (tag.getparent_id() != null) { // If the tag has a parent, the parent should have in its children the added tag instead // of the old version of the tag, if it exists Tags parentTag = tagsMapByID.get(tag.getparent_id()); if (parentTag == null) { log.error(String.format("Could not obtain tag for guid %1$s", tag.getparent_id())); return; } List<Tags> parentChildren = parentTag.getChildren(); replaceTagInChildren(tag, parentChildren); AddTagToHash(parentTag); // replace the parent tag after the modification } } private static void replaceTagInChildren(Tags tag, List<Tags> parentChildren) { for (int counter = 0; counter < parentChildren.size(); counter++) { if (parentChildren.get(counter).gettag_id().equals(tag.gettag_id())) { parentChildren.set(counter, tag); break; } } } private void RemoveTagFromHash(Tags tag) { tagsMapByID.remove(tag.gettag_id()); tagsMapByName.remove(tag.gettag_name()); } /** * Recursive tree initialization call * * @param tag */ private void AddChildren(Tags tag) { log.infoFormat("Tag {0} added to tree", tag.gettag_name()); List<Tags> children = getTagDAO().getAllForParent(tag.gettag_id()); for (Tags child :
children) { AddChildren(child); log.infoFormat("Tag {0} added as child to parent {1}", child.gettag_name(), tag.gettag_name()); tag.getChildren().add(child); AddTagToHash(tag); AddTagToHash(child); } } protected TagDAO getTagDAO() { return DbFacade.getInstance().getTagDao(); } private void RemoveTagAndChildren(Tags tag) { for (Tags child : tag.getChildren()) { RemoveTagAndChildren(child); } RemoveTagFromHash(tag); } public static TagsDirector getInstance() { return instance; } public void AddTag(Tags tag) { if (tagsMapByID.containsKey(tag.getparent_id())) { Tags parent = tagsMapByID.get(tag.getparent_id()); parent.getChildren().add(tag); AddTagToHash(tag); AddTagToHash(parent); } else { log.errorFormat("Trying to add tag {0}, parent doesn't exist in Data Structure - {1}", tag.gettag_name(), tag.getparent_id()); } } /** * Remove tag operation. For tag with children all tag's children will be removed as well * * @param tagId * tag to remove */ public void RemoveTag(Guid tagId) { if (tagsMapByID.containsKey(tagId)) { Tags tag = tagsMapByID.get(tagId); RemoveTagAndChildren(tag); Tags parent = tagsMapByID.get(tag.getparent_id()); parent.getChildren().remove(tag); AddTagToHash(parent); } else { log.warnFormat("Trying to remove tag, not exists in Data Structure - {0}", tagId); } } /** * Update tag. We assume that the id doesn't change. * * @param tag */ public void UpdateTag(Tags tag) { if (tagsMapByID.containsKey(tag.gettag_id())) { Tags tagFromCache = tagsMapByID.get(tag.gettag_id()); String oldName = tagFromCache.gettag_name(); // check if tag name has changed. If it has - modify name dictionary // accordingly: if (!tag.gettag_name().equals(oldName)) { tagsMapByName.remove(oldName); } // Copy the children of the cached tag to keep the object hierarchy consistent. tag.setChildren(tagFromCache.getChildren()); AddTagToHash(tag); } else { log.warnFormat("Trying to update tag, not exists in Data Structure - {0}", tag.gettag_name()); } } public void MoveTag(Guid tagId, Guid newParent) { if (tagsMapByID.containsKey(tagId)) { Tags tag = tagsMapByID.get(tagId); if (tagsMapByID.containsKey(newParent)) { if (tagsMapByID.containsKey(tag.getparent_id())) { Tags parentTag = tagsMapByID.get(tag.getparent_id()); parentTag.getChildren().remove(tag); AddTagToHash(parentTag); } else { log.warnFormat("Trying to move tag from parent that doesn't exist in Data Structure - {0}", tag.getparent_id()); } Tags newParentTag = tagsMapByID.get(newParent); newParentTag.getChildren().add(tag); tag.setparent_id(newParent); AddTagToHash(newParentTag); // Parent got changed, modify it. updateTagInBackend(tag); } else { log.errorFormat("Trying to move tag, to parent not exists in Data Structure - {0}", newParent); } } else { log.errorFormat("Trying to move tag, not exists in Data Structure - {0}", tagId); } } protected void updateTagInBackend(Tags tag) { Backend.getInstance().runInternalAction(VdcActionType.UpdateTag, new TagsOperationParameters(tag)); } private String GetTagIdAndParentsIds(Tags tag) { StringBuilder builder = new StringBuilder(); builder.append(tag.gettag_id()); Guid tempTagId = new Guid(tag.getparent_id().toString()); while (!tempTagId.equals(Guid.Empty)) { builder.append(String.format(",%1$s", tempTagId)); tag = GetTagById(tempTagId); tempTagId = new Guid(tag.getparent_id().toString()); } return builder.toString(); } /** * This function will return the tag's ID and its parents IDs. * * @param tagId * the tag ID. * @return a comma separated list of IDs. 
*/ public String GetTagIdAndParentsIds(Guid tagId) { Tags tag = GetTagById(tagId); return GetTagIdAndParentsIds(tag); } /** * This function will return the tag's ID and its children IDs. Its used to determine if a tag is assigned to an * entity. Tag is determined as assigned to an entity if the entity is assigned to the tag or to one of its * children. * * @param tagId * the ID of the 'root' tag. * @return a comma separated list of IDs. */ public String GetTagIdAndChildrenIds(Guid tagId) { Tags tag = GetTagById(tagId); if (tag == null) { return StringUtils.EMPTY; } StringBuilder sb = tag.getTagIdAndChildrenIds(); return sb.toString(); } public String GetTagNameAndChildrenNames(Guid tagId) { Tags tag = GetTagById(tagId); StringBuilder sb = tag.getTagNameAndChildrenNames(); return sb.toString(); } public HashSet<Guid> GetTagIdAndChildrenIdsAsSet(Guid tagId) { Tags tag = GetTagById(tagId); HashSet<Guid> set = new HashSet<>(); tag.getTagIdAndChildrenIdsAsList(set); return set; } /** * This function will return the tag's ID and its children IDs. Its used to determine if a tag is assigned to an * entity. Tag is determined as assigned to an entity if the entity is assigned to the tag or to one of its * children. * * @param tagName * the name of the 'root' tag. * @return a comma separated list of IDs. */ public String GetTagIdAndChildrenIds(String tagName) { Tags tag = GetTagByName(tagName); StringBuilder sb = tag.getTagIdAndChildrenIds(); return sb.toString(); } public String GetTagNamesAndChildrenNamesByRegExp(String tagNameRegExp) { // add RegEx chars or beginning of string ('^') and end of string ('$'): tagNameRegExp = String.format("^%1$s$", tagNameRegExp); // convert to the regular expression format: tagNameRegExp = tagNameRegExp.replace("*", ".*"); StringBuilder sb = new StringBuilder(); RecursiveGetTagsAndChildrenByRegExp(tagNameRegExp, sb, GetRootTag(), TagReturnValueIndicator.NAME); return sb.toString(); } private static void RecursiveGetTagsAndChildrenByRegExp(String tagNameRegExp, StringBuilder sb, Tags tag, TagReturnValueIndicator indicator) { if ((tag.getChildren() != null) && !tag.getChildren().isEmpty()) { tagNameRegExp = BACKSLASH_REMOVER.matcher(tagNameRegExp).replaceAll(""); for (Tags child : tag.getChildren()) { if (Regex.IsMatch(child.gettag_name(), tagNameRegExp)) { // the tag matches the regular expression -> add it and all its // children // (we prevent searching a regular expression match on them - // unnecessary). if (sb.length() == 0) { if (indicator == TagReturnValueIndicator.ID) { sb.append(child.getTagIdAndChildrenIds()); } else { sb.append(child.getTagNameAndChildrenNames()); } } else { if (indicator == TagReturnValueIndicator.ID) { sb.append(String.format(",%1$s", child.getTagIdAndChildrenIds())); } else { sb.append(String.format(",%1$s", child.getTagNameAndChildrenNames())); } } } else { RecursiveGetTagsAndChildrenByRegExp(tagNameRegExp, sb, child, indicator); } } } } /** * Get tag from in memory data structure (by ID). This tag will be with all children tree initialized as opposite to * tag from db. * * @param tagId * @return */ public Tags GetTagById(Guid tagId) { if (tagsMapByID.containsKey(tagId)) { return tagsMapByID.get(tagId); } else { return null; } } /** * Get tag from in memory data structure (by name). * * @param tagName * @return */ public Tags GetTagByName(String tagName) { if (tagsMapByName.containsKey(tagName)) { return tagsMapByName.get(tagName); } else { return null; } } /** * Gets a list of all the tags in the system. * * @return a tags list. 
*/ public ArrayList<Tags> GetAllTags() { ArrayList<Tags> ret = new ArrayList<>(tagsMapByID.values()); // remove the root - it is not a real tag: ret.remove(GetRootTag()); return ret; } /** * Returns the root tag in the system. * * @return the root tag. */ public Tags GetRootTag() { return tagsMapByID.get(ROOT_TAG_ID); } public boolean IsTagDescestorOfTag(Guid sourceTagId, Guid potentialDescestorId) { if (sourceTagId.equals(potentialDescestorId)) { return true; } Tags tag = GetTagById(sourceTagId); if (tag != null && tag.getChildren() != null) { for (Tags childTag : tag.getChildren()) { if (IsTagDescestorOfTag(childTag.gettag_id(), potentialDescestorId)) { return true; } } } return false; } }
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/TagsDirector.java
package org.ovirt.engine.core.bll; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; import org.ovirt.engine.core.common.action.TagsOperationParameters; import org.ovirt.engine.core.common.action.VdcActionType; import org.ovirt.engine.core.common.businessentities.Tags; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.compat.Regex; import org.ovirt.engine.core.dal.dbbroker.DbFacade; import org.ovirt.engine.core.dao.TagDAO; import org.ovirt.engine.core.utils.collections.CopyOnAccessMap; import org.ovirt.engine.core.utils.log.Log; import org.ovirt.engine.core.utils.log.LogFactory; /** * This class is responsible for in-memory tags handling. When the engine (VDC) starts, the in-memory tags tree is initialized. All tag-changing * operations go through this class */ public class TagsDirector { /** * This pattern is used to replace '\\' in the expression that may be added by handling a '_' character with an * empty string. Since we use both String and RegExp, each backslash char is represented by four backslash * characters, so for matching two backslashes we will need eight. */ private static final Pattern BACKSLASH_REMOVER = Pattern.compile("\\\\\\\\"); private enum TagReturnValueIndicator { ID, NAME } private static Log log = LogFactory.getLog(TagsDirector.class); protected static final Guid ROOT_TAG_ID = Guid.Empty; /** * In-memory nodes cache for quicker access to each node by ID: O(1) instead of the tree's O(ln N) */ private final Map<Guid, Tags> tagsMapByID = new CopyOnAccessMap<>(new HashMap<Guid, Tags>()); /** * In-memory nodes cache for quicker access to each node by name */ private final Map<String, Tags> tagsMapByName = new CopyOnAccessMap<>(new HashMap<String, Tags>()); private static TagsDirector instance = new TagsDirector(); private TagsDirector() { } /** * The in-memory tree is initialized during startup */ protected void init() { log.info("Start initializing " + getClass().getSimpleName()); tagsMapByID.clear(); tagsMapByName.clear(); Tags root = new Tags("root", null, true, ROOT_TAG_ID, "root"); AddTagToHash(root); AddChildren(root); log.info("Finished initializing " + getClass().getSimpleName()); } private void AddTagToHash(Tags tag) { tagsMapByID.put(tag.gettag_id(), tag); tagsMapByName.put(tag.gettag_name(), tag); if (tag.getparent_id() != null) { // If the tag has a parent, the parent should have in its children the added tag instead // of the old version of the tag, if it exists Tags parentTag = tagsMapByID.get(tag.getparent_id()); if (parentTag == null) { log.error(String.format("Could not obtain tag for guid %1$s", tag.getparent_id())); return; } List<Tags> parentChildren = parentTag.getChildren(); replaceTagInChildren(tag, parentChildren); AddTagToHash(parentTag); // replace the parent tag after the modification } } private static void replaceTagInChildren(Tags tag, List<Tags> parentChildren) { for (int counter = 0; counter < parentChildren.size(); counter++) { if (parentChildren.get(counter).gettag_id().equals(tag.gettag_id())) { parentChildren.set(counter, tag); break; } } } private void RemoveTagFromHash(Tags tag) { tagsMapByID.remove(tag.gettag_id()); tagsMapByName.remove(tag.gettag_name()); } /** * Recursive tree initialization call * * @param tag */ private void AddChildren(Tags tag) { log.infoFormat("Tag {0} added to tree", tag.gettag_name()); List<Tags> children = getTagDAO().getAllForParent(tag.gettag_id()); for (Tags child :
children) { AddChildren(child); log.infoFormat("Tag {0} added as child to parent {1}", child.gettag_name(), tag.gettag_name()); tag.getChildren().add(child); AddTagToHash(tag); AddTagToHash(child); } } protected TagDAO getTagDAO() { return DbFacade.getInstance().getTagDao(); } private void RemoveTagAndChildren(Tags tag) { for (Tags child : tag.getChildren()) { RemoveTagAndChildren(child); } RemoveTagFromHash(tag); } public static TagsDirector getInstance() { return instance; } public void AddTag(Tags tag) { if (tagsMapByID.containsKey(tag.getparent_id())) { Tags parent = tagsMapByID.get(tag.getparent_id()); parent.getChildren().add(tag); AddTagToHash(tag); AddTagToHash(parent); } else { log.errorFormat("Trying to add tag {0}, parent doesn't exist in Data Structure - {1}", tag.gettag_name(), tag.getparent_id()); } } /** * Remove tag operation. For tag with children all tag's children will be removed as well * * @param tagId * tag to remove */ public void RemoveTag(Guid tagId) { if (tagsMapByID.containsKey(tagId)) { Tags tag = tagsMapByID.get(tagId); RemoveTagAndChildren(tag); Tags parent = tagsMapByID.get(tag.getparent_id()); parent.getChildren().remove(tag); AddTagToHash(parent); } else { log.warnFormat("Trying to remove tag, not exists in Data Structure - {0}", tagId); } } /** * Update tag. We assume that the id doesn't change. * * @param tag */ public void UpdateTag(Tags tag) { if (tagsMapByID.containsKey(tag.gettag_id())) { Tags tagFromCache = tagsMapByID.get(tag.gettag_id()); String oldName = tagFromCache.gettag_name(); // check if tag name has changed. If it has - modify name dictionary // accordingly: if (!tag.gettag_name().equals(oldName)) { tagsMapByName.remove(oldName); } // Copy the children of the cached tag to keep the object hierarchy consistent. tag.setChildren(tagFromCache.getChildren()); AddTagToHash(tag); } else { log.warnFormat("Trying to update tag, not exists in Data Structure - {0}", tag.gettag_name()); } } public void MoveTag(Guid tagId, Guid newParent) { if (tagsMapByID.containsKey(tagId)) { Tags tag = tagsMapByID.get(tagId); if (tagsMapByID.containsKey(newParent)) { if (tagsMapByID.containsKey(tag.getparent_id())) { Tags parentTag = tagsMapByID.get(tag.getparent_id()); parentTag.getChildren().remove(tag); AddTagToHash(parentTag); } else { log.warnFormat("Trying to move tag from parent that doesn't exist in Data Structure - {0}", tag.getparent_id()); } Tags newParentTag = tagsMapByID.get(newParent); newParentTag.getChildren().add(tag); tag.setparent_id(newParent); AddTagToHash(newParentTag); // Parent got changed, modify it. updateTagInBackend(tag); } else { log.errorFormat("Trying to move tag, to parent not exists in Data Structure - {0}", newParent); } } else { log.errorFormat("Trying to move tag, not exists in Data Structure - {0}", tagId); } } protected void updateTagInBackend(Tags tag) { Backend.getInstance().runInternalAction(VdcActionType.UpdateTag, new TagsOperationParameters(tag)); } private String GetTagIdAndParentsIds(Tags tag) { StringBuilder builder = new StringBuilder(); builder.append(tag.gettag_id()); Guid tempTagId = new Guid(tag.getparent_id().toString()); while (!tempTagId.equals(Guid.Empty)) { builder.append(String.format(",%1$s", tempTagId)); tag = GetTagById(tempTagId); tempTagId = new Guid(tag.getparent_id().toString()); } return builder.toString(); } /** * This function will return the tag's ID and its parents IDs. * * @param tagId * the tag ID. * @return a comma separated list of IDs. 
*/ public String GetTagIdAndParentsIds(Guid tagId) { Tags tag = GetTagById(tagId); return GetTagIdAndParentsIds(tag); } /** * This function will return the tag's ID and its children IDs. Its used to determine if a tag is assigned to an * entity. Tag is determined as assigned to an entity if the entity is assigned to the tag or to one of its * children. * * @param tagId * the ID of the 'root' tag. * @return a comma separated list of IDs. */ public String GetTagIdAndChildrenIds(Guid tagId) { Tags tag = GetTagById(tagId); if (tag == null) { return StringUtils.EMPTY; } StringBuilder sb = tag.getTagIdAndChildrenIds(); return sb.toString(); } public String GetTagNameAndChildrenNames(Guid tagId) { Tags tag = GetTagById(tagId); StringBuilder sb = tag.getTagNameAndChildrenNames(); return sb.toString(); } public HashSet<Guid> GetTagIdAndChildrenIdsAsSet(Guid tagId) { Tags tag = GetTagById(tagId); HashSet<Guid> set = new HashSet<>(); tag.getTagIdAndChildrenIdsAsList(set); return set; } /** * This function will return the tag's ID and its children IDs. Its used to determine if a tag is assigned to an * entity. Tag is determined as assigned to an entity if the entity is assigned to the tag or to one of its * children. * * @param tagName * the name of the 'root' tag. * @return a comma separated list of IDs. */ public String GetTagIdAndChildrenIds(String tagName) { Tags tag = GetTagByName(tagName); StringBuilder sb = tag.getTagIdAndChildrenIds(); return sb.toString(); } public String GetTagNamesAndChildrenNamesByRegExp(String tagNameRegExp) { // add RegEx chars or beginning of string ('^') and end of string ('$'): tagNameRegExp = String.format("^%1$s$", tagNameRegExp); // convert to the regular expression format: tagNameRegExp = tagNameRegExp.replace("*", ".*"); StringBuilder sb = new StringBuilder(); RecursiveGetTagsAndChildrenByRegExp(tagNameRegExp, sb, GetRootTag(), TagReturnValueIndicator.NAME); return sb.toString(); } private static void RecursiveGetTagsAndChildrenByRegExp(String tagNameRegExp, StringBuilder sb, Tags tag, TagReturnValueIndicator indicator) { if ((tag.getChildren() != null) && !tag.getChildren().isEmpty()) { tagNameRegExp = BACKSLASH_REMOVER.matcher(tagNameRegExp).replaceAll(""); for (Tags child : tag.getChildren()) { if (Regex.IsMatch(child.gettag_name(), tagNameRegExp)) { // the tag matches the regular expression -> add it and all its // children // (we prevent searching a regular expression match on them - // unnecessary). if (sb.length() == 0) { if (indicator == TagReturnValueIndicator.ID) sb.append(child.getTagIdAndChildrenIds()); else sb.append(child.getTagNameAndChildrenNames()); } else { if (indicator == TagReturnValueIndicator.ID) sb.append(String.format(",%1$s", child.getTagIdAndChildrenIds())); else sb.append(String.format(",%1$s", child.getTagNameAndChildrenNames())); } } else { RecursiveGetTagsAndChildrenByRegExp(tagNameRegExp, sb, child, indicator); } } } } /** * Get tag from in memory data structure (by ID). This tag will be with all children tree initialized as opposite to * tag from db. * * @param tagId * @return */ public Tags GetTagById(Guid tagId) { if (tagsMapByID.containsKey(tagId)) { return tagsMapByID.get(tagId); } else { return null; } } /** * Get tag from in memory data structure (by name). * * @param tagName * @return */ public Tags GetTagByName(String tagName) { if (tagsMapByName.containsKey(tagName)) { return tagsMapByName.get(tagName); } else { return null; } } /** * Gets a list of all the tags in the system. * * @return a tags list. 
*/ public ArrayList<Tags> GetAllTags() { ArrayList<Tags> ret = new ArrayList<>(tagsMapByID.values()); // remove the root - it is not a real tag: ret.remove(GetRootTag()); return ret; } /** * Returns the root tag in the system. * * @return the root tag. */ public Tags GetRootTag() { return tagsMapByID.get(ROOT_TAG_ID); } public boolean IsTagDescestorOfTag(Guid sourceTagId, Guid potentialDescestorId) { if (sourceTagId.equals(potentialDescestorId)) { return true; } Tags tag = GetTagById(sourceTagId); if (tag != null && tag.getChildren() != null) { for (Tags childTag : tag.getChildren()) { if (IsTagDescestorOfTag(childTag.gettag_id(), potentialDescestorId)) { return true; } } } return false; } }
core: TagsDirector control flow braces Added curly braces to control structures, as per proper Java conventions. Change-Id: I3d627091c2c8ef53f762f111f7c7efbdfbb300bb Signed-off-by: Allon Mureinik <[email protected]>
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/TagsDirector.java
core: TagsDirector control flow braces
<ide><path>backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/TagsDirector.java <ide> // (we prevent searching a regular expression match on them - <ide> // unnecessary). <ide> if (sb.length() == 0) { <del> if (indicator == TagReturnValueIndicator.ID) <add> if (indicator == TagReturnValueIndicator.ID) { <ide> sb.append(child.getTagIdAndChildrenIds()); <del> else <add> } else { <ide> sb.append(child.getTagNameAndChildrenNames()); <add> } <ide> } else { <del> if (indicator == TagReturnValueIndicator.ID) <add> if (indicator == TagReturnValueIndicator.ID) { <ide> sb.append(String.format(",%1$s", child.getTagIdAndChildrenIds())); <del> else <add> } else { <ide> sb.append(String.format(",%1$s", child.getTagNameAndChildrenNames())); <add> } <ide> } <ide> } else { <ide> RecursiveGetTagsAndChildrenByRegExp(tagNameRegExp, sb, child, indicator);
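The diff above mechanically wraps every single-statement branch in braces. As a minimal, self-contained illustration of the convention being enforced (the class and values below are invented for this sketch, not taken from the oVirt codebase):

    public class BraceConventionDemo {
        // Brace-less form, which the commit removes: legal Java, but a later
        // edit that adds a second statement to a branch silently lands outside it.
        static String unbraced(boolean useId) {
            if (useId)
                return "id";
            else
                return "name";
        }

        // Braced form, which the commit introduces: every branch body is
        // delimited, so adding statements later cannot change the control flow.
        static String braced(boolean useId) {
            if (useId) {
                return "id";
            } else {
                return "name";
            }
        }

        public static void main(String[] args) {
            System.out.println(unbraced(true));  // prints "id"
            System.out.println(braced(false));   // prints "name"
        }
    }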
Java
apache-2.0
91a40c1924181f810f161fac206c69258a435b54
0
nhl/link-move
package com.nhl.link.move.df; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.Collections; import java.util.List; import static java.util.Arrays.asList; import static org.junit.Assert.*; public class LazyDataFrameTest { private Index columns; private List<DataRow> rows; @Before public void initDataFrameParts() { this.columns = new Index("a", "b"); this.rows = asList( new ArrayDataRow(columns, "one", 1), new ArrayDataRow(columns, "two", 2), new ArrayDataRow(columns, "three", 3), new ArrayDataRow(columns, "four", 4)); } @Test public void testForEach() { List<DataRow> consumed = new ArrayList<>(); new LazyDataFrame(columns, rows).forEach(consumed::add); assertEquals(4, consumed.size()); assertEquals(rows, consumed); } @Test public void testHead() { List<DataRow> consumed = new ArrayList<>(); new LazyDataFrame(columns, rows).head(3).forEach(consumed::add); assertEquals(3, consumed.size()); assertEquals(rows.subList(0, 3), consumed); } @Test public void testRenameColumn() { DataFrame df = new LazyDataFrame(columns, rows).renameColumn("b", "c"); assertEquals(2, df.getColumns().size()); assertNotSame(columns, df.getColumns()); assertEquals("a", df.getColumns().getColumns()[0]); assertEquals("c", df.getColumns().getColumns()[1]); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("a")); assertEquals(Integer.valueOf(1), consumed.get(0).get("c")); } @Test public void testMapColumn() { DataFrame df = new LazyDataFrame(columns, rows).mapColumn("b", x -> x != null ? x.toString() : null); assertEquals(2, df.getColumns().size()); assertSame(columns, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("a")); assertEquals("1", consumed.get(0).get("b")); assertEquals("2", consumed.get(1).get("b")); assertEquals("3", consumed.get(2).get("b")); assertEquals("4", consumed.get(3).get("b")); } @Test public void testMap() { DataFrame df = new LazyDataFrame(columns, rows) .map(r -> r .mapColumn(0, (String v) -> v + "_") .mapColumn(1, (Integer i) -> i != null ? i * 10 : null)); assertEquals(2, df.getColumns().size()); assertSame(columns, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one_", consumed.get(0).get("a")); assertEquals(10, consumed.get(0).get("b")); assertEquals("two_", consumed.get(1).get("a")); assertEquals(20, consumed.get(1).get("b")); assertEquals("three_", consumed.get(2).get("a")); assertEquals(30, consumed.get(2).get("b")); assertEquals("four_", consumed.get(3).get("a")); assertEquals(40, consumed.get(3).get("b")); } @Test public void testMap_ChangeRowStructure() { Index i1 = new Index("c", "d", "e"); DataFrame df = new LazyDataFrame(columns, rows) .map(r -> new ArrayDataRow( i1, r.get(0), r.get(1) != null ? 
((int) r.get(1)) * 10 : 0, r.get(1))); assertSame(i1, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("c")); assertEquals(10, consumed.get(0).get("d")); assertEquals(1, consumed.get(0).get("e")); assertEquals("two", consumed.get(1).get("c")); assertEquals(20, consumed.get(1).get("d")); assertEquals(2, consumed.get(1).get("e")); assertEquals("three", consumed.get(2).get("c")); assertEquals(30, consumed.get(2).get("d")); assertEquals(3, consumed.get(2).get("e")); assertEquals("four", consumed.get(3).get("c")); assertEquals(40, consumed.get(3).get("d")); assertEquals(4, consumed.get(3).get("e")); } @Test public void testMap_ChangeRowStructure_Chained() { Index i1 = new Index("c", "d", "e"); Index i2 = new Index("f", "g"); DataFrame df = new LazyDataFrame(columns, rows) .map(r -> new ArrayDataRow( i1, r.get(0), r.get(1) != null ? ((int) r.get(1)) * 10 : 0, r.get(1))) .map(r -> new ArrayDataRow( i2, r.get(0), r.get(1))); assertSame(i2, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("f")); assertEquals(10, consumed.get(0).get("g")); assertEquals("two", consumed.get(1).get("f")); assertEquals(20, consumed.get(1).get("g")); assertEquals("three", consumed.get(2).get("f")); assertEquals(30, consumed.get(2).get("g")); assertEquals("four", consumed.get(3).get("f")); assertEquals(40, consumed.get(3).get("g")); } @Test public void testMap_ChangeRowStructure_EmptyDF() { Index altColumns = new Index("c", "d", "e"); DataFrame df = new LazyDataFrame(columns, Collections.emptyList()) .map(r -> new ArrayDataRow( altColumns, r.get(0), r.get(1) != null ? ((int) r.get(1)) * 10 : 0, r.get(1))); assertEquals(3, df.getColumns().size()); assertSame(altColumns, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(0, consumed.size()); } @Test public void testToString() { DataFrame df = new LazyDataFrame(columns, rows); assertEquals("LazyDataFrame [{a:one,b:1},{a:two,b:2},{a:three,b:3},...]", df.toString()); } }
link-move-dataframe/src/test/java/com/nhl/link/move/df/LazyDataFrameTest.java
package com.nhl.link.move.df; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.Collections; import java.util.List; import static java.util.Arrays.asList; import static org.junit.Assert.*; public class LazyDataFrameTest { private Index columns; private List<DataRow> rows; @Before public void initDataFrameParts() { this.columns = new Index("a", "b"); this.rows = asList( new ArrayDataRow(columns, "one", 1), new ArrayDataRow(columns, "two", 2), new ArrayDataRow(columns, "three", 3), new ArrayDataRow(columns, "four", 4)); } @Test public void testForEach() { List<DataRow> consumed = new ArrayList<>(); new LazyDataFrame(columns, rows).forEach(consumed::add); assertEquals(4, consumed.size()); assertEquals(rows, consumed); } @Test public void testHead() { List<DataRow> consumed = new ArrayList<>(); new LazyDataFrame(columns, rows).head(3).forEach(consumed::add); assertEquals(3, consumed.size()); assertEquals(rows.subList(0, 3), consumed); } @Test public void testRenameColumn() { DataFrame df = new LazyDataFrame(columns, rows).renameColumn("b", "c"); assertEquals(2, df.getColumns().size()); assertNotSame(columns, df.getColumns()); assertEquals("a", df.getColumns().getColumns()[0]); assertEquals("c", df.getColumns().getColumns()[1]); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("a")); assertEquals(Integer.valueOf(1), consumed.get(0).get("c")); } @Test public void testMapColumn() { DataFrame df = new LazyDataFrame(columns, rows).mapColumn("b", Object::toString); assertEquals(2, df.getColumns().size()); assertSame(columns, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("a")); assertEquals("1", consumed.get(0).get("b")); assertEquals("2", consumed.get(1).get("b")); assertEquals("3", consumed.get(2).get("b")); assertEquals("4", consumed.get(3).get("b")); } @Test public void testMap() { DataFrame df = new LazyDataFrame(columns, rows) .map(r -> r .mapColumn(0, (String v) -> v + "_") .mapColumn(1, (Integer i) -> i * 10)); assertEquals(2, df.getColumns().size()); assertSame(columns, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one_", consumed.get(0).get("a")); assertEquals(10, consumed.get(0).get("b")); assertEquals("two_", consumed.get(1).get("a")); assertEquals(20, consumed.get(1).get("b")); assertEquals("three_", consumed.get(2).get("a")); assertEquals(30, consumed.get(2).get("b")); assertEquals("four_", consumed.get(3).get("a")); assertEquals(40, consumed.get(3).get("b")); } @Test public void testMap_ChangeRowStructure() { Index i1 = new Index("c", "d", "e"); DataFrame df = new LazyDataFrame(columns, rows) .map(r -> new ArrayDataRow( i1, r.get(0), r.get(1) != null ? 
((int) r.get(1)) * 10 : 0, r.get(1))); assertSame(i1, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("c")); assertEquals(10, consumed.get(0).get("d")); assertEquals(1, consumed.get(0).get("e")); assertEquals("two", consumed.get(1).get("c")); assertEquals(20, consumed.get(1).get("d")); assertEquals(2, consumed.get(1).get("e")); assertEquals("three", consumed.get(2).get("c")); assertEquals(30, consumed.get(2).get("d")); assertEquals(3, consumed.get(2).get("e")); assertEquals("four", consumed.get(3).get("c")); assertEquals(40, consumed.get(3).get("d")); assertEquals(4, consumed.get(3).get("e")); } @Test public void testMap_ChangeRowStructure_Chained() { Index i1 = new Index("c", "d", "e"); Index i2 = new Index("f", "g"); DataFrame df = new LazyDataFrame(columns, rows) .map(r -> new ArrayDataRow( i1, r.get(0), r.get(1) != null ? ((int) r.get(1)) * 10 : 0, r.get(1))) .map(r -> new ArrayDataRow( i2, r.get(0), r.get(1))); assertSame(i2, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(4, consumed.size()); assertNotEquals(rows, consumed); assertEquals("one", consumed.get(0).get("f")); assertEquals(10, consumed.get(0).get("g")); assertEquals("two", consumed.get(1).get("f")); assertEquals(20, consumed.get(1).get("g")); assertEquals("three", consumed.get(2).get("f")); assertEquals(30, consumed.get(2).get("g")); assertEquals("four", consumed.get(3).get("f")); assertEquals(40, consumed.get(3).get("g")); } @Test public void testMap_ChangeRowStructure_EmptyDF() { Index altColumns = new Index("c", "d", "e"); DataFrame df = new LazyDataFrame(columns, Collections.emptyList()) .map(r -> new ArrayDataRow( altColumns, r.get(0), r.get(1) != null ? ((int) r.get(1)) * 10 : 0, r.get(1))); assertEquals(3, df.getColumns().size()); assertSame(altColumns, df.getColumns()); List<DataRow> consumed = new ArrayList<>(); df.forEach(consumed::add); assertEquals(0, consumed.size()); } @Test public void testToString() { DataFrame df = new LazyDataFrame(columns, rows); assertEquals("LazyDataFrame [{a:one,b:1},{a:two,b:2},{a:three,b:3},...]", df.toString()); } }
DataFrame based micro-batches #161 * null checking in test lambdas
link-move-dataframe/src/test/java/com/nhl/link/move/df/LazyDataFrameTest.java
DataFrame based micro-batches #161
<ide><path>link-move-dataframe/src/test/java/com/nhl/link/move/df/LazyDataFrameTest.java <ide> @Test <ide> public void testMapColumn() { <ide> <del> DataFrame df = new LazyDataFrame(columns, rows).mapColumn("b", Object::toString); <add> DataFrame df = new LazyDataFrame(columns, rows).mapColumn("b", x -> x != null ? x.toString() : null); <ide> <ide> assertEquals(2, df.getColumns().size()); <ide> assertSame(columns, df.getColumns()); <ide> DataFrame df = new LazyDataFrame(columns, rows) <ide> .map(r -> r <ide> .mapColumn(0, (String v) -> v + "_") <del> .mapColumn(1, (Integer i) -> i * 10)); <add> .mapColumn(1, (Integer i) -> i != null ? i * 10 : null)); <ide> <ide> assertEquals(2, df.getColumns().size()); <ide> assertSame(columns, df.getColumns());
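The change above swaps a method reference for a null-checking lambda. A minimal standalone sketch of why that matters (not part of the link-move test suite itself): Object::toString, used as a Function, dereferences its argument, so a null cell throws a NullPointerException, while the conditional lambda maps null to null.

    import java.util.function.Function;

    public class NullSafeMapperDemo {
        public static void main(String[] args) {
            // Throws NullPointerException when applied to null.
            Function<Object, String> unsafe = Object::toString;

            // The replacement introduced by the commit: null stays null.
            Function<Object, String> safe = x -> x != null ? x.toString() : null;

            System.out.println(safe.apply(42));   // prints "42"
            System.out.println(safe.apply(null)); // prints "null"
            try {
                unsafe.apply(null);
            } catch (NullPointerException e) {
                System.out.println("NPE from Object::toString on a null cell");
            }
        }
    }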
Java
apache-2.0
0362419d1ad6d2797f5255868134ea3eb9119566
0
ncjones/editorconfig-eclipse,johanhammar/editorconfig-eclipse,paulvi/editorconfig-eclipse,paulvi/editorconfig-eclipse,ncjones/editorconfig-eclipse,johanhammar/editorconfig-eclipse,paulvi/editorconfig-eclipse,johanhammar/editorconfig-eclipse,ncjones/editorconfig-eclipse
/* * Copyright 2015 Nathan Jones * * This file is part of "EditorConfig Eclipse". * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ncjones.editorconfig.eclipse; import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.preferences.IEclipsePreferences; import org.eclipse.core.runtime.preferences.InstanceScope; import com.ncjones.editorconfig.core.ConfigProperty; import com.ncjones.editorconfig.core.ConfigPropertyVisitor; import com.ncjones.editorconfig.core.EditorConfigService; import com.ncjones.editorconfig.core.EditorFileConfig; import com.ncjones.editorconfig.core.EndOfLineOption; import com.ncjones.editorconfig.core.IndentStyleOption; public class EditorConfigEditorActivationHandler implements EditorActivationHandler, ConfigPropertyVisitor { private final EditorConfigService editorConfigService; public EditorConfigEditorActivationHandler(final EditorConfigService editorConfigService) { this.editorConfigService = editorConfigService; } @Override public void editorActivated(final IFile editorFile) { final EditorFileConfig fileEditorConfig = getEditorFileConfig(editorFile); System.out.println("Editor activated: " + fileEditorConfig); for (final ConfigProperty<?> configProperty : fileEditorConfig.getConfigProperties()) { configProperty.accept(this); } } private EditorFileConfig getEditorFileConfig(final IFile file) { final String path = file.getLocation().toOSString(); return editorConfigService.getEditorConfig(path); } private void setPreference(final String prefsNodeName, final String key, final String value) { System.out.println(String.format("Setting preference: %s/%s=%s", prefsNodeName, key, value)); final IEclipsePreferences prefs = InstanceScope.INSTANCE.getNode(prefsNodeName); prefs.put(key, value); } @Override public void visitIndentStyle(final ConfigProperty<IndentStyleOption> property) { final Boolean spacesForTabs = property.getValue().equals(IndentStyleOption.SPACE); setPreference("org.eclipse.ui.editors", "spacesForTabs", spacesForTabs.toString()); setPreference("org.eclipse.wst.xml.core", "indentationChar", spacesForTabs ? 
"space" : "tab"); setPreference("org.eclipse.ant.ui", "formatter_tab_char", Boolean.toString(!spacesForTabs)); } @Override public void visitIndentSize(final ConfigProperty<Integer> property) { final String indentSizeString = property.getValue().toString(); setPreference("org.eclipse.ui.editors", "tabWidth", indentSizeString); setPreference("org.eclipse.wst.xml.core", "indentationSize", indentSizeString); setPreference("org.eclipse.ant.ui", "formatter_tab_size", indentSizeString); } @Override public void visitTabWidth(final ConfigProperty<Integer> property) { setPreference("org.eclipse.jdt.core", "org.eclipse.jdt.core.formatter.tabulation.size", property.getValue().toString()); } @Override public void visitEndOfLine(final ConfigProperty<EndOfLineOption> property) { setPreference("org.eclipse.core.runtime", "line.separator", property.getValue().getEndOfLineString()); } @Override public void visitCharset(final ConfigProperty<String> property) { setPreference("org.eclipse.core.resources", "encoding", property.getValue().toUpperCase()); } @Override public void visitTrimTrailingWhitespace(final ConfigProperty<Boolean> property) { setPreference("org.eclipse.jdt.ui", "sp_cleanup.remove_trailing_whitespaces", property.getValue().toString()); } @Override public void visitInsertFinalNewLine(final ConfigProperty<Boolean> property) { setPreference("org.eclipse.jdt.core", "org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing", property.getValue().toString()); } }
editorconfig-eclipse-plugin/src/main/java/com/ncjones/editorconfig/eclipse/EditorConfigEditorActivationHandler.java
/* * Copyright 2015 Nathan Jones * * This file is part of "EditorConfig Eclipse". * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ncjones.editorconfig.eclipse; import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.preferences.IEclipsePreferences; import org.eclipse.core.runtime.preferences.InstanceScope; import com.ncjones.editorconfig.core.ConfigProperty; import com.ncjones.editorconfig.core.ConfigPropertyVisitor; import com.ncjones.editorconfig.core.EditorConfigService; import com.ncjones.editorconfig.core.EditorFileConfig; import com.ncjones.editorconfig.core.EndOfLineOption; import com.ncjones.editorconfig.core.IndentStyleOption; public class EditorConfigEditorActivationHandler implements EditorActivationHandler, ConfigPropertyVisitor { private final EditorConfigService editorConfigService; public EditorConfigEditorActivationHandler(final EditorConfigService editorConfigService) { this.editorConfigService = editorConfigService; } @Override public void editorActivated(final IFile editorFile) { final EditorFileConfig fileEditorConfig = getEditorFileConfig(editorFile); System.out.println("Editor activated: " + fileEditorConfig); for (final ConfigProperty<?> configProperty : fileEditorConfig.getConfigProperties()) { configProperty.accept(this); } } private EditorFileConfig getEditorFileConfig(final IFile file) { final String path = file.getLocation().toOSString(); return editorConfigService.getEditorConfig(path); } private void setPreference(final String prefsNodeName, final String key, final String value) { System.out.println(String.format("Setting preference: %s/%s=%s", prefsNodeName, key, value)); final IEclipsePreferences prefs = InstanceScope.INSTANCE.getNode(prefsNodeName); prefs.put(key, value); } @Override public void visitIndentStyle(final ConfigProperty<IndentStyleOption> property) { final Boolean spacesForTabs = property.getValue().equals(IndentStyleOption.SPACE); setPreference("org.eclipse.ui.editors", "spacesForTabs", spacesForTabs.toString()); } @Override public void visitIndentSize(final ConfigProperty<Integer> property) { setPreference("org.eclipse.ui.editors", "tabWidth", property.getValue().toString()); } @Override public void visitTabWidth(final ConfigProperty<Integer> property) { setPreference("org.eclipse.jdt.core", "org.eclipse.jdt.core.formatter.tabulation.size", property.getValue().toString()); } @Override public void visitEndOfLine(final ConfigProperty<EndOfLineOption> property) { setPreference("org.eclipse.core.runtime", "line.separator", property.getValue().getEndOfLineString()); } @Override public void visitCharset(final ConfigProperty<String> property) { setPreference("org.eclipse.core.resources", "encoding", property.getValue().toUpperCase()); } @Override public void visitTrimTrailingWhitespace(final ConfigProperty<Boolean> property) { setPreference("org.eclipse.jdt.ui", "sp_cleanup.remove_trailing_whitespaces", property.getValue().toString()); } @Override public void visitInsertFinalNewLine(final ConfigProperty<Boolean> property) { 
setPreference("org.eclipse.jdt.core", "org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing", property.getValue().toString()); } }
set tab prefs for xml editors
editorconfig-eclipse-plugin/src/main/java/com/ncjones/editorconfig/eclipse/EditorConfigEditorActivationHandler.java
set tab prefs for xml editors
<ide><path>editorconfig-eclipse-plugin/src/main/java/com/ncjones/editorconfig/eclipse/EditorConfigEditorActivationHandler.java <ide> public void visitIndentStyle(final ConfigProperty<IndentStyleOption> property) { <ide> final Boolean spacesForTabs = property.getValue().equals(IndentStyleOption.SPACE); <ide> setPreference("org.eclipse.ui.editors", "spacesForTabs", spacesForTabs.toString()); <add> setPreference("org.eclipse.wst.xml.core", "indentationChar", spacesForTabs ? "space" : "tab"); <add> setPreference("org.eclipse.ant.ui", "formatter_tab_char", Boolean.toString(!spacesForTabs)); <add> <ide> } <ide> <ide> @Override <ide> public void visitIndentSize(final ConfigProperty<Integer> property) { <del> setPreference("org.eclipse.ui.editors", "tabWidth", property.getValue().toString()); <add> final String indentSizeString = property.getValue().toString(); <add> setPreference("org.eclipse.ui.editors", "tabWidth", indentSizeString); <add> setPreference("org.eclipse.wst.xml.core", "indentationSize", indentSizeString); <add> setPreference("org.eclipse.ant.ui", "formatter_tab_size", indentSizeString); <ide> } <ide> <ide> @Override
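Both hunks above route through the handler's setPreference helper, which writes workspace-scoped Eclipse preferences. A minimal sketch of that mechanism, assuming an Eclipse runtime with the org.eclipse.core.runtime preferences API on the classpath; the node and key names are the ones used in the file itself:

    import org.eclipse.core.runtime.preferences.IEclipsePreferences;
    import org.eclipse.core.runtime.preferences.InstanceScope;

    public class XmlTabPrefsSketch {

        // Mirrors setPreference(...) in the handler: resolve the instance-scope
        // preference node for a plugin and store a string value under the key.
        static void setPreference(String node, String key, String value) {
            IEclipsePreferences prefs = InstanceScope.INSTANCE.getNode(node);
            prefs.put(key, value);
        }

        // Applies indent_style to the XML and Ant editors, as the first hunk does.
        static void applySpacesForTabs(boolean spacesForTabs) {
            setPreference("org.eclipse.wst.xml.core", "indentationChar",
                    spacesForTabs ? "space" : "tab");
            setPreference("org.eclipse.ant.ui", "formatter_tab_char",
                    Boolean.toString(!spacesForTabs));
        }
    }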
Java
apache-2.0
f2465b5f6f46118859a81f678877f4792efe2df3
0
hellojavaer/ddr,hellojavaer/ddal
/* * Copyright 2017-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hellojavaer.ddal.datasource; import org.hellojavaer.ddal.core.utils.HttpUtils; import org.hellojavaer.ddal.ddr.datasource.jdbc.DDRDataSource; import org.hellojavaer.ddal.ddr.shard.ShardRouter; import org.hellojavaer.ddal.sequence.Sequence; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.NoUniqueBeanDefinitionException; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.context.support.FileSystemXmlApplicationContext; import org.springframework.context.support.GenericXmlApplicationContext; import org.springframework.core.io.ByteArrayResource; import org.springframework.core.io.Resource; import javax.sql.DataSource; import java.io.PrintWriter; import java.sql.Connection; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.LinkedHashMap; import java.util.Map; /** * * @author <a href="mailto:[email protected]">Kaiming Zou</a>,created on 22/08/2017. */ public class DefaultDDALDataSource implements DDALDataSource { private static final String JDBC_DDAL_PROTOCOL_PREFIX = "jdbc:ddal:"; private static final String THICK_PROTOCOL_PREFIX = "thick:"; private static final String THIN_PROTOCOL_PREFIX = "thin:"; @Qualifier private DataSource dataSource; private Sequence sequence; private ShardRouter shardRouter; public DefaultDDALDataSource(String url) { this(url, null, null); } /** * * jdbc:ddal:thick: * * jdbc:ddal:thin: * */ public DefaultDDALDataSource(String url, String username, String password) { if (url != null) { url = url.trim(); } if (url == null || url.length() == 0) { throw new IllegalArgumentException("url can't be null"); } if (!url.startsWith(JDBC_DDAL_PROTOCOL_PREFIX)) { throw new IllegalArgumentException("url must be start with '" + JDBC_DDAL_PROTOCOL_PREFIX + "'"); } String url1 = url.substring(JDBC_DDAL_PROTOCOL_PREFIX.length()).trim(); ApplicationContext context; if (url1.startsWith(THICK_PROTOCOL_PREFIX)) { String url2 = url1.substring(THICK_PROTOCOL_PREFIX.length()); if (url2.startsWith("classpath:") || url2.startsWith("classpath*:")) { context = new ClassPathXmlApplicationContext(url2); } else if (url2.startsWith("file:")) { context = new FileSystemXmlApplicationContext(url2); } else if (url2.startsWith("//")) { url2 = "http:" + url2; Map<String, Object> param = new LinkedHashMap<>(); param.put("username", username); param.put("password", password); String content = HttpUtils.sendPost(url2, param); Resource resource = new ByteArrayResource(content.getBytes()); GenericXmlApplicationContext genericXmlApplicationContext = new GenericXmlApplicationContext(); genericXmlApplicationContext.load(resource); genericXmlApplicationContext.refresh(); context = genericXmlApplicationContext; } else { throw new 
IllegalArgumentException("Unsupported protocol " + url); } } else if (url1.startsWith(THIN_PROTOCOL_PREFIX)) { // TODOD throw new IllegalArgumentException("Unsupported protocol jdbc:ddal:thin:"); } else { throw new IllegalArgumentException("Unsupported protocol " + url); } this.dataSource = getBean(context, DDRDataSource.class, "ddrDataSource"); this.sequence = getBean(context, Sequence.class, "sequence"); this.shardRouter = getBean(context, ShardRouter.class, "shardRouter"); } private <T> T getBean(ApplicationContext context, Class<T> requiredType, String beanName) { Map<String, T> map = context.getBeansOfType(requiredType); if (map == null || map.isEmpty()) { throw new NoSuchBeanDefinitionException(requiredType); } if (map.size() == 1) { return map.values().iterator().next(); } else { if (beanName != null) { T t = map.get(beanName); if (t != null) { return t; } } throw new NoUniqueBeanDefinitionException(requiredType, map.keySet()); } } protected DataSource getDataSource() { return dataSource; } @Override public Connection getConnection() throws SQLException { return getDataSource().getConnection(); } @Override public Connection getConnection(String username, String password) throws SQLException { return getDataSource().getConnection(username, password); } @Override public <T> T unwrap(Class<T> iface) throws SQLException { return getDataSource().unwrap(iface); } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return getDataSource().isWrapperFor(iface); } @Override public PrintWriter getLogWriter() throws SQLException { return getDataSource().getLogWriter(); } @Override public void setLogWriter(PrintWriter out) throws SQLException { getDataSource().setLogWriter(out); } @Override public void setLoginTimeout(int seconds) throws SQLException { getDataSource().setLoginTimeout(seconds); } @Override public int getLoginTimeout() throws SQLException { return getDataSource().getLoginTimeout(); } @Override public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException { return getDataSource().getParentLogger(); } @Override public Sequence getSequence() { return sequence; } public ShardRouter getShardRouter() { return shardRouter; } }
ddal-datasource/src/main/java/org/hellojavaer/ddal/datasource/DefaultDDALDataSource.java
/* * Copyright 2017-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hellojavaer.ddal.datasource; import org.hellojavaer.ddal.core.utils.HttpUtils; import org.hellojavaer.ddal.ddr.datasource.jdbc.DDRDataSource; import org.hellojavaer.ddal.ddr.shard.ShardRouter; import org.hellojavaer.ddal.sequence.Sequence; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.NoUniqueBeanDefinitionException; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.context.support.FileSystemXmlApplicationContext; import org.springframework.context.support.GenericXmlApplicationContext; import org.springframework.core.io.ByteArrayResource; import org.springframework.core.io.Resource; import javax.sql.DataSource; import java.io.PrintWriter; import java.sql.Connection; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.LinkedHashMap; import java.util.Map; /** * * @author <a href="mailto:[email protected]">Kaiming Zou</a>,created on 22/08/2017. */ public class DefaultDDALDataSource implements DDALDataSource { private static final String JDBC_DDAL_PROTOCOL_PREFIX = "jdbc:ddal:"; private static final String THICK_PROTOCOL_PREFIX = "thick:"; private static final String THIN_PROTOCOL_PREFIX = "thin:"; @Qualifier private DataSource dataSource; private Sequence sequence; private ShardRouter shardRouter; public DefaultDDALDataSource(String url) { this(url, null, null); } /** * * jdbc:ddal:thick: * * jdbc:ddal:thin: * */ public DefaultDDALDataSource(String url, String username, String password) { if (url != null) { url = url.trim(); } if (url == null || url.length() == 0) { throw new IllegalArgumentException("url can't be null"); } if (!url.startsWith(JDBC_DDAL_PROTOCOL_PREFIX)) { throw new IllegalArgumentException("url must be start with '" + JDBC_DDAL_PROTOCOL_PREFIX + "'"); } String url1 = url.substring(JDBC_DDAL_PROTOCOL_PREFIX.length()).trim(); ApplicationContext context; if (url1.startsWith(THICK_PROTOCOL_PREFIX)) { String url2 = url1.substring(THICK_PROTOCOL_PREFIX.length()); if (url2.startsWith("classpath:") || url2.startsWith("classpath*:")) { context = new ClassPathXmlApplicationContext(url2); } else if (url2.startsWith("file:")) { context = new FileSystemXmlApplicationContext(url2); } else if (url2.startsWith("//")) { url2 = "http:" + url2; Map<String, Object> param = new LinkedHashMap<>(); param.put("username", username); param.put("password", password); String content = HttpUtils.sendPost(url2, param); Resource resource = new ByteArrayResource(content.getBytes()); GenericXmlApplicationContext genericXmlApplicationContext = new GenericXmlApplicationContext(); genericXmlApplicationContext.load(resource); genericXmlApplicationContext.refresh(); context = genericXmlApplicationContext; } else { throw new 
IllegalArgumentException("Unsupported protocol:" + url); } } else if (url1.startsWith(THIN_PROTOCOL_PREFIX)) { // TODOD throw new IllegalArgumentException("Unsupport 'jdbc:ddal:thin:' protocol now"); } else { throw new IllegalArgumentException("Unsupported protocol:" + url); } this.dataSource = getBean(context, DDRDataSource.class, "ddrDataSource"); this.sequence = getBean(context, Sequence.class, "sequence"); this.shardRouter = getBean(context, ShardRouter.class, "shardRouter"); } private <T> T getBean(ApplicationContext context, Class<T> requiredType, String beanName) { Map<String, T> map = context.getBeansOfType(requiredType); if (map == null || map.isEmpty()) { throw new NoSuchBeanDefinitionException(requiredType); } if (map.size() == 1) { return map.values().iterator().next(); } else { if (beanName != null) { T t = map.get(beanName); if (t != null) { return t; } } throw new NoUniqueBeanDefinitionException(requiredType, map.keySet()); } } protected DataSource getDataSource() { return dataSource; } @Override public Connection getConnection() throws SQLException { return getDataSource().getConnection(); } @Override public Connection getConnection(String username, String password) throws SQLException { return getDataSource().getConnection(username, password); } @Override public <T> T unwrap(Class<T> iface) throws SQLException { return getDataSource().unwrap(iface); } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return getDataSource().isWrapperFor(iface); } @Override public PrintWriter getLogWriter() throws SQLException { return getDataSource().getLogWriter(); } @Override public void setLogWriter(PrintWriter out) throws SQLException { getDataSource().setLogWriter(out); } @Override public void setLoginTimeout(int seconds) throws SQLException { getDataSource().setLoginTimeout(seconds); } @Override public int getLoginTimeout() throws SQLException { return getDataSource().getLoginTimeout(); } @Override public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException { return getDataSource().getParentLogger(); } @Override public Sequence getSequence() { return sequence; } public ShardRouter getShardRouter() { return shardRouter; } }
update
ddal-datasource/src/main/java/org/hellojavaer/ddal/datasource/DefaultDDALDataSource.java
update
<ide><path>ddal-datasource/src/main/java/org/hellojavaer/ddal/datasource/DefaultDDALDataSource.java <ide> genericXmlApplicationContext.refresh(); <ide> context = genericXmlApplicationContext; <ide> } else { <del> throw new IllegalArgumentException("Unsupported protocol:" + url); <add> throw new IllegalArgumentException("Unsupported protocol " + url); <ide> } <ide> } else if (url1.startsWith(THIN_PROTOCOL_PREFIX)) { <ide> // TODOD <del> throw new IllegalArgumentException("Unsupport 'jdbc:ddal:thin:' protocol now"); <add> throw new IllegalArgumentException("Unsupported protocol jdbc:ddal:thin:"); <ide> } else { <del> throw new IllegalArgumentException("Unsupported protocol:" + url); <add> throw new IllegalArgumentException("Unsupported protocol " + url); <ide> } <ide> this.dataSource = getBean(context, DDRDataSource.class, "ddrDataSource"); <ide> this.sequence = getBean(context, Sequence.class, "sequence");
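For context, the constructor in this record dispatches on the URL prefix (jdbc:ddal: followed by thick: or thin:), and the hunks above only reword the rejection messages. A hedged usage sketch, assuming a Spring XML file defining the ddrDataSource, sequence and shardRouter beans is available on the classpath; the config file name below is hypothetical:

    import java.sql.Connection;
    import org.hellojavaer.ddal.datasource.DefaultDDALDataSource;

    public class DdalUrlSketch {
        public static void main(String[] args) throws Exception {
            // Accepted thick-protocol locations: classpath:, file:, or //host
            // (fetched over HTTP); anything else fails with
            // "Unsupported protocol " + url, per the diff above.
            DefaultDDALDataSource ds = new DefaultDDALDataSource(
                    "jdbc:ddal:thick:classpath:ddal-config.xml"); // hypothetical config
            try (Connection con = ds.getConnection()) {
                // statements here are routed through the wrapped DDRDataSource
            }
        }
    }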
Java
mit
11bd53d54b0b66aaf56a955d742cbe0e40c00398
0
brightsparklabs/asanti
/* * Created by brightSPARK Labs * www.brightsparklabs.com */ package com.brightsparklabs.asanti.reader.parser; import com.brightsparklabs.asanti.model.schema.constraint.AsnSchemaConstraint; import com.brightsparklabs.asanti.model.schema.typedefinition.*; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.text.ParseException; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Logic for parsing a Type Definition from a module within an ASN.1 schema * * @author brightSPARK Labs */ public final class AsnSchemaTypeDefinitionParser { // ------------------------------------------------------------------------- // CONSTANTS // ------------------------------------------------------------------------- /** pattern to match a SET/SEQUENCE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SEQUENCE = Pattern.compile( "^SEQUENCE ?\\{(.+)\\} ?(\\(.+\\))?$"); /** pattern to match a SET/SEQUENCE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SET = Pattern.compile( "^SET ?\\{(.+)\\} ?(\\(.+\\))?$"); /** pattern to match a ENUMERATED type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_ENUMERATED = Pattern.compile( "^ENUMERATED ?\\{(.+)\\}$"); /** pattern to match a CHOICE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_CHOICE = Pattern.compile( "^CHOICE ?\\{(.+)\\} ?(\\(.+\\))?$"); /** pattern to match a SEQUENCE OF type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SEQUENCE_OF = Pattern.compile( "^SEQUENCE( .+)? OF (.+)$"); /** pattern to match a SET OF type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SET_OF = Pattern.compile( "^SET( .+)? OF (.+)$"); /** pattern to match a CLASS type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_CLASS = Pattern.compile( "^CLASS ?\\{(.+)\\}$"); /** pattern to match an OCTET STRING type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_OCTET_STRING = Pattern.compile( "^OCTET STRING ?(\\((.+)\\))?$"); /** pattern to match a BIT STRING type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_BIT_STRING = Pattern.compile( "^BIT STRING ?(\\{(.+?)\\})? ?(DEFAULT ?\\{(.+?)\\})? 
?(\\((.+?)\\))?$"); /** pattern to match an IA5String type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_IA5_STRING = Pattern.compile( "^IA5String ?(\\((.+)\\))?$"); /** pattern to match a UTF8 type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_UTF8_STRING = Pattern.compile( "^UTF8String ?(\\((.+)\\))?$"); /** pattern to match a NumericString type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_NUMERIC_STRING = Pattern.compile( "^NumericString ?(\\((.+)\\))?$"); /** pattern to match a VisibleString type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_VISIBLE_STRING = Pattern.compile( "^VisibleString ?(\\((.+)\\))?$"); /** pattern to match a GeneralString type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_GENERAL_STRING = Pattern.compile( "^GeneralString ?(\\((.+)\\))?$"); /** pattern to match a GeneralizedTime type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_GENERALIZED_TIME = Pattern.compile( "^GeneralizedTime$"); /** pattern to match an Integer type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_INTEGER = Pattern.compile( "^INTEGER ?(\\{(.+?)\\})? ?(\\((.+?)\\))? ?(DEFAULT ?.+)?$"); // TODO ASN-25 remove this once ASN-25 is completed /** pattern to match a PRIMITIVE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_PRIMITIVE = Pattern.compile( "^(BOOLEAN|DATE|CHARACTER STRING|DATE_TIME|DURATION|EMBEDDED PDV|EXTERNAL|INTEGER|OID-IRI|NULL|OBJECT IDENTIFIER|REAL|RELATIVE-OID-IRI|RELATIVE-OID|BMPString|GraphicString|ISO646String|PrintableString|TeletexString|T61String|UniversalString|VideotexString|TIME|TIME-OF-DAY|CHARACTER STRING) ?(\\{(.+)\\})? ?(\\((.+)\\))?$"); /** error message if an unknown ASN.1 built-in type is found */ private static final String ERROR_UNKNOWN_BUILT_IN_TYPE = "Parser expected a constructed built-in type (SEQUENCE, SET, ENUMERATED, SEQUENCE OF, SET OF, CHOICE, CLASS) or a primitive built-in type (BIT STRING, GeneralizedTime, Ia5String, INTEGER, NumericString, OCTET STRING, Utf8String, VisibleString, BOOLEAN, DATE, CHARACTER STRING, DATE_TIME, DURATION, EMBEDDED PDV, EXTERNAL, INTEGER, OID-IRI, NULL, OBJECT IDENTIFIER, REAL, RELATIVE-OID-IRI, RELATIVE-OID, BmpString, GeneralString, GraphicString, Iso646String, PrintableString, TeletexString, T61String, UniversalString, VideotexString, TIME, TIME-OF-DAY, CHARACTER STRING) but found: "; // ------------------------------------------------------------------------- // CLASS VARIABLES // ------------------------------------------------------------------------- /** class logger */ private static final Logger logger = LoggerFactory.getLogger(AsnSchemaTypeDefinitionParser.class .getName()); // ------------------------------------------------------------------------- // CONSTRUCTION // ------------------------------------------------------------------------- /** * Private constructor. There should be no need to ever instantiate this static class. */ private AsnSchemaTypeDefinitionParser() { throw new AssertionError(); } // ------------------------------------------------------------------------- // PUBLIC METHODS // ------------------------------------------------------------------------- /** * Parses a type definition from a module from an ASN.1 schema * * @param name * the name of the defined type (i.e. the text on the left hand side of the {@code * ::=}) * @param value * the value of the defined type (i.e. 
the text on the right hand side of the {@code * ::=}) * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} objects representing the parsed * type definitions * * @throws ParseException * if any errors occur while parsing the type */ public static ImmutableList<AsnSchemaTypeDefinition> parse(String name, String value) throws ParseException { logger.debug("Found type definition: {} = {}", name, value); // check if defining a SEQUENCE Matcher matcher = PATTERN_TYPE_DEFINITION_SEQUENCE.matcher(value); if (matcher.matches()) { return parseSequence(name, matcher); } // check if defining a SET matcher = PATTERN_TYPE_DEFINITION_SET.matcher(value); if (matcher.matches()) { return parseSet(name, matcher); } // check if defining a CHOICE matcher = PATTERN_TYPE_DEFINITION_CHOICE.matcher(value); if (matcher.matches()) { return parseChoice(name, matcher); } // check if defining an ENUMERATED matcher = PATTERN_TYPE_DEFINITION_ENUMERATED.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseEnumerated(name, matcher)); } // check if defining an OCTET STRING matcher = PATTERN_TYPE_DEFINITION_OCTET_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseOctetString(name, matcher)); } // check if defining a BIT STRING matcher = PATTERN_TYPE_DEFINITION_BIT_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseBitString(name, matcher)); } // check if defining an Ia5String matcher = PATTERN_TYPE_DEFINITION_IA5_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseIA5String(name, matcher)); } // check if defining a Utf8String matcher = PATTERN_TYPE_DEFINITION_UTF8_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseUTF8String(name, matcher)); } // check if defining a NumericString matcher = PATTERN_TYPE_DEFINITION_NUMERIC_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseNumericString(name, matcher)); } // check if defining a VisibleString matcher = PATTERN_TYPE_DEFINITION_VISIBLE_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseVisibleString(name, matcher)); } // check if defining a GeneralString matcher = PATTERN_TYPE_DEFINITION_GENERAL_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseGeneralString(name, matcher)); } // check if defining a GeneralizedTime matcher = PATTERN_TYPE_DEFINITION_GENERALIZED_TIME.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseGeneralizedTime(name)); } // check if defining a Integer matcher = PATTERN_TYPE_DEFINITION_INTEGER.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseInteger(name, matcher)); } // check if defining a PRIMITIVE matcher = PATTERN_TYPE_DEFINITION_PRIMITIVE.matcher(value); if (matcher.matches()) { /* * TODO ASN-25 - handle all primitive types. 
Currently this is just * a catch-all to log warnings */ final String builtinType = matcher.group(1); final String error = String.format( "Cannot parse unsupported ASN.1 built-in type: %s for type: %s", builtinType, name); logger.warn(error); return ImmutableList.<AsnSchemaTypeDefinition>of(AsnSchemaTypeDefinition.NULL); } // check if defining a SEQUENCE OF matcher = PATTERN_TYPE_DEFINITION_SEQUENCE_OF.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseSequenceOf(name, matcher)); } // check if defining a SET OF matcher = PATTERN_TYPE_DEFINITION_SET_OF.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseSetOf(name, matcher)); } // check if defining a CLASS matcher = PATTERN_TYPE_DEFINITION_CLASS.matcher(value); if (matcher.matches()) { // TODO ASN-39 - handle CLASS logger.warn("Type Definitions for CLASS not yet supported"); return ImmutableList.<AsnSchemaTypeDefinition>of(AsnSchemaTypeDefinition.NULL); } // unknown definition final String error = ERROR_UNKNOWN_BUILT_IN_TYPE + name + " ::= " + value; throw new ParseException(error, -1); } // ------------------------------------------------------------------------- // PRIVATE METHODS // ------------------------------------------------------------------------- /** * Parses a SEQUENCE type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SEQUENCE} * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static ImmutableList<AsnSchemaTypeDefinition> parseSequence(String name, Matcher matcher) throws ParseException { final String componentTypesText = matcher.group(1); final String constraintText = Strings.nullToEmpty(matcher.group(2)); final ImmutableList<AsnSchemaComponentType> componentTypes = AsnSchemaComponentTypeParser.parse(name, componentTypesText); // parse any pseudo type definitions from returned component types final List<AsnSchemaTypeDefinition> parsedTypes = parsePseudoTypes(componentTypes); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); final AsnSchemaTypeDefinitionSequence typeDefinition = new AsnSchemaTypeDefinitionSequence( name, componentTypes, constraint); parsedTypes.add(typeDefinition); return ImmutableList.copyOf(parsedTypes); } /** * Parses a SET type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SET} * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static ImmutableList<AsnSchemaTypeDefinition> parseSet(String name, Matcher matcher) throws ParseException { final String componentTypesText = matcher.group(1); final String constraintText = Strings.nullToEmpty(matcher.group(2)); final ImmutableList<AsnSchemaComponentType> componentTypes = AsnSchemaComponentTypeParser.parse(name, componentTypesText); // parse any pseudo type definitions from returned component types final List<AsnSchemaTypeDefinition> parsedTypes = parsePseudoTypes(componentTypes); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); final AsnSchemaTypeDefinitionSet typeDefinition = new AsnSchemaTypeDefinitionSet(name, componentTypes, constraint); parsedTypes.add(typeDefinition); return 
ImmutableList.copyOf(parsedTypes); } /** * Parses a CHOICE type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_CHOICE} * * @return an {@link AsnSchemaTypeDefinitionChoice} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static ImmutableList<AsnSchemaTypeDefinition> parseChoice(String name, Matcher matcher) throws ParseException { final String componentTypesText = matcher.group(1); final String constraintText = Strings.nullToEmpty(matcher.group(2)); final ImmutableList<AsnSchemaComponentType> componentTypes = AsnSchemaComponentTypeParser.parse(name, componentTypesText); // parse any pseudo type definitions from returned component types final List<AsnSchemaTypeDefinition> parsedTypes = parsePseudoTypes(componentTypes); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); final AsnSchemaTypeDefinitionChoice typeDefinition = new AsnSchemaTypeDefinitionChoice(name, componentTypes, constraint); parsedTypes.add(typeDefinition); return ImmutableList.copyOf(parsedTypes); } /** * Parses an ENUMERATED type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_ENUMERATED} * * @return an {@link AsnSchemaTypeDefinitionEnumerated} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionEnumerated parseEnumerated(String name, Matcher matcher) throws ParseException { final String enumeratedOptionsText = matcher.group(1); final ImmutableList<AsnSchemaNamedTag> enumeratedOptions = AsnSchemaNamedTagParser.parseEnumeratedOptions(enumeratedOptionsText); return new AsnSchemaTypeDefinitionEnumerated(name, enumeratedOptions); } /** * Parses an OCTET STRING type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_OCTET_STRING} * * @return an {@link AsnSchemaTypeDefinitionOctetString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionOctetString parseOctetString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseOctetString(name, constraintText); } /** * Parses a BIT STRING type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_BIT_STRING} * * @return an {@link AsnSchemaTypeDefinitionBitString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionBitString parseBitString(String name, Matcher matcher) throws ParseException { // TODO ASN-87 - parse list of named bits final String constraintText = Strings.nullToEmpty(matcher.group(6)); return AsnSchemaTypeDefinitionPrimitiveParser.parseBitString(name, constraintText); } /** * Parses an Ia5String type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_IA5_STRING} * * @return an {@link AsnSchemaTypeDefinitionIa5String} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionIa5String parseIA5String(String name, Matcher 
matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseIA5String(name, constraintText); } /** * Parses a Utf8String type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_UTF8_STRING} * * @return an {@link AsnSchemaTypeDefinitionUtf8String} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionUtf8String parseUTF8String(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseUTF8String(name, constraintText); } /** * Parses a NumericString type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_NUMERIC_STRING} * * @return an {@link AsnSchemaTypeDefinitionNumericString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionNumericString parseNumericString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseNumericString(name, constraintText); } /** * Parses a VisibleString type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_VISIBLE_STRING} * * @return an {@link AsnSchemaTypeDefinitionVisibleString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionVisibleString parseVisibleString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseVisibleString(name, constraintText); } /** * Parses a General type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_GENERAL_STRING} * * @return an {@link AsnSchemaTypeDefinitionGeneralString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionGeneralString parseGeneralString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseGeneralString(name, constraintText); } /** * Parses a GeneralizedTime type definition * * @param name * name of the defined type * * @return an {@link AsnSchemaTypeDefinitionGeneralizedTime} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionGeneralizedTime parseGeneralizedTime(String name) throws ParseException { // sub-type constraints are not applicable to GeneralizedTime. 
return AsnSchemaTypeDefinitionPrimitiveParser.parseGeneralizedTime(name); } /** * Parses an Integer type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_INTEGER} * * @return an {@link AsnSchemaTypeDefinitionInteger} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionInteger parseInteger(String name, Matcher matcher) throws ParseException { final String distinguishedValuesText = matcher.group(2); final ImmutableList<AsnSchemaNamedTag> distinguishedValues = AsnSchemaNamedTagParser.parseIntegerDistinguishedValues(distinguishedValuesText); final String constraintText = Strings.nullToEmpty(matcher.group(3)); return AsnSchemaTypeDefinitionPrimitiveParser.parseInteger(name, distinguishedValues, constraintText); } /** * Parses a SEQUENCE OF type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SEQUENCE_OF} * * @return an {@link AsnSchemaTypeDefinitionSequenceOf} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionSequenceOf parseSequenceOf(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(1)); final String elementTypeName = matcher.group(2); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); return new AsnSchemaTypeDefinitionSequenceOf(name, elementTypeName, constraint); } /** * Parses a SET OF type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SET_OF} * * @return an {@link AsnSchemaTypeDefinitionSetOf} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionSetOf parseSetOf(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(1)); final String elementTypeName = matcher.group(2); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); return new AsnSchemaTypeDefinitionSetOf(name, elementTypeName, constraint); } /** * Parses pseudo type definitions found in the supplied list of {@link AsnSchemaComponentType}. * * <p>Refer to {@code /docs/design.md} for details of the design. * * @param componentTypes * list of component types to parse * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} representing the parsed pseudo * type definitions * * @throws ParseException * if any errors occur while parsing the type */ private static List<AsnSchemaTypeDefinition> parsePseudoTypes( Iterable<AsnSchemaComponentType> componentTypes) throws ParseException { final List<AsnSchemaTypeDefinition> parsedTypes = Lists.newArrayList(); for (final AsnSchemaComponentType component : componentTypes) { if (component instanceof AsnSchemaComponentTypeGenerated) { final AsnSchemaComponentTypeGenerated componentGenerated = (AsnSchemaComponentTypeGenerated) component; final String pseudoTypeDefinitionText = componentGenerated.getTypeDefinitionText(); final ImmutableList<AsnSchemaTypeDefinition> pseudoTypeDefinitions = parse(component .getTypeName(), pseudoTypeDefinitionText); parsedTypes.addAll(pseudoTypeDefinitions); } } return parsedTypes; } }
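The parser above is entirely regex-driven: each branch of parse(name, value) tries one of the patterns declared at the top of the class. A standalone sketch showing how one of those expressions, PATTERN_TYPE_DEFINITION_SEQUENCE_OF, splits a definition into its optional constraint and element type; the sample ASN.1 text is invented for illustration:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SequenceOfPatternSketch {

        // Same expression as PATTERN_TYPE_DEFINITION_SEQUENCE_OF in the class above.
        private static final Pattern SEQUENCE_OF =
                Pattern.compile("^SEQUENCE( .+)? OF (.+)$");

        public static void main(String[] args) {
            Matcher m = SEQUENCE_OF.matcher("SEQUENCE (SIZE (1..100)) OF Person");
            if (m.matches()) {
                System.out.println("constraint:" + m.group(1)); // " (SIZE (1..100))"
                System.out.println("element: " + m.group(2));   // "Person"
            }
        }
    }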
src/main/java/com/brightsparklabs/asanti/reader/parser/AsnSchemaTypeDefinitionParser.java
/* * Created by brightSPARK Labs * www.brightsparklabs.com */ package com.brightsparklabs.asanti.reader.parser; import com.brightsparklabs.asanti.model.schema.constraint.AsnSchemaConstraint; import com.brightsparklabs.asanti.model.schema.typedefinition.*; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.text.ParseException; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Logic for parsing a Type Definition from a module within an ASN.1 schema * * @author brightSPARK Labs */ public final class AsnSchemaTypeDefinitionParser { // ------------------------------------------------------------------------- // CONSTANTS // ------------------------------------------------------------------------- /** pattern to match a SET/SEQUENCE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SEQUENCE = Pattern.compile( "^SEQUENCE ?\\{(.+)\\} ?(\\(.+\\))?$"); /** pattern to match a SET/SEQUENCE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SET = Pattern.compile( "^SET ?\\{(.+)\\} ?(\\(.+\\))?$"); /** pattern to match a ENUMERATED type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_ENUMERATED = Pattern.compile( "^ENUMERATED ?\\{(.+)\\}$"); /** pattern to match a CHOICE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_CHOICE = Pattern.compile( "^CHOICE ?\\{(.+)\\} ?(\\(.+\\))?$"); /** pattern to match a SEQUENCE OF type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SEQUENCE_OF = Pattern.compile( "^SEQUENCE( .+)? OF (.+)$"); /** pattern to match a SET OF type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_SET_OF = Pattern.compile( "^SET( .+)? OF (.+)$"); /** pattern to match a CLASS type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_CLASS = Pattern.compile( "^CLASS ?\\{(.+)\\}$"); /** pattern to match an OCTET STRING type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_OCTET_STRING = Pattern.compile( "^OCTET STRING ?(\\((.+)\\))?$"); /** pattern to match a BIT STRING type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_BIT_STRING = Pattern.compile( "^BIT STRING ?(\\{(.+?)\\})? ?(DEFAULT ?\\{(.+?)\\})? 
?(\\((.+?)\\))?$"); /** pattern to match an IA5String type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_IA5_STRING = Pattern.compile( "^IA5String ?(\\((.+)\\))?$"); /** pattern to match a UTF8 type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_UTF8_STRING = Pattern.compile( "^UTF8String ?(\\((.+)\\))?$"); /** pattern to match a NumericString type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_NUMERIC_STRING = Pattern.compile( "^NumericString ?(\\((.+)\\))?$"); /** pattern to match a VisibleString type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_VISIBLE_STRING = Pattern.compile( "^VisibleString ?(\\((.+)\\))?$"); /** pattern to match a GeneralString type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_GENERAL_STRING = Pattern.compile( "^GeneralString ?(\\((.+)\\))?$"); /** pattern to match a GeneralizedTime type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_GENERALIZED_TIME = Pattern.compile( "^GeneralizedTime$"); /** pattern to match an Integer type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_INTEGER = Pattern.compile( "^INTEGER ?(\\{(.+?)\\})? ?(\\((.+?)\\))? ?(DEFAULT ?.+)?$"); // TODO ASN-25 remove this once ASN-25 is completed /** pattern to match a PRIMITIVE type definition */ private static final Pattern PATTERN_TYPE_DEFINITION_PRIMITIVE = Pattern.compile( "^(BOOLEAN|DATE|CHARACTER STRING|DATE_TIME|DURATION|EMBEDDED PDV|EXTERNAL|INTEGER|OID-IRI|NULL|OBJECT IDENTIFIER|REAL|RELATIVE-OID-IRI|RELATIVE-OID|BMPString|GraphicString|ISO646String|PrintableString|TeletexString|T61String|UniversalString|VideotexString|TIME|TIME-OF-DAY|CHARACTER STRING) ?(\\{(.+)\\})? ?(\\((.+)\\))?$"); /** error message if an unknown ASN.1 built-in type is found */ private static final String ERROR_UNKNOWN_BUILT_IN_TYPE = "Parser expected a constructed built-in type (SEQUENCE, SET, ENUMERATED, SEQUENCE OF, SET OF, CHOICE, CLASS) or a primitive built-in type (BIT STRING, GeneralizedTime, Ia5String, INTEGER, NumericString, OCTET STRING, Utf8String, VisibleString, BOOLEAN, DATE, CHARACTER STRING, DATE_TIME, DURATION, EMBEDDED PDV, EXTERNAL, INTEGER, OID-IRI, NULL, OBJECT IDENTIFIER, REAL, RELATIVE-OID-IRI, RELATIVE-OID, BmpString, GeneralString, GraphicString, Iso646String, PrintableString, TeletexString, T61String, UniversalString, VideotexString, TIME, TIME-OF-DAY, CHARACTER STRING) but found: "; // ------------------------------------------------------------------------- // CLASS VARIABLES // ------------------------------------------------------------------------- /** class logger */ private static final Logger logger = LoggerFactory.getLogger(AsnSchemaTypeDefinitionParser.class .getName()); // ------------------------------------------------------------------------- // CONSTRUCTION // ------------------------------------------------------------------------- /** * Private constructor. There should be no need to ever instantiate this static class. */ private AsnSchemaTypeDefinitionParser() { throw new AssertionError(); } // ------------------------------------------------------------------------- // PUBLIC METHODS // ------------------------------------------------------------------------- /** * Parses a type definition from a module from an ASN.1 schema * * @param name * the name of the defined type (i.e. the text on the left hand side of the {@code * ::=}) * @param value * the value of the defined type (i.e. 
the text on the right hand side of the {@code * ::=}) * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} objects representing the parsed * type definitions * * @throws ParseException * if any errors occur while parsing the type */ public static ImmutableList<AsnSchemaTypeDefinition> parse(String name, String value) throws ParseException { logger.debug("Found type definition: {} = {}", name, value); // check if defining a SEQUENCE Matcher matcher = PATTERN_TYPE_DEFINITION_SEQUENCE.matcher(value); if (matcher.matches()) { return parseSequence(name, matcher); } // check if defining a SET matcher = PATTERN_TYPE_DEFINITION_SET.matcher(value); if (matcher.matches()) { return parseSet(name, matcher); } // check if defining a CHOICE matcher = PATTERN_TYPE_DEFINITION_CHOICE.matcher(value); if (matcher.matches()) { return parseChoice(name, matcher); } // check if defining an ENUMERATED matcher = PATTERN_TYPE_DEFINITION_ENUMERATED.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseEnumerated(name, matcher)); } // check if defining an OCTET STRING matcher = PATTERN_TYPE_DEFINITION_OCTET_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseOctetString(name, matcher)); } // check if defining a BIT STRING matcher = PATTERN_TYPE_DEFINITION_BIT_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseBitString(name, matcher)); } // check if defining an Ia5String matcher = PATTERN_TYPE_DEFINITION_IA5_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseIA5String(name, matcher)); } // check if defining a Utf8String matcher = PATTERN_TYPE_DEFINITION_UTF8_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseUTF8String(name, matcher)); } // check if defining a NumericString matcher = PATTERN_TYPE_DEFINITION_NUMERIC_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseNumericString(name, matcher)); } // check if defining a VisibleString matcher = PATTERN_TYPE_DEFINITION_VISIBLE_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseVisibleString(name, matcher)); } // check if defining a GeneralString matcher = PATTERN_TYPE_DEFINITION_GENERAL_STRING.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseGeneralString(name, matcher)); } // check if defining a GeneralizedTime matcher = PATTERN_TYPE_DEFINITION_GENERALIZED_TIME.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseGeneralizedTime(name)); } // check if defining a Integer matcher = PATTERN_TYPE_DEFINITION_INTEGER.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseInteger(name, matcher)); } // check if defining a PRIMITIVE matcher = PATTERN_TYPE_DEFINITION_PRIMITIVE.matcher(value); if (matcher.matches()) { /* * TODO ASN-25 - handle all primitive types. 
Currently this is just * a catch-all to log warnings */ final String builtinType = matcher.group(1); final String error = String.format( "Cannot parse unsupported ASN.1 built-in type: %s for type: %s", builtinType, name); logger.warn(error); return ImmutableList.<AsnSchemaTypeDefinition>of(AsnSchemaTypeDefinition.NULL); } // check if defining a SEQUENCE OF matcher = PATTERN_TYPE_DEFINITION_SEQUENCE_OF.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseSequenceOf(name, matcher)); } // check if defining a SET OF matcher = PATTERN_TYPE_DEFINITION_SET_OF.matcher(value); if (matcher.matches()) { return ImmutableList.<AsnSchemaTypeDefinition>of(parseSetOf(name, matcher)); } // check if defining a CLASS matcher = PATTERN_TYPE_DEFINITION_CLASS.matcher(value); if (matcher.matches()) { // TODO ASN-39 - handle CLASS logger.warn("Type Definitions for CLASS not yet supported"); return ImmutableList.<AsnSchemaTypeDefinition>of(AsnSchemaTypeDefinition.NULL); } // unknown definition final String error = ERROR_UNKNOWN_BUILT_IN_TYPE + name + " ::= " + value; throw new ParseException(error, -1); } // ------------------------------------------------------------------------- // PRIVATE METHODS // ------------------------------------------------------------------------- /** * Parses a SEQUENCE type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SEQUENCE} * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static ImmutableList<AsnSchemaTypeDefinition> parseSequence(String name, Matcher matcher) throws ParseException { final String componentTypesText = matcher.group(1); final String constraintText = Strings.nullToEmpty(matcher.group(2)); final ImmutableList<AsnSchemaComponentType> componentTypes = AsnSchemaComponentTypeParser.parse(name, componentTypesText); // parse any pseudo type definitions from returned component types final List<AsnSchemaTypeDefinition> parsedTypes = parsePseudoTypes(componentTypes); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); final AsnSchemaTypeDefinitionSequence typeDefinition = new AsnSchemaTypeDefinitionSequence( name, componentTypes, constraint); parsedTypes.add(typeDefinition); return ImmutableList.copyOf(parsedTypes); } /** * Parses a SET type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SET} * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static ImmutableList<AsnSchemaTypeDefinition> parseSet(String name, Matcher matcher) throws ParseException { final String componentTypesText = matcher.group(1); final String constraintText = Strings.nullToEmpty(matcher.group(2)); final ImmutableList<AsnSchemaComponentType> componentTypes = AsnSchemaComponentTypeParser.parse(name, componentTypesText); // parse any pseudo type definitions from returned component types final List<AsnSchemaTypeDefinition> parsedTypes = parsePseudoTypes(componentTypes); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); final AsnSchemaTypeDefinitionSet typeDefinition = new AsnSchemaTypeDefinitionSet(name, componentTypes, constraint); parsedTypes.add(typeDefinition); return 
ImmutableList.copyOf(parsedTypes); } /** * Parses a CHOICE type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_CHOICE} * * @return an {@link AsnSchemaTypeDefinitionChoice} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static ImmutableList<AsnSchemaTypeDefinition> parseChoice(String name, Matcher matcher) throws ParseException { final String componentTypesText = matcher.group(1); final String constraintText = Strings.nullToEmpty(matcher.group(2)); final ImmutableList<AsnSchemaComponentType> componentTypes = AsnSchemaComponentTypeParser.parse(name, componentTypesText); // parse any pseudo type definitions from returned component types final List<AsnSchemaTypeDefinition> parsedTypes = parsePseudoTypes(componentTypes); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); final AsnSchemaTypeDefinitionChoice typeDefinition = new AsnSchemaTypeDefinitionChoice(name, componentTypes, constraint); parsedTypes.add(typeDefinition); return ImmutableList.copyOf(parsedTypes); } /** * Parses an ENUMERATED type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_ENUMERATED} * * @return an {@link AsnSchemaTypeDefinitionEnumerated} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionEnumerated parseEnumerated(String name, Matcher matcher) throws ParseException { final String enumeratedOptionsText = matcher.group(1); final ImmutableList<AsnSchemaNamedTag> enumeratedOptions = AsnSchemaNamedTagParser.parseEnumeratedOptions(enumeratedOptionsText); return new AsnSchemaTypeDefinitionEnumerated(name, enumeratedOptions); } /** * Parses an OCTET STRING type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_OCTET_STRING} * * @return an {@link AsnSchemaTypeDefinitionOctetString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionOctetString parseOctetString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseOctetString(name, constraintText); } /** * Parses a BIT STRING type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_BIT_STRING} * * @return an {@link AsnSchemaTypeDefinitionBitString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionBitString parseBitString(String name, Matcher matcher) throws ParseException { // TODO ASN-87 - parse list of named bits final String constraintText = Strings.nullToEmpty(matcher.group(6)); return AsnSchemaTypeDefinitionPrimitiveParser.parseBitString(name, constraintText); } /** * Parses an Ia5String type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_IA5_STRING} * * @return an {@link AsnSchemaTypeDefinitionIa5String} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionIa5String parseIA5String(String name, Matcher 
matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseIA5String(name, constraintText); } /** * Parses a Utf8String type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_UTF8_STRING} * * @return an {@link AsnSchemaTypeDefinitionUtf8String} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionUtf8String parseUTF8String(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseUTF8String(name, constraintText); } /** * Parses a NumericString type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_NUMERIC_STRING} * * @return an {@link AsnSchemaTypeDefinitionNumericString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionNumericString parseNumericString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseNumericString(name, constraintText); } /** * Parses a VisibleString type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_VISIBLE_STRING} * * @return an {@link AsnSchemaTypeDefinitionVisibleString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionVisibleString parseVisibleString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseVisibleString(name, constraintText); } /** * Parses a General type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_GENERAL_STRING} * * @return an {@link AsnSchemaTypeDefinitionGeneralString} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionGeneralString parseGeneralString(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(2)); return AsnSchemaTypeDefinitionPrimitiveParser.parseGeneralString(name, constraintText); } /** * Parses a GeneralizedTime type definition * * @param name * name of the defined type * * @return an {@link AsnSchemaTypeDefinitionGeneralizedTime} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionGeneralizedTime parseGeneralizedTime(String name) throws ParseException { // Subtype constraints are not applicable to GeneralizedTime. 
return AsnSchemaTypeDefinitionPrimitiveParser.parseGeneralizedTime(name); } /** * Parses an Integer type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_INTEGER} * * @return an {@link AsnSchemaTypeDefinitionInteger} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionInteger parseInteger(String name, Matcher matcher) throws ParseException { final String distinguishedValuesText = matcher.group(2); final ImmutableList<AsnSchemaNamedTag> distinguishedValues = AsnSchemaNamedTagParser.parseIntegerDistinguishedValues(distinguishedValuesText); final String constraintText = Strings.nullToEmpty(matcher.group(3)); return AsnSchemaTypeDefinitionPrimitiveParser.parseInteger(name, distinguishedValues, constraintText); } /** * Parses a SEQUENCE OF type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SEQUENCE_OF} * * @return an {@link AsnSchemaTypeDefinitionSequenceOf} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionSequenceOf parseSequenceOf(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(1)); final String elementTypeName = matcher.group(2); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); return new AsnSchemaTypeDefinitionSequenceOf(name, elementTypeName, constraint); } /** * Parses a SET OF type definition * * @param name * name of the defined type * @param matcher * matcher which matched on {@link #PATTERN_TYPE_DEFINITION_SET_OF} * * @return an {@link AsnSchemaTypeDefinitionSetOf} representing the parsed data * * @throws ParseException * if any errors occur while parsing the type */ private static AsnSchemaTypeDefinitionSetOf parseSetOf(String name, Matcher matcher) throws ParseException { final String constraintText = Strings.nullToEmpty(matcher.group(1)); final String elementTypeName = matcher.group(2); final AsnSchemaConstraint constraint = AsnSchemaConstraintParser.parse(constraintText); return new AsnSchemaTypeDefinitionSetOf(name, elementTypeName, constraint); } /** * Parses pseudo type definitions found in the supplied list of {@link AsnSchemaComponentType}. * * <p>Refer to {@code /docs/design.md} for details of the design. * * @param componentTypes * list of component types to parse * * @return an ImmutableList of {@link AsnSchemaTypeDefinition} representing the parsed pseudo * type definitions * * @throws ParseException * if any errors occur while parsing the type */ private static List<AsnSchemaTypeDefinition> parsePseudoTypes( Iterable<AsnSchemaComponentType> componentTypes) throws ParseException { final List<AsnSchemaTypeDefinition> parsedTypes = Lists.newArrayList(); for (final AsnSchemaComponentType component : componentTypes) { if (component instanceof AsnSchemaComponentTypeGenerated) { final AsnSchemaComponentTypeGenerated componentGenerated = (AsnSchemaComponentTypeGenerated) component; final String pseudoTypeDefinitionText = componentGenerated.getTypeDefinitionText(); final ImmutableList<AsnSchemaTypeDefinition> pseudoTypeDefinitions = parse(component .getTypeName(), pseudoTypeDefinitionText); parsedTypes.addAll(pseudoTypeDefinitions); } } return parsedTypes; } }
ASN-89: Merge in develop
src/main/java/com/brightsparklabs/asanti/reader/parser/AsnSchemaTypeDefinitionParser.java
ASN-89: Merge in develop
<ide><path>src/main/java/com/brightsparklabs/asanti/reader/parser/AsnSchemaTypeDefinitionParser.java <ide> if (matcher.matches()) <ide> { <ide> return ImmutableList.<AsnSchemaTypeDefinition>of(parseBitString(name, matcher)); <add> <ide> } <ide> <ide> // check if defining an Ia5String <ide> throws ParseException <ide> { <ide> final String enumeratedOptionsText = matcher.group(1); <del> <ide> final ImmutableList<AsnSchemaNamedTag> enumeratedOptions <ide> = AsnSchemaNamedTagParser.parseEnumeratedOptions(enumeratedOptionsText); <ide> return new AsnSchemaTypeDefinitionEnumerated(name, enumeratedOptions); <ide> private static AsnSchemaTypeDefinitionGeneralizedTime parseGeneralizedTime(String name) <ide> throws ParseException <ide> { <del> // Subtype constraints are not applicable to GeneralizedTime. <add> // sub-type constraints are not applicable to GeneralizedTime. <ide> return AsnSchemaTypeDefinitionPrimitiveParser.parseGeneralizedTime(name); <ide> } <ide> <ide> throws ParseException <ide> { <ide> final String distinguishedValuesText = matcher.group(2); <del> <ide> final ImmutableList<AsnSchemaNamedTag> distinguishedValues <ide> = AsnSchemaNamedTagParser.parseIntegerDistinguishedValues(distinguishedValuesText); <del> <ide> final String constraintText = Strings.nullToEmpty(matcher.group(3)); <ide> return AsnSchemaTypeDefinitionPrimitiveParser.parseInteger(name, <ide> distinguishedValues, <ide> parsedTypes.addAll(pseudoTypeDefinitions); <ide> } <ide> } <del> <ide> return parsedTypes; <ide> } <ide> }
Java
apache-2.0
99f53500a8a4c4f24d3bf99c7d7286e11843ca6a
0
ragnor-rs/visum
/* * Copyright (c) 2015 Zvooq LTD. * Authors: Renat Sarymsakov, Dmitriy Mozgin, Denis Volyntsev. * * This file is part of Visum. * * Visum is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Visum is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Visum. If not, see <http://www.gnu.org/licenses/>. */ package io.reist.visum.view; import android.content.Context; import android.os.Bundle; import android.support.annotation.LayoutRes; import android.support.annotation.NonNull; import android.support.v7.app.AppCompatActivity; import io.reist.visum.ComponentCache; import io.reist.visum.VisumClientHelper; import io.reist.visum.presenter.VisumPresenter; /** * Extend your activities with this class to take advantage of Visum MVP. * * Created by Defuera on 29/01/16. */ public abstract class VisumActivity<P extends VisumPresenter> extends AppCompatActivity implements VisumView<P> { private final VisumViewHelper<P> helper; /** * @deprecated use {@link #VisumActivity(int)} instead */ @SuppressWarnings("deprecation") @Deprecated public VisumActivity() { this(VisumPresenter.VIEW_ID_DEFAULT); } public VisumActivity(int viewId) { this.helper = new VisumViewHelper<>(viewId, new VisumClientHelper<>(this)); } //region VisumClient implementation @Override public final void onStartClient() { helper.onStartClient(); } @NonNull @Override public final ComponentCache getComponentCache() { return helper.getComponentCache(); } @Override public final void onStopClient() { helper.onStopClient(); } @NonNull public final Context getContext() { return this; } //endregion //region VisumView implementation @Override public void attachPresenter() {} @Override public void detachPresenter() {} //endregion //region Activity implementation @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); helper.onStartClient(); helper.onRestoreInstanceState(); setContentView(getLayoutRes()); } @Override public void onResume() { super.onResume(); helper.attachPresenter(); } @Override public void onPause() { super.onPause(); helper.detachPresenter(); } @Override public void onDestroy() { super.onDestroy(); helper.onStopClient(); } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); helper.onSaveInstanceState(); } //endregion @LayoutRes protected abstract int getLayoutRes(); }
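To show the intended extension point, a minimal sketch of a concrete subclass follows. MyPresenter and R.layout.activity_my are hypothetical placeholders, and only the abstract member visible in this file (getLayoutRes()) is implemented; if VisumView declares further members not shown here, a real subclass would need those as well:

import android.support.annotation.LayoutRes;

import io.reist.visum.view.VisumActivity;

// MyPresenter is a placeholder assumed to extend VisumPresenter;
// R.layout.activity_my is a placeholder layout resource
public class MyActivity extends VisumActivity<MyPresenter> {

    public MyActivity() {
        // a view id of 1 is arbitrary; the deprecated no-arg constructor
        // would fall back to VisumPresenter.VIEW_ID_DEFAULT instead
        super(1);
    }

    @LayoutRes
    @Override
    protected int getLayoutRes() {
        // inflated by the base class in onCreate() via setContentView()
        return R.layout.activity_my;
    }
}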
src/main/java/io/reist/visum/view/VisumActivity.java
/* * Copyright (c) 2015 Zvooq LTD. * Authors: Renat Sarymsakov, Dmitriy Mozgin, Denis Volyntsev. * * This file is part of Visum. * * Visum is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Visum is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Visum. If not, see <http://www.gnu.org/licenses/>. */ package io.reist.visum.view; import android.os.Bundle; import android.support.annotation.LayoutRes; import android.support.annotation.NonNull; import android.support.v7.app.AppCompatActivity; import io.reist.visum.ComponentCache; import io.reist.visum.VisumClientHelper; import io.reist.visum.presenter.VisumPresenter; /** * Extend your activities with this class to take advantage of Visum MVP. * * Created by Defuera on 29/01/16. */ public abstract class VisumActivity<P extends VisumPresenter> extends AppCompatActivity implements VisumView<P> { private final VisumViewHelper<P> helper; /** * @deprecated use {@link #VisumActivity(int)} instead */ @SuppressWarnings("deprecation") @Deprecated public VisumActivity() { this(VisumPresenter.VIEW_ID_DEFAULT); } public VisumActivity(int viewId) { this.helper = new VisumViewHelper<>(viewId, new VisumClientHelper<>(this)); } //region VisumClient implementation @Override public final void onStartClient() { helper.onStartClient(); } @NonNull @Override public final ComponentCache getComponentCache() { return helper.getComponentCache(); } @Override public final void onStopClient() { helper.onStopClient(); } //endregion //region VisumView implementation @Override public void attachPresenter() {} @Override public void detachPresenter() {} //endregion //region Activity implementation @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); helper.onStartClient(); helper.onRestoreInstanceState(); setContentView(getLayoutRes()); } @Override public void onResume() { super.onResume(); helper.attachPresenter(); } @Override public void onPause() { super.onPause(); helper.detachPresenter(); } @Override public void onDestroy() { super.onDestroy(); helper.onStopClient(); } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); helper.onSaveInstanceState(); } //endregion @LayoutRes protected abstract int getLayoutRes(); }
minor
src/main/java/io/reist/visum/view/VisumActivity.java
minor
<ide><path>src/main/java/io/reist/visum/view/VisumActivity.java <ide> <ide> package io.reist.visum.view; <ide> <add>import android.content.Context; <ide> import android.os.Bundle; <ide> import android.support.annotation.LayoutRes; <ide> import android.support.annotation.NonNull; <ide> @Override <ide> public final void onStopClient() { <ide> helper.onStopClient(); <add> } <add> <add> @NonNull <add> public final Context getContext() { <add> return this; <ide> } <ide> <ide> //endregion
Java
apache-2.0
7effc2c00243b4e7466402cdc9091c5bd9b72d92
0
remkop/picocli,remkop/picocli,remkop/picocli,remkop/picocli
/* Copyright 2017 Remko Popma Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package picocli; import org.junit.Ignore; import org.junit.Test; import picocli.CommandLine.Help; import picocli.CommandLine.Help.Column; import picocli.CommandLine.Help.TextTable; import picocli.CommandLine.Option; import picocli.CommandLine.Parameters; import picocli.CommandLine.Usage; import java.awt.Point; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.PrintStream; import java.io.UnsupportedEncodingException; import java.lang.String; import java.lang.reflect.Field; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import static java.lang.String.format; import static org.junit.Assert.*; import static picocli.CommandLine.Help.Column.Overflow.*; /** * Tests for picoCLI's "Usage" help functionality. */ public class CommandLineHelpTest { private static String usageString(Class<?> annotatedClass) throws UnsupportedEncodingException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CommandLine.usage(annotatedClass, new PrintStream(baos, true, "UTF8")); String result = baos.toString("UTF8"); return result; } @Test public void testUsageAnnotationDetailedUsage() throws Exception { @Usage(detailedUsageHeader = true) class Params { @Option(names = {"-f", "--file"}, required = true, description = "the file to use") File file; } String result = usageString(Params.class); assertEquals(format("" + "Usage: <main class> -f <file>%n" + " -f, --file <file> the file to use %n", ""), result); } @Test public void testUsageSeparator() throws Exception { @Usage(separator = "=", detailedUsageHeader = true) class Params { @Option(names = {"-f", "--file"}, required = true, description = "the file to use") File file; } String result = usageString(Params.class); assertEquals(format("" + "Usage: <main class> -f=<file>%n" + " -f, --file=<file> the file to use %n", ""), result); } @Test public void testShortestFirstComparator_sortsShortestFirst() { String[] values = {"12345", "12", "123", "123456", "1", "", "1234"}; Arrays.sort(values, new Help.ShortestFirst()); String[] expected = {"", "1", "12", "123", "1234", "12345", "123456"}; assertArrayEquals(expected, values); } @Test public void testShortestFirstComparator_sortsDeclarationOrderIfEqualLength() { String[] values = {"-d", "-", "-a", "--alpha", "--b", "--a", "--beta"}; Arrays.sort(values, new Help.ShortestFirst()); String[] expected = {"-", "-d", "-a", "--b", "--a", "--beta", "--alpha"}; assertArrayEquals(expected, values); } @Test public void testCreateMinimalOptionRenderer_ReturnsMinimalOptionRenderer() { assertEquals(Help.MinimalOptionRenderer.class, Help.createMinimalOptionRenderer().getClass()); } @Test public void testMinimalOptionRenderer_rendersFirstDeclaredOptionNameAndDescription() { class Example { @Option(names = {"---long", "-L"}, description = "long description") String longField; @Option(names = {"-b", "-a", "--alpha"}, description = "other") String otherField; } Help.IOptionRenderer renderer = 
Help.createMinimalOptionRenderer(); Help.IParameterRenderer parameterRenderer = Help.createDefaultParameterRenderer(" "); Help help = new Help(Example.class); Iterator<Map.Entry<Option, Field>> iterator = help.option2Field.entrySet().iterator(); Map.Entry<Option, Field> entry = iterator.next(); String[][] row1 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row1.length); assertArrayEquals(new String[]{"---long <longField>", "long description"}, row1[0]); entry = iterator.next(); String[][] row2 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row2.length); assertArrayEquals(new String[]{"-b <otherField>", "other"}, row2[0]); } @Test public void testCreateDefaultOptionRenderer_ReturnsDefaultOptionRenderer() { assertEquals(Help.DefaultOptionRenderer.class, Help.createDefaultOptionRenderer().getClass()); } @Test public void testDefaultOptionRenderer_rendersShortestOptionNameThenOtherOptionNamesAndDescription() { class Example { @Option(names = {"---long", "-L"}, description = "long description") String longField; @Option(names = {"-b", "-a", "--alpha"}, description = "other") String otherField; } Help.IOptionRenderer renderer = Help.createDefaultOptionRenderer(); Help.IParameterRenderer parameterRenderer = Help.createDefaultParameterRenderer(" "); Help help = new Help(Example.class); Iterator<Map.Entry<Option, Field>> iterator = help.option2Field.entrySet().iterator(); Map.Entry<Option, Field> entry = iterator.next(); String[][] row1 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row1.length); assertArrayEquals(Arrays.toString(row1[0]), new String[]{"-L", ",", "---long <longField>", "long description"}, row1[0]); entry = iterator.next(); String[][] row2 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row2.length); assertArrayEquals(Arrays.toString(row2[0]), new String[]{"-b", ",", "-a, --alpha <otherField>", "other"}, row2[0]); } @Test public void testDefaultOptionRenderer_rendersCommaOnlyIfBothShortAndLongOptionNamesExist() { class Example { @Option(names = {"-v"}, description = "shortBool") boolean shortBoolean; @Option(names = {"--verbose"}, description = "longBool") boolean longBoolean; @Option(names = {"-x", "--xeno"}, description = "combiBool") boolean combiBoolean; @Option(names = {"-s"}, description = "shortOnly") String shortOnlyField; @Option(names = {"--long"}, description = "longOnly") String longOnlyField; @Option(names = {"-b", "--beta"}, description = "combi") String combiField; } Help.IOptionRenderer renderer = Help.createDefaultOptionRenderer(); Help.IParameterRenderer parameterRenderer = Help.createDefaultParameterRenderer(" "); Help help = new Help(Example.class); String[][] expected = new String[][] { {"-v", "", "", "shortBool"}, {"", "", "--verbose", "longBool"}, {"-x", ",", "--xeno", "combiBool"}, {"-s", "", " <shortOnlyField>", "shortOnly"}, {"", "", "--long <longOnlyField>", "longOnly"}, {"-b", ",", "--beta <combiField>", "combi"}, }; int i = -1; for (Map.Entry<Option, Field> entry : help.option2Field.entrySet()) { String[][] row = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row.length); assertArrayEquals(Arrays.toString(row[0]), expected[++i], row[0]); } } @Test public void testTextTable() { TextTable table = new TextTable(); table.addRow("-v", ",", "--verbose", "show what you're doing while you are doing it"); table.addRow("-p", null, null, "the quick brown fox jumped 
over the lazy dog. The quick brown fox jumped over the lazy dog."); assertEquals(String.format( " -v, --verbose show what you're doing while you are doing it %n" + " -p the quick brown fox jumped over the lazy dog. The %n" + " quick brown fox jumped over the lazy dog. %n" ,""), table.toString(new StringBuilder()).toString()); } @Test(expected = IllegalArgumentException.class) public void testTextTableAddsNewRowWhenTooManyValuesSpecified() { TextTable table = new TextTable(); table.addRow("-c", ",", "--create", "description", "INVALID", "Row 3"); // assertEquals(String.format("" + // " -c, --create description %n" + // " INVALID %n" + // " Row 3 %n" // ,""), table.toString(new StringBuilder()).toString()); } @Test public void testTextTableAddsNewRowWhenAnyColumnTooLong() { TextTable table = new TextTable(); table.addRow("-c", ",", "--create, --create2, --create3, --create4, --create5, --create6, --create7, --create8", "description"); assertEquals(String.format("" + " -c, --create, --create2, --create3, --create4, --create5, --create6, -- %n" + " create7, --create8 %n" + " description %n" ,""), table.toString(new StringBuilder()).toString()); table = new TextTable(); table.addRow("-c", ",", "--create, --create2, --create3, --create4, --create5, --create6, --createAA7, --create8", "description"); assertEquals(String.format("" + " -c, --create, --create2, --create3, --create4, --create5, --create6, -- %n" + " createAA7, --create8 %n" + " description %n" ,""), table.toString(new StringBuilder()).toString()); } @Test public void testCatUsageFormat() { @Usage(programName = "cat", summary = "Concatenate FILE(s), or standard input, to standard output.", footer = "Copyright(c) 2017") class Cat { @Parameters(paramLabel = "FILE", description = "Files whose contents to display") List<File> files; @Option(names = "--help", help = true, description = "display this help and exit") boolean help; @Option(names = "--version", help = true, description = "output version information and exit") boolean version; @Option(names = "-u", description = "(ignored)") boolean u; @Option(names = "-t", description = "equivalent to -vT") boolean t; @Option(names = "-e", description = "equivalent to -vET") boolean e; @Option(names = {"-A", "--show-all"}, description = "equivalent to -vET") boolean showAll; @Option(names = {"-s", "--squeeze-blank"}, description = "suppress repeated empty output lines") boolean squeeze; @Option(names = {"-v", "--show-nonprinting"}, description = "use ^ and M- notation, except for LDF and TAB") boolean v; @Option(names = {"-b", "--number-nonblank"}, description = "number nonempty output lines, overrides -n") boolean b; @Option(names = {"-T", "--show-tabs"}, description = "display TAB characters as ^I") boolean T; @Option(names = {"-E", "--show-ends"}, description = "display $ at end of each line") boolean E; @Option(names = {"-n", "--number"}, description = "number all output lines") boolean n; } ByteArrayOutputStream baos = new ByteArrayOutputStream(); CommandLine.usage(Cat.class, new PrintStream(baos)); String expected = String.format( "Usage: cat [OPTIONS] [FILE...]%n" + "Concatenate FILE(s), or standard input, to standard output.%n" + " -A, --show-all equivalent to -vET %n" + " -b, --number-nonblank number nonempty output lines, overrides -n %n" + " -e equivalent to -vET %n" + " -E, --show-ends display $ at end of each line %n" + " -n, --number number all output lines %n" + " -s, --squeeze-blank suppress repeated empty output lines %n" + " -t equivalent to -vT %n" + " -T, --show-tabs 
display TAB characters as ^I %n" + " -u (ignored) %n" + " -v, --show-nonprinting use ^ and M- notation, except for LDF and TAB %n" + " --help display this help and exit %n" + " --version output version information and exit %n" + "Copyright(c) 2017%n", ""); assertEquals(expected, baos.toString()); } @Test public void testZipUsageFormat() { @Usage(summary = { "Copyright (c) 1990-2008 Info-ZIP - Type 'zip \"-L\"' for software license.", "Zip 3.0 (July 5th 2008). Usage:", "zip [-options] [-b path] [-t mmddyyyy] [-n suffixes] [zipfile list] [-xi list]", " The default action is to add or replace zipfile entries from list, which", " can include the special name - to compress standard input.", " If zipfile and list are omitted, zip compresses stdin to stdout."} ) class Zip { @Option(names = "-f", description = "freshen: only changed files") boolean freshen; @Option(names = "-u", description = "update: only changed or new files") boolean update; @Option(names = "-d", description = "delete entries in zipfile") boolean delete; @Option(names = "-m", description = "move into zipfile (delete OS files)") boolean move; @Option(names = "-r", description = "recurse into directories") boolean recurse; @Option(names = "-j", description = "junk (don't record) directory names") boolean junk; @Option(names = "-0", description = "store only") boolean store; @Option(names = "-l", description = "convert LF to CR LF (-ll CR LF to LF)") boolean lf2crlf; @Option(names = "-1", description = "compress faster") boolean faster; @Option(names = "-9", description = "compress better") boolean better; @Option(names = "-q", description = "quiet operation") boolean quiet; @Option(names = "-v", description = "verbose operation/print version info") boolean verbose; @Option(names = "-c", description = "add one-line comments") boolean comments; @Option(names = "-z", description = "add zipfile comment") boolean zipComment; @Option(names = "-@", description = "read names from stdin") boolean readFileList; @Option(names = "-o", description = "make zipfile as old as latest entry") boolean old; @Option(names = "-x", description = "exclude the following names") boolean exclude; @Option(names = "-i", description = "include only the following names") boolean include; @Option(names = "-F", description = "fix zipfile (-FF try harder)") boolean fix; @Option(names = "-D", description = "do not add directory entries") boolean directories; @Option(names = "-A", description = "adjust self-extracting exe") boolean adjust; @Option(names = "-J", description = "junk zipfile prefix (unzipsfx)") boolean junkPrefix; @Option(names = "-T", description = "test zipfile integrity") boolean test; @Option(names = "-X", description = "eXclude eXtra file attributes") boolean excludeAttribs; @Option(names = "-y", description = "store symbolic links as the link instead of the referenced file") boolean symbolic; @Option(names = "-e", description = "encrypt") boolean encrypt; @Option(names = "-n", description = "don't compress these suffixes") boolean dontCompress; @Option(names = "-h2", description = "show more help") boolean moreHelp; } String expected = String.format("" + "Copyright (c) 1990-2008 Info-ZIP - Type 'zip \"-L\"' for software license.%n" + "Zip 3.0 (July 5th 2008). 
Usage:%n" + "zip [-options] [-b path] [-t mmddyyyy] [-n suffixes] [zipfile list] [-xi list]%n" + " The default action is to add or replace zipfile entries from list, which%n" + " can include the special name - to compress standard input.%n" + " If zipfile and list are omitted, zip compresses stdin to stdout.%n" + " -f freshen: only changed files -u update: only changed or new files %n" + " -d delete entries in zipfile -m move into zipfile (delete OS files) %n" + " -r recurse into directories -j junk (don't record) directory names %n" + " -0 store only -l convert LF to CR LF (-ll CR LF to LF)%n" + " -1 compress faster -9 compress better %n" + " -q quiet operation -v verbose operation/print version info %n" + " -c add one-line comments -z add zipfile comment %n" + " -@ read names from stdin -o make zipfile as old as latest entry %n" + " -x exclude the following names -i include only the following names %n" + " -F fix zipfile (-FF try harder) -D do not add directory entries %n" + " -A adjust self-extracting exe -J junk zipfile prefix (unzipsfx) %n" + " -T test zipfile integrity -X eXclude eXtra file attributes %n" + " -y store symbolic links as the link instead of the referenced file %n" + " -e encrypt -n don't compress these suffixes %n" + " -h2 show more help %n", ""); Help help = new Help(Zip.class); StringBuilder sb = new StringBuilder(); help.appendSummaryTo(sb); // show the first 6 lines, including copyright, description and usage TextTable textTable = new TextTable(new Column(5, 2, TRUNCATE), // values should fit new Column(30, 2, SPAN), // overflow into adjacent columns new Column(4, 1, TRUNCATE), // values should fit again new Column(39, 2, WRAP)); // overflow into next row (same column) textTable.optionRenderer = Help.createMinimalOptionRenderer(); // define and install a custom renderer textTable.layout = new Help.ILayout() { // define and install a custom layout Point previous = new Point(0, 0); public void layout(Option option, Field field, String[][] values, TextTable table) { String[] columnValues = values[0]; // we know renderer creates a single row with two values // We want to show two options on one row, next to each other, // unless the first option spanned multiple columns (in which case there are not enough columns left) int col = previous.x + 1; if (col == 1 || col + columnValues.length > table.columns.length) { // if true, write into next row // table also adds an empty row if a text value spanned multiple columns if (table.rowCount() == 0 || table.rowCount() == previous.y + 1) { // avoid adding 2 empty rows table.addEmptyRow(); // create the slots to write the text values into } col = 0; // we are starting a new row, reset the column to write into } for (int i = 0; i < columnValues.length; i++) { // always write to the last row, column depends on what happened previously previous = table.putValue(table.rowCount() - 1, col + i, columnValues[i]); } } }; // Now that the textTable has a renderer and layout installed, // we add Options to the textTable to build up the option details help text. // Note that we don't sort the options, so they appear in the order the fields are declared in the Zip class. 
for (Option option : help.option2Field.keySet()) { if (!option.hidden()) { textTable.addOption(option, help.option2Field.get(option)); } } textTable.toString(sb); // finally, copy the options details help text into the StringBuilder assertEquals(expected, sb.toString()); } /** for Netstat test */ private enum Protocol {IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, UDPv6} @Test public void testNetstatUsageFormat() { @Usage(programName = "NETSTAT", separator = " ", detailedUsageHeader = true, summary = {"Displays protocol statistics and current TCP/IP network connections.", ""}) class Netstat { @Option(names="-a", description="Displays all connections and listening ports.") boolean displayAll; @Option(names="-b", description="Displays the executable involved in creating each connection or " + "listening port. In some cases well-known executables host " + "multiple independent components, and in these cases the " + "sequence of components involved in creating the connection " + "or listening port is displayed. In this case the executable " + "name is in [] at the bottom, on top is the component it called, " + "and so forth until TCP/IP was reached. Note that this option " + "can be time-consuming and will fail unless you have sufficient " + "permissions.") boolean displayExecutable; @Option(names="-e", description="Displays Ethernet statistics. This may be combined with the -s option.") boolean displayEthernetStats; @Option(names="-f", description="Displays Fully Qualified Domain Names (FQDN) for foreign addresses.") boolean displayFQCN; @Option(names="-n", description="Displays addresses and port numbers in numerical form.") boolean displayNumerical; @Option(names="-o", description="Displays the owning process ID associated with each connection.") boolean displayOwningProcess; @Option(names="-p", paramLabel = "proto", description="Shows connections for the protocol specified by proto; proto " + "may be any of: TCP, UDP, TCPv6, or UDPv6. If used with the -s " + "option to display per-protocol statistics, proto may be any of: " + "IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, or UDPv6.") Protocol proto; @Option(names="-q", description="Displays all connections, listening ports, and bound " + "nonlistening TCP ports. Bound nonlistening ports may or may not " + "be associated with an active connection.") boolean query; @Option(names="-r", description="Displays the routing table.") boolean displayRoutingTable; @Option(names="-s", description="Displays per-protocol statistics. By default, statistics are " + "shown for IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, and UDPv6; " + "the -p option may be used to specify a subset of the default.") boolean displayStatistics; @Option(names="-t", description="Displays the current connection offload state.") boolean displayOffloadState; @Option(names="-x", description="Displays NetworkDirect connections, listeners, and shared endpoints.") boolean displayNetDirect; @Option(names="-y", description="Displays the TCP connection template for all connections. " + "Cannot be combined with the other options.") boolean displayTcpConnectionTemplate; @Parameters(arity = "0..1", paramLabel = "interval", description = "" + "Redisplays selected statistics, pausing interval seconds " + "between each display. Press CTRL+C to stop redisplaying " + "statistics. 
If omitted, netstat will print the current " + "configuration information once.") int interval; } StringBuilder sb = new StringBuilder(); Help help = new Help(Netstat.class); help.appendSummaryTo(sb).appendDetailedUsagePatternsTo("", null, sb); sb.append(System.getProperty("line.separator")); TextTable textTable = new TextTable( new Column(15, 2, TRUNCATE), new Column(65, 1, WRAP)); textTable.optionRenderer = Help.createMinimalOptionRenderer(); textTable.parameterRenderer = help.parameterRenderer; textTable.indentWrappedLines = 0; for (Option option : help.option2Field.keySet()) { textTable.addOption(option, help.option2Field.get(option)); } // FIXME needs Show positional parameters details in TextTable similar to option details #48 // textTable.addOption(help.positionalParametersField.getAnnotation(CommandLine.Parameters.class), help.positionalParametersField); textTable.toString(sb); String expected = String.format("" + "Displays protocol statistics and current TCP/IP network connections.%n" + "%n" + "NETSTAT [-a] [-b] [-e] [-f] [-n] [-o] [-p proto] [-q] [-r] [-s] [-t] [-x] [-y] [interval]%n" + // FIXME needs Show multiple detailed usage header lines for mutually exclusive options #46 // "NETSTAT [-a] [-b] [-e] [-f] [-n] [-o] [-p proto] [-q] [-r] [-s] [-t] [-x] [interval]%n" + // "NETSTAT [-y] [interval]%n" + "%n" + " -a Displays all connections and listening ports. %n" + " -b Displays the executable involved in creating each connection or %n" + " listening port. In some cases well-known executables host %n" + " multiple independent components, and in these cases the %n" + " sequence of components involved in creating the connection or %n" + " listening port is displayed. In this case the executable name %n" + " is in [] at the bottom, on top is the component it called, and %n" + " so forth until TCP/IP was reached. Note that this option can be %n" + " time-consuming and will fail unless you have sufficient %n" + " permissions. %n" + " -e Displays Ethernet statistics. This may be combined with the -s %n" + " option. %n" + " -f Displays Fully Qualified Domain Names (FQDN) for foreign %n" + " addresses. %n" + " -n Displays addresses and port numbers in numerical form. %n" + " -o Displays the owning process ID associated with each connection. %n" + " -p proto Shows connections for the protocol specified by proto; proto %n" + " may be any of: TCP, UDP, TCPv6, or UDPv6. If used with the -s %n" + " option to display per-protocol statistics, proto may be any of: %n" + " IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, or UDPv6. %n" + " -q Displays all connections, listening ports, and bound %n" + " nonlistening TCP ports. Bound nonlistening ports may or may not %n" + " be associated with an active connection. %n" + " -r Displays the routing table. %n" + " -s Displays per-protocol statistics. By default, statistics are %n" + " shown for IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, and UDPv6; %n" + " the -p option may be used to specify a subset of the default. %n" + " -t Displays the current connection offload state. %n" + " -x Displays NetworkDirect connections, listeners, and shared %n" + " endpoints. %n" + " -y Displays the TCP connection template for all connections. %n" + " Cannot be combined with the other options. %n" // FIXME needs Show positional parameters details in TextTable similar to option details #48 // " interval Redisplays selected statistics, pausing interval seconds %n" + // " between each display. Press CTRL+C to stop redisplaying %n" + // " statistics. 
If omitted, netstat will print the current %n" + // " configuration information once. %n" , ""); assertEquals(expected, sb.toString()); } }
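Stepping back from the expected-output assertions, the annotation API these tests exercise can be summarized in a short, self-contained sketch. It uses only constructs demonstrated above (@Usage, @Option, @Parameters, and the CommandLine.usage(Class, PrintStream) entry point) as they exist at this commit, not necessarily in later picocli releases; the 'grep' command and its options are invented for illustration:

import picocli.CommandLine;
import picocli.CommandLine.Option;
import picocli.CommandLine.Parameters;
import picocli.CommandLine.Usage;

import java.io.File;
import java.util.List;

public class UsageSketch {

    @Usage(programName = "grep", summary = "Search for PATTERN in each FILE.",
            footer = "Report bugs to: example@example.invalid")
    static class Grep {
        @Option(names = {"-i", "--ignore-case"}, description = "ignore case distinctions")
        boolean ignoreCase;

        @Option(names = {"-n", "--line-number"}, description = "print line number with output lines")
        boolean lineNumber;

        @Parameters(paramLabel = "FILE", description = "files to search")
        List<File> files;
    }

    public static void main(String[] args) {
        // renders the usage help generated from the annotations to stdout
        CommandLine.usage(Grep.class, System.out);
    }
}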
src/test/java/picocli/CommandLineHelpTest.java
/* Copyright 2017 Remko Popma Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package picocli; import org.junit.Ignore; import org.junit.Test; import picocli.CommandLine.Help; import picocli.CommandLine.Help.Column; import picocli.CommandLine.Help.TextTable; import picocli.CommandLine.Option; import picocli.CommandLine.Parameters; import picocli.CommandLine.Usage; import java.awt.Point; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.PrintStream; import java.io.UnsupportedEncodingException; import java.lang.String; import java.lang.reflect.Field; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import static java.lang.String.format; import static org.junit.Assert.*; import static picocli.CommandLine.Help.Column.Overflow.*; /** * Tests for picoCLI's "Usage" help functionality. */ public class CommandLineHelpTest { private static String usageString(Class<?> annotatedClass) throws UnsupportedEncodingException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CommandLine.usage(annotatedClass, new PrintStream(baos, true, "UTF8")); String result = baos.toString("UTF8"); return result; } @Test public void testUsageAnnotationDetailedUsage() throws Exception { @Usage(detailedUsageHeader = true) class Params { @Option(names = {"-f", "--file"}, required = true, description = "the file to use") File file; } String result = usageString(Params.class); assertEquals(format("" + "Usage: <main class> -f <file>%n" + " -f, --file <file> the file to use %n", ""), result); } @Test public void testUsageSeparator() throws Exception { @Usage(separator = "=", detailedUsageHeader = true) class Params { @Option(names = {"-f", "--file"}, required = true, description = "the file to use") File file; } String result = usageString(Params.class); assertEquals(format("" + "Usage: <main class> -f=<file>%n" + " -f, --file=<file> the file to use %n", ""), result); } @Test public void testShortestFirstComparator_sortsShortestFirst() { String[] values = {"12345", "12", "123", "123456", "1", "", "1234"}; Arrays.sort(values, new Help.ShortestFirst()); String[] expected = {"", "1", "12", "123", "1234", "12345", "123456"}; assertArrayEquals(expected, values); } @Test public void testShortestFirstComparator_sortsDeclarationOrderIfEqualLength() { String[] values = {"-d", "-", "-a", "--alpha", "--b", "--a", "--beta"}; Arrays.sort(values, new Help.ShortestFirst()); String[] expected = {"-", "-d", "-a", "--b", "--a", "--beta", "--alpha"}; assertArrayEquals(expected, values); } @Test public void testCreateMinimalOptionRenderer_ReturnsMinimalOptionRenderer() { assertEquals(Help.MinimalOptionRenderer.class, Help.createMinimalOptionRenderer().getClass()); } @Test public void testMinimalOptionRenderer_rendersFirstDeclaredOptionNameAndDescription() { class Example { @Option(names = {"---long", "-L"}, description = "long description") String longField; @Option(names = {"-b", "-a", "--alpha"}, description = "other") String otherField; } Help.IOptionRenderer renderer = 
Help.createMinimalOptionRenderer(); Help.IParameterRenderer parameterRenderer = Help.createDefaultParameterRenderer(" "); Help help = new Help(Example.class); Iterator<Map.Entry<Option, Field>> iterator = help.option2Field.entrySet().iterator(); Map.Entry<Option, Field> entry = iterator.next(); String[][] row1 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row1.length); assertArrayEquals(new String[]{"---long <longField>", "long description"}, row1[0]); entry = iterator.next(); String[][] row2 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row2.length); assertArrayEquals(new String[]{"-b <otherField>", "other"}, row2[0]); } @Test public void testCreateDefaultOptionRenderer_ReturnsMinimalOptionRenderer() { assertEquals(Help.DefaultOptionRenderer.class, Help.createDefaultOptionRenderer().getClass()); } @Test public void testDefaultOptionRenderer_rendersShortestOptionNameThenOtherOptionNamesAndDescription() { class Example { @Option(names = {"---long", "-L"}, description = "long description") String longField; @Option(names = {"-b", "-a", "--alpha"}, description = "other") String otherField; } Help.IOptionRenderer renderer = Help.createDefaultOptionRenderer(); Help.IParameterRenderer parameterRenderer = Help.createDefaultParameterRenderer(" "); Help help = new Help(Example.class); Iterator<Map.Entry<Option, Field>> iterator = help.option2Field.entrySet().iterator(); Map.Entry<Option, Field> entry = iterator.next(); String[][] row1 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row1.length); assertArrayEquals(Arrays.toString(row1[0]), new String[]{"-L", ",", "---long <longField>", "long description"}, row1[0]); entry = iterator.next(); String[][] row2 = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row2.length); assertArrayEquals(Arrays.toString(row2[0]), new String[]{"-b", ",", "-a, --alpha <otherField>", "other"}, row2[0]); } @Test public void testDefaultOptionRenderer_rendersCommaOnlyIfBothShortAndLongOptionNamesExist() { class Example { @Option(names = {"-v"}, description = "shortBool") boolean shortBoolean; @Option(names = {"--verbose"}, description = "longBool") boolean longBoolean; @Option(names = {"-x", "--xeno"}, description = "combiBool") boolean combiBoolean; @Option(names = {"-s"}, description = "shortOnly") String shortOnlyField; @Option(names = {"--long"}, description = "longOnly") String longOnlyField; @Option(names = {"-b", "--beta"}, description = "combi") String combiField; } Help.IOptionRenderer renderer = Help.createDefaultOptionRenderer(); Help.IParameterRenderer parameterRenderer = Help.createDefaultParameterRenderer(" "); Help help = new Help(Example.class); String[][] expected = new String[][] { {"-v", "", "", "shortBool"}, {"", "", "--verbose", "longBool"}, {"-x", ",", "--xeno", "combiBool"}, {"-s", "", " <shortOnlyField>", "shortOnly"}, {"", "", "--long <longOnlyField>", "longOnly"}, {"-b", ",", "--beta <combiField>", "combi"}, }; int i = -1; for (Map.Entry<Option, Field> entry : help.option2Field.entrySet()) { String[][] row = renderer.render(entry.getKey(), entry.getValue(), parameterRenderer); assertEquals(1, row.length); assertArrayEquals(Arrays.toString(row[0]), expected[++i], row[0]); } } @Test public void testTextTable() { TextTable table = new TextTable(); table.addRow("-v", ",", "--verbose", "show what you're doing while you are doing it"); table.addRow("-p", null, null, "the quick brown fox jumped 
over the lazy dog. The quick brown fox jumped over the lazy dog."); assertEquals(String.format( " -v, --verbose show what you're doing while you are doing it %n" + " -p the quick brown fox jumped over the lazy dog. The %n" + " quick brown fox jumped over the lazy dog. %n" ,""), table.toString(new StringBuilder()).toString()); } @Test(expected = IllegalArgumentException.class) public void testTextTableAddsNewRowWhenTooManyValuesSpecified() { TextTable table = new TextTable(); table.addRow("-c", ",", "--create", "description", "INVALID", "Row 3"); // assertEquals(String.format("" + // " -c, --create description %n" + // " INVALID %n" + // " Row 3 %n" // ,""), table.toString(new StringBuilder()).toString()); } @Test public void testTextTableAddsNewRowWhenAnyColumnTooLong() { TextTable table = new TextTable(); table.addRow("-c", ",", "--create, --create2, --create3, --create4, --create5, --create6, --create7, --create8", "description"); assertEquals(String.format("" + " -c, --create, --create2, --create3, --create4, --create5, --create6, -- %n" + " create7, --create8 %n" + " description %n" ,""), table.toString(new StringBuilder()).toString()); table = new TextTable(); table.addRow("-c", ",", "--create, --create2, --create3, --create4, --create5, --create6, --createAA7, --create8", "description"); assertEquals(String.format("" + " -c, --create, --create2, --create3, --create4, --create5, --create6, -- %n" + " createAA7, --create8 %n" + " description %n" ,""), table.toString(new StringBuilder()).toString()); } @Test public void testCatUsageFormat() { @Usage(programName = "cat", summary = "Concatenate FILE(s), or standard input, to standard output.", footer = "Copyright(c) 2017") class Cat { @Parameters(paramLabel = "FILE", description = "Files whose contents to display") List<File> files; @Option(names = "--help", help = true, description = "display this help and exit") boolean help; @Option(names = "--version", help = true, description = "output version information and exit") boolean version; @Option(names = "-u", description = "(ignored)") boolean u; @Option(names = "-t", description = "equivalent to -vT") boolean t; @Option(names = "-e", description = "equivalent to -vET") boolean e; @Option(names = {"-A", "--show-all"}, description = "equivalent to -vET") boolean showAll; @Option(names = {"-s", "--squeeze-blank"}, description = "suppress repeated empty output lines") boolean squeeze; @Option(names = {"-v", "--show-nonprinting"}, description = "use ^ and M- notation, except for LDF and TAB") boolean v; @Option(names = {"-b", "--number-nonblank"}, description = "number nonempty output lines, overrides -n") boolean b; @Option(names = {"-T", "--show-tabs"}, description = "display TAB characters as ^I") boolean T; @Option(names = {"-E", "--show-ends"}, description = "display $ at end of each line") boolean E; @Option(names = {"-n", "--number"}, description = "number all output lines") boolean n; } ByteArrayOutputStream baos = new ByteArrayOutputStream(); CommandLine.usage(Cat.class, new PrintStream(baos)); String expected = String.format( "Usage: cat [OPTIONS] [FILE...]%n" + "Concatenate FILE(s), or standard input, to standard output.%n" + " -A, --show-all equivalent to -vET %n" + " -b, --number-nonblank number nonempty output lines, overrides -n %n" + " -e equivalent to -vET %n" + " -E, --show-ends display $ at end of each line %n" + " -n, --number number all output lines %n" + " -s, --squeeze-blank suppress repeated empty output lines %n" + " -t equivalent to -vT %n" + " -T, --show-tabs 
display TAB characters as ^I %n" + " -u (ignored) %n" + " -v, --show-nonprinting use ^ and M- notation, except for LDF and TAB %n" + " --help display this help and exit %n" + " --version output version information and exit %n" + "Copyright(c) 2017%n", ""); assertEquals(expected, baos.toString()); } @Test public void testZipUsageFormat() { @Usage(summary = { "Copyright (c) 1990-2008 Info-ZIP - Type 'zip \"-L\"' for software license.", "Zip 3.0 (July 5th 2008). Usage:", "zip [-options] [-b path] [-t mmddyyyy] [-n suffixes] [zipfile list] [-xi list]", " The default action is to add or replace zipfile entries from list, which", " can include the special name - to compress standard input.", " If zipfile and list are omitted, zip compresses stdin to stdout."} ) class Zip { @Option(names = "-f", description = "freshen: only changed files") boolean freshen; @Option(names = "-u", description = "update: only changed or new files") boolean update; @Option(names = "-d", description = "delete entries in zipfile") boolean delete; @Option(names = "-m", description = "move into zipfile (delete OS files)") boolean move; @Option(names = "-r", description = "recurse into directories") boolean recurse; @Option(names = "-j", description = "junk (don't record) directory names") boolean junk; @Option(names = "-0", description = "store only") boolean store; @Option(names = "-l", description = "convert LF to CR LF (-ll CR LF to LF)") boolean lf2crlf; @Option(names = "-1", description = "compress faster") boolean faster; @Option(names = "-9", description = "compress better") boolean better; @Option(names = "-q", description = "quiet operation") boolean quiet; @Option(names = "-v", description = "verbose operation/print version info") boolean verbose; @Option(names = "-c", description = "add one-line comments") boolean comments; @Option(names = "-z", description = "add zipfile comment") boolean zipComment; @Option(names = "-@", description = "read names from stdin") boolean readFileList; @Option(names = "-o", description = "make zipfile as old as latest entry") boolean old; @Option(names = "-x", description = "exclude the following names") boolean exclude; @Option(names = "-i", description = "include only the following names") boolean include; @Option(names = "-F", description = "fix zipfile (-FF try harder)") boolean fix; @Option(names = "-D", description = "do not add directory entries") boolean directories; @Option(names = "-A", description = "adjust self-extracting exe") boolean adjust; @Option(names = "-J", description = "junk zipfile prefix (unzipsfx)") boolean junkPrefix; @Option(names = "-T", description = "test zipfile integrity") boolean test; @Option(names = "-X", description = "eXclude eXtra file attributes") boolean excludeAttribs; @Option(names = "-y", description = "store symbolic links as the link instead of the referenced file") boolean symbolic; @Option(names = "-e", description = "encrypt") boolean encrypt; @Option(names = "-n", description = "don't compress these suffixes") boolean dontCompress; @Option(names = "-h2", description = "show more help") boolean moreHelp; } String expected = String.format("" + "Copyright (c) 1990-2008 Info-ZIP - Type 'zip \"-L\"' for software license.%n" + "Zip 3.0 (July 5th 2008). 
Usage:%n" + "zip [-options] [-b path] [-t mmddyyyy] [-n suffixes] [zipfile list] [-xi list]%n" + " The default action is to add or replace zipfile entries from list, which%n" + " can include the special name - to compress standard input.%n" + " If zipfile and list are omitted, zip compresses stdin to stdout.%n" + " -f freshen: only changed files -u update: only changed or new files %n" + " -d delete entries in zipfile -m move into zipfile (delete OS files) %n" + " -r recurse into directories -j junk (don't record) directory names %n" + " -0 store only -l convert LF to CR LF (-ll CR LF to LF)%n" + " -1 compress faster -9 compress better %n" + " -q quiet operation -v verbose operation/print version info %n" + " -c add one-line comments -z add zipfile comment %n" + " -@ read names from stdin -o make zipfile as old as latest entry %n" + " -x exclude the following names -i include only the following names %n" + " -F fix zipfile (-FF try harder) -D do not add directory entries %n" + " -A adjust self-extracting exe -J junk zipfile prefix (unzipsfx) %n" + " -T test zipfile integrity -X eXclude eXtra file attributes %n" + " -y store symbolic links as the link instead of the referenced file %n" + " -e encrypt -n don't compress these suffixes %n" + " -h2 show more help %n", ""); Help help = new Help(Zip.class); StringBuilder sb = new StringBuilder(); help.appendSummaryTo(sb); // show the first 6 lines, including copyright, description and usage TextTable textTable = new TextTable(new Column(5, 2, TRUNCATE), // values should fit new Column(30, 2, SPAN), // overflow into adjacent columns new Column(4, 1, TRUNCATE), // values should fit again new Column(39, 2, WRAP)); // overflow into next row (same column) textTable.optionRenderer = Help.createMinimalOptionRenderer(); // define and install a custom renderer textTable.layout = new Help.ILayout() { // define and install a custom layout Point previous = new Point(0, 0); public void layout(Option option, Field field, String[][] values, TextTable table) { String[] columnValues = values[0]; // we know renderer creates a single row with two values // We want to show two options on one row, next to each other, // unless the first option spanned multiple columns (in which case there are not enough columns left) int col = previous.x + 1; if (col == 1 || col + columnValues.length > table.columns.length) { // if true, write into next row // table also adds an empty row if a text value spanned multiple columns if (table.rowCount() == 0 || table.rowCount() == previous.y + 1) { // avoid adding 2 empty rows table.addEmptyRow(); // create the slots to write the text values into } col = 0; // we are starting a new row, reset the column to write into } for (int i = 0; i < columnValues.length; i++) { // always write to the last row, column depends on what happened previously previous = table.putValue(table.rowCount() - 1, col + i, columnValues[i]); } } }; // Now that the textTable has a renderer and layout installed, // we add Options to the textTable to build up the option details help text. // Note that we don't sort the options, so they appear in the order the fields are declared in the Zip class. 
for (Option option : help.option2Field.keySet()) { if (!option.hidden()) { textTable.addOption(option, help.option2Field.get(option)); } } textTable.toString(sb); // finally, copy the options details help text into the StringBuilder assertEquals(expected, sb.toString()); } /** for Netstat test */ private enum Protocol {IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, UDPv6} @Test public void testNetstatUsageFormat() { @Usage(programName = "NETSTAT", separator = " ", detailedUsageHeader = true, summary = {"Displays protocol statistics and current TCP/IP network connections.", ""}) class Netstat { @Option(names="-a", description="Displays all connections and listening ports.") boolean displayAll; @Option(names="-b", description="Displays the executable involved in creating each connection or " + "listening port. In some cases well-known executables host " + "multiple independent components, and in these cases the " + "sequence of components involved in creating the connection " + "or listening port is displayed. In this case the executable " + "name is in [] at the bottom, on top is the component it called, " + "and so forth until TCP/IP was reached. Note that this option " + "can be time-consuming and will fail unless you have sufficient " + "permissions.") boolean displayExecutable; @Option(names="-e", description="Displays Ethernet statistics. This may be combined with the -s option.") boolean displayEthernetStats; @Option(names="-f", description="Displays Fully Qualified Domain Names (FQDN) for foreign addresses.") boolean displayFQCN; @Option(names="-n", description="Displays addresses and port numbers in numerical form.") boolean displayNumerical; @Option(names="-o", description="Displays the owning process ID associated with each connection.") boolean displayOwningProcess; @Option(names="-p", paramLabel = "proto", description="Shows connections for the protocol specified by proto; proto " + "may be any of: TCP, UDP, TCPv6, or UDPv6. If used with the -s " + "option to display per-protocol statistics, proto may be any of: " + "IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, or UDPv6.") Protocol proto; @Option(names="-q", description="Displays all connections, listening ports, and bound " + "nonlistening TCP ports. Bound nonlistening ports may or may not " + "be associated with an active connection.") boolean query; @Option(names="-r", description="Displays the routing table.") boolean displayRoutingTable; @Option(names="-s", description="Displays per-protocol statistics. By default, statistics are " + "shown for IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, and UDPv6; " + "the -p option may be used to specify a subset of the default.") boolean displayStatistics; @Option(names="-t", description="Displays the current connection offload state.") boolean displayOffloadState; @Option(names="-x", description="Displays NetworkDirect connections, listeners, and shared endpoints.") boolean displayNetDirect; @Option(names="-y", description="Displays the TCP connection template for all connections. " + "Cannot be combined with the other options.") boolean displayTcpConnectionTemplate; @Parameters(arity = "0..1", paramLabel = "interval", description = "" + "Redisplays selected statistics, pausing interval seconds " + "between each display. Press CTRL+C to stop redisplaying " + "statistics. 
If omitted, netstat will print the current " + "configuration information once.") int interval; } StringBuilder sb = new StringBuilder(); Help help = new Help(Netstat.class); help.appendSummaryTo(sb).appendDetailedUsagePatternsTo("", null, sb); sb.append(System.getProperty("line.separator")); TextTable textTable = new TextTable( new Column(15, 2, TRUNCATE), new Column(65, 1, WRAP)); textTable.optionRenderer = Help.createMinimalOptionRenderer(); textTable.parameterRenderer = help.parameterRenderer; textTable.indentWrappedLines = 0; for (Option option : help.option2Field.keySet()) { textTable.addOption(option, help.option2Field.get(option)); } // FIXME needs Show positional parameters details in TextTable similar to option details #48 // textTable.addOption(help.positionalParametersField.getAnnotation(CommandLine.Parameters.class), help.positionalParametersField); textTable.toString(sb); String expected = String.format("" + "Displays protocol statistics and current TCP/IP network connections.%n" + "%n" + "NETSTAT [-a] [-b] [-e] [-f] [-n] [-o] [-p proto] [-q] [-r] [-s] [-t] [-x] [-y] [interval]%n" + // FIXME needs Show multiple detailed usage header lines for mutually exclusive options #46 // "NETSTAT [-a] [-b] [-e] [-f] [-n] [-o] [-p proto] [-q] [-r] [-s] [-t] [-x] [interval]%n" + // "NETSTAT [-y] [interval]%n" + "%n" + " -a Displays all connections and listening ports. %n" + " -b Displays the executable involved in creating each connection or %n" + " listening port. In some cases well-known executables host %n" + " multiple independent components, and in these cases the %n" + " sequence of components involved in creating the connection or %n" + " listening port is displayed. In this case the executable name %n" + " is in [] at the bottom, on top is the component it called, and %n" + " so forth until TCP/IP was reached. Note that this option can be %n" + " time-consuming and will fail unless you have sufficient %n" + " permissions. %n" + " -e Displays Ethernet statistics. This may be combined with the -s %n" + " option. %n" + " -f Displays Fully Qualified Domain Names (FQDN) for foreign %n" + " addresses. %n" + " -n Displays addresses and port numbers in numerical form. %n" + " -o Displays the owning process ID associated with each connection. %n" + " -p proto Shows connections for the protocol specified by proto; proto %n" + " may be any of: TCP, UDP, TCPv6, or UDPv6. If used with the -s %n" + " option to display per-protocol statistics, proto may be any of: %n" + " IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, or UDPv6. %n" + " -q Displays all connections, listening ports, and bound %n" + " nonlistening TCP ports. Bound nonlistening ports may or may not %n" + " be associated with an active connection. %n" + " -r Displays the routing table. %n" + " -s Displays per-protocol statistics. By default, statistics are %n" + " shown for IP, IPv6, ICMP, ICMPv6, TCP, TCPv6, UDP, and UDPv6; %n" + " the -p option may be used to specify a subset of the default. %n" + " -t Displays the current connection offload state. %n" + " -x Displays NetworkDirect connections, listeners, and shared %n" + " endpoints. %n" + " -y Displays the TCP connection template for all connections. %n" + " Cannot be combined with the other options. %n" // FIXME needs Show positional parameters details in TextTable similar to option details #48 // " interval Redisplays selected statistics, pausing interval seconds %n" + // " between each display. Press CTRL+C to stop redisplaying %n" + // " statistics. 
If omitted, netstat will print the current %n" + // " configuration information once. %n" , ""); assertEquals(expected, sb.toString()); } }
fix typo
src/test/java/picocli/CommandLineHelpTest.java
fix typo
<ide><path>src/test/java/picocli/CommandLineHelpTest.java <ide> } <ide> <ide> @Test <del> public void testCreateDefaultOptionRenderer_ReturnsMinimalOptionRenderer() { <add> public void testCreateDefaultOptionRenderer_ReturnsDefaultOptionRenderer() { <ide> assertEquals(Help.DefaultOptionRenderer.class, Help.createDefaultOptionRenderer().getClass()); <ide> }
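The diff above is a one-token rename so the test method's name matches the DefaultOptionRenderer it actually asserts. For readers skimming the quoted test file, the sketch below condenses the early picocli Help.TextTable/Column API those tests exercise into a self-contained snippet. It is a minimal sketch only: the column widths and overflow modes mirror the Netstat test above, while the class name, row text, and main method are illustrative and not part of the commit.

```java
// Minimal sketch of the Help.TextTable API quoted in the test file above.
// TextTableSketch and the row content are illustrative, not from the commit.
import picocli.CommandLine.Help.Column;
import picocli.CommandLine.Help.TextTable;
import static picocli.CommandLine.Help.Column.Overflow.*;

public class TextTableSketch {
    public static void main(String[] args) {
        // Two columns: a narrow TRUNCATE column that clips oversized values,
        // and a wide WRAP column whose overflow continues on the next row.
        TextTable table = new TextTable(
                new Column(15, 2, TRUNCATE),
                new Column(65, 1, WRAP));
        table.addRow("-v, --verbose", "show what you're doing while you are doing it");
        // toString(StringBuilder) appends the rendered rows, as in the tests.
        System.out.print(table.toString(new StringBuilder()));
    }
}
```

As the tests demonstrate, passing more values to addRow than there are columns raises an IllegalArgumentException, so row arity must match the column definitions.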
Java
apache-2.0
e40703d739e8474b227531e70634a8fdd867005d
0
nus-ncl/service-web,nus-ncl/service-web,nus-ncl/service-web,nus-ncl/service-web
package sg.ncl; import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.client.ClientHttpResponse; import org.springframework.web.client.ResponseErrorHandler; import sg.ncl.testbed_interface.RestUtil; import java.io.IOException; /** * @author Te Ye */ @Slf4j public class MyResponseErrorHandler implements ResponseErrorHandler { @Override public void handleError(ClientHttpResponse response) throws IOException { String statusText; if (response.getStatusText() == null) { statusText = ""; } else { statusText = response.getStatusText(); } log.error("Response error: {} {}", response.getStatusCode(), statusText); } @Override public boolean hasError(ClientHttpResponse response) throws IOException { return RestUtil.isError(response.getStatusCode()); } }
src/main/java/sg/ncl/MyResponseErrorHandler.java
package sg.ncl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.client.ClientHttpResponse; import org.springframework.web.client.ResponseErrorHandler; import sg.ncl.testbed_interface.RestUtil; import java.io.IOException; /** * @author Te Ye */ public class MyResponseErrorHandler implements ResponseErrorHandler { private static final Logger log = LoggerFactory.getLogger(MyResponseErrorHandler.class); @Override public void handleError(ClientHttpResponse response) throws IOException { String statusText; if (response.getStatusText() == null) { statusText = ""; } else { statusText = response.getStatusText(); } log.error("Response error: {} {}", response.getStatusCode(), statusText); } @Override public boolean hasError(ClientHttpResponse response) throws IOException { return RestUtil.isError(response.getStatusCode()); } }
DEV-789 change to Slf4j
src/main/java/sg/ncl/MyResponseErrorHandler.java
DEV-789 change to Slf4j
<ide><path>src/main/java/sg/ncl/MyResponseErrorHandler.java <ide> package sg.ncl; <ide> <add>import lombok.extern.slf4j.Slf4j; <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <ide> import org.springframework.http.client.ClientHttpResponse; <ide> /** <ide> * @author Te Ye <ide> */ <add>@Slf4j <ide> public class MyResponseErrorHandler implements ResponseErrorHandler { <del> <del> private static final Logger log = LoggerFactory.getLogger(MyResponseErrorHandler.class); <ide> <ide> @Override <ide> public void handleError(ClientHttpResponse response) throws IOException { <ide> log.error("Response error: {} {}", response.getStatusCode(), statusText); <ide> } <ide> <add> <ide> @Override <ide> public boolean hasError(ClientHttpResponse response) throws IOException { <ide> return RestUtil.isError(response.getStatusCode());
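This commit swaps a hand-written logger field for Lombok's @Slf4j annotation, which is why the log.error call keeps working after the field is deleted. For reference, the sketch below shows what the annotation expands to at compile time; it assumes Lombok is on the annotation-processing classpath, and Example is a hypothetical class name, not part of the commit. (Note the quoted new_contents leaves the now-unused Logger/LoggerFactory imports in place.)

```java
// Sketch of the @Slf4j expansion, assuming Lombok on the classpath;
// "Example" is a hypothetical class name.
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class Example {
    // At compile time Lombok generates the equivalent of the field that
    // this commit deleted:
    // private static final org.slf4j.Logger log =
    //         org.slf4j.LoggerFactory.getLogger(Example.class);
    public void report(int code, String text) {
        log.error("Response error: {} {}", code, text);
    }
}
```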
JavaScript
mit
a701a374880045a3fbab3ea9aab40a4e8101acb6
0
danielwippermann/resol-vbus
/*! resol-vbus | Copyright (c) 2013-2018, Daniel Wippermann | MIT license */ 'use strict'; const Customizer = require('./customizer'); const _ = require('./lodash'); const optionKeys = [ 'connection', 'maxRounds', 'triesPerValue', 'timeoutPerValue', 'masterTimeout', ]; class ConnectionCustomizer extends Customizer { /** * Constructs a new ConnectionCustomizer instance and optionally initializes its * members with the given values. * * @constructs * @augments Customizer * @param {object} [options] Initialization values for this instance's members * @param {number} [options.connection] {@link ConnectionCustomizer#connection} * @param {number} [options.maxRounds] {@link ConnectionCustomizer#maxRounds} * @param {number} [options.triesPerValue] {@link ConnectionCustomizer#triesPerValue} * @param {number} [options.timeoutPerValue] {@link ConnectionCustomizer#timeoutPerValue} * @param {number} [options.masterTimeout] {@link ConnectionCustomizer#masterTimeout} * * @classdesc * A ConnectionCustomizer uses an established connection to a device * to transfer sets of configuration values over it. */ constructor(options) { super(options); _.extend(this, _.pick(options, optionKeys)); } /** * Load a set of configuration values from a device. * * See {@link Customizer#loadConfiguration} for details. */ async _loadConfiguration(configuration, options) { options = _.defaults({}, options, { action: 'get', }); const callback = (config, round) => { if (options.optimize) { return this._optimizeLoadConfiguration(config); } else { if (round === 1) { _.forEach(configuration, (value) => { value.pending = true; }); return configuration; } else { return config; } } }; return this.transceiveConfiguration(options, callback); } /** * Save a set of configuration values to a device. * * See {@link Customizer#saveConfiguration} for details. */ async _saveConfiguration(newConfiguration, oldConfigurstion, options) { options = _.defaults({}, options, { action: 'set', actionOptions: { save: true, }, }); const callback = (config, round) => { if (options.optimize) { if (round === 1) { return this._optimizeSaveConfiguration(newConfiguration, oldConfigurstion); } else { return this._optimizeSaveConfiguration(newConfiguration, config); } } else { if (round === 1) { _.forEach(newConfiguration, (value) => { value.pending = true; }); return newConfiguration; } else { return config; } } }; return this.transceiveConfiguration(options, callback); } /** * Transceives a controller configuration set, handling timeouts, retries etc. * * @param {object} options Options * @param {number} [options.maxRounds] {@link ConnectionCustomizer#maxRounds} * @param {number} [options.triesPerValue] {@link ConnectionCustomizer#triesPerValue} * @param {number} [options.timeoutPerValue] {@link ConnectionCustomizer#timeoutPerValue} * @param {number} [options.masterTimeout] {@link ConnectionCustomizer#masterTimeout} * @param {string} options.action Action to perform, can be `'get'` or `'set'`. * @param {object} [options.actionOptions] Options object to forward to the action to perform. * @param {function} [options.reportProgress] Callback to inform about progress. * @param {function} [options.checkCanceled] Callback to check whether the operation should be canceled. * @param {function} optimizerCallback Callback to optimize configuration between rounds. * @return {object} Promise that resolves to the configuration or `null` on timeout. 
*/ async transceiveConfiguration(options, optimizerCallback) { if (_.isFunction(options)) { optimizerCallback = options; options = null; } options = _.defaults({}, options, { maxRounds: this.maxRounds, triesPerValue: this.triesPerValue, timeoutPerValue: this.timeoutPerValue, masterTimeout: this.masterTimeout, action: null, actionOptions: null, reportProgress: null, checkCanceled: null, }); const { connection } = this; const address = this.deviceAddress; async function check() { if (options.checkCanceled) { if (await options.checkCanceled()) { throw new Error('Canceled'); } } await connection.createConnectedPromise(); } let config = null; const state = { masterAddress: null, masterLastContacted: null, }; const reportProgress = function(progress) { if (options.reportProgress) { options.reportProgress(progress); } }; for (let round = 1; round <= options.maxRounds; round++) { await check(); reportProgress({ message: 'OPTIMIZING_VALUES', round, }); config = await optimizerCallback(config, round); await check(); const pendingValues = config.filter((value) => { return value.pending; }); if (pendingValues.length > 0) { for (let index = 0; index < pendingValues.length; index++) { const valueInfo = pendingValues [index++]; let reportProgress; if (options.reportProgress) { reportProgress = (progress) => { progress = _.extend({}, progress, { valueId: valueInfo.valueId, valueIndex: valueInfo.valueIndex, valueIdHash: valueInfo.valueIdHash, valueNr: index, valueCount: pendingValues.length, }); return options.reportProgress(progress); }; } await check(); const datagram = await this.transceiveValue(valueInfo, valueInfo.value, { triesPerValue: options.triesPerValue, timeoutPerValue: options.timeoutPerValue, action: options.action, actionOptions: options.actionOptions, reportProgress, }, state); valueInfo.pending = false; valueInfo.transceived = !!datagram; if (datagram) { valueInfo.value = datagram.value; } } } else { break; } } if (state.masterLastContacted !== null) { reportProgress({ message: 'RELEASING_BUS', }); await connection.releaseBus(address); } return config; } /** * Transceive a controller value over this connection, handling * timeouts, retries etc. * * @param {object|number} valueInfoOrIndex Value info object or value index * @param {number} valueInfo.valueIndex Value index * @param {number} valueInfo.valueIdHash Value ID hash * @param {number} value Value * @param {object} options Options * @param {number} options.triesPerValue {@link ConnectionCustomizer#triesPerValue} * @param {number} options.timeoutPerValue {@link ConnectionCustomizer#timeoutPerValue} * @param {number} options.masterTimeout {@link ConnectionCustomizer#masterTimeout} * @param {string} options.action Action to perform, can be `'get'` or `'set'`. * @param {object} [options.actionOptions] Options object to forward to the action to perform. * @param {function} [options.reportProgress] Callback to inform about progress. * @param {function} [options.checkCanceled] Callback to check whether the operation should be canceled. * @param {object} state State to share between multiple calls to this method. * @returns {object} Promise that resolves with the datagram received or `null` on timeout. 
*/ async transceiveValue(valueInfo, value, options, state) { const doWork = async (resolve, reject) => { let timer; const done = function(err, result) { if (timer) { clearTimeout(timer); timer = null; } if (err) { reject(err); } else { resolve(result); } }; if (!_.isObject(valueInfo)) { valueInfo = { valueIndex: valueInfo, }; } if (state === undefined) { state = {}; } options = _.defaults({}, options, { triesPerValue: this.triesPerValue, timeoutPerValue: this.timeoutPerValue, masterTimeout: this.masterTimeout, action: null, actionOptions: null, reportProgress: null, checkCanceled: null, }); state = _.defaults(state, { masterAddress: this.deviceAddress, masterLastContacted: Date.now(), }); const { connection } = this; const address = this.deviceAddress; async function check() { if (options.checkCanceled) { if (await options.checkCanceled()) { done(new Error('Canceled')); } } await connection.createConnectedPromise(); } const onTimeout = function() { done(null, null); }; timer = setTimeout(onTimeout, options.timeoutPerValue); let result; try { for (let tries = 1; tries <= options.triesPerValue; tries++) { const reportProgress = function(message) { if (options.reportProgress) { options.reportProgress({ message, tries, valueIndex: valueInfo.valueIndex, valueInfo, }); } }; await check(); if ((tries > 1) && (state.masterLastContacted !== null)) { reportProgress('RELEASING_BUS'); state.masterLastContacted = null; await connection.releaseBus(state.masterAddress); } await check(); if ((state.masterLastContacted === null) && (options.masterTimeout !== null)) { reportProgress('WAITING_FOR_FREE_BUS'); const datagram = await connection.waitForFreeBus(); // TODO: optional timeout? if (datagram) { state.masterAddress = datagram.sourceAddress; } else { state.masterAddress = null; } } await check(); let contactMaster; if (state.masterAddress === null) { contactMaster = false; } else if (state.masterAddress === address) { contactMaster = false; } else if (state.masterLastContacted === null) { contactMaster = true; } else if ((Date.now() - state.masterLastContacted) >= options.masterTimeout) { contactMaster = true; } else { contactMaster = false; } if (contactMaster) { reportProgress('CONTACTING_MASTER'); state.masterLastContacted = Date.now(); await connection.getValueById(state.masterAddress, 0, { timeout: 500, tries: 1, }); } await check(); if (state.masterAddress === address) { state.masterLastContacted = Date.now(); } if (_.isNumber(valueInfo.valueIndex)) { // nop } else if (_.isNumber(valueInfo.valueIdHash)) { reportProgress('LOOKING_UP_VALUE'); const datagram = await connection.getValueIdByIdHash(address, valueInfo.valueIdHash, options.actionOptions); if (datagram && datagram.valueId) { valueInfo.valueIndex = datagram.valueId; } } await check(); if (state.masterAddress === address) { state.masterLastContacted = Date.now(); } if (!_.isNumber(valueInfo.valueIndex)) { result = null; } else if (options.action === 'get') { reportProgress('GETTING_VALUE'); result = await connection.getValueById(address, valueInfo.valueIndex, options.actionOptions); } else if (options.action === 'set') { reportProgress('SETTING_VALUE'); result = await connection.setValueById(address, valueInfo.valueIndex, value, options.actionOptions); } else { throw new Error('Unknown action "' + options.action + '"'); } if (result) { break; } } } finally { if (timer) { clearTimeout(timer); timer = null; } } return result; }; return new Promise((resolve, reject) => { doWork(resolve, reject).then(resolve, reject); }); } } 
Object.assign(ConnectionCustomizer.prototype, /** @lends ConnectionCustomizer.prototype */ { /** * The connection to use for transfer of the configuration values. * @type {Connection} */ connection: null, /** * Maximum number of optimization rounds for {@link transceiveConfiguration}. * @type {number} * @default 10 */ maxRounds: 10, /** * Amount of retries to transceive one value. * Between two tries the VBus is released and then re-acquired. * @type {number} * @default 2 */ triesPerValue: 2, /** * Timeout in milliseconds after which the transceive times out. * @type {number} * @default 30000 */ timeoutPerValue: 30000, /** * Interval in milliseconds in which * the VBus master is contacted to reissue the VBus clearance. * @type {number} * @default 8000 */ masterTimeout: 8000, }); module.exports = ConnectionCustomizer;
src/connection-customizer.js
/*! resol-vbus | Copyright (c) 2013-2018, Daniel Wippermann | MIT license */ 'use strict'; const Customizer = require('./customizer'); const _ = require('./lodash'); const optionKeys = [ 'connection', 'maxRounds', 'triesPerValue', 'timeoutPerValue', 'masterTimeout', ]; class ConnectionCustomizer extends Customizer { /** * Constructs a new ConnectionCustomizer instance and optionally initializes its * members with the given values. * * @constructs * @augments Customizer * @param {object} [options] Initialization values for this instance's members * @param {number} [options.connection] {@link ConnectionCustomizer#connection} * @param {number} [options.maxRounds] {@link ConnectionCustomizer#maxRounds} * @param {number} [options.triesPerValue] {@link ConnectionCustomizer#triesPerValue} * @param {number} [options.timeoutPerValue] {@link ConnectionCustomizer#timeoutPerValue} * @param {number} [options.masterTimeout] {@link ConnectionCustomizer#masterTimeout} * * @classdesc * A ConnectionCustomizer uses an established connection to a device * to transfer sets of configuration values over it. */ constructor(options) { super(options); _.extend(this, _.pick(options, optionKeys)); } /** * Load a set of configuration values from a device. * * See {@link Customizer#loadConfiguration} for details. */ async _loadConfiguration(configuration, options) { options = _.defaults({}, options, { action: 'get', }); const callback = (config, round) => { if (options.optimize) { return this._optimizeLoadConfiguration(config); } else { if (round === 1) { _.forEach(configuration, (value) => { value.pending = true; }); return configuration; } else { return config; } } }; return this.transceiveConfiguration(options, callback); } /** * Save a set of configuration values to a device. * * See {@link Customizer#saveConfiguration} for details. */ async _saveConfiguration(newConfiguration, oldConfigurstion, options) { options = _.defaults({}, options, { action: 'set', actionOptions: { save: true, }, }); const callback = (config, round) => { if (options.optimize) { if (round === 1) { return this._optimizeSaveConfiguration(newConfiguration, oldConfigurstion); } else { return this._optimizeSaveConfiguration(newConfiguration, config); } } else { if (round === 1) { _.forEach(newConfiguration, (value) => { value.pending = true; }); return newConfiguration; } else { return config; } } }; return this.transceiveConfiguration(options, callback); } /** * Transceives a controller configuration set, handling timeouts, retries etc. * * @param {object} options Options * @param {number} [options.maxRounds] {@link ConnectionCustomizer#maxRounds} * @param {number} [options.triesPerValue] {@link ConnectionCustomizer#triesPerValue} * @param {number} [options.timeoutPerValue] {@link ConnectionCustomizer#timeoutPerValue} * @param {number} [options.masterTimeout] {@link ConnectionCustomizer#masterTimeout} * @param {string} options.action Action to perform, can be `'get'` or `'set'`. * @param {object} [options.actionOptions] Options object to forward to the action to perform. * @param {function} [options.reportProgress] Callback to inform about progress. * @param {function} [options.checkCanceled] Callback to check whether the operation should be canceled. * @param {function} optimizerCallback Callback to optimize configuration between rounds. * @return {object} Promise that resolves to the configuration or `null` on timeout. 
*/ async transceiveConfiguration(options, optimizerCallback) { if (_.isFunction(options)) { optimizerCallback = options; options = null; } options = _.defaults({}, options, { maxRounds: this.maxRounds, triesPerValue: this.triesPerValue, timeoutPerValue: this.timeoutPerValue, masterTimeout: this.masterTimeout, action: null, actionOptions: null, reportProgress: null, checkCanceled: null, }); const { connection } = this; const address = this.deviceAddress; async function check() { if (options.checkCanceled) { if (await options.checkCanceled()) { throw new Error('Canceled'); } } await connection.createConnectedPromise(); } let config = null; const state = { masterAddress: null, masterLastContacted: null, }; const reportProgress = function(progress) { if (options.reportProgress) { options.reportProgress(progress); } }; for (let round = 1; round <= options.maxRounds; round++) { await check(); reportProgress({ message: 'OPTIMIZING_VALUES', round, }); config = await optimizerCallback(config, round); await check(); const pendingValues = config.filter((value) => { return value.pending; }); if (pendingValues.length > 0) { for (let index = 0; index < pendingValues.length; index++) { const valueInfo = pendingValues [index++]; let reportProgress; if (options.reportProgress) { reportProgress = (progress) => { progress = _.extend({}, progress, { valueId: valueInfo.valueId, valueIndex: valueInfo.valueIndex, valueIdHash: valueInfo.valueIdHash, valueNr: index, valueCount: pendingValues.length, }); return options.reportProgress(progress); }; } await check(); const datagram = await this.transceiveValue(valueInfo, valueInfo.value, { triesPerValue: options.triesPerValue, timeoutPerValue: options.timeoutPerValue, action: options.action, actionOptions: options.actionOptions, reportProgress, }, state); valueInfo.pending = false; valueInfo.transceived = !!datagram; if (datagram) { valueInfo.value = datagram.value; } } } else { break; } } if (state.masterLastContacted !== null) { reportProgress({ message: 'RELEASING_BUS', }); await connection.releaseBus(address); } return config; } /** * Transceive a controller value over this connection, handling * timeouts, retries etc. * * @param {object|number} valueInfoOrIndex Value info object or value index * @param {number} valueInfo.valueIndex Value index * @param {number} valueInfo.valueIdHash Value ID hash * @param {number} value Value * @param {object} options Options * @param {number} options.triesPerValue {@link ConnectionCustomizer#triesPerValue} * @param {number} options.timeoutPerValue {@link ConnectionCustomizer#timeoutPerValue} * @param {number} options.masterTimeout {@link ConnectionCustomizer#masterTimeout} * @param {string} options.action Action to perform, can be `'get'` or `'set'`. * @param {object} [options.actionOptions] Options object to forward to the action to perform. * @param {function} [options.reportProgress] Callback to inform about progress. * @param {function} [options.checkCanceled] Callback to check whether the operation should be canceled. * @param {object} state State to share between multiple calls to this method. * @returns {object} Promise that resolves with the datagram received or `null` on timeout. 
*/ async transceiveValue(valueInfo, value, options, state) { const doWork = async (resolve, reject) => { let timer; const done = function(err, result) { if (timer) { clearTimeout(timer); timer = null; } if (err) { reject(err); } else { resolve(result); } }; if (!_.isObject(valueInfo)) { valueInfo = { valueIndex: valueInfo, }; } if (state === undefined) { state = {}; } options = _.defaults({}, options, { triesPerValue: this.triesPerValue, timeoutPerValue: this.timeoutPerValue, masterTimeout: this.masterTimeout, action: null, actionOptions: null, reportProgress: null, checkCanceled: null, }); state = _.defaults(state, { masterAddress: this.deviceAddress, masterLastContacted: Date.now(), }); const { connection } = this; const address = this.deviceAddress; async function check() { if (options.checkCanceled) { if (await options.checkCanceled()) { reject(new Error('Canceled')); } } await connection.createConnectedPromise(); } const onTimeout = function() { done(null, null); }; timer = setTimeout(onTimeout, options.timeoutPerValue); let result; for (let tries = 1; tries <= options.triesPerValue; tries++) { const reportProgress = function(message) { if (options.reportProgress) { options.reportProgress({ message, tries, valueIndex: valueInfo.valueIndex, valueInfo, }); } }; await check(); if ((tries > 1) && (state.masterLastContacted !== null)) { reportProgress('RELEASING_BUS'); state.masterLastContacted = null; await connection.releaseBus(state.masterAddress); } await check(); if ((state.masterLastContacted === null) && (options.masterTimeout !== null)) { reportProgress('WAITING_FOR_FREE_BUS'); const datagram = await connection.waitForFreeBus(); // TODO: optional timeout? if (datagram) { state.masterAddress = datagram.sourceAddress; } else { state.masterAddress = null; } } await check(); let contactMaster; if (state.masterAddress === null) { contactMaster = false; } else if (state.masterAddress === address) { contactMaster = false; } else if (state.masterLastContacted === null) { contactMaster = true; } else if ((Date.now() - state.masterLastContacted) >= options.masterTimeout) { contactMaster = true; } else { contactMaster = false; } if (contactMaster) { reportProgress('CONTACTING_MASTER'); state.masterLastContacted = Date.now(); await connection.getValueById(state.masterAddress, 0, { timeout: 500, tries: 1, }); } await check(); if (state.masterAddress === address) { state.masterLastContacted = Date.now(); } if (_.isNumber(valueInfo.valueIndex)) { // nop } else if (_.isNumber(valueInfo.valueIdHash)) { reportProgress('LOOKING_UP_VALUE'); const datagram = await connection.getValueIdByIdHash(address, valueInfo.valueIdHash, options.actionOptions); if (datagram && datagram.valueId) { valueInfo.valueIndex = datagram.valueId; } } await check(); if (state.masterAddress === address) { state.masterLastContacted = Date.now(); } if (!_.isNumber(valueInfo.valueIndex)) { result = null; } else if (options.action === 'get') { reportProgress('GETTING_VALUE'); result = await connection.getValueById(address, valueInfo.valueIndex, options.actionOptions); } else if (options.action === 'set') { reportProgress('SETTING_VALUE'); result = await connection.setValueById(address, valueInfo.valueIndex, value, options.actionOptions); } else { throw new Error('Unknown action "' + options.action + '"'); } if (result) { break; } } return result; }; return new Promise((resolve, reject) => { doWork(resolve, reject).then(resolve, reject); }); } } Object.assign(ConnectionCustomizer.prototype, /** @lends ConnectionCustomizer.prototype 
*/ { /** * The connection to use for transfer of the configuration values. * @type {Connection} */ connection: null, /** * Maximum number of optimization rounds for {@link transceiveConfiguration}. * @type {number} * @default 10 */ maxRounds: 10, /** * Amount of retries to transceive one value. * Between two tries the VBus is released and then re-acquired. * @type {number} * @default 2 */ triesPerValue: 2, /** * Timeout in milliseconds after which the transceive times out. * @type {number} * @default 30000 */ timeoutPerValue: 30000, /** * Interval in milliseconds in which * the VBus master is contacted to reissue the VBus clearance. * @type {number} * @default 8000 */ masterTimeout: 8000, }); module.exports = ConnectionCustomizer;
Fix dangling timer bug.
src/connection-customizer.js
Fix dangling timer bug.
<ide><path>src/connection-customizer.js <ide> async function check() { <ide> if (options.checkCanceled) { <ide> if (await options.checkCanceled()) { <del> reject(new Error('Canceled')); <add> done(new Error('Canceled')); <ide> } <ide> } <ide> <ide> timer = setTimeout(onTimeout, options.timeoutPerValue); <ide> <ide> let result; <del> for (let tries = 1; tries <= options.triesPerValue; tries++) { <del> const reportProgress = function(message) { <del> if (options.reportProgress) { <del> options.reportProgress({ <del> message, <del> tries, <del> valueIndex: valueInfo.valueIndex, <del> valueInfo, <add> try { <add> for (let tries = 1; tries <= options.triesPerValue; tries++) { <add> const reportProgress = function(message) { <add> if (options.reportProgress) { <add> options.reportProgress({ <add> message, <add> tries, <add> valueIndex: valueInfo.valueIndex, <add> valueInfo, <add> }); <add> } <add> }; <add> <add> await check(); <add> <add> if ((tries > 1) && (state.masterLastContacted !== null)) { <add> reportProgress('RELEASING_BUS'); <add> <add> state.masterLastContacted = null; <add> <add> await connection.releaseBus(state.masterAddress); <add> } <add> <add> await check(); <add> <add> if ((state.masterLastContacted === null) && (options.masterTimeout !== null)) { <add> reportProgress('WAITING_FOR_FREE_BUS'); <add> <add> const datagram = await connection.waitForFreeBus(); // TODO: optional timeout? <add> <add> if (datagram) { <add> state.masterAddress = datagram.sourceAddress; <add> } else { <add> state.masterAddress = null; <add> } <add> } <add> <add> await check(); <add> <add> let contactMaster; <add> if (state.masterAddress === null) { <add> contactMaster = false; <add> } else if (state.masterAddress === address) { <add> contactMaster = false; <add> } else if (state.masterLastContacted === null) { <add> contactMaster = true; <add> } else if ((Date.now() - state.masterLastContacted) >= options.masterTimeout) { <add> contactMaster = true; <add> } else { <add> contactMaster = false; <add> } <add> if (contactMaster) { <add> reportProgress('CONTACTING_MASTER'); <add> <add> state.masterLastContacted = Date.now(); <add> <add> await connection.getValueById(state.masterAddress, 0, { <add> timeout: 500, <add> tries: 1, <ide> }); <ide> } <del> }; <del> <del> await check(); <del> <del> if ((tries > 1) && (state.masterLastContacted !== null)) { <del> reportProgress('RELEASING_BUS'); <del> <del> state.masterLastContacted = null; <del> <del> await connection.releaseBus(state.masterAddress); <del> } <del> <del> await check(); <del> <del> if ((state.masterLastContacted === null) && (options.masterTimeout !== null)) { <del> reportProgress('WAITING_FOR_FREE_BUS'); <del> <del> const datagram = await connection.waitForFreeBus(); // TODO: optional timeout?
<del> <del> if (datagram) { <del> state.masterAddress = datagram.sourceAddress; <add> <add> await check(); <add> <add> if (state.masterAddress === address) { <add> state.masterLastContacted = Date.now(); <add> } <add> <add> if (_.isNumber(valueInfo.valueIndex)) { <add> // nop <add> } else if (_.isNumber(valueInfo.valueIdHash)) { <add> reportProgress('LOOKING_UP_VALUE'); <add> <add> const datagram = await connection.getValueIdByIdHash(address, valueInfo.valueIdHash, options.actionOptions); <add> <add> if (datagram && datagram.valueId) { <add> valueInfo.valueIndex = datagram.valueId; <add> } <add> } <add> <add> await check(); <add> <add> if (state.masterAddress === address) { <add> state.masterLastContacted = Date.now(); <add> } <add> <add> if (!_.isNumber(valueInfo.valueIndex)) { <add> result = null; <add> } else if (options.action === 'get') { <add> reportProgress('GETTING_VALUE'); <add> <add> result = await connection.getValueById(address, valueInfo.valueIndex, options.actionOptions); <add> } else if (options.action === 'set') { <add> reportProgress('SETTING_VALUE'); <add> <add> result = await connection.setValueById(address, valueInfo.valueIndex, value, options.actionOptions); <ide> } else { <del> state.masterAddress = null; <del> } <del> } <del> <del> await check(); <del> <del> let contactMaster; <del> if (state.masterAddress === null) { <del> contactMaster = false; <del> } else if (state.masterAddress === address) { <del> contactMaster = false; <del> } else if (state.masterLastContacted === null) { <del> contactMaster = true; <del> } else if ((Date.now() - state.masterLastContacted) >= options.masterTimeout) { <del> contactMaster = true; <del> } else { <del> contactMaster = false; <del> } <del> if (contactMaster) { <del> reportProgress('CONTACTING_MASTER'); <del> <del> state.masterLastContacted = Date.now(); <del> <del> await connection.getValueById(state.masterAddress, 0, { <del> timeout: 500, <del> tries: 1, <del> }); <del> } <del> <del> await check(); <del> <del> if (state.masterAddress === address) { <del> state.masterLastContacted = Date.now(); <del> } <del> <del> if (_.isNumber(valueInfo.valueIndex)) { <del> // nop <del> } else if (_.isNumber(valueInfo.valueIdHash)) { <del> reportProgress('LOOKING_UP_VALUE'); <del> <del> const datagram = await connection.getValueIdByIdHash(address, valueInfo.valueIdHash, options.actionOptions); <del> <del> if (datagram && datagram.valueId) { <del> valueInfo.valueIndex = datagram.valueId; <del> } <del> } <del> <del> await check(); <del> <del> if (state.masterAddress === address) { <del> state.masterLastContacted = Date.now(); <del> } <del> <del> if (!_.isNumber(valueInfo.valueIndex)) { <del> result = null; <del> } else if (options.action === 'get') { <del> reportProgress('GETTING_VALUE'); <del> <del> result = await connection.getValueById(address, valueInfo.valueIndex, options.actionOptions); <del> } else if (options.action === 'set') { <del> reportProgress('SETTING_VALUE'); <del> <del> result = await connection.setValueById(address, valueInfo.valueIndex, value, options.actionOptions); <del> } else { <del> throw new Error('Unknown action "' + options.action + '"'); <del> } <del> <del> if (result) { <del> break; <add> throw new Error('Unknown action "' + options.action + '"'); <add> } <add> <add> if (result) { <add> break; <add> } <add> } <add> } finally { <add> if (timer) { <add> clearTimeout(timer); <add> timer = null; <ide> } <ide> } <ide>
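The fix above wraps the retry loop in try/finally so the setTimeout timer is cleared on every exit path (success, thrown error, or cancellation), instead of only when the done() callback happens to run; it also routes the cancellation through done() so the timer is cleared there too. The sketch below shows the same discipline in Java, this document's dominant language. It is an illustrative analogue, not code from any of the quoted repositories; runWithTimeout, TimerCleanupSketch, and the null-on-timeout convention are assumptions chosen to mirror the JavaScript above.

```java
// Illustrative Java analogue of the dangling-timer fix: arm a timeout,
// then guarantee it is cancelled in finally, whatever the work does.
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class TimerCleanupSketch {
    private static final ScheduledExecutorService SCHEDULER =
            Executors.newSingleThreadScheduledExecutor();

    /** Returns the work's result, or null when the timeout fires first. */
    static <T> T runWithTimeout(Callable<T> work, long timeoutMs) throws Exception {
        CompletableFuture<T> result = new CompletableFuture<>();
        // Like "timer = setTimeout(onTimeout, ...)" above: resolve null on timeout.
        ScheduledFuture<?> timer = SCHEDULER.schedule(
                () -> result.complete(null), timeoutMs, TimeUnit.MILLISECONDS);
        try {
            result.complete(work.call()); // may lose the race against the timer
            return result.get();
        } finally {
            timer.cancel(false);          // runs on success and on exception alike
        }
    }
}
```

Without the finally block, a work item that throws would leave the scheduled task pending, which is exactly the leak the commit message calls a dangling timer.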
Java
apache-2.0
333816fba47963e11ca1fbacc512e78877121b97
0
MatheMatrix/zstack,AlanJager/zstack,AlanJager/zstack,MatheMatrix/zstack,zstackorg/zstack,MatheMatrix/zstack,zstackorg/zstack,zstackio/zstack,zstackio/zstack,zstackio/zstack,AlanJager/zstack
package org.zstack.compute.vm; import org.apache.commons.lang.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.transaction.annotation.Transactional; import org.zstack.compute.allocator.HostAllocatorManager; import org.zstack.core.Platform; import org.zstack.core.cascade.CascadeConstant; import org.zstack.core.cascade.CascadeFacade; import org.zstack.core.cloudbus.*; import org.zstack.core.componentloader.PluginRegistry; import org.zstack.core.db.*; import org.zstack.core.db.SimpleQuery.Op; import org.zstack.core.defer.Defer; import org.zstack.core.defer.Deferred; import org.zstack.core.jsonlabel.JsonLabel; import org.zstack.core.thread.ChainTask; import org.zstack.core.thread.SyncTaskChain; import org.zstack.core.thread.ThreadFacade; import org.zstack.core.workflow.FlowChainBuilder; import org.zstack.core.workflow.ShareFlow; import org.zstack.header.allocator.*; import org.zstack.header.apimediator.ApiMessageInterceptionException; import org.zstack.header.cluster.ClusterInventory; import org.zstack.header.cluster.ClusterState; import org.zstack.header.cluster.ClusterVO; import org.zstack.header.cluster.ClusterVO_; import org.zstack.header.configuration.*; import org.zstack.header.core.Completion; import org.zstack.header.core.NoErrorCompletion; import org.zstack.header.core.NopeCompletion; import org.zstack.header.core.ReturnValueCompletion; import org.zstack.header.core.workflow.*; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.errorcode.OperationFailureException; import org.zstack.header.errorcode.SysErrors; import org.zstack.header.exception.CloudRuntimeException; import org.zstack.header.host.*; import org.zstack.header.image.ImageConstant.ImageMediaType; import org.zstack.header.image.*; import org.zstack.header.message.*; import org.zstack.header.network.l3.*; import org.zstack.header.storage.primary.*; import org.zstack.header.storage.snapshot.MarkRootVolumeAsSnapshotMsg; import org.zstack.header.storage.snapshot.VolumeSnapshotConstant; import org.zstack.header.vm.*; import org.zstack.header.vm.ChangeVmMetaDataMsg.AtomicHostUuid; import org.zstack.header.vm.ChangeVmMetaDataMsg.AtomicVmState; import org.zstack.header.vm.VmAbnormalLifeCycleStruct.VmAbnormalLifeCycleOperation; import org.zstack.header.vm.VmInstanceConstant.Params; import org.zstack.header.vm.VmInstanceConstant.VmOperation; import org.zstack.header.vm.VmInstanceDeletionPolicyManager.VmInstanceDeletionPolicy; import org.zstack.header.vm.VmInstanceSpec.CdRomSpec; import org.zstack.header.vm.VmInstanceSpec.HostName; import org.zstack.header.vm.VmInstanceSpec.IsoSpec; import org.zstack.header.vm.cdrom.*; import org.zstack.header.volume.*; import org.zstack.identity.Account; import org.zstack.identity.AccountManager; import org.zstack.tag.SystemTagCreator; import org.zstack.tag.SystemTagUtils; import org.zstack.utils.*; import org.zstack.utils.data.SizeUnit; import org.zstack.utils.function.ForEachFunction; import org.zstack.utils.function.Function; import org.zstack.utils.gson.JSONObjectUtil; import org.zstack.utils.logging.CLogger; import javax.persistence.TypedQuery; import java.util.*; import java.util.stream.Collectors; import static org.zstack.core.Platform.err; import static org.zstack.core.Platform.operr; import static org.zstack.utils.CollectionDSL.*; public class VmInstanceBase extends AbstractVmInstance { protected static final CLogger logger = 
Utils.getLogger(VmInstanceBase.class); @Autowired protected CloudBus bus; @Autowired protected DatabaseFacade dbf; @Autowired protected ThreadFacade thdf; @Autowired protected VmInstanceManager vmMgr; @Autowired protected VmInstanceExtensionPointEmitter extEmitter; @Autowired protected VmInstanceNotifyPointEmitter notifyEmitter; @Autowired protected CascadeFacade casf; @Autowired protected AccountManager acntMgr; @Autowired protected EventFacade evtf; @Autowired protected PluginRegistry pluginRgty; @Autowired protected VmInstanceDeletionPolicyManager deletionPolicyMgr; @Autowired private HostAllocatorManager hostAllocatorMgr; protected VmInstanceVO self; protected VmInstanceVO originalCopy; protected String syncThreadName; private void checkState(final String hostUuid, final NoErrorCompletion completion) { CheckVmStateOnHypervisorMsg msg = new CheckVmStateOnHypervisorMsg(); msg.setVmInstanceUuids(list(self.getUuid())); msg.setHostUuid(hostUuid); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid); bus.send(msg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.warn(String.format("unable to check state of the vm[uuid:%s] on the host[uuid:%s], %s;" + "put the VM into the Unknown state", self.getUuid(), hostUuid, reply.getError())); changeVmStateInDb(VmInstanceStateEvent.unknown); completion.done(); return; } CheckVmStateOnHypervisorReply r = reply.castReply(); String state = r.getStates().get(self.getUuid()); if (state == null) { changeVmStateInDb(VmInstanceStateEvent.unknown); completion.done(); return; } if (VmInstanceState.Running.toString().equals(state)) { changeVmStateInDb(VmInstanceStateEvent.running, ()-> self.setHostUuid(hostUuid)); } else if (VmInstanceState.Stopped.toString().equals(state) && self.getState().equals(VmInstanceState.Destroying)) { changeVmStateInDb(VmInstanceStateEvent.destroyed); } else if (VmInstanceState.Stopped.toString().equals(state)) { changeVmStateInDb(VmInstanceStateEvent.stopped); } else if (VmInstanceState.Paused.toString().equals(state)) { changeVmStateInDb(VmInstanceStateEvent.paused); } else { throw new CloudRuntimeException(String.format( "CheckVmStateOnHypervisorMsg should only report states[Running, Paused or Stopped]," + "but it reports %s for the vm[uuid:%s] on the host[uuid:%s]", state, self.getUuid(), hostUuid)); } completion.done(); } }); } protected void destroy(final VmInstanceDeletionPolicy deletionPolicy, Message msg, final Completion completion) { if (deletionPolicy == VmInstanceDeletionPolicy.DBOnly) { completion.success(); return; } if (deletionPolicy == VmInstanceDeletionPolicy.KeepVolume && self.getState().equals(VmInstanceState.Destroyed)) { completion.success(); return; } final VmInstanceInventory inv = VmInstanceInventory.valueOf(self); VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Destroy); if (msg instanceof ReleaseResourceMessage) { spec.setIgnoreResourceReleaseFailure(((ReleaseResourceMessage) msg).isIgnoreResourceReleaseFailure()); } self = changeVmStateInDb(VmInstanceStateEvent.destroying); FlowChain chain = getDestroyVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("destroy-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(Params.DeletionPolicy, deletionPolicy); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { completion.success(); } }).error(new FlowErrorHandler(completion) { 
@Override public void handle(final ErrorCode errCode, Map data) { if (originalCopy.getState() == VmInstanceState.Running) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); } else { changeVmStateInDb(VmInstanceStateEvent.unknown); completion.fail(errCode); } } }).start(); } protected VmInstanceVO getSelf() { return self; } protected VmInstanceInventory getSelfInventory() { return VmInstanceInventory.valueOf(self); } public VmInstanceBase(VmInstanceVO vo) { this.self = vo; this.syncThreadName = "Vm-" + vo.getUuid(); this.originalCopy = ObjectUtils.newAndCopy(vo, vo.getClass()); } protected VmInstanceVO refreshVO() { return refreshVO(false); } protected VmInstanceVO refreshVO(boolean noException) { VmInstanceVO vo = self; self = dbf.findByUuid(self.getUuid(), VmInstanceVO.class); if (self == null && noException) { return null; } if (self == null) { throw new OperationFailureException(operr("vm[uuid:%s, name:%s] has been deleted", vo.getUuid(), vo.getName())); } originalCopy = ObjectUtils.newAndCopy(vo, vo.getClass()); return self; } protected FlowChain getCreateVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getCreateVmWorkFlowChain(inv); } protected FlowChain getStopVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getStopVmWorkFlowChain(inv); } protected FlowChain getRebootVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getRebootVmWorkFlowChain(inv); } protected FlowChain getStartVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getStartVmWorkFlowChain(inv); } protected FlowChain getDestroyVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getDestroyVmWorkFlowChain(inv); } protected FlowChain getExpungeVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getExpungeVmWorkFlowChain(inv); } protected FlowChain getMigrateVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getMigrateVmWorkFlowChain(inv); } protected FlowChain getAttachUninstantiatedVolumeWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getAttachUninstantiatedVolumeWorkFlowChain(inv); } protected FlowChain getAttachIsoWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getAttachIsoWorkFlowChain(inv); } protected FlowChain getDetachIsoWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getDetachIsoWorkFlowChain(inv); } protected FlowChain getPauseVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getPauseWorkFlowChain(inv); } protected FlowChain getResumeVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getResumeVmWorkFlowChain(inv); } protected VmInstanceVO changeVmStateInDb(VmInstanceStateEvent stateEvent) { return changeVmStateInDb(stateEvent, null); } protected VmInstanceVO changeVmStateInDb(VmInstanceStateEvent stateEvent, Runnable runnable) { VmInstanceState bs = self.getState(); final VmInstanceState state = self.getState().nextState(stateEvent); SQLBatch sql = new SQLBatch(){ @Override protected void scripts() { self = findByUuid(self.getUuid(), self.getClass()); if (runnable != null) { runnable.run(); } if (state == VmInstanceState.Stopped) { // cleanup the hostUuid if the VM is stopped if (self.getHostUuid() != null) { self.setLastHostUuid(self.getHostUuid()); } self.setHostUuid(null); } self.setState(state); self = merge(self); } }; try { sql.execute(); } catch (DataIntegrityViolationException e){ sql.execute(); } if (bs != state) { logger.debug(String.format("vm[uuid:%s] changed state from %s to %s in db", self.getUuid(), bs, state)); VmCanonicalEvents.VmStateChangedData 
data = new VmCanonicalEvents.VmStateChangedData(); data.setVmUuid(self.getUuid()); data.setOldState(bs.toString()); data.setNewState(state.toString()); data.setInventory(getSelfInventory()); evtf.fire(VmCanonicalEvents.VM_FULL_STATE_CHANGED_PATH, data); VmInstanceInventory inv = getSelfInventory(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmStateChangedExtensionPoint.class), new ForEachFunction<VmStateChangedExtensionPoint>() { @Override public void run(VmStateChangedExtensionPoint ext) { ext.vmStateChanged(inv, bs, self.getState()); } }); //TODO: remove this notifyEmitter.notifyVmStateChange(VmInstanceInventory.valueOf(self), bs, state); } return self; } @Override @MessageSafe public void handleMessage(final Message msg) { if (msg instanceof APIMessage) { handleApiMessage((APIMessage) msg); } else { handleLocalMessage(msg); } } protected void handleLocalMessage(Message msg) { if (msg instanceof InstantiateNewCreatedVmInstanceMsg) { handle((InstantiateNewCreatedVmInstanceMsg) msg); } else if (msg instanceof StartVmInstanceMsg) { handle((StartVmInstanceMsg) msg); } else if (msg instanceof StopVmInstanceMsg) { handle((StopVmInstanceMsg) msg); } else if (msg instanceof RebootVmInstanceMsg) { handle((RebootVmInstanceMsg) msg); } else if (msg instanceof ChangeVmStateMsg) { handle((ChangeVmStateMsg) msg); } else if (msg instanceof DestroyVmInstanceMsg) { handle((DestroyVmInstanceMsg) msg); } else if (msg instanceof AttachNicToVmMsg) { handle((AttachNicToVmMsg) msg); } else if (msg instanceof CreateTemplateFromVmRootVolumeMsg) { handle((CreateTemplateFromVmRootVolumeMsg) msg); } else if (msg instanceof VmInstanceDeletionMsg) { handle((VmInstanceDeletionMsg) msg); } else if (msg instanceof VmAttachNicMsg) { handle((VmAttachNicMsg) msg); } else if (msg instanceof MigrateVmMsg) { handle((MigrateVmMsg) msg); } else if (msg instanceof DetachDataVolumeFromVmMsg) { handle((DetachDataVolumeFromVmMsg) msg); } else if (msg instanceof AttachDataVolumeToVmMsg) { handle((AttachDataVolumeToVmMsg) msg); } else if (msg instanceof GetVmMigrationTargetHostMsg) { handle((GetVmMigrationTargetHostMsg) msg); } else if (msg instanceof ChangeVmMetaDataMsg) { handle((ChangeVmMetaDataMsg) msg); } else if (msg instanceof LockVmInstanceMsg) { handle((LockVmInstanceMsg) msg); } else if (msg instanceof DetachNicFromVmMsg) { handle((DetachNicFromVmMsg) msg); } else if (msg instanceof VmStateChangedOnHostMsg) { handle((VmStateChangedOnHostMsg) msg); } else if (msg instanceof VmCheckOwnStateMsg) { handle((VmCheckOwnStateMsg) msg); } else if (msg instanceof ExpungeVmMsg) { handle((ExpungeVmMsg) msg); } else if (msg instanceof HaStartVmInstanceMsg) { handle((HaStartVmInstanceMsg) msg); } else if (msg instanceof OverlayMessage) { handle((OverlayMessage) msg); } else if (msg instanceof ReimageVmInstanceMsg) { handle((ReimageVmInstanceMsg) msg); } else if (msg instanceof GetVmStartingCandidateClustersHostsMsg) { handle((GetVmStartingCandidateClustersHostsMsg) msg); } else if (msg instanceof MigrateVmInnerMsg) { handle((MigrateVmInnerMsg) msg); } else if (msg instanceof AddL3NetworkToVmNicMsg) { handle((AddL3NetworkToVmNicMsg) msg); } else if (msg instanceof DeleteL3NetworkFromVmNicMsg) { handle((DeleteL3NetworkFromVmNicMsg) msg); } else if (msg instanceof DetachIsoFromVmInstanceMsg) { handle((DetachIsoFromVmInstanceMsg) msg); } else if (msg instanceof DeleteVmCdRomMsg) { handle((DeleteVmCdRomMsg) msg); } else if (msg instanceof CreateVmCdRomMsg) { handle((CreateVmCdRomMsg) msg); } else { VmInstanceBaseExtensionFactory 
ext = vmMgr.getVmInstanceBaseExtensionFactory(msg); if (ext != null) { VmInstance v = ext.getVmInstance(self); v.handleMessage(msg); } else { bus.dealWithUnknownMessage(msg); } } } private void handle(CreateVmCdRomMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { CreateVmCdRomReply reply = new CreateVmCdRomReply(); doCreateVmCdRom(msg, new ReturnValueCompletion<VmCdRomInventory>(msg) { @Override public void success(VmCdRomInventory inv) { reply.setInventory(inv); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("create-vm-%s-cd-rom", msg.getVmInstanceUuid()); } }); } private void handle(MigrateVmInnerMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("migrate-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { migrateVm(msg, new Completion(chain) { @Override public void success() { MigrateVmInnerReply evt = new MigrateVmInnerReply(); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.reply(msg, evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { MigrateVmInnerReply evt = new MigrateVmInnerReply(); evt.setError(errorCode); bus.reply(msg, evt); chain.next(); } }); } }); } private void handle(final APIGetVmStartingCandidateClustersHostsMsg msg) { APIGetVmStartingCandidateClustersHostsReply reply = new APIGetVmStartingCandidateClustersHostsReply(); final GetVmStartingCandidateClustersHostsMsg gmsg = new GetVmStartingCandidateClustersHostsMsg(); gmsg.setUuid(msg.getUuid()); bus.makeLocalServiceId(gmsg, VmInstanceConstant.SERVICE_ID); bus.send(gmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply re) { GetVmStartingCandidateClustersHostsReply greply = (GetVmStartingCandidateClustersHostsReply) re; if (!re.isSuccess()) { reply.setSuccess(false); reply.setError(re.getError()); if (greply.getHostInventories() != null) { reply.setHostInventories(greply.getHostInventories()); reply.setClusterInventories(greply.getClusterInventories()); } } else { reply.setHostInventories(greply.getHostInventories()); reply.setClusterInventories(greply.getClusterInventories()); } bus.reply(msg, reply); } }); } private void handle(final GetVmStartingCandidateClustersHostsMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { GetVmStartingCandidateClustersHostsReply reply = new GetVmStartingCandidateClustersHostsReply(); getStartingCandidateHosts(msg, new ReturnValueCompletion<AllocateHostDryRunReply>(chain) { @Override public void success(AllocateHostDryRunReply returnValue) { List<HostInventory> hosts = ((AllocateHostDryRunReply) returnValue).getHosts(); if (!hosts.isEmpty()) { List<String> cuuids = CollectionUtils.transformToList(hosts, new Function<String, HostInventory>() { @Override public String call(HostInventory arg) { return arg.getClusterUuid(); } }); SimpleQuery<ClusterVO> cq = dbf.createQuery(ClusterVO.class); cq.add(ClusterVO_.uuid, Op.IN, cuuids); List<ClusterVO> cvos = cq.list(); reply.setClusterInventories(ClusterInventory.valueOf(cvos)); reply.setHostInventories(hosts); } else { 
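// the dry-run allocation found no candidate host: reply with empty host and cluster lists rather than an error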
reply.setHostInventories(hosts); reply.setClusterInventories(new ArrayList<>()); } bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { if (HostAllocatorError.NO_AVAILABLE_HOST.toString().equals(errorCode.getCode())) { reply.setHostInventories(new ArrayList<>()); reply.setClusterInventories(new ArrayList<>()); } else { reply.setError(errorCode); } reply.setSuccess(false); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "get-starting-candidate-hosts"; } }); } private void getStartingCandidateHosts(final NeedReplyMessage msg, final ReturnValueCompletion completion) { refreshVO(); ErrorCode err = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (err != null) { throw new OperationFailureException(err); } final DesignatedAllocateHostMsg amsg = new DesignatedAllocateHostMsg(); amsg.setCpuCapacity(self.getCpuNum()); amsg.setMemoryCapacity(self.getMemorySize()); amsg.setVmInstance(VmInstanceInventory.valueOf(self)); amsg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); amsg.setAllocatorStrategy(self.getAllocatorStrategy()); amsg.setVmOperation(VmOperation.Start.toString()); if (self.getImageUuid() != null && dbf.findByUuid(self.getImageUuid(), ImageVO.class) != null) { amsg.setImage(ImageInventory.valueOf(dbf.findByUuid(self.getImageUuid(), ImageVO.class))); } amsg.setL3NetworkUuids(VmNicHelper.getL3Uuids(VmNicInventory.valueOf(self.getVmNics()))); amsg.setDryRun(true); amsg.setListAllHosts(true); bus.send(amsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply re) { if (!re.isSuccess()) { completion.fail(re.getError()); } else { completion.success(re); } } }); } private void handle(final HaStartVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { refreshVO(); HaStartVmJudger judger; try { Class clz = Class.forName(msg.getJudgerClassName()); judger = (HaStartVmJudger) clz.newInstance(); } catch (Exception e) { throw new CloudRuntimeException(e); } final HaStartVmInstanceReply reply = new HaStartVmInstanceReply(); if (!judger.whetherStartVm(getSelfInventory())) { bus.reply(msg, reply); chain.next(); return; } logger.debug(String.format("HaStartVmJudger[%s] says the VM[uuid:%s, name:%s] is qualified for HA start, now we are starting it", judger.getClass(), self.getUuid(), self.getName())); SQL.New(VmInstanceVO.class).eq(VmInstanceVO_.uuid, self.getUuid()) .set(VmInstanceVO_.state, VmInstanceState.Stopped) .update(); startVm(msg, new Completion(msg, chain) { @Override public void success() { reply.setInventory(getSelfInventory()); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "ha-start-vm"; } }); } private void changeVmIp(final String l3Uuid, final String ip, final Completion completion) { final VmNicVO targetNic = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { for (UsedIpVO ip : arg.getUsedIps()) { if (ip.getL3NetworkUuid().equals(l3Uuid)) { return arg; } } return null; } }); if (targetNic == null) { throw new OperationFailureException(operr("the vm[uuid:%s] has no nic on the L3 network[uuid:%s]", self.getUuid(), l3Uuid)); } if (ip.equals(targetNic.getIp())) { completion.success(); return; } 
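// capture the IP being replaced before acquiring the new one, so it can be returned to the L3 network at the end of the flow chain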
final UsedIpInventory oldIp = new UsedIpInventory(); for (UsedIpVO ipvo : targetNic.getUsedIps()) { if (ipvo.getL3NetworkUuid().equals(l3Uuid)) { oldIp.setIp(ipvo.getIp()); oldIp.setGateway(ipvo.getGateway()); oldIp.setNetmask(ipvo.getNetmask()); oldIp.setL3NetworkUuid(ipvo.getL3NetworkUuid()); oldIp.setUuid(ipvo.getUuid()); } } final FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("change-vm-ip-to-%s-l3-%s-vm-%s", ip, l3Uuid, self.getUuid())); chain.then(new ShareFlow() { UsedIpInventory newIp; String oldIpUuid = oldIp.getUuid(); @Override public void setup() { flow(new Flow() { String __name__ = "acquire-new-ip"; @Override public void run(final FlowTrigger trigger, Map data) { AllocateIpMsg amsg = new AllocateIpMsg(); amsg.setL3NetworkUuid(l3Uuid); amsg.setRequiredIp(ip); bus.makeTargetServiceIdByResourceUuid(amsg, L3NetworkConstant.SERVICE_ID, l3Uuid); bus.send(amsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); } else { AllocateIpReply r = reply.castReply(); newIp = r.getIpInventory(); trigger.next(); } } }); } @Override public void rollback(FlowRollback trigger, Map data) { if (newIp != null) { ReturnIpMsg rmsg = new ReturnIpMsg(); rmsg.setL3NetworkUuid(newIp.getL3NetworkUuid()); rmsg.setUsedIpUuid(newIp.getUuid()); bus.makeTargetServiceIdByResourceUuid(rmsg, L3NetworkConstant.SERVICE_ID, newIp.getL3NetworkUuid()); bus.send(rmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { trigger.rollback(); } }); } else { trigger.rollback(); } } }); flow(new NoRollbackFlow() { String __name__ = "change-ip-in-database"; @Override public void run(FlowTrigger trigger, Map data) { /* for multiple IP addresses, change nic.ip ONLY when setting the static ip of the default IP */ for (VmNicExtensionPoint ext : pluginRgty.getExtensionList(VmNicExtensionPoint.class)) { ext.afterAddIpAddress(targetNic.getUuid(), newIp.getUuid()); } trigger.next(); } }); flow(new NoRollbackFlow() { String __name__ = "return-old-ip"; @Override public void run(FlowTrigger trigger, Map data) { ReturnIpMsg rmsg = new ReturnIpMsg(); rmsg.setUsedIpUuid(oldIpUuid); rmsg.setL3NetworkUuid(oldIp.getL3NetworkUuid()); bus.makeTargetServiceIdByResourceUuid(rmsg, L3NetworkConstant.SERVICE_ID, oldIp.getL3NetworkUuid()); bus.send(rmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { for (VmNicExtensionPoint ext : pluginRgty.getExtensionList(VmNicExtensionPoint.class)) { ext.afterDelIpAddress(targetNic.getUuid(), oldIpUuid); } trigger.next(); } }); } }); done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { final VmInstanceInventory vm = getSelfInventory(); final VmNicInventory nic = VmNicInventory.valueOf(targetNic); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmIpChangedExtensionPoint.class), new ForEachFunction<VmIpChangedExtensionPoint>() { @Override public void run(VmIpChangedExtensionPoint ext) { ext.vmIpChanged(vm, nic, oldIp, newIp); } }); completion.success(); } }); error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }); } }).start(); } private void handle(final ExpungeVmMsg msg) { final ExpungeVmReply reply = new ExpungeVmReply(); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { expunge(msg, new Completion(msg, chain) {
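// whether the expunge flow succeeds or fails, reply to the message and release the sync task queue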
@Override public void success() { bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "expunge-vm"; } }); } private void expunge(Message msg, final Completion completion) { refreshVO(); final VmInstanceInventory inv = getSelfInventory(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmBeforeExpungeExtensionPoint.class), arg -> arg.vmBeforeExpunge(inv)); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } if (inv.getAllVolumes().size() > 1) { throw new CloudRuntimeException(String.format("why the deleted vm[uuid:%s] has data volumes??? %s", self.getUuid(), JSONObjectUtil.toJsonString(inv.getAllVolumes()))); } VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Expunge); FlowChain chain = getExpungeVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("expunge-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(Params.DeletionPolicy, VmInstanceDeletionPolicy.Direct); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmAfterExpungeExtensionPoint.class), arg -> arg.vmAfterExpunge(inv)); callVmJustBeforeDeleteFromDbExtensionPoint(); dbf.reload(self); dbf.removeCollection(self.getVmNics(), VmNicVO.class); dbf.removeCollection(self.getVmCdRoms(), VmCdRomVO.class); dbf.remove(self); logger.debug(String.format("successfully expunged the vm[uuid:%s]", self.getUuid())); dbf.eoCleanup(VmInstanceVO.class, Collections.singletonList(self.getUuid())); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } private void handle(final VmCheckOwnStateMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { refreshVO(); final VmCheckOwnStateReply reply = new VmCheckOwnStateReply(); if (self.getHostUuid() == null) { // no way to check bus.reply(msg, reply); chain.next(); return; } final CheckVmStateOnHypervisorMsg cmsg = new CheckVmStateOnHypervisorMsg(); cmsg.setVmInstanceUuids(list(self.getUuid())); cmsg.setHostUuid(self.getHostUuid()); bus.makeTargetServiceIdByResourceUuid(cmsg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(cmsg, new CloudBusCallBack(msg, chain) { @Override public void run(MessageReply r) { if (!r.isSuccess()) { reply.setError(r.getError()); bus.reply(msg, r); chain.next(); return; } CheckVmStateOnHypervisorReply cr = r.castReply(); String s = cr.getStates().get(self.getUuid()); VmInstanceState state = VmInstanceState.valueOf(s); if (state != self.getState()) { VmStateChangedOnHostMsg vcmsg = new VmStateChangedOnHostMsg(); vcmsg.setHostUuid(self.getHostUuid()); vcmsg.setVmInstanceUuid(self.getUuid()); vcmsg.setStateOnHost(state); bus.makeTargetServiceIdByResourceUuid(vcmsg, VmInstanceConstant.SERVICE_ID, self.getUuid()); bus.send(vcmsg); } bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "check-state"; } }); } private void handle(final VmStateChangedOnHostMsg msg) { logger.debug(String.format("get VmStateChangedOnHostMsg for vm[uuid:%s], on 
host[uuid:%s], which tracing state is [%s]" + " and current state on host is [%s]", msg.getVmInstanceUuid(), msg.getHostUuid(), msg.getVmStateAtTracingMoment(), msg.getStateOnHost())); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { if (msg.isFromSync()) { return syncThreadName; } else { return String.format("change-vm-state-%s", syncThreadName); } } @Override public void run(final SyncTaskChain chain) { logger.debug(String.format("running sync task %s with sync signature %s", getName(), getSyncSignature())); vmStateChangeOnHost(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return String.format("vm-%s-state-change-on-the-host-%s", msg.getVmInstanceUuid(), msg.getHostUuid()); } }); } private VmAbnormalLifeCycleOperation getVmAbnormalLifeCycleOperation(String originalHostUuid, String currentHostUuid, VmInstanceState originalState, VmInstanceState currentState) { if (originalState == VmInstanceState.Stopped && currentState == VmInstanceState.Running) { return VmAbnormalLifeCycleOperation.VmRunningOnTheHost; } if (originalState == VmInstanceState.Running && currentState == VmInstanceState.Stopped && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmStoppedOnTheSameHost; } if (VmInstanceState.intermediateStates.contains(originalState) && currentState == VmInstanceState.Running) { return VmAbnormalLifeCycleOperation.VmRunningFromIntermediateState; } if (VmInstanceState.intermediateStates.contains(originalState) && currentState == VmInstanceState.Stopped) { return VmAbnormalLifeCycleOperation.VmStoppedFromIntermediateState; } if (originalState == VmInstanceState.Running && currentState == VmInstanceState.Paused && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmPausedFromRunningStateHostNotChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Paused && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmPausedFromUnknownStateHostNotChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Running && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostNotChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Running && !currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Stopped && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmStoppedOnTheSameHost; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Stopped && originalHostUuid == null && currentHostUuid.equals(self.getLastHostUuid())) { return VmAbnormalLifeCycleOperation.VmStoppedFromUnknownStateHostNotChanged; } if (originalState == VmInstanceState.Running && originalState == currentState && !currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmMigrateToAnotherHost; } if (originalState == VmInstanceState.Paused && currentState == VmInstanceState.Running && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmRunningFromPausedStateHostNotChanged; } if (originalState == VmInstanceState.Paused && currentState == VmInstanceState.Stopped && currentHostUuid.equals(originalHostUuid)) { return 
VmAbnormalLifeCycleOperation.VmStoppedFromPausedStateHostNotChanged; } throw new CloudRuntimeException(String.format("unknown VM[uuid:%s] abnormal state combination[original state: %s," + " current state: %s, original host:%s, current host:%s]", self.getUuid(), originalState, currentState, originalHostUuid, currentHostUuid)); } private void vmStateChangeOnHost(final VmStateChangedOnHostMsg msg, final NoErrorCompletion completion) { final VmStateChangedOnHostReply reply = new VmStateChangedOnHostReply(); if (refreshVO(true) == null) { // the vm has been deleted reply.setError(operr("the vm has been deleted")); bus.reply(msg, reply); completion.done(); return; } if (msg.getVmStateAtTracingMoment() != null) { // the vm tracer periodically reports the VMs' states. It may catch an old state // before a vm operation (start, stop, reboot, migrate) completes. Ignore this VmInstanceState expected = VmInstanceState.valueOf(msg.getVmStateAtTracingMoment()); if (expected != self.getState()) { bus.reply(msg, reply); completion.done(); return; } } final String originalHostUuid = self.getHostUuid(); final String currentHostUuid = msg.getHostUuid(); final VmInstanceState originalState = self.getState(); final VmInstanceState currentState = VmInstanceState.valueOf(msg.getStateOnHost()); if (originalState == currentState && currentHostUuid.equals(originalHostUuid)) { logger.debug(String.format("vm[uuid:%s]'s state[%s] is in line with its state on the host[uuid:%s], ignore VmStateChangedOnHostMsg", self.getUuid(), originalState, originalHostUuid)); bus.reply(msg, reply); completion.done(); return; } if (originalState == VmInstanceState.Stopped && currentState == VmInstanceState.Unknown) { bus.reply(msg, reply); completion.done(); return; } final Runnable fireEvent = () -> { VmTracerCanonicalEvents.VmStateChangedOnHostData data = new VmTracerCanonicalEvents.VmStateChangedOnHostData(); data.setVmUuid(self.getUuid()); data.setFrom(originalState); data.setTo(self.getState()); data.setOriginalHostUuid(originalHostUuid); data.setCurrentHostUuid(self.getHostUuid()); evtf.fire(VmTracerCanonicalEvents.VM_STATE_CHANGED_PATH, data); }; if (currentState == VmInstanceState.Unknown) { changeVmStateInDb(VmInstanceStateEvent.unknown); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } VmAbnormalLifeCycleOperation operation = getVmAbnormalLifeCycleOperation(originalHostUuid, currentHostUuid, originalState, currentState); if (operation == VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostNotChanged) { // the vm is detected on the host again.
It's largely because the host disconnected before // and now reconnected changeVmStateInDb(VmInstanceStateEvent.running, ()-> self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmStoppedFromUnknownStateHostNotChanged) { // the vm comes out of the unknown state to the stopped state // it happens when an operation failure led the vm from the stopped state to the unknown state, // and later on the vm was detected as stopped on the host again changeVmStateInDb(VmInstanceStateEvent.stopped, ()-> self.setHostUuid(null)); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmStoppedFromPausedStateHostNotChanged) { changeVmStateInDb(VmInstanceStateEvent.stopped, ()-> self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmPausedFromUnknownStateHostNotChanged) { // something led the vm to the unknown state and the paused vm is detected on the host again changeVmStateInDb(VmInstanceStateEvent.paused, ()-> self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmPausedFromRunningStateHostNotChanged) { // just synchronize database changeVmStateInDb(VmInstanceStateEvent.paused, ()->self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromPausedStateHostNotChanged) { // just synchronize database changeVmStateInDb(VmInstanceStateEvent.running, ()->self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } List<VmAbnormalLifeCycleExtensionPoint> exts = pluginRgty.getExtensionList(VmAbnormalLifeCycleExtensionPoint.class); VmAbnormalLifeCycleStruct struct = new VmAbnormalLifeCycleStruct(); struct.setCurrentHostUuid(currentHostUuid); struct.setCurrentState(currentState); struct.setOriginalHostUuid(originalHostUuid); struct.setOriginalState(originalState); struct.setVmInstance(getSelfInventory()); struct.setOperation(operation); logger.debug(String.format("the vm[uuid:%s]'s state changed abnormally on the host[uuid:%s]," + " ZStack is going to take the operation[%s]," + " [original state: %s, current state: %s, original host: %s, current host:%s]", self.getUuid(), currentHostUuid, operation, originalState, currentState, originalHostUuid, currentHostUuid)); FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("handle-abnormal-lifecycle-of-vm-%s", self.getUuid())); chain.getData().put(Params.AbnormalLifeCycleStruct, struct); chain.allowEmptyFlow(); for (VmAbnormalLifeCycleExtensionPoint ext : exts) { Flow flow = ext.createVmAbnormalLifeCycleHandlingFlow(struct); chain.then(flow); } chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { if (currentState == VmInstanceState.Running) { changeVmStateInDb(VmInstanceStateEvent.running, ()-> self.setHostUuid(currentHostUuid)); } else if (currentState == VmInstanceState.Stopped) { changeVmStateInDb(VmInstanceStateEvent.stopped); } fireEvent.run(); bus.reply(msg, reply); completion.done(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { logger.warn(String.format("failed to handle abnormal lifecycle of the vm[uuid:%s, original
state: %s, current state:%s," + "original host: %s, current host: %s], %s", self.getUuid(), originalState, currentState, originalHostUuid, currentHostUuid, errCode)); reply.setError(errCode); bus.reply(msg, reply); completion.done(); } }).start(); } private List<String> buildUserdata() { return new UserdataBuilder().buildByVmUuid(self.getUuid()); } private void handle(final DetachNicFromVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final DetachNicFromVmReply reply = new DetachNicFromVmReply(); refreshVO(); if (self.getState() == VmInstanceState.Destroyed) { // the cascade framework may send this message when // the vm has been destroyed VmNicVO nic = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return msg.getVmNicUuid().equals(arg.getUuid()) ? arg : null; } }); if (nic != null) { dbf.remove(nic); } bus.reply(msg, reply); chain.next(); return; } final ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("l3-network-detach-from-vm-%s", msg.getVmInstanceUuid())); fchain.then(new NoRollbackFlow() { String __name__ = "before-detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { VmNicInventory nic = VmNicInventory.valueOf((VmNicVO) Q.New(VmNicVO.class).eq(VmNicVO_.uuid, msg.getVmNicUuid()).find()); beforeDetachNic(nic, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { String __name__ = "detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { detachNic(msg.getVmNicUuid(), true, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); bus.reply(msg, reply); chain.next(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); chain.next(); } }).start(); } @Override public String getName() { return "nic-detach"; } }); } private void handle(final AddL3NetworkToVmNicMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final AddL3NetworkToVmNicReply reply = new AddL3NetworkToVmNicReply(); refreshVO(); final ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } VmNicVO vmNicVO = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); spec.setDestNics(list(VmNicInventory.valueOf(vmNicVO))); L3NetworkVO l3Vo = dbf.findByUuid(msg.getNewL3Uuid(), L3NetworkVO.class); spec.setL3Networks(list(new VmNicSpec(L3NetworkInventory.valueOf(l3Vo)))); FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("update-vmNic-%s-to-backend", msg.getVmInstanceUuid())); 
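// the chain instantiates network services for the new L3 and then pushes the updated nic definition down to the hypervisor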
fchain.getData().put(Params.VmInstanceSpec.toString(), spec); fchain.then(new VmInstantiateResourceOnAttachingNicFlow()); fchain.then(new VmUpdateNicOnHypervisorFlow()); fchain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); bus.reply(msg, reply); chain.next(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); chain.next(); } }).start(); } @Override public String getName() { return "update-vmNic-to-backend"; } }); } private void handle(final DeleteL3NetworkFromVmNicMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final DeleteL3NetworkFromVmNicReply reply = new DeleteL3NetworkFromVmNicReply(); refreshVO(); final ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } VmNicVO vmNicVO = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); spec.setDestNics(list(VmNicInventory.valueOf(vmNicVO))); L3NetworkVO l3Vo = dbf.findByUuid(msg.getNewL3Uuid(), L3NetworkVO.class); spec.setL3Networks(list(new VmNicSpec(L3NetworkInventory.valueOf(l3Vo)))); FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("update-vmNic-%s-to-backend", msg.getVmInstanceUuid())); fchain.getData().put(Params.VmInstanceSpec.toString(), spec); fchain.then(new VmReleaseResourceOnDetachingNicFlow()); fchain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); bus.reply(msg, reply); chain.next(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); chain.next(); } }).start(); } @Override public String getName() { return "update-vmNic-to-backend"; } }); } private void handle(final LockVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { logger.debug(String.format("locked vm[uuid:%s] for %s", self.getUuid(), msg.getReason())); evtf.on(LockResourceMessage.UNLOCK_CANONICAL_EVENT_PATH, new AutoOffEventCallback() { @Override public boolean run(Map tokens, Object data) { if (msg.getUnlockKey().equals(data)) { logger.debug(String.format("unlocked vm[uuid:%s] that was locked by %s", self.getUuid(), msg.getReason())); chain.next(); return true; } return false; } }); LockVmInstanceReply reply = new LockVmInstanceReply(); bus.reply(msg, reply); } @Override public String getName() { return String.format("lock-vm-%s", self.getUuid()); } }); } private void handle(final ChangeVmMetaDataMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { changeMetaData(msg); chain.next(); } @Override public String getName() { return String.format("change-meta-data-of-vm-%s", self.getUuid()); } }); } private void changeMetaData(ChangeVmMetaDataMsg msg) { ChangeVmMetaDataReply reply = new ChangeVmMetaDataReply(); refreshVO(); if (self == null) { bus.reply(msg, reply); return; } AtomicVmState s = msg.getState(); AtomicHostUuid h = msg.getHostUuid(); if 
(msg.isNeedHostAndStateBothMatch()) { if (s != null && h != null && s.getExpected() == self.getState()) { if ((h.getExpected() == null && self.getHostUuid() == null) || (h.getExpected() != null && h.getExpected().equals(self.getHostUuid()))) { changeVmStateInDb(s.getValue().getDrivenEvent(), ()-> { self.setHostUuid(h.getValue()); }); reply.setChangeStateDone(true); reply.setChangeHostUuidDone(true); } } } else { if (s != null && s.getExpected() == self.getState()) { changeVmStateInDb(s.getValue().getDrivenEvent()); reply.setChangeStateDone(true); } if (h != null) { if ((h.getExpected() == null && self.getHostUuid() == null) || (h.getExpected() != null && h.getExpected().equals(self.getHostUuid()))) { self.setHostUuid(h.getValue()); dbf.update(self); reply.setChangeHostUuidDone(true); } } } bus.reply(msg, reply); } private void getVmMigrationTargetHost(Message msg, final ReturnValueCompletion<List<HostInventory>> completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), VmErrors.MIGRATE_ERROR); if (allowed != null) { completion.fail(allowed); return; } final DesignatedAllocateHostMsg amsg = new DesignatedAllocateHostMsg(); amsg.setCpuCapacity(self.getCpuNum()); amsg.setMemoryCapacity(self.getMemorySize()); amsg.getAvoidHostUuids().add(self.getHostUuid()); if (msg instanceof GetVmMigrationTargetHostMsg) { GetVmMigrationTargetHostMsg gmsg = (GetVmMigrationTargetHostMsg) msg; if (gmsg.getAvoidHostUuids() != null) { amsg.getAvoidHostUuids().addAll(gmsg.getAvoidHostUuids()); } } else { if (msg instanceof APIMessage){ if (((APIMessage) msg).getSystemTags() != null){ amsg.setSystemTags(new ArrayList<String>(((APIMessage) msg).getSystemTags())); } } } amsg.setVmInstance(VmInstanceInventory.valueOf(self)); amsg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); amsg.setAllocatorStrategy(HostAllocatorConstant.MIGRATE_VM_ALLOCATOR_TYPE); amsg.setVmOperation(VmOperation.Migrate.toString()); amsg.setL3NetworkUuids(VmNicHelper.getL3Uuids(VmNicInventory.valueOf(self.getVmNics()))); amsg.setDryRun(true); amsg.setAllowNoL3Networks(true); bus.send(amsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply re) { if (!re.isSuccess()) { if (HostAllocatorError.NO_AVAILABLE_HOST.toString().equals(re.getError().getCode())) { completion.success(new ArrayList<HostInventory>()); } else { completion.fail(re.getError()); } } else { completion.success(((AllocateHostDryRunReply) re).getHosts()); } } }); } private void handle(final GetVmMigrationTargetHostMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final GetVmMigrationTargetHostReply reply = new GetVmMigrationTargetHostReply(); getVmMigrationTargetHost(msg, new ReturnValueCompletion<List<HostInventory>>(msg, chain) { @Override public void success(List<HostInventory> returnValue) { reply.setHosts(returnValue); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("get-migration-target-host-for-vm-%s", self.getUuid()); } }); } private void handle(final AttachDataVolumeToVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { attachDataVolume(msg, new NoErrorCompletion(chain) { @Override 
public void done() { chain.next(); } }); } @Override public String getName() { return String.format("attach-volume-%s-to-vm-%s", msg.getVolume().getUuid(), msg.getVmInstanceUuid()); } }); } private void handle(final DetachDataVolumeFromVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { detachVolume(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return String.format("detach-volume-%s-from-vm-%s", msg.getVolume().getUuid(), msg.getVmInstanceUuid()); } }); } private void handle(final MigrateVmMsg msg) { final MigrateVmReply reply = new MigrateVmReply(); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { migrateVm(msg, new Completion(chain) { @Override public void success() { bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("migrate-vm-%s", self.getUuid()); } }); } private void attachNic(final Message msg, final List<String> l3Uuids, final ReturnValueCompletion<VmNicInventory> completion) { thdf.chainSubmit(new ChainTask(completion) { @Override public String getSyncSignature() { return syncThreadName; } @Override @Deferred public void run(final SyncTaskChain chain) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { completion.fail(allowed); return; } class SetDefaultL3Network { private boolean isSet = false; void set() { if (self.getDefaultL3NetworkUuid() == null) { self.setDefaultL3NetworkUuid(l3Uuids.get(0)); self = dbf.updateAndRefresh(self); isSet = true; } } void rollback() { if (isSet) { self.setDefaultL3NetworkUuid(null); dbf.update(self); } } } class SetStaticIp { private boolean isSet = false; void set() { if (!(msg instanceof APIAttachL3NetworkToVmMsg)) { return; } APIAttachL3NetworkToVmMsg amsg = (APIAttachL3NetworkToVmMsg) msg; if (amsg.getStaticIpMap().isEmpty()) { return; } for (Map.Entry<String, String> e : amsg.getStaticIpMap().entrySet()) { new StaticIpOperator().setStaticIp(self.getUuid(), e.getKey(), e.getValue()); } isSet = true; } void rollback() { if (isSet) { APIAttachL3NetworkToVmMsg amsg = (APIAttachL3NetworkToVmMsg) msg; for (Map.Entry<String, String> e : amsg.getStaticIpMap().entrySet()) { new StaticIpOperator().deleteStaticIpByVmUuidAndL3Uuid(self.getUuid(), e.getKey()); } } } } final SetDefaultL3Network setDefaultL3Network = new SetDefaultL3Network(); setDefaultL3Network.set(); Defer.guard(new Runnable() { @Override public void run() { setDefaultL3Network.rollback(); } }); final SetStaticIp setStaticIp = new SetStaticIp(); setStaticIp.set(); Defer.guard(new Runnable() { @Override public void run() { setStaticIp.rollback(); } }); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); final VmInstanceInventory vm = spec.getVmInventory(); List<L3NetworkInventory> l3s = new ArrayList<>(); for (String l3Uuid : l3Uuids) { L3NetworkVO l3vo = dbf.findByUuid(l3Uuid, L3NetworkVO.class); final L3NetworkInventory l3 = L3NetworkInventory.valueOf(l3vo); l3s.add(l3); for (VmPreAttachL3NetworkExtensionPoint ext : pluginRgty.getExtensionList(VmPreAttachL3NetworkExtensionPoint.class)) { 
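// let every extension validate the attach before any nic or IP resources are allocated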
ext.vmPreAttachL3Network(vm, l3); } } spec.setL3Networks(list(new VmNicSpec(l3s))); spec.setDestNics(new ArrayList<VmNicInventory>()); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmBeforeAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmBeforeAttachL3NetworkExtensionPoint>() { @Override public void run(VmBeforeAttachL3NetworkExtensionPoint arg) { for (L3NetworkInventory l3 : l3s) { arg.vmBeforeAttachL3Network(vm, l3); } } }); FlowChain flowChain = FlowChainBuilder.newSimpleFlowChain(); setFlowMarshaller(flowChain); flowChain.setName(String.format("attachNic-vm-%s-l3-%s", self.getUuid(), l3Uuids.get(0))); flowChain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); flowChain.then(new VmAllocateNicFlow()); flowChain.then(new VmAttachL3NetworkToNicFlow()); flowChain.then(new VmSetDefaultL3NetworkOnAttachingFlow()); if (self.getState() == VmInstanceState.Running) { flowChain.then(new VmInstantiateResourceOnAttachingNicFlow()); flowChain.then(new VmAttachNicOnHypervisorFlow()); } flowChain.done(new FlowDoneHandler(chain) { @Override public void handle(Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmAfterAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmAfterAttachL3NetworkExtensionPoint>() { @Override public void run(VmAfterAttachL3NetworkExtensionPoint arg) { for (L3NetworkInventory l3 : l3s) { arg.vmAfterAttachL3Network(vm, l3); } } }); VmNicInventory nic = spec.getDestNics().get(0); completion.success(nic); chain.next(); } }).error(new FlowErrorHandler(chain) { @Override public void handle(final ErrorCode errCode, Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmFailToAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmFailToAttachL3NetworkExtensionPoint>() { @Override public void run(VmFailToAttachL3NetworkExtensionPoint arg) { for (L3NetworkInventory l3 : l3s) { arg.vmFailToAttachL3Network(vm, l3, errCode); } } }); setDefaultL3Network.rollback(); setStaticIp.rollback(); completion.fail(errCode); chain.next(); } }).start(); } @Override public String getName() { return String.format("attachNic-vm-%s-l3-%s", self.getUuid(), l3Uuids.get(0)); } }); } private void attachNic(final APIAttachVmNicToVmMsg msg, final ReturnValueCompletion<VmNicInventory> completion) { thdf.chainSubmit(new ChainTask(completion) { @Override public String getSyncSignature() { return syncThreadName; } @Override @Deferred public void run(final SyncTaskChain chain) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { completion.fail(allowed); return; } String vmNicUuid = msg.getVmNicUuid(); VmNicVO vmNicVO = dbf.findByUuid(vmNicUuid, VmNicVO.class); String l3Uuid = VmNicHelper.getPrimaryL3Uuid(VmNicInventory.valueOf(vmNicVO)); class SetDefaultL3Network { private boolean isSet = false; void set() { if (self.getDefaultL3NetworkUuid() == null) { self.setDefaultL3NetworkUuid(l3Uuid); self = dbf.updateAndRefresh(self); isSet = true; } } void rollback() { if (isSet) { self.setDefaultL3NetworkUuid(null); dbf.update(self); } } } final SetDefaultL3Network setDefaultL3Network = new SetDefaultL3Network(); setDefaultL3Network.set(); Defer.guard(new Runnable() { @Override public void run() { setDefaultL3Network.rollback(); } }); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); spec.setVmInventory(VmInstanceInventory.valueOf(self)); L3NetworkVO l3vo = dbf.findByUuid(l3Uuid, L3NetworkVO.class); final 
L3NetworkInventory l3 = L3NetworkInventory.valueOf(l3vo); final VmInstanceInventory vm = getSelfInventory(); for (VmPreAttachL3NetworkExtensionPoint ext : pluginRgty.getExtensionList(VmPreAttachL3NetworkExtensionPoint.class)) { ext.vmPreAttachL3Network(vm, l3); } spec.setL3Networks(list(new VmNicSpec(l3))); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmBeforeAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmBeforeAttachL3NetworkExtensionPoint>() { @Override public void run(VmBeforeAttachL3NetworkExtensionPoint arg) { arg.vmBeforeAttachL3Network(vm, l3); } }); FlowChain flowChain = FlowChainBuilder.newSimpleFlowChain(); setFlowMarshaller(flowChain); flowChain.setName(String.format("attachNic-vm-%s-nic-%s", self.getUuid(), vmNicVO.getUuid())); flowChain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); flowChain.then(new Flow() { String __name__ = "update-nic"; @Override public void run(FlowTrigger trigger, Map data) { final BitSet deviceIdBitmap = new BitSet(512); for (VmNicInventory nic : spec.getVmInventory().getVmNics()) { deviceIdBitmap.set(nic.getDeviceId()); } int deviceId = deviceIdBitmap.nextClearBit(0); deviceIdBitmap.set(deviceId); String internalName = VmNicVO.generateNicInternalName(spec.getVmInventory().getInternalId(), deviceId); UpdateQuery.New(VmNicVO.class) .eq(VmNicVO_.uuid, vmNicUuid) .set(VmNicVO_.vmInstanceUuid, self.getUuid()) .set(VmNicVO_.deviceId, deviceId) .set(VmNicVO_.internalName, internalName) .set(VmNicVO_.hypervisorType, spec.getVmInventory().getHypervisorType()) .update(); vmNicVO.setVmInstanceUuid(self.getUuid()); vmNicVO.setDeviceId(deviceId); vmNicVO.setInternalName(internalName); vmNicVO.setHypervisorType(spec.getVmInventory().getHypervisorType()); spec.getDestNics().add(VmNicInventory.valueOf(vmNicVO)); trigger.next(); } @Override public void rollback(FlowRollback trigger, Map data) { UpdateQuery.New(VmNicVO.class) .eq(VmNicVO_.uuid, vmNicUuid) .set(VmNicVO_.vmInstanceUuid, null) .update(); trigger.rollback(); } }); flowChain.then(new VmSetDefaultL3NetworkOnAttachingFlow()); if (self.getState() == VmInstanceState.Running) { flowChain.then(new VmInstantiateResourceOnAttachingNicFlow()); flowChain.then(new VmAttachNicOnHypervisorFlow()); } flowChain.done(new FlowDoneHandler(chain) { @Override public void handle(Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmAfterAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmAfterAttachL3NetworkExtensionPoint>() { @Override public void run(VmAfterAttachL3NetworkExtensionPoint arg) { arg.vmAfterAttachL3Network(vm, l3); } }); VmNicInventory nic = spec.getDestNics().get(0); completion.success(nic); chain.next(); } }).error(new FlowErrorHandler(chain) { @Override public void handle(final ErrorCode errCode, Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmFailToAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmFailToAttachL3NetworkExtensionPoint>() { @Override public void run(VmFailToAttachL3NetworkExtensionPoint arg) { arg.vmFailToAttachL3Network(vm, l3, errCode); } }); setDefaultL3Network.rollback(); completion.fail(errCode); chain.next(); } }).start(); } @Override public String getName() { return String.format("attachNic-vm-%s-nic-%s", self.getUuid(), msg.getVmNicUuid()); } }); } private void handle(final VmAttachNicMsg msg) { final VmAttachNicReply reply = new VmAttachNicReply(); attachNic(msg, Collections.singletonList(msg.getL3NetworkUuid()), new ReturnValueCompletion<VmNicInventory>(msg) { @Override public 
void success(VmNicInventory nic) { reply.setInventroy(nic); bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); } }); } private void callVmJustBeforeDeleteFromDbExtensionPoint() { VmInstanceInventory inv = getSelfInventory(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmJustBeforeDeleteFromDbExtensionPoint.class), p -> p.vmJustBeforeDeleteFromDb(inv)); } protected void doDestroy(final VmInstanceDeletionPolicy deletionPolicy, Message msg, final Completion completion) { final VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.beforeDestroyVm(inv); destroy(deletionPolicy, msg, new Completion(completion) { @Override public void success() { extEmitter.afterDestroyVm(inv); logger.debug(String.format("successfully deleted vm instance[name:%s, uuid:%s]", self.getName(), self.getUuid())); if (deletionPolicy == VmInstanceDeletionPolicy.Direct) { changeVmStateInDb(VmInstanceStateEvent.destroyed); callVmJustBeforeDeleteFromDbExtensionPoint(); dbf.removeCollection(self.getVmCdRoms(), VmCdRomVO.class); dbf.remove(getSelf()); } else if (deletionPolicy == VmInstanceDeletionPolicy.DBOnly || deletionPolicy == VmInstanceDeletionPolicy.KeepVolume) { new SQLBatch() { @Override protected void scripts() { callVmJustBeforeDeleteFromDbExtensionPoint(); sql(VmNicVO.class).eq(VmNicVO_.vmInstanceUuid, self.getUuid()).hardDelete(); sql(VolumeVO.class).eq(VolumeVO_.vmInstanceUuid, self.getUuid()) .eq(VolumeVO_.type, VolumeType.Root) .hardDelete(); sql(VmCdRomVO.class).eq(VmCdRomVO_.vmInstanceUuid, self.getUuid()).hardDelete(); sql(VmInstanceVO.class).eq(VmInstanceVO_.uuid, self.getUuid()).hardDelete(); } }.execute(); } else if (deletionPolicy == VmInstanceDeletionPolicy.Delay) { changeVmStateInDb(VmInstanceStateEvent.destroyed, ()-> self.setHostUuid(null)); } else if (deletionPolicy == VmInstanceDeletionPolicy.Never) { logger.warn(String.format("the vm[uuid:%s] is deleted, but by its deletion policy[Never]," + " the root volume is not deleted on the primary storage", self.getUuid())); changeVmStateInDb(VmInstanceStateEvent.destroyed, ()-> self.setHostUuid(null)); } completion.success(); } @Override public void fail(ErrorCode errorCode) { extEmitter.failedToDestroyVm(inv, errorCode); logger.debug(String.format("failed to delete vm instance[name:%s, uuid:%s], because %s", self.getName(), self.getUuid(), errorCode)); completion.fail(errorCode); } }); } private VmInstanceDeletionPolicy getVmDeletionPolicy(final VmInstanceDeletionMsg msg) { if (self.getState() == VmInstanceState.Created) { return VmInstanceDeletionPolicy.DBOnly; } return msg.getDeletionPolicy() == null ?
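// no policy specified on the message: fall back to the deletion policy configured for this VM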
deletionPolicyMgr.getDeletionPolicy(self.getUuid()) : VmInstanceDeletionPolicy.valueOf(msg.getDeletionPolicy()); } private void handle(final VmInstanceDeletionMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { final VmInstanceDeletionReply r = new VmInstanceDeletionReply(); final VmInstanceDeletionPolicy deletionPolicy = getVmDeletionPolicy(msg); self = dbf.findByUuid(self.getUuid(), VmInstanceVO.class); if (self == null || self.getState() == VmInstanceState.Destroyed) { // the vm has been destroyed, most likely by rollback if (deletionPolicy != VmInstanceDeletionPolicy.DBOnly && deletionPolicy != VmInstanceDeletionPolicy.KeepVolume) { bus.reply(msg, r); chain.next(); return; } } destroyHook(deletionPolicy, msg, new Completion(msg, chain) { @Override public void success() { bus.reply(msg, r); chain.next(); } @Override public void fail(ErrorCode errorCode) { r.setError(errorCode); bus.reply(msg, r); chain.next(); } }); } @Override public String getName() { return "delete-vm"; } }); } protected void destroyHook(VmInstanceDeletionPolicy deletionPolicy, Message msg, Completion completion) { doDestroy(deletionPolicy, msg, completion); } private void handle(final RebootVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("reboot-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { rebootVm(msg, chain); } }); } private void rebootVm(final RebootVmInstanceMsg msg, final SyncTaskChain chain) { rebootVm(msg, new Completion(chain) { @Override public void success() { RebootVmInstanceReply reply = new RebootVmInstanceReply(); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); reply.setInventory(inv); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { RebootVmInstanceReply reply = new RebootVmInstanceReply(); reply.setError(err(VmErrors.REBOOT_ERROR, errorCode, errorCode.getDetails())); bus.reply(msg, reply); chain.next(); } }); } private void handle(final StopVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("stop-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { stopVm(msg, chain); } }); } private void stopVm(final StopVmInstanceMsg msg, final SyncTaskChain chain) { stopVm(msg, new Completion(chain) { @Override public void success() { StopVmInstanceReply reply = new StopVmInstanceReply(); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); reply.setInventory(inv); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { StopVmInstanceReply reply = new StopVmInstanceReply(); reply.setError(err(VmErrors.STOP_ERROR, errorCode, errorCode.getDetails())); bus.reply(msg, reply); chain.next(); } }); } private void handle(final StartVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("start-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { startVm(msg, chain); } }); } private void createTemplateFromRootVolume(final CreateTemplateFromVmRootVolumeMsg msg, final SyncTaskChain chain) { boolean callNext = true; try { refreshVO(); ErrorCode 
allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { bus.replyErrorByMessageType(msg, allowed); return; } final CreateTemplateFromVmRootVolumeReply reply = new CreateTemplateFromVmRootVolumeReply(); CreateTemplateFromVolumeOnPrimaryStorageMsg cmsg = new CreateTemplateFromVolumeOnPrimaryStorageMsg(); cmsg.setVolumeInventory(msg.getRootVolumeInventory()); cmsg.setBackupStorageUuid(msg.getBackupStorageUuid()); cmsg.setImageInventory(msg.getImageInventory()); bus.makeTargetServiceIdByResourceUuid(cmsg, PrimaryStorageConstant.SERVICE_ID, msg.getRootVolumeInventory().getPrimaryStorageUuid()); bus.send(cmsg, new CloudBusCallBack(chain) { private void fail(ErrorCode errorCode) { reply.setError(operr(errorCode, "failed to create template from root volume[uuid:%s] on primary storage[uuid:%s]", msg.getRootVolumeInventory().getUuid(), msg.getRootVolumeInventory().getPrimaryStorageUuid())); logger.warn(reply.getError().getDetails()); bus.reply(msg, reply); } @Override public void run(MessageReply r) { if (!r.isSuccess()) { fail(r.getError()); } else { CreateTemplateFromVolumeOnPrimaryStorageReply creply = (CreateTemplateFromVolumeOnPrimaryStorageReply) r; reply.setInstallPath(creply.getTemplateBackupStorageInstallPath()); reply.setFormat(creply.getFormat()); bus.reply(msg, reply); } chain.next(); } }); callNext = false; } finally { if (callNext) { chain.next(); } } } private void handle(final CreateTemplateFromVmRootVolumeMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("create-template-from-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { createTemplateFromRootVolume(msg, chain); } }); } private void handle(final AttachNicToVmMsg msg) { ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { bus.replyErrorByMessageType(msg, allowed); return; } AttachNicToVmOnHypervisorMsg amsg = new AttachNicToVmOnHypervisorMsg(); amsg.setVmUuid(self.getUuid()); amsg.setHostUuid(self.getHostUuid()); amsg.setNics(msg.getNics()); bus.makeTargetServiceIdByResourceUuid(amsg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(amsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (self.getDefaultL3NetworkUuid() == null) { self.setDefaultL3NetworkUuid(msg.getNics().get(0).getL3NetworkUuid()); self = dbf.updateAndRefresh(self); logger.debug(String.format("set the VM[uuid: %s]'s default L3 network[uuid:%s], as it didn't have one before", self.getUuid(), self.getDefaultL3NetworkUuid())); } AttachNicToVmReply r = new AttachNicToVmReply(); if (!reply.isSuccess()) { // wrap the error carried by the hypervisor reply; the fresh AttachNicToVmReply has no error of its own yet r.setError(err(VmErrors.ATTACH_NETWORK_ERROR, reply.getError(), reply.getError().getDetails())); } bus.reply(msg, r); } }); } private void handle(final DestroyVmInstanceMsg msg) { final DestroyVmInstanceReply reply = new DestroyVmInstanceReply(); final String issuer = VmInstanceVO.class.getSimpleName(); VmDeletionStruct s = new VmDeletionStruct(); if (msg.getDeletionPolicy() == null) { s.setDeletionPolicy(deletionPolicyMgr.getDeletionPolicy(self.getUuid())); } else { s.setDeletionPolicy(msg.getDeletionPolicy()); } s.setInventory(getSelfInventory()); final List<VmDeletionStruct> ctx = list(s); final FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("destroy-vm-%s", self.getUuid())); chain.then(new NoRollbackFlow() { @Override public void
run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new NopeCompletion()); bus.reply(msg, reply); } }).error(new FlowErrorHandler(msg) { @Override public void handle(final ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); } }).start(); } protected void handle(final ChangeVmStateMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("change-vm-state-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override @Deferred public void run(SyncTaskChain chain) { refreshVO(); Defer.defer(() -> { ChangeVmStateReply reply = new ChangeVmStateReply(); bus.reply(msg, reply); }); if (self == null) { // vm has been deleted by previous request // this happens when delete vm request queued before // change state request from vm tracer. // in this case, ignore change state request logger.debug(String.format("vm[uuid:%s] has been deleted, ignore change vm state request from vm tracer", msg.getVmInstanceUuid())); chain.next(); return; } changeVmStateInDb(VmInstanceStateEvent.valueOf(msg.getStateEvent())); chain.next(); } }); } protected void setFlowMarshaller(FlowChain chain) { chain.setFlowMarshaller(new FlowMarshaller() { @Override public Flow marshalTheNextFlow(String previousFlowClassName, String nextFlowClassName, FlowChain chain, Map data) { Flow nflow = null; for (MarshalVmOperationFlowExtensionPoint mext : pluginRgty.getExtensionList(MarshalVmOperationFlowExtensionPoint.class)) { VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); nflow = mext.marshalVmOperationFlow(previousFlowClassName, nextFlowClassName, chain, spec); if (nflow != null) { logger.debug(String.format("a VM[uuid: %s, operation: %s] operation flow[%s] is changed to the flow[%s] by %s", self.getUuid(), spec.getCurrentVmOperation(), nextFlowClassName, nflow.getClass(), mext.getClass())); break; } } return nflow; } }); } protected void selectBootOrder(VmInstanceSpec spec) { if (spec.getCurrentVmOperation() == null) { throw new CloudRuntimeException("selectBootOrder must be called after VmOperation is set"); } List<CdRomSpec> cdRomSpecs = spec.getCdRomSpecs().stream() .filter(cdRom -> cdRom.getImageUuid() != null) .collect(Collectors.toList()); if (spec.getCurrentVmOperation() == VmOperation.NewCreate && !cdRomSpecs.isEmpty()) { ImageVO imageVO = dbf.findByUuid(spec.getVmInventory().getImageUuid(), ImageVO.class); assert imageVO != null; if(imageVO.getMediaType() == ImageMediaType.ISO) { spec.setBootOrders(list(VmBootDevice.CdRom.toString())); } else { spec.setBootOrders(list(VmBootDevice.HardDisk.toString())); } } else { String order = VmSystemTags.BOOT_ORDER.getTokenByResourceUuid(self.getUuid(), VmSystemTags.BOOT_ORDER_TOKEN); if (order == null) { spec.setBootOrders(list(VmBootDevice.HardDisk.toString())); } else { spec.setBootOrders(list(order.split(","))); // set vm to boot from cdrom once only if (VmSystemTags.CDROM_BOOT_ONCE.hasTag(self.getUuid(), VmInstanceVO.class)) { VmSystemTags.BOOT_ORDER.deleteInherentTag(self.getUuid()); VmSystemTags.CDROM_BOOT_ONCE.deleteInherentTag(self.getUuid()); } 
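// deleting both tags above makes the CD-ROM-first order a one-shot setting: the next reboot falls back to the default boot order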
} } } protected void instantiateVmFromNewCreate(final InstantiateNewCreatedVmInstanceMsg msg, final SyncTaskChain taskChain) { refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } error = extEmitter.preStartNewCreatedVm(msg.getVmInstanceInventory()); if (error != null) { throw new OperationFailureException(error); } InstantiateNewCreatedVmInstanceReply reply = new InstantiateNewCreatedVmInstanceReply(); instantiateVmFromNewCreate(InstantiateVmFromNewCreatedStruct.fromMessage(msg), new Completion(msg, taskChain) { @Override public void success() { self = dbf.reload(self); reply.setVmInventory(getSelfInventory()); bus.reply(msg, reply); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); taskChain.next(); } }); } protected void handle(final InstantiateNewCreatedVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("create-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { instantiateVmFromNewCreate(msg, chain); } }); } @Transactional(readOnly = true) protected List<ImageInventory> getImageCandidatesForVm(ImageMediaType type) { String psUuid = getSelfInventory().getRootVolume().getPrimaryStorageUuid(); PrimaryStorageVO ps = dbf.getEntityManager().find(PrimaryStorageVO.class, psUuid); PrimaryStorageType psType = PrimaryStorageType.valueOf(ps.getType()); List<String> bsUuids = psType.findBackupStorage(psUuid); if (bsUuids == null) { String sql = "select img" + " from ImageVO img, ImageBackupStorageRefVO ref, BackupStorageVO bs, BackupStorageZoneRefVO bsRef" + " where ref.imageUuid = img.uuid" + " and img.mediaType = :imgType" + " and img.status = :status" + " and img.system = :system" + " and bs.uuid = ref.backupStorageUuid" + " and bs.type in (:bsTypes)" + " and bs.uuid = bsRef.backupStorageUuid" + " and bsRef.zoneUuid = :zoneUuid"; TypedQuery<ImageVO> q = dbf.getEntityManager().createQuery(sql, ImageVO.class); q.setParameter("zoneUuid", getSelfInventory().getZoneUuid()); if (type != null) { q.setParameter("imgType", type); } q.setParameter("status", ImageStatus.Ready); q.setParameter("system", false); q.setParameter("bsTypes", hostAllocatorMgr.getBackupStorageTypesByPrimaryStorageTypeFromMetrics(ps.getType())); return ImageInventory.valueOf(q.getResultList()); } else if (!bsUuids.isEmpty()) { String sql = "select img" + " from ImageVO img, ImageBackupStorageRefVO ref, BackupStorageVO bs, BackupStorageZoneRefVO bsRef" + " where ref.imageUuid = img.uuid" + " and img.mediaType = :imgType" + " and img.status = :status" + " and img.system = :system" + " and bs.uuid = ref.backupStorageUuid" + " and bs.uuid in (:bsUuids)" + " and bs.uuid = bsRef.backupStorageUuid" + " and bsRef.zoneUuid = :zoneUuid"; TypedQuery<ImageVO> q = dbf.getEntityManager().createQuery(sql, ImageVO.class); q.setParameter("zoneUuid", getSelfInventory().getZoneUuid()); if (type != null) { q.setParameter("imgType", type); } q.setParameter("status", ImageStatus.Ready); q.setParameter("system", false); q.setParameter("bsUuids", bsUuids); return ImageInventory.valueOf(q.getResultList()); } else { return new ArrayList<>(); } } protected void handleApiMessage(APIMessage msg) { if (msg instanceof APIStopVmInstanceMsg) { handle((APIStopVmInstanceMsg) msg); } else if (msg instanceof 
APIRebootVmInstanceMsg) { handle((APIRebootVmInstanceMsg) msg); } else if (msg instanceof APIDestroyVmInstanceMsg) { handle((APIDestroyVmInstanceMsg) msg); } else if (msg instanceof APIStartVmInstanceMsg) { handle((APIStartVmInstanceMsg) msg); } else if (msg instanceof APIMigrateVmMsg) { handle((APIMigrateVmMsg) msg); } else if (msg instanceof APIAttachL3NetworkToVmMsg) { handle((APIAttachL3NetworkToVmMsg) msg); } else if(msg instanceof APIAttachVmNicToVmMsg) { handle((APIAttachVmNicToVmMsg) msg); } else if (msg instanceof APIGetVmMigrationCandidateHostsMsg) { handle((APIGetVmMigrationCandidateHostsMsg) msg); } else if (msg instanceof APIGetVmAttachableDataVolumeMsg) { handle((APIGetVmAttachableDataVolumeMsg) msg); } else if (msg instanceof APIUpdateVmInstanceMsg) { handle((APIUpdateVmInstanceMsg) msg); } else if (msg instanceof APIChangeInstanceOfferingMsg) { handle((APIChangeInstanceOfferingMsg) msg); } else if (msg instanceof APIDetachL3NetworkFromVmMsg) { handle((APIDetachL3NetworkFromVmMsg) msg); } else if (msg instanceof APIGetVmAttachableL3NetworkMsg) { handle((APIGetVmAttachableL3NetworkMsg) msg); } else if (msg instanceof APIAttachIsoToVmInstanceMsg) { handle((APIAttachIsoToVmInstanceMsg) msg); } else if (msg instanceof APIDetachIsoFromVmInstanceMsg) { handle((APIDetachIsoFromVmInstanceMsg) msg); } else if (msg instanceof APIExpungeVmInstanceMsg) { handle((APIExpungeVmInstanceMsg) msg); } else if (msg instanceof APIRecoverVmInstanceMsg) { handle((APIRecoverVmInstanceMsg) msg); } else if (msg instanceof APISetVmBootOrderMsg) { handle((APISetVmBootOrderMsg) msg); } else if (msg instanceof APISetVmConsolePasswordMsg) { handle((APISetVmConsolePasswordMsg) msg); } else if (msg instanceof APIGetVmBootOrderMsg) { handle((APIGetVmBootOrderMsg) msg); } else if (msg instanceof APIDeleteVmConsolePasswordMsg) { handle((APIDeleteVmConsolePasswordMsg) msg); } else if (msg instanceof APIGetVmConsolePasswordMsg) { handle((APIGetVmConsolePasswordMsg) msg); } else if (msg instanceof APIGetVmConsoleAddressMsg) { handle((APIGetVmConsoleAddressMsg) msg); } else if (msg instanceof APISetVmHostnameMsg) { handle((APISetVmHostnameMsg) msg); } else if (msg instanceof APISetVmBootModeMsg) { handle((APISetVmBootModeMsg) msg); } else if (msg instanceof APIDeleteVmBootModeMsg) { handle((APIDeleteVmBootModeMsg) msg); } else if (msg instanceof APIDeleteVmHostnameMsg) { handle((APIDeleteVmHostnameMsg) msg); } else if (msg instanceof APISetVmStaticIpMsg) { handle((APISetVmStaticIpMsg) msg); } else if (msg instanceof APIDeleteVmStaticIpMsg) { handle((APIDeleteVmStaticIpMsg) msg); } else if (msg instanceof APIGetVmHostnameMsg) { handle((APIGetVmHostnameMsg) msg); } else if (msg instanceof APIGetVmStartingCandidateClustersHostsMsg) { handle((APIGetVmStartingCandidateClustersHostsMsg) msg); } else if (msg instanceof APIGetVmCapabilitiesMsg) { handle((APIGetVmCapabilitiesMsg) msg); } else if (msg instanceof APISetVmSshKeyMsg) { handle((APISetVmSshKeyMsg) msg); } else if (msg instanceof APIGetVmSshKeyMsg) { handle((APIGetVmSshKeyMsg) msg); } else if (msg instanceof APIDeleteVmSshKeyMsg) { handle((APIDeleteVmSshKeyMsg) msg); } else if (msg instanceof APIGetCandidateIsoForAttachingVmMsg) { handle((APIGetCandidateIsoForAttachingVmMsg) msg); } else if (msg instanceof APIPauseVmInstanceMsg) { handle((APIPauseVmInstanceMsg) msg); } else if (msg instanceof APIResumeVmInstanceMsg) { handle((APIResumeVmInstanceMsg) msg); } else if (msg instanceof APIReimageVmInstanceMsg) { handle((APIReimageVmInstanceMsg) msg); } else if (msg 
instanceof APIDeleteVmCdRomMsg) { handle((APIDeleteVmCdRomMsg) msg); } else if (msg instanceof APICreateVmCdRomMsg) { handle((APICreateVmCdRomMsg) msg); } else if (msg instanceof APIUpdateVmCdRomMsg) { handle((APIUpdateVmCdRomMsg) msg); } else if (msg instanceof APISetVmInstanceDefaultCdRomMsg) { handle((APISetVmInstanceDefaultCdRomMsg) msg); } else { VmInstanceBaseExtensionFactory ext = vmMgr.getVmInstanceBaseExtensionFactory(msg); if (ext != null) { VmInstance v = ext.getVmInstance(self); v.handleMessage(msg); } else { bus.dealWithUnknownMessage(msg); } } } private void handle(APIGetCandidateIsoForAttachingVmMsg msg) { APIGetCandidateIsoForAttachingVmReply reply = new APIGetCandidateIsoForAttachingVmReply(); if (self.getState() != VmInstanceState.Running && self.getState() != VmInstanceState.Stopped) { reply.setInventories(new ArrayList<>()); bus.reply(msg, reply); return; } List<ImageInventory> result = getImageCandidatesForVm(ImageMediaType.ISO); List<String> vmIsoList = IsoOperator.getIsoUuidByVmUuid(msg.getVmInstanceUuid()); result = result.stream() .filter(iso -> !vmIsoList.contains(iso.getUuid())) .collect(Collectors.toList()); for (VmAttachIsoExtensionPoint ext : pluginRgty.getExtensionList(VmAttachIsoExtensionPoint.class)) { ext.filtCandidateIsos(msg.getVmInstanceUuid(), result); } reply.setInventories(result); bus.reply(msg, reply); } private void handle(APIGetVmCapabilitiesMsg msg) { APIGetVmCapabilitiesReply reply = new APIGetVmCapabilitiesReply(); VmCapabilities capabilities = new VmCapabilities(); checkPrimaryStorageCapabilities(capabilities); checkImageMediaTypeCapabilities(capabilities); extEmitter.getVmCapabilities(getSelfInventory(), capabilities); reply.setCapabilities(capabilities.toMap()); bus.reply(msg, reply); } private void checkPrimaryStorageCapabilities(VmCapabilities capabilities) { VolumeInventory rootVolume = getSelfInventory().getRootVolume(); if (rootVolume == null) { capabilities.setSupportLiveMigration(false); capabilities.setSupportVolumeMigration(false); } else { SimpleQuery<PrimaryStorageVO> q = dbf.createQuery(PrimaryStorageVO.class); q.select(PrimaryStorageVO_.type); q.add(PrimaryStorageVO_.uuid, Op.EQ, rootVolume.getPrimaryStorageUuid()); String type = q.findValue(); PrimaryStorageType psType = PrimaryStorageType.valueOf(type); if (self.getState() != VmInstanceState.Running) { capabilities.setSupportLiveMigration(false); } else { capabilities.setSupportLiveMigration(psType.isSupportVmLiveMigration()); } if (self.getState() != VmInstanceState.Stopped) { capabilities.setSupportVolumeMigration(false); } else { capabilities.setSupportVolumeMigration(psType.isSupportVolumeMigration()); } } } private void checkImageMediaTypeCapabilities(VmCapabilities capabilities) { ImageVO vo = null; ImageMediaType imageMediaType; if (self.getImageUuid() != null) { vo = dbf.findByUuid(self.getImageUuid(), ImageVO.class); } if (vo == null) { imageMediaType = null; } else { imageMediaType = vo.getMediaType(); } if (imageMediaType == ImageMediaType.ISO || imageMediaType == null) { capabilities.setSupportReimage(false); } else { capabilities.setSupportReimage(true); } } private void handle(APIGetVmHostnameMsg msg) { String hostname = VmSystemTags.HOSTNAME.getTokenByResourceUuid(self.getUuid(), VmSystemTags.HOSTNAME_TOKEN); APIGetVmHostnameReply reply = new APIGetVmHostnameReply(); reply.setHostname(hostname); bus.reply(msg, reply); } private void handle(final APIDeleteVmStaticIpMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { 
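// tasks sharing syncThreadName are serialized per VM, so a static-ip change cannot race with other operations on this instance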
return syncThreadName; } @Override public void run(SyncTaskChain chain) { APIDeleteVmStaticIpEvent evt = new APIDeleteVmStaticIpEvent(msg.getId()); new StaticIpOperator().deleteStaticIpByVmUuidAndL3Uuid(self.getUuid(), msg.getL3NetworkUuid()); bus.publish(evt); chain.next(); } @Override public String getName() { return "delete-static-ip"; } }); } private void handle(final APISetVmStaticIpMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { setStaticIp(msg, new NoErrorCompletion(msg, chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return "set-static-ip"; } }); } private void setStaticIp(final APISetVmStaticIpMsg msg, final NoErrorCompletion completion) { refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } final APISetVmStaticIpEvent evt = new APISetVmStaticIpEvent(msg.getId()); changeVmIp(msg.getL3NetworkUuid(), msg.getIp(), new Completion(msg, completion) { @Override public void success() { new StaticIpOperator().setStaticIp(self.getUuid(), msg.getL3NetworkUuid(), msg.getIp()); bus.publish(evt); completion.done(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); completion.done(); } }); } private void handle(APISetVmBootModeMsg msg) { SystemTagCreator creator = VmSystemTags.BOOT_MODE.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map( e(VmSystemTags.BOOT_MODE_TOKEN, msg.getBootMode()) )); creator.recreate = true; creator.create(); APISetVmBootModeEvent evt = new APISetVmBootModeEvent(msg.getId()); bus.publish(evt); } private void handle(APIDeleteVmBootModeMsg msg) { APIDeleteVmBootModeEvent evt = new APIDeleteVmBootModeEvent(msg.getId()); VmSystemTags.BOOT_MODE.delete(self.getUuid()); bus.publish(evt); } private void handle(APIDeleteVmHostnameMsg msg) { APIDeleteVmHostnameEvent evt = new APIDeleteVmHostnameEvent(msg.getId()); VmSystemTags.HOSTNAME.delete(self.getUuid()); bus.publish(evt); } private void handle(APISetVmHostnameMsg msg) { if (!VmSystemTags.HOSTNAME.hasTag(self.getUuid())) { SystemTagCreator creator = VmSystemTags.HOSTNAME.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map( e(VmSystemTags.HOSTNAME_TOKEN, msg.getHostname()) )); creator.create(); } else { VmSystemTags.HOSTNAME.update(self.getUuid(), VmSystemTags.HOSTNAME.instantiateTag( map(e(VmSystemTags.HOSTNAME_TOKEN, msg.getHostname())) )); } APISetVmHostnameEvent evt = new APISetVmHostnameEvent(msg.getId()); bus.publish(evt); } private void handle(final APIGetVmConsoleAddressMsg msg) { refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } final APIGetVmConsoleAddressReply creply = new APIGetVmConsoleAddressReply(); GetVmConsoleAddressFromHostMsg hmsg = new GetVmConsoleAddressFromHostMsg(); hmsg.setHostUuid(self.getHostUuid()); hmsg.setVmInstanceUuid(self.getUuid()); bus.makeTargetServiceIdByResourceUuid(hmsg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(hmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { creply.setError(reply.getError()); } else { GetVmConsoleAddressFromHostReply hr = reply.castReply(); creply.setHostIp(hr.getHostIp()); creply.setPort(hr.getPort()); 
creply.setProtocol(hr.getProtocol()); } bus.reply(msg, creply); } }); } private void handle(APIGetVmBootOrderMsg msg) { APIGetVmBootOrderReply reply = new APIGetVmBootOrderReply(); String order = VmSystemTags.BOOT_ORDER.getTokenByResourceUuid(self.getUuid(), VmSystemTags.BOOT_ORDER_TOKEN); if (order != null) { reply.setOrder(list(order.split(","))); } else if (order == null && !IsoOperator.isIsoAttachedToVm(msg.getUuid())) { reply.setOrder(list(VmBootDevice.HardDisk.toString())); } else { reply.setOrder(list(VmBootDevice.HardDisk.toString(), VmBootDevice.CdRom.toString())); } bus.reply(msg, reply); } private void handle(APISetVmBootOrderMsg msg) { APISetVmBootOrderEvent evt = new APISetVmBootOrderEvent(msg.getId()); if (msg.getBootOrder() != null) { SystemTagCreator creator = VmSystemTags.BOOT_ORDER.newSystemTagCreator(self.getUuid()); creator.inherent = true; creator.recreate = true; creator.setTagByTokens(map(e(VmSystemTags.BOOT_ORDER_TOKEN, StringUtils.join(msg.getBootOrder(), ",")))); creator.create(); } else { VmSystemTags.BOOT_ORDER.deleteInherentTag(self.getUuid()); } boolean cdromBootOnce = false; if (msg.getSystemTags() != null && !msg.getSystemTags().isEmpty()) { Optional<String> opt = msg.getSystemTags().stream().filter(s -> VmSystemTags.CDROM_BOOT_ONCE.isMatch(s)).findAny(); if (opt.isPresent()) { cdromBootOnce = Boolean.parseBoolean( VmSystemTags.CDROM_BOOT_ONCE.getTokenByTag(opt.get(), VmSystemTags.CDROM_BOOT_ONCE_TOKEN) ); } } if (cdromBootOnce) { SystemTagCreator creator = VmSystemTags.CDROM_BOOT_ONCE.newSystemTagCreator(self.getUuid()); creator.inherent = true; creator.recreate = true; creator.setTagByTokens(map(e(VmSystemTags.CDROM_BOOT_ONCE_TOKEN, String.valueOf(true)))); creator.create(); } else { VmSystemTags.CDROM_BOOT_ONCE.deleteInherentTag(self.getUuid()); } evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APISetVmConsolePasswordMsg msg) { APISetVmConsolePasswordEvent evt = new APISetVmConsolePasswordEvent(msg.getId()); SystemTagCreator creator = VmSystemTags.CONSOLE_PASSWORD.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map(e(VmSystemTags.CONSOLE_PASSWORD_TOKEN, msg.getConsolePassword()))); creator.recreate = true; creator.create(); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APIGetVmConsolePasswordMsg msg) { APIGetVmConsolePasswordReply reply = new APIGetVmConsolePasswordReply(); String consolePassword = VmSystemTags.CONSOLE_PASSWORD.getTokenByResourceUuid(self.getUuid(), VmSystemTags.CONSOLE_PASSWORD_TOKEN); reply.setConsolePassword(consolePassword); bus.reply(msg, reply); } private void handle(APIDeleteVmConsolePasswordMsg msg) { APIDeleteVmConsolePasswordEvent evt = new APIDeleteVmConsolePasswordEvent(msg.getId()); VmSystemTags.CONSOLE_PASSWORD.delete(self.getUuid()); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APISetVmSshKeyMsg msg) { APISetVmSshKeyEvent evt = new APISetVmSshKeyEvent(msg.getId()); SystemTagCreator creator = VmSystemTags.SSHKEY.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map(e(VmSystemTags.SSHKEY_TOKEN, msg.getSshKey()))); creator.recreate = true; creator.create(); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APIGetVmSshKeyMsg msg) { APIGetVmSshKeyReply reply = new APIGetVmSshKeyReply(); String sshKey = VmSystemTags.SSHKEY.getTokenByResourceUuid(self.getUuid(), VmSystemTags.SSHKEY_TOKEN); reply.setSshKey(sshKey); bus.reply(msg, reply); } private void handle(APIDeleteVmSshKeyMsg msg) { 
APIDeleteVmSshKeyEvent evt = new APIDeleteVmSshKeyEvent(msg.getId()); VmSystemTags.SSHKEY.delete(self.getUuid()); evt.setInventory(getSelfInventory()); bus.publish(evt); } private boolean ipExists(final String l3uuid, final String ipAddress) { SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class); q.add(VmNicVO_.l3NetworkUuid, Op.EQ, l3uuid); q.add(VmNicVO_.ip, Op.EQ, ipAddress); return q.isExists(); } // If the VM is assigned static IP and it is now occupied, we will // remove the static IP tag so that it can acquire IP dynamically. // c.f. issue #1639 private void checkIpConflict(final String vmUuid) { StaticIpOperator ipo = new StaticIpOperator(); for (Map.Entry<String, String> entry : ipo.getStaticIpbyVmUuid(vmUuid).entrySet()) { if (ipExists(entry.getKey(), entry.getValue())) { ipo.deleteStaticIpByVmUuidAndL3Uuid(vmUuid, entry.getKey()); } } } private void recoverVm(final Completion completion) { final VmInstanceInventory vm = getSelfInventory(); final List<RecoverVmExtensionPoint> exts = pluginRgty.getExtensionList(RecoverVmExtensionPoint.class); for (RecoverVmExtensionPoint ext : exts) { ext.preRecoverVm(vm); } CollectionUtils.forEach(exts, new ForEachFunction<RecoverVmExtensionPoint>() { @Override public void run(RecoverVmExtensionPoint ext) { ext.beforeRecoverVm(vm); } }); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("recover-vm-%s", self.getUuid())); chain.then(new ShareFlow() { @Override public void setup() { flow(new NoRollbackFlow() { String __name__ = "check-ip-conflict"; @Override public void run(FlowTrigger trigger, Map data) { checkIpConflict(vm.getUuid()); trigger.next(); } }); flow(new NoRollbackFlow() { String __name__ = "recover-root-volume"; @Override public void run(final FlowTrigger trigger, Map data) { RecoverVolumeMsg msg = new RecoverVolumeMsg(); msg.setVolumeUuid(self.getRootVolumeUuid()); bus.makeTargetServiceIdByResourceUuid(msg, VolumeConstant.SERVICE_ID, self.getRootVolumeUuid()); bus.send(msg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); } else { trigger.next(); } } }); } }); flow(new NoRollbackFlow() { String __name__ = "recover-vm"; @Override public void run(FlowTrigger trigger, Map data) { self = changeVmStateInDb(VmInstanceStateEvent.stopped); CollectionUtils.forEach(exts, new ForEachFunction<RecoverVmExtensionPoint>() { @Override public void run(RecoverVmExtensionPoint ext) { ext.afterRecoverVm(vm); } }); trigger.next(); } }); done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { completion.success(); } }); error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }); } }).start(); } private void handle(final APIRecoverVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIRecoverVmInstanceEvent evt = new APIRecoverVmInstanceEvent(msg.getId()); refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { evt.setError(error); bus.publish(evt); chain.next(); return; } recoverVm(new Completion(msg, chain) { @Override public void success() { evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } 
@Override public String getName() { return "recover-vm"; } }); } private void handle(final APIExpungeVmInstanceMsg msg) { final APIExpungeVmInstanceEvent evt = new APIExpungeVmInstanceEvent(msg.getId()); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { expunge(msg, new Completion(msg, chain) { @Override public void success() { bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return "expunge-vm-by-api"; } }); } private void handle(final DetachIsoFromVmInstanceMsg msg) { DetachIsoFromVmInstanceReply reply = new DetachIsoFromVmInstanceReply(); detachIso(msg.getIsoUuid() ,new Completion(msg) { @Override public void success() { bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(final APIDetachIsoFromVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIDetachIsoFromVmInstanceEvent evt = new APIDetachIsoFromVmInstanceEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } detachIso(msg.getIsoUuid() ,new Completion(msg, chain) { @Override public void success() { self = dbf.reload(self); evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return String.format("detach-iso-from-vm-%s", self.getUuid()); } }); } private void detachIso(final String isoUuid, final Completion completion) { if (!IsoOperator.isIsoAttachedToVm(self.getUuid())) { completion.success(); return; } if (!IsoOperator.getIsoUuidByVmUuid(self.getUuid()).contains(isoUuid)) { completion.success(); return; } VmCdRomVO targetVmCdRomVO = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, self.getUuid()) .eq(VmCdRomVO_.isoUuid, isoUuid) .find(); assert targetVmCdRomVO != null; if (self.getState() == VmInstanceState.Stopped || self.getState() == VmInstanceState.Destroyed) { targetVmCdRomVO.setIsoUuid(null); targetVmCdRomVO.setIsoInstallPath(null); dbf.update(targetVmCdRomVO); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); completion.success(); return; } VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.DetachIso); boolean isoNotExist = spec.getDestIsoList().stream().noneMatch(isoSpec -> isoSpec.getImageUuid().equals(isoUuid)); if (isoNotExist) { // the image ISO has been deleted from backup storage // try to detach it from the VM anyway IsoSpec isoSpec = new IsoSpec(); isoSpec.setImageUuid(isoUuid); spec.getDestIsoList().add(isoSpec); logger.debug(String.format("the iso[uuid:%s] has been deleted, try to detach it from the VM[uuid:%s] anyway", isoUuid, self.getUuid())); } FlowChain chain = getDetachIsoWorkFlowChain(spec.getVmInventory()); chain.setName(String.format("detach-iso-%s-from-vm-%s", isoUuid, self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(VmInstanceConstant.Params.DetachingIsoUuid.toString(), isoUuid); 
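// give MarshalVmOperationFlowExtensionPoint implementations a chance to replace flows before the detach-ISO chain starts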
setFlowMarshaller(chain); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { targetVmCdRomVO.setIsoUuid(null); targetVmCdRomVO.setIsoInstallPath(null); dbf.update(targetVmCdRomVO); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } @Transactional(readOnly = true) private List<L3NetworkInventory> getAttachableL3Network(String accountUuid) { List<String> l3Uuids = acntMgr.getResourceUuidsCanAccessByAccount(accountUuid, L3NetworkVO.class); if (l3Uuids != null && l3Uuids.isEmpty()) { return new ArrayList<L3NetworkInventory>(); } if (self.getClusterUuid() == null){ return getAttachableL3NetworkWhenClusterUuidSetNull(l3Uuids); } String sql; TypedQuery<L3NetworkVO> q; if (self.getVmNics().isEmpty()) { if (l3Uuids == null) { // accessed by a system admin sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.category != :l3Category" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3Category", L3NetworkCategory.System); } else { // accessed by a normal account sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.uuid in (:l3uuids)" + " and l3.category != :l3Category" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3uuids", l3Uuids); q.setParameter("l3Category", L3NetworkCategory.System); } } else { if (l3Uuids == null) { // accessed by a system admin sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where l3.uuid not in" + " (select ip.l3NetworkUuid from VmNicVO nic, UsedIpVO ip where ip.vmNicUuid = nic.uuid and nic.vmInstanceUuid = :uuid)" + " and vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.category != :l3Category" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3Category", L3NetworkCategory.System); } else { // accessed by a normal account sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where l3.uuid not in" + " (select ip.l3NetworkUuid from VmNicVO nic, UsedIpVO ip where ip.vmNicUuid = nic.uuid and nic.vmInstanceUuid = :uuid)" + " and vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.category != :l3Category" + " and l3.uuid in (:l3uuids)" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3uuids", l3Uuids); q.setParameter("l3Category", L3NetworkCategory.System); } } q.setParameter("l3State", L3NetworkState.Enabled); q.setParameter("uuid", self.getUuid()); List<L3NetworkVO> l3s = q.getResultList(); return L3NetworkInventory.valueOf(l3s); } 
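// when the VM is not yet bound to a cluster (e.g. created but never started), candidate L3 networks are derived from the clusters attached to the root volume's primary storage, as below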
@Transactional(readOnly = true) private List<L3NetworkInventory> getAttachableL3NetworkWhenClusterUuidSetNull(List<String> uuids){ return new SQLBatchWithReturn<List<L3NetworkInventory>>() { @Override protected List<L3NetworkInventory> scripts(){ String rootPsUuid = self.getRootVolume().getPrimaryStorageUuid(); //Get Candidate ClusterUuids From Primary Storage List<String> clusterUuids = q(PrimaryStorageClusterRefVO.class) .select(PrimaryStorageClusterRefVO_.clusterUuid) .eq(PrimaryStorageClusterRefVO_.primaryStorageUuid, rootPsUuid) .listValues(); //filtering the ClusterUuid by vmNic L3s one by one if (!self.getVmNics().isEmpty()){ for (String l3uuid: self.getVmNics().stream().flatMap(nic -> VmNicHelper.getL3Uuids(VmNicInventory.valueOf(nic)).stream()) .distinct().collect(Collectors.toList())){ clusterUuids = getCandidateClusterUuidsFromAttachedL3(l3uuid, clusterUuids); if (clusterUuids.isEmpty()){ return new ArrayList<>(); } } } //Get enabled l3 from the Candidate ClusterUuids List<L3NetworkVO> l3s = sql("select l3" + " from L3NetworkVO l3, L2NetworkVO l2, " + " L2NetworkClusterRefVO l2ref" + " where l2.uuid = l3.l2NetworkUuid " + " and l2.uuid = l2ref.l2NetworkUuid " + " and l2ref.clusterUuid in (:Uuids)" + " and l3.state = :l3State " + " and l3.category != :l3Category" + " group by l3.uuid") .param("Uuids", clusterUuids) .param("l3Category", L3NetworkCategory.System) .param("l3State", L3NetworkState.Enabled).list(); if (l3s.isEmpty()){ return new ArrayList<>(); } //filter result if normal user if (uuids != null) { l3s = l3s.stream().filter(l3 -> uuids.contains(l3.getUuid())).collect(Collectors.toList()); } if (l3s.isEmpty()){ return new ArrayList<>(); } //filter l3 that already attached if (!self.getVmNics().isEmpty()) { List<String> vmL3Uuids = self.getVmNics().stream().flatMap(nic -> VmNicHelper.getL3Uuids(VmNicInventory.valueOf(nic)).stream()) .distinct().collect(Collectors.toList()); l3s = l3s.stream().filter(l3 -> !vmL3Uuids.contains(l3.getUuid())).collect(Collectors.toList()); } return L3NetworkInventory.valueOf(l3s); } private List<String> getCandidateClusterUuidsFromAttachedL3(String l3Uuid, List<String> clusterUuids) { return sql("select l2ref.clusterUuid " + " from L3NetworkVO l3, L2NetworkVO l2, L2NetworkClusterRefVO l2ref " + " where l3.uuid = :l3Uuid " + " and l3.l2NetworkUuid = l2.uuid " + " and l2.uuid = l2ref.l2NetworkUuid" + " and l3.category != :l3Category" + " and l2ref.clusterUuid in (:uuids) " + " group by l2ref.clusterUuid", String.class) .param("l3Uuid", l3Uuid) .param("l3Category", L3NetworkCategory.System) .param("uuids", clusterUuids).list(); } }.execute(); } private void handle(APIGetVmAttachableL3NetworkMsg msg) { APIGetVmAttachableL3NetworkReply reply = new APIGetVmAttachableL3NetworkReply(); reply.setInventories(getAttachableL3Network(msg.getSession().getAccountUuid())); bus.reply(msg, reply); } private void handle(final APIAttachIsoToVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIAttachIsoToVmInstanceEvent evt = new APIAttachIsoToVmInstanceEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } attachIso(msg.getIsoUuid(), msg.getCdRomUuid(), new Completion(msg, chain) { @Override public void success() { self = dbf.reload(self); 
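// reload so the inventory carried by the event reflects the CD-ROM/ISO changes made by the attach flow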
evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return String.format("attach-iso-%s-to-vm-%s", msg.getIsoUuid(), self.getUuid()); } }); } private void attachIso(final String isoUuid, String specifiedCdRomUuid, final Completion completion) { checkIfIsoAttachable(isoUuid); IsoOperator.checkAttachIsoToVm(self.getUuid(), isoUuid); List<VmInstanceInventory> vms = list(VmInstanceInventory.valueOf(self)); for (VmAttachIsoExtensionPoint ext : pluginRgty.getExtensionList(VmAttachIsoExtensionPoint.class)) { ErrorCode err = ext.filtCandidateVms(isoUuid, vms); if (err != null) { completion.fail(err); return; } } VmCdRomVO vmCdRomVO = null; if (StringUtils.isNotEmpty(specifiedCdRomUuid)) { vmCdRomVO = dbf.findByUuid(specifiedCdRomUuid, VmCdRomVO.class); } else { vmCdRomVO = IsoOperator.getEmptyCdRom(self.getUuid()); } final VmCdRomVO targetVmCdRomVO = vmCdRomVO; if (self.getState() == VmInstanceState.Stopped) { targetVmCdRomVO.setIsoUuid(isoUuid); dbf.update(targetVmCdRomVO); completion.success(); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); return; } final ImageInventory iso = ImageInventory.valueOf(dbf.findByUuid(isoUuid, ImageVO.class)); VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachIso); IsoSpec isoSpec = new IsoSpec(); isoSpec.setImageUuid(isoUuid); isoSpec.setDeviceId(targetVmCdRomVO.getDeviceId()); spec.getDestIsoList().add(isoSpec); FlowChain chain = getAttachIsoWorkFlowChain(spec.getVmInventory()); chain.setName(String.format("attach-iso-%s-to-vm-%s", isoUuid, self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(Params.AttachingIsoInventory.toString(), iso); setFlowMarshaller(chain); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { // new IsoOperator().attachIsoToVm(self.getUuid(), isoUuid); final VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); final VmInstanceSpec.IsoSpec isoSpec = spec.getDestIsoList().stream() .filter(s -> s.getImageUuid().equals(isoUuid)) .findAny() .get(); targetVmCdRomVO.setIsoUuid(isoUuid); targetVmCdRomVO.setIsoInstallPath(isoSpec.getInstallPath()); dbf.update(targetVmCdRomVO); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } @Transactional(readOnly = true) private void checkIfIsoAttachable(String isoUuid) { String psUuid = getSelfInventory().getRootVolume().getPrimaryStorageUuid(); String sql = "select count(i)" + " from ImageCacheVO i" + " where i.primaryStorageUuid = :psUuid" + " and i.imageUuid = :isoUuid"; TypedQuery<Long> q = dbf.getEntityManager().createQuery(sql, Long.class); q.setParameter("psUuid", psUuid); q.setParameter("isoUuid", isoUuid); Long count = q.getSingleResult(); if (count > 0) { // on the same primary storage return; } PrimaryStorageVO psvo = dbf.getEntityManager().find(PrimaryStorageVO.class, psUuid); PrimaryStorageType type = PrimaryStorageType.valueOf(psvo.getType()); List<String> bsUuids = type.findBackupStorage(psUuid); if (bsUuids == null) { List<String> possibleBsTypes = hostAllocatorMgr.getBackupStorageTypesByPrimaryStorageTypeFromMetrics(psvo.getType()); sql = "select count(bs)" + " from 
BackupStorageVO bs, ImageBackupStorageRefVO ref" + " where bs.uuid = ref.backupStorageUuid" + " and ref.imageUuid = :imgUuid" + " and bs.type in (:bsTypes)"; q = dbf.getEntityManager().createQuery(sql, Long.class); q.setParameter("imgUuid", isoUuid); q.setParameter("bsTypes", possibleBsTypes); count = q.getSingleResult(); if (count > 0) { return; } } else if (!bsUuids.isEmpty()) { sql = "select count(bs)" + " from BackupStorageVO bs, ImageBackupStorageRefVO ref" + " where bs.uuid = ref.backupStorageUuid" + " and ref.imageUuid = :imgUuid" + " and bs.uuid in (:bsUuids)"; q = dbf.getEntityManager().createQuery(sql, Long.class); q.setParameter("imgUuid", isoUuid); q.setParameter("bsUuids", bsUuids); count = q.getSingleResult(); if (count > 0) { return; } } throw new OperationFailureException(operr("the ISO[uuid:%s] is on a backup storage that is not compatible with the primary storage[uuid:%s]" + " where the VM[name:%s, uuid:%s] resides", isoUuid, psUuid, self.getName(), self.getUuid())); } private void handle(final APIDetachL3NetworkFromVmMsg msg) { VmNicVO vmNicVO = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class); String vmNicAccountUuid = acntMgr.getOwnerAccountUuidOfResource(vmNicVO.getUuid()); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIDetachL3NetworkFromVmEvent evt = new APIDetachL3NetworkFromVmEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("detach-l3-network-from-vm-%s", msg.getVmInstanceUuid())); fchain.then(new NoRollbackFlow() { String __name__ = "before-detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { VmNicInventory nic = VmNicInventory.valueOf((VmNicVO) Q.New(VmNicVO.class).eq(VmNicVO_.uuid, msg.getVmNicUuid()).find()); beforeDetachNic(nic, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { String __name__ = "detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { String releaseNicFlag = msg.getSystemTags() == null ? null : SystemTagUtils.findTagValue(msg.getSystemTags(), VmSystemTags.RELEASE_NIC_AFTER_DETACH_NIC, VmSystemTags.RELEASE_NIC_AFTER_DETACH_NIC_TOKEN); boolean releaseNic = releaseNicFlag == null ?
true : Boolean.valueOf(releaseNicFlag); detachNic(msg.getVmNicUuid(), releaseNic, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.publish(evt); chain.next(); VmNicInventory vmNicInventory = VmNicInventory.valueOf(vmNicVO); VmNicCanonicalEvents.VmNicEventData vmNicEventData = new VmNicCanonicalEvents.VmNicEventData(); vmNicEventData.setCurrentStatus(VmInstanceState.Destroyed.toString()); vmNicEventData.setAccountUuid(vmNicAccountUuid); vmNicEventData.setInventory(vmNicInventory); evtf.fire(VmNicCanonicalEvents.VM_NIC_DELETED_PATH, vmNicEventData); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { evt.setError(errCode); bus.publish(evt); chain.next(); } }).start(); } @Override public String getName() { return "detach-nic"; } }); } protected void beforeDetachNic(VmNicInventory nicInventory, Completion completion) { completion.success(); } // switch the VM's default nic if the current default nic is the nic being detached protected void selectDefaultL3(VmNicInventory nic) { if (self.getDefaultL3NetworkUuid() != null && !VmNicHelper.isDefaultNic(nic, VmInstanceInventory.valueOf(self))) { return; } final VmInstanceInventory vm = getSelfInventory(); final String previousDefaultL3 = vm.getDefaultL3NetworkUuid(); // the nic has been removed, reload self = dbf.reload(self); final VmNicVO candidate = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return arg.getUuid().equals(nic.getUuid()) ? null : arg; } }); if (candidate != null) { String newDefaultL3 = VmNicHelper.getPrimaryL3Uuid(VmNicInventory.valueOf(candidate)); CollectionUtils.safeForEach( pluginRgty.getExtensionList(VmDefaultL3NetworkChangedExtensionPoint.class), new ForEachFunction<VmDefaultL3NetworkChangedExtensionPoint>() { @Override public void run(VmDefaultL3NetworkChangedExtensionPoint ext) { ext.vmDefaultL3NetworkChanged(vm, previousDefaultL3, newDefaultL3); } }); self.setDefaultL3NetworkUuid(newDefaultL3); logger.debug(String.format( "after detaching the nic[uuid:%s, L3 uuid:%s], change the default L3 of the VM[uuid:%s]" + " to the L3 network[uuid: %s]", nic.getUuid(), VmNicHelper.getL3Uuids(nic), self.getUuid(), newDefaultL3)); } else { self.setDefaultL3NetworkUuid(null); logger.debug(String.format( "after detaching the nic[uuid:%s, L3 uuid:%s], change the default L3 of the VM[uuid:%s]" + " to null, as the VM has no other nics", nic.getUuid(), VmNicHelper.getL3Uuids(nic), self.getUuid())); } self = dbf.updateAndRefresh(self); } private void detachNic(final String nicUuid, boolean releaseNic, final Completion completion) { VmNicVO vmNicVO = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return arg.getUuid().equals(nicUuid) ? arg : null; } }); if (vmNicVO == null) { completion.success(); return; } final VmNicInventory nic = VmNicInventory.valueOf( CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return arg.getUuid().equals(nicUuid) ?
arg : null; } }) ); for (VmDetachNicExtensionPoint ext : pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class)) { ext.preDetachNic(nic); } CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class), new ForEachFunction<VmDetachNicExtensionPoint>() { @Override public void run(VmDetachNicExtensionPoint arg) { arg.beforeDetachNic(nic); } }); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.DetachNic); spec.setVmInventory(VmInstanceInventory.valueOf(self)); spec.setDestNics(list(nic)); L3NetworkInventory l3Inv = L3NetworkInventory.valueOf(dbf.findByUuid(nic.getL3NetworkUuid(), L3NetworkVO.class)); spec.setL3Networks(list(new VmNicSpec(l3Inv))); FlowChain flowChain = FlowChainBuilder.newSimpleFlowChain(); flowChain.setName(String.format("detachNic-vm-%s-nic-%s", self.getUuid(), nicUuid)); setFlowMarshaller(flowChain); flowChain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); flowChain.getData().put(Params.ReleaseNicAfterDetachNic.toString(), releaseNic); if (self.getState() == VmInstanceState.Running) { flowChain.then(new VmDetachNicOnHypervisorFlow()); } flowChain.then(new VmReleaseResourceOnDetachingNicFlow()); flowChain.then(new VmDetachNicFlow()); flowChain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { selectDefaultL3(nic); removeStaticIp(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class), new ForEachFunction<VmDetachNicExtensionPoint>() { @Override public void run(VmDetachNicExtensionPoint arg) { arg.afterDetachNic(nic); } }); completion.success(); } private void removeStaticIp() { for (UsedIpInventory ip : nic.getUsedIps()) { new StaticIpOperator().deleteStaticIpByVmUuidAndL3Uuid(self.getUuid(), ip.getL3NetworkUuid()); } } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class), new ForEachFunction<VmDetachNicExtensionPoint>() { @Override public void run(VmDetachNicExtensionPoint arg) { arg.failedToDetachNic(nic, errCode); } }); completion.fail(errCode); } }).start(); } private void handle(final APIChangeInstanceOfferingMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { APIChangeInstanceOfferingEvent evt = new APIChangeInstanceOfferingEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } changeOffering(msg, new Completion(msg, chain) { @Override public void success() { refreshVO(); evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return "change-instance-offering"; } }); } private void changeOffering(APIChangeInstanceOfferingMsg msg, final Completion completion) { final InstanceOfferingVO newOfferingVO = dbf.findByUuid(msg.getInstanceOfferingUuid(), InstanceOfferingVO.class); final InstanceOfferingInventory inv = InstanceOfferingInventory.valueOf(newOfferingVO); final VmInstanceInventory vm = getSelfInventory(); List<ChangeInstanceOfferingExtensionPoint> exts = pluginRgty.getExtensionList(ChangeInstanceOfferingExtensionPoint.class); 
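// the pre* hooks below run synchronously and may abort the change by throwing; the before*/after* hooks go through safeForEach, so a failing extension is logged but does not block the operation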
exts.forEach(ext -> ext.preChangeInstanceOffering(vm, inv)); CollectionUtils.safeForEach(exts, ext -> ext.beforeChangeInstanceOffering(vm, inv)); changeCpuAndMemory(inv.getCpuNum(), inv.getMemorySize(), new Completion(completion) { @Override public void success() { self.setAllocatorStrategy(inv.getAllocatorStrategy()); self.setInstanceOfferingUuid(msg.getInstanceOfferingUuid()); self = dbf.updateAndRefresh(self); CollectionUtils.safeForEach(exts, ext -> ext.afterChangeInstanceOffering(vm, inv)); completion.success(); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } private void changeCpuAndMemory(final int cpuNum, final long memorySize, final Completion completion) { if (self.getState() == VmInstanceState.Stopped) { self.setCpuNum(cpuNum); self.setMemorySize(memorySize); self = dbf.updateAndRefresh(self); completion.success(); return; } final int oldCpuNum = self.getCpuNum(); final long oldMemorySize = self.getMemorySize(); class AlignmentStruct { long alignedMemory; } final AlignmentStruct struct = new AlignmentStruct(); struct.alignedMemory = memorySize; FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("change-cpu-and-memory-of-vm-%s", self.getUuid())); chain.then(new NoRollbackFlow() { String __name__ = "align-memory"; @Override public void run(FlowTrigger chain, Map data) { // align memory long increaseMemory = memorySize - oldMemorySize; long remainderMemory = increaseMemory % SizeUnit.MEGABYTE.toByte(128); if (increaseMemory != 0 && remainderMemory != 0) { if (remainderMemory < SizeUnit.MEGABYTE.toByte(128) / 2) { increaseMemory = increaseMemory / SizeUnit.MEGABYTE.toByte(128) * SizeUnit.MEGABYTE.toByte(128); } else { increaseMemory = (increaseMemory / SizeUnit.MEGABYTE.toByte(128) + 1) * SizeUnit.MEGABYTE.toByte(128); } if (increaseMemory == 0) { struct.alignedMemory = oldMemorySize + SizeUnit.MEGABYTE.toByte(128); } else { struct.alignedMemory = oldMemorySize + increaseMemory; } logger.debug(String.format("automatically align memory from %s to %s", memorySize, struct.alignedMemory)); } chain.next(); } }).then(new Flow() { String __name__ = String.format("allocate-host-capacity-on-host-%s", self.getHostUuid()); boolean result = false; @Override public void run(FlowTrigger chain, Map data) { DesignatedAllocateHostMsg msg = new DesignatedAllocateHostMsg(); msg.setCpuCapacity(cpuNum - oldCpuNum); msg.setMemoryCapacity(struct.alignedMemory - oldMemorySize); msg.setAllocatorStrategy(HostAllocatorConstant.DESIGNATED_HOST_ALLOCATOR_STRATEGY_TYPE); msg.setVmInstance(VmInstanceInventory.valueOf(self)); if (self.getImageUuid() != null && dbf.findByUuid(self.getImageUuid(), ImageVO.class) != null) { msg.setImage(ImageInventory.valueOf(dbf.findByUuid(self.getImageUuid(), ImageVO.class))); } msg.setHostUuid(self.getHostUuid()); msg.setFullAllocate(false); msg.setL3NetworkUuids(VmNicHelper.getL3Uuids(VmNicInventory.valueOf(self.getVmNics()))); msg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); bus.send(msg, new CloudBusCallBack(chain) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { ErrorCode err = operr("host[uuid:%s] capacity is not enough to offer cpu[%s], memory[%s bytes]", self.getHostUuid(), cpuNum - oldCpuNum, struct.alignedMemory - oldMemorySize); err.setCause(reply.getError()); chain.fail(err); } else { result = true; logger.debug(String.format("reserve memory %s bytes and cpu %s on host[uuid:%s]", memorySize - self.getMemorySize(), cpuNum - self.getCpuNum(), 
self.getHostUuid())); chain.next(); } } }); } @Override public void rollback(FlowRollback chain, Map data) { if (result) { ReturnHostCapacityMsg msg = new ReturnHostCapacityMsg(); msg.setCpuCapacity(cpuNum - oldCpuNum); msg.setMemoryCapacity(struct.alignedMemory - oldMemorySize); msg.setHostUuid(self.getHostUuid()); msg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); bus.send(msg); } chain.rollback(); } }).then(new NoRollbackFlow() { String __name__ = String.format("change-cpu-of-vm-%s", self.getUuid()); @Override public void run(FlowTrigger chain, Map data) { if (cpuNum != self.getCpuNum()) { IncreaseVmCpuMsg msg = new IncreaseVmCpuMsg(); msg.setVmInstanceUuid(self.getUuid()); msg.setHostUuid(self.getHostUuid()); msg.setCpuNum(cpuNum); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(msg, new CloudBusCallBack(chain) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.error("failed to update cpu"); chain.fail(reply.getError()); } else { IncreaseVmCpuReply r = reply.castReply(); self.setCpuNum(r.getCpuNum()); chain.next(); } } }); } else { chain.next(); } } }).then(new NoRollbackFlow() { String __name__ = String.format("change-memory-of-vm-%s", self.getUuid()); @Override public void run(FlowTrigger chain, Map data) { if (struct.alignedMemory != self.getMemorySize()) { IncreaseVmMemoryMsg msg = new IncreaseVmMemoryMsg(); msg.setVmInstanceUuid(self.getUuid()); msg.setHostUuid(self.getHostUuid()); msg.setMemorySize(struct.alignedMemory); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(msg, new CloudBusCallBack(chain) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.error("failed to update memory"); chain.fail(reply.getError()); } else { IncreaseVmMemoryReply r = reply.castReply(); self.setMemorySize(r.getMemorySize()); chain.next(); } } }); } else { chain.next(); } } }).done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { dbf.update(self); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } private void handle(final APIUpdateVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { APIUpdateVmInstanceEvent evt = new APIUpdateVmInstanceEvent(msg.getId()); refreshVO(); List<Runnable> extensions = new ArrayList<Runnable>(); final VmInstanceInventory vm = getSelfInventory(); boolean update = false; if (msg.getName() != null) { self.setName(msg.getName()); update = true; } if (msg.getDescription() != null) { self.setDescription(msg.getDescription()); update = true; } if (msg.getState() != null) { self.setState(VmInstanceState.valueOf(msg.getState())); update = true; if (!vm.getState().equals(msg.getState())) { extensions.add(new Runnable() { @Override public void run() { logger.debug(String.format("vm[uuid:%s] changed state from %s to %s", self.getUuid(), vm.getState(), msg.getState())); VmCanonicalEvents.VmStateChangedData data = new VmCanonicalEvents.VmStateChangedData(); data.setVmUuid(self.getUuid()); data.setOldState(vm.getState()); data.setNewState(msg.getState()); data.setInventory(getSelfInventory()); evtf.fire(VmCanonicalEvents.VM_FULL_STATE_CHANGED_PATH, data); } }); } } if (msg.getDefaultL3NetworkUuid() != null) { 
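// switching the default L3 network also queues a VmDefaultL3NetworkChangedExtensionPoint notification, fired after the update is persisted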
                    self.setDefaultL3NetworkUuid(msg.getDefaultL3NetworkUuid());
                    update = true;
                    if (!msg.getDefaultL3NetworkUuid().equals(vm.getDefaultL3NetworkUuid())) {
                        extensions.add(new Runnable() {
                            @Override
                            public void run() {
                                for (VmDefaultL3NetworkChangedExtensionPoint ext : pluginRgty.getExtensionList(VmDefaultL3NetworkChangedExtensionPoint.class)) {
                                    ext.vmDefaultL3NetworkChanged(vm, vm.getDefaultL3NetworkUuid(), msg.getDefaultL3NetworkUuid());
                                }
                            }
                        });
                    }
                }

                if (msg.getPlatform() != null) {
                    self.setPlatform(msg.getPlatform());
                    update = true;
                }

                if (update) {
                    dbf.update(self);
                }

                updateVmIsoFirstOrder(msg.getSystemTags());

                CollectionUtils.safeForEach(extensions, Runnable::run);

                if (msg.getCpuNum() != null || msg.getMemorySize() != null) {
                    int cpuNum = msg.getCpuNum() == null ? self.getCpuNum() : msg.getCpuNum();
                    long memory = msg.getMemorySize() == null ? self.getMemorySize() : msg.getMemorySize();
                    changeCpuAndMemory(cpuNum, memory, new Completion(msg, chain) {
                        @Override
                        public void success() {
                            refreshVO();
                            evt.setInventory(getSelfInventory());
                            bus.publish(evt);
                            chain.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            evt.setError(errorCode);
                            bus.publish(evt);
                            chain.next();
                        }
                    });
                } else {
                    evt.setInventory(getSelfInventory());
                    bus.publish(evt);
                    chain.next();
                }
            }

            @Override
            public String getName() {
                return "update-vm-info";
            }
        });
    }

    // Move the specified ISO into the first CD-ROM slot, making it the default one;
    // the new order takes effect after the VM restarts
    private void updateVmIsoFirstOrder(List<String> systemTags) {
        if (systemTags == null || systemTags.isEmpty()) {
            return;
        }

        String isoUuid = SystemTagUtils.findTagValue(systemTags, VmSystemTags.ISO, VmSystemTags.ISO_TOKEN);
        if (isoUuid == null) {
            return;
        }

        String vmUuid = self.getUuid();
        List<String> isoList = IsoOperator.getIsoUuidByVmUuid(vmUuid);
        if (!isoList.contains(isoUuid)) {
            throw new OperationFailureException(operr("ISO[uuid:%s] is not attached to VM[uuid:%s]", isoUuid, self.getUuid()));
        }

        List<VmCdRomVO> cdRomVOS = Q.New(VmCdRomVO.class)
                .eq(VmCdRomVO_.vmInstanceUuid, self.getUuid())
                .orderBy(VmCdRomVO_.deviceId, SimpleQuery.Od.ASC)
                .list();
        if (cdRomVOS.size() <= 1) {
            return;
        }

        if (isoUuid.equals(cdRomVOS.get(0).getIsoUuid())) {
            return;
        }

        Optional<VmCdRomVO> opt = cdRomVOS.stream().filter(v -> v.getIsoUuid().equals(isoUuid)).findAny();
        if (!opt.isPresent()) {
            return;
        }

        // swap the ISO and its install path between the CD-ROM currently holding it
        // and the first CD-ROM
        VmCdRomVO sourceCdRomVO = opt.get();
        VmCdRomVO targetCdRomVO = cdRomVOS.get(0);
        String targetCdRomIsoUuid = targetCdRomVO.getIsoUuid();
        String path = targetCdRomVO.getIsoInstallPath();
        targetCdRomVO.setIsoUuid(sourceCdRomVO.getIsoUuid());
        targetCdRomVO.setIsoInstallPath(sourceCdRomVO.getIsoInstallPath());
        sourceCdRomVO.setIsoUuid(targetCdRomIsoUuid);
        sourceCdRomVO.setIsoInstallPath(path);

        new SQLBatch() {
            @Override
            protected void scripts() {
                merge(targetCdRomVO);
                merge(sourceCdRomVO);
            }
        }.execute();
    }

    @Transactional(readOnly = true)
    private List<VolumeVO> getAttachableVolume(String accountUuid) {
        if (!self.getState().equals(VmInstanceState.Stopped) && self.getPlatform().equals(ImagePlatform.Other.toString())) {
            return Collections.emptyList();
        }

        List<String> volUuids = acntMgr.getResourceUuidsCanAccessByAccount(accountUuid, VolumeVO.class);
        if (volUuids != null && volUuids.isEmpty()) {
            return Collections.emptyList();
        }

        List<String> formats = VolumeFormat.getVolumeFormatSupportedByHypervisorTypeInString(self.getHypervisorType());
        if (formats.isEmpty()) {
            throw new CloudRuntimeException(String.format("cannot find volume formats for the hypervisor type[%s]", self.getHypervisorType()));
        }

        String sql;
        List<VolumeVO> vos;
        /*
         * Cluster1: [PS1, PS2, PS3]
* Cluster2: [PS1, PS2] * Cluster3: [PS1, PS2, PS3] * * Assume a stopped vm which has no clusterUuid and root volume on PS1 * then it can attach all suitable data volumes from [PS1, PS2] * because PS1 is attached to [Cluster1, Cluster2, Cluster3] * and they all have [PS1, PS2] attached */ List<String> psUuids = null; if (self.getClusterUuid() == null) { // 1. get clusterUuids of VM->RV->PS sql = "select cls.uuid from" + " ClusterVO cls, VolumeVO vol, VmInstanceVO vm, PrimaryStorageClusterRefVO ref" + " where vm.uuid = :vmUuid" + " and vol.uuid = vm.rootVolumeUuid" + " and ref.primaryStorageUuid = vol.primaryStorageUuid" + " and cls.uuid = ref.clusterUuid" + " and cls.state = :clsState" + " group by cls.uuid"; List<String> clusterUuids = SQL.New(sql) .param("vmUuid", self.getUuid()) .param("clsState", ClusterState.Enabled) .list(); // 2. get all PS that attachs to clusterUuids sql = "select ps.uuid from PrimaryStorageVO ps" + " inner join PrimaryStorageClusterRefVO ref on ref.primaryStorageUuid = ps.uuid" + " inner join ClusterVO cls on cls.uuid = ref.clusterUuid" + " where cls.uuid in (:clusterUuids)" + " and ps.state = :psState" + " and ps.status = :psStatus" + " group by ps.uuid" + " having count(distinct cls.uuid) = :clsCount"; psUuids = SQL.New(sql) .param("clusterUuids", clusterUuids) .param("psState", PrimaryStorageState.Enabled) .param("psStatus", PrimaryStorageStatus.Connected) .param("clsCount", (long)clusterUuids.size()) .list(); } if (volUuids == null) { // accessed by a system admin // if vm.clusterUuid is not null sql = "select vol" + " from VolumeVO vol, VmInstanceVO vm, PrimaryStorageClusterRefVO ref" + " where vol.type = :type" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " and vm.clusterUuid = ref.clusterUuid" + " and ref.primaryStorageUuid = vol.primaryStorageUuid" + " and vm.uuid = :vmUuid" + " group by vol.uuid"; TypedQuery<VolumeVO> q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volStatus", VolumeStatus.Ready); q.setParameter("formats", formats); q.setParameter("vmUuid", self.getUuid()); q.setParameter("type", VolumeType.Data); vos = q.getResultList(); // if vm.clusterUuid is null if (self.getClusterUuid() == null) { // 3. 
get data volume candidates from psUuids sql = "select vol from VolumeVO vol" + " where vol.primaryStorageUuid in (:psUuids)" + " and vol.type = :volType" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " group by vol.uuid"; List<VolumeVO> dvs = SQL.New(sql) .param("psUuids", psUuids) .param("volType", VolumeType.Data) .param("volState", VolumeState.Enabled) .param("volStatus", VolumeStatus.Ready) .param("formats", formats) .list(); vos.addAll(dvs); } // for NotInstantiated data volumes sql = "select vol" + " from VolumeVO vol" + " where vol.type = :type" + " and vol.status = :volStatus" + " and vol.state = :volState" + " group by vol.uuid"; q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("type", VolumeType.Data); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volStatus", VolumeStatus.NotInstantiated); vos.addAll(q.getResultList()); } else { // accessed by a normal account // if vm.clusterUuid is not null sql = "select vol" + " from VolumeVO vol, VmInstanceVO vm, PrimaryStorageClusterRefVO ref" + " where vol.type = :type" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " and vm.clusterUuid = ref.clusterUuid" + " and ref.primaryStorageUuid = vol.primaryStorageUuid" + " and vol.uuid in (:volUuids)" + " and vm.uuid = :vmUuid" + " group by vol.uuid"; TypedQuery<VolumeVO> q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volStatus", VolumeStatus.Ready); q.setParameter("vmUuid", self.getUuid()); q.setParameter("formats", formats); q.setParameter("type", VolumeType.Data); q.setParameter("volUuids", volUuids); vos = q.getResultList(); // if vm.clusterUuid is null if (self.getClusterUuid() == null) { // 3. 
get data volume candidates from psUuids sql = "select vol from VolumeVO vol" + " where vol.primaryStorageUuid in (:psUuids)" + " and vol.type = :volType" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " and vol.uuid in (:volUuids)" + " group by vol.uuid"; List<VolumeVO> dvs = SQL.New(sql) .param("psUuids", psUuids) .param("volType", VolumeType.Data) .param("volState", VolumeState.Enabled) .param("volStatus", VolumeStatus.Ready) .param("formats", formats) .param("volUuids", volUuids) .list(); vos.addAll(dvs); } // for NotInstantiated data volumes sql = "select vol" + " from VolumeVO vol" + " where vol.type = :type" + " and vol.status = :volStatus" + " and vol.state = :volState" + " and vol.uuid in (:volUuids)" + " group by vol.uuid"; q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("type", VolumeType.Data); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volUuids", volUuids); q.setParameter("volStatus", VolumeStatus.NotInstantiated); vos.addAll(q.getResultList()); } for (GetAttachableVolumeExtensionPoint ext : pluginRgty.getExtensionList(GetAttachableVolumeExtensionPoint.class)) { if (!vos.isEmpty()) { vos = ext.returnAttachableVolumes(getSelfInventory(), vos); } } return vos; } private void handle(APIGetVmAttachableDataVolumeMsg msg) { APIGetVmAttachableDataVolumeReply reply = new APIGetVmAttachableDataVolumeReply(); reply.setInventories(VolumeInventory.valueOf(getAttachableVolume(msg.getSession().getAccountUuid()))); bus.reply(msg, reply); } private void handle(final APIGetVmMigrationCandidateHostsMsg msg) { final APIGetVmMigrationCandidateHostsReply reply = new APIGetVmMigrationCandidateHostsReply(); getVmMigrationTargetHost(msg, new ReturnValueCompletion<List<HostInventory>>(msg) { @Override public void success(List<HostInventory> returnValue) { reply.setInventories(returnValue); bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(final APIAttachL3NetworkToVmMsg msg) { final APIAttachL3NetworkToVmEvent evt = new APIAttachL3NetworkToVmEvent(msg.getId()); final String vmNicInvKey = "vmNicInventory"; FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("attach-l3-network-to-vm-%s", msg.getVmInstanceUuid())); chain.then(new NoRollbackFlow() { String __name__ = "attach-nic"; @Override public void run(FlowTrigger trigger, Map data) { List<String> l3Uuids = new ArrayList<>(); /* put primary L3 at first */ l3Uuids.add(msg.getL3NetworkUuid()); l3Uuids.addAll(msg.getSecondaryL3Uuids()); attachNic(msg, l3Uuids, new ReturnValueCompletion<VmNicInventory>(msg) { @Override public void success(VmNicInventory returnValue) { data.put(vmNicInvKey, returnValue); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { String __name__ = "after-attach-nic"; @Override public void run(FlowTrigger trigger, Map data) { afterAttachNic((VmNicInventory) data.get(vmNicInvKey), new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.publish(evt); VmNicInventory vmNicInventory = (VmNicInventory) data.get(vmNicInvKey); 
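                // publish a canonical event so that components listening on the event
                // bus learn about the newly created nic and its owner account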
                VmNicCanonicalEvents.VmNicEventData vmNicEventData = new VmNicCanonicalEvents.VmNicEventData();
                vmNicEventData.setCurrentStatus(self.getState().toString());
                String vmNicAccountUuid = acntMgr.getOwnerAccountUuidOfResource(vmNicInventory.getUuid());
                vmNicEventData.setAccountUuid(vmNicAccountUuid);
                vmNicEventData.setInventory(vmNicInventory);
                evtf.fire(VmNicCanonicalEvents.VM_NIC_CREATED_PATH, vmNicEventData);
            }
        }).error(new FlowErrorHandler(msg) {
            @Override
            public void handle(ErrorCode errCode, Map data) {
                evt.setError(errCode);
                bus.publish(evt);
            }
        }).start();
    }

    private void handle(final APIAttachVmNicToVmMsg msg) {
        final APIAttachVmNicToVmEvent evt = new APIAttachVmNicToVmEvent(msg.getId());
        final String vmNicInvKey = "vmNicInventory";

        FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
        chain.setName(String.format("attach-nic-to-vm-%s", msg.getVmInstanceUuid()));
        chain.then(new NoRollbackFlow() {
            String __name__ = "attach-nic";

            @Override
            public void run(FlowTrigger trigger, Map data) {
                attachNic(msg, new ReturnValueCompletion<VmNicInventory>(msg) {
                    @Override
                    public void success(VmNicInventory returnValue) {
                        data.put(vmNicInvKey, returnValue);
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        }).then(new NoRollbackFlow() {
            String __name__ = "after-attach-nic";

            @Override
            public void run(FlowTrigger trigger, Map data) {
                afterAttachNic((VmNicInventory) data.get(vmNicInvKey), new Completion(trigger) {
                    @Override
                    public void success() {
                        trigger.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        }).done(new FlowDoneHandler(msg) {
            @Override
            public void handle(Map data) {
                self = dbf.reload(self);
                evt.setInventory(VmInstanceInventory.valueOf(self));
                bus.publish(evt);
            }
        }).error(new FlowErrorHandler(msg) {
            @Override
            public void handle(ErrorCode errCode, Map data) {
                evt.setError(errCode);
                bus.publish(evt);
            }
        }).start();
    }

    protected void afterAttachNic(VmNicInventory nicInventory, Completion completion) {
        completion.success();
    }

    private void detachVolume(final DetachDataVolumeFromVmMsg msg, final NoErrorCompletion completion) {
        final DetachDataVolumeFromVmReply reply = new DetachDataVolumeFromVmReply();
        refreshVO(true);

        if (self == null || VmInstanceState.Destroyed == self.getState()) {
            // the vm is destroyed, the data volume must have been detached
            bus.reply(msg, reply);
            completion.done();
            return;
        }

        ErrorCode allowed = validateOperationByState(msg, self.getState(), VmErrors.DETACH_VOLUME_ERROR);
        if (allowed != null) {
            throw new OperationFailureException(allowed);
        }

        final VolumeInventory volume = msg.getVolume();
        VolumeVO vvo = dbf.findByUuid(volume.getUuid(), VolumeVO.class);

        // the volume is already detached; skip the actions below, except for shareable volumes
        if (vvo.getVmInstanceUuid() == null && !vvo.isShareable()) {
            extEmitter.afterDetachVolume(getSelfInventory(), volume, new Completion(completion) {
                @Override
                public void success() {
                    bus.reply(msg, reply);
                    completion.done();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
                    bus.reply(msg, reply);
                    completion.done();
                }
            });
            return;
        }

        extEmitter.preDetachVolume(getSelfInventory(), volume);
        extEmitter.beforeDetachVolume(getSelfInventory(), volume);

        if (self.getState() == VmInstanceState.Stopped) {
            extEmitter.afterDetachVolume(getSelfInventory(), volume, new Completion(completion) {
                @Override
                public void success() {
                    bus.reply(msg, reply);
                    completion.done();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
bus.reply(msg, reply); completion.done(); } }); return; } // VmInstanceState.Running String hostUuid = self.getHostUuid(); DetachVolumeFromVmOnHypervisorMsg dmsg = new DetachVolumeFromVmOnHypervisorMsg(); dmsg.setVmInventory(VmInstanceInventory.valueOf(self)); dmsg.setInventory(volume); dmsg.setHostUuid(hostUuid); bus.makeTargetServiceIdByResourceUuid(dmsg, HostConstant.SERVICE_ID, hostUuid); bus.send(dmsg, new CloudBusCallBack(msg, completion) { @Override public void run(final MessageReply r) { if (!r.isSuccess()) { reply.setError(r.getError()); extEmitter.failedToDetachVolume(getSelfInventory(), volume, r.getError()); bus.reply(msg, reply); completion.done(); } else { extEmitter.afterDetachVolume(getSelfInventory(), volume, new Completion(completion) { @Override public void success() { // update Volumevo before exit message queue vvo.setVmInstanceUuid(null); dbf.updateAndRefresh(vvo); bus.reply(msg, reply); completion.done(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); completion.done(); } }); } } }); } protected void attachDataVolume(final AttachDataVolumeToVmMsg msg, final NoErrorCompletion completion) { final AttachDataVolumeToVmReply reply = new AttachDataVolumeToVmReply(); refreshVO(); ErrorCode err = validateOperationByState(msg, self.getState(), VmErrors.ATTACH_VOLUME_ERROR); if (err != null) { throw new OperationFailureException(err); } Map data = new HashMap(); final VolumeInventory volume = msg.getVolume(); new VmAttachVolumeValidator().validate(msg.getVmInstanceUuid(), volume.getUuid()); extEmitter.preAttachVolume(getSelfInventory(), volume); extEmitter.beforeAttachVolume(getSelfInventory(), volume, data); VmInstanceSpec spec = new VmInstanceSpec(); spec.setMessage(msg); spec.setVmInventory(VmInstanceInventory.valueOf(self)); spec.setCurrentVmOperation(VmOperation.AttachVolume); spec.setDestDataVolumes(list(volume)); FlowChain chain; if (volume.getStatus().equals(VolumeStatus.Ready.toString())) { chain = FlowChainBuilder.newSimpleFlowChain(); chain.then(new VmAssignDeviceIdToAttachingVolumeFlow()); chain.then(new VmAttachVolumeOnHypervisorFlow()); } else { chain = getAttachUninstantiatedVolumeWorkFlowChain(spec.getVmInventory()); } setFlowMarshaller(chain); List<VolumeInventory> attachedVolumes = getAllDataVolumes(getSelfInventory()); attachedVolumes.removeIf(it -> it.getDeviceId() == null || it.getUuid().equals(volume.getUuid())); chain.setName(String.format("vm-%s-attach-volume-%s", self.getUuid(), volume.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(VmInstanceConstant.Params.AttachingVolumeInventory.toString(), volume); chain.getData().put(Params.AttachedDataVolumeInventories.toString(), attachedVolumes); chain.done(new FlowDoneHandler(msg, completion) { @Override public void handle(Map data) { extEmitter.afterAttachVolume(getSelfInventory(), volume); reply.setHypervisorType(self.getHypervisorType()); bus.reply(msg, reply); completion.done(); } }).error(new FlowErrorHandler(msg, completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToAttachVolume(getSelfInventory(), volume, errCode, data); reply.setError(err(VmErrors.ATTACH_VOLUME_ERROR, errCode, errCode.getDetails())); bus.reply(msg, reply); completion.done(); } }).start(); } protected void migrateVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), VmErrors.MIGRATE_ERROR); if 
(allowed != null) { completion.fail(allowed); return; } VmInstanceInventory pinv = getSelfInventory(); for (VmPreMigrationExtensionPoint ext : pluginRgty.getExtensionList(VmPreMigrationExtensionPoint.class)) { ext.preVmMigration(pinv); } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Migrate); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.migrating); spec.setMessage(msg); FlowChain chain = getMigrateVmWorkFlowChain(inv); setFlowMarshaller(chain); String lastHostUuid = self.getHostUuid(); chain.setName(String.format("migrate-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(final Map data) { VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); HostInventory host = spec.getDestHost(); self = changeVmStateInDb(VmInstanceStateEvent.running, ()-> { self.setZoneUuid(host.getZoneUuid()); self.setClusterUuid(host.getClusterUuid()); self.setLastHostUuid(lastHostUuid); self.setHostUuid(host.getUuid()); }); VmInstanceInventory vm = VmInstanceInventory.valueOf(self); extEmitter.afterMigrateVm(vm, vm.getLastHostUuid()); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToMigrateVm(VmInstanceInventory.valueOf(self), spec.getDestHost().getUuid(), errCode); if (HostErrors.FAILED_TO_MIGRATE_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); } else { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } } }).start(); } protected void handle(final APIMigrateVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("migrate-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { migrateVm(msg, new Completion(chain) { @Override public void success() { APIMigrateVmEvent evt = new APIMigrateVmEvent(msg.getId()); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { APIMigrateVmEvent evt = new APIMigrateVmEvent(msg.getId()); evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } }); } protected void startVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } if (self.getState() == VmInstanceState.Created) { InstantiateVmFromNewCreatedStruct struct = new JsonLabel().get( InstantiateVmFromNewCreatedStruct.makeLabelKey(self.getUuid()), InstantiateVmFromNewCreatedStruct.class); struct.setStrategy(VmCreationStrategy.InstantStart); instantiateVmFromNewCreate(struct, completion); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); ErrorCode preStart = extEmitter.preStartVm(inv); if (preStart != null) { completion.fail(preStart); return; } final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Start); spec.setMessage(msg); if (msg instanceof APIStartVmInstanceMsg) { APIStartVmInstanceMsg amsg = (APIStartVmInstanceMsg) msg; 
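            // an API start request may pin the VM to a specific cluster/host and carries
            // VDI preferences (USB redirect, RDP, monitor number) as system tags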
            spec.setRequiredClusterUuid(amsg.getClusterUuid());
            spec.setRequiredHostUuid(amsg.getHostUuid());
            spec.setUsbRedirect(VmSystemTags.USB_REDIRECT.getTokenByResourceUuid(self.getUuid(), VmSystemTags.USB_REDIRECT_TOKEN));
            spec.setEnableRDP(VmSystemTags.RDP_ENABLE.getTokenByResourceUuid(self.getUuid(), VmSystemTags.RDP_ENABLE_TOKEN));
            spec.setVDIMonitorNumber(VmSystemTags.VDI_MONITOR_NUMBER.getTokenByResourceUuid(self.getUuid(), VmSystemTags.VDI_MONITOR_NUMBER_TOKEN));
        }

        if (msg instanceof HaStartVmInstanceMsg) {
            spec.setSoftAvoidHostUuids(((HaStartVmInstanceMsg) msg).getSoftAvoidHostUuids());
        } else if (msg instanceof StartVmInstanceMsg) {
            spec.setSoftAvoidHostUuids(((StartVmInstanceMsg) msg).getSoftAvoidHostUuids());
        }

        if (spec.getDestNics().isEmpty()) {
            throw new OperationFailureException(operr("unable to start the vm[uuid:%s]:" +
                    " it doesn't have any NIC, please attach a NIC and try again", self.getUuid()));
        }

        final VmInstanceState originState = self.getState();
        changeVmStateInDb(VmInstanceStateEvent.starting);
        logger.debug("keep the VM state as 'Starting' until the start operation finishes or the management node restarts.");

        extEmitter.beforeStartVm(VmInstanceInventory.valueOf(self));

        FlowChain chain = getStartVmWorkFlowChain(inv);
        setFlowMarshaller(chain);

        String recentHostUuid = self.getHostUuid() == null ? self.getLastHostUuid() : self.getHostUuid();
        String vmHostUuid = self.getHostUuid();
        String vmLastHostUuid = self.getLastHostUuid();
        chain.setName(String.format("start-vm-%s", self.getUuid()));
        chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec);
        chain.done(new FlowDoneHandler(completion) {
            @Override
            public void handle(final Map data) {
                VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString());
                self = changeVmStateInDb(VmInstanceStateEvent.running, () -> new SQLBatch() {
                    @Override
                    protected void scripts() {
                        // reload self because some nics may have been deleted in the start phase due to
                        // a prior L3Network deletion; reload to avoid a JPA EntityNotFoundException
                        self = findByUuid(self.getUuid(), VmInstanceVO.class);
                        if (q(HostVO.class).eq(HostVO_.uuid, recentHostUuid).isExists()) {
                            self.setLastHostUuid(recentHostUuid);
                        } else {
                            self.setLastHostUuid(null);
                        }
                        self.setHostUuid(spec.getDestHost().getUuid());
                        self.setClusterUuid(spec.getDestHost().getClusterUuid());
                        self.setZoneUuid(spec.getDestHost().getZoneUuid());
                    }
                }.execute());

                logger.debug(String.format("vm[uuid:%s] is running", self.getUuid()));
                VmInstanceInventory inv = VmInstanceInventory.valueOf(self);
                extEmitter.afterStartVm(inv);
                completion.success();
            }
        }).error(new FlowErrorHandler(completion) {
            @Override
            public void handle(final ErrorCode errCode, Map data) {
                // reload self because some nics may have been deleted in the start phase due to
                // a prior L3Network deletion; reload to avoid a JPA EntityNotFoundException
                self = dbf.reload(self);
                extEmitter.failedToStartVm(VmInstanceInventory.valueOf(self), errCode);
                VmInstanceSpec spec = (VmInstanceSpec) data.get(Params.VmInstanceSpec.toString());

                // restore the vm state to the origin state before checking state,
                // to avoid sending a redundant vm state change event
                // refer to: ZSTAC-18174
                new SQLBatch() {
                    @Override
                    protected void scripts() {
                        self.setState(originState);
                        self.setHostUuid(vmHostUuid);
                        self.setLastHostUuid(q(HostVO.class).eq(HostVO_.uuid, vmLastHostUuid).isExists() ?
vmLastHostUuid : null); self = merge(self); } }.execute(); if (HostErrors.FAILED_TO_START_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(spec.getDestHost().getUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); return; } completion.fail(errCode); } }).start(); } private VmInstanceSpec buildVmInstanceSpecFromStruct(InstantiateVmFromNewCreatedStruct struct) { final VmInstanceSpec spec = new VmInstanceSpec(); spec.setRequiredPrimaryStorageUuidForRootVolume(struct.getPrimaryStorageUuidForRootVolume()); spec.setRequiredPrimaryStorageUuidForDataVolume(struct.getPrimaryStorageUuidForDataVolume()); spec.setDataVolumeSystemTags(struct.getDataVolumeSystemTags()); spec.setRootVolumeSystemTags(struct.getRootVolumeSystemTags()); spec.setRequiredHostUuid(struct.getRequiredHostUuid()); spec.setVmInventory(getSelfInventory()); if (struct.getL3NetworkUuids() != null && !struct.getL3NetworkUuids().isEmpty()) { SimpleQuery<L3NetworkVO> nwquery = dbf.createQuery(L3NetworkVO.class); nwquery.add(L3NetworkVO_.uuid, Op.IN, VmNicSpec.getL3UuidsOfSpec(struct.getL3NetworkUuids())); List<L3NetworkVO> vos = nwquery.list(); List<L3NetworkInventory> nws = L3NetworkInventory.valueOf(vos); // order L3 networks by the order they specified in the API List<VmNicSpec> nicSpecs = new ArrayList<>(); for (VmNicSpec nicSpec : struct.getL3NetworkUuids()) { List<L3NetworkInventory> l3s = new ArrayList<>(); for (L3NetworkInventory inv : nicSpec.l3Invs) { L3NetworkInventory l3 = CollectionUtils.find(nws, new Function<L3NetworkInventory, L3NetworkInventory>() { @Override public L3NetworkInventory call(L3NetworkInventory arg) { return arg.getUuid().equals(inv.getUuid()) ? arg : null; } }); if (l3 == null) { throw new OperationFailureException(operr( "Unable to find L3Network[uuid:%s] to start the current vm, it may have been deleted, " + "Operation suggestion: delete this vm, recreate a new vm", inv.getUuid())); } l3s.add(l3); } if (!l3s.isEmpty()) { nicSpecs.add(new VmNicSpec(l3s)); } } spec.setL3Networks(nicSpecs); } else { spec.setL3Networks(new ArrayList<>()); } if (struct.getDataDiskOfferingUuids() != null && !struct.getDataDiskOfferingUuids().isEmpty()) { SimpleQuery<DiskOfferingVO> dquery = dbf.createQuery(DiskOfferingVO.class); dquery.add(DiskOfferingVO_.uuid, SimpleQuery.Op.IN, struct.getDataDiskOfferingUuids()); List<DiskOfferingVO> vos = dquery.list(); // allow create multiple data volume from the same disk offering List<DiskOfferingInventory> disks = new ArrayList<>(); for (final String duuid : struct.getDataDiskOfferingUuids()) { DiskOfferingVO dvo = CollectionUtils.find(vos, new Function<DiskOfferingVO, DiskOfferingVO>() { @Override public DiskOfferingVO call(DiskOfferingVO arg) { if (duuid.equals(arg.getUuid())) { return arg; } return null; } }); disks.add(DiskOfferingInventory.valueOf(dvo)); } spec.setDataDiskOfferings(disks); } else { spec.setDataDiskOfferings(new ArrayList<>()); } if (struct.getRootDiskOfferingUuid() != null) { DiskOfferingVO rootDisk = dbf.findByUuid(struct.getRootDiskOfferingUuid(), DiskOfferingVO.class); spec.setRootDiskOffering(DiskOfferingInventory.valueOf(rootDisk)); } ImageVO imvo = dbf.findByUuid(spec.getVmInventory().getImageUuid(), ImageVO.class); List<CdRomSpec> cdRomSpecs = buildVmCdRomSpecsForNewCreated(spec); spec.setCdRomSpecs(cdRomSpecs); spec.getImageSpec().setInventory(ImageInventory.valueOf(imvo)); spec.setCurrentVmOperation(VmOperation.NewCreate); if (self.getClusterUuid() != null || struct.getRequiredHostUuid() != 
null) { spec.setHostAllocatorStrategy(HostAllocatorConstant.DESIGNATED_HOST_ALLOCATOR_STRATEGY_TYPE); } buildHostname(spec); spec.setUserdataList(buildUserdata()); selectBootOrder(spec); spec.setConsolePassword(VmSystemTags.CONSOLE_PASSWORD. getTokenByResourceUuid(self.getUuid(), VmSystemTags.CONSOLE_PASSWORD_TOKEN)); spec.setUsbRedirect(VmSystemTags.USB_REDIRECT.getTokenByResourceUuid(self.getUuid(), VmSystemTags.USB_REDIRECT_TOKEN)); if (struct.getStrategy() == VmCreationStrategy.CreateStopped) { spec.setCreatePaused(true); } return spec; } private List<CdRomSpec> buildVmCdRomSpecsForNewCreated(VmInstanceSpec vmSpec) { List<VmInstanceSpec.CdRomSpec> cdRomSpecs = new ArrayList<>(); VmInstanceInventory vmInventory = vmSpec.getVmInventory(); String vmUuid = vmInventory.getUuid(); // vm image is iso ImageVO imvo = dbf.findByUuid(vmInventory.getImageUuid(), ImageVO.class); if (imvo.getMediaType() == ImageMediaType.ISO) { CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setDeviceId(cdRomSpecs.size()); cdRomSpec.setImageUuid(imvo.getUuid()); cdRomSpecs.add(cdRomSpec); } // createWithoutCdRom boolean hasTag = VmSystemTags.CREATE_WITHOUT_CD_ROM.hasTag(vmUuid); boolean flagWithoutCdRom = false; if (hasTag) { String withoutCdRom = VmSystemTags.CREATE_WITHOUT_CD_ROM.getTokenByResourceUuid(vmUuid, VmSystemTags.CREATE_WITHOUT_CD_ROM_TOKEN); flagWithoutCdRom = Boolean.parseBoolean(withoutCdRom); } if (flagWithoutCdRom) { return cdRomSpecs; } // cdroms hasTag = VmSystemTags.CREATE_VM_CD_ROM_LIST.hasTag(vmUuid); if (hasTag) { Map<String, String> tokens = VmSystemTags.CREATE_VM_CD_ROM_LIST.getTokensByResourceUuid(vmUuid); List<String> cdRoms = new ArrayList<>(); cdRoms.add(tokens.get(VmSystemTags.CD_ROM_0)); cdRoms.add(tokens.get(VmSystemTags.CD_ROM_1)); cdRoms.add(tokens.get(VmSystemTags.CD_ROM_2)); // remove vm image iso, image iso has been added cdRoms.removeAll(cdRomSpecs.stream().map(CdRomSpec::getImageUuid).collect(Collectors.toList())); for (String cdRom : cdRoms) { if (cdRom == null || VmInstanceConstant.NONE_CDROM.equalsIgnoreCase(cdRom)) { continue; } CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setDeviceId(cdRomSpecs.size()); String imageUuid = VmInstanceConstant.EMPTY_CDROM.equalsIgnoreCase(cdRom) ? 
null : cdRom; cdRomSpec.setImageUuid(imageUuid); cdRomSpecs.add(cdRomSpec); } } else { int defaultCdRomNum = VmGlobalConfig.VM_DEFAULT_CD_ROM_NUM.value(Integer.class); while (defaultCdRomNum > cdRomSpecs.size()) { CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setDeviceId(cdRomSpecs.size()); cdRomSpecs.add(cdRomSpec); } } int max = VmGlobalConfig.MAXIMUM_CD_ROM_NUM.value(Integer.class); if (cdRomSpecs.size() > max) { throw new OperationFailureException(operr("One vm cannot create %s CDROMs, vm can only add %s CDROMs", cdRomSpecs.size(), max)); } return cdRomSpecs; } private void instantiateVmFromNewCreate(InstantiateVmFromNewCreatedStruct struct, Completion completion) { VmInstanceSpec spec = buildVmInstanceSpecFromStruct(struct); changeVmStateInDb(VmInstanceStateEvent.starting); CollectionUtils.safeForEach(pluginRgty.getExtensionList(BeforeStartNewCreatedVmExtensionPoint.class), new ForEachFunction<BeforeStartNewCreatedVmExtensionPoint>() { @Override public void run(BeforeStartNewCreatedVmExtensionPoint ext) { ext.beforeStartNewCreatedVm(spec); } }); extEmitter.beforeStartNewCreatedVm(VmInstanceInventory.valueOf(self)); FlowChain chain = getCreateVmWorkFlowChain(getSelfInventory()); setFlowMarshaller(chain); chain.setName(String.format("create-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.then(new NoRollbackFlow() { String __name__ = "after-started-vm-" + self.getUuid(); @Override public void run(FlowTrigger trigger, Map data) { VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); changeVmStateInDb(struct.getStrategy() == VmCreationStrategy.InstantStart ? VmInstanceStateEvent.running : VmInstanceStateEvent.paused, ()-> { self.setLastHostUuid(spec.getDestHost().getUuid()); self.setHostUuid(spec.getDestHost().getUuid()); self.setClusterUuid(spec.getDestHost().getClusterUuid()); self.setZoneUuid(spec.getDestHost().getZoneUuid()); self.setHypervisorType(spec.getDestHost().getHypervisorType()); self.setRootVolumeUuid(spec.getDestRootVolume().getUuid()); }); logger.debug(String.format("vm[uuid:%s] is started ..", self.getUuid())); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.afterStartNewCreatedVm(inv); trigger.next(); } }); if (struct.getStrategy() == VmCreationStrategy.CreateStopped) { chain.then(new NoRollbackFlow() { String __name__ = "stop-vm-" + self.getUuid(); @Override public void run(FlowTrigger trigger, Map data) { StopVmInstanceMsg smsg = new StopVmInstanceMsg(); smsg.setVmInstanceUuid(self.getUuid()); smsg.setGcOnFailure(true); smsg.setType(StopVmType.cold.toString()); stopVm(smsg, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } chain.done(new FlowDoneHandler(completion) { @Override public void handle(final Map data) { logger.debug(String.format("vm[uuid:%s] is created ..", self.getUuid())); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToStartNewCreatedVm(VmInstanceInventory.valueOf(self), errCode); dbf.remove(self); // clean up EO, otherwise API-retry may cause conflict if // the resource uuid is set try { dbf.eoCleanup(VmInstanceVO.class, CollectionDSL.list(self.getUuid())); } catch (Exception e) { logger.warn(e.getMessage()); } completion.fail(operr(errCode, errCode.getDetails())); } }).start(); } protected void 
startVm(final StartVmInstanceMsg msg, final SyncTaskChain taskChain) { startVm(msg, new Completion(taskChain) { @Override public void success() { VmInstanceInventory inv = VmInstanceInventory.valueOf(self); StartVmInstanceReply reply = new StartVmInstanceReply(); reply.setInventory(inv); bus.reply(msg, reply); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { StartVmInstanceReply reply = new StartVmInstanceReply(); reply.setError(err(VmErrors.START_ERROR, errorCode, errorCode.getDetails())); bus.reply(msg, reply); taskChain.next(); } }); } protected void startVm(final APIStartVmInstanceMsg msg, final SyncTaskChain taskChain) { startVm(msg, new Completion(taskChain) { @Override public void success() { VmInstanceInventory inv = VmInstanceInventory.valueOf(self); APIStartVmInstanceEvent evt = new APIStartVmInstanceEvent(msg.getId()); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIStartVmInstanceEvent evt = new APIStartVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.START_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void handle(final APIStartVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("start-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { startVm(msg, chain); } }); } protected void handle(final APIDestroyVmInstanceMsg msg) { final APIDestroyVmInstanceEvent evt = new APIDestroyVmInstanceEvent(msg.getId()); destroyVm(msg, new Completion(msg) { @Override public void success() { bus.publish(evt); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); } }); } private void destroyVm(APIDestroyVmInstanceMsg msg, final Completion completion) { final String issuer = VmInstanceVO.class.getSimpleName(); final List<VmDeletionStruct> ctx = new ArrayList<VmDeletionStruct>(); VmDeletionStruct s = new VmDeletionStruct(); s.setInventory(getSelfInventory()); s.setDeletionPolicy(deletionPolicyMgr.getDeletionPolicy(self.getUuid())); ctx.add(s); FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("delete-vm-%s", msg.getUuid())); if (msg.getDeletionMode() == APIDeleteMessage.DeletionMode.Permissive) { chain.then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } else { chain.then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } chain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, 
issuer, ctx, new NopeCompletion()); completion.success(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(err(SysErrors.DELETE_RESOURCE_ERROR, errCode, errCode.getDetails())); } }).start(); } protected void buildHostname(VmInstanceSpec spec) { String defaultHostname = VmSystemTags.HOSTNAME.getTag(self.getUuid()); if (defaultHostname == null) { return; } HostName dhname = new HostName(); dhname.setL3NetworkUuid(self.getDefaultL3NetworkUuid()); dhname.setHostname(VmSystemTags.HOSTNAME.getTokenByTag(defaultHostname, VmSystemTags.HOSTNAME_TOKEN)); spec.getHostnames().add(dhname); } protected VmInstanceSpec buildSpecFromInventory(VmInstanceInventory inv, VmOperation operation) { VmInstanceSpec spec = new VmInstanceSpec(); spec.setUserdataList(buildUserdata()); // for L3Network that has been deleted List<String> nicUuidToDel = CollectionUtils.transformToList(inv.getVmNics(), new Function<String, VmNicInventory>() { @Override public String call(VmNicInventory arg) { return arg.getL3NetworkUuid() == null ? arg.getUuid() : null; } }); if (!nicUuidToDel.isEmpty()) { dbf.removeByPrimaryKeys(nicUuidToDel, VmNicVO.class); self = dbf.findByUuid(inv.getUuid(), VmInstanceVO.class); inv = VmInstanceInventory.valueOf(self); } spec.setDestNics(inv.getVmNics()); List<VmNicSpec> nicSpecs = new ArrayList<>(); for (VmNicInventory nic : inv.getVmNics()) { List<L3NetworkInventory> l3Invs = new ArrayList<>(); /* if destroy vm, then recover vm, ip address of nic has been deleted */ if (nic.getUsedIps() != null && !nic.getUsedIps().isEmpty()) { for (UsedIpInventory ip : nic.getUsedIps()) { L3NetworkVO l3Vo = dbf.findByUuid(ip.getL3NetworkUuid(), L3NetworkVO.class); if (l3Vo != null) { l3Invs.add(L3NetworkInventory.valueOf(l3Vo)); } } } if (l3Invs.isEmpty()) { L3NetworkVO l3Vo = dbf.findByUuid(nic.getL3NetworkUuid(), L3NetworkVO.class); if (l3Vo != null) { l3Invs.add(L3NetworkInventory.valueOf(l3Vo)); } List<String> secondaryNetworksList = new DualStackNicSecondaryNetworksOperator().getSecondaryNetworksByVmUuidNic(inv.getUuid(), nic.getL3NetworkUuid()); if (secondaryNetworksList != null && !secondaryNetworksList.isEmpty()) { for (String uuid : secondaryNetworksList) { l3Vo = dbf.findByUuid(uuid, L3NetworkVO.class); if (l3Vo != null) { l3Invs.add(L3NetworkInventory.valueOf(l3Vo)); } } } } nicSpecs.add(new VmNicSpec(l3Invs)); } spec.setL3Networks(nicSpecs); String huuid = inv.getHostUuid() == null ? inv.getLastHostUuid() : inv.getHostUuid(); if (huuid != null) { HostVO hvo = dbf.findByUuid(huuid, HostVO.class); if (hvo != null) { spec.setDestHost(HostInventory.valueOf(hvo)); } } VolumeInventory rootVol = inv.getRootVolume(); Optional.ofNullable(rootVol).ifPresent(it -> { spec.setDestRootVolume(it); spec.setRequiredPrimaryStorageUuidForRootVolume(it.getPrimaryStorageUuid()); }); spec.setDestDataVolumes(getAllDataVolumes(inv)); // When starting an imported VM, we might not have an image UUID. 
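        // if the image record has been deleted, fall back to its EO (the
        // not-yet-expunged record) so the spec still carries the image metadata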
if (inv.getImageUuid() != null) { ImageVO imgvo = dbf.findByUuid(inv.getImageUuid(), ImageVO.class); ImageInventory imginv = null; if (imgvo == null) { // the image has been deleted, use EO instead ImageEO imgeo = dbf.findByUuid(inv.getImageUuid(), ImageEO.class); imginv = ImageInventory.valueOf(imgeo); } else { imginv = ImageInventory.valueOf(imgvo); } spec.getImageSpec().setInventory(imginv); } spec.setVmInventory(inv); buildHostname(spec); List<VmCdRomVO> cdRomVOS = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, inv.getUuid()) .orderBy(VmCdRomVO_.deviceId, SimpleQuery.Od.ASC) .list(); for (VmCdRomVO cdRomVO : cdRomVOS) { CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setUuid(cdRomVO.getUuid()); String isoUuid = cdRomVO.getIsoUuid(); if (isoUuid != null) { if(dbf.isExist(isoUuid, ImageVO.class)) { cdRomSpec.setImageUuid(isoUuid); cdRomSpec.setInstallPath(cdRomVO.getIsoInstallPath()); } else { //TODO logger.warn(String.format("iso[uuid:%s] is deleted, however, the VM[uuid:%s] still has it attached", isoUuid, self.getUuid())); } } cdRomSpec.setDeviceId(cdRomVO.getDeviceId()); spec.getCdRomSpecs().add(cdRomSpec); } spec.setCurrentVmOperation(operation); selectBootOrder(spec); spec.setConsolePassword(VmSystemTags.CONSOLE_PASSWORD. getTokenByResourceUuid(self.getUuid(), VmSystemTags.CONSOLE_PASSWORD_TOKEN)); return spec; } private List<VolumeInventory> getAllDataVolumes(VmInstanceInventory inv) { List<VolumeInventory> dataVols = inv.getAllVolumes().stream() .filter(it -> !it.getUuid().equals(inv.getRootVolumeUuid())) .collect(Collectors.toList()); List<BuildVolumeSpecExtensionPoint> exts = pluginRgty.getExtensionList(BuildVolumeSpecExtensionPoint.class); exts.forEach(e -> dataVols.addAll(e.supplyAdditionalVolumesForVmInstance(inv.getUuid()))); return dataVols; } protected void rebootVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); ErrorCode preReboot = extEmitter.preRebootVm(inv); if (preReboot != null) { completion.fail(preReboot); return; } final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Reboot); spec.setDestHost(HostInventory.valueOf(dbf.findByUuid(self.getHostUuid(), HostVO.class))); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.rebooting); extEmitter.beforeRebootVm(VmInstanceInventory.valueOf(self)); spec.setMessage(msg); FlowChain chain = getRebootVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("reboot-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { self = changeVmStateInDb(VmInstanceStateEvent.running); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.afterRebootVm(inv); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToRebootVm(VmInstanceInventory.valueOf(self), errCode); if (HostErrors.FAILED_TO_STOP_VM_ON_HYPERVISOR.isEqual(errCode.getCode()) || HostErrors.FAILED_TO_START_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { self = refreshVO(); if ((originState == VmInstanceState.Running || originState == 
VmInstanceState.Paused) && self.getState() == VmInstanceState.Stopped) { returnHostCpacity(spec.getDestHost().getUuid()); } completion.fail(errCode); } }); } else { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } } }).start(); } protected void returnHostCpacity(String hostUuid) { ReturnHostCapacityMsg rmsg = new ReturnHostCapacityMsg(); rmsg.setCpuCapacity(self.getCpuNum()); rmsg.setMemoryCapacity(self.getMemorySize()); rmsg.setHostUuid(hostUuid); rmsg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); bus.send(rmsg); } protected void rebootVm(final APIRebootVmInstanceMsg msg, final SyncTaskChain taskChain) { rebootVm(msg, new Completion(taskChain) { @Override public void success() { APIRebootVmInstanceEvent evt = new APIRebootVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIRebootVmInstanceEvent evt = new APIRebootVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.REBOOT_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void handle(final APIRebootVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("reboot-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { rebootVm(msg, chain); } }); } protected void stopVm(final APIStopVmInstanceMsg msg, final SyncTaskChain taskChain) { stopVm(msg, new Completion(taskChain) { @Override public void success() { APIStopVmInstanceEvent evt = new APIStopVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIStopVmInstanceEvent evt = new APIStopVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.STOP_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } private void stopVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } if (self.getState() == VmInstanceState.Stopped) { completion.success(); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); ErrorCode preStop = extEmitter.preStopVm(inv); if (preStop != null) { completion.fail(preStop); return; } final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Stop); spec.setMessage(msg); if (msg instanceof StopVmInstanceMsg) { spec.setGcOnStopFailure(((StopVmInstanceMsg) msg).isGcOnFailure()); } if (msg instanceof ReleaseResourceMessage) { spec.setIgnoreResourceReleaseFailure(((ReleaseResourceMessage) msg).isIgnoreResourceReleaseFailure()); } final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.stopping); extEmitter.beforeStopVm(VmInstanceInventory.valueOf(self)); FlowChain chain = getStopVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("stop-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { self = changeVmStateInDb(VmInstanceStateEvent.stopped); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); 
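                // notify the after-stop extensions only after the Stopped state
                // has been persisted to the database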
extEmitter.afterStopVm(inv); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.failedToStopVm(inv, errCode); if (HostErrors.FAILED_TO_STOP_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); } else { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } } }).start(); } protected void handle(final APIStopVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("stop-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { stopVm(msg, chain); } }); } protected void pauseVm(final APIPauseVmInstanceMsg msg, final SyncTaskChain taskChain) { pauseVm(msg, new Completion(taskChain) { @Override public void success() { APIPauseVmInstanceEvent evt = new APIPauseVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIPauseVmInstanceEvent evt = new APIPauseVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.SUSPEND_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void pauseVm(final Message msg, Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } if (self.getState() == VmInstanceState.Paused) { completion.success(); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Pause); spec.setMessage(msg); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.pausing); FlowChain chain = getPauseVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("pause-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map Data) { self = changeVmStateInDb(VmInstanceStateEvent.paused); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } }).start(); } protected void handle(final APIPauseVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { pauseVm(msg, chain); } @Override public String getName() { return String.format("pause-vm-%s", msg.getVmInstanceUuid()); } }); } protected void resumeVm(final APIResumeVmInstanceMsg msg, final SyncTaskChain taskChain) { resumeVm(msg, new Completion(taskChain) { @Override public void success() { APIResumeVmInstanceEvent evt = new APIResumeVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIResumeVmInstanceEvent evt = new APIResumeVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.RESUME_ERROR, 
errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void resumeVm(final Message msg, Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Resume); spec.setMessage(msg); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.resuming); FlowChain chain = getResumeVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("resume-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map Data) { self = changeVmStateInDb(VmInstanceStateEvent.running); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } }).start(); } protected void handle(final APIResumeVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { resumeVm(msg, chain); } @Override public String getName() { return String.format("resume-vm-%s", msg.getVmInstanceUuid()); } }); } private void handle(final APIReimageVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { reimageVmInstance(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return "reimage-vminstance"; } }); } private void handle(final APIDeleteVmCdRomMsg msg) { APIDeleteVmCdRomEvent event = new APIDeleteVmCdRomEvent(msg.getId()); DeleteVmCdRomMsg deleteVmCdRomMsg = new DeleteVmCdRomMsg(); deleteVmCdRomMsg.setVmInstanceUuid(msg.getVmInstanceUuid()); deleteVmCdRomMsg.setCdRomUuid(msg.getUuid()); bus.makeLocalServiceId(deleteVmCdRomMsg, VmInstanceConstant.SERVICE_ID); bus.send(deleteVmCdRomMsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { event.setInventory(VmInstanceInventory.valueOf(self)); } else { event.setError(reply.getError()); } bus.publish(event); } }); } private void handle(final DeleteVmCdRomMsg msg) { DeleteVmCdRomReply reply = new DeleteVmCdRomReply(); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } deleteVmCdRom(msg.getCdRomUuid(), new Completion(chain) { @Override public void success() { bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("delete-vm-cdRom-%s", msg.getCdRomUuid()); } }); } private void deleteVmCdRom(String cdRomUuid, Completion completion) { boolean exist = dbf.isExist(cdRomUuid, VmCdRomVO.class); if (!exist) { completion.success(); return; } dbf.removeByPrimaryKey(cdRomUuid, 
VmCdRomVO.class); completion.success(); } private void doCreateVmCdRom(CreateVmCdRomMsg msg, ReturnValueCompletion<VmCdRomInventory> completion) { long vmCdRomNum = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, msg.getVmInstanceUuid()) .count(); int max = VmGlobalConfig.MAXIMUM_CD_ROM_NUM.value(Integer.class); if (max <= vmCdRomNum) { completion.fail(operr("VM[uuid:%s] can only add %s CDROMs", msg.getVmInstanceUuid(), max)); return; } if (msg.getIsoUuid() != null) { boolean targetIsoUsed = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, msg.getVmInstanceUuid()) .eq(VmCdRomVO_.isoUuid, msg.getIsoUuid()) .isExists(); if (targetIsoUsed) { completion.fail(operr("VM[uuid:%s] already has an ISO[uuid:%s] attached", msg.getVmInstanceUuid(), msg.getIsoUuid())); return; } } List<Integer> deviceIds = Q.New(VmCdRomVO.class) .select(VmCdRomVO_.deviceId) .eq(VmCdRomVO_.vmInstanceUuid, msg.getVmInstanceUuid()) .listValues(); BitSet full = new BitSet(deviceIds.size() + 1); deviceIds.forEach(full::set); int targetDeviceId = full.nextClearBit(0); if (targetDeviceId >= max) { completion.fail(operr("VM[uuid:%s] can only add %s CDROMs", msg.getVmInstanceUuid(), max)); return; } VmCdRomVO cdRomVO = new VmCdRomVO(); String cdRomUuid = msg.getResourceUuid() != null ? msg.getResourceUuid() : Platform.getUuid(); cdRomVO.setUuid(cdRomUuid); cdRomVO.setDeviceId(targetDeviceId); cdRomVO.setIsoUuid(msg.getIsoUuid()); cdRomVO.setVmInstanceUuid(msg.getVmInstanceUuid()); cdRomVO.setName(msg.getName()); String acntUuid = Account.getAccountUuidOfResource(msg.getVmInstanceUuid()); cdRomVO.setAccountUuid(acntUuid); cdRomVO.setDescription(msg.getDescription()); cdRomVO = dbf.persistAndRefresh(cdRomVO); completion.success(VmCdRomInventory.valueOf(cdRomVO)); } private void handle(final APICreateVmCdRomMsg msg) { APICreateVmCdRomEvent event = new APICreateVmCdRomEvent(msg.getId()); CreateVmCdRomMsg cmsg = new CreateVmCdRomMsg(); cmsg.setResourceUuid(msg.getResourceUuid()); cmsg.setName(msg.getName()); cmsg.setIsoUuid(msg.getIsoUuid()); cmsg.setVmInstanceUuid(msg.getVmInstanceUuid()); cmsg.setDescription(msg.getDescription()); bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, cmsg.getVmInstanceUuid()); bus.send(cmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { event.setError(reply.getError()); bus.publish(event); return; } CreateVmCdRomReply r1 = reply.castReply(); event.setInventory(r1.getInventory()); bus.publish(event); } }); } private void handle(APIUpdateVmCdRomMsg msg) { APIUpdateVmCdRomEvent event = new APIUpdateVmCdRomEvent(msg.getId()); VmCdRomVO vmCdRomVO = dbf.findByUuid(msg.getUuid(), VmCdRomVO.class); boolean update = false; if (msg.getName() != null) { vmCdRomVO.setName(msg.getName()); update = true; } if (msg.getDescription() != null ) { vmCdRomVO.setDescription(msg.getDescription()); update = true; } if (update) { vmCdRomVO = dbf.updateAndRefresh(vmCdRomVO); } event.setInventory(VmCdRomInventory.valueOf(vmCdRomVO)); bus.publish(event); } private void handle(APISetVmInstanceDefaultCdRomMsg msg) { APISetVmInstanceDefaultCdRomEvent event = new APISetVmInstanceDefaultCdRomEvent(msg.getId()); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { setVmInstanceDefaultCdRom(msg.getUuid(), new Completion(chain) { @Override public void success() { VmCdRomVO cdRomVO = dbf.findByUuid(msg.getUuid(), VmCdRomVO.class); 
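                        // reload the CD-ROM record so the event carries the deviceId
                        // newly assigned by setVmInstanceDefaultCdRom()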
                        event.setInventory(VmCdRomInventory.valueOf(cdRomVO));
                        bus.publish(event);
                        chain.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        event.setError(errorCode);
                        bus.publish(event);
                        chain.next();
                    }
                });
            }

            @Override
            public String getName() {
                return String.format("set-vmInstance-%s-default-cdRom-%s", msg.getVmInstanceUuid(), msg.getUuid());
            }
        });
    }

    private void setVmInstanceDefaultCdRom(String vmCdRomUuid, Completion completion) {
        // swap deviceIds so the target CD-ROM becomes the default (deviceId 0)
        // and the previous default CD-ROM takes over the target's old deviceId
        new SQLBatch() {
            @Override
            protected void scripts() {
                List<VmCdRomVO> cdRomVOS = q(VmCdRomVO.class)
                        .eq(VmCdRomVO_.vmInstanceUuid, self.getUuid())
                        .orderBy(VmCdRomVO_.deviceId, SimpleQuery.Od.ASC)
                        .list();
                Map<String, Integer> cdRomUuidDeviceIdMap = cdRomVOS.stream()
                        .collect(Collectors.toMap(VmCdRomVO::getUuid, VmCdRomVO::getDeviceId));
                int deviceId = cdRomUuidDeviceIdMap.get(vmCdRomUuid);

                VmCdRomVO beforeDefaultCdRomVO = null;
                for (VmCdRomVO vmCdRomVO : cdRomVOS) {
                    if (vmCdRomVO.getDeviceId() == 0) {
                        // park the current default CD-ROM on a deviceId that cannot collide
                        beforeDefaultCdRomVO = vmCdRomVO;
                        sql(VmCdRomVO.class)
                                .eq(VmCdRomVO_.uuid, vmCdRomVO.getUuid())
                                .set(VmCdRomVO_.deviceId, VmInstanceConstant.MAXIMUM_CDROM_NUMBER)
                                .update();
                        continue;
                    }
                    if (vmCdRomUuid.equals(vmCdRomVO.getUuid())) {
                        sql(VmCdRomVO.class)
                                .eq(VmCdRomVO_.uuid, vmCdRomVO.getUuid())
                                .set(VmCdRomVO_.deviceId, 0)
                                .update();
                    }
                }

                if (beforeDefaultCdRomVO != null) {
                    sql(VmCdRomVO.class)
                            .eq(VmCdRomVO_.uuid, beforeDefaultCdRomVO.getUuid())
                            .set(VmCdRomVO_.deviceId, deviceId)
                            .update();
                }
            }
        }.execute();

        completion.success();
    }

    private void reimageVmInstance(final APIReimageVmInstanceMsg msg, NoErrorCompletion completion) {
        final APIReimageVmInstanceEvent evt = new APIReimageVmInstanceEvent(msg.getId());

        String rootVolumeUuid = Q.New(VmInstanceVO.class).select(VmInstanceVO_.rootVolumeUuid)
                .eq(VmInstanceVO_.uuid, msg.getVmInstanceUuid())
                .findValue();

        ReimageVmInstanceMsg rmsg = new ReimageVmInstanceMsg();
        rmsg.setVmInstanceUuid(msg.getVmInstanceUuid());
        rmsg.setAccountUuid(msg.getSession().getAccountUuid());
        bus.makeTargetServiceIdByResourceUuid(rmsg, VmInstanceConstant.SERVICE_ID, msg.getVmInstanceUuid());

        ReimageVolumeOverlayMsg omsg = new ReimageVolumeOverlayMsg();
        omsg.setMessage(rmsg);
        omsg.setVolumeUuid(rootVolumeUuid);
        bus.makeTargetServiceIdByResourceUuid(omsg, VolumeConstant.SERVICE_ID, rootVolumeUuid);

        bus.send(omsg, new CloudBusCallBack(completion, evt) {
            @Override
            public void run(MessageReply reply) {
                if (reply.isSuccess()) {
                    self = refreshVO();
                    VmInstanceInventory inv = VmInstanceInventory.valueOf(self);
                    evt.setInventory(inv);
                    bus.publish(evt);
                } else {
                    evt.setError(reply.getError());
                    bus.publish(evt);
                }
                completion.done();
            }
        });
    }

    private void handle(ReimageVmInstanceMsg msg) {
        ReimageVmInstanceReply reply = new ReimageVmInstanceReply();
        self = refreshVO();

        VolumeVO rootVolume = dbf.findByUuid(self.getRootVolumeUuid(), VolumeVO.class);
        VolumeInventory rootVolumeInventory = VolumeInventory.valueOf(rootVolume);

        // the VM must be stopped before its root volume can be re-imaged
        {
            if (self.getState() != VmInstanceState.Stopped) {
                throw new ApiMessageInterceptionException(err(
                        VmErrors.RE_IMAGE_VM_NOT_IN_STOPPED_STATE,
                        "unable to reset volume[uuid:%s] to origin image[uuid:%s]," +
                                " the vm[uuid:%s] the volume is attached to is not in the Stopped state, current state is %s",
                        rootVolume.getUuid(), rootVolume.getRootImageUuid(),
                        rootVolume.getVmInstanceUuid(), self.getState()
                ));
            }
        }

        // check the image cache to ensure the image type is not ISO
        {
            SimpleQuery<ImageCacheVO> q = dbf.createQuery(ImageCacheVO.class);
            q.select(ImageCacheVO_.mediaType);
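            // NOTE: the media type is read from the image cache rather than from ImageVO;
            // presumably because the origin image may already be deleted while its cached
            // copy still exists on the primary storage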
            q.add(ImageCacheVO_.imageUuid, Op.EQ, rootVolume.getRootImageUuid());
            q.setLimit(1);
            ImageMediaType imageMediaType = q.findValue();
            if (imageMediaType == null) {
                throw new OperationFailureException(err(
                        VmErrors.RE_IMAGE_CANNOT_FIND_IMAGE_CACHE,
                        "unable to reset volume[uuid:%s] to origin image[uuid:%s]," +
                                " cannot find the image cache.",
                        rootVolume.getUuid(), rootVolume.getRootImageUuid()
                ));
            }
            if (imageMediaType == ImageMediaType.ISO) {
                throw new OperationFailureException(err(
                        VmErrors.RE_IMAGE_IMAGE_MEDIA_TYPE_SHOULD_NOT_BE_ISO,
                        "unable to reset volume[uuid:%s] to origin image[uuid:%s]," +
                                " because the image type is ISO",
                        rootVolume.getUuid(), rootVolume.getRootImageUuid()
                ));
            }
        }

        // do the re-image op
        FlowChain chain = FlowChainBuilder.newShareFlowChain();
        chain.setName(String.format("reset-root-volume-%s-from-image-%s", rootVolume.getUuid(), rootVolume.getRootImageUuid()));
        chain.then(new ShareFlow() {
            VolumeVO vo = rootVolume;

            @Override
            public void setup() {
                flow(new NoRollbackFlow() {
                    String __name__ = "mark-root-volume-as-snapshot-on-primary-storage";

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        MarkRootVolumeAsSnapshotMsg gmsg = new MarkRootVolumeAsSnapshotMsg();
                        rootVolumeInventory.setDescription(String.format("save snapshot for reimage vm [uuid:%s]", msg.getVmInstanceUuid()));
                        rootVolumeInventory.setName(String.format("reimage-vm-point-%s-%s", msg.getVmInstanceUuid(), TimeUtils.getCurrentTimeStamp("yyyyMMddHHmmss")));
                        gmsg.setVolume(rootVolumeInventory);
                        gmsg.setAccountUuid(msg.getAccountUuid());
                        bus.makeLocalServiceId(gmsg, VolumeSnapshotConstant.SERVICE_ID);
                        bus.send(gmsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (reply.isSuccess()) {
                                    trigger.next();
                                } else {
                                    trigger.fail(reply.getError());
                                }
                            }
                        });
                    }
                });

                flow(new NoRollbackFlow() {
                    String __name__ = "reset-root-volume-from-image-on-primary-storage";

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        ReInitRootVolumeFromTemplateOnPrimaryStorageMsg rmsg = new ReInitRootVolumeFromTemplateOnPrimaryStorageMsg();
                        rmsg.setVolume(rootVolumeInventory);
                        bus.makeTargetServiceIdByResourceUuid(rmsg, PrimaryStorageConstant.SERVICE_ID, rootVolumeInventory.getPrimaryStorageUuid());
                        bus.send(rmsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (reply.isSuccess()) {
                                    ReInitRootVolumeFromTemplateOnPrimaryStorageReply re = (ReInitRootVolumeFromTemplateOnPrimaryStorageReply) reply;
                                    vo.setInstallPath(re.getNewVolumeInstallPath());
                                    vo = dbf.updateAndRefresh(vo);
                                    trigger.next();
                                } else {
                                    trigger.fail(reply.getError());
                                }
                            }
                        });
                    }
                });

                flow(new NoRollbackFlow() {
                    String __name__ = "sync-volume-size-after-reimage";

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        SyncVolumeSizeMsg smsg = new SyncVolumeSizeMsg();
                        smsg.setVolumeUuid(vo.getUuid());
                        bus.makeTargetServiceIdByResourceUuid(smsg, VolumeConstant.SERVICE_ID, rootVolumeInventory.getUuid());
                        bus.send(smsg, new CloudBusCallBack(msg) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    trigger.fail(reply.getError());
                                    return;
                                }
                                vo.setSize(((SyncVolumeSizeReply) reply).getSize());
                                trigger.next();
                            }
                        });
                    }
                });

                flow(new NoRollbackFlow() {
                    String __name__ = "return-primary-storage-capacity";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        if (vo.getSize() == rootVolumeInventory.getSize()) {
                            trigger.next();
                            return;
                        }

                        IncreasePrimaryStorageCapacityMsg imsg = new IncreasePrimaryStorageCapacityMsg();
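                        // diskSize is the delta between the pre-reimage size and the freshly synced
                        // size; a positive delta gives the freed space back to the primary storage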
imsg.setPrimaryStorageUuid(rootVolume.getPrimaryStorageUuid()); imsg.setDiskSize(rootVolumeInventory.getSize() - vo.getSize()); bus.makeTargetServiceIdByResourceUuid(imsg, PrimaryStorageConstant.SERVICE_ID, rootVolume.getPrimaryStorageUuid()); bus.send(imsg); trigger.next(); } }); done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { dbf.update(vo); List<AfterReimageVmInstanceExtensionPoint> list = pluginRgty.getExtensionList( AfterReimageVmInstanceExtensionPoint.class); for (AfterReimageVmInstanceExtensionPoint ext : list) { ext.afterReimageVmInstance(rootVolumeInventory); } self = dbf.reload(self); bus.reply(msg, reply); } }); error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { logger.warn(String.format("failed to restore volume[uuid:%s] to image[uuid:%s], %s", rootVolumeInventory.getUuid(), rootVolumeInventory.getRootImageUuid(), errCode)); reply.setError(errCode); bus.reply(msg, reply); } }); } }).start(); } private void handle(OverlayMessage msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { doOverlayMessage(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return "overlay-message"; } }); } private void doOverlayMessage(OverlayMessage msg, NoErrorCompletion noErrorCompletion) { bus.send(msg.getMessage(), new CloudBusCallBack(msg, noErrorCompletion) { @Override public void run(MessageReply reply) { bus.reply(msg, reply); noErrorCompletion.done(); } }); } }
compute/src/main/java/org/zstack/compute/vm/VmInstanceBase.java
package org.zstack.compute.vm; import org.apache.commons.lang.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.transaction.annotation.Transactional; import org.zstack.compute.allocator.HostAllocatorManager; import org.zstack.core.Platform; import org.zstack.core.cascade.CascadeConstant; import org.zstack.core.cascade.CascadeFacade; import org.zstack.core.cloudbus.*; import org.zstack.core.componentloader.PluginRegistry; import org.zstack.core.db.*; import org.zstack.core.db.SimpleQuery.Op; import org.zstack.core.defer.Defer; import org.zstack.core.defer.Deferred; import org.zstack.core.jsonlabel.JsonLabel; import org.zstack.core.thread.ChainTask; import org.zstack.core.thread.SyncTaskChain; import org.zstack.core.thread.ThreadFacade; import org.zstack.core.workflow.FlowChainBuilder; import org.zstack.core.workflow.ShareFlow; import org.zstack.header.allocator.*; import org.zstack.header.apimediator.ApiMessageInterceptionException; import org.zstack.header.cluster.ClusterInventory; import org.zstack.header.cluster.ClusterState; import org.zstack.header.cluster.ClusterVO; import org.zstack.header.cluster.ClusterVO_; import org.zstack.header.configuration.*; import org.zstack.header.core.Completion; import org.zstack.header.core.NoErrorCompletion; import org.zstack.header.core.NopeCompletion; import org.zstack.header.core.ReturnValueCompletion; import org.zstack.header.core.workflow.*; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.errorcode.OperationFailureException; import org.zstack.header.errorcode.SysErrors; import org.zstack.header.exception.CloudRuntimeException; import org.zstack.header.host.*; import org.zstack.header.image.ImageConstant.ImageMediaType; import org.zstack.header.image.*; import org.zstack.header.message.*; import org.zstack.header.network.l3.*; import org.zstack.header.storage.primary.*; import org.zstack.header.storage.snapshot.MarkRootVolumeAsSnapshotMsg; import org.zstack.header.storage.snapshot.VolumeSnapshotConstant; import org.zstack.header.vm.*; import org.zstack.header.vm.ChangeVmMetaDataMsg.AtomicHostUuid; import org.zstack.header.vm.ChangeVmMetaDataMsg.AtomicVmState; import org.zstack.header.vm.VmAbnormalLifeCycleStruct.VmAbnormalLifeCycleOperation; import org.zstack.header.vm.VmInstanceConstant.Params; import org.zstack.header.vm.VmInstanceConstant.VmOperation; import org.zstack.header.vm.VmInstanceDeletionPolicyManager.VmInstanceDeletionPolicy; import org.zstack.header.vm.VmInstanceSpec.HostName; import org.zstack.header.vm.VmInstanceSpec.IsoSpec; import org.zstack.header.vm.VmInstanceSpec.CdRomSpec; import org.zstack.header.vm.cdrom.*; import org.zstack.header.volume.*; import org.zstack.identity.Account; import org.zstack.identity.AccountManager; import org.zstack.tag.SystemTagCreator; import org.zstack.tag.SystemTagUtils; import org.zstack.utils.*; import org.zstack.utils.data.SizeUnit; import org.zstack.utils.function.ForEachFunction; import org.zstack.utils.function.Function; import org.zstack.utils.gson.JSONObjectUtil; import org.zstack.utils.logging.CLogger; import javax.persistence.TypedQuery; import java.util.*; import java.util.stream.Collectors; import static org.zstack.core.Platform.operr; import static org.zstack.core.Platform.err; import static java.util.Arrays.asList; import static org.zstack.utils.CollectionDSL.*; public class VmInstanceBase extends AbstractVmInstance { protected static final CLogger 
logger = Utils.getLogger(VmInstanceBase.class); @Autowired protected CloudBus bus; @Autowired protected DatabaseFacade dbf; @Autowired protected ThreadFacade thdf; @Autowired protected VmInstanceManager vmMgr; @Autowired protected VmInstanceExtensionPointEmitter extEmitter; @Autowired protected VmInstanceNotifyPointEmitter notifyEmitter; @Autowired protected CascadeFacade casf; @Autowired protected AccountManager acntMgr; @Autowired protected EventFacade evtf; @Autowired protected PluginRegistry pluginRgty; @Autowired protected VmInstanceDeletionPolicyManager deletionPolicyMgr; @Autowired private HostAllocatorManager hostAllocatorMgr; protected VmInstanceVO self; protected VmInstanceVO originalCopy; protected String syncThreadName; private void checkState(final String hostUuid, final NoErrorCompletion completion) { CheckVmStateOnHypervisorMsg msg = new CheckVmStateOnHypervisorMsg(); msg.setVmInstanceUuids(list(self.getUuid())); msg.setHostUuid(hostUuid); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, hostUuid); bus.send(msg, new CloudBusCallBack(completion) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.warn(String.format("unable to check state of the vm[uuid:%s] on the host[uuid:%s], %s;" + "put the VM into the Unknown state", self.getUuid(), hostUuid, reply.getError())); changeVmStateInDb(VmInstanceStateEvent.unknown); completion.done(); return; } CheckVmStateOnHypervisorReply r = reply.castReply(); String state = r.getStates().get(self.getUuid()); if (state == null) { changeVmStateInDb(VmInstanceStateEvent.unknown); completion.done(); return; } if (VmInstanceState.Running.toString().equals(state)) { changeVmStateInDb(VmInstanceStateEvent.running, ()-> self.setHostUuid(hostUuid)); } else if (VmInstanceState.Stopped.toString().equals(state) && self.getState().equals(VmInstanceState.Destroying)) { changeVmStateInDb(VmInstanceStateEvent.destroyed); } else if (VmInstanceState.Stopped.toString().equals(state)) { changeVmStateInDb(VmInstanceStateEvent.stopped); } else if (VmInstanceState.Paused.toString().equals(state)) { changeVmStateInDb(VmInstanceStateEvent.paused); } else { throw new CloudRuntimeException(String.format( "CheckVmStateOnHypervisorMsg should only report states[Running, Paused or Stopped]," + "but it reports %s for the vm[uuid:%s] on the host[uuid:%s]", state, self.getUuid(), hostUuid)); } completion.done(); } }); } protected void destroy(final VmInstanceDeletionPolicy deletionPolicy, Message msg, final Completion completion) { if (deletionPolicy == VmInstanceDeletionPolicy.DBOnly) { completion.success(); return; } if (deletionPolicy == VmInstanceDeletionPolicy.KeepVolume && self.getState().equals(VmInstanceState.Destroyed)) { completion.success(); return; } final VmInstanceInventory inv = VmInstanceInventory.valueOf(self); VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Destroy); if (msg instanceof ReleaseResourceMessage) { spec.setIgnoreResourceReleaseFailure(((ReleaseResourceMessage) msg).isIgnoreResourceReleaseFailure()); } self = changeVmStateInDb(VmInstanceStateEvent.destroying); FlowChain chain = getDestroyVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("destroy-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(Params.DeletionPolicy, deletionPolicy); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { completion.success(); } }).error(new 
FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { if (originalCopy.getState() == VmInstanceState.Running) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); } else { changeVmStateInDb(VmInstanceStateEvent.unknown); completion.fail(errCode); } } }).start(); } protected VmInstanceVO getSelf() { return self; } protected VmInstanceInventory getSelfInventory() { return VmInstanceInventory.valueOf(self); } public VmInstanceBase(VmInstanceVO vo) { this.self = vo; this.syncThreadName = "Vm-" + vo.getUuid(); this.originalCopy = ObjectUtils.newAndCopy(vo, vo.getClass()); } protected VmInstanceVO refreshVO() { return refreshVO(false); } protected VmInstanceVO refreshVO(boolean noException) { VmInstanceVO vo = self; self = dbf.findByUuid(self.getUuid(), VmInstanceVO.class); if (self == null && noException) { return null; } if (self == null) { throw new OperationFailureException(operr("vm[uuid:%s, name:%s] has been deleted", vo.getUuid(), vo.getName())); } originalCopy = ObjectUtils.newAndCopy(vo, vo.getClass()); return self; } protected FlowChain getCreateVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getCreateVmWorkFlowChain(inv); } protected FlowChain getStopVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getStopVmWorkFlowChain(inv); } protected FlowChain getRebootVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getRebootVmWorkFlowChain(inv); } protected FlowChain getStartVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getStartVmWorkFlowChain(inv); } protected FlowChain getDestroyVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getDestroyVmWorkFlowChain(inv); } protected FlowChain getExpungeVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getExpungeVmWorkFlowChain(inv); } protected FlowChain getMigrateVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getMigrateVmWorkFlowChain(inv); } protected FlowChain getAttachUninstantiatedVolumeWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getAttachUninstantiatedVolumeWorkFlowChain(inv); } protected FlowChain getAttachIsoWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getAttachIsoWorkFlowChain(inv); } protected FlowChain getDetachIsoWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getDetachIsoWorkFlowChain(inv); } protected FlowChain getPauseVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getPauseWorkFlowChain(inv); } protected FlowChain getResumeVmWorkFlowChain(VmInstanceInventory inv) { return vmMgr.getResumeVmWorkFlowChain(inv); } protected VmInstanceVO changeVmStateInDb(VmInstanceStateEvent stateEvent) { return changeVmStateInDb(stateEvent, null); } protected VmInstanceVO changeVmStateInDb(VmInstanceStateEvent stateEvent, Runnable runnable) { VmInstanceState bs = self.getState(); final VmInstanceState state = self.getState().nextState(stateEvent); SQLBatch sql = new SQLBatch(){ @Override protected void scripts() { self = findByUuid(self.getUuid(), self.getClass()); if (runnable != null) { runnable.run(); } if (state == VmInstanceState.Stopped) { // cleanup the hostUuid if the VM is stopped if (self.getHostUuid() != null) { self.setLastHostUuid(self.getHostUuid()); } self.setHostUuid(null); } self.setState(state); self = merge(self); } }; try { sql.execute(); } catch (DataIntegrityViolationException e){ sql.execute(); } if (bs != state) { logger.debug(String.format("vm[uuid:%s] changed state from %s to %s in db", self.getUuid(), bs, state)); 
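            // only a real state transition reaches here: fire the canonical state-changed
            // event and notify every VmStateChangedExtensionPoint listener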
VmCanonicalEvents.VmStateChangedData data = new VmCanonicalEvents.VmStateChangedData(); data.setVmUuid(self.getUuid()); data.setOldState(bs.toString()); data.setNewState(state.toString()); data.setInventory(getSelfInventory()); evtf.fire(VmCanonicalEvents.VM_FULL_STATE_CHANGED_PATH, data); VmInstanceInventory inv = getSelfInventory(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmStateChangedExtensionPoint.class), new ForEachFunction<VmStateChangedExtensionPoint>() { @Override public void run(VmStateChangedExtensionPoint ext) { ext.vmStateChanged(inv, bs, self.getState()); } }); //TODO: remove this notifyEmitter.notifyVmStateChange(VmInstanceInventory.valueOf(self), bs, state); } return self; } @Override @MessageSafe public void handleMessage(final Message msg) { if (msg instanceof APIMessage) { handleApiMessage((APIMessage) msg); } else { handleLocalMessage(msg); } } protected void handleLocalMessage(Message msg) { if (msg instanceof InstantiateNewCreatedVmInstanceMsg) { handle((InstantiateNewCreatedVmInstanceMsg) msg); } else if (msg instanceof StartVmInstanceMsg) { handle((StartVmInstanceMsg) msg); } else if (msg instanceof StopVmInstanceMsg) { handle((StopVmInstanceMsg) msg); } else if (msg instanceof RebootVmInstanceMsg) { handle((RebootVmInstanceMsg) msg); } else if (msg instanceof ChangeVmStateMsg) { handle((ChangeVmStateMsg) msg); } else if (msg instanceof DestroyVmInstanceMsg) { handle((DestroyVmInstanceMsg) msg); } else if (msg instanceof AttachNicToVmMsg) { handle((AttachNicToVmMsg) msg); } else if (msg instanceof CreateTemplateFromVmRootVolumeMsg) { handle((CreateTemplateFromVmRootVolumeMsg) msg); } else if (msg instanceof VmInstanceDeletionMsg) { handle((VmInstanceDeletionMsg) msg); } else if (msg instanceof VmAttachNicMsg) { handle((VmAttachNicMsg) msg); } else if (msg instanceof MigrateVmMsg) { handle((MigrateVmMsg) msg); } else if (msg instanceof DetachDataVolumeFromVmMsg) { handle((DetachDataVolumeFromVmMsg) msg); } else if (msg instanceof AttachDataVolumeToVmMsg) { handle((AttachDataVolumeToVmMsg) msg); } else if (msg instanceof GetVmMigrationTargetHostMsg) { handle((GetVmMigrationTargetHostMsg) msg); } else if (msg instanceof ChangeVmMetaDataMsg) { handle((ChangeVmMetaDataMsg) msg); } else if (msg instanceof LockVmInstanceMsg) { handle((LockVmInstanceMsg) msg); } else if (msg instanceof DetachNicFromVmMsg) { handle((DetachNicFromVmMsg) msg); } else if (msg instanceof VmStateChangedOnHostMsg) { handle((VmStateChangedOnHostMsg) msg); } else if (msg instanceof VmCheckOwnStateMsg) { handle((VmCheckOwnStateMsg) msg); } else if (msg instanceof ExpungeVmMsg) { handle((ExpungeVmMsg) msg); } else if (msg instanceof HaStartVmInstanceMsg) { handle((HaStartVmInstanceMsg) msg); } else if (msg instanceof OverlayMessage) { handle((OverlayMessage) msg); } else if (msg instanceof ReimageVmInstanceMsg) { handle((ReimageVmInstanceMsg) msg); } else if (msg instanceof GetVmStartingCandidateClustersHostsMsg) { handle((GetVmStartingCandidateClustersHostsMsg) msg); } else if (msg instanceof MigrateVmInnerMsg) { handle((MigrateVmInnerMsg) msg); } else if (msg instanceof AddL3NetworkToVmNicMsg) { handle((AddL3NetworkToVmNicMsg) msg); } else if (msg instanceof DeleteL3NetworkFromVmNicMsg) { handle((DeleteL3NetworkFromVmNicMsg) msg); } else if (msg instanceof DetachIsoFromVmInstanceMsg) { handle((DetachIsoFromVmInstanceMsg) msg); } else if (msg instanceof DeleteVmCdRomMsg) { handle((DeleteVmCdRomMsg) msg); } else if (msg instanceof CreateVmCdRomMsg) { handle((CreateVmCdRomMsg) msg); } 
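        // messages not matched above may still be claimed by a VmInstanceBaseExtensionFactory;
        // only if no factory claims them are they reported as unknown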
else { VmInstanceBaseExtensionFactory ext = vmMgr.getVmInstanceBaseExtensionFactory(msg); if (ext != null) { VmInstance v = ext.getVmInstance(self); v.handleMessage(msg); } else { bus.dealWithUnknownMessage(msg); } } } private void handle(CreateVmCdRomMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { CreateVmCdRomReply reply = new CreateVmCdRomReply(); doCreateVmCdRom(msg, new ReturnValueCompletion<VmCdRomInventory>(msg) { @Override public void success(VmCdRomInventory inv) { reply.setInventory(inv); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("create-vm-%s-cd-rom", msg.getVmInstanceUuid()); } }); } private void handle(MigrateVmInnerMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("migrate-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { migrateVm(msg, new Completion(chain) { @Override public void success() { MigrateVmInnerReply evt = new MigrateVmInnerReply(); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.reply(msg, evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { MigrateVmInnerReply evt = new MigrateVmInnerReply(); evt.setError(errorCode); bus.reply(msg, evt); chain.next(); } }); } }); } private void handle(final APIGetVmStartingCandidateClustersHostsMsg msg) { APIGetVmStartingCandidateClustersHostsReply reply = new APIGetVmStartingCandidateClustersHostsReply(); final GetVmStartingCandidateClustersHostsMsg gmsg = new GetVmStartingCandidateClustersHostsMsg(); gmsg.setUuid(msg.getUuid()); bus.makeLocalServiceId(gmsg, VmInstanceConstant.SERVICE_ID); bus.send(gmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply re) { GetVmStartingCandidateClustersHostsReply greply = (GetVmStartingCandidateClustersHostsReply) re; if (!re.isSuccess()) { reply.setSuccess(false); reply.setError(re.getError()); if (greply.getHostInventories() != null) { reply.setHostInventories(greply.getHostInventories()); reply.setClusterInventories(greply.getClusterInventories()); } } else { reply.setHostInventories(greply.getHostInventories()); reply.setClusterInventories(greply.getClusterInventories()); } bus.reply(msg, reply); } }); } private void handle(final GetVmStartingCandidateClustersHostsMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { GetVmStartingCandidateClustersHostsReply reply = new GetVmStartingCandidateClustersHostsReply(); getStartingCandidateHosts(msg, new ReturnValueCompletion<AllocateHostDryRunReply>(chain) { @Override public void success(AllocateHostDryRunReply returnValue) { List<HostInventory> hosts = ((AllocateHostDryRunReply) returnValue).getHosts(); if (!hosts.isEmpty()) { List<String> cuuids = CollectionUtils.transformToList(hosts, new Function<String, HostInventory>() { @Override public String call(HostInventory arg) { return arg.getClusterUuid(); } }); SimpleQuery<ClusterVO> cq = dbf.createQuery(ClusterVO.class); cq.add(ClusterVO_.uuid, Op.IN, cuuids); List<ClusterVO> cvos = cq.list(); reply.setClusterInventories(ClusterInventory.valueOf(cvos)); reply.setHostInventories(hosts); 
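                            // the candidate clusters were derived from the hosts' clusterUuids in a single IN query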
} else { reply.setHostInventories(hosts); reply.setClusterInventories(new ArrayList<>()); } bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { if (HostAllocatorError.NO_AVAILABLE_HOST.toString().equals(errorCode.getCode())) { reply.setHostInventories(new ArrayList<>()); reply.setClusterInventories(new ArrayList<>()); } else { reply.setError(errorCode); } reply.setSuccess(false); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "get-starting-candidate-hosts"; } }); } private void getStartingCandidateHosts(final NeedReplyMessage msg, final ReturnValueCompletion completion) { refreshVO(); ErrorCode err = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (err != null) { throw new OperationFailureException(err); } final DesignatedAllocateHostMsg amsg = new DesignatedAllocateHostMsg(); amsg.setCpuCapacity(self.getCpuNum()); amsg.setMemoryCapacity(self.getMemorySize()); amsg.setVmInstance(VmInstanceInventory.valueOf(self)); amsg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); amsg.setAllocatorStrategy(self.getAllocatorStrategy()); amsg.setVmOperation(VmOperation.Start.toString()); if (self.getImageUuid() != null && dbf.findByUuid(self.getImageUuid(), ImageVO.class) != null) { amsg.setImage(ImageInventory.valueOf(dbf.findByUuid(self.getImageUuid(), ImageVO.class))); } amsg.setL3NetworkUuids(VmNicHelper.getL3Uuids(VmNicInventory.valueOf(self.getVmNics()))); amsg.setDryRun(true); amsg.setListAllHosts(true); bus.send(amsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply re) { if (!re.isSuccess()) { completion.fail(re.getError()); } else { completion.success(re); } } }); } private void handle(final HaStartVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { refreshVO(); HaStartVmJudger judger; try { Class clz = Class.forName(msg.getJudgerClassName()); judger = (HaStartVmJudger) clz.newInstance(); } catch (Exception e) { throw new CloudRuntimeException(e); } final HaStartVmInstanceReply reply = new HaStartVmInstanceReply(); if (!judger.whetherStartVm(getSelfInventory())) { bus.reply(msg, reply); chain.next(); return; } logger.debug(String.format("HaStartVmJudger[%s] says the VM[uuid:%s, name:%s] is qualified for HA start, now we are starting it", judger.getClass(), self.getUuid(), self.getName())); SQL.New(VmInstanceVO.class).eq(VmInstanceVO_.uuid, self.getUuid()) .set(VmInstanceVO_.state, VmInstanceState.Stopped) .update(); startVm(msg, new Completion(msg, chain) { @Override public void success() { reply.setInventory(getSelfInventory()); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "ha-start-vm"; } }); } private void changeVmIp(final String l3Uuid, final String ip, final Completion completion) { final VmNicVO targetNic = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { for (UsedIpVO ip : arg.getUsedIps()) { if (ip.getL3NetworkUuid().equals(l3Uuid)) { return arg; } } return null; } }); if (targetNic == null) { throw new OperationFailureException(operr("the vm[uuid:%s] has no nic on the L3 network[uuid:%s]", self.getUuid(), l3Uuid)); } if (ip.equals(targetNic.getIp())) { completion.success(); 
            return;
        }

        final UsedIpInventory oldIp = new UsedIpInventory();
        for (UsedIpVO ipvo : targetNic.getUsedIps()) {
            if (ipvo.getL3NetworkUuid().equals(l3Uuid)) {
                oldIp.setIp(ipvo.getIp());
                oldIp.setGateway(ipvo.getGateway());
                oldIp.setNetmask(ipvo.getNetmask());
                oldIp.setL3NetworkUuid(ipvo.getL3NetworkUuid());
                oldIp.setUuid(ipvo.getUuid());
            }
        }

        final FlowChain chain = FlowChainBuilder.newShareFlowChain();
        chain.setName(String.format("change-vm-ip-to-%s-l3-%s-vm-%s", ip, l3Uuid, self.getUuid()));
        chain.then(new ShareFlow() {
            UsedIpInventory newIp;
            String oldIpUuid = oldIp.getUuid();

            @Override
            public void setup() {
                flow(new Flow() {
                    String __name__ = "acquire-new-ip";

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        AllocateIpMsg amsg = new AllocateIpMsg();
                        amsg.setL3NetworkUuid(l3Uuid);
                        amsg.setRequiredIp(ip);
                        bus.makeTargetServiceIdByResourceUuid(amsg, L3NetworkConstant.SERVICE_ID, l3Uuid);
                        bus.send(amsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (!reply.isSuccess()) {
                                    trigger.fail(reply.getError());
                                } else {
                                    AllocateIpReply r = reply.castReply();
                                    newIp = r.getIpInventory();
                                    trigger.next();
                                }
                            }
                        });
                    }

                    @Override
                    public void rollback(FlowRollback trigger, Map data) {
                        if (newIp != null) {
                            ReturnIpMsg rmsg = new ReturnIpMsg();
                            rmsg.setL3NetworkUuid(newIp.getL3NetworkUuid());
                            rmsg.setUsedIpUuid(newIp.getUuid());
                            bus.makeTargetServiceIdByResourceUuid(rmsg, L3NetworkConstant.SERVICE_ID, newIp.getL3NetworkUuid());
                            bus.send(rmsg, new CloudBusCallBack(trigger) {
                                @Override
                                public void run(MessageReply reply) {
                                    trigger.rollback();
                                }
                            });
                        } else {
                            trigger.rollback();
                        }
                    }
                });

                flow(new NoRollbackFlow() {
                    String __name__ = "change-ip-in-database";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        /* for nics with multiple IP addresses, nic.ip is changed ONLY when the static IP being set is the default IP */
                        for (VmNicExtensionPoint ext : pluginRgty.getExtensionList(VmNicExtensionPoint.class)) {
                            ext.afterAddIpAddress(targetNic.getUuid(), newIp.getUuid());
                        }
                        trigger.next();
                    }
                });

                flow(new NoRollbackFlow() {
                    String __name__ = "return-old-ip";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        ReturnIpMsg rmsg = new ReturnIpMsg();
                        rmsg.setUsedIpUuid(oldIpUuid);
                        rmsg.setL3NetworkUuid(oldIp.getL3NetworkUuid());
                        bus.makeTargetServiceIdByResourceUuid(rmsg, L3NetworkConstant.SERVICE_ID, oldIp.getL3NetworkUuid());
                        bus.send(rmsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                for (VmNicExtensionPoint ext : pluginRgty.getExtensionList(VmNicExtensionPoint.class)) {
                                    ext.afterDelIpAddress(targetNic.getUuid(), oldIpUuid);
                                }
                                trigger.next();
                            }
                        });
                    }
                });

                done(new FlowDoneHandler(completion) {
                    @Override
                    public void handle(Map data) {
                        final VmInstanceInventory vm = getSelfInventory();
                        final VmNicInventory nic = VmNicInventory.valueOf(targetNic);
                        CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmIpChangedExtensionPoint.class), new ForEachFunction<VmIpChangedExtensionPoint>() {
                            @Override
                            public void run(VmIpChangedExtensionPoint ext) {
                                ext.vmIpChanged(vm, nic, oldIp, newIp);
                            }
                        });
                        completion.success();
                    }
                });

                error(new FlowErrorHandler(completion) {
                    @Override
                    public void handle(ErrorCode errCode, Map data) {
                        completion.fail(errCode);
                    }
                });
            }
        }).start();
    }

    private void handle(final ExpungeVmMsg msg) {
        final ExpungeVmReply reply = new ExpungeVmReply();
        thdf.chainSubmit(new ChainTask(msg) {
            @Override
            public String getSyncSignature() {
                return syncThreadName;
            }

            @Override
            public void run(final SyncTaskChain chain) {
                expunge(msg, new Completion(msg, chain)
{ @Override public void success() { bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "expunge-vm"; } }); } private void expunge(Message msg, final Completion completion) { refreshVO(); final VmInstanceInventory inv = getSelfInventory(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmBeforeExpungeExtensionPoint.class), arg -> arg.vmBeforeExpunge(inv)); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } if (inv.getAllVolumes().size() > 1) { throw new CloudRuntimeException(String.format("why the deleted vm[uuid:%s] has data volumes??? %s", self.getUuid(), JSONObjectUtil.toJsonString(inv.getAllVolumes()))); } VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Expunge); FlowChain chain = getExpungeVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("expunge-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(Params.DeletionPolicy, VmInstanceDeletionPolicy.Direct); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmAfterExpungeExtensionPoint.class), arg -> arg.vmAfterExpunge(inv)); callVmJustBeforeDeleteFromDbExtensionPoint(); dbf.reload(self); dbf.removeCollection(self.getVmNics(), VmNicVO.class); dbf.removeCollection(self.getVmCdRoms(), VmCdRomVO.class); dbf.remove(self); logger.debug(String.format("successfully expunged the vm[uuid:%s]", self.getUuid())); dbf.eoCleanup(VmInstanceVO.class, Collections.singletonList(self.getUuid())); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } private void handle(final VmCheckOwnStateMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { refreshVO(); final VmCheckOwnStateReply reply = new VmCheckOwnStateReply(); if (self.getHostUuid() == null) { // no way to check bus.reply(msg, reply); chain.next(); return; } final CheckVmStateOnHypervisorMsg cmsg = new CheckVmStateOnHypervisorMsg(); cmsg.setVmInstanceUuids(list(self.getUuid())); cmsg.setHostUuid(self.getHostUuid()); bus.makeTargetServiceIdByResourceUuid(cmsg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(cmsg, new CloudBusCallBack(msg, chain) { @Override public void run(MessageReply r) { if (!r.isSuccess()) { reply.setError(r.getError()); bus.reply(msg, r); chain.next(); return; } CheckVmStateOnHypervisorReply cr = r.castReply(); String s = cr.getStates().get(self.getUuid()); VmInstanceState state = VmInstanceState.valueOf(s); if (state != self.getState()) { VmStateChangedOnHostMsg vcmsg = new VmStateChangedOnHostMsg(); vcmsg.setHostUuid(self.getHostUuid()); vcmsg.setVmInstanceUuid(self.getUuid()); vcmsg.setStateOnHost(state); bus.makeTargetServiceIdByResourceUuid(vcmsg, VmInstanceConstant.SERVICE_ID, self.getUuid()); bus.send(vcmsg); } bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return "check-state"; } }); } private void handle(final VmStateChangedOnHostMsg msg) { logger.debug(String.format("get VmStateChangedOnHostMsg for vm[uuid:%s], on 
host[uuid:%s], which tracing state is [%s]" + " and current state on host is [%s]", msg.getVmInstanceUuid(), msg.getHostUuid(), msg.getVmStateAtTracingMoment(), msg.getStateOnHost())); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { if (msg.isFromSync()) { return syncThreadName; } else { return String.format("change-vm-state-%s", syncThreadName); } } @Override public void run(final SyncTaskChain chain) { logger.debug(String.format("running sync task %s with sync signature %s", getName(), getSyncSignature())); vmStateChangeOnHost(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return String.format("vm-%s-state-change-on-the-host-%s", self.getUuid(), msg.getHostUuid()); } }); } private VmAbnormalLifeCycleOperation getVmAbnormalLifeCycleOperation(String originalHostUuid, String currentHostUuid, VmInstanceState originalState, VmInstanceState currentState) { if (originalState == VmInstanceState.Stopped && currentState == VmInstanceState.Running) { return VmAbnormalLifeCycleOperation.VmRunningOnTheHost; } if (originalState == VmInstanceState.Running && currentState == VmInstanceState.Stopped && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmStoppedOnTheSameHost; } if (VmInstanceState.intermediateStates.contains(originalState) && currentState == VmInstanceState.Running) { return VmAbnormalLifeCycleOperation.VmRunningFromIntermediateState; } if (VmInstanceState.intermediateStates.contains(originalState) && currentState == VmInstanceState.Stopped) { return VmAbnormalLifeCycleOperation.VmStoppedFromIntermediateState; } if (originalState == VmInstanceState.Running && currentState == VmInstanceState.Paused && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmPausedFromRunningStateHostNotChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Paused && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmPausedFromUnknownStateHostNotChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Running && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostNotChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Running && !currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostChanged; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Stopped && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmStoppedOnTheSameHost; } if (originalState == VmInstanceState.Unknown && currentState == VmInstanceState.Stopped && originalHostUuid == null && currentHostUuid.equals(self.getLastHostUuid())) { return VmAbnormalLifeCycleOperation.VmStoppedFromUnknownStateHostNotChanged; } if (originalState == VmInstanceState.Running && originalState == currentState && !currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmMigrateToAnotherHost; } if (originalState == VmInstanceState.Paused && currentState == VmInstanceState.Running && currentHostUuid.equals(originalHostUuid)) { return VmAbnormalLifeCycleOperation.VmRunningFromPausedStateHostNotChanged; } if (originalState == VmInstanceState.Paused && currentState == VmInstanceState.Stopped && currentHostUuid.equals(originalHostUuid)) { return 
VmAbnormalLifeCycleOperation.VmStoppedFromPausedStateHostNotChanged; } throw new CloudRuntimeException(String.format("unknown VM[uuid:%s] abnormal state combination[original state: %s," + " current state: %s, original host:%s, current host:%s]", self.getUuid(), originalState, currentState, originalHostUuid, currentHostUuid)); } private void vmStateChangeOnHost(final VmStateChangedOnHostMsg msg, final NoErrorCompletion completion) { final VmStateChangedOnHostReply reply = new VmStateChangedOnHostReply(); if (refreshVO(true) == null) { // the vm has been deleted reply.setError(operr("the vm has been deleted")); bus.reply(msg, reply); completion.done(); return; } if (msg.getVmStateAtTracingMoment() != null) { // the vm tracer periodically reports vms's state. It catches an old state // before an vm operation(start, stop, reboot, migrate) completes. Ignore this VmInstanceState expected = VmInstanceState.valueOf(msg.getVmStateAtTracingMoment()); if (expected != self.getState()) { bus.reply(msg, reply); completion.done(); return; } } final String originalHostUuid = self.getHostUuid(); final String currentHostUuid = msg.getHostUuid(); final VmInstanceState originalState = self.getState(); final VmInstanceState currentState = VmInstanceState.valueOf(msg.getStateOnHost()); if (originalState == currentState && currentHostUuid.equals(originalHostUuid)) { logger.debug(String.format("vm[uuid:%s]'s state[%s] is inline with its state on the host[uuid:%s], ignore VmStateChangeOnHostMsg", self.getUuid(), originalState, originalHostUuid)); bus.reply(msg, reply); completion.done(); return; } if (originalState == VmInstanceState.Stopped && currentState == VmInstanceState.Unknown) { bus.reply(msg, reply); completion.done(); return; } final Runnable fireEvent = () -> { VmTracerCanonicalEvents.VmStateChangedOnHostData data = new VmTracerCanonicalEvents.VmStateChangedOnHostData(); data.setVmUuid(self.getUuid()); data.setFrom(originalState); data.setTo(self.getState()); data.setOriginalHostUuid(originalHostUuid); data.setCurrentHostUuid(self.getHostUuid()); evtf.fire(VmTracerCanonicalEvents.VM_STATE_CHANGED_PATH, data); }; if (currentState == VmInstanceState.Unknown) { changeVmStateInDb(VmInstanceStateEvent.unknown); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } VmAbnormalLifeCycleOperation operation = getVmAbnormalLifeCycleOperation(originalHostUuid, currentHostUuid, originalState, currentState); if (operation == VmAbnormalLifeCycleOperation.VmRunningFromUnknownStateHostNotChanged) { // the vm is detected on the host again. 
It's largely because the host disconnected before // and now reconnected changeVmStateInDb(VmInstanceStateEvent.running, ()-> self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmStoppedFromUnknownStateHostNotChanged) { // the vm comes out of the unknown state to the stopped state // it happens when an operation failure led the vm from the stopped state to the unknown state, // and later on the vm was detected as stopped on the host again changeVmStateInDb(VmInstanceStateEvent.stopped, ()-> self.setHostUuid(null)); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmStoppedFromPausedStateHostNotChanged) { changeVmStateInDb(VmInstanceStateEvent.stopped, ()-> self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmPausedFromUnknownStateHostNotChanged) { //some reason led vm to unknown state and the paused vm are detected on the host again changeVmStateInDb(VmInstanceStateEvent.paused, ()-> self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmPausedFromRunningStateHostNotChanged) { // just synchronize database changeVmStateInDb(VmInstanceStateEvent.paused, ()->self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } else if (operation == VmAbnormalLifeCycleOperation.VmRunningFromPausedStateHostNotChanged) { // just synchronize database changeVmStateInDb(VmInstanceStateEvent.running, ()->self.setHostUuid(msg.getHostUuid())); fireEvent.run(); bus.reply(msg, reply); completion.done(); return; } List<VmAbnormalLifeCycleExtensionPoint> exts = pluginRgty.getExtensionList(VmAbnormalLifeCycleExtensionPoint.class); VmAbnormalLifeCycleStruct struct = new VmAbnormalLifeCycleStruct(); struct.setCurrentHostUuid(currentHostUuid); struct.setCurrentState(currentState); struct.setOriginalHostUuid(originalHostUuid); struct.setOriginalState(originalState); struct.setVmInstance(getSelfInventory()); struct.setOperation(operation); logger.debug(String.format("the vm[uuid:%s]'s state changed abnormally on the host[uuid:%s]," + " ZStack is going to take the operation[%s]," + "[original state: %s, current state: %s, original host: %s, current host:%s]", self.getUuid(), currentHostUuid, operation, originalState, currentState, originalHostUuid, currentHostUuid)); FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("handle-abnormal-lifecycle-of-vm-%s", self.getUuid())); chain.getData().put(Params.AbnormalLifeCycleStruct, struct); chain.allowEmptyFlow(); for (VmAbnormalLifeCycleExtensionPoint ext : exts) { Flow flow = ext.createVmAbnormalLifeCycleHandlingFlow(struct); chain.then(flow); } chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { if (currentState == VmInstanceState.Running) { changeVmStateInDb(VmInstanceStateEvent.running, ()-> self.setHostUuid(currentHostUuid)); } else if (currentState == VmInstanceState.Stopped) { changeVmStateInDb(VmInstanceStateEvent.stopped); } fireEvent.run(); bus.reply(msg, reply); completion.done(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { logger.warn(String.format("failed to handle abnormal lifecycle of the vm[uuid:%s, original 
state: %s, current state:%s," + "original host: %s, current host: %s], %s", self.getUuid(), originalState, currentState, originalHostUuid, currentHostUuid, errCode)); reply.setError(errCode); bus.reply(msg, reply); completion.done(); } }).start(); } private List<String> buildUserdata() { return new UserdataBuilder().buildByVmUuid(self.getUuid()); } private void handle(final DetachNicFromVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final DetachNicFromVmReply reply = new DetachNicFromVmReply(); refreshVO(); if (self.getState() == VmInstanceState.Destroyed) { // the cascade framework may send this message when // the vm has been destroyed VmNicVO nic = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return msg.getVmNicUuid().equals(arg.getUuid()) ? arg : null; } }); if (nic != null) { dbf.remove(nic); } bus.reply(msg, reply); chain.next(); return; } final ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("l3-network-detach-from-vm-%s", msg.getVmInstanceUuid())); fchain.then(new NoRollbackFlow() { String __name__ = "before-detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { VmNicInventory nic = VmNicInventory.valueOf((VmNicVO) Q.New(VmNicVO.class).eq(VmNicVO_.uuid, msg.getVmNicUuid()).find()); beforeDetachNic(nic, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { String __name__ = "detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { detachNic(msg.getVmNicUuid(), true, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); bus.reply(msg, reply); chain.next(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); chain.next(); } }).start(); } @Override public String getName() { return "nic-detach"; } }); } private void handle(final AddL3NetworkToVmNicMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final AddL3NetworkToVmNicReply reply = new AddL3NetworkToVmNicReply(); refreshVO(); final ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } VmNicVO vmNicVO = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); spec.setDestNics(list(VmNicInventory.valueOf(vmNicVO))); L3NetworkVO l3Vo = dbf.findByUuid(msg.getNewL3Uuid(), L3NetworkVO.class); spec.setL3Networks(list(new VmNicSpec(L3NetworkInventory.valueOf(l3Vo)))); FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("update-vmNic-%s-to-backend", msg.getVmInstanceUuid())); 
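                // the backend update is two flows: instantiate network-service resources for
                // the new L3 first, then push the updated nic definition down to the hypervisor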
fchain.getData().put(Params.VmInstanceSpec.toString(), spec); fchain.then(new VmInstantiateResourceOnAttachingNicFlow()); fchain.then(new VmUpdateNicOnHypervisorFlow()); fchain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); bus.reply(msg, reply); chain.next(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); chain.next(); } }).start(); } @Override public String getName() { return "update-vmNic-to-backend"; } }); } private void handle(final DeleteL3NetworkFromVmNicMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final DeleteL3NetworkFromVmNicReply reply = new DeleteL3NetworkFromVmNicReply(); refreshVO(); final ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } VmNicVO vmNicVO = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); spec.setDestNics(list(VmNicInventory.valueOf(vmNicVO))); L3NetworkVO l3Vo = dbf.findByUuid(msg.getNewL3Uuid(), L3NetworkVO.class); spec.setL3Networks(list(new VmNicSpec(L3NetworkInventory.valueOf(l3Vo)))); FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("update-vmNic-%s-to-backend", msg.getVmInstanceUuid())); fchain.getData().put(Params.VmInstanceSpec.toString(), spec); fchain.then(new VmReleaseResourceOnDetachingNicFlow()); fchain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); bus.reply(msg, reply); chain.next(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); chain.next(); } }).start(); } @Override public String getName() { return "update-vmNic-to-backend"; } }); } private void handle(final LockVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { logger.debug(String.format("locked vm[uuid:%s] for %s", self.getUuid(), msg.getReason())); evtf.on(LockResourceMessage.UNLOCK_CANONICAL_EVENT_PATH, new AutoOffEventCallback() { @Override public boolean run(Map tokens, Object data) { if (msg.getUnlockKey().equals(data)) { logger.debug(String.format("unlocked vm[uuid:%s] that was locked by %s", self.getUuid(), msg.getReason())); chain.next(); return true; } return false; } }); LockVmInstanceReply reply = new LockVmInstanceReply(); bus.reply(msg, reply); } @Override public String getName() { return String.format("lock-vm-%s", self.getUuid()); } }); } private void handle(final ChangeVmMetaDataMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { changeMetaData(msg); chain.next(); } @Override public String getName() { return String.format("change-meta-data-of-vm-%s", self.getUuid()); } }); } private void changeMetaData(ChangeVmMetaDataMsg msg) { ChangeVmMetaDataReply reply = new ChangeVmMetaDataReply(); refreshVO(); if (self == null) { bus.reply(msg, reply); return; } AtomicVmState s = msg.getState(); AtomicHostUuid h = msg.getHostUuid(); if 
(msg.isNeedHostAndStateBothMatch()) { if (s != null && h != null && s.getExpected() == self.getState()) { if ((h.getExpected() == null && self.getHostUuid() == null) || (h.getExpected() != null && h.getExpected().equals(self.getHostUuid()))) { changeVmStateInDb(s.getValue().getDrivenEvent(), ()-> { self.setHostUuid(h.getValue()); }); reply.setChangeStateDone(true); reply.setChangeHostUuidDone(true); } } } else { if (s != null && s.getExpected() == self.getState()) { changeVmStateInDb(s.getValue().getDrivenEvent()); reply.setChangeStateDone(true); } if (h != null) { if ((h.getExpected() == null && self.getHostUuid() == null) || (h.getExpected() != null && h.getExpected().equals(self.getHostUuid()))) { self.setHostUuid(h.getValue()); dbf.update(self); reply.setChangeHostUuidDone(true); } } } bus.reply(msg, reply); } private void getVmMigrationTargetHost(Message msg, final ReturnValueCompletion<List<HostInventory>> completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), VmErrors.MIGRATE_ERROR); if (allowed != null) { completion.fail(allowed); return; } final DesignatedAllocateHostMsg amsg = new DesignatedAllocateHostMsg(); amsg.setCpuCapacity(self.getCpuNum()); amsg.setMemoryCapacity(self.getMemorySize()); amsg.getAvoidHostUuids().add(self.getHostUuid()); if (msg instanceof GetVmMigrationTargetHostMsg) { GetVmMigrationTargetHostMsg gmsg = (GetVmMigrationTargetHostMsg) msg; if (gmsg.getAvoidHostUuids() != null) { amsg.getAvoidHostUuids().addAll(gmsg.getAvoidHostUuids()); } } else { if (msg instanceof APIMessage){ if (((APIMessage) msg).getSystemTags() != null){ amsg.setSystemTags(new ArrayList<String>(((APIMessage) msg).getSystemTags())); } } } amsg.setVmInstance(VmInstanceInventory.valueOf(self)); amsg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); amsg.setAllocatorStrategy(HostAllocatorConstant.MIGRATE_VM_ALLOCATOR_TYPE); amsg.setVmOperation(VmOperation.Migrate.toString()); amsg.setL3NetworkUuids(VmNicHelper.getL3Uuids(VmNicInventory.valueOf(self.getVmNics()))); amsg.setDryRun(true); amsg.setAllowNoL3Networks(true); bus.send(amsg, new CloudBusCallBack(completion) { @Override public void run(MessageReply re) { if (!re.isSuccess()) { if (HostAllocatorError.NO_AVAILABLE_HOST.toString().equals(re.getError().getCode())) { completion.success(new ArrayList<HostInventory>()); } else { completion.fail(re.getError()); } } else { completion.success(((AllocateHostDryRunReply) re).getHosts()); } } }); } private void handle(final GetVmMigrationTargetHostMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final GetVmMigrationTargetHostReply reply = new GetVmMigrationTargetHostReply(); getVmMigrationTargetHost(msg, new ReturnValueCompletion<List<HostInventory>>(msg, chain) { @Override public void success(List<HostInventory> returnValue) { reply.setHosts(returnValue); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("get-migration-target-host-for-vm-%s", self.getUuid()); } }); } private void handle(final AttachDataVolumeToVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { attachDataVolume(msg, new NoErrorCompletion(chain) { @Override 
public void done() { chain.next(); } }); } @Override public String getName() { return String.format("attach-volume-%s-to-vm-%s", msg.getVolume().getUuid(), msg.getVmInstanceUuid()); } }); } private void handle(final DetachDataVolumeFromVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { detachVolume(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return String.format("detach-volume-%s-from-vm-%s", msg.getVolume().getUuid(), msg.getVmInstanceUuid()); } }); } private void handle(final MigrateVmMsg msg) { final MigrateVmReply reply = new MigrateVmReply(); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { migrateVm(msg, new Completion(chain) { @Override public void success() { bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("migrate-vm-%s", self.getUuid()); } }); } private void attachNic(final Message msg, final List<String> l3Uuids, final ReturnValueCompletion<VmNicInventory> completion) { thdf.chainSubmit(new ChainTask(completion) { @Override public String getSyncSignature() { return syncThreadName; } @Override @Deferred public void run(final SyncTaskChain chain) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { completion.fail(allowed); return; } class SetDefaultL3Network { private boolean isSet = false; void set() { if (self.getDefaultL3NetworkUuid() == null) { self.setDefaultL3NetworkUuid(l3Uuids.get(0)); self = dbf.updateAndRefresh(self); isSet = true; } } void rollback() { if (isSet) { self.setDefaultL3NetworkUuid(null); dbf.update(self); } } } class SetStaticIp { private boolean isSet = false; void set() { if (!(msg instanceof APIAttachL3NetworkToVmMsg)) { return; } APIAttachL3NetworkToVmMsg amsg = (APIAttachL3NetworkToVmMsg) msg; if (amsg.getStaticIpMap().isEmpty()) { return; } for (Map.Entry<String, String> e : amsg.getStaticIpMap().entrySet()) { new StaticIpOperator().setStaticIp(self.getUuid(), e.getKey(), e.getValue()); } isSet = true; } void rollback() { if (isSet) { APIAttachL3NetworkToVmMsg amsg = (APIAttachL3NetworkToVmMsg) msg; for (Map.Entry<String, String> e : amsg.getStaticIpMap().entrySet()) { new StaticIpOperator().deleteStaticIpByVmUuidAndL3Uuid(self.getUuid(), e.getKey()); } } } } final SetDefaultL3Network setDefaultL3Network = new SetDefaultL3Network(); setDefaultL3Network.set(); Defer.guard(new Runnable() { @Override public void run() { setDefaultL3Network.rollback(); } }); final SetStaticIp setStaticIp = new SetStaticIp(); setStaticIp.set(); Defer.guard(new Runnable() { @Override public void run() { setStaticIp.rollback(); } }); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); final VmInstanceInventory vm = spec.getVmInventory(); List<L3NetworkInventory> l3s = new ArrayList<>(); for (String l3Uuid : l3Uuids) { L3NetworkVO l3vo = dbf.findByUuid(l3Uuid, L3NetworkVO.class); final L3NetworkInventory l3 = L3NetworkInventory.valueOf(l3vo); l3s.add(l3); for (VmPreAttachL3NetworkExtensionPoint ext : pluginRgty.getExtensionList(VmPreAttachL3NetworkExtensionPoint.class)) { 
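// Pre-attach extension points run synchronously, once per L3 network, and can
// abort the operation here before the attach flow chain below is assembled.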
ext.vmPreAttachL3Network(vm, l3); } } spec.setL3Networks(list(new VmNicSpec(l3s))); spec.setDestNics(new ArrayList<VmNicInventory>()); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmBeforeAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmBeforeAttachL3NetworkExtensionPoint>() { @Override public void run(VmBeforeAttachL3NetworkExtensionPoint arg) { for (L3NetworkInventory l3 : l3s) { arg.vmBeforeAttachL3Network(vm, l3); } } }); FlowChain flowChain = FlowChainBuilder.newSimpleFlowChain(); setFlowMarshaller(flowChain); flowChain.setName(String.format("attachNic-vm-%s-l3-%s", self.getUuid(), l3Uuids.get(0))); flowChain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); flowChain.then(new VmAllocateNicFlow()); flowChain.then(new VmAttachL3NetworkToNicFlow()); flowChain.then(new VmSetDefaultL3NetworkOnAttachingFlow()); if (self.getState() == VmInstanceState.Running) { flowChain.then(new VmInstantiateResourceOnAttachingNicFlow()); flowChain.then(new VmAttachNicOnHypervisorFlow()); } flowChain.done(new FlowDoneHandler(chain) { @Override public void handle(Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmAfterAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmAfterAttachL3NetworkExtensionPoint>() { @Override public void run(VmAfterAttachL3NetworkExtensionPoint arg) { for (L3NetworkInventory l3 : l3s) { arg.vmAfterAttachL3Network(vm, l3); } } }); VmNicInventory nic = spec.getDestNics().get(0); completion.success(nic); chain.next(); } }).error(new FlowErrorHandler(chain) { @Override public void handle(final ErrorCode errCode, Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmFailToAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmFailToAttachL3NetworkExtensionPoint>() { @Override public void run(VmFailToAttachL3NetworkExtensionPoint arg) { for (L3NetworkInventory l3 : l3s) { arg.vmFailToAttachL3Network(vm, l3, errCode); } } }); setDefaultL3Network.rollback(); setStaticIp.rollback(); completion.fail(errCode); chain.next(); } }).start(); } @Override public String getName() { return String.format("attachNic-vm-%s-l3-%s", self.getUuid(), l3Uuids.get(0)); } }); } private void attachNic(final APIAttachVmNicToVmMsg msg, final ReturnValueCompletion<VmNicInventory> completion) { thdf.chainSubmit(new ChainTask(completion) { @Override public String getSyncSignature() { return syncThreadName; } @Override @Deferred public void run(final SyncTaskChain chain) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { completion.fail(allowed); return; } String vmNicUuid = msg.getVmNicUuid(); VmNicVO vmNicVO = dbf.findByUuid(vmNicUuid, VmNicVO.class); String l3Uuid = VmNicHelper.getPrimaryL3Uuid(VmNicInventory.valueOf(vmNicVO)); class SetDefaultL3Network { private boolean isSet = false; void set() { if (self.getDefaultL3NetworkUuid() == null) { self.setDefaultL3NetworkUuid(l3Uuid); self = dbf.updateAndRefresh(self); isSet = true; } } void rollback() { if (isSet) { self.setDefaultL3NetworkUuid(null); dbf.update(self); } } } final SetDefaultL3Network setDefaultL3Network = new SetDefaultL3Network(); setDefaultL3Network.set(); Defer.guard(new Runnable() { @Override public void run() { setDefaultL3Network.rollback(); } }); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachNic); spec.setVmInventory(VmInstanceInventory.valueOf(self)); L3NetworkVO l3vo = dbf.findByUuid(l3Uuid, L3NetworkVO.class); final 
L3NetworkInventory l3 = L3NetworkInventory.valueOf(l3vo); final VmInstanceInventory vm = getSelfInventory(); for (VmPreAttachL3NetworkExtensionPoint ext : pluginRgty.getExtensionList(VmPreAttachL3NetworkExtensionPoint.class)) { ext.vmPreAttachL3Network(vm, l3); } spec.setL3Networks(list(new VmNicSpec(l3))); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmBeforeAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmBeforeAttachL3NetworkExtensionPoint>() { @Override public void run(VmBeforeAttachL3NetworkExtensionPoint arg) { arg.vmBeforeAttachL3Network(vm, l3); } }); FlowChain flowChain = FlowChainBuilder.newSimpleFlowChain(); setFlowMarshaller(flowChain); flowChain.setName(String.format("attachNic-vm-%s-nic-%s", self.getUuid(), vmNicVO.getUuid())); flowChain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); flowChain.then(new Flow() { String __name__ = "update-nic"; @Override public void run(FlowTrigger trigger, Map data) { final BitSet deviceIdBitmap = new BitSet(512); for (VmNicInventory nic : spec.getVmInventory().getVmNics()) { deviceIdBitmap.set(nic.getDeviceId()); } int deviceId = deviceIdBitmap.nextClearBit(0); deviceIdBitmap.set(deviceId); String internalName = VmNicVO.generateNicInternalName(spec.getVmInventory().getInternalId(), deviceId); UpdateQuery.New(VmNicVO.class) .eq(VmNicVO_.uuid, vmNicUuid) .set(VmNicVO_.vmInstanceUuid, self.getUuid()) .set(VmNicVO_.deviceId, deviceId) .set(VmNicVO_.internalName, internalName) .set(VmNicVO_.hypervisorType, spec.getVmInventory().getHypervisorType()) .update(); vmNicVO.setVmInstanceUuid(self.getUuid()); vmNicVO.setDeviceId(deviceId); vmNicVO.setInternalName(internalName); vmNicVO.setHypervisorType(spec.getVmInventory().getHypervisorType()); spec.getDestNics().add(VmNicInventory.valueOf(vmNicVO)); trigger.next(); } @Override public void rollback(FlowRollback trigger, Map data) { UpdateQuery.New(VmNicVO.class) .eq(VmNicVO_.uuid, vmNicUuid) .set(VmNicVO_.vmInstanceUuid, null) .update(); trigger.rollback(); } }); flowChain.then(new VmSetDefaultL3NetworkOnAttachingFlow()); if (self.getState() == VmInstanceState.Running) { flowChain.then(new VmInstantiateResourceOnAttachingNicFlow()); flowChain.then(new VmAttachNicOnHypervisorFlow()); } flowChain.done(new FlowDoneHandler(chain) { @Override public void handle(Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmAfterAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmAfterAttachL3NetworkExtensionPoint>() { @Override public void run(VmAfterAttachL3NetworkExtensionPoint arg) { arg.vmAfterAttachL3Network(vm, l3); } }); VmNicInventory nic = spec.getDestNics().get(0); completion.success(nic); chain.next(); } }).error(new FlowErrorHandler(chain) { @Override public void handle(final ErrorCode errCode, Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmFailToAttachL3NetworkExtensionPoint.class), new ForEachFunction<VmFailToAttachL3NetworkExtensionPoint>() { @Override public void run(VmFailToAttachL3NetworkExtensionPoint arg) { arg.vmFailToAttachL3Network(vm, l3, errCode); } }); setDefaultL3Network.rollback(); completion.fail(errCode); chain.next(); } }).start(); } @Override public String getName() { return String.format("attachNic-vm-%s-nic-%s", self.getUuid(), msg.getVmNicUuid()); } }); } private void handle(final VmAttachNicMsg msg) { final VmAttachNicReply reply = new VmAttachNicReply(); attachNic(msg, Collections.singletonList(msg.getL3NetworkUuid()), new ReturnValueCompletion<VmNicInventory>(msg) { @Override public 
void success(VmNicInventory nic) { reply.setInventroy(nic); bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); } }); }
private void callVmJustBeforeDeleteFromDbExtensionPoint() { VmInstanceInventory inv = getSelfInventory(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmJustBeforeDeleteFromDbExtensionPoint.class), p -> p.vmJustBeforeDeleteFromDb(inv)); }
protected void doDestroy(final VmInstanceDeletionPolicy deletionPolicy, Message msg, final Completion completion) { final VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.beforeDestroyVm(inv); destroy(deletionPolicy, msg, new Completion(completion) { @Override public void success() { extEmitter.afterDestroyVm(inv); logger.debug(String.format("successfully deleted vm instance[name:%s, uuid:%s]", self.getName(), self.getUuid())); if (deletionPolicy == VmInstanceDeletionPolicy.Direct) { changeVmStateInDb(VmInstanceStateEvent.destroyed); callVmJustBeforeDeleteFromDbExtensionPoint(); dbf.removeCollection(self.getVmCdRoms(), VmCdRomVO.class); dbf.remove(getSelf()); } else if (deletionPolicy == VmInstanceDeletionPolicy.DBOnly || deletionPolicy == VmInstanceDeletionPolicy.KeepVolume) { new SQLBatch() { @Override protected void scripts() { callVmJustBeforeDeleteFromDbExtensionPoint(); sql(VmNicVO.class).eq(VmNicVO_.vmInstanceUuid, self.getUuid()).hardDelete(); sql(VolumeVO.class).eq(VolumeVO_.vmInstanceUuid, self.getUuid()) .eq(VolumeVO_.type, VolumeType.Root) .hardDelete(); sql(VmCdRomVO.class).eq(VmCdRomVO_.vmInstanceUuid, self.getUuid()).hardDelete(); sql(VmInstanceVO.class).eq(VmInstanceVO_.uuid, self.getUuid()).hardDelete(); } }.execute(); } else if (deletionPolicy == VmInstanceDeletionPolicy.Delay) { changeVmStateInDb(VmInstanceStateEvent.destroyed, ()-> self.setHostUuid(null)); } else if (deletionPolicy == VmInstanceDeletionPolicy.Never) { logger.warn(String.format("the vm[uuid:%s] is deleted, but by its deletion policy[Never]," + " the root volume is not deleted on the primary storage", self.getUuid())); changeVmStateInDb(VmInstanceStateEvent.destroyed, ()-> self.setHostUuid(null)); } completion.success(); } @Override public void fail(ErrorCode errorCode) { extEmitter.failedToDestroyVm(inv, errorCode); logger.debug(String.format("failed to delete vm instance[name:%s, uuid:%s], because %s", self.getName(), self.getUuid(), errorCode)); completion.fail(errorCode); } }); }
private VmInstanceDeletionPolicy getVmDeletionPolicy(final VmInstanceDeletionMsg msg) { if (self.getState() == VmInstanceState.Created) { return VmInstanceDeletionPolicy.DBOnly; } return msg.getDeletionPolicy() == null ?
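// no policy supplied on the message: fall back to the policy the deletion
// policy manager resolves for this VM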
deletionPolicyMgr.getDeletionPolicy(self.getUuid()) : VmInstanceDeletionPolicy.valueOf(msg.getDeletionPolicy()); } private void handle(final VmInstanceDeletionMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { final VmInstanceDeletionReply r = new VmInstanceDeletionReply(); final VmInstanceDeletionPolicy deletionPolicy = getVmDeletionPolicy(msg); self = dbf.findByUuid(self.getUuid(), VmInstanceVO.class); if (self == null || self.getState() == VmInstanceState.Destroyed) { // the vm has been destroyed, most likely by rollback if (deletionPolicy != VmInstanceDeletionPolicy.DBOnly && deletionPolicy != VmInstanceDeletionPolicy.KeepVolume) { bus.reply(msg, r); chain.next(); return; } } destroyHook(deletionPolicy, msg, new Completion(msg, chain) { @Override public void success() { bus.reply(msg, r); chain.next(); } @Override public void fail(ErrorCode errorCode) { r.setError(errorCode); bus.reply(msg, r); chain.next(); } }); } @Override public String getName() { return "delete-vm"; } }); } protected void destroyHook(VmInstanceDeletionPolicy deletionPolicy, Message msg, Completion completion) { doDestroy(deletionPolicy, msg, completion); } private void handle(final RebootVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("reboot-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { rebootVm(msg, chain); } }); } private void rebootVm(final RebootVmInstanceMsg msg, final SyncTaskChain chain) { rebootVm(msg, new Completion(chain) { @Override public void success() { RebootVmInstanceReply reply = new RebootVmInstanceReply(); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); reply.setInventory(inv); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { RebootVmInstanceReply reply = new RebootVmInstanceReply(); reply.setError(err(VmErrors.REBOOT_ERROR, errorCode, errorCode.getDetails())); bus.reply(msg, reply); chain.next(); } }); } private void handle(final StopVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("stop-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { stopVm(msg, chain); } }); } private void stopVm(final StopVmInstanceMsg msg, final SyncTaskChain chain) { stopVm(msg, new Completion(chain) { @Override public void success() { StopVmInstanceReply reply = new StopVmInstanceReply(); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); reply.setInventory(inv); bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { StopVmInstanceReply reply = new StopVmInstanceReply(); reply.setError(err(VmErrors.STOP_ERROR, errorCode, errorCode.getDetails())); bus.reply(msg, reply); chain.next(); } }); } private void handle(final StartVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("start-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { startVm(msg, chain); } }); } private void createTemplateFromRootVolume(final CreateTemplateFromVmRootVolumeMsg msg, final SyncTaskChain chain) { boolean callNext = true; try { refreshVO(); ErrorCode 
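// Note the callNext guard below: the finally block releases the per-VM task queue
// on any synchronous early exit, while the async bus callback takes over calling
// chain.next() once the send has been issued.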
allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { bus.replyErrorByMessageType(msg, allowed); return; } final CreateTemplateFromVmRootVolumeReply reply = new CreateTemplateFromVmRootVolumeReply(); CreateTemplateFromVolumeOnPrimaryStorageMsg cmsg = new CreateTemplateFromVolumeOnPrimaryStorageMsg(); cmsg.setVolumeInventory(msg.getRootVolumeInventory()); cmsg.setBackupStorageUuid(msg.getBackupStorageUuid()); cmsg.setImageInventory(msg.getImageInventory()); bus.makeTargetServiceIdByResourceUuid(cmsg, PrimaryStorageConstant.SERVICE_ID, msg.getRootVolumeInventory().getPrimaryStorageUuid()); bus.send(cmsg, new CloudBusCallBack(chain) { private void fail(ErrorCode errorCode) { reply.setError(operr(errorCode, "failed to create template from root volume[uuid:%s] on primary storage[uuid:%s]", msg.getRootVolumeInventory().getUuid(), msg.getRootVolumeInventory().getPrimaryStorageUuid())); logger.warn(reply.getError().getDetails()); bus.reply(msg, reply); } @Override public void run(MessageReply r) { if (!r.isSuccess()) { fail(r.getError()); } else { CreateTemplateFromVolumeOnPrimaryStorageReply creply = (CreateTemplateFromVolumeOnPrimaryStorageReply) r; reply.setInstallPath(creply.getTemplateBackupStorageInstallPath()); reply.setFormat(creply.getFormat()); bus.reply(msg, reply); } chain.next(); } }); callNext = false; } finally { if (callNext) { chain.next(); } } }
private void handle(final CreateTemplateFromVmRootVolumeMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("create-template-from-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { createTemplateFromRootVolume(msg, chain); } }); }
private void handle(final AttachNicToVmMsg msg) { ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { bus.replyErrorByMessageType(msg, allowed); return; } AttachNicToVmOnHypervisorMsg amsg = new AttachNicToVmOnHypervisorMsg(); amsg.setVmUuid(self.getUuid()); amsg.setHostUuid(self.getHostUuid()); amsg.setNics(msg.getNics()); bus.makeTargetServiceIdByResourceUuid(amsg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(amsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (self.getDefaultL3NetworkUuid() == null) { self.setDefaultL3NetworkUuid(msg.getNics().get(0).getL3NetworkUuid()); self = dbf.updateAndRefresh(self); logger.debug(String.format("set the VM[uuid: %s]'s default L3 network[uuid:%s], as it didn't have one before", self.getUuid(), self.getDefaultL3NetworkUuid())); } AttachNicToVmReply r = new AttachNicToVmReply(); if (!reply.isSuccess()) { r.setError(err(VmErrors.ATTACH_NETWORK_ERROR, reply.getError(), reply.getError().getDetails())); } bus.reply(msg, r); } }); }
private void handle(final DestroyVmInstanceMsg msg) { final DestroyVmInstanceReply reply = new DestroyVmInstanceReply(); final String issuer = VmInstanceVO.class.getSimpleName(); VmDeletionStruct s = new VmDeletionStruct(); if (msg.getDeletionPolicy() == null) { s.setDeletionPolicy(deletionPolicyMgr.getDeletionPolicy(self.getUuid())); } else { s.setDeletionPolicy(msg.getDeletionPolicy()); } s.setInventory(getSelfInventory()); final List<VmDeletionStruct> ctx = list(s); final FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("destroy-vm-%s", self.getUuid())); chain.then(new NoRollbackFlow() { @Override public void
run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new NopeCompletion()); bus.reply(msg, reply); } }).error(new FlowErrorHandler(msg) { @Override public void handle(final ErrorCode errCode, Map data) { reply.setError(errCode); bus.reply(msg, reply); } }).start(); } protected void handle(final ChangeVmStateMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("change-vm-state-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override @Deferred public void run(SyncTaskChain chain) { refreshVO(); Defer.defer(() -> { ChangeVmStateReply reply = new ChangeVmStateReply(); bus.reply(msg, reply); }); if (self == null) { // vm has been deleted by previous request // this happens when delete vm request queued before // change state request from vm tracer. // in this case, ignore change state request logger.debug(String.format("vm[uuid:%s] has been deleted, ignore change vm state request from vm tracer", msg.getVmInstanceUuid())); chain.next(); return; } changeVmStateInDb(VmInstanceStateEvent.valueOf(msg.getStateEvent())); chain.next(); } }); } protected void setFlowMarshaller(FlowChain chain) { chain.setFlowMarshaller(new FlowMarshaller() { @Override public Flow marshalTheNextFlow(String previousFlowClassName, String nextFlowClassName, FlowChain chain, Map data) { Flow nflow = null; for (MarshalVmOperationFlowExtensionPoint mext : pluginRgty.getExtensionList(MarshalVmOperationFlowExtensionPoint.class)) { VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); nflow = mext.marshalVmOperationFlow(previousFlowClassName, nextFlowClassName, chain, spec); if (nflow != null) { logger.debug(String.format("a VM[uuid: %s, operation: %s] operation flow[%s] is changed to the flow[%s] by %s", self.getUuid(), spec.getCurrentVmOperation(), nextFlowClassName, nflow.getClass(), mext.getClass())); break; } } return nflow; } }); } protected void selectBootOrder(VmInstanceSpec spec) { if (spec.getCurrentVmOperation() == null) { throw new CloudRuntimeException("selectBootOrder must be called after VmOperation is set"); } List<CdRomSpec> cdRomSpecs = spec.getCdRomSpecs().stream() .filter(cdRom -> cdRom.getImageUuid() != null) .collect(Collectors.toList()); if (spec.getCurrentVmOperation() == VmOperation.NewCreate && !cdRomSpecs.isEmpty()) { ImageVO imageVO = dbf.findByUuid(spec.getVmInventory().getImageUuid(), ImageVO.class); assert imageVO != null; if(imageVO.getMediaType() == ImageMediaType.ISO) { spec.setBootOrders(list(VmBootDevice.CdRom.toString())); } else { spec.setBootOrders(list(VmBootDevice.HardDisk.toString())); } } else { String order = VmSystemTags.BOOT_ORDER.getTokenByResourceUuid(self.getUuid(), VmSystemTags.BOOT_ORDER_TOKEN); if (order == null) { spec.setBootOrders(list(VmBootDevice.HardDisk.toString())); } else { spec.setBootOrders(list(order.split(","))); // set vm to boot from cdrom once only if (VmSystemTags.CDROM_BOOT_ONCE.hasTag(self.getUuid(), VmInstanceVO.class)) { VmSystemTags.BOOT_ORDER.deleteInherentTag(self.getUuid()); VmSystemTags.CDROM_BOOT_ONCE.deleteInherentTag(self.getUuid()); } 
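// Deleting both tags above makes CDROM_BOOT_ONCE effective for exactly one boot;
// the next start falls back to the HardDisk default unless BOOT_ORDER is set again.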
} } } protected void instantiateVmFromNewCreate(final InstantiateNewCreatedVmInstanceMsg msg, final SyncTaskChain taskChain) { refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } error = extEmitter.preStartNewCreatedVm(msg.getVmInstanceInventory()); if (error != null) { throw new OperationFailureException(error); } InstantiateNewCreatedVmInstanceReply reply = new InstantiateNewCreatedVmInstanceReply(); instantiateVmFromNewCreate(InstantiateVmFromNewCreatedStruct.fromMessage(msg), new Completion(msg, taskChain) { @Override public void success() { self = dbf.reload(self); reply.setVmInventory(getSelfInventory()); bus.reply(msg, reply); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); taskChain.next(); } }); } protected void handle(final InstantiateNewCreatedVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("create-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { instantiateVmFromNewCreate(msg, chain); } }); } @Transactional(readOnly = true) protected List<ImageInventory> getImageCandidatesForVm(ImageMediaType type) { String psUuid = getSelfInventory().getRootVolume().getPrimaryStorageUuid(); PrimaryStorageVO ps = dbf.getEntityManager().find(PrimaryStorageVO.class, psUuid); PrimaryStorageType psType = PrimaryStorageType.valueOf(ps.getType()); List<String> bsUuids = psType.findBackupStorage(psUuid); if (bsUuids == null) { String sql = "select img" + " from ImageVO img, ImageBackupStorageRefVO ref, BackupStorageVO bs, BackupStorageZoneRefVO bsRef" + " where ref.imageUuid = img.uuid" + " and img.mediaType = :imgType" + " and img.status = :status" + " and img.system = :system" + " and bs.uuid = ref.backupStorageUuid" + " and bs.type in (:bsTypes)" + " and bs.uuid = bsRef.backupStorageUuid" + " and bsRef.zoneUuid = :zoneUuid"; TypedQuery<ImageVO> q = dbf.getEntityManager().createQuery(sql, ImageVO.class); q.setParameter("zoneUuid", getSelfInventory().getZoneUuid()); if (type != null) { q.setParameter("imgType", type); } q.setParameter("status", ImageStatus.Ready); q.setParameter("system", false); q.setParameter("bsTypes", hostAllocatorMgr.getBackupStorageTypesByPrimaryStorageTypeFromMetrics(ps.getType())); return ImageInventory.valueOf(q.getResultList()); } else if (!bsUuids.isEmpty()) { String sql = "select img" + " from ImageVO img, ImageBackupStorageRefVO ref, BackupStorageVO bs, BackupStorageZoneRefVO bsRef" + " where ref.imageUuid = img.uuid" + " and img.mediaType = :imgType" + " and img.status = :status" + " and img.system = :system" + " and bs.uuid = ref.backupStorageUuid" + " and bs.uuid in (:bsUuids)" + " and bs.uuid = bsRef.backupStorageUuid" + " and bsRef.zoneUuid = :zoneUuid"; TypedQuery<ImageVO> q = dbf.getEntityManager().createQuery(sql, ImageVO.class); q.setParameter("zoneUuid", getSelfInventory().getZoneUuid()); if (type != null) { q.setParameter("imgType", type); } q.setParameter("status", ImageStatus.Ready); q.setParameter("system", false); q.setParameter("bsUuids", bsUuids); return ImageInventory.valueOf(q.getResultList()); } else { return new ArrayList<>(); } } protected void handleApiMessage(APIMessage msg) { if (msg instanceof APIStopVmInstanceMsg) { handle((APIStopVmInstanceMsg) msg); } else if (msg instanceof 
APIRebootVmInstanceMsg) { handle((APIRebootVmInstanceMsg) msg); } else if (msg instanceof APIDestroyVmInstanceMsg) { handle((APIDestroyVmInstanceMsg) msg); } else if (msg instanceof APIStartVmInstanceMsg) { handle((APIStartVmInstanceMsg) msg); } else if (msg instanceof APIMigrateVmMsg) { handle((APIMigrateVmMsg) msg); } else if (msg instanceof APIAttachL3NetworkToVmMsg) { handle((APIAttachL3NetworkToVmMsg) msg); } else if(msg instanceof APIAttachVmNicToVmMsg) { handle((APIAttachVmNicToVmMsg) msg); } else if (msg instanceof APIGetVmMigrationCandidateHostsMsg) { handle((APIGetVmMigrationCandidateHostsMsg) msg); } else if (msg instanceof APIGetVmAttachableDataVolumeMsg) { handle((APIGetVmAttachableDataVolumeMsg) msg); } else if (msg instanceof APIUpdateVmInstanceMsg) { handle((APIUpdateVmInstanceMsg) msg); } else if (msg instanceof APIChangeInstanceOfferingMsg) { handle((APIChangeInstanceOfferingMsg) msg); } else if (msg instanceof APIDetachL3NetworkFromVmMsg) { handle((APIDetachL3NetworkFromVmMsg) msg); } else if (msg instanceof APIGetVmAttachableL3NetworkMsg) { handle((APIGetVmAttachableL3NetworkMsg) msg); } else if (msg instanceof APIAttachIsoToVmInstanceMsg) { handle((APIAttachIsoToVmInstanceMsg) msg); } else if (msg instanceof APIDetachIsoFromVmInstanceMsg) { handle((APIDetachIsoFromVmInstanceMsg) msg); } else if (msg instanceof APIExpungeVmInstanceMsg) { handle((APIExpungeVmInstanceMsg) msg); } else if (msg instanceof APIRecoverVmInstanceMsg) { handle((APIRecoverVmInstanceMsg) msg); } else if (msg instanceof APISetVmBootOrderMsg) { handle((APISetVmBootOrderMsg) msg); } else if (msg instanceof APISetVmConsolePasswordMsg) { handle((APISetVmConsolePasswordMsg) msg); } else if (msg instanceof APIGetVmBootOrderMsg) { handle((APIGetVmBootOrderMsg) msg); } else if (msg instanceof APIDeleteVmConsolePasswordMsg) { handle((APIDeleteVmConsolePasswordMsg) msg); } else if (msg instanceof APIGetVmConsolePasswordMsg) { handle((APIGetVmConsolePasswordMsg) msg); } else if (msg instanceof APIGetVmConsoleAddressMsg) { handle((APIGetVmConsoleAddressMsg) msg); } else if (msg instanceof APISetVmHostnameMsg) { handle((APISetVmHostnameMsg) msg); } else if (msg instanceof APISetVmBootModeMsg) { handle((APISetVmBootModeMsg) msg); } else if (msg instanceof APIDeleteVmBootModeMsg) { handle((APIDeleteVmBootModeMsg) msg); } else if (msg instanceof APIDeleteVmHostnameMsg) { handle((APIDeleteVmHostnameMsg) msg); } else if (msg instanceof APISetVmStaticIpMsg) { handle((APISetVmStaticIpMsg) msg); } else if (msg instanceof APIDeleteVmStaticIpMsg) { handle((APIDeleteVmStaticIpMsg) msg); } else if (msg instanceof APIGetVmHostnameMsg) { handle((APIGetVmHostnameMsg) msg); } else if (msg instanceof APIGetVmStartingCandidateClustersHostsMsg) { handle((APIGetVmStartingCandidateClustersHostsMsg) msg); } else if (msg instanceof APIGetVmCapabilitiesMsg) { handle((APIGetVmCapabilitiesMsg) msg); } else if (msg instanceof APISetVmSshKeyMsg) { handle((APISetVmSshKeyMsg) msg); } else if (msg instanceof APIGetVmSshKeyMsg) { handle((APIGetVmSshKeyMsg) msg); } else if (msg instanceof APIDeleteVmSshKeyMsg) { handle((APIDeleteVmSshKeyMsg) msg); } else if (msg instanceof APIGetCandidateIsoForAttachingVmMsg) { handle((APIGetCandidateIsoForAttachingVmMsg) msg); } else if (msg instanceof APIPauseVmInstanceMsg) { handle((APIPauseVmInstanceMsg) msg); } else if (msg instanceof APIResumeVmInstanceMsg) { handle((APIResumeVmInstanceMsg) msg); } else if (msg instanceof APIReimageVmInstanceMsg) { handle((APIReimageVmInstanceMsg) msg); } else if (msg 
instanceof APIDeleteVmCdRomMsg) { handle((APIDeleteVmCdRomMsg) msg); } else if (msg instanceof APICreateVmCdRomMsg) { handle((APICreateVmCdRomMsg) msg); } else if (msg instanceof APIUpdateVmCdRomMsg) { handle((APIUpdateVmCdRomMsg) msg); } else if (msg instanceof APISetVmInstanceDefaultCdRomMsg) { handle((APISetVmInstanceDefaultCdRomMsg) msg); } else { VmInstanceBaseExtensionFactory ext = vmMgr.getVmInstanceBaseExtensionFactory(msg); if (ext != null) { VmInstance v = ext.getVmInstance(self); v.handleMessage(msg); } else { bus.dealWithUnknownMessage(msg); } } } private void handle(APIGetCandidateIsoForAttachingVmMsg msg) { APIGetCandidateIsoForAttachingVmReply reply = new APIGetCandidateIsoForAttachingVmReply(); if (self.getState() != VmInstanceState.Running && self.getState() != VmInstanceState.Stopped) { reply.setInventories(new ArrayList<>()); bus.reply(msg, reply); return; } List<ImageInventory> result = getImageCandidatesForVm(ImageMediaType.ISO); List<String> vmIsoList = IsoOperator.getIsoUuidByVmUuid(msg.getVmInstanceUuid()); result = result.stream() .filter(iso -> !vmIsoList.contains(iso.getUuid())) .collect(Collectors.toList()); for (VmAttachIsoExtensionPoint ext : pluginRgty.getExtensionList(VmAttachIsoExtensionPoint.class)) { ext.filtCandidateIsos(msg.getVmInstanceUuid(), result); } reply.setInventories(result); bus.reply(msg, reply); } private void handle(APIGetVmCapabilitiesMsg msg) { APIGetVmCapabilitiesReply reply = new APIGetVmCapabilitiesReply(); VmCapabilities capabilities = new VmCapabilities(); checkPrimaryStorageCapabilities(capabilities); checkImageMediaTypeCapabilities(capabilities); extEmitter.getVmCapabilities(getSelfInventory(), capabilities); reply.setCapabilities(capabilities.toMap()); bus.reply(msg, reply); } private void checkPrimaryStorageCapabilities(VmCapabilities capabilities) { VolumeInventory rootVolume = getSelfInventory().getRootVolume(); if (rootVolume == null) { capabilities.setSupportLiveMigration(false); capabilities.setSupportVolumeMigration(false); } else { SimpleQuery<PrimaryStorageVO> q = dbf.createQuery(PrimaryStorageVO.class); q.select(PrimaryStorageVO_.type); q.add(PrimaryStorageVO_.uuid, Op.EQ, rootVolume.getPrimaryStorageUuid()); String type = q.findValue(); PrimaryStorageType psType = PrimaryStorageType.valueOf(type); if (self.getState() != VmInstanceState.Running) { capabilities.setSupportLiveMigration(false); } else { capabilities.setSupportLiveMigration(psType.isSupportVmLiveMigration()); } if (self.getState() != VmInstanceState.Stopped) { capabilities.setSupportVolumeMigration(false); } else { capabilities.setSupportVolumeMigration(psType.isSupportVolumeMigration()); } } } private void checkImageMediaTypeCapabilities(VmCapabilities capabilities) { ImageVO vo = null; ImageMediaType imageMediaType; if (self.getImageUuid() != null) { vo = dbf.findByUuid(self.getImageUuid(), ImageVO.class); } if (vo == null) { imageMediaType = null; } else { imageMediaType = vo.getMediaType(); } if (imageMediaType == ImageMediaType.ISO || imageMediaType == null) { capabilities.setSupportReimage(false); } else { capabilities.setSupportReimage(true); } } private void handle(APIGetVmHostnameMsg msg) { String hostname = VmSystemTags.HOSTNAME.getTokenByResourceUuid(self.getUuid(), VmSystemTags.HOSTNAME_TOKEN); APIGetVmHostnameReply reply = new APIGetVmHostnameReply(); reply.setHostname(hostname); bus.reply(msg, reply); } private void handle(final APIDeleteVmStaticIpMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { 
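// Every mutating handler returns the same per-VM sync signature, so operations
// on a single VM instance are serialized through one task queue.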
return syncThreadName; } @Override public void run(SyncTaskChain chain) { APIDeleteVmStaticIpEvent evt = new APIDeleteVmStaticIpEvent(msg.getId()); new StaticIpOperator().deleteStaticIpByVmUuidAndL3Uuid(self.getUuid(), msg.getL3NetworkUuid()); bus.publish(evt); chain.next(); } @Override public String getName() { return "delete-static-ip"; } }); } private void handle(final APISetVmStaticIpMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { setStaticIp(msg, new NoErrorCompletion(msg, chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return "set-static-ip"; } }); } private void setStaticIp(final APISetVmStaticIpMsg msg, final NoErrorCompletion completion) { refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } final APISetVmStaticIpEvent evt = new APISetVmStaticIpEvent(msg.getId()); changeVmIp(msg.getL3NetworkUuid(), msg.getIp(), new Completion(msg, completion) { @Override public void success() { new StaticIpOperator().setStaticIp(self.getUuid(), msg.getL3NetworkUuid(), msg.getIp()); bus.publish(evt); completion.done(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); completion.done(); } }); } private void handle(APISetVmBootModeMsg msg) { SystemTagCreator creator = VmSystemTags.BOOT_MODE.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map( e(VmSystemTags.BOOT_MODE_TOKEN, msg.getBootMode()) )); creator.recreate = true; creator.create(); APISetVmBootModeEvent evt = new APISetVmBootModeEvent(msg.getId()); bus.publish(evt); } private void handle(APIDeleteVmBootModeMsg msg) { APIDeleteVmBootModeEvent evt = new APIDeleteVmBootModeEvent(msg.getId()); VmSystemTags.BOOT_MODE.delete(self.getUuid()); bus.publish(evt); } private void handle(APIDeleteVmHostnameMsg msg) { APIDeleteVmHostnameEvent evt = new APIDeleteVmHostnameEvent(msg.getId()); VmSystemTags.HOSTNAME.delete(self.getUuid()); bus.publish(evt); } private void handle(APISetVmHostnameMsg msg) { if (!VmSystemTags.HOSTNAME.hasTag(self.getUuid())) { SystemTagCreator creator = VmSystemTags.HOSTNAME.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map( e(VmSystemTags.HOSTNAME_TOKEN, msg.getHostname()) )); creator.create(); } else { VmSystemTags.HOSTNAME.update(self.getUuid(), VmSystemTags.HOSTNAME.instantiateTag( map(e(VmSystemTags.HOSTNAME_TOKEN, msg.getHostname())) )); } APISetVmHostnameEvent evt = new APISetVmHostnameEvent(msg.getId()); bus.publish(evt); } private void handle(final APIGetVmConsoleAddressMsg msg) { refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { throw new OperationFailureException(error); } final APIGetVmConsoleAddressReply creply = new APIGetVmConsoleAddressReply(); GetVmConsoleAddressFromHostMsg hmsg = new GetVmConsoleAddressFromHostMsg(); hmsg.setHostUuid(self.getHostUuid()); hmsg.setVmInstanceUuid(self.getUuid()); bus.makeTargetServiceIdByResourceUuid(hmsg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(hmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { creply.setError(reply.getError()); } else { GetVmConsoleAddressFromHostReply hr = reply.castReply(); creply.setHostIp(hr.getHostIp()); creply.setPort(hr.getPort()); 
creply.setProtocol(hr.getProtocol()); } bus.reply(msg, creply); } }); } private void handle(APIGetVmBootOrderMsg msg) { APIGetVmBootOrderReply reply = new APIGetVmBootOrderReply(); String order = VmSystemTags.BOOT_ORDER.getTokenByResourceUuid(self.getUuid(), VmSystemTags.BOOT_ORDER_TOKEN); if (order != null) { reply.setOrder(list(order.split(","))); } else if (order == null && !IsoOperator.isIsoAttachedToVm(msg.getUuid())) { reply.setOrder(list(VmBootDevice.HardDisk.toString())); } else { reply.setOrder(list(VmBootDevice.HardDisk.toString(), VmBootDevice.CdRom.toString())); } bus.reply(msg, reply); } private void handle(APISetVmBootOrderMsg msg) { APISetVmBootOrderEvent evt = new APISetVmBootOrderEvent(msg.getId()); if (msg.getBootOrder() != null) { SystemTagCreator creator = VmSystemTags.BOOT_ORDER.newSystemTagCreator(self.getUuid()); creator.inherent = true; creator.recreate = true; creator.setTagByTokens(map(e(VmSystemTags.BOOT_ORDER_TOKEN, StringUtils.join(msg.getBootOrder(), ",")))); creator.create(); } else { VmSystemTags.BOOT_ORDER.deleteInherentTag(self.getUuid()); } boolean cdromBootOnce = false; if (msg.getSystemTags() != null && !msg.getSystemTags().isEmpty()) { Optional<String> opt = msg.getSystemTags().stream().filter(s -> VmSystemTags.CDROM_BOOT_ONCE.isMatch(s)).findAny(); if (opt.isPresent()) { cdromBootOnce = Boolean.parseBoolean( VmSystemTags.CDROM_BOOT_ONCE.getTokenByTag(opt.get(), VmSystemTags.CDROM_BOOT_ONCE_TOKEN) ); } } if (cdromBootOnce) { SystemTagCreator creator = VmSystemTags.CDROM_BOOT_ONCE.newSystemTagCreator(self.getUuid()); creator.inherent = true; creator.recreate = true; creator.setTagByTokens(map(e(VmSystemTags.CDROM_BOOT_ONCE_TOKEN, String.valueOf(true)))); creator.create(); } else { VmSystemTags.CDROM_BOOT_ONCE.deleteInherentTag(self.getUuid()); } evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APISetVmConsolePasswordMsg msg) { APISetVmConsolePasswordEvent evt = new APISetVmConsolePasswordEvent(msg.getId()); SystemTagCreator creator = VmSystemTags.CONSOLE_PASSWORD.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map(e(VmSystemTags.CONSOLE_PASSWORD_TOKEN, msg.getConsolePassword()))); creator.recreate = true; creator.create(); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APIGetVmConsolePasswordMsg msg) { APIGetVmConsolePasswordReply reply = new APIGetVmConsolePasswordReply(); String consolePassword = VmSystemTags.CONSOLE_PASSWORD.getTokenByResourceUuid(self.getUuid(), VmSystemTags.CONSOLE_PASSWORD_TOKEN); reply.setConsolePassword(consolePassword); bus.reply(msg, reply); } private void handle(APIDeleteVmConsolePasswordMsg msg) { APIDeleteVmConsolePasswordEvent evt = new APIDeleteVmConsolePasswordEvent(msg.getId()); VmSystemTags.CONSOLE_PASSWORD.delete(self.getUuid()); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APISetVmSshKeyMsg msg) { APISetVmSshKeyEvent evt = new APISetVmSshKeyEvent(msg.getId()); SystemTagCreator creator = VmSystemTags.SSHKEY.newSystemTagCreator(self.getUuid()); creator.setTagByTokens(map(e(VmSystemTags.SSHKEY_TOKEN, msg.getSshKey()))); creator.recreate = true; creator.create(); evt.setInventory(getSelfInventory()); bus.publish(evt); } private void handle(APIGetVmSshKeyMsg msg) { APIGetVmSshKeyReply reply = new APIGetVmSshKeyReply(); String sshKey = VmSystemTags.SSHKEY.getTokenByResourceUuid(self.getUuid(), VmSystemTags.SSHKEY_TOKEN); reply.setSshKey(sshKey); bus.reply(msg, reply); } private void handle(APIDeleteVmSshKeyMsg msg) { 
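// SSH key, console password, hostname and boot settings are all persisted as
// system tags keyed by the VM uuid, so deleting a setting just removes its tag.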
APIDeleteVmSshKeyEvent evt = new APIDeleteVmSshKeyEvent(msg.getId()); VmSystemTags.SSHKEY.delete(self.getUuid()); evt.setInventory(getSelfInventory()); bus.publish(evt); } private boolean ipExists(final String l3uuid, final String ipAddress) { SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class); q.add(VmNicVO_.l3NetworkUuid, Op.EQ, l3uuid); q.add(VmNicVO_.ip, Op.EQ, ipAddress); return q.isExists(); } // If the VM is assigned static IP and it is now occupied, we will // remove the static IP tag so that it can acquire IP dynamically. // c.f. issue #1639 private void checkIpConflict(final String vmUuid) { StaticIpOperator ipo = new StaticIpOperator(); for (Map.Entry<String, String> entry : ipo.getStaticIpbyVmUuid(vmUuid).entrySet()) { if (ipExists(entry.getKey(), entry.getValue())) { ipo.deleteStaticIpByVmUuidAndL3Uuid(vmUuid, entry.getKey()); } } } private void recoverVm(final Completion completion) { final VmInstanceInventory vm = getSelfInventory(); final List<RecoverVmExtensionPoint> exts = pluginRgty.getExtensionList(RecoverVmExtensionPoint.class); for (RecoverVmExtensionPoint ext : exts) { ext.preRecoverVm(vm); } CollectionUtils.forEach(exts, new ForEachFunction<RecoverVmExtensionPoint>() { @Override public void run(RecoverVmExtensionPoint ext) { ext.beforeRecoverVm(vm); } }); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("recover-vm-%s", self.getUuid())); chain.then(new ShareFlow() { @Override public void setup() { flow(new NoRollbackFlow() { String __name__ = "check-ip-conflict"; @Override public void run(FlowTrigger trigger, Map data) { checkIpConflict(vm.getUuid()); trigger.next(); } }); flow(new NoRollbackFlow() { String __name__ = "recover-root-volume"; @Override public void run(final FlowTrigger trigger, Map data) { RecoverVolumeMsg msg = new RecoverVolumeMsg(); msg.setVolumeUuid(self.getRootVolumeUuid()); bus.makeTargetServiceIdByResourceUuid(msg, VolumeConstant.SERVICE_ID, self.getRootVolumeUuid()); bus.send(msg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); } else { trigger.next(); } } }); } }); flow(new NoRollbackFlow() { String __name__ = "recover-vm"; @Override public void run(FlowTrigger trigger, Map data) { self = changeVmStateInDb(VmInstanceStateEvent.stopped); CollectionUtils.forEach(exts, new ForEachFunction<RecoverVmExtensionPoint>() { @Override public void run(RecoverVmExtensionPoint ext) { ext.afterRecoverVm(vm); } }); trigger.next(); } }); done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { completion.success(); } }); error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }); } }).start(); } private void handle(final APIRecoverVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIRecoverVmInstanceEvent evt = new APIRecoverVmInstanceEvent(msg.getId()); refreshVO(); ErrorCode error = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (error != null) { evt.setError(error); bus.publish(evt); chain.next(); return; } recoverVm(new Completion(msg, chain) { @Override public void success() { evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } 
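// recoverVm (defined above) re-checks static-IP conflicts, recovers the root
// volume, then moves the VM back to Stopped and fires the afterRecoverVm extensions.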
@Override public String getName() { return "recover-vm"; } }); } private void handle(final APIExpungeVmInstanceMsg msg) { final APIExpungeVmInstanceEvent evt = new APIExpungeVmInstanceEvent(msg.getId()); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { expunge(msg, new Completion(msg, chain) { @Override public void success() { bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return "expunge-vm-by-api"; } }); } private void handle(final DetachIsoFromVmInstanceMsg msg) { DetachIsoFromVmInstanceReply reply = new DetachIsoFromVmInstanceReply(); detachIso(msg.getIsoUuid() ,new Completion(msg) { @Override public void success() { bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(final APIDetachIsoFromVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIDetachIsoFromVmInstanceEvent evt = new APIDetachIsoFromVmInstanceEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } detachIso(msg.getIsoUuid() ,new Completion(msg, chain) { @Override public void success() { self = dbf.reload(self); evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return String.format("detach-iso-from-vm-%s", self.getUuid()); } }); } private void detachIso(final String isoUuid, final Completion completion) { if (!IsoOperator.isIsoAttachedToVm(self.getUuid())) { completion.success(); return; } if (!IsoOperator.getIsoUuidByVmUuid(self.getUuid()).contains(isoUuid)) { completion.success(); return; } VmCdRomVO targetVmCdRomVO = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, self.getUuid()) .eq(VmCdRomVO_.isoUuid, isoUuid) .find(); assert targetVmCdRomVO != null; if (self.getState() == VmInstanceState.Stopped || self.getState() == VmInstanceState.Destroyed) { targetVmCdRomVO.setIsoUuid(null); targetVmCdRomVO.setIsoInstallPath(null); dbf.update(targetVmCdRomVO); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); completion.success(); return; } VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.DetachIso); boolean isoNotExist = spec.getDestIsoList().stream().noneMatch(isoSpec -> isoSpec.getImageUuid().equals(isoUuid)); if (isoNotExist) { // the image ISO has been deleted from backup storage // try to detach it from the VM anyway IsoSpec isoSpec = new IsoSpec(); isoSpec.setImageUuid(isoUuid); spec.getDestIsoList().add(isoSpec); logger.debug(String.format("the iso[uuid:%s] has been deleted, try to detach it from the VM[uuid:%s] anyway", isoUuid, self.getUuid())); } FlowChain chain = getDetachIsoWorkFlowChain(spec.getVmInventory()); chain.setName(String.format("detach-iso-%s-from-vm-%s", isoUuid, self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(VmInstanceConstant.Params.DetachingIsoUuid.toString(), isoUuid); 
setFlowMarshaller(chain); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { targetVmCdRomVO.setIsoUuid(null); targetVmCdRomVO.setIsoInstallPath(null); dbf.update(targetVmCdRomVO); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } @Transactional(readOnly = true) private List<L3NetworkInventory> getAttachableL3Network(String accountUuid) { List<String> l3Uuids = acntMgr.getResourceUuidsCanAccessByAccount(accountUuid, L3NetworkVO.class); if (l3Uuids != null && l3Uuids.isEmpty()) { return new ArrayList<L3NetworkInventory>(); } if (self.getClusterUuid() == null){ return getAttachableL3NetworkWhenClusterUuidSetNull(l3Uuids); } String sql; TypedQuery<L3NetworkVO> q; if (self.getVmNics().isEmpty()) { if (l3Uuids == null) { // accessed by a system admin sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.category != :l3Category" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3Category", L3NetworkCategory.System); } else { // accessed by a normal account sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.uuid in (:l3uuids)" + " and l3.category != :l3Category" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3uuids", l3Uuids); q.setParameter("l3Category", L3NetworkCategory.System); } } else { if (l3Uuids == null) { // accessed by a system admin sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where l3.uuid not in" + " (select ip.l3NetworkUuid from VmNicVO nic, UsedIpVO ip where ip.vmNicUuid = nic.uuid and nic.vmInstanceUuid = :uuid)" + " and vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.category != :l3Category" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3Category", L3NetworkCategory.System); } else { // accessed by a normal account sql = "select l3" + " from L3NetworkVO l3, VmInstanceVO vm, L2NetworkVO l2, L2NetworkClusterRefVO l2ref" + " where l3.uuid not in" + " (select ip.l3NetworkUuid from VmNicVO nic, UsedIpVO ip where ip.vmNicUuid = nic.uuid and nic.vmInstanceUuid = :uuid)" + " and vm.uuid = :uuid" + " and vm.clusterUuid = l2ref.clusterUuid" + " and l2ref.l2NetworkUuid = l2.uuid" + " and l2.uuid = l3.l2NetworkUuid" + " and l3.state = :l3State" + " and l3.category != :l3Category" + " and l3.uuid in (:l3uuids)" + " group by l3.uuid"; q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class); q.setParameter("l3uuids", l3Uuids); q.setParameter("l3Category", L3NetworkCategory.System); } } q.setParameter("l3State", L3NetworkState.Enabled); q.setParameter("uuid", self.getUuid()); List<L3NetworkVO> l3s = q.getResultList(); return L3NetworkInventory.valueOf(l3s); } 
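// Taken together, the queries above define "attachable": the L3 must sit on an L2
// attached to the VM's cluster, be Enabled, not be a System-category network, not
// already be attached to the VM, and, for normal accounts, be visible to the caller.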
@Transactional(readOnly = true) private List<L3NetworkInventory> getAttachableL3NetworkWhenClusterUuidSetNull(List<String> uuids){ return new SQLBatchWithReturn<List<L3NetworkInventory>>() { @Override protected List<L3NetworkInventory> scripts(){ String rootPsUuid = self.getRootVolume().getPrimaryStorageUuid(); //Get Candidate ClusterUuids From Primary Storage List<String> clusterUuids = q(PrimaryStorageClusterRefVO.class) .select(PrimaryStorageClusterRefVO_.clusterUuid) .eq(PrimaryStorageClusterRefVO_.primaryStorageUuid, rootPsUuid) .listValues(); //filtering the ClusterUuid by vmNic L3s one by one if (!self.getVmNics().isEmpty()){ for (String l3uuid: self.getVmNics().stream().flatMap(nic -> VmNicHelper.getL3Uuids(VmNicInventory.valueOf(nic)).stream()) .distinct().collect(Collectors.toList())){ clusterUuids = getCandidateClusterUuidsFromAttachedL3(l3uuid, clusterUuids); if (clusterUuids.isEmpty()){ return new ArrayList<>(); } } } //Get enabled l3 from the Candidate ClusterUuids List<L3NetworkVO> l3s = sql("select l3" + " from L3NetworkVO l3, L2NetworkVO l2, " + " L2NetworkClusterRefVO l2ref" + " where l2.uuid = l3.l2NetworkUuid " + " and l2.uuid = l2ref.l2NetworkUuid " + " and l2ref.clusterUuid in (:Uuids)" + " and l3.state = :l3State " + " and l3.category != :l3Category" + " group by l3.uuid") .param("Uuids", clusterUuids) .param("l3Category", L3NetworkCategory.System) .param("l3State", L3NetworkState.Enabled).list(); if (l3s.isEmpty()){ return new ArrayList<>(); } //filter result if normal user if (uuids != null) { l3s = l3s.stream().filter(l3 -> uuids.contains(l3.getUuid())).collect(Collectors.toList()); } if (l3s.isEmpty()){ return new ArrayList<>(); } //filter l3 that already attached if (!self.getVmNics().isEmpty()) { List<String> vmL3Uuids = self.getVmNics().stream().flatMap(nic -> VmNicHelper.getL3Uuids(VmNicInventory.valueOf(nic)).stream()) .distinct().collect(Collectors.toList()); l3s = l3s.stream().filter(l3 -> !vmL3Uuids.contains(l3.getUuid())).collect(Collectors.toList()); } return L3NetworkInventory.valueOf(l3s); } private List<String> getCandidateClusterUuidsFromAttachedL3(String l3Uuid, List<String> clusterUuids) { return sql("select l2ref.clusterUuid " + " from L3NetworkVO l3, L2NetworkVO l2, L2NetworkClusterRefVO l2ref " + " where l3.uuid = :l3Uuid " + " and l3.l2NetworkUuid = l2.uuid " + " and l2.uuid = l2ref.l2NetworkUuid" + " and l3.category != :l3Category" + " and l2ref.clusterUuid in (:uuids) " + " group by l2ref.clusterUuid", String.class) .param("l3Uuid", l3Uuid) .param("l3Category", L3NetworkCategory.System) .param("uuids", clusterUuids).list(); } }.execute(); } private void handle(APIGetVmAttachableL3NetworkMsg msg) { APIGetVmAttachableL3NetworkReply reply = new APIGetVmAttachableL3NetworkReply(); reply.setInventories(getAttachableL3Network(msg.getSession().getAccountUuid())); bus.reply(msg, reply); } private void handle(final APIAttachIsoToVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIAttachIsoToVmInstanceEvent evt = new APIAttachIsoToVmInstanceEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } attachIso(msg.getIsoUuid(), msg.getCdRomUuid(), new Completion(msg, chain) { @Override public void success() { self = dbf.reload(self); 
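// reload before building the inventory so the event reflects the newly attached ISO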
evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return String.format("attach-iso-%s-to-vm-%s", msg.getIsoUuid(), self.getUuid()); } }); } private void attachIso(final String isoUuid, String specifiedCdRomUuid, final Completion completion) { checkIfIsoAttachable(isoUuid); IsoOperator.checkAttachIsoToVm(self.getUuid(), isoUuid); List<VmInstanceInventory> vms = list(VmInstanceInventory.valueOf(self)); for (VmAttachIsoExtensionPoint ext : pluginRgty.getExtensionList(VmAttachIsoExtensionPoint.class)) { ErrorCode err = ext.filtCandidateVms(isoUuid, vms); if (err != null) { completion.fail(err); return; } } VmCdRomVO vmCdRomVO = null; if (StringUtils.isNotEmpty(specifiedCdRomUuid)) { vmCdRomVO = dbf.findByUuid(specifiedCdRomUuid, VmCdRomVO.class); } else { vmCdRomVO = IsoOperator.getEmptyCdRom(self.getUuid()); } final VmCdRomVO targetVmCdRomVO = vmCdRomVO; if (self.getState() == VmInstanceState.Stopped) { targetVmCdRomVO.setIsoUuid(isoUuid); dbf.update(targetVmCdRomVO); completion.success(); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); return; } final ImageInventory iso = ImageInventory.valueOf(dbf.findByUuid(isoUuid, ImageVO.class)); VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.AttachIso); IsoSpec isoSpec = new IsoSpec(); isoSpec.setImageUuid(isoUuid); isoSpec.setDeviceId(targetVmCdRomVO.getDeviceId()); spec.getDestIsoList().add(isoSpec); FlowChain chain = getAttachIsoWorkFlowChain(spec.getVmInventory()); chain.setName(String.format("attach-iso-%s-to-vm-%s", isoUuid, self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(Params.AttachingIsoInventory.toString(), iso); setFlowMarshaller(chain); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { // new IsoOperator().attachIsoToVm(self.getUuid(), isoUuid); final VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); final VmInstanceSpec.IsoSpec isoSpec = spec.getDestIsoList().stream() .filter(s -> s.getImageUuid().equals(isoUuid)) .findAny() .get(); targetVmCdRomVO.setIsoUuid(isoUuid); targetVmCdRomVO.setIsoInstallPath(isoSpec.getInstallPath()); dbf.update(targetVmCdRomVO); new IsoOperator().syncVmIsoSystemTag(self.getUuid()); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } @Transactional(readOnly = true) private void checkIfIsoAttachable(String isoUuid) { String psUuid = getSelfInventory().getRootVolume().getPrimaryStorageUuid(); String sql = "select count(i)" + " from ImageCacheVO i" + " where i.primaryStorageUuid = :psUuid" + " and i.imageUuid = :isoUuid"; TypedQuery<Long> q = dbf.getEntityManager().createQuery(sql, Long.class); q.setParameter("psUuid", psUuid); q.setParameter("isoUuid", isoUuid); Long count = q.getSingleResult(); if (count > 0) { // on the same primary storage return; } PrimaryStorageVO psvo = dbf.getEntityManager().find(PrimaryStorageVO.class, psUuid); PrimaryStorageType type = PrimaryStorageType.valueOf(psvo.getType()); List<String> bsUuids = type.findBackupStorage(psUuid); if (bsUuids == null) { List<String> possibleBsTypes = hostAllocatorMgr.getBackupStorageTypesByPrimaryStorageTypeFromMetrics(psvo.getType()); sql = "select count(bs)" + " from 
BackupStorageVO bs, ImageBackupStorageRefVO ref" + " where bs.uuid = ref.backupStorageUuid" + " and ref.imageUuid = :imgUuid" + " and bs.type in (:bsTypes)"; q = dbf.getEntityManager().createQuery(sql, Long.class); q.setParameter("imgUuid", isoUuid); q.setParameter("bsTypes", possibleBsTypes); count = q.getSingleResult(); if (count > 0) { return; } } else if (!bsUuids.isEmpty()) { sql = "select count(bs)" + " from BackupStorageVO bs, ImageBackupStorageRefVO ref" + " where bs.uuid = ref.backupStorageUuid" + " and ref.imageUuid = :imgUuid" + " and bs.uuid in (:bsUuids)"; q = dbf.getEntityManager().createQuery(sql, Long.class); q.setParameter("imgUuid", isoUuid); q.setParameter("bsUuids", bsUuids); count = q.getSingleResult(); if (count > 0) { return; } } throw new OperationFailureException(operr("the ISO[uuid:%s] is on backup storage that is not compatible with the primary storage[uuid:%s]" + " where the VM[name:%s, uuid:%s] is located", isoUuid, psUuid, self.getName(), self.getUuid())); } private void handle(final APIDetachL3NetworkFromVmMsg msg) { VmNicVO vmNicVO = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class); String vmNicAccountUuid = acntMgr.getOwnerAccountUuidOfResource(vmNicVO.getUuid()); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { final APIDetachL3NetworkFromVmEvent evt = new APIDetachL3NetworkFromVmEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } FlowChain fchain = FlowChainBuilder.newSimpleFlowChain(); fchain.setName(String.format("detach-l3-network-from-vm-%s", msg.getVmInstanceUuid())); fchain.then(new NoRollbackFlow() { String __name__ = "before-detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { VmNicInventory nic = VmNicInventory.valueOf((VmNicVO) Q.New(VmNicVO.class).eq(VmNicVO_.uuid, msg.getVmNicUuid()).find()); beforeDetachNic(nic, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { String __name__ = "detach-nic"; @Override public void run(FlowTrigger trigger, Map data) { String releaseNicFlag = msg.getSystemTags() == null ? null : SystemTagUtils.findTagValue(msg.getSystemTags(), VmSystemTags.RELEASE_NIC_AFTER_DETACH_NIC, VmSystemTags.RELEASE_NIC_AFTER_DETACH_NIC_TOKEN); boolean releaseNic = releaseNicFlag == null ?
true : Boolean.parseBoolean(releaseNicFlag); detachNic(msg.getVmNicUuid(), releaseNic, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.publish(evt); chain.next(); VmNicInventory vmNicInventory = VmNicInventory.valueOf(vmNicVO); VmNicCanonicalEvents.VmNicEventData vmNicEventData = new VmNicCanonicalEvents.VmNicEventData(); vmNicEventData.setCurrentStatus(VmInstanceState.Destroyed.toString()); vmNicEventData.setAccountUuid(vmNicAccountUuid); vmNicEventData.setInventory(vmNicInventory); evtf.fire(VmNicCanonicalEvents.VM_NIC_DELETED_PATH, vmNicEventData); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { evt.setError(errCode); bus.publish(evt); chain.next(); } }).start(); } @Override public String getName() { return "detach-nic"; } }); } protected void beforeDetachNic(VmNicInventory nicInventory, Completion completion) { completion.success(); } // switch the vm's default nic if the nic being detached is the current default nic protected void selectDefaultL3(VmNicInventory nic) { if (self.getDefaultL3NetworkUuid() != null && !VmNicHelper.isDefaultNic(nic, VmInstanceInventory.valueOf(self))) { return; } final VmInstanceInventory vm = getSelfInventory(); final String previousDefaultL3 = vm.getDefaultL3NetworkUuid(); // the nic has already been removed from the database, reload to get the remaining nics self = dbf.reload(self); final VmNicVO candidate = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return arg.getUuid().equals(nic.getUuid()) ? null : arg; } }); if (candidate != null) { String newDefaultL3 = VmNicHelper.getPrimaryL3Uuid(VmNicInventory.valueOf(candidate)); CollectionUtils.safeForEach( pluginRgty.getExtensionList(VmDefaultL3NetworkChangedExtensionPoint.class), new ForEachFunction<VmDefaultL3NetworkChangedExtensionPoint>() { @Override public void run(VmDefaultL3NetworkChangedExtensionPoint ext) { ext.vmDefaultL3NetworkChanged(vm, previousDefaultL3, newDefaultL3); } }); self.setDefaultL3NetworkUuid(newDefaultL3); logger.debug(String.format( "after detaching the nic[uuid:%s, L3 uuid:%s], change the default L3 of the VM[uuid:%s]" + " to the L3 network[uuid: %s]", nic.getUuid(), VmNicHelper.getL3Uuids(nic), self.getUuid(), newDefaultL3)); } else { self.setDefaultL3NetworkUuid(null); logger.debug(String.format( "after detaching the nic[uuid:%s, L3 uuid:%s], change the default L3 of the VM[uuid:%s]" + " to null, as the VM has no other nics", nic.getUuid(), VmNicHelper.getL3Uuids(nic), self.getUuid())); } self = dbf.updateAndRefresh(self); } private void detachNic(final String nicUuid, boolean releaseNic, final Completion completion) { VmNicVO vmNicVO = CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return arg.getUuid().equals(nicUuid) ? arg : null; } }); if (vmNicVO == null) { completion.success(); return; } final VmNicInventory nic = VmNicInventory.valueOf( CollectionUtils.find(self.getVmNics(), new Function<VmNicVO, VmNicVO>() { @Override public VmNicVO call(VmNicVO arg) { return arg.getUuid().equals(nicUuid) ?
arg : null; } }) ); for (VmDetachNicExtensionPoint ext : pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class)) { ext.preDetachNic(nic); } CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class), new ForEachFunction<VmDetachNicExtensionPoint>() { @Override public void run(VmDetachNicExtensionPoint arg) { arg.beforeDetachNic(nic); } }); final VmInstanceSpec spec = buildSpecFromInventory(getSelfInventory(), VmOperation.DetachNic); spec.setVmInventory(VmInstanceInventory.valueOf(self)); spec.setDestNics(list(nic)); L3NetworkInventory l3Inv = L3NetworkInventory.valueOf(dbf.findByUuid(nic.getL3NetworkUuid(), L3NetworkVO.class)); spec.setL3Networks(list(new VmNicSpec(l3Inv))); FlowChain flowChain = FlowChainBuilder.newSimpleFlowChain(); flowChain.setName(String.format("detachNic-vm-%s-nic-%s", self.getUuid(), nicUuid)); setFlowMarshaller(flowChain); flowChain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); flowChain.getData().put(Params.ReleaseNicAfterDetachNic.toString(), releaseNic); if (self.getState() == VmInstanceState.Running) { flowChain.then(new VmDetachNicOnHypervisorFlow()); } flowChain.then(new VmReleaseResourceOnDetachingNicFlow()); flowChain.then(new VmDetachNicFlow()); flowChain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { selectDefaultL3(nic); removeStaticIp(); CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class), new ForEachFunction<VmDetachNicExtensionPoint>() { @Override public void run(VmDetachNicExtensionPoint arg) { arg.afterDetachNic(nic); } }); completion.success(); } private void removeStaticIp() { for (UsedIpInventory ip : nic.getUsedIps()) { new StaticIpOperator().deleteStaticIpByVmUuidAndL3Uuid(self.getUuid(), ip.getL3NetworkUuid()); } } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { CollectionUtils.safeForEach(pluginRgty.getExtensionList(VmDetachNicExtensionPoint.class), new ForEachFunction<VmDetachNicExtensionPoint>() { @Override public void run(VmDetachNicExtensionPoint arg) { arg.failedToDetachNic(nic, errCode); } }); completion.fail(errCode); } }).start(); } private void handle(final APIChangeInstanceOfferingMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { APIChangeInstanceOfferingEvent evt = new APIChangeInstanceOfferingEvent(msg.getId()); refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { evt.setError(allowed); bus.publish(evt); chain.next(); return; } changeOffering(msg, new Completion(msg, chain) { @Override public void success() { refreshVO(); evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } @Override public String getName() { return "change-instance-offering"; } }); } private void changeOffering(APIChangeInstanceOfferingMsg msg, final Completion completion) { final InstanceOfferingVO newOfferingVO = dbf.findByUuid(msg.getInstanceOfferingUuid(), InstanceOfferingVO.class); final InstanceOfferingInventory inv = InstanceOfferingInventory.valueOf(newOfferingVO); final VmInstanceInventory vm = getSelfInventory(); List<ChangeInstanceOfferingExtensionPoint> exts = pluginRgty.getExtensionList(ChangeInstanceOfferingExtensionPoint.class); 
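// pre* hooks run via forEach so an extension may reject the change by throwing; before* hooks run via safeForEach so one failing extension does not block the others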
exts.forEach(ext -> ext.preChangeInstanceOffering(vm, inv)); CollectionUtils.safeForEach(exts, ext -> ext.beforeChangeInstanceOffering(vm, inv)); changeCpuAndMemory(inv.getCpuNum(), inv.getMemorySize(), new Completion(completion) { @Override public void success() { self.setAllocatorStrategy(inv.getAllocatorStrategy()); self.setInstanceOfferingUuid(msg.getInstanceOfferingUuid()); self = dbf.updateAndRefresh(self); CollectionUtils.safeForEach(exts, ext -> ext.afterChangeInstanceOffering(vm, inv)); completion.success(); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } private void changeCpuAndMemory(final int cpuNum, final long memorySize, final Completion completion) { if (self.getState() == VmInstanceState.Stopped) { self.setCpuNum(cpuNum); self.setMemorySize(memorySize); self = dbf.updateAndRefresh(self); completion.success(); return; } final int oldCpuNum = self.getCpuNum(); final long oldMemorySize = self.getMemorySize(); class AlignmentStruct { long alignedMemory; } final AlignmentStruct struct = new AlignmentStruct(); struct.alignedMemory = memorySize; FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("change-cpu-and-memory-of-vm-%s", self.getUuid())); chain.then(new NoRollbackFlow() { String __name__ = "align-memory"; @Override public void run(FlowTrigger chain, Map data) { // align memory long increaseMemory = memorySize - oldMemorySize; long remainderMemory = increaseMemory % SizeUnit.MEGABYTE.toByte(128); if (increaseMemory != 0 && remainderMemory != 0) { if (remainderMemory < SizeUnit.MEGABYTE.toByte(128) / 2) { increaseMemory = increaseMemory / SizeUnit.MEGABYTE.toByte(128) * SizeUnit.MEGABYTE.toByte(128); } else { increaseMemory = (increaseMemory / SizeUnit.MEGABYTE.toByte(128) + 1) * SizeUnit.MEGABYTE.toByte(128); } if (increaseMemory == 0) { struct.alignedMemory = oldMemorySize + SizeUnit.MEGABYTE.toByte(128); } else { struct.alignedMemory = oldMemorySize + increaseMemory; } logger.debug(String.format("automatically align memory from %s to %s", memorySize, struct.alignedMemory)); } chain.next(); } }).then(new Flow() { String __name__ = String.format("allocate-host-capacity-on-host-%s", self.getHostUuid()); boolean result = false; @Override public void run(FlowTrigger chain, Map data) { DesignatedAllocateHostMsg msg = new DesignatedAllocateHostMsg(); msg.setCpuCapacity(cpuNum - oldCpuNum); msg.setMemoryCapacity(struct.alignedMemory - oldMemorySize); msg.setAllocatorStrategy(HostAllocatorConstant.DESIGNATED_HOST_ALLOCATOR_STRATEGY_TYPE); msg.setVmInstance(VmInstanceInventory.valueOf(self)); if (self.getImageUuid() != null && dbf.findByUuid(self.getImageUuid(), ImageVO.class) != null) { msg.setImage(ImageInventory.valueOf(dbf.findByUuid(self.getImageUuid(), ImageVO.class))); } msg.setHostUuid(self.getHostUuid()); msg.setFullAllocate(false); msg.setL3NetworkUuids(VmNicHelper.getL3Uuids(VmNicInventory.valueOf(self.getVmNics()))); msg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); bus.send(msg, new CloudBusCallBack(chain) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { ErrorCode err = operr("host[uuid:%s] capacity is not enough to offer cpu[%s], memory[%s bytes]", self.getHostUuid(), cpuNum - oldCpuNum, struct.alignedMemory - oldMemorySize); err.setCause(reply.getError()); chain.fail(err); } else { result = true; logger.debug(String.format("reserve memory %s bytes and cpu %s on host[uuid:%s]", memorySize - self.getMemorySize(), cpuNum - self.getCpuNum(), 
self.getHostUuid())); chain.next(); } } }); } @Override public void rollback(FlowRollback chain, Map data) { if (result) { ReturnHostCapacityMsg msg = new ReturnHostCapacityMsg(); msg.setCpuCapacity(cpuNum - oldCpuNum); msg.setMemoryCapacity(struct.alignedMemory - oldMemorySize); msg.setHostUuid(self.getHostUuid()); msg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); bus.send(msg); } chain.rollback(); } }).then(new NoRollbackFlow() { String __name__ = String.format("change-cpu-of-vm-%s", self.getUuid()); @Override public void run(FlowTrigger chain, Map data) { if (cpuNum != self.getCpuNum()) { IncreaseVmCpuMsg msg = new IncreaseVmCpuMsg(); msg.setVmInstanceUuid(self.getUuid()); msg.setHostUuid(self.getHostUuid()); msg.setCpuNum(cpuNum); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(msg, new CloudBusCallBack(chain) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.error("failed to update cpu"); chain.fail(reply.getError()); } else { IncreaseVmCpuReply r = reply.castReply(); self.setCpuNum(r.getCpuNum()); chain.next(); } } }); } else { chain.next(); } } }).then(new NoRollbackFlow() { String __name__ = String.format("change-memory-of-vm-%s", self.getUuid()); @Override public void run(FlowTrigger chain, Map data) { if (struct.alignedMemory != self.getMemorySize()) { IncreaseVmMemoryMsg msg = new IncreaseVmMemoryMsg(); msg.setVmInstanceUuid(self.getUuid()); msg.setHostUuid(self.getHostUuid()); msg.setMemorySize(struct.alignedMemory); bus.makeTargetServiceIdByResourceUuid(msg, HostConstant.SERVICE_ID, self.getHostUuid()); bus.send(msg, new CloudBusCallBack(chain) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { logger.error("failed to update memory"); chain.fail(reply.getError()); } else { IncreaseVmMemoryReply r = reply.castReply(); self.setMemorySize(r.getMemorySize()); chain.next(); } } }); } else { chain.next(); } } }).done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { dbf.update(self); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } private void handle(final APIUpdateVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { APIUpdateVmInstanceEvent evt = new APIUpdateVmInstanceEvent(msg.getId()); refreshVO(); List<Runnable> extensions = new ArrayList<Runnable>(); final VmInstanceInventory vm = getSelfInventory(); boolean update = false; if (msg.getName() != null) { self.setName(msg.getName()); update = true; } if (msg.getDescription() != null) { self.setDescription(msg.getDescription()); update = true; } if (msg.getState() != null) { self.setState(VmInstanceState.valueOf(msg.getState())); update = true; if (!vm.getState().equals(msg.getState())) { extensions.add(new Runnable() { @Override public void run() { logger.debug(String.format("vm[uuid:%s] changed state from %s to %s", self.getUuid(), vm.getState(), msg.getState())); VmCanonicalEvents.VmStateChangedData data = new VmCanonicalEvents.VmStateChangedData(); data.setVmUuid(self.getUuid()); data.setOldState(vm.getState()); data.setNewState(msg.getState()); data.setInventory(getSelfInventory()); evtf.fire(VmCanonicalEvents.VM_FULL_STATE_CHANGED_PATH, data); } }); } } if (msg.getDefaultL3NetworkUuid() != null) { 
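// a default-L3 change is propagated to VmDefaultL3NetworkChangedExtensionPoint listeners after the row is updated below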
self.setDefaultL3NetworkUuid(msg.getDefaultL3NetworkUuid()); update = true; if (!msg.getDefaultL3NetworkUuid().equals(vm.getDefaultL3NetworkUuid())) { extensions.add(new Runnable() { @Override public void run() { for (VmDefaultL3NetworkChangedExtensionPoint ext : pluginRgty.getExtensionList(VmDefaultL3NetworkChangedExtensionPoint.class)) { ext.vmDefaultL3NetworkChanged(vm, vm.getDefaultL3NetworkUuid(), msg.getDefaultL3NetworkUuid()); } } }); } } if (msg.getPlatform() != null) { self.setPlatform(msg.getPlatform()); update = true; } if (update) { dbf.update(self); } updateVmIsoFirstOrder(msg.getSystemTags()); CollectionUtils.safeForEach(extensions, Runnable::run); if (msg.getCpuNum() != null || msg.getMemorySize() != null) { int cpuNum = msg.getCpuNum() == null ? self.getCpuNum() : msg.getCpuNum(); long memory = msg.getMemorySize() == null ? self.getMemorySize() : msg.getMemorySize(); changeCpuAndMemory(cpuNum, memory, new Completion(msg, chain) { @Override public void success() { refreshVO(); evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } else { evt.setInventory(getSelfInventory()); bus.publish(evt); chain.next(); } } @Override public String getName() { return "update-vm-info"; } }); } // Move the specified ISO to the first CD-ROM device; the new order takes effect after the vm restarts private void updateVmIsoFirstOrder(List<String> systemTags){ if(systemTags == null || systemTags.isEmpty()){ return; } String isoUuid = SystemTagUtils.findTagValue(systemTags, VmSystemTags.ISO, VmSystemTags.ISO_TOKEN); if (isoUuid == null){ return; } String vmUuid = self.getUuid(); List<String> isoList = IsoOperator.getIsoUuidByVmUuid(vmUuid); if (!isoList.contains(isoUuid)) { throw new OperationFailureException(operr("ISO[uuid:%s] is not attached to VM[uuid:%s]", isoUuid, self.getUuid())); } List<VmCdRomVO> cdRomVOS = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, self.getUuid()) .orderBy(VmCdRomVO_.deviceId, SimpleQuery.Od.ASC) .list(); if (cdRomVOS.size() <= 1) { return; } if (isoUuid.equals(cdRomVOS.get(0).getIsoUuid())) { return; } Optional<VmCdRomVO> opt = cdRomVOS.stream().filter(v -> isoUuid.equals(v.getIsoUuid())).findAny(); if (!opt.isPresent()) { return; } VmCdRomVO sourceCdRomVO = opt.get(); VmCdRomVO targetCdRomVO = cdRomVOS.get(0); String targetCdRomIsoUuid = targetCdRomVO.getIsoUuid(); String path = targetCdRomVO.getIsoInstallPath(); targetCdRomVO.setIsoUuid(sourceCdRomVO.getIsoUuid()); targetCdRomVO.setIsoInstallPath(sourceCdRomVO.getIsoInstallPath()); sourceCdRomVO.setIsoUuid(targetCdRomIsoUuid); sourceCdRomVO.setIsoInstallPath(path); new SQLBatch() { @Override protected void scripts() { merge(targetCdRomVO); merge(sourceCdRomVO); } }.execute(); } @Transactional(readOnly = true) private List<VolumeVO> getAttachableVolume(String accountUuid) { if (!self.getState().equals(VmInstanceState.Stopped) && self.getPlatform().equals(ImagePlatform.Other.toString())) { return Collections.emptyList(); } List<String> volUuids = acntMgr.getResourceUuidsCanAccessByAccount(accountUuid, VolumeVO.class); if (volUuids != null && volUuids.isEmpty()) { return Collections.emptyList(); } List<String> formats = VolumeFormat.getVolumeFormatSupportedByHypervisorTypeInString(self.getHypervisorType()); if (formats.isEmpty()) { throw new CloudRuntimeException(String.format("cannot find volume formats for the hypervisor type[%s]", self.getHypervisorType())); } String sql; List<VolumeVO> vos; /* * Cluster1: [PS1, PS2, PS3]
* Cluster2: [PS1, PS2] * Cluster3: [PS1, PS2, PS3] * * Assume a stopped vm which has no clusterUuid and whose root volume is on PS1; * then it can attach all suitable data volumes from [PS1, PS2] * because PS1 is attached to [Cluster1, Cluster2, Cluster3] * and they all have [PS1, PS2] attached */ List<String> psUuids = null; if (self.getClusterUuid() == null) { // 1. get clusterUuids of VM->RV->PS sql = "select cls.uuid from" + " ClusterVO cls, VolumeVO vol, VmInstanceVO vm, PrimaryStorageClusterRefVO ref" + " where vm.uuid = :vmUuid" + " and vol.uuid = vm.rootVolumeUuid" + " and ref.primaryStorageUuid = vol.primaryStorageUuid" + " and cls.uuid = ref.clusterUuid" + " and cls.state = :clsState" + " group by cls.uuid"; List<String> clusterUuids = SQL.New(sql) .param("vmUuid", self.getUuid()) .param("clsState", ClusterState.Enabled) .list(); // 2. get all PS that are attached to every cluster in clusterUuids sql = "select ps.uuid from PrimaryStorageVO ps" + " inner join PrimaryStorageClusterRefVO ref on ref.primaryStorageUuid = ps.uuid" + " inner join ClusterVO cls on cls.uuid = ref.clusterUuid" + " where cls.uuid in (:clusterUuids)" + " and ps.state = :psState" + " and ps.status = :psStatus" + " group by ps.uuid" + " having count(distinct cls.uuid) = :clsCount"; psUuids = SQL.New(sql) .param("clusterUuids", clusterUuids) .param("psState", PrimaryStorageState.Enabled) .param("psStatus", PrimaryStorageStatus.Connected) .param("clsCount", (long)clusterUuids.size()) .list(); } if (volUuids == null) { // accessed by a system admin // if vm.clusterUuid is not null sql = "select vol" + " from VolumeVO vol, VmInstanceVO vm, PrimaryStorageClusterRefVO ref" + " where vol.type = :type" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " and vm.clusterUuid = ref.clusterUuid" + " and ref.primaryStorageUuid = vol.primaryStorageUuid" + " and vm.uuid = :vmUuid" + " group by vol.uuid"; TypedQuery<VolumeVO> q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volStatus", VolumeStatus.Ready); q.setParameter("formats", formats); q.setParameter("vmUuid", self.getUuid()); q.setParameter("type", VolumeType.Data); vos = q.getResultList(); // if vm.clusterUuid is null if (self.getClusterUuid() == null) { // 3.
get data volume candidates from psUuids sql = "select vol from VolumeVO vol" + " where vol.primaryStorageUuid in (:psUuids)" + " and vol.type = :volType" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " group by vol.uuid"; List<VolumeVO> dvs = SQL.New(sql) .param("psUuids", psUuids) .param("volType", VolumeType.Data) .param("volState", VolumeState.Enabled) .param("volStatus", VolumeStatus.Ready) .param("formats", formats) .list(); vos.addAll(dvs); } // for NotInstantiated data volumes sql = "select vol" + " from VolumeVO vol" + " where vol.type = :type" + " and vol.status = :volStatus" + " and vol.state = :volState" + " group by vol.uuid"; q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("type", VolumeType.Data); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volStatus", VolumeStatus.NotInstantiated); vos.addAll(q.getResultList()); } else { // accessed by a normal account // if vm.clusterUuid is not null sql = "select vol" + " from VolumeVO vol, VmInstanceVO vm, PrimaryStorageClusterRefVO ref" + " where vol.type = :type" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " and vm.clusterUuid = ref.clusterUuid" + " and ref.primaryStorageUuid = vol.primaryStorageUuid" + " and vol.uuid in (:volUuids)" + " and vm.uuid = :vmUuid" + " group by vol.uuid"; TypedQuery<VolumeVO> q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volStatus", VolumeStatus.Ready); q.setParameter("vmUuid", self.getUuid()); q.setParameter("formats", formats); q.setParameter("type", VolumeType.Data); q.setParameter("volUuids", volUuids); vos = q.getResultList(); // if vm.clusterUuid is null if (self.getClusterUuid() == null) { // 3. 
get data volume candidates from psUuids sql = "select vol from VolumeVO vol" + " where vol.primaryStorageUuid in (:psUuids)" + " and vol.type = :volType" + " and vol.state = :volState" + " and vol.status = :volStatus" + " and vol.format in (:formats)" + " and vol.vmInstanceUuid is null" + " and vol.uuid in (:volUuids)" + " group by vol.uuid"; List<VolumeVO> dvs = SQL.New(sql) .param("psUuids", psUuids) .param("volType", VolumeType.Data) .param("volState", VolumeState.Enabled) .param("volStatus", VolumeStatus.Ready) .param("formats", formats) .param("volUuids", volUuids) .list(); vos.addAll(dvs); } // for NotInstantiated data volumes sql = "select vol" + " from VolumeVO vol" + " where vol.type = :type" + " and vol.status = :volStatus" + " and vol.state = :volState" + " and vol.uuid in (:volUuids)" + " group by vol.uuid"; q = dbf.getEntityManager().createQuery(sql, VolumeVO.class); q.setParameter("type", VolumeType.Data); q.setParameter("volState", VolumeState.Enabled); q.setParameter("volUuids", volUuids); q.setParameter("volStatus", VolumeStatus.NotInstantiated); vos.addAll(q.getResultList()); } for (GetAttachableVolumeExtensionPoint ext : pluginRgty.getExtensionList(GetAttachableVolumeExtensionPoint.class)) { if (!vos.isEmpty()) { vos = ext.returnAttachableVolumes(getSelfInventory(), vos); } } return vos; } private void handle(APIGetVmAttachableDataVolumeMsg msg) { APIGetVmAttachableDataVolumeReply reply = new APIGetVmAttachableDataVolumeReply(); reply.setInventories(VolumeInventory.valueOf(getAttachableVolume(msg.getSession().getAccountUuid()))); bus.reply(msg, reply); } private void handle(final APIGetVmMigrationCandidateHostsMsg msg) { final APIGetVmMigrationCandidateHostsReply reply = new APIGetVmMigrationCandidateHostsReply(); getVmMigrationTargetHost(msg, new ReturnValueCompletion<List<HostInventory>>(msg) { @Override public void success(List<HostInventory> returnValue) { reply.setInventories(returnValue); bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(final APIAttachL3NetworkToVmMsg msg) { final APIAttachL3NetworkToVmEvent evt = new APIAttachL3NetworkToVmEvent(msg.getId()); final String vmNicInvKey = "vmNicInventory"; FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("attach-l3-network-to-vm-%s", msg.getVmInstanceUuid())); chain.then(new NoRollbackFlow() { String __name__ = "attach-nic"; @Override public void run(FlowTrigger trigger, Map data) { List<String> l3Uuids = new ArrayList<>(); /* put primary L3 at first */ l3Uuids.add(msg.getL3NetworkUuid()); l3Uuids.addAll(msg.getSecondaryL3Uuids()); attachNic(msg, l3Uuids, new ReturnValueCompletion<VmNicInventory>(msg) { @Override public void success(VmNicInventory returnValue) { data.put(vmNicInvKey, returnValue); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { String __name__ = "after-attach-nic"; @Override public void run(FlowTrigger trigger, Map data) { afterAttachNic((VmNicInventory) data.get(vmNicInvKey), new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.publish(evt); VmNicInventory vmNicInventory = (VmNicInventory) data.get(vmNicInvKey); 
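// fire a canonical nic-created event so components subscribed to nic lifecycle events are notified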
VmNicCanonicalEvents.VmNicEventData vmNicEventData = new VmNicCanonicalEvents.VmNicEventData(); vmNicEventData.setCurrentStatus(self.getState().toString()); String vmNicAccountUuid = acntMgr.getOwnerAccountUuidOfResource(vmNicInventory.getUuid()); vmNicEventData.setAccountUuid(vmNicAccountUuid); vmNicEventData.setInventory(vmNicInventory); evtf.fire(VmNicCanonicalEvents.VM_NIC_CREATED_PATH, vmNicEventData); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { evt.setError(errCode); bus.publish(evt); } }).start(); } private void handle(final APIAttachVmNicToVmMsg msg) { final APIAttachVmNicToVmEvent evt = new APIAttachVmNicToVmEvent(msg.getId()); final String vmNicInvKey = "vmNicInventory"; FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("attach-nic-to-vm-%s", msg.getVmInstanceUuid())); chain.then(new NoRollbackFlow() { String __name__ = "attach-nic"; @Override public void run(FlowTrigger trigger, Map data) { attachNic(msg, new ReturnValueCompletion<VmNicInventory>(msg) { @Override public void success(VmNicInventory returnValue) { data.put(vmNicInvKey, returnValue); trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { String __name__ = "after-attach-nic"; @Override public void run(FlowTrigger trigger, Map data) { afterAttachNic((VmNicInventory) data.get(vmNicInvKey), new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { self = dbf.reload(self); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.publish(evt); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { evt.setError(errCode); bus.publish(evt); } }).start(); } protected void afterAttachNic(VmNicInventory nicInventory, Completion completion) { completion.success(); } private void detachVolume(final DetachDataVolumeFromVmMsg msg, final NoErrorCompletion completion) { final DetachDataVolumeFromVmReply reply = new DetachDataVolumeFromVmReply(); refreshVO(true); if (self == null || VmInstanceState.Destroyed == self.getState()) { // the vm is destroyed, the data volume must have been detached bus.reply(msg, reply); completion.done(); return; } ErrorCode allowed = validateOperationByState(msg, self.getState(), VmErrors.DETACH_VOLUME_ERROR); if (allowed != null) { throw new OperationFailureException(allowed); } final VolumeInventory volume = msg.getVolume(); VolumeVO vvo = dbf.findByUuid(volume.getUuid(), VolumeVO.class); // the volume is already detached, skip the actions below (shareable volumes are the exception) if (vvo.getVmInstanceUuid() == null && !vvo.isShareable()) { extEmitter.afterDetachVolume(getSelfInventory(), volume, new Completion(completion) { @Override public void success() { bus.reply(msg, reply); completion.done(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); completion.done(); } }); return; } extEmitter.preDetachVolume(getSelfInventory(), volume); extEmitter.beforeDetachVolume(getSelfInventory(), volume); if (self.getState() == VmInstanceState.Stopped) { extEmitter.afterDetachVolume(getSelfInventory(), volume, new Completion(completion) { @Override public void success() { bus.reply(msg, reply); completion.done(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode);
bus.reply(msg, reply); completion.done(); } }); return; } // VmInstanceState.Running String hostUuid = self.getHostUuid(); DetachVolumeFromVmOnHypervisorMsg dmsg = new DetachVolumeFromVmOnHypervisorMsg(); dmsg.setVmInventory(VmInstanceInventory.valueOf(self)); dmsg.setInventory(volume); dmsg.setHostUuid(hostUuid); bus.makeTargetServiceIdByResourceUuid(dmsg, HostConstant.SERVICE_ID, hostUuid); bus.send(dmsg, new CloudBusCallBack(msg, completion) { @Override public void run(final MessageReply r) { if (!r.isSuccess()) { reply.setError(r.getError()); extEmitter.failedToDetachVolume(getSelfInventory(), volume, r.getError()); bus.reply(msg, reply); completion.done(); } else { extEmitter.afterDetachVolume(getSelfInventory(), volume, new Completion(completion) { @Override public void success() { // update the VolumeVO before exiting the message queue vvo.setVmInstanceUuid(null); dbf.updateAndRefresh(vvo); bus.reply(msg, reply); completion.done(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); completion.done(); } }); } } }); } protected void attachDataVolume(final AttachDataVolumeToVmMsg msg, final NoErrorCompletion completion) { final AttachDataVolumeToVmReply reply = new AttachDataVolumeToVmReply(); refreshVO(); ErrorCode err = validateOperationByState(msg, self.getState(), VmErrors.ATTACH_VOLUME_ERROR); if (err != null) { throw new OperationFailureException(err); } Map data = new HashMap(); final VolumeInventory volume = msg.getVolume(); new VmAttachVolumeValidator().validate(msg.getVmInstanceUuid(), volume.getUuid()); extEmitter.preAttachVolume(getSelfInventory(), volume); extEmitter.beforeAttachVolume(getSelfInventory(), volume, data); VmInstanceSpec spec = new VmInstanceSpec(); spec.setMessage(msg); spec.setVmInventory(VmInstanceInventory.valueOf(self)); spec.setCurrentVmOperation(VmOperation.AttachVolume); spec.setDestDataVolumes(list(volume)); FlowChain chain; if (volume.getStatus().equals(VolumeStatus.Ready.toString())) { chain = FlowChainBuilder.newSimpleFlowChain(); chain.then(new VmAssignDeviceIdToAttachingVolumeFlow()); chain.then(new VmAttachVolumeOnHypervisorFlow()); } else { chain = getAttachUninstantiatedVolumeWorkFlowChain(spec.getVmInventory()); } setFlowMarshaller(chain); List<VolumeInventory> attachedVolumes = getAllDataVolumes(getSelfInventory()); attachedVolumes.removeIf(it -> it.getDeviceId() == null || it.getUuid().equals(volume.getUuid())); chain.setName(String.format("vm-%s-attach-volume-%s", self.getUuid(), volume.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.getData().put(VmInstanceConstant.Params.AttachingVolumeInventory.toString(), volume); chain.getData().put(Params.AttachedDataVolumeInventories.toString(), attachedVolumes); chain.done(new FlowDoneHandler(msg, completion) { @Override public void handle(Map data) { extEmitter.afterAttachVolume(getSelfInventory(), volume); reply.setHypervisorType(self.getHypervisorType()); bus.reply(msg, reply); completion.done(); } }).error(new FlowErrorHandler(msg, completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToAttachVolume(getSelfInventory(), volume, errCode, data); reply.setError(err(VmErrors.ATTACH_VOLUME_ERROR, errCode, errCode.getDetails())); bus.reply(msg, reply); completion.done(); } }).start(); } protected void migrateVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), VmErrors.MIGRATE_ERROR); if
(allowed != null) { completion.fail(allowed); return; } VmInstanceInventory pinv = getSelfInventory(); for (VmPreMigrationExtensionPoint ext : pluginRgty.getExtensionList(VmPreMigrationExtensionPoint.class)) { ext.preVmMigration(pinv); } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Migrate); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.migrating); spec.setMessage(msg); FlowChain chain = getMigrateVmWorkFlowChain(inv); setFlowMarshaller(chain); String lastHostUuid = self.getHostUuid(); chain.setName(String.format("migrate-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(final Map data) { VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); HostInventory host = spec.getDestHost(); self = changeVmStateInDb(VmInstanceStateEvent.running, ()-> { self.setZoneUuid(host.getZoneUuid()); self.setClusterUuid(host.getClusterUuid()); self.setLastHostUuid(lastHostUuid); self.setHostUuid(host.getUuid()); }); VmInstanceInventory vm = VmInstanceInventory.valueOf(self); extEmitter.afterMigrateVm(vm, vm.getLastHostUuid()); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToMigrateVm(VmInstanceInventory.valueOf(self), spec.getDestHost().getUuid(), errCode); if (HostErrors.FAILED_TO_MIGRATE_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); } else { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } } }).start(); } protected void handle(final APIMigrateVmMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("migrate-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { migrateVm(msg, new Completion(chain) { @Override public void success() { APIMigrateVmEvent evt = new APIMigrateVmEvent(msg.getId()); evt.setInventory(VmInstanceInventory.valueOf(self)); bus.publish(evt); chain.next(); } @Override public void fail(ErrorCode errorCode) { APIMigrateVmEvent evt = new APIMigrateVmEvent(msg.getId()); evt.setError(errorCode); bus.publish(evt); chain.next(); } }); } }); } protected void startVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } if (self.getState() == VmInstanceState.Created) { InstantiateVmFromNewCreatedStruct struct = new JsonLabel().get( InstantiateVmFromNewCreatedStruct.makeLabelKey(self.getUuid()), InstantiateVmFromNewCreatedStruct.class); struct.setStrategy(VmCreationStrategy.InstantStart); instantiateVmFromNewCreate(struct, completion); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); ErrorCode preStart = extEmitter.preStartVm(inv); if (preStart != null) { completion.fail(preStart); return; } final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Start); spec.setMessage(msg); if (msg instanceof APIStartVmInstanceMsg) { APIStartVmInstanceMsg amsg = (APIStartVmInstanceMsg) msg; 
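// an API-level start may pin the vm to a user-specified cluster/host and carries console-related system tags (USB redirect, RDP, VDI monitor number)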
spec.setRequiredClusterUuid(amsg.getClusterUuid()); spec.setRequiredHostUuid(amsg.getHostUuid()); spec.setUsbRedirect(VmSystemTags.USB_REDIRECT.getTokenByResourceUuid(self.getUuid(), VmSystemTags.USB_REDIRECT_TOKEN)); spec.setEnableRDP(VmSystemTags.RDP_ENABLE.getTokenByResourceUuid(self.getUuid(), VmSystemTags.RDP_ENABLE_TOKEN)); spec.setVDIMonitorNumber(VmSystemTags.VDI_MONITOR_NUMBER.getTokenByResourceUuid(self.getUuid(), VmSystemTags.VDI_MONITOR_NUMBER_TOKEN)); } if (msg instanceof HaStartVmInstanceMsg) { spec.setSoftAvoidHostUuids(((HaStartVmInstanceMsg) msg).getSoftAvoidHostUuids()); } else if (msg instanceof StartVmInstanceMsg) { spec.setSoftAvoidHostUuids(((StartVmInstanceMsg) msg).getSoftAvoidHostUuids()); } if (spec.getDestNics().isEmpty()) { throw new OperationFailureException(operr("unable to start the vm[uuid:%s];" + " it doesn't have any nic, please attach a nic and try again", self.getUuid())); } final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.starting); logger.debug("keep the vm state 'Starting' until the start flow finishes or the management node restarts."); extEmitter.beforeStartVm(VmInstanceInventory.valueOf(self)); FlowChain chain = getStartVmWorkFlowChain(inv); setFlowMarshaller(chain); String recentHostUuid = self.getHostUuid() == null ? self.getLastHostUuid() : self.getHostUuid(); String vmHostUuid = self.getHostUuid(); String vmLastHostUuid = self.getLastHostUuid(); chain.setName(String.format("start-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(final Map data) { VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); self = changeVmStateInDb(VmInstanceStateEvent.running, ()-> new SQLBatch() { @Override protected void scripts() { // reload self because some nics may have been deleted in the start phase due to an earlier L3Network deletion. // reload to avoid JPA EntityNotFoundException self = findByUuid(self.getUuid(), VmInstanceVO.class); if (q(HostVO.class).eq(HostVO_.uuid, recentHostUuid).isExists()) { self.setLastHostUuid(recentHostUuid); } else { self.setLastHostUuid(null); } self.setHostUuid(spec.getDestHost().getUuid()); self.setClusterUuid(spec.getDestHost().getClusterUuid()); self.setZoneUuid(spec.getDestHost().getZoneUuid()); } }.execute()); logger.debug(String.format("vm[uuid:%s] is running ...", self.getUuid())); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.afterStartVm(inv); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { // reload self because some nics may have been deleted in the start phase due to an earlier L3Network deletion. // reload to avoid JPA EntityNotFoundException self = dbf.reload(self); extEmitter.failedToStartVm(VmInstanceInventory.valueOf(self), errCode); VmInstanceSpec spec = (VmInstanceSpec) data.get(Params.VmInstanceSpec.toString()); // update vm state to origin state before checking state // avoid sending redundant vm state change event // refer to: ZSTAC-18174 new SQLBatch() { @Override protected void scripts() { self.setState(originState); self.setHostUuid(vmHostUuid); self.setLastHostUuid(q(HostVO.class).eq(HostVO_.uuid, vmLastHostUuid).isExists() ?
vmLastHostUuid : null); self = merge(self); } }.execute(); if (HostErrors.FAILED_TO_START_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(spec.getDestHost().getUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); return; } completion.fail(errCode); } }).start(); } private VmInstanceSpec buildVmInstanceSpecFromStruct(InstantiateVmFromNewCreatedStruct struct) { final VmInstanceSpec spec = new VmInstanceSpec(); spec.setRequiredPrimaryStorageUuidForRootVolume(struct.getPrimaryStorageUuidForRootVolume()); spec.setRequiredPrimaryStorageUuidForDataVolume(struct.getPrimaryStorageUuidForDataVolume()); spec.setDataVolumeSystemTags(struct.getDataVolumeSystemTags()); spec.setRootVolumeSystemTags(struct.getRootVolumeSystemTags()); spec.setRequiredHostUuid(struct.getRequiredHostUuid()); spec.setVmInventory(getSelfInventory()); if (struct.getL3NetworkUuids() != null && !struct.getL3NetworkUuids().isEmpty()) { SimpleQuery<L3NetworkVO> nwquery = dbf.createQuery(L3NetworkVO.class); nwquery.add(L3NetworkVO_.uuid, Op.IN, VmNicSpec.getL3UuidsOfSpec(struct.getL3NetworkUuids())); List<L3NetworkVO> vos = nwquery.list(); List<L3NetworkInventory> nws = L3NetworkInventory.valueOf(vos); // order L3 networks by the order in which they were specified in the API List<VmNicSpec> nicSpecs = new ArrayList<>(); for (VmNicSpec nicSpec : struct.getL3NetworkUuids()) { List<L3NetworkInventory> l3s = new ArrayList<>(); for (L3NetworkInventory inv : nicSpec.l3Invs) { L3NetworkInventory l3 = CollectionUtils.find(nws, new Function<L3NetworkInventory, L3NetworkInventory>() { @Override public L3NetworkInventory call(L3NetworkInventory arg) { return arg.getUuid().equals(inv.getUuid()) ? arg : null; } }); if (l3 == null) { throw new OperationFailureException(operr( "Unable to find L3Network[uuid:%s] to start the current vm; it may have been deleted. " + "Suggested operation: delete this vm and recreate a new one", inv.getUuid())); } l3s.add(l3); } if (!l3s.isEmpty()) { nicSpecs.add(new VmNicSpec(l3s)); } } spec.setL3Networks(nicSpecs); } else { spec.setL3Networks(new ArrayList<>()); } if (struct.getDataDiskOfferingUuids() != null && !struct.getDataDiskOfferingUuids().isEmpty()) { SimpleQuery<DiskOfferingVO> dquery = dbf.createQuery(DiskOfferingVO.class); dquery.add(DiskOfferingVO_.uuid, SimpleQuery.Op.IN, struct.getDataDiskOfferingUuids()); List<DiskOfferingVO> vos = dquery.list(); // allow creating multiple data volumes from the same disk offering List<DiskOfferingInventory> disks = new ArrayList<>(); for (final String duuid : struct.getDataDiskOfferingUuids()) { DiskOfferingVO dvo = CollectionUtils.find(vos, new Function<DiskOfferingVO, DiskOfferingVO>() { @Override public DiskOfferingVO call(DiskOfferingVO arg) { if (duuid.equals(arg.getUuid())) { return arg; } return null; } }); disks.add(DiskOfferingInventory.valueOf(dvo)); } spec.setDataDiskOfferings(disks); } else { spec.setDataDiskOfferings(new ArrayList<>()); } if (struct.getRootDiskOfferingUuid() != null) { DiskOfferingVO rootDisk = dbf.findByUuid(struct.getRootDiskOfferingUuid(), DiskOfferingVO.class); spec.setRootDiskOffering(DiskOfferingInventory.valueOf(rootDisk)); } ImageVO imvo = dbf.findByUuid(spec.getVmInventory().getImageUuid(), ImageVO.class); List<CdRomSpec> cdRomSpecs = buildVmCdRomSpecsForNewCreated(spec); spec.setCdRomSpecs(cdRomSpecs); spec.getImageSpec().setInventory(ImageInventory.valueOf(imvo)); spec.setCurrentVmOperation(VmOperation.NewCreate); if (self.getClusterUuid() != null || struct.getRequiredHostUuid() !=
null) { spec.setHostAllocatorStrategy(HostAllocatorConstant.DESIGNATED_HOST_ALLOCATOR_STRATEGY_TYPE); } buildHostname(spec); spec.setUserdataList(buildUserdata()); selectBootOrder(spec); spec.setConsolePassword(VmSystemTags.CONSOLE_PASSWORD. getTokenByResourceUuid(self.getUuid(), VmSystemTags.CONSOLE_PASSWORD_TOKEN)); spec.setUsbRedirect(VmSystemTags.USB_REDIRECT.getTokenByResourceUuid(self.getUuid(), VmSystemTags.USB_REDIRECT_TOKEN)); if (struct.getStrategy() == VmCreationStrategy.CreateStopped) { spec.setCreatePaused(true); } return spec; } private List<CdRomSpec> buildVmCdRomSpecsForNewCreated(VmInstanceSpec vmSpec) { List<VmInstanceSpec.CdRomSpec> cdRomSpecs = new ArrayList<>(); VmInstanceInventory vmInventory = vmSpec.getVmInventory(); String vmUuid = vmInventory.getUuid(); // vm image is iso ImageVO imvo = dbf.findByUuid(vmInventory.getImageUuid(), ImageVO.class); if (imvo.getMediaType() == ImageMediaType.ISO) { CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setDeviceId(cdRomSpecs.size()); cdRomSpec.setImageUuid(imvo.getUuid()); cdRomSpecs.add(cdRomSpec); } // createWithoutCdRom boolean hasTag = VmSystemTags.CREATE_WITHOUT_CD_ROM.hasTag(vmUuid); boolean flagWithoutCdRom = false; if (hasTag) { String withoutCdRom = VmSystemTags.CREATE_WITHOUT_CD_ROM.getTokenByResourceUuid(vmUuid, VmSystemTags.CREATE_WITHOUT_CD_ROM_TOKEN); flagWithoutCdRom = Boolean.parseBoolean(withoutCdRom); } if (flagWithoutCdRom) { return cdRomSpecs; } // cdroms hasTag = VmSystemTags.CREATE_VM_CD_ROM_LIST.hasTag(vmUuid); if (hasTag) { Map<String, String> tokens = VmSystemTags.CREATE_VM_CD_ROM_LIST.getTokensByResourceUuid(vmUuid); List<String> cdRoms = new ArrayList<>(); cdRoms.add(tokens.get(VmSystemTags.CD_ROM_0)); cdRoms.add(tokens.get(VmSystemTags.CD_ROM_1)); cdRoms.add(tokens.get(VmSystemTags.CD_ROM_2)); // remove vm image iso, image iso has been added cdRoms.removeAll(cdRomSpecs.stream().map(CdRomSpec::getImageUuid).collect(Collectors.toList())); for (String cdRom : cdRoms) { if (cdRom == null || VmInstanceConstant.NONE_CDROM.equalsIgnoreCase(cdRom)) { continue; } CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setDeviceId(cdRomSpecs.size()); String imageUuid = VmInstanceConstant.EMPTY_CDROM.equalsIgnoreCase(cdRom) ? 
null : cdRom; cdRomSpec.setImageUuid(imageUuid); cdRomSpecs.add(cdRomSpec); } } else { int defaultCdRomNum = VmGlobalConfig.VM_DEFAULT_CD_ROM_NUM.value(Integer.class); while (defaultCdRomNum > cdRomSpecs.size()) { CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setDeviceId(cdRomSpecs.size()); cdRomSpecs.add(cdRomSpec); } } int max = VmGlobalConfig.MAXIMUM_CD_ROM_NUM.value(Integer.class); if (cdRomSpecs.size() > max) { throw new OperationFailureException(operr("a vm cannot have %s CD-ROMs; at most %s CD-ROMs are allowed", cdRomSpecs.size(), max)); } return cdRomSpecs; } private void instantiateVmFromNewCreate(InstantiateVmFromNewCreatedStruct struct, Completion completion) { VmInstanceSpec spec = buildVmInstanceSpecFromStruct(struct); changeVmStateInDb(VmInstanceStateEvent.starting); CollectionUtils.safeForEach(pluginRgty.getExtensionList(BeforeStartNewCreatedVmExtensionPoint.class), new ForEachFunction<BeforeStartNewCreatedVmExtensionPoint>() { @Override public void run(BeforeStartNewCreatedVmExtensionPoint ext) { ext.beforeStartNewCreatedVm(spec); } }); extEmitter.beforeStartNewCreatedVm(VmInstanceInventory.valueOf(self)); FlowChain chain = getCreateVmWorkFlowChain(getSelfInventory()); setFlowMarshaller(chain); chain.setName(String.format("create-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.then(new NoRollbackFlow() { String __name__ = "after-started-vm-" + self.getUuid(); @Override public void run(FlowTrigger trigger, Map data) { VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString()); changeVmStateInDb(struct.getStrategy() == VmCreationStrategy.InstantStart ? VmInstanceStateEvent.running : VmInstanceStateEvent.paused, ()-> { self.setLastHostUuid(spec.getDestHost().getUuid()); self.setHostUuid(spec.getDestHost().getUuid()); self.setClusterUuid(spec.getDestHost().getClusterUuid()); self.setZoneUuid(spec.getDestHost().getZoneUuid()); self.setHypervisorType(spec.getDestHost().getHypervisorType()); self.setRootVolumeUuid(spec.getDestRootVolume().getUuid()); }); logger.debug(String.format("vm[uuid:%s] is started ...", self.getUuid())); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.afterStartNewCreatedVm(inv); trigger.next(); } }); if (struct.getStrategy() == VmCreationStrategy.CreateStopped) { chain.then(new NoRollbackFlow() { String __name__ = "stop-vm-" + self.getUuid(); @Override public void run(FlowTrigger trigger, Map data) { StopVmInstanceMsg smsg = new StopVmInstanceMsg(); smsg.setVmInstanceUuid(self.getUuid()); smsg.setGcOnFailure(true); smsg.setType(StopVmType.cold.toString()); stopVm(smsg, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } chain.done(new FlowDoneHandler(completion) { @Override public void handle(final Map data) { logger.debug(String.format("vm[uuid:%s] is created ...", self.getUuid())); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToStartNewCreatedVm(VmInstanceInventory.valueOf(self), errCode); dbf.remove(self); // clean up EO, otherwise API-retry may cause conflict if the resource uuid is set try { dbf.eoCleanup(VmInstanceVO.class, CollectionDSL.list(self.getUuid())); } catch (Exception e) { logger.warn(e.getMessage()); } completion.fail(operr(errCode, errCode.getDetails())); } }).start(); } protected void
startVm(final StartVmInstanceMsg msg, final SyncTaskChain taskChain) { startVm(msg, new Completion(taskChain) { @Override public void success() { VmInstanceInventory inv = VmInstanceInventory.valueOf(self); StartVmInstanceReply reply = new StartVmInstanceReply(); reply.setInventory(inv); bus.reply(msg, reply); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { StartVmInstanceReply reply = new StartVmInstanceReply(); reply.setError(err(VmErrors.START_ERROR, errorCode, errorCode.getDetails())); bus.reply(msg, reply); taskChain.next(); } }); } protected void startVm(final APIStartVmInstanceMsg msg, final SyncTaskChain taskChain) { startVm(msg, new Completion(taskChain) { @Override public void success() { VmInstanceInventory inv = VmInstanceInventory.valueOf(self); APIStartVmInstanceEvent evt = new APIStartVmInstanceEvent(msg.getId()); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIStartVmInstanceEvent evt = new APIStartVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.START_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void handle(final APIStartVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("start-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { startVm(msg, chain); } }); } protected void handle(final APIDestroyVmInstanceMsg msg) { final APIDestroyVmInstanceEvent evt = new APIDestroyVmInstanceEvent(msg.getId()); destroyVm(msg, new Completion(msg) { @Override public void success() { bus.publish(evt); } @Override public void fail(ErrorCode errorCode) { evt.setError(errorCode); bus.publish(evt); } }); } private void destroyVm(APIDestroyVmInstanceMsg msg, final Completion completion) { final String issuer = VmInstanceVO.class.getSimpleName(); final List<VmDeletionStruct> ctx = new ArrayList<VmDeletionStruct>(); VmDeletionStruct s = new VmDeletionStruct(); s.setInventory(getSelfInventory()); s.setDeletionPolicy(deletionPolicyMgr.getDeletionPolicy(self.getUuid())); ctx.add(s); FlowChain chain = FlowChainBuilder.newSimpleFlowChain(); chain.setName(String.format("delete-vm-%s", msg.getUuid())); if (msg.getDeletionMode() == APIDeleteMessage.DeletionMode.Permissive) { chain.then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }).then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } else { chain.then(new NoRollbackFlow() { @Override public void run(final FlowTrigger trigger, Map data) { casf.asyncCascade(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx, new Completion(trigger) { @Override public void success() { trigger.next(); } @Override public void fail(ErrorCode errorCode) { trigger.fail(errorCode); } }); } }); } chain.done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, 
issuer, ctx, new NopeCompletion()); completion.success(); } }).error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(err(SysErrors.DELETE_RESOURCE_ERROR, errCode, errCode.getDetails())); } }).start(); } protected void buildHostname(VmInstanceSpec spec) { String defaultHostname = VmSystemTags.HOSTNAME.getTag(self.getUuid()); if (defaultHostname == null) { return; } HostName dhname = new HostName(); dhname.setL3NetworkUuid(self.getDefaultL3NetworkUuid()); dhname.setHostname(VmSystemTags.HOSTNAME.getTokenByTag(defaultHostname, VmSystemTags.HOSTNAME_TOKEN)); spec.getHostnames().add(dhname); } protected VmInstanceSpec buildSpecFromInventory(VmInstanceInventory inv, VmOperation operation) { VmInstanceSpec spec = new VmInstanceSpec(); spec.setUserdataList(buildUserdata()); // remove nics whose L3 network has been deleted List<String> nicUuidToDel = CollectionUtils.transformToList(inv.getVmNics(), new Function<String, VmNicInventory>() { @Override public String call(VmNicInventory arg) { return arg.getL3NetworkUuid() == null ? arg.getUuid() : null; } }); if (!nicUuidToDel.isEmpty()) { dbf.removeByPrimaryKeys(nicUuidToDel, VmNicVO.class); self = dbf.findByUuid(inv.getUuid(), VmInstanceVO.class); inv = VmInstanceInventory.valueOf(self); } spec.setDestNics(inv.getVmNics()); List<VmNicSpec> nicSpecs = new ArrayList<>(); for (VmNicInventory nic : inv.getVmNics()) { List<L3NetworkInventory> l3Invs = new ArrayList<>(); /* if the vm was destroyed and then recovered, the ip addresses of the nic have been deleted */ if (nic.getUsedIps() != null && !nic.getUsedIps().isEmpty()) { for (UsedIpInventory ip : nic.getUsedIps()) { L3NetworkVO l3Vo = dbf.findByUuid(ip.getL3NetworkUuid(), L3NetworkVO.class); if (l3Vo != null) { l3Invs.add(L3NetworkInventory.valueOf(l3Vo)); } } } if (l3Invs.isEmpty()) { L3NetworkVO l3Vo = dbf.findByUuid(nic.getL3NetworkUuid(), L3NetworkVO.class); if (l3Vo != null) { l3Invs.add(L3NetworkInventory.valueOf(l3Vo)); } List<String> secondaryNetworksList = new DualStackNicSecondaryNetworksOperator().getSecondaryNetworksByVmUuidNic(inv.getUuid(), nic.getL3NetworkUuid()); if (secondaryNetworksList != null && !secondaryNetworksList.isEmpty()) { for (String uuid : secondaryNetworksList) { l3Vo = dbf.findByUuid(uuid, L3NetworkVO.class); if (l3Vo != null) { l3Invs.add(L3NetworkInventory.valueOf(l3Vo)); } } } } nicSpecs.add(new VmNicSpec(l3Invs)); } spec.setL3Networks(nicSpecs); String huuid = inv.getHostUuid() == null ? inv.getLastHostUuid() : inv.getHostUuid(); if (huuid != null) { HostVO hvo = dbf.findByUuid(huuid, HostVO.class); if (hvo != null) { spec.setDestHost(HostInventory.valueOf(hvo)); } } VolumeInventory rootVol = inv.getRootVolume(); Optional.ofNullable(rootVol).ifPresent(it -> { spec.setDestRootVolume(it); spec.setRequiredPrimaryStorageUuidForRootVolume(it.getPrimaryStorageUuid()); }); spec.setDestDataVolumes(getAllDataVolumes(inv)); // When starting an imported VM, we might not have an image UUID.
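// If the ImageVO row has been deleted, the code below falls back to the image EO record so the spec still carries the image metadata.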
if (inv.getImageUuid() != null) { ImageVO imgvo = dbf.findByUuid(inv.getImageUuid(), ImageVO.class); ImageInventory imginv = null; if (imgvo == null) { // the image has been deleted, use EO instead ImageEO imgeo = dbf.findByUuid(inv.getImageUuid(), ImageEO.class); imginv = ImageInventory.valueOf(imgeo); } else { imginv = ImageInventory.valueOf(imgvo); } spec.getImageSpec().setInventory(imginv); } spec.setVmInventory(inv); buildHostname(spec); List<VmCdRomVO> cdRomVOS = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, inv.getUuid()) .orderBy(VmCdRomVO_.deviceId, SimpleQuery.Od.ASC) .list(); for (VmCdRomVO cdRomVO : cdRomVOS) { CdRomSpec cdRomSpec = new CdRomSpec(); cdRomSpec.setUuid(cdRomVO.getUuid()); String isoUuid = cdRomVO.getIsoUuid(); if (isoUuid != null) { if(dbf.isExist(isoUuid, ImageVO.class)) { cdRomSpec.setImageUuid(isoUuid); cdRomSpec.setInstallPath(cdRomVO.getIsoInstallPath()); } else { //TODO logger.warn(String.format("iso[uuid:%s] is deleted, however, the VM[uuid:%s] still has it attached", isoUuid, self.getUuid())); } } cdRomSpec.setDeviceId(cdRomVO.getDeviceId()); spec.getCdRomSpecs().add(cdRomSpec); } spec.setCurrentVmOperation(operation); selectBootOrder(spec); spec.setConsolePassword(VmSystemTags.CONSOLE_PASSWORD. getTokenByResourceUuid(self.getUuid(), VmSystemTags.CONSOLE_PASSWORD_TOKEN)); return spec; } private List<VolumeInventory> getAllDataVolumes(VmInstanceInventory inv) { List<VolumeInventory> dataVols = inv.getAllVolumes().stream() .filter(it -> !it.getUuid().equals(inv.getRootVolumeUuid())) .collect(Collectors.toList()); List<BuildVolumeSpecExtensionPoint> exts = pluginRgty.getExtensionList(BuildVolumeSpecExtensionPoint.class); exts.forEach(e -> dataVols.addAll(e.supplyAdditionalVolumesForVmInstance(inv.getUuid()))); return dataVols; } protected void rebootVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); ErrorCode preReboot = extEmitter.preRebootVm(inv); if (preReboot != null) { completion.fail(preReboot); return; } final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Reboot); spec.setDestHost(HostInventory.valueOf(dbf.findByUuid(self.getHostUuid(), HostVO.class))); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.rebooting); extEmitter.beforeRebootVm(VmInstanceInventory.valueOf(self)); spec.setMessage(msg); FlowChain chain = getRebootVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("reboot-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { self = changeVmStateInDb(VmInstanceStateEvent.running); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.afterRebootVm(inv); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { extEmitter.failedToRebootVm(VmInstanceInventory.valueOf(self), errCode); if (HostErrors.FAILED_TO_STOP_VM_ON_HYPERVISOR.isEqual(errCode.getCode()) || HostErrors.FAILED_TO_START_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { self = refreshVO(); if ((originState == VmInstanceState.Running || originState == 
VmInstanceState.Paused) && self.getState() == VmInstanceState.Stopped) { returnHostCpacity(spec.getDestHost().getUuid()); } completion.fail(errCode); } }); } else { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } } }).start(); } protected void returnHostCpacity(String hostUuid) { ReturnHostCapacityMsg rmsg = new ReturnHostCapacityMsg(); rmsg.setCpuCapacity(self.getCpuNum()); rmsg.setMemoryCapacity(self.getMemorySize()); rmsg.setHostUuid(hostUuid); rmsg.setServiceId(bus.makeLocalServiceId(HostAllocatorConstant.SERVICE_ID)); bus.send(rmsg); } protected void rebootVm(final APIRebootVmInstanceMsg msg, final SyncTaskChain taskChain) { rebootVm(msg, new Completion(taskChain) { @Override public void success() { APIRebootVmInstanceEvent evt = new APIRebootVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIRebootVmInstanceEvent evt = new APIRebootVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.REBOOT_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void handle(final APIRebootVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("reboot-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { rebootVm(msg, chain); } }); } protected void stopVm(final APIStopVmInstanceMsg msg, final SyncTaskChain taskChain) { stopVm(msg, new Completion(taskChain) { @Override public void success() { APIStopVmInstanceEvent evt = new APIStopVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIStopVmInstanceEvent evt = new APIStopVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.STOP_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } private void stopVm(final Message msg, final Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } if (self.getState() == VmInstanceState.Stopped) { completion.success(); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); ErrorCode preStop = extEmitter.preStopVm(inv); if (preStop != null) { completion.fail(preStop); return; } final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Stop); spec.setMessage(msg); if (msg instanceof StopVmInstanceMsg) { spec.setGcOnStopFailure(((StopVmInstanceMsg) msg).isGcOnFailure()); } if (msg instanceof ReleaseResourceMessage) { spec.setIgnoreResourceReleaseFailure(((ReleaseResourceMessage) msg).isIgnoreResourceReleaseFailure()); } final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.stopping); extEmitter.beforeStopVm(VmInstanceInventory.valueOf(self)); FlowChain chain = getStopVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("stop-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { self = changeVmStateInDb(VmInstanceStateEvent.stopped); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); 
extEmitter.afterStopVm(inv); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { VmInstanceInventory inv = VmInstanceInventory.valueOf(self); extEmitter.failedToStopVm(inv, errCode); if (HostErrors.FAILED_TO_STOP_VM_ON_HYPERVISOR.isEqual(errCode.getCode())) { checkState(originalCopy.getHostUuid(), new NoErrorCompletion(completion) { @Override public void done() { completion.fail(errCode); } }); } else { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } } }).start(); } protected void handle(final APIStopVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getName() { return String.format("stop-vm-%s", self.getUuid()); } @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { stopVm(msg, chain); } }); } protected void pauseVm(final APIPauseVmInstanceMsg msg, final SyncTaskChain taskChain) { pauseVm(msg, new Completion(taskChain) { @Override public void success() { APIPauseVmInstanceEvent evt = new APIPauseVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIPauseVmInstanceEvent evt = new APIPauseVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.SUSPEND_ERROR, errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void pauseVm(final Message msg, Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } if (self.getState() == VmInstanceState.Paused) { completion.success(); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Pause); spec.setMessage(msg); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.pausing); FlowChain chain = getPauseVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("pause-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map Data) { self = changeVmStateInDb(VmInstanceStateEvent.paused); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } }).start(); } protected void handle(final APIPauseVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { pauseVm(msg, chain); } @Override public String getName() { return String.format("pause-vm-%s", msg.getVmInstanceUuid()); } }); } protected void resumeVm(final APIResumeVmInstanceMsg msg, final SyncTaskChain taskChain) { resumeVm(msg, new Completion(taskChain) { @Override public void success() { APIResumeVmInstanceEvent evt = new APIResumeVmInstanceEvent(msg.getId()); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); taskChain.next(); } @Override public void fail(ErrorCode errorCode) { APIResumeVmInstanceEvent evt = new APIResumeVmInstanceEvent(msg.getId()); evt.setError(err(VmErrors.RESUME_ERROR, 
errorCode, errorCode.getDetails())); bus.publish(evt); taskChain.next(); } }); } protected void resumeVm(final Message msg, Completion completion) { refreshVO(); ErrorCode allowed = validateOperationByState(msg, self.getState(), null); if (allowed != null) { completion.fail(allowed); return; } VmInstanceInventory inv = VmInstanceInventory.valueOf(self); final VmInstanceSpec spec = buildSpecFromInventory(inv, VmOperation.Resume); spec.setMessage(msg); final VmInstanceState originState = self.getState(); changeVmStateInDb(VmInstanceStateEvent.resuming); FlowChain chain = getResumeVmWorkFlowChain(inv); setFlowMarshaller(chain); chain.setName(String.format("resume-vm-%s", self.getUuid())); chain.getData().put(VmInstanceConstant.Params.VmInstanceSpec.toString(), spec); chain.done(new FlowDoneHandler(completion) { @Override public void handle(Map Data) { self = changeVmStateInDb(VmInstanceStateEvent.running); completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(final ErrorCode errCode, Map data) { self.setState(originState); self = dbf.updateAndRefresh(self); completion.fail(errCode); } }).start(); } protected void handle(final APIResumeVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { resumeVm(msg, chain); } @Override public String getName() { return String.format("resume-vm-%s", msg.getVmInstanceUuid()); } }); } private void handle(final APIReimageVmInstanceMsg msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { reimageVmInstance(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return "reimage-vminstance"; } }); } private void handle(final APIDeleteVmCdRomMsg msg) { APIDeleteVmCdRomEvent event = new APIDeleteVmCdRomEvent(msg.getId()); DeleteVmCdRomMsg deleteVmCdRomMsg = new DeleteVmCdRomMsg(); deleteVmCdRomMsg.setVmInstanceUuid(msg.getVmInstanceUuid()); deleteVmCdRomMsg.setCdRomUuid(msg.getUuid()); bus.makeLocalServiceId(deleteVmCdRomMsg, VmInstanceConstant.SERVICE_ID); bus.send(deleteVmCdRomMsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { event.setInventory(VmInstanceInventory.valueOf(self)); } else { event.setError(reply.getError()); } bus.publish(event); } }); } private void handle(final DeleteVmCdRomMsg msg) { DeleteVmCdRomReply reply = new DeleteVmCdRomReply(); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(final SyncTaskChain chain) { ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR); if (allowed != null) { reply.setError(allowed); bus.reply(msg, reply); chain.next(); return; } deleteVmCdRom(msg.getCdRomUuid(), new Completion(chain) { @Override public void success() { bus.reply(msg, reply); chain.next(); } @Override public void fail(ErrorCode errorCode) { reply.setError(errorCode); bus.reply(msg, reply); chain.next(); } }); } @Override public String getName() { return String.format("delete-vm-cdRom-%s", msg.getCdRomUuid()); } }); } private void deleteVmCdRom(String cdRomUuid, Completion completion) { boolean exist = dbf.isExist(cdRomUuid, VmCdRomVO.class); if (!exist) { completion.success(); return; } dbf.removeByPrimaryKey(cdRomUuid, 
VmCdRomVO.class); completion.success(); } private void doCreateVmCdRom(CreateVmCdRomMsg msg, ReturnValueCompletion<VmCdRomInventory> completion) { long vmCdRomNum = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, msg.getVmInstanceUuid()) .count(); int max = VmGlobalConfig.MAXIMUM_CD_ROM_NUM.value(Integer.class); if (max <= vmCdRomNum) { completion.fail(operr("VM[uuid:%s] can only add %s CDROMs", msg.getVmInstanceUuid(), max)); return; } if (msg.getIsoUuid() != null) { boolean targetIsoUsed = Q.New(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, msg.getVmInstanceUuid()) .eq(VmCdRomVO_.isoUuid, msg.getIsoUuid()) .isExists(); if (targetIsoUsed) { completion.fail(operr("VM[uuid:%s] already has an ISO[uuid:%s] attached", msg.getVmInstanceUuid(), msg.getIsoUuid())); return; } } List<Integer> deviceIds = Q.New(VmCdRomVO.class) .select(VmCdRomVO_.deviceId) .eq(VmCdRomVO_.vmInstanceUuid, msg.getVmInstanceUuid()) .listValues(); BitSet full = new BitSet(deviceIds.size() + 1); deviceIds.forEach(full::set); int targetDeviceId = full.nextClearBit(0); if (targetDeviceId >= max) { completion.fail(operr("VM[uuid:%s] can only add %s CDROMs", msg.getVmInstanceUuid(), max)); return; } VmCdRomVO cdRomVO = new VmCdRomVO(); String cdRomUuid = msg.getResourceUuid() != null ? msg.getResourceUuid() : Platform.getUuid(); cdRomVO.setUuid(cdRomUuid); cdRomVO.setDeviceId(targetDeviceId); cdRomVO.setIsoUuid(msg.getIsoUuid()); cdRomVO.setVmInstanceUuid(msg.getVmInstanceUuid()); cdRomVO.setName(msg.getName()); String acntUuid = Account.getAccountUuidOfResource(msg.getVmInstanceUuid()); cdRomVO.setAccountUuid(acntUuid); cdRomVO.setDescription(msg.getDescription()); cdRomVO = dbf.persistAndRefresh(cdRomVO); completion.success(VmCdRomInventory.valueOf(cdRomVO)); } private void handle(final APICreateVmCdRomMsg msg) { APICreateVmCdRomEvent event = new APICreateVmCdRomEvent(msg.getId()); CreateVmCdRomMsg cmsg = new CreateVmCdRomMsg(); cmsg.setResourceUuid(msg.getResourceUuid()); cmsg.setName(msg.getName()); cmsg.setIsoUuid(msg.getIsoUuid()); cmsg.setVmInstanceUuid(msg.getVmInstanceUuid()); cmsg.setDescription(msg.getDescription()); bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, cmsg.getVmInstanceUuid()); bus.send(cmsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { event.setError(reply.getError()); bus.publish(event); return; } CreateVmCdRomReply r1 = reply.castReply(); event.setInventory(r1.getInventory()); bus.publish(event); } }); } private void handle(APIUpdateVmCdRomMsg msg) { APIUpdateVmCdRomEvent event = new APIUpdateVmCdRomEvent(msg.getId()); VmCdRomVO vmCdRomVO = dbf.findByUuid(msg.getUuid(), VmCdRomVO.class); boolean update = false; if (msg.getName() != null) { vmCdRomVO.setName(msg.getName()); update = true; } if (msg.getDescription() != null ) { vmCdRomVO.setDescription(msg.getDescription()); update = true; } if (update) { vmCdRomVO = dbf.updateAndRefresh(vmCdRomVO); } event.setInventory(VmCdRomInventory.valueOf(vmCdRomVO)); bus.publish(event); } private void handle(APISetVmInstanceDefaultCdRomMsg msg) { APISetVmInstanceDefaultCdRomEvent event = new APISetVmInstanceDefaultCdRomEvent(msg.getId()); thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { setVmInstanceDefaultCdRom(msg.getUuid(), new Completion(chain) { @Override public void success() { VmCdRomVO cdRomVO = dbf.findByUuid(msg.getUuid(), VmCdRomVO.class); 
event.setInventory(VmCdRomInventory.valueOf(cdRomVO)); bus.publish(event); chain.next(); } @Override public void fail(ErrorCode errorCode) { event.setError(errorCode); bus.publish(event); chain.next(); } }); } @Override public String getName() { return String.format("set-vmInstance-%s-default-cdRom-%s", msg.getVmInstanceUuid(), msg.getUuid()); } }); } private void setVmInstanceDefaultCdRom(String vmCdRomUuid, Completion completion) { // update target cdRom deviceId // update the source cdRom deviceId new SQLBatch(){ @Override protected void scripts() { List<VmCdRomVO> cdRomVOS = q(VmCdRomVO.class) .eq(VmCdRomVO_.vmInstanceUuid, self.getUuid()) .orderBy(VmCdRomVO_.deviceId, SimpleQuery.Od.ASC) .list(); Map<String, Integer> cdRomUUidDeviceIdMap = cdRomVOS.stream().collect(Collectors.toMap(VmCdRomVO::getUuid, i -> i.getDeviceId())); int deviceId = cdRomUUidDeviceIdMap.get(vmCdRomUuid); VmCdRomVO beforeDefaultCdRomVO = null; for (VmCdRomVO vmCdRomVO : cdRomVOS) { if (vmCdRomVO.getDeviceId() == 0) { beforeDefaultCdRomVO = vmCdRomVO; sql(VmCdRomVO.class) .eq(VmCdRomVO_.uuid, vmCdRomVO.getUuid()) .set(VmCdRomVO_.deviceId, VmInstanceConstant.MAXIMUM_CDROM_NUMBER) .update(); continue; } if (vmCdRomUuid.equals(vmCdRomVO.getUuid())) { sql(VmCdRomVO.class) .eq(VmCdRomVO_.uuid, vmCdRomVO.getUuid()) .set(VmCdRomVO_.deviceId, 0) .update(); continue; } } if (beforeDefaultCdRomVO != null) { sql(VmCdRomVO.class) .eq(VmCdRomVO_.uuid, beforeDefaultCdRomVO.getUuid()) .set(VmCdRomVO_.deviceId, deviceId) .update(); } } }.execute(); completion.success(); } private void reimageVmInstance(final APIReimageVmInstanceMsg msg, NoErrorCompletion completion) { final APIReimageVmInstanceEvent evt = new APIReimageVmInstanceEvent(msg.getId()); String rootVolumeUuid = Q.New(VmInstanceVO.class).select(VmInstanceVO_.rootVolumeUuid) .eq(VmInstanceVO_.uuid, msg.getVmInstanceUuid()) .findValue(); ReimageVmInstanceMsg rmsg = new ReimageVmInstanceMsg(); rmsg.setVmInstanceUuid(msg.getVmInstanceUuid()); rmsg.setAccountUuid(msg.getSession().getAccountUuid()); bus.makeTargetServiceIdByResourceUuid(rmsg, VmInstanceConstant.SERVICE_ID, msg.getVmInstanceUuid()); ReimageVolumeOverlayMsg omsg = new ReimageVolumeOverlayMsg(); omsg.setMessage(rmsg); omsg.setVolumeUuid(rootVolumeUuid); bus.makeTargetServiceIdByResourceUuid(omsg, VolumeConstant.SERVICE_ID, rootVolumeUuid); bus.send(omsg, new CloudBusCallBack(completion, evt) { @Override public void run(MessageReply reply) { if (reply.isSuccess()){ self = refreshVO(); VmInstanceInventory inv = VmInstanceInventory.valueOf(self); evt.setInventory(inv); bus.publish(evt); } else { evt.setError(reply.getError()); bus.publish(evt); } completion.done(); } }); } private void handle(ReimageVmInstanceMsg msg){ ReimageVmInstanceReply reply = new ReimageVmInstanceReply(); self = refreshVO(); VolumeVO rootVolume = dbf.findByUuid(self.getRootVolumeUuid(), VolumeVO.class); VolumeInventory rootVolumeInventory = VolumeInventory.valueOf(rootVolume); // check vm stopped { if (self.getState() != VmInstanceState.Stopped) { throw new ApiMessageInterceptionException(err( VmErrors.RE_IMAGE_VM_NOT_IN_STOPPED_STATE, "unable to reset volume[uuid:%s] to origin image[uuid:%s]," + " the vm[uuid:%s] volume attached to is not in Stopped state, current state is %s", rootVolume.getUuid(), rootVolume.getRootImageUuid(), rootVolume.getVmInstanceUuid(), self.getState() )); } } // check image cache to ensure image type is not ISO { SimpleQuery<ImageCacheVO> q = dbf.createQuery(ImageCacheVO.class); q.select(ImageCacheVO_.mediaType); 
q.add(ImageCacheVO_.imageUuid, Op.EQ, rootVolume.getRootImageUuid()); q.setLimit(1); ImageMediaType imageMediaType = q.findValue(); if (imageMediaType == null) { throw new OperationFailureException(err( VmErrors.RE_IMAGE_CANNOT_FIND_IMAGE_CACHE, "unable to reset volume[uuid:%s] to origin image[uuid:%s]," + " cannot find image cache.", rootVolume.getUuid(), rootVolume.getRootImageUuid() )); } if (imageMediaType.toString().equals("ISO")) { throw new OperationFailureException(err( VmErrors.RE_IMAGE_IMAGE_MEDIA_TYPE_SHOULD_NOT_BE_ISO, "unable to reset volume[uuid:%s] to origin image[uuid:%s]," + " for image type is ISO", rootVolume.getUuid(), rootVolume.getRootImageUuid() )); } } // do the re-image op FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("reset-root-volume-%s-from-image-%s", rootVolume.getUuid(), rootVolume.getRootImageUuid())); chain.then(new ShareFlow() { VolumeVO vo = rootVolume; @Override public void setup() { flow(new NoRollbackFlow() { String __name__ = "mark-root-volume-as-snapshot-on-primary-storage"; @Override public void run(final FlowTrigger trigger, Map data) { MarkRootVolumeAsSnapshotMsg gmsg = new MarkRootVolumeAsSnapshotMsg(); rootVolumeInventory.setDescription(String.format("save snapshot for reimage vm [uuid:%s]", msg.getVmInstanceUuid())); rootVolumeInventory.setName(String.format("reimage-vm-point-%s-%s", msg.getVmInstanceUuid(), TimeUtils.getCurrentTimeStamp("yyyyMMddHHmmss"))); gmsg.setVolume(rootVolumeInventory); gmsg.setAccountUuid(msg.getAccountUuid()); bus.makeLocalServiceId(gmsg, VolumeSnapshotConstant.SERVICE_ID); bus.send(gmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { trigger.next(); } else { trigger.fail(reply.getError()); } } }); } }); flow(new NoRollbackFlow() { String __name__ = "reset-root-volume-from-image-on-primary-storage"; @Override public void run(final FlowTrigger trigger, Map data) { ReInitRootVolumeFromTemplateOnPrimaryStorageMsg rmsg = new ReInitRootVolumeFromTemplateOnPrimaryStorageMsg(); rmsg.setVolume(rootVolumeInventory); bus.makeTargetServiceIdByResourceUuid(rmsg, PrimaryStorageConstant.SERVICE_ID, rootVolumeInventory.getPrimaryStorageUuid()); bus.send(rmsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { ReInitRootVolumeFromTemplateOnPrimaryStorageReply re = (ReInitRootVolumeFromTemplateOnPrimaryStorageReply) reply; vo.setInstallPath(re.getNewVolumeInstallPath()); vo = dbf.updateAndRefresh(vo); trigger.next(); } else { trigger.fail(reply.getError()); } } }); } }); flow(new NoRollbackFlow() { String __name__ = "sync-volume-size-after-reimage"; @Override public void run(final FlowTrigger trigger, Map data) { SyncVolumeSizeMsg smsg = new SyncVolumeSizeMsg(); smsg.setVolumeUuid(vo.getUuid()); bus.makeTargetServiceIdByResourceUuid(smsg, VolumeConstant.SERVICE_ID, rootVolumeInventory.getUuid()); bus.send(smsg, new CloudBusCallBack(msg) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } vo.setSize(((SyncVolumeSizeReply) reply).getSize()); trigger.next(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "return-primary-storage-capacity"; @Override public void run(FlowTrigger trigger, Map data) { if (vo.getSize() == rootVolumeInventory.getSize()) { trigger.next(); return; } IncreasePrimaryStorageCapacityMsg imsg = new IncreasePrimaryStorageCapacityMsg(); 
imsg.setPrimaryStorageUuid(rootVolume.getPrimaryStorageUuid()); imsg.setDiskSize(rootVolumeInventory.getSize() - vo.getSize()); bus.makeTargetServiceIdByResourceUuid(imsg, PrimaryStorageConstant.SERVICE_ID, rootVolume.getPrimaryStorageUuid()); bus.send(imsg); trigger.next(); } }); done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { dbf.update(vo); List<AfterReimageVmInstanceExtensionPoint> list = pluginRgty.getExtensionList( AfterReimageVmInstanceExtensionPoint.class); for (AfterReimageVmInstanceExtensionPoint ext : list) { ext.afterReimageVmInstance(rootVolumeInventory); } self = dbf.reload(self); bus.reply(msg, reply); } }); error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { logger.warn(String.format("failed to restore volume[uuid:%s] to image[uuid:%s], %s", rootVolumeInventory.getUuid(), rootVolumeInventory.getRootImageUuid(), errCode)); reply.setError(errCode); bus.reply(msg, reply); } }); } }).start(); } private void handle(OverlayMessage msg) { thdf.chainSubmit(new ChainTask(msg) { @Override public String getSyncSignature() { return syncThreadName; } @Override public void run(SyncTaskChain chain) { doOverlayMessage(msg, new NoErrorCompletion(chain) { @Override public void done() { chain.next(); } }); } @Override public String getName() { return "overlay-message"; } }); } private void doOverlayMessage(OverlayMessage msg, NoErrorCompletion noErrorCompletion) { bus.send(msg.getMessage(), new CloudBusCallBack(msg, noErrorCompletion) { @Override public void run(MessageReply reply) { bus.reply(msg, reply); noErrorCompletion.done(); } }); } }
fix a potential nullptr Signed-off-by: Qun Li <[email protected]>
compute/src/main/java/org/zstack/compute/vm/VmInstanceBase.java
fix a potential nullptr
<ide><path>compute/src/main/java/org/zstack/compute/vm/VmInstanceBase.java <ide> import org.zstack.header.vm.VmInstanceConstant.Params; <ide> import org.zstack.header.vm.VmInstanceConstant.VmOperation; <ide> import org.zstack.header.vm.VmInstanceDeletionPolicyManager.VmInstanceDeletionPolicy; <add>import org.zstack.header.vm.VmInstanceSpec.CdRomSpec; <ide> import org.zstack.header.vm.VmInstanceSpec.HostName; <ide> import org.zstack.header.vm.VmInstanceSpec.IsoSpec; <del>import org.zstack.header.vm.VmInstanceSpec.CdRomSpec; <ide> import org.zstack.header.vm.cdrom.*; <ide> import org.zstack.header.volume.*; <ide> import org.zstack.identity.Account; <ide> import java.util.*; <ide> import java.util.stream.Collectors; <ide> <add>import static org.zstack.core.Platform.err; <ide> import static org.zstack.core.Platform.operr; <del>import static org.zstack.core.Platform.err; <del>import static java.util.Arrays.asList; <ide> import static org.zstack.utils.CollectionDSL.*; <ide> <ide> <ide> <ide> @Override <ide> public String getName() { <del> return String.format("vm-%s-state-change-on-the-host-%s", self.getUuid(), msg.getHostUuid()); <add> return String.format("vm-%s-state-change-on-the-host-%s", msg.getVmInstanceUuid(), msg.getHostUuid()); <ide> } <ide> }); <ide> }
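A minimal, self-contained Java sketch of the null-safety pattern this commit applies; the class and interface names below are hypothetical stand-ins, not ZStack's real types. The fix builds the sync-task name from the UUID carried by the incoming message instead of from the handler's mutable self object, which may still be null when the name is computed.

public class NullSafeTaskName {
    // Hypothetical stand-in for the real message type.
    interface StateChangeMsg {
        String getVmInstanceUuid();
        String getHostUuid();
    }

    // Before the fix the name used self.getUuid(); if the VO had not been
    // loaded yet, that dereference threw a NullPointerException. The message
    // always carries the UUID, so it is the safe source for the name.
    static String taskName(StateChangeMsg msg) {
        return String.format("vm-%s-state-change-on-the-host-%s",
                msg.getVmInstanceUuid(), msg.getHostUuid());
    }

    public static void main(String[] args) {
        StateChangeMsg msg = new StateChangeMsg() {
            public String getVmInstanceUuid() { return "vm-123"; }
            public String getHostUuid() { return "host-456"; }
        };
        System.out.println(taskName(msg)); // vm-vm-123-state-change-on-the-host-host-456
    }
}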
Java
bsd-3-clause
d97052d6bef316f9d093d7f7799de5a285feaef2
0
NCIP/cab2b,NCIP/cab2b,NCIP/cab2b
package edu.wustl.cab2b.client.ui; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.Dimension; import java.awt.GradientPaint; import java.awt.Image; import java.awt.geom.Point2D; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.border.EmptyBorder; import org.jdesktop.swingx.JXPanel; import org.jdesktop.swingx.JXTitledPanel; import org.jdesktop.swingx.painter.gradient.BasicGradientPainter; import org.openide.util.Utilities; import edu.wustl.cab2b.client.ui.controls.Cab2bButton; import edu.wustl.cab2b.client.ui.controls.Cab2bPanel; import edu.wustl.cab2b.client.ui.controls.Cab2bTitledPanel; import edu.wustl.cab2b.client.ui.dag.MainDagPanel; import edu.wustl.cab2b.client.ui.main.AbstractTypePanel; import edu.wustl.cab2b.client.ui.main.IComponent; import edu.wustl.cab2b.client.ui.query.ClientPathFinder; import edu.wustl.cab2b.client.ui.query.IClientQueryBuilderInterface; import edu.wustl.cab2b.client.ui.query.IPathFinder; import edu.wustl.cab2b.client.ui.util.CommonUtils; import edu.wustl.cab2b.client.ui.util.CommonUtils.DagImageConstants; import edu.wustl.common.querysuite.exceptions.MultipleRootsException; import edu.wustl.common.querysuite.queryobject.ICondition; import edu.wustl.common.querysuite.queryobject.IConstraintEntity; import edu.wustl.common.querysuite.queryobject.IExpression; import edu.wustl.common.querysuite.queryobject.IExpressionId; import edu.wustl.common.querysuite.queryobject.IRule; import edu.wustl.common.querysuite.queryobject.RelationalOperator; /** * This is the panel for the Add limit tab from the main search dialog. The * class is also an instance of the {@link ContentPanel},so that child * component can cause this panel to refresh in a way required by this panel. * * @author mahesh_iyer */ public class AddLimitPanel extends ContentPanel implements IUpdateAddLimitUIInterface { private static final long serialVersionUID = 1L; /** The titled panel for the top panel. */ private JXTitledPanel m_topCenterPanel = null; private AbstractSearchResultPanel m_searchResultPanel; /** Scroll pane for the top panel. */ private JScrollPane m_scrollPane = null; /** * The dynamically generated panel for the selected class from an * advanced/category search. */ private JXPanel m_ContentForTopPanel = null; /** The advanced search panel along with the results panel. */ private AbstractCategorySearchPanel categSearchPanel = null; /** The titled panel for the bottom panel. */ private JXTitledPanel m_bottomCenterPanel = null; /** The simple view for the rules added. This is to be replaced by the DAG. */ private MainDagPanel m_contentForBottomCenterPanel = null; /** Split pane between the top and center titled panels. */ private JSplitPane m_innerPane = null; /** Split pane between the LHS and RHS sections of the main panel. */ private JSplitPane m_outerPane = null; /** * Default constructor */ AddLimitPanel() { initGUI(); } /** * Method initializes the panel by appropriately laying out child * components. */ private void initGUI() { this.setLayout(new BorderLayout()); /* * Pass the reference , so that the child can cause the parent to * refresh for any events triggered in the child. 
*/ if (categSearchPanel == null) categSearchPanel = new AddLimitCategorySearchPanel(this); /* The top center titled panel */ m_topCenterPanel = new Cab2bTitledPanel("Define Search Rules"); m_topCenterPanel.setTitleForeground(Color.BLACK); /* Set a gradient painter for the title panel */ GradientPaint gp = new GradientPaint(new Point2D.Double(.3d, 0), new Color(185, 211, 238), new Point2D.Double(.7, 0), Color.WHITE); m_topCenterPanel.setTitlePainter(new BasicGradientPainter(gp)); /* * Set the preferred size for the top panel, as against the preferred * size for the contentPanel/child panel itself. Doing the later has the * undesired result of content panel getting clipped even if the actual * length is more than the set preferred size, and the vertical scroll * bars never come into existence. Again, size set based on usability * specs. */ m_topCenterPanel.setPreferredSize(new Dimension(546, 341)); m_topCenterPanel.setBorder(new EmptyBorder(1, 1, 1, 1)); /* * JXTitledPanels work better with only panel as child, and hence the * following panel. */ this.m_ContentForTopPanel = new Cab2bPanel(); this.m_ContentForTopPanel.setBorder(new EmptyBorder(1, 1, 1, 1)); this.m_scrollPane = new JScrollPane(this.m_ContentForTopPanel); this.m_scrollPane.getViewport().setBackground(Color.WHITE); this.m_topCenterPanel.add(this.m_scrollPane); /* The bottom center titled panel.Initialization on the same lines. */ m_bottomCenterPanel = new Cab2bTitledPanel("Limit Set"); m_bottomCenterPanel.setTitleForeground(Color.BLACK); m_bottomCenterPanel.setBorder(new EmptyBorder(1, 1, 1, 1)); m_bottomCenterPanel.setTitlePainter(new BasicGradientPainter(gp)); /** * Generate ImageMap */ Map<DagImageConstants, Image> imageMap = new HashMap<DagImageConstants, Image>(); imageMap.put(DagImageConstants.SelectIcon, Utilities.loadImage("select_icon.gif")); imageMap.put(DagImageConstants.selectMOIcon, Utilities.loadImage("select_icon_mo.gif")); imageMap.put(DagImageConstants.ArrowSelectIcon, Utilities.loadImage("arrow_icon.gif")); imageMap.put(DagImageConstants.ArrowSelectMOIcon, Utilities.loadImage("arrow_icon_mo.gif")); imageMap.put(DagImageConstants.ParenthesisIcon, Utilities.loadImage("parenthesis_icon.gif")); imageMap.put(DagImageConstants.ParenthesisMOIcon, Utilities.loadImage("parenthesis_icon_mo.gif")); imageMap.put(DagImageConstants.DocumentPaperIcon, Utilities.loadImage("paper_grid.png")); imageMap.put(DagImageConstants.PortImageIcon, Utilities.loadImage("port.gif")); IPathFinder pathFinder = new ClientPathFinder(); m_contentForBottomCenterPanel = new MainDagPanel(this, imageMap, pathFinder, false); m_bottomCenterPanel.add(m_contentForBottomCenterPanel); /* Add components to the conetent pane. 
*/ this.m_innerPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, m_topCenterPanel, m_bottomCenterPanel); //this.m_innerPane.setDividerLocation(0.5D); this.m_innerPane.setOneTouchExpandable(false); this.m_innerPane.setBorder(null); this.m_innerPane.setDividerSize(4); this.m_outerPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, categSearchPanel, this.m_innerPane); this.m_outerPane.setDividerLocation(0.2D); this.m_outerPane.setOneTouchExpandable(false); this.m_outerPane.setBorder(null); this.m_outerPane.setDividerSize(4); this.m_outerPane.setDividerLocation(242); this.add(BorderLayout.CENTER, this.m_outerPane); } /** * Method to add search result panel * @param resultPanel */ public void addResultsPanel(AbstractSearchResultPanel resultPanel) { categSearchPanel.m_searchPanel.addResultsPanel(resultPanel); } /** * This method takes the newly added expression and renders the node * accordingly * * @param expressionId */ public void refreshBottomCenterPanel(IExpressionId expressionId) { // Here code to handle adding new limit will appear try { m_contentForBottomCenterPanel.updateGraph(expressionId); } catch (MultipleRootsException e) { CommonUtils.handleException(e, this, true, true, false, false); } this.updateUI(); } /** * The method is a custom implementation for the refresh method from the * {@link ContentPanel} interface. Custom implementation is to simply set * the provided panel as the content panel for the top titled panel. * * @param panelToBeRefreshed * The panel to be refreshed. * * @param strClassNameAsTitle * The class/category name for which the dynamic UI is generated. */ public void refresh(JXPanel[] arrPanel, String strClassNameAsTitle) { /* Set the title for the top titled panel. */ this.m_topCenterPanel.setTitle("Define Search Rules '" + strClassNameAsTitle + "'"); this.m_ContentForTopPanel.removeAll(); int length = arrPanel.length; /* Add the individual panels to the top content panel. */ for (int i = 0; i < length; i++) { if (arrPanel[i] != null) { this.m_ContentForTopPanel.add("br", arrPanel[i]); } } validate(); } /** * The method returns a reference to the bottom content panel. This is * invoked by the main panel in order to form the query. * * @return JXPanel The bottom content panel. 
*/ public JXPanel getBottomCenterPanel() { return this.m_contentForBottomCenterPanel; } public void setQueryObject(IClientQueryBuilderInterface query) { m_contentForBottomCenterPanel.setQueryObject(query); } public void editAddLimitUI(IExpression expression) { IConstraintEntity entity = expression.getConstraintEntity(); JXPanel[] panels = m_searchResultPanel.getEditLimitPanels(expression); // passing appropriate class name refresh(panels, edu.wustl.cab2b.common.util.Utility.getDisplayName(entity.getDynamicExtensionsEntity())); IRule rule = (IRule) expression.getOperand(0); int totalConditions = rule.size(); // Populate panels with corresponding value for (int i = 0; i < totalConditions; i++) { ICondition condition = rule.getCondition(i); setValueForAttribute(panels, condition); } validate(); } private void setValueForAttribute(JXPanel[] panels, ICondition condition) { //Don't consider panel 1 and panel end for getting attribute values //because first and last panels are Edit Limit button panels for (int i = 1; i < panels.length - 1; i++) { IComponent panel = (IComponent) panels[i]; String panelAttributeName = panel.getAttributeName(); int compareVal = panelAttributeName.compareToIgnoreCase(condition.getAttribute().getName()); if (0 == compareVal) { RelationalOperator operator = condition.getRelationalOperator(); panel.setCondition(edu.wustl.cab2b.client.ui.query.Utility.displayStringForRelationalOperator(operator)); ArrayList<String> values = (ArrayList<String>) condition.getValues(); panel.setValues(values); break; } } } public void setSearchResultPanel(AbstractSearchResultPanel searchResultPanel) { m_searchResultPanel = searchResultPanel; } public AbstractSearchResultPanel getSearchResultPanel() { return m_searchResultPanel; } /* * Method to clear (refresh) AddLimitUI when Node is in edit mode * (non-Javadoc) * @see edu.wustl.cab2b.client.ui.IUpdateAddLimitUIInterface#clearAddLimitUI(edu.wustl.common.querysuite.queryobject.IExpression) */ public void clearAddLimitUI() { Component[] components = m_ContentForTopPanel.getComponents(); resetButton((Cab2bPanel)components[0], m_searchResultPanel.getAddLimitButtonTop()); resetButton((Cab2bPanel)components[components.length - 1], m_searchResultPanel.getAddLimitButtonBottom()); /* Add the individual panels to the top content panel. */ for (Component component : components) { if (component instanceof AbstractTypePanel) { ((AbstractTypePanel) component).resetPanel(); } } validate(); } private void resetButton(Cab2bPanel cab2bPanel, Cab2bButton cab2bButton) { cab2bPanel.removeAll(); cab2bPanel.add(cab2bButton); } public void resetPanel() { clearAddLimitUI(); m_contentForBottomCenterPanel.clearDagPanel(); updateUI(); } public AbstractCategorySearchPanel getSearchPanel() { return categSearchPanel; } public void setSearchText(String searchText) { categSearchPanel.getSearchPanel().setSearchtext(searchText); } public String getSearchText() { return categSearchPanel.getSearchPanel().getSearchtext(); } public void setSearchPanel(AbstractCategorySearchPanel panel) { categSearchPanel = panel; m_searchResultPanel = panel.getSearchResultPanel(); } public void refreshBottomCenterPanel(JXPanel panel) { // TODO Auto-generated method stub } }
source/client/main/edu/wustl/cab2b/client/ui/AddLimitPanel.java
package edu.wustl.cab2b.client.ui; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.Dimension; import java.awt.GradientPaint; import java.awt.Image; import java.awt.geom.Point2D; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.border.EmptyBorder; import org.jdesktop.swingx.JXPanel; import org.jdesktop.swingx.JXTitledPanel; import org.jdesktop.swingx.painter.gradient.BasicGradientPainter; import org.openide.util.Utilities; import edu.wustl.cab2b.client.ui.controls.Cab2bPanel; import edu.wustl.cab2b.client.ui.controls.Cab2bTitledPanel; import edu.wustl.cab2b.client.ui.dag.MainDagPanel; import edu.wustl.cab2b.client.ui.main.AbstractTypePanel; import edu.wustl.cab2b.client.ui.main.IComponent; import edu.wustl.cab2b.client.ui.query.ClientPathFinder; import edu.wustl.cab2b.client.ui.query.IClientQueryBuilderInterface; import edu.wustl.cab2b.client.ui.query.IPathFinder; import edu.wustl.cab2b.client.ui.util.CommonUtils; import edu.wustl.cab2b.client.ui.util.CommonUtils.DagImageConstants; import edu.wustl.common.querysuite.exceptions.MultipleRootsException; import edu.wustl.common.querysuite.queryobject.ICondition; import edu.wustl.common.querysuite.queryobject.IConstraintEntity; import edu.wustl.common.querysuite.queryobject.IExpression; import edu.wustl.common.querysuite.queryobject.IExpressionId; import edu.wustl.common.querysuite.queryobject.IRule; import edu.wustl.common.querysuite.queryobject.RelationalOperator; /** * This is the panel for the Add limit tab from the main search dialog. The * class is also an instance of the {@link ContentPanel},so that child * component can cause this panel to refresh in a way required by this panel. * * @author mahesh_iyer */ public class AddLimitPanel extends ContentPanel implements IUpdateAddLimitUIInterface { private static final long serialVersionUID = 1L; /** The titled panel for the top panel. */ private JXTitledPanel m_topCenterPanel = null; private AbstractSearchResultPanel m_searchResultPanel; /** Scroll pane for the top panel. */ private JScrollPane m_scrollPane = null; /** * The dynamically generated panel for the selected class from an * advanced/category search. */ private JXPanel m_ContentForTopPanel = null; /** The advanced search panel along with the results panel. */ private AbstractCategorySearchPanel categSearchPanel = null; /** The titled panel for the bottom panel. */ private JXTitledPanel m_bottomCenterPanel = null; /** The simple view for the rules added. This is to be replaced by the DAG. */ private MainDagPanel m_contentForBottomCenterPanel = null; /** Split pane between the top and center titled panels. */ private JSplitPane m_innerPane = null; /** Split pane between the LHS and RHS sections of the main panel. */ private JSplitPane m_outerPane = null; /** * Default constructor */ AddLimitPanel() { initGUI(); } /** * Method initializes the panel by appropriately laying out child * components. */ private void initGUI() { this.setLayout(new BorderLayout()); /* * Pass the reference , so that the child can cause the parent to * refresh for any events triggered in the child. 
*/ if (categSearchPanel == null) categSearchPanel = new AddLimitCategorySearchPanel(this); /* The top center titled panel */ m_topCenterPanel = new Cab2bTitledPanel("Define Search Rules"); m_topCenterPanel.setTitleForeground(Color.BLACK); /* Set a gradient painter for the title panel */ GradientPaint gp = new GradientPaint(new Point2D.Double(.3d, 0), new Color(185, 211, 238), new Point2D.Double(.7, 0), Color.WHITE); m_topCenterPanel.setTitlePainter(new BasicGradientPainter(gp)); /* * Set the preferred size for the top panel, as against the preferred * size for the contentPanel/child panel itself. Doing the later has the * undesired result of content panel getting clipped even if the actual * length is more than the set preferred size, and the vertical scroll * bars never come into existence. Again, size set based on usability * specs. */ m_topCenterPanel.setPreferredSize(new Dimension(546, 341)); m_topCenterPanel.setBorder(new EmptyBorder(1, 1, 1, 1)); /* * JXTitledPanels work better with only panel as child, and hence the * following panel. */ this.m_ContentForTopPanel = new Cab2bPanel(); this.m_ContentForTopPanel.setBorder(new EmptyBorder(1, 1, 1, 1)); this.m_scrollPane = new JScrollPane(this.m_ContentForTopPanel); this.m_scrollPane.getViewport().setBackground(Color.WHITE); this.m_topCenterPanel.add(this.m_scrollPane); /* The bottom center titled panel.Initialization on the same lines. */ m_bottomCenterPanel = new Cab2bTitledPanel("Limit Set"); m_bottomCenterPanel.setTitleForeground(Color.BLACK); m_bottomCenterPanel.setBorder(new EmptyBorder(1, 1, 1, 1)); m_bottomCenterPanel.setTitlePainter(new BasicGradientPainter(gp)); /** * Generate ImageMap */ Map<DagImageConstants, Image> imageMap = new HashMap<DagImageConstants, Image>(); imageMap.put(DagImageConstants.SelectIcon, Utilities.loadImage("select_icon.gif")); imageMap.put(DagImageConstants.selectMOIcon, Utilities.loadImage("select_icon_mo.gif")); imageMap.put(DagImageConstants.ArrowSelectIcon, Utilities.loadImage("arrow_icon.gif")); imageMap.put(DagImageConstants.ArrowSelectMOIcon, Utilities.loadImage("arrow_icon_mo.gif")); imageMap.put(DagImageConstants.ParenthesisIcon, Utilities.loadImage("parenthesis_icon.gif")); imageMap.put(DagImageConstants.ParenthesisMOIcon, Utilities.loadImage("parenthesis_icon_mo.gif")); imageMap.put(DagImageConstants.DocumentPaperIcon, Utilities.loadImage("paper_grid.png")); imageMap.put(DagImageConstants.PortImageIcon, Utilities.loadImage("port.gif")); IPathFinder pathFinder = new ClientPathFinder(); m_contentForBottomCenterPanel = new MainDagPanel(this, imageMap, pathFinder, false); m_bottomCenterPanel.add(m_contentForBottomCenterPanel); /* Add components to the conetent pane. 
*/ this.m_innerPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, m_topCenterPanel, m_bottomCenterPanel); //this.m_innerPane.setDividerLocation(0.5D); this.m_innerPane.setOneTouchExpandable(false); this.m_innerPane.setBorder(null); this.m_innerPane.setDividerSize(4); this.m_outerPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, categSearchPanel, this.m_innerPane); this.m_outerPane.setDividerLocation(0.2D); this.m_outerPane.setOneTouchExpandable(false); this.m_outerPane.setBorder(null); this.m_outerPane.setDividerSize(4); this.m_outerPane.setDividerLocation(242); this.add(BorderLayout.CENTER, this.m_outerPane); } /** * Method to add search result panel * @param resultPanel */ public void addResultsPanel(AbstractSearchResultPanel resultPanel) { categSearchPanel.m_searchPanel.addResultsPanel(resultPanel); } /** * This method takes the newly added expression and renders the node * accordingly * * @param expressionId */ public void refreshBottomCenterPanel(IExpressionId expressionId) { // Here code to handle adding new limit will appear try { m_contentForBottomCenterPanel.updateGraph(expressionId); } catch (MultipleRootsException e) { CommonUtils.handleException(e, this, true, true, false, false); } this.updateUI(); } /** * The method is a custom implementation for the refresh method from the * {@link ContentPanel} interface. Custom implementation is to simply set * the provided panel as the content panel for the top titled panel. * * @param panelToBeRefreshed * The panel to be refreshed. * * @param strClassNameAsTitle * The class/category name for which the dynamic UI is generated. */ public void refresh(JXPanel[] arrPanel, String strClassNameAsTitle) { /* Set the title for the top titled panel. */ this.m_topCenterPanel.setTitle("Define Search Rules '" + strClassNameAsTitle + "'"); this.m_ContentForTopPanel.removeAll(); int length = arrPanel.length; /* Add the individual panels to the top content panel. */ for (int i = 0; i < length; i++) { if (arrPanel[i] != null) { this.m_ContentForTopPanel.add("br", arrPanel[i]); } } validate(); } /** * The method returns a reference to the bottom content panel. This is * invoked by the main panel in order to form the query. * * @return JXPanel The bottom content panel. 
*/ public JXPanel getBottomCenterPanel() { return this.m_contentForBottomCenterPanel; } public void setQueryObject(IClientQueryBuilderInterface query) { m_contentForBottomCenterPanel.setQueryObject(query); } public void editAddLimitUI(IExpression expression) { IConstraintEntity entity = expression.getConstraintEntity(); JXPanel[] panels = m_searchResultPanel.getEditLimitPanels(expression); // passing appropriate class name refresh(panels, edu.wustl.cab2b.common.util.Utility.getDisplayName(entity.getDynamicExtensionsEntity())); IRule rule = (IRule) expression.getOperand(0); int totalConditions = rule.size(); // Populate panels with corresponding value for (int i = 0; i < totalConditions; i++) { ICondition condition = rule.getCondition(i); setValueForAttribute(panels, condition); } validate(); } private void setValueForAttribute(JXPanel[] panels, ICondition condition) { //Don't consider panel 1 and panel end for getting attribute values //because first and last panels are Edit Limit button panels for (int i = 1; i < panels.length - 1; i++) { IComponent panel = (IComponent) panels[i]; String panelAttributeName = panel.getAttributeName(); int compareVal = panelAttributeName.compareToIgnoreCase(condition.getAttribute().getName()); if (0 == compareVal) { RelationalOperator operator = condition.getRelationalOperator(); panel.setCondition(edu.wustl.cab2b.client.ui.query.Utility.displayStringForRelationalOperator(operator)); ArrayList<String> values = (ArrayList<String>) condition.getValues(); panel.setValues(values); break; } } } public void setSearchResultPanel(AbstractSearchResultPanel searchResultPanel) { m_searchResultPanel = searchResultPanel; } public AbstractSearchResultPanel getSearchResultPanel() { return m_searchResultPanel; } /* * Method to clear (refresh) AddLimitUI when Node is in edit mode * (non-Javadoc) * @see edu.wustl.cab2b.client.ui.IUpdateAddLimitUIInterface#clearAddLimitUI(edu.wustl.common.querysuite.queryobject.IExpression) */ public void clearAddLimitUI() { Component[] components = m_ContentForTopPanel.getComponents(); /* Add the individual panels to the top content panel. */ for (Component component : components) { if (component instanceof AbstractTypePanel) { ((AbstractTypePanel) component).resetPanel(); } } validate(); } public void resetPanel() { clearAddLimitUI(); m_contentForBottomCenterPanel.clearDagPanel(); } @Override public AbstractCategorySearchPanel getSearchPanel() { return categSearchPanel; } public void setSearchText(String searchText) { categSearchPanel.getSearchPanel().setSearchtext(searchText); } public String getSearchText() { return categSearchPanel.getSearchPanel().getSearchtext(); } @Override public void setSearchPanel(AbstractCategorySearchPanel panel) { categSearchPanel = panel; m_searchResultPanel = panel.getSearchResultPanel(); } @Override public void refreshBottomCenterPanel(JXPanel panel) { // TODO Auto-generated method stub } }
Fix for refreshing the Limit buttons
source/client/main/edu/wustl/cab2b/client/ui/AddLimitPanel.java
Fix for refreshing the Limit buttons
<ide><path>source/client/main/edu/wustl/cab2b/client/ui/AddLimitPanel.java <ide> import org.jdesktop.swingx.painter.gradient.BasicGradientPainter; <ide> import org.openide.util.Utilities; <ide> <add>import edu.wustl.cab2b.client.ui.controls.Cab2bButton; <ide> import edu.wustl.cab2b.client.ui.controls.Cab2bPanel; <ide> import edu.wustl.cab2b.client.ui.controls.Cab2bTitledPanel; <ide> import edu.wustl.cab2b.client.ui.dag.MainDagPanel; <ide> */ <ide> public void clearAddLimitUI() { <ide> Component[] components = m_ContentForTopPanel.getComponents(); <add> <add> resetButton((Cab2bPanel)components[0], m_searchResultPanel.getAddLimitButtonTop()); <add> resetButton((Cab2bPanel)components[components.length - 1], m_searchResultPanel.getAddLimitButtonBottom()); <ide> <ide> /* Add the individual panels to the top content panel. */ <ide> for (Component component : components) { <ide> } <ide> validate(); <ide> } <add> <add> private void resetButton(Cab2bPanel cab2bPanel, Cab2bButton cab2bButton) { <add> cab2bPanel.removeAll(); <add> cab2bPanel.add(cab2bButton); <add> } <ide> <ide> public void resetPanel() { <ide> clearAddLimitUI(); <ide> m_contentForBottomCenterPanel.clearDagPanel(); <del> } <del> <del> @Override <add> updateUI(); <add> } <add> <ide> public AbstractCategorySearchPanel getSearchPanel() { <ide> return categSearchPanel; <ide> } <ide> return categSearchPanel.getSearchPanel().getSearchtext(); <ide> } <ide> <del> @Override <ide> public void setSearchPanel(AbstractCategorySearchPanel panel) { <ide> categSearchPanel = panel; <ide> m_searchResultPanel = panel.getSearchResultPanel(); <ide> } <ide> <del> @Override <ide> public void refreshBottomCenterPanel(JXPanel panel) { <ide> // TODO Auto-generated method stub <del> <del> } <add> } <add> <ide> }
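A minimal Swing sketch of the reset pattern introduced by this commit; the panel and button below are hypothetical stand-ins for the caB2B widgets. Emptying the first and last panels, re-adding their Add Limit buttons, and then forcing a relayout is what makes the buttons reappear after edit mode.

import javax.swing.JButton;
import javax.swing.JPanel;

public class ResetButtonSketch {
    // The commit's resetButton(...) does the first two steps; there, the
    // relayout happens via validate()/updateUI() further up the call chain.
    static void resetButton(JPanel panel, JButton button) {
        panel.removeAll();      // drop whatever edit-mode widgets replaced it
        panel.add(button);      // restore the original "Add Limit" button
        panel.revalidate();     // Swing does not relayout on its own after removeAll()
        panel.repaint();
    }

    public static void main(String[] args) {
        JPanel panel = new JPanel();
        resetButton(panel, new JButton("Add Limit"));
        System.out.println(panel.getComponentCount()); // 1
    }
}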
JavaScript
mit
590bca649cf07e953797d80443c7941c2c092e5a
0
xyos/horarios,xyos/horarios,xyos/horarios
define(['./module'],function (services){ 'use strict'; services.service('ScheduleService', function($http, $q, Schedule){ var initialItems = { 'busy': [1048448,1048448, 1048448, 1048448, 1048448, 1048448, 1048448], 'groups': [{ code : '', schedule : [1048448, 1048448, 1048448, 1048448, 1048448 ,1048448, 1048448], subject : '-no hay horario', name : '', lateHours: false, earlyHours: false }] }; var initialSchedule = new Schedule(initialItems); initialSchedule.parseRows(); var schedules = []; schedules.push(initialSchedule); var activeSchedule = schedules[0]; var reset = function(){ schedules = []; schedules.push(initialSchedule); activeSchedule = schedules[0]; }; var busyRows = []; var busyQuery = function(){ var query = ''; var days = []; _.forEach(busyRows, function(hour){ _.forEach(hour, function(day, index){ if(angular.isUndefined(days[index])){ days[index] = day ? '1':'0'; } else { days[index] += day ? '1':'0'; } }); }); _.forEach(days,function(day){ query += parseInt(day.split('').reverse().join(''), 2) + ','; }); return query.substring(0, query.length - 1); }; var subjectQuery = ''; var getScheduleQuery = function(){ return '/api/v1.0/schedule/subjects=' + subjectQuery + '&busy=' + busyQuery(); }; var mergeSchedule = function(schedule) { schedule.parseRows(); //console.log(schedule.rows); for(var i = 0, max = activeSchedule.rows.length; i < max ; i++){ for(var j = 0, max2 = activeSchedule.rows[i].length ; j < max2; j++){ if(activeSchedule.rows[i][j].color + activeSchedule.rows[i][j].name !== schedule.rows[i][j].color + schedule.rows[i][j].name){ activeSchedule.rows[i][j] = schedule.rows[i][j]; } } } activeSchedule.index = schedule.index; return schedule; }; return { getActive: function(){ return activeSchedule; }, setSubjectQuery: function(query){ subjectQuery = query; }, getQuery: function(){ return getScheduleQuery(); }, setActive: function(index){ mergeSchedule(schedules[index]); }, reset: reset, fetch: function(){ schedules = []; return $http.get(getScheduleQuery()) .then(function(response){ if(_.isEmpty(response.data)){ var s = new Schedule(initialItems); s.index = 0; schedules.push(s); } response.data.forEach(function(sched,index){ var schedule = new Schedule(sched); schedule.index = index; schedules.push(schedule); }); }); }, get: function(index){ return schedules[index]; }, setBusy: function(busy) { busyRows = busy; }, getBusy: function(busy) { return busyRows; }, getList: function(){ if(_.isEmpty(schedules)){ reset(); } return schedules; } }; }); });
horarios/static/js/app/schedule/service.js
define(['./module'],function (services){ 'use strict'; services.service('ScheduleService', function($http, $q, Schedule){ var initialItems = { 'busy': [0, 0, 0, 0, 0, 0, 0], 'groups': [{ code : '', schedule : [1048448, 1048448, 1048448, 1048448, 1048448 ,1048448, 1048448], subject : '-no hay horario', name : '', lateHours: false, earlyHours: false }] }; var initialSchedule = new Schedule(initialItems); initialSchedule.parseRows(); var schedules = []; schedules.push(initialSchedule); var activeSchedule = schedules[0]; var reset = function(){ schedules = []; schedules.push(initialSchedule); activeSchedule = schedules[0]; }; var busyRows = []; var busyQuery = function(){ var query = ''; var days = []; _.forEach(busyRows, function(hour){ _.forEach(hour, function(day, index){ if(angular.isUndefined(days[index])){ days[index] = day ? '1':'0'; } else { days[index] += day ? '1':'0'; } }); }); _.forEach(days,function(day){ query += parseInt(day.split('').reverse().join(''), 2) + ','; }); return query.substring(0, query.length - 1); }; var subjectQuery = ''; var getScheduleQuery = function(){ return '/api/v1.0/schedule/subjects=' + subjectQuery + '&busy=' + busyQuery(); }; var mergeSchedule = function(schedule) { schedule.parseRows(); //console.log(schedule.rows); for(var i = 0, max = activeSchedule.rows.length; i < max ; i++){ for(var j = 0, max2 = activeSchedule.rows[i].length ; j < max2; j++){ if(activeSchedule.rows[i][j].color + activeSchedule.rows[i][j].name !== schedule.rows[i][j].color + schedule.rows[i][j].name){ activeSchedule.rows[i][j] = schedule.rows[i][j]; } } } activeSchedule.index = schedule.index; return schedule; }; return { getActive: function(){ return activeSchedule; }, setSubjectQuery: function(query){ subjectQuery = query; }, getQuery: function(){ return getScheduleQuery(); }, setActive: function(index){ mergeSchedule(schedules[index]); }, reset: reset, fetch: function(){ schedules = []; return $http.get(getScheduleQuery()) .then(function(response){ if(_.isEmpty(response.data)){ schedules.push(initialSchedule); } response.data.forEach(function(sched,index){ var schedule = new Schedule(sched); schedule.index = index; schedules.push(schedule); }); }); }, get: function(index){ return schedules[index]; }, setBusy: function(busy) { busyRows = busy; }, getBusy: function(busy) { return busyRows; }, getList: function(){ if(_.isEmpty(schedules)){ reset(); } return schedules; } }; }); });
Show no schedule when there's no possible schedule
horarios/static/js/app/schedule/service.js
Show no schedule when there's no possible schedule
<ide><path>orarios/static/js/app/schedule/service.js <ide> 'use strict'; <ide> services.service('ScheduleService', function($http, $q, Schedule){ <ide> var initialItems = { <del> 'busy': [0, 0, 0, 0, 0, 0, 0], <add> 'busy': [1048448,1048448, 1048448, 1048448, 1048448, 1048448, 1048448], <ide> 'groups': [{ <ide> code : '', <ide> schedule : [1048448, 1048448, 1048448, 1048448, 1048448 ,1048448, 1048448], <ide> return $http.get(getScheduleQuery()) <ide> .then(function(response){ <ide> if(_.isEmpty(response.data)){ <del> schedules.push(initialSchedule); <add> var s = new Schedule(initialItems); <add> s.index = 0; <add> schedules.push(s); <ide> } <ide> response.data.forEach(function(sched,index){ <ide>
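The busyQuery() helper in the service above packs each day's per-hour booleans into one integer: it appends one character per hour row, reverses the string, and parses it base 2, so hour h lands on bit h. A minimal standalone sketch of that encoding (the 20-row grid and the 07:00-19:00 busy range are illustrative assumptions, chosen so the result matches the 1048448 sentinel used in initialItems):

// One boolean per hour row; the earliest hour must end up as the least
// significant bit, which is why busyQuery() reverses before parseInt(..., 2).
var hours = []
for (var h = 0; h < 20; h++) hours.push(h >= 7 && h <= 19)
var bits = hours.map(function (b) { return b ? '1' : '0' }).join('')
var mask = parseInt(bits.split('').reverse().join(''), 2)
console.log(mask) // 1048448, i.e. bits 7..19 set

One such integer per day, joined with commas, is what the '&busy=' query parameter built by getScheduleQuery() carries.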
Java
mpl-2.0
bf4d6ccd5c97b7ce7ce430a1a7e72828bf468ea9
0
Skelril/Skree
/* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.skelril.skree.content.aid; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.message.MessageChannelEvent; import org.spongepowered.api.text.Text; import org.spongepowered.api.text.action.TextActions; import org.spongepowered.api.text.format.TextColors; public class ChatCommandAid { @Listener public void onPlayerChat(MessageChannelEvent.Chat event) { String rawText = event.getRawMessage().toPlain(); if (rawText.matches("\\./.*")) { // Remove the comment String rawCommand = rawText.replaceFirst("//.*", ""); // Replace the "./" with "/" and then trim the string String command = rawCommand.replaceFirst("\\./", "/").trim(); // Remove the command, and the comment block, as well as its spaces String message = rawText.replaceFirst(rawCommand, "").replaceFirst("// *", "").trim(); // Send a composite message of the command, a space, and then the comment text event.getFormatter().setBody(Text.of( Text.of( TextColors.DARK_GREEN, TextActions.showText(Text.of("Click to type:\n", command)), TextActions.suggestCommand(command), command ), " ", message )); } } }
src/main/java/com/skelril/skree/content/aid/ChatCommandAid.java
/* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package com.skelril.skree.content.aid; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.message.MessageChannelEvent; import org.spongepowered.api.text.Text; import org.spongepowered.api.text.action.TextActions; import org.spongepowered.api.text.format.TextColors; public class ChatCommandAid { @Listener public void onPlayerChat(MessageChannelEvent.Chat event) { String rawText = event.getRawMessage().toPlain(); if (rawText.matches("\\./.*")) { String rawCommand = rawText.replaceFirst("//.*", ""); String command = rawCommand.replaceFirst("\\./", "/").trim(); String message = rawText.replaceFirst(rawCommand, "").replaceFirst("//", ""); event.getFormatter().setBody(Text.of( Text.of( TextColors.DARK_GREEN, TextActions.showText(Text.of("Click to type:\n", command)), TextActions.suggestCommand(command), command ), message )); } } }
Fixed some whitespace issues with the chat command aid
src/main/java/com/skelril/skree/content/aid/ChatCommandAid.java
Fixed some whitespace issues with the chat command aid
<ide><path>rc/main/java/com/skelril/skree/content/aid/ChatCommandAid.java <ide> public void onPlayerChat(MessageChannelEvent.Chat event) { <ide> String rawText = event.getRawMessage().toPlain(); <ide> if (rawText.matches("\\./.*")) { <add> // Remove the comment <ide> String rawCommand = rawText.replaceFirst("//.*", ""); <add> // Replace the "./" with "/" and then trim the string <ide> String command = rawCommand.replaceFirst("\\./", "/").trim(); <del> String message = rawText.replaceFirst(rawCommand, "").replaceFirst("//", ""); <add> // Remove the command, and the comment block, as well as its spaces <add> String message = rawText.replaceFirst(rawCommand, "").replaceFirst("// *", "").trim(); <add> <add> // Send a composite message of the command, a space, and then the comment text <ide> event.getFormatter().setBody(Text.of( <ide> Text.of( <ide> TextColors.DARK_GREEN, <ide> TextActions.suggestCommand(command), <ide> command <ide> ), <add> " ", <ide> message <ide> )); <ide> }
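The trimming fix above is easiest to see on a concrete chat line. A rough restatement of the three-step parse (the commit itself is Java; JavaScript is used here for consistency with the other sketches, with non-global String.replace standing in for Java's replaceFirst, and the sample input invented):

var rawText = './tp spawn // teleport to spawn'
var rawCommand = rawText.replace(/\/\/.*/, '')        // comment stripped: "./tp spawn "
var command = rawCommand.replace(/\.\//, '/').trim()  // "/tp spawn"
var message = rawText.replace(rawCommand, '')         // only the comment remains
  .replace(/\/\/ */, '').trim()                       // "teleport to spawn"
console.log(command + ' ' + message)                  // "/tp spawn teleport to spawn"

Without the '// *' pattern and the trim() added by the diff, message would keep the spaces following the '//' marker, which is the whitespace issue the commit message refers to.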
JavaScript
mit
4040347dcc35bec339c6e7aac76849d4e7643778
0
olecom/enjsms,olecom/enjsms
/*====---- APP: self process management ----====*/ (function(require ,process ,log ,cerr ,eval ,setTimeout ,clearTimeout ,RegExp ,Math ,String) { var http = require('http'), net = require('net'), inspect = require('util').inspect ,ctl_runs = null, app_runs = null, db_runs = null ,err_log = [], gsm_inf = [], srv_log = [ 'Log start @[' + _date() + ']'] function _chklen(logs) { //prevent memory hug, when web client is closed, thus doesn't read and clears log arrays //full logs are on the file system anyway if (logs.length > 177) logs = logs.slice(87) } function _gsm(msg) { log (msg) ; _chklen(gsm_inf) ; gsm_inf.push(msg) } function _log(msg) { log (msg) ; _chklen(srv_log) ; srv_log.push(msg) } function _err(msg) { cerr(msg) ; _chklen(err_log) ; err_log.push(msg) } function _date(){ //ISODateString function pad(n){return n<10 ? '0'+n : n} var d = new Date() return d.getUTCFullYear()+'-' + pad(d.getUTCMonth()+1)+'-' + pad(d.getUTCDate())+'T' + pad(d.getUTCHours())+':' + pad(d.getUTCMinutes())+':' + pad(d.getUTCSeconds())+'Z' } var str2hex = function(s) { return s.replace(/[\s\S]/g ,function(ch){ return (ch < '\u0010' ? ' 0' : ' ') + ch.charCodeAt(0).toString(16) }).toUpperCase() } process.on('uncaughtException' ,function (err) { _err('fatal uncaught exception: ' + err + "\n" + err.stack) }) /*====---- APP: telnet GSM part ----====*/ /* @inbuf input buffer for full text lines from ME @gsmtel_runs HW connection flag for TE2ME cmd loop start @TE_ME_mode mid loop cmd chain sync (next cmd or give more data for same cmd) @ta current terminal adapter (GSM engine) */ var TE_ME_mode = 'login-mode' ,gsmtel_runs = null ,ta ,ME = {} ,inbuf = [] function get_input_lines(s) { //loop this fun() on data until there is full set of lines if(!ta) { _err('app error get_input_lines(): ta is null') return } _gsm('data event got:"' + s + '"') _gsm('data event got hex:"' + str2hex(s) + '"') _gsm('ta._end_ch: ' + ta._end_ch.toString() + str2hex(ta._end_ch.toString())) //join chuncks from the network and queue them in full lines inbuf.push(s) // add chunck to array /* Commands are usually followed by a response that includes "<CR><LF><response><CR><LF>". (XT55 Siemens Mobile doc) this is case of "ATV1" setup */ if (!ta._end_ch.test(s)) return // full command in chunck: join all and return to cmd handler // remove repeated, front and tail new lines s = inbuf.join('') .replace(/\r+/g,'') .replace(/(^\n+)|(\n+$)/g,'') .replace(/\n+/g,'\n') _gsm('s: "' + s.replace('\n', '|n') + '"') inbuf.splice(0) // clear return s ? s.split('\n') : null } /* GSM engines -, ME (Mobile Equipment), MS (Mobile Station), are referred | TA (Terminal Adapter), DCE (Data Communication Equipment) to as`: or facsimile DCE (FAX modem, FAX board). (XT55 Siemens Mobile doc)*/ ME.GSM = function() { // general GSM interface via Telnet of Terminal.exe by <[email protected]> //== GSM command aliases: == /* 1.7.1 Communication between Customer Application and XT55 (Siemens Mobile doc) Leaving hardware flow control unconsidered the Customer Application (TE) is coupled with the XT55 (ME) via a receive and a transmit line. Since both lines are driven by independent devices collisions may (and will) happen, i.e. while the TE issues an AT command the XT55 starts sending an URC. This probably will lead to the TE’s misinterpretation of the URC being part of the AT command’s response. 
To avoid this conflict the following measures must be taken: = If an AT command is finished (with "OK" or "ERROR") the TE shall always wait at least 100 milliseconds before sending the next one. This gives the XT55 the opportunity to transmit pending URCs and get necessary service. Note that some AT commands may require more delay after "OK" or "ERROR" response, refer to the following command specifications for details. = The TE shall communicate with the XT55 using activated echo (ATE1), i.e. the XT55 echoes characters received from the TE. Hence, when the TE receives the echo of the first character "A" of the AT command just sent by itself it has control over both the receive and the transmit paths. This way no URC can be issued by the XT55 in between. i knew that!!! */ //modules. default set up this.modules = [ { modid:'единственный' ,ownum:null ,re:null ,cmdq: [] ,op: '??' ,sigq: 0} ] ,this.modqlenTotal = 0 ,this.defmod = 1 // counts from one ,this.curm = null // general setup to: ta.modules[ta.defmod - 1] ,this._end_ch = /\n$/ ,this._cmdle = '\r\n'// usual command's ending ,this.atsetup = 'ate1v1+CMEE=2' // _atok: /OK$/ || v0 -- _atok: /0$/ //data this.initcmds = function() { return [ this.atsetup ,this.info ,this.signal ,'at+COPS=3,0;+COPS?' ] } ,this.info = 'ati' ,this.signal = 'at+CSQ' ,this.cfgOpName = 'at+COPS=3,0' ,this.getOpName = 'at+COPS?' //== private == ,this.__err = function(e) { _err('GSM error: ' + ta._cmd + (e ? e : '')) return ta._yes_next + '-err' } ,this.__errfa = function(e) { _err('GSM error fatal: ' + e) gsmtel_runs == 'fatal error' ta.curm.cmdq.splice(0) if(ta._cmdTimeoutH) { clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null } return 'reconnect-fatal-err' } ,this.__nop = function(e) {} //== const ,this._yes_next = 'yes-next' ,this._atok = /OK$/ ,this._ater = 'ERR' //== var ,this._err = function(e) {} ,this._cmd = 'no command' ,this._atdata = [] ,this._sync_ok = null // std handlers ,this._hsync = 'handle-sync' ,this._handle = function(tamode, e) {} ,this._async_handlers = [] ,this._appcb = null // ??? 
application's call back ,this._timeoutLogin = 1024 ,this._timeoutAtSync = 1024 ,this._timeoutUSSD = 8192 //== public == ,this.login = function(e) { _gsm("login: GSM via Terminal Telnet server") this._cmd = 'login' //serial terminal program writes first line on telnet: "Terminal remote server" this._sync_ok = /^Terminal/ this._err = this.__err this._handle = this.__handle return this._hsync } ,this.get = function(e) { _gsm("get: noop") //empty command in this gsm interface, goto next cmd in que return this._yes_next } ,this._in_releaseTimeout = null ,this._cmd_releaseTimeout = 0 ,this.releaseH = this.__nop ,this._USSDtimeoutH = null ,this.do_release = function(){ _gsm('gsm do release') ta._handle = ta.__handle ; TE_ME_mode = null// std handler && its mode if(ta._appcb) ta._appcb = null// multimodule cmds can't clear this process.nextTick(_do_TELNET2MODULES_cmd_loop) } ,this._cmd_get = 'get' ,this._cmd_release = 'release' ,this.release = function(e) { //`release` gives back AT control on module, // but any AT data is queued by module for next `get` //`release` does not clear modules's cmdq //TODO: only `get` or `AT` errors by timeout must clear cmdq ta._cmd = ta._cmd_release gsmtel_runs = ta._cmd if(ta._in_releaseTimeout){// pending `release` called directly clearTimeout(ta._in_releaseTimeout) ta._cmd_releaseTimeout = 0 ta._in_releaseTimeout = null } if(ta._cmd_releaseTimeout > 0){ ta._in_releaseTimeout = setTimeout(ta.do_release, ta._cmd_releaseTimeout) } else ta.do_release() //returns nothing } ,this.logout = function() {} ,this._cmdTimeoutH = null ,this._smsle = "\u001a" + this._cmdle ,this.sms_mem_setup = 'smsmem' // `ate1` is needed; it is in `atsetup` // which also is in initcmds and any cmdq push ,this._sms_mem_read = 'at+cmgf=1;+cpms?;+cmgl="ALL";+cpms?' ,this.rcvd_sms = []//+CDSI: "SM",11 async event of sms delivery and sync +CMGL ,this.smsmem = function(sock){// for curmod, read all SM, ME in db, remove all _gsm('OK we in smsmem ta.curm.cmdq: ' + ta.curm.cmdq) ta._err = ta.__err gsmtel_runs = ta._cmd = ta._sms_mem_read ta._end_ch = /\n$/ sock.write(ta._cmd+ta._cmdle) clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = setTimeout(ta.do_at_timeout, ta._timeoutAtSync) /*ate1+cmgf=1;+cpms?;+cmgl="ALL";+cpms? +CPMS: "SM",24,30,"SM",24,30,"SM",24,30 +CMGL: 0,"REC READ","+375297656850",,"08/12/01,10:16:24+08" 0414043E04310440043E04350020044304420440043E002100200421043E043B043D04350447043D […] +CMGL: 21,"REC READ",6,233,"+375298022483",145,"12/05/15,03:16:34+12","12/05/15,03:16:39+12",0 +CMGL: 23,"REC READ",6,252,"+375298022483",145,"12/05/24,08:29:48+12","12/05/24,08:29:54+12",0 +CMGL: 11,"REC UNREAD",6,229,"+375298022483",145,"12/05/15,03:00:37+12","12/05/15,03:00:42+12",0 +CMGL: 9,"REC READ",6,47,,,"12/05/23,04:29:55+12","12/05/23,04:30:02+12",0 //First Octet ^^^^^^^(id) +CMGL: 3,"STO UNSENT","1111111",, Hpmini5101-1499000 +CMGL: 11,"STO UNSENT","",, OK OK +CPMS: "SM",24,30,"SM",24,30,"SM",24,30 OK */ ta._handle = function(ta_lines_arr, samode){ var i ,l ,m for (i in ta._async_handlers){ ta._async_handlers[i](ta_lines_arr) }// possible async stuff ta.releaseH(ta_lines_arr) i = 0 while(l = ta_lines_arr[i++]){ if(/ERROR/.test(l)){ return ta.do_at_timeout(' error: ' + l) } if(gsmtel_runs == ta._cmd){ if(l == ta._cmd){// ate1+cmgf=1;+cpms?;+cmgl="ALL";+cpms? 
continue// head sync OK } m = l.match(/^[+]CPMS: "..",(\d+),/) if(m){// +CPMS: "SM",24,30,"SM",24,30,"SM",24,30 m = parseInt(m[1]) if (m > 0){ gsmtel_runs = m// recs to read _gsm('sms 2 read: ' + m) ta._sync_ok = l// cmd end sync } else { clearTimeout(ta._cmdTimeoutH) return ta._yes_next } } } else {// tail sync + read exactly m "shall be records": /^[+]CMGL: / if((ta._sync_ok == l) && (ta._yes_next == gsmtel_runs)){ clearTimeout(ta._cmdTimeoutH) db_read_gsm_mem((ta.rcvd_sms.join('\n') + '\n').split('+CMGL')) ta.rcvd_sms.splice(0) return ta._yes_next// obviously it will be OK or timeout } ta.rcvd_sms.push(l) if(/^[+]CMGL: /.test(l)){// count a record if (0 == (--gsmtel_runs)){ gsmtel_runs = ta._yes_next } } } } clearTimeout(ta._cmdTimeoutH)// +delta for reading step ta._cmdTimeoutH = setTimeout(ta._err, ta._timeoutAtSync) return ta._hsync// new gsmtel_runs => next step }//fun() _handle return ta._hsync// cmd is set } //mode=2 (buffer all) SMS-DELIVER=1 BM=0 SMS-STATUS-REPORT=2 are stored in memory ,this._sms_setup = 'at+cmgf=1;+cnmi=2,1,0,2,1' //echo off (echo of mgs itself is not needed) //49: (33:SMS Send + SMS-STATUS-REPORT request) + (16:time is present in relative format) //167 = 1 day = 24 hours sms validity period:> 12+(167-143)*30/60 //0 = 0 (higher protocol) //8 = UCS2, 0 = GSM codepages ,this._sms_smp_ucs2 = 'ate0;+csmp=49,167,0,8' ,this._sms_smp_asci = 'ate0;+csmp=49,167,0,0' ,this._timeoutSendSMS = 1024 << 2 ,this._smst = this._timeoutSendSMS ,this.sms2send = []//+CMGS: 152 at cmd got sms id ,this.do_smsTimeout = function(now){// serious error, clear cmdq, `release` module if(now && ta._cmdTimeoutH) clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null _err('sms setup timout, schedule modules') if(!ta) return ta.curm.cmdq.splice(0) _gsm('sms write ESC + at') gsmtel.write('\u001b' + ta._cmdle)//ESC will prevent possible hung in wating for sms text+\u001a //gsmtel.write('\u001b' + ta._cmdle) gsmtel.write('at' + ta._cmdle)//make device live after ESC (or it stucks) ta.release()// hard release } ,this.sms = function(sock){ /* smscmds in cmdq: [ 'at+cmgf=1;+cnmi=2,1,0,2,1', 'sms', 'ate0+csmp=49,167,0,8;+CMGS="+375298022483"', '04220435043A044104420020043D04300020003700300020043A043804400438043B043B0438044704350441043A04380445002004410438043C0432043E043B043E0432002C0020043F043E043C043504490430044E0449043804450441044F002004320020043E0434043D044300200053004D0053043A04430020043D043000200055004300530032002E', 'ate0+csmp=49,167,0,0;+CMGS="+375298022483"', 'Next part is pure ASCII and can be 140 charachters long. Word split charachters are dots spaces semicolons etc. This text has 210 symbols...' 
] NOTE: 'release' will end this cmdq */ ta._cmd = ta.sms2send[0].atcmd gsmtel_runs = ta._cmd// setup timeout flag ta._cmdTimeoutH = setTimeout(ta.do_smsTimeout, ta._smst) ta._end_ch = /[ \n]$/ ta._atdata_handler = null ta._sync_ok = /^>/ _gsm('sms: ' + ta._cmd) sock.write(ta._cmd+ta._cmdle) ta._handle = function(ta_lines_arr, samode){ var m ,i for (i in ta._async_handlers) {//async handlers ta._async_handlers[i](ta_lines_arr) } ta.releaseH(ta_lines_arr) _gsm('smsH gsmtel_runs, ta._cmd: ' + gsmtel_runs +' '+ ta._cmd) /* Handling message send sequence: atv0+cmgs="+375298022483" > MSGBODY<SUB> +CMGS: 52 | +CMS ERROR: 500 | possible async messages | OK | */ if(gsmtel_runs == ta._cmd){ if(/ERROR/.test(ta_lines_arr[0])){ ta.sms2send[0].mid = ta_lines_arr[0] return ta.do_smsTimeout(true) } gsmtel_runs = ta._smsle _gsm('smH sms write: ' + ta.sms2send[0].m) sock.write(ta.sms2send[0].m + ta._smsle) // fall thru } i = 0 _gsm('smH sms sync not err') do { _gsm('smH i = ' + i + 'line: ' + ta_lines_arr[i] + 'ta._sync_ok: ' + ta._sync_ok) if(/ERROR/.test(ta_lines_arr[i])){ ta.sms2send[0].mid = ta_lines_arr[i] return ta.do_smsTimeout(true) } m = ta_lines_arr[i].match(/^[+]CMGS:(.*)$/) if(m){// id of sms + time ta.sms2send[0].dateS = new Date() ta.sms2send[0].mid = parseInt(m[1]) } m = null _gsm('smH atok test i = ' + i + 'line: ' + ta_lines_arr[i]) if(ta._atok.test(ta_lines_arr[i])){// sms sent, goto next sms _gsm('sent sms: ' + inspect(ta.sms2send[0])) delete ta.sms2send[0].atcmd// no need in tech info in db ta.sms2send[0].module = ta.curm.ownum + ' ' + ta.curm.modid taout.save(ta.sms2send[0] ,function(e, rec){ if(e) { _err('db err save sent sms: ' + e) return } _gsm('db saved: ' + inspect(rec)) })// async race with shift??? ta.sms2send.shift()// next sms if(ta.sms2send.length > 0){ ta._cmd = ta.sms2send[0].atcmd sock.write(ta._cmd + ta._cmdle) gsmtel_runs = ta._cmd return ta._hsync// next sms } clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null return ta._yes_next// next cmd in cmdq -> `release` } //???if(gsmtel_runs == ta._ater) return ta.do_smsTimeout(true) } while (ta_lines_arr[++i]) _gsm('smsH sms sync end: ' + gsmtel_runs +' '+ ta._cmd) return ta._hsync// new gsmtel_runs => next step }//fun() handler return ta._hsync// cmd is set } ,this.do_ussd_timeout = function(){ if(ta._USSDtimeoutH){ clearTimeout(ta._USSDtimeoutH) ta._USSDtimeoutH = null } if(ta) { if(ta._appcb) { ta._appcb('ussd timeout ' + ta._atdata.join('<br/>')) ta._appcb = null } ta.release() } _gsm('ta.do_ussd_timeout, call release') } ,this.do_at_timeout = function(e){ clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null if (gsmtel_runs == ta._cmd) return _err('timeout AT cmd: ' + ta._cmd + (e ? e : '')) } ,this.at = function(sock, atcmd){ //'at'- sync, AT - async commands this._atdata_handler = null // sync atcmd --inside data--ok data handler this._err = this.__err // common error trap this._sync_ok = this._atok this._end_ch = /\n$/ this._cmd = gsmtel_runs = atcmd if(ta._cmdTimeoutH) clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = setTimeout(this.do_at_timeout, ta._timeoutAtSync) if (atcmd == this.atsetup) { /* first `at` command and first(or after get) setup of modem communication: 'ate1v1+CMEE=2' (@this.atsetup) 1) e1: echo on for head and tail sync 2) v1: verbose cmd run codes e.g. '^OK$' 3) +CMEE=2: error descriptions instead of codes */ this._handle = this.__handle //simple handler until full `at` sync // setup of `at` cmd sync. 
this command may or may not receive its echo // thus handling must wait usual @_atok reply this._err = this.__nop _gsm('at write setup: `' + atcmd + '` _timeoutAtSync: ' + ta._timeoutAtSync) /* The "AT" or "at" prefix must be set at the beginning of each command line. To terminate a command line enter <CR>. (XT55 Siemens Mobile doc) <CR><LF> is used here: */ sock.write(atcmd + ta._cmdle) if(0 == this._async_handlers.length) this._async_handlers.push( this.SRVSTh ,this.CSQh ,this.CUSDh ,this.CUSDht ) //set up all async handlers return this._hsync } else if (this._handle !== this.__athandle){ // head and tail sync of any `at` command this._handle = this.__athandle } /* 1) async AT commands with same prefix NOTE: upper case AT. This command issues 'OK', ta transmit, async ta recv, then ta reply with same prefix as cmd itself(+CUSD): ,---- USSD request -- |AT+CUSD=1,"*100#",15 | |OK .... some time delta |+CUSD: 0,"Balans=3394r (OP=9777 do 09.05 23:19) Ostatok: MB=43.6 min=232",15 `---- 2) aync AT command's preliminary results with same prefix(+CMGS), final via other command (+CDSI) ,---- SMS sending -- |at+cmgs="+375298077782" |hi, olecom.[ctrl+x] # с русской кодировкой херь нужно разбираться # ушла SMS, id=152 |+CMGS: 152 |OK | # SMS-STATUS-REPORT получен в память SM позицию 11 # (настройки могут быть разные куда писать) |+CDSI: "SM",11 | |+CMS ERROR: 305 # выбираем какую память читать |at+cpms="SM" |OK |+CPMS: 19,30,19,30,19,30 | # читаем позицию 11 |at+cmgr=11 # мессага id=152 доставлена (фотмат этой хери может быть разный) |+CMGR: "REC UNREAD",6,152,"+375298022483",145,"12/03/22,02:42:12+12","12/03/22,02:42:17+12",0 #второй раз уже пишет, что прочитано |at+cmgr=11 |+CMGR: "REC READ",6,152,"+375298022483",145,"12/03/22,02:42:12+12","12/03/22,02:42:17+12",0 `---- ATV[10]: OK 0 Command executed, no errors CONNECT 1 Link established RING 2 Ring detected NO CARRIER 3 Link not established or disconnected ERROR 4 Invalid command or command line too long NO DIALTONE 6 No dial tone, dialling impossible, wrong mode BUSY 7 Remote station busy at+cmgr=1 +CMS ERROR: 321 AT+CEER +CEER: No cause information available OK 320 Memory failure 321 Invalid memory index 322 Memory full */ if(/^AT[+]CUSD=1/.test(atcmd)) { ta._in_ussd = null ta._USSDtimeoutH = setTimeout(ta.do_ussd_timeout, ta._timeoutUSSD) _gsm('set ussd timeout ' + ta._timeoutUSSD) ta._cmd_releaseTimeout = 777 + ta._timeoutUSSD// delay `release` ta._end_ch = /[\n ]$/ } else if(/^AT[+]CMGS/.test(atcmd)) {// non sync testing version ta._end_ch = /[ \n]$/ // normal or error(by timeout) case ta._sync_ok = /> / ta._cmd_releaseTimeout = 4444 } else switch (atcmd) { case 'ati': /* data inside cmd - ok block */ this._handle = function(ta_lines_arr) { for (var i in ta_lines_arr) { if (this._sync_ok.test(ta_lines_arr[i])) { app.gsm = 'GSM:&nbsp;' + this._atdata.splice(1).join('<br/>') this._atdata.splice(0) this._handle = this.__handle gsmtel_runs = this._atok clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null return this._yes_next } else this._atdata.push(ta_lines_arr[i]) } _gsm('ati handler awaits @_sync_ok') return 'ati-loop'+this._hsync } break case this.getOpName: this._atdata_handler = this.COPSh break case this.CSQ: this._atdata_handler = this.CSQh break } _gsm('at write: `' + atcmd + '`') sock.write(atcmd + ta._cmdle) return this._hsync } /* Handlers NOTE: async handler must look up all atdata[] for its match */ ,this.SRVSTh = function(atdata) { for(var i in atdata) { if (/SRVST:2/.test(atdata[i])) {//async: ^SRVST:2 app.op = '??? 
' ta.curm.cmdq.unshift(ta.getOpName) ta.curm.cmdq.unshift(ta.atsetup) } } } ,this.COPSh = function(atdata) { for(var i in atdata) { if (/COPS:/.test(atdata[i])) {// async: +COPS: 0,0,"MTS.BY",2 ta.curm.op = atdata[i].replace(/(^[^"]+")|("[^"]*$)/g,'') if(gsmtel_runs == ta.getOpName && ta._cmdTimeoutH) { clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null } break } } } ,this.CSQ = 'at+CSQ' ,this.CSQh = function(atdata) { var d if (this.CSQ == atdata[0]) {// sync: '+CSQ: 20,99' d = atdata[1] gsmtel_runs = this._atok clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null } else for(var i in atdata) { if (/RSSI:/.test(atdata[i])) {// async: '^RSSI:25' d = atdata[i] break } } if (d) ta.curm.sigq = d.replace(/[^:]*:([^,]+).*/,'$1') +'/31' } ,this._in_ussd = null ,this.CUSDht = function(atdata) {// ussd multiline tail async if(ta._in_ussd) for(var i in atdata) { ta._atdata.push(atdata[i])// push multiline data //full reply or ussd error responses if (/",[^,]*$/.test(atdata[i]) || /^[+]CUSD: .$/.test(atdata[i])) {// async RE: str start _gsm('USSD tail: ' + atdata[i] + ' ta._in_ussd: ' + ta._in_ussd) if(ta._appcb) { ta._appcb(ta._atdata.join('<br/>')) ta._appcb = null ta._atdata.splice(0) } if('cancel' == ta._in_ussd) { gsmtel.write('\u001b')// bad useing global var, but gsmtel.write('AT+CUSD=2')// don't care of result } ta._in_ussd = null ta.do_ussd_timeout() return }// read all multiline ussd reply } } ,this.CUSDh = function(atdata) {// ussd head async /* 0 no further user action required (network initiated USSD-Notify, or no further information needed after mobile initiated operation) 1 further user action required (network initiated USSD-Request, or further information needed after mobile initiated operation) 2 USSD terminated by network 3 other local client has responded 4 operation not supported 5 network time out */ //??? не понимаю почему здесь не сработал `this`??? нужна привязка к глобальному `ta` // так как я не знаю контекста этого `this`, лучше использовать глобальные переменные и не мучиться for(var i in atdata) { // async: '+CUSD: 0,"Vash balans sostavlyaet minus 3511 rublej... if (/^[+]CUSD: [012345]/.test(atdata[i])) { _gsm('USSD head: ' + atdata[i]) if (/^[+]CUSD: 0/.test(atdata[i])) { ta._in_ussd = 't' // cancel USSD continuation (portals spam, errors etc.) 
} else ta._in_ussd = 'cancel' ta._end_ch = /\n$/ break } } } ,this.__athandle = function(ta_lines_arr, samode) { /* when modem's `echo` is on, then all `at` command's ME data starts from command itself this is the first sync point, tail must be ended with _atok, final sync point if first fails, then something happened with connection or getting of a voip module if second fails, this can be some fatal connection problems -- i knew that, see "1.7.1 Communication between Customer Application and XT55" */ _gsm('at handler mode: ' + samode + ' arr: ' +ta_lines_arr) if (/sync$/.test(samode)) { var i = 0 if (/handle-sync$/.test(samode)) while (true) { if (ta_lines_arr[i] == this._cmd) { _gsm("got head of sync cmd: " + this._cmd) gsmtel_runs = this._atok clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null break } if(++i >= ta_lines_arr.length) return 'AT-sync' } // looking 4 head while (true) { if (ta_lines_arr[i].match(this._sync_ok)) { _gsm("got tail sync cmd: " + this._cmd) _gsm("atdata: " + this._atdata.join('<br/>')) if(this._atdata_handler) /* data inside atcmd - ok block */ this._atdata_handler(this._atdata) this._atdata.splice(0) //AT handles async the same way as this._handle = this.__handle return this._yes_next } else this._atdata.push(ta_lines_arr[i]) if(/ERROR/.test(ta_lines_arr[i])){ _err('AT err cmd: ' + this._cmd + ', msg: ' + ta_lines_arr[i]) break } if(++i >= ta_lines_arr.length){ ta.releaseH(ta_lines_arr) return 'AT-sync'// sync -- no other command setup, but skip async spam } } // searching 4 tail //_err("gsmtel __athandle(): !!! MUST NOT BE HERE1 !!!" + this._cmd) return this._yes_next } else { // the same way as this._handle = this.__handle for (var i in this._async_handlers) { this._async_handlers[i](ta_lines_arr) } ta.releaseH(ta_lines_arr) return 'yes-next-AT-asyn' } } ,this.__handle = function(ta_lines_arr, samode) { /* simple sync and async handler sync commands are done, when any line from ta match RE(@this.sync_ok) async handlers are called otherwise */ _gsm('handler ME mode: ' + samode + '\nthis._sync_ok:' + this._sync_ok + '\nthis._cmd:' + this._cmd) if (/sync$/.test(samode) && this._sync_ok) { var sync = this._sync_ok for (var i in ta_lines_arr) { _gsm('ta_lines_arr[i]: ' + ta_lines_arr[i]) if (ta_lines_arr[i].match(sync)) { _gsm("handled sync cmd: " + this._cmd) /*if(ta._appcb) {// universal handler does such call back process.nextTick(ta._appcb) ta._appcb = null }*/ return this._yes_next } } // no match, and since this is sync cmd, then error // _err() must return either next cmd or do something to actually get cmd done // clear sync flag to deal with possible async garbage between successive commands ta.releaseH(ta_lines_arr) return this._err(ta_lines_arr ? 
ta_lines_arr.join('') : 'no-event-data') } else { //there can be any async garbage between successive commands for (var i in this._async_handlers) { this._async_handlers[i](ta_lines_arr) } ta.releaseH(ta_lines_arr) return 'yes-next-asyn' } } ,this.qcmds = function (append_this_cmds, modid) { /*if (!(cmd_queue instanceof Array)) { _err('gsmtel queue_cmds(cmds, queue): @queue must be an array') return }*/ var mcmdq if (modid) for (var i in ta.modules) { if(ta.modules[i].modid == modid){ mcmdq = ta.modules[i].cmdq break } } if (!mcmdq) { mcmdq = ta.modules[ta.defmod - 1].cmdq modid = ta.modules[ta.defmod - 1].modid } if (append_this_cmds instanceof Array) { if (append_this_cmds.length <= 0) return mcmdq.push(ta._cmd_get) mcmdq.push(ta.atsetup) for (var i in append_this_cmds) { if (append_this_cmds[i]) { if ('string' === typeof append_this_cmds[i]) { mcmdq.push(append_this_cmds[i]) } else { _err('qcmds(arg): @arg['+i+'] is null, must be string') } } } //creating common release timeout, like it was: if(!/CUSD/.test(append_this_cmds)){ mcmdq.push(ta._cmd_release) } else { _err("append_this_cmds: " + append_this_cmds) if ('string' === typeof append_this_cmds) { if (append_this_cmds.length > 0) { mcmdq.push(ta._cmd_get) mcmdq.push(ta.atsetup) mcmdq.push(append_this_cmds) mcmdq.push(ta._cmd_release) } } else { _err('qcmds(arg): @arg is not string or array!') } } _gsm('mcmdq in "'+modid+'": '+JSON.stringify(mcmdq)) }// qcmds }// ME.GSM //NOTE: function() objects aren't simple {}, ME['GSM']._dscr is undefined via fun() {this._dscr} // RegExp(undefined) matches everything /*if (!/^GSM$/.test(i)) */ ME.GSM._dscr = "GSM modem via Telnet interface" ME.E220 = { _dscr: "HUAWEI_E220" } ME.MV37X = { _dscr: "MV-374" ,logout: function(){ ta.write('logout'+ta._cmdle) } ,login: function(sock, e) { const pass_sync = /word:.*$/ _gsm("MV37X login! : " + ta._sync_ok + ' ta._cmd: ' + ta._cmd) if('login' !== ta._cmd) { // init once ta._cmd = 'login' //on telnet connect /^user/name and password is asked interactively (EOL != \n) ta._sync_ok = /^user/ ta._end_ch = / $/ // space ta._err = this.__errfa } ta._handle = function(arg) { var r = ta.__handle(arg, 'sync') _gsm("MV37X login handle r: " + r) if(/^yes/.test(r)) { if('/^user/' == ta._sync_ok) { ta._sync_ok = pass_sync _gsm("MV37X sock write: voip") sock.write('voip'+ta._cmdle) } else if (pass_sync == ta._sync_ok){ sock.write('1234'+ta._cmdle) _gsm("MV37X sock write: 1234") ta._sync_ok = /\]$/ ta._end_ch = ta._sync_ok ta._handle = ta.__handle // all chain handeled, goto next command ta._err = ta.__nop // eat stuff in std handler return ta._hsync // set next (std) handler's arg } else { /* collect or handle mid data here */ } } /* returns nothing, 'cos this handler doesn't care about @arg */ }//fun return ta._hsync } ,get: function(sock) { _gsm('MV37X get cmd param write: `' + ta.curm.modid + '`') if(ta._cmd_releaseTimeout > 0){ _gsm('MV37X release is pending') return } sock.write(ta.curm.modid + ta._cmdle) ta._cmd = ta.curm.modid gsmtel_runs = ta._cmd //MV37X on get writes 'got!! press ctrl+x to release module X.' 
ta._sync_ok = /^got/ ta._end_ch = /[\]\n]$/ ta._handle = function(ta_lines_arr){ var i = 0 _gsm('`get` handle data') do { if(/bad command/.test(ta_lines_arr[i])){ _err("telnet err bad cmd") ta.curm.cmdq.splice(0) return ta._yes_next// empty cmdq will schedule modules } while(gsmtel_runs == ta._cmd){ if (ta_lines_arr[i] == gsmtel_runs){ _gsm('`get` head: ' + ta.curm.modid) gsmtel_runs = ta._end_ch clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = setTimeout(function(){ ta._cmdTimeoutH = null if(gsmtel_runs == ta._sync_ok){ //release current, schedule modules ta.curm.cmdq.splice(0) process.nextTick(_do_TELNET2MODULES_cmd_loop) _gsm('timeout cannot get: ' + ta.curm.modid) } }, ta._timeoutGet) } break } while(gsmtel_runs == ta._end_ch){ if (ta_lines_arr[i].match(ta._sync_ok)){ _gsm("got sync ok telnet cmd: " + ta._cmd) clearTimeout(ta._cmdTimeoutH) gsmtel_runs = ta._sync_ok return ta._yes_next } break } } while (ta_lines_arr[++i]) } ta._cmdTimeoutH = setTimeout(function(){ if(gsmtel_runs == ta._cmd){ //release current, schedule modules ta.curm.cmdq.splice(0) process.nextTick(_do_TELNET2MODULES_cmd_loop) _err('timeout, clear cmdq, cannot get: ' + ta.curm.modid) } ta._cmdTimeoutH = null }, ta._timeoutGet) return ta._hsync// wait this cmd } ,releaseH: function(ta_lines_arr){// sync or async handler //if(gsmtel_runs == ta._cmd_release) for(var i in ta_lines_arr){ if (/^release/.test(ta_lines_arr[i])) { _gsm('releaseH: ' + ta.curm.modid) gsmtel_runs = ta._cmd_release //ta.curm.cmdq.splice(0) } if(ta._end_ch.test(ta_lines_arr[i]) && gsmtel_runs == ta._cmd_release) process.nextTick(_do_TELNET2MODULES_cmd_loop) } } ,do_release: function(){ if(!ta || !gsmtel_runs) return// this fully async event may get such case gsmtel.write('\u0018') _gsm("MV37X release. send CTRL+X CAN 0x18, after: " + ta._cmd_releaseTimeout) ta._sync_ok = /^release/ // switch std sync handler to MV37X's telnet cmds ta._end_ch = /\]$/ // restore telnet from ATs ta._err = ta.__nop ta._handle = ta.releaseH if(ta._appcb) ta._appcb = null ta._cmd_releaseTimeout = 0// allow schedule modules //process.nextTick(_do_TELNET2MODULES_cmd_loop) } } var modring = 0 function _do_TELNET2MODULES_cmd_loop() { /* Modules manager On Telnet connect MODEL setup is done. Current module is set to default one or first otherwise. In its cmd queue's head `login` command is inserted and do_TE2ME handler is called. It events until cmdq is empty, thus nextTicking this manager. 
*/ if(!gsmtel_runs || !ta) { _gsm('telnet2modules: NOLINK') return } if(0 == modring){// first run if(ta.modules.length <= 0) { _err('app err: ta.modules[] is empty') return } modring = ta.defmod } if(ta._cmd_releaseTimeout > 0){// `release` is pending, reschedule process.nextTick(_do_TELNET2MODULES_cmd_loop) return } ta.modqlenTotal = 0 for (var i in ta.modules) ta.modqlenTotal += ta.modules[i].cmdq.length _gsm('sch: ta.modqlenTotal: ' + ta.modqlenTotal) if(ta.modqlenTotal <= 0) return// nothing to do, wait app commands _gsm('sch: modring: ' + modring + " cmdq: "+ ta.modules[modring - 1].cmdq) var cm = modring while (ta.modules[modring - 1].cmdq.length <= 0){ if(++modring > ta.modules.length) modring = 1 _gsm('sch: modring2: ' + modring) /*if (cm == modring){ return //ring is over, but there are total commands }*/ } _gsm('sch: selected modring = ' + modring) ta.curm = ta.modules[modring - 1] _gsm('sch: selecting "' + ta.curm.modid + '"') // give currently selected module into evented data handling TE_ME_mode = ta._yes_next process.nextTick(_do_TE2ME_cmd_loop) } function _do_TE2ME_cmd_loop(ta_lines_arr) { /* Main command and data handling @ta_lines_arr: if defined, then data event has been accured, and there are some data lines to sent to sync or async handlers @ta_lines_arr: undefined, set up the first command from the @ta.curm.cmdq queue @gsmtel_runs: if null, then nothing happens (@ta.curm.cmdq can be cleared, because link is down and new set up chain of command will be needed and queued on connect) */ if(!gsmtel_runs) { //TODO: check if user closes all manually `connect` && `disconnect` commands //ta.curm.cmdq.splice(0) // clear cmds // last cmd in queue must receive error // not last, but currently set up handler must get show stop event _gsm('telnet: NOLINK') return } var next_cmd _gsm('do loop, TE_ME_mode: ' + TE_ME_mode) if (ta_lines_arr) { _gsm('cmd handle: ' + (ta_lines_arr.join('|'))) next_cmd = ta._handle(ta_lines_arr, TE_ME_mode) if(!next_cmd) {// handler without yes_next, wait for more data _gsm('no next more data') return } } else next_cmd = TE_ME_mode// first setup _gsm('handler ret || cmd to setup: ' + next_cmd) while (RegExp(ta._yes_next).test(next_cmd)) { var c = ta.curm.cmdq[0] _gsm('cmd to setup: ' + c) if (!c) { ta._cmd = TE_ME_mode = ta._yes_next +" end of module's cmd queue" //schedule modules process.nextTick(_do_TELNET2MODULES_cmd_loop) return //end cmd queue } else if(/^at/i.test(c)) { //AT: specially handeled subset of commands next_cmd = ta.at(gsmtel, c) } else if(ta.hasOwnProperty(c)) { next_cmd = ta[c](gsmtel) } else { _gsm('direct write of:' + c) gsmtel.write(c) //loop next cmd } ta.curm.cmdq.shift() //loop next cmd } TE_ME_mode = next_cmd // sets up new mode in handlers } var gsmtel_addr = { //GSM_TELNET="localhost:8023" port: process.env.GSM_TELNET.replace(/[^:]*:/,'') ,fqdn: process.env.GSM_TELNET.replace(/:[^:]*/,'') } ,gsmtel function gsmtel_init() { modring = 0 gsmtel_runs = null TE_ME_mode = 'login-mode' app.gsm = 'connecting....' 
//reloads modules store in extjs } function gsmtel_configure() { var model = process.env.GSM_MODEL, i, j //v0:,"_atok": /^0$/ //NOTE: JSON.parse() doen't do REs, so it must be implemented in conf load ta = new ME.GSM if (/^{/.test(model)) { /*GSM_MODEL='{ "name": "MV001: MV-374 / MV-378 VoIP GSM Gateway" ,"module1": { "own":"+375298714075", "numre": "+37529[2578] +37533" } ,"module2": { "own":"set me in cfg", "numre": "+37529[136] +37544" } ,"default": 1 ,"_other_cfg": "be-be" }'*/ try { var cfg = JSON.parse(model) for(i in ME) { if(RegExp(ME[i]._dscr).test(cfg.name)) { var m = ME[i] for(j in m) { ta[j] = m[j] } // add own interface stuff to default break } } ta._dscr = cfg.name ta.modules.splice(0)// remove default j = 0 for(i in cfg){ if(!/(^default)|(^name)|(^_)/.test(i)) { var m if(cfg.default == ++j) ta.defmod = j //default module number in (array + 1) m = {} // new module object m.modid = i m.op = '??' // stats m.sigq = 0 m.ownum = cfg[i].own m.re = [] cfg[i].numre.replace(/[+]/g, '^[+]').split(' ').forEach( function(re_str){ if(re_str) m.re.push(RegExp(re_str)) } ) m.cmdq = [] ta.modules.push(m) } else if(/^_/.test(i)) ta[i] = cfg[i] // possible other cfg stuff } if(!j) { _err('model JSON config err: no modules found') } else if(!ta.defmod) { _gsm('model module selection: "default" module number is out of range or is not defined, setting "module1"') ta.defmod = 1 } if(ta._atsetup) ta.atsetup = ta._atsetup j = ta.initcmds() for (i in ta.modules) ta.qcmds(j ,ta.modules[i].modid) } catch(e) { _err('model JSON config err: ' + e + e.stack) _gsm('JSON cfg err, using default module config') ta = new ME.GSM } } else { //simple GSM_MODEL='HUAWEI_E220 HSDPA USB modem' for(var i in ME) { if(RegExp(ME[i]._dscr).test(model)) { var m = ME[i] for(var j in m) { ta[j] = m[j] }// add own stuff to default break } } ta._dscr = model ta.qcmds(ta.initcmds()) } } function gsmtel_ok(){ if(!gsmtel_runs){ _gsm('gsmtel_runs is null, wait and reconnect (4 sec) ...') gsmtel_runs = 'reconnect' _log('@[' + _date() + '] gsm telnet: reconnecting...') /* NOTE: gsmtel's socket timout must be less than this one or `node` will say: (node) warning: possible EventEmitter memory leak detected. 11 listeners added. Use emitter.setMaxListeners() to increase limit. */ setTimeout(gsmtel_ok, 4096) } else if ('reconnect' == gsmtel_runs) {// no connection, try later gsmtel_runs = null gsmtel.connect(gsmtel_addr.port, gsmtel_addr.fqdn /*, no need in another callback*/) } } function gsmtel_setup(){ gsmtel = net.connect(gsmtel_addr.port, gsmtel_addr.fqdn, gsmtel_ok) gsmtel.setTimeout(1024)//see NOTE in gsmtel_ok() above gsmtel_configure()// initcmds are in cmdq tain = ta._dscr.match(/^([^ :,.;]+)/)[1] _gsm('db collection prefix: ' + tain) taout = db.collection(tain+'_taout') tain = db.collection(tain+'_tain' ) tain.stats(function(e, stats){ if(stats.count <= 0){// if db income is empty, fill it from SIM for (var i in ta.modules) ta.qcmds(ta.sms_mem_setup ,ta.modules[i].modid) db_runs = 'init' } }) gsmtel.on('connect', function(){ gsmtel.setEncoding('ascii') //_gsm('ta: ' + inspect(ta)) gsmtel_runs = '@[' + _date() + '] gsm telnet: connected to ' + gsmtel_addr.fqdn + ':' + gsmtel_addr.port _log(gsmtel_runs) /*`login` runs current module's cmd queue, empty cmdq schedules modules by calling _do_TELNET2MODULES_cmd_loop() otherwise this timeout will reboot gsmtel: */ setTimeout(function(){ if(gsmtel_runs && ta && 'login' == ta._cmd) { _err('\n'+ '==== FATAL ERROR: ====\n'+ 'Telnet login fails. 
Maybe module config is wrong:\n"'+ process.env.GSM_MODEL+'"\n'+ '====') gsmtel.end() } }, ta._timeoutLogin) if(!ta.curm)// setup of current module ta.curm = ta.modules[ta.defmod - 1] ta.curm.cmdq.unshift('login')// first module runs `login` TE_ME_mode = ta._yes_next /* NOTE: this must be run as soon as possible to habdle any login prompts */ process.nextTick(_do_TE2ME_cmd_loop) }) // set up event handlers once gsmtel.on('data', function(chBuffer){ var lines = get_input_lines(chBuffer.toString()) _gsm('gsmtel `data` event lines:' + (lines ? lines.join('|'): 'null')) if (null == lines) return _do_TE2ME_cmd_loop(lines) }) gsmtel.on('end', function(){ _gsm('gsmtel `end` event') // other end closed connection FIN packet gsmtel_init() //TODO: if !user setTimeout(gsmtel_ok, 4096) }) gsmtel.on('error', function(e){ //NOTE: net error handler must not be inside init callback!!! if (e) { _err('gsm telnet {addr:'+process.env.GSM_TELNET+'} err : ' + e) gsmtel_init() setTimeout(gsmtel_ok, 4096) return } }) } /*====---- APP: http web part ----====*/ var express = require('express') ,app = express() ,app_srv app.configure(function(){ // app.set('views', __dirname + '/views') // app.set('view engine', 'jade') app.use(express.bodyParser()) //parse JSON into objects app.use(express.methodOverride()) app.use(app.router) //app.use('/extjs', express.static(__dirname + '/../../extjs-4.1.0-rc2')) //app.use('/extjs/examples/ux', express.static(__dirname + '/../../extjs-4.1.0-rc2/examples/ux')) //app.use('/extjs/ux', express.static(__dirname + '/../../extjs-4.1.0-rc2/examples/ux')) app.use('/extjs', express.static(__dirname + '/../../extjs-4.1')) app.use('/extjs/examples/ux', express.static(__dirname + '/../../extjs-4.1/ux')) app.use(express.static(__dirname + '/../_ui-web')) app.use(function errorHandler(err, req, res, next){ if (err.status) res.statusCode = err.status; if (res.statusCode < 400) res.statusCode = 500; res.writeHead(res.statusCode, { 'Content-Type': 'text/plain' }); res.end(err.stack); }) }) /* app.configure('development', function(){ }) app.configure('production', function () { })*/ // Routes app.get('/', function (req, res) { res.redirect('/telsms.htm') } ) app._htmlf = function(m, re) { return String(m).replace(/\n/g, '<br/>') } app_gsm = function(logmsg, atcmds_arr, cb, module) { if (logmsg) _gsm(logmsg) if(!gsmtel_runs) { return { success: !true ,msg: 'telnet: NOLINK' } } else if ('reconnect' == gsmtel_runs) { return { success: !true ,msg: 'telnet: reconnecting...' } } else if (!ta) { return { success: !true ,msg: 'ME is undefined. Unexpected.' 
} } ta._appcb = cb ta.qcmds(atcmds_arr, module) process.nextTick(_do_TELNET2MODULES_cmd_loop) // post-event queuing is preferable here } /* Первый SMS через форму: aT+cmgs="+375298022483" > test sms ^BOOT:10389262,0,0,0,6 +CMGS: 239 OK ExtJS SMS form load: smsNumber: +375298077782 smsBody(str or array): text smsModule: module1 "+375297XXYY677" */ function mk_sms_body(smsText) { // based on to_ascii() from uglify-js by Mihai Bazon const smsA = 140, smsU = smsA / 2 var a = true, aa = true, ws, c = 0, tc = 0, s = [] smsText.replace(/[\s\S]/g, function(ch) { var co = ch.charCodeAt(0) a = a && (128 > co) ++tc ++c if (/[ .,\n\r:;!?]/.test(ch)) ws = c if(a) { if(smsA == c ) { s.push({ascii:a, count:c}) ; tc -= c ; c = 0 } if(aa) aa = !true } else { if (c > smsU && c <= smsA && !aa) { if (ws) { s.push({ascii:true, count:ws}) ; tc -= ws ; c -= ws ; ws = 0 } else { s.push({ascii:true, count:c - 1}) ; tc -= c - 1 ; c -= 1 } aa = true a = true } if(smsU == c) { /*if (ws) { s.push({ascii:a, count:ws}) ; tc -= ws ; c -= ws ; ws = 0 } else { s.push({ascii:a, count:c}) ; tc -= c ; c = 0 }*/ s.push({ascii:a, count:c}) ; tc -= c ; c = 0 if(aa) aa = !true a = true } } }) if(tc) { s.push({ascii:a, count:tc}) } return s } function UCS2(text){// based on to_ascii() from uglify-js by Mihai Bazon return text.replace(/[\s\S]/g ,function(ch){ ch = ch.charCodeAt(0) return (128 > ch ? "00" : "0") + ch.toString(16) }).toUpperCase() } function unUCS2(hext){ return hext.replace(/..../g ,function(ch){ return String.fromCharCode(parseInt(ch ,16)) }) } app_sms = function(smsnum, smsbody, cb, module) { if(!gsmtel_runs) { return { success: !true ,msg: 'gsm: NOLINK'} } else if ('reconnect' == gsmtel_runs) { return { success: !true ,msg: 'gsm: reconnecting...'} } else if (!ta) { return { success: !true ,msg: 'ME is undefined. Unexpected.'} } var i ,j ,k ,m //normalize numbers: +375 29 8077782 +375 (29) 8077782 (29) 80-777-82 +37529234234 ,smsnums = smsnum.replace(/ +/g,' ') .replace(/ *[(]/g,' +375') .replace(/ *[+]375 *([+]375)/g,' $1') .replace(/(-)|([)] *)/g,'').replace(/ +(\d\d) +/g,'$1') .split(' ') //ascii and ucs2 body parts, ta._sms_smp ,smsbods = mk_sms_body(smsbody) _gsm("sms 2 " + smsnums) _gsm('smsbods: ' + inspect(smsbods)) ta._smst = ta._timeoutSendSMS for(i in smsnums) { if(!smsnums[i]) _err((1+parseInt(i)) + 'й номер пуст.') else { k = 0 for(j in smsbods) { m = { num: smsnums[i] ,dateQ: new Date() ,dateS: null ,mid: null ,module: null } if(smsbods[j].ascii) { m.atcmd = ta._sms_smp_asci + ';+CMGS="'+smsnums[i]+'"' m.m = smsbody.substr(k, k + smsbods[j].count) } else { m.atcmd = ta._sms_smp_ucs2 + ';+CMGS="'+smsnums[i]+'"' m.m = UCS2(smsbody.substr(k, k + smsbods[j].count)) } ta.sms2send.push(m) k += smsbods[j].count ta._smst += ta._smst// sms times timeout } } } if(ta.sms2send.length > 0){ _gsm('sms2send: ' + inspect(ta.sms2send)) ta.qcmds([ ta._sms_setup, 'sms' ], module) process.nextTick(_do_TELNET2MODULES_cmd_loop) return { success: true ,msg: 'SMS `AT` executed'} } return { success: !true ,msg: 'no SMS 2 send'} } //TODO: app.post('/qsms.json', function (req, res) { app.post('/sms.json', function (req, res) { var ret if (!req.body.smsNumber) ret = { success: !true ,msg: "form's smsNumber is null. Unexpected." 
};else ret = app_sms(req.body.smsNumber ,req.body.smsBody ,function(msg) { res.json({ success: true ,msg: msg }) } ,req.body.smsModule.replace(/ .*$/, '')) if (ret) res.json(ret)// error or other info which ends res here } ) app.post('/ussd.json', function (req, res) { //ExtJS USSD form load var ret if (!req.body.ussdNumber) ret = { success: !true ,msg: "form's ussdNumber is null. Unexpected." };else ret = app_gsm('ussd: ' + req.body.ussdNumber ,['AT+CUSD=1,"'+req.body.ussdNumber+'",15'] //ExtJS ussd form reply format: { "success": true, "msg": "A command was done" } //http error reply is plain text (hacked connect's errorhandler middleware) ,function(msg) { msg = msg.replace(/(^[^"]+")|("[^"]+$)/g,'') tain.save({ m: msg ,num: req.body.ussdNumber ,d: new Date() } ,function(e){ if(e) _err('db ussd save err: ' + e) }) res.json({ success: true ,msg: msg }) } ,req.body.module.replace(/ .*$/, '') ) if (ret) res.json(ret) // error or other info which ends res here } ) app.get('/gsmemr.json', function (req, res) { //ExtJS table load: USSD and SMS from DB: start=80&limit=20 db_runs = '' for (var i in ta.modules){ ta.qcmds(ta.sms_mem_setup ,ta.modules[i].modid) db_runs += '>' } db_runs += 'E' process.nextTick(_do_TELNET2MODULES_cmd_loop) res.json({ success: true }) } ) app.get('/tain.json', function (req, res) { //ExtJS table load: USSD and SMS from DB: start=80&limit=20 var r = {} tain.find().sort({$natural: -1}) .skip(parseInt(req.query.start)) .limit(parseInt(req.query.limit), function(e, recin) { r.data = recin tain.stats(function(e, stats){ r.total = stats.count res.json(r) }) }) } ) app.get('/taout.json', function (req, res) { //ExtJS table load: sent SMS var r = {} taout.find().sort({$natural: -1}) .skip(parseInt(req.query.start)) .limit(parseInt(req.query.limit), function(e, recout) { r.data = recout taout.stats(function(e, stats){ r.total = stats.count res.json(r) }) }) } ) app.get('/swhw_stat.json', function (req, res) { //ExtJS will load this once in a while into Ext Store for dataview var i, logs = [], gsms = [], errs = [] if (srv_log.length > 0) { for (i in srv_log) { logs.push(app._htmlf(srv_log[i])) } srv_log.splice(0) } if (gsm_inf.length > 0) { for (i in gsm_inf) { gsms.push(app._htmlf(gsm_inf[i])) } gsm_inf.splice(0) } if (err_log.length > 0) { for (i in err_log) { errs.push(app._htmlf(err_log[i])) } err_log.splice(0) } modules = [] if (ta) for (i in ta.modules){ modules.push({op: ta.modules[i].op, sigq: ta.modules[i].sigq }) } res.json({ stats: [ { os: app.os ,server: app.server ,db: app.db ,uptime: Math.ceil(process.uptime()) ,gsm: app.gsm } ] ,modules: modules ,logs: logs, gsms: gsms, errs: errs } ) if(app.gsm) app.gsm = null } ) app.get('/mods.json', function (req, res) { // serv static store of configured modules var m if (ta) { m = [] for (var i in ta.modules) { m.push({d: ta.modules[i].modid+ (ta.modules[i].ownum ? ' "'+ta.modules[i].ownum+'"':'')}) } } else m = [{d:'нет связи с движком GSM'}] res.json(m) } ) /* \ / Error handling for web app, http control channel. \/ All errors are fatal except -- EAGAIN && EINTR while reading something. /\ Latter must be handled by nodeJS. Thus we exit(1) if there is any. \/ /\ External watchdog or user must take care about running this "forever". 
/ \ */ var ctl = http.createServer(function(req, res){ var status = 200, len = 0, body = null if ('/cmd_exit' == req.url){ process.nextTick(function(){ process.exit(0) }) } else if ('/sts_running' == req.url) { } else if ('/cmd_stat' == req.url) { if ('GET' == req.method) { body = Math.ceil(process.uptime()).toString() len = body.length } } res.writeHead(status, { 'Content-Length': len, 'Content-Type': 'text/plain' }) res.end(body) }) app_srv = http.Server(app) app_srv.on('error', function(e){ if (/EADDR.*/.test(e.code)){ _err("web app can't listen host:port='*':" + process.env.JSAPPCTLPORT + "\n" + e + "\npossible 'app.conf' 'JSAPPCTLPORT' port collision or bad IP address") } else { _err("web app: " + e) //state_machine_append(err) } if (!app_runs) process.exit(1) } ) ctl.on('error', function(e){ //NOTE: net error handler must not be inside init callback!!! if (EADDRINUSE == e.code) { //'EADDRINUSE' 'EADDRNOTAVAIL' _err("controlling channel can't listen host:port='127.0.0.1':" + process.env.JSAPPCTLPORT + "\n" + e + "\npossible 'app.conf' 'JSAPPCTLPORT' port collision") } else { _err("controlling channel: " + e) } if (!ctl_runs) process.exit(1) } ) app_srv.on('listening', function(){ app_runs = _date() } ) ctl.on('listening', function(){ ctl_runs = _date() } ) app_srv.on('close', function(){ app_runs = null } ) ctl.on('close', function(){ ctl_runs = null } ) process.on('exit' ,function(){ if(gsmtel) try { if(ta && ta.logout) ta.logout() gsmtel.end() } catch(e) {} log('telsms nodeJS exit.') }) /*====---- APP: memory = data base ----====*/ function db_read_gsm_mem(arr){ /* == SMS-STATUS-REPORT ==: 15: ": 21,"REC READ",6,233,"+375298022483",145,"12/05/15,03:16:34+12","12/05/15,03:16:39+12",0" smsas RE: ^^^^^^^^+++^ das RE: ^++++++++++++++++++++^^^++++++++++++++++++++^ == SMS-DELIVER ==: 14: ": 20,"REC READ","+375297253059",,"10/04/21,15:11:51+12"\n003700390033003100350031003904210430…." 
smsd RE: ^^^^^^^+++++++++++++^ ^++++++++++++++++++++^^ = dad */ var smss = /READ",6,([^,]+),/ ,smsd = /READ","([^"]+)"/ ,gsmd = /(\d\d)[/](\d\d)[/](\d\d),(\d\d):(\d\d):(\d\d)/ ,das = /"([^"]+)","([^"]+)"/ ,dad = /"([^"]+)"\n([\s\S]+)\n$/ ,csms5 = /^050003/ ,csms6 = /^060804/ ,reports = [] ,ucs2body ,m ,r ,i ,d arr.shift()// remove empty first `split` _gsm('sms arr len: ' + arr.length) for(i in arr){// record header can be removed in UI r = { m: arr[i] }// default if(m = r.m.match(smsd)){ r.num = m[1] if(m = r.m.match(dad)){ d = m[1].match(gsmd) r.d = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6]) d = m[2] if(csms5.test(d)){// multipart concatenated SMS d = 12 // skip header, decode body } else if(csms6.test(d)){ d = 14 } else d = 0 d = m[2].substr(d) ucs2body = RegExp('^0[04][0-9A-F]{'+(d.length-2)+'}') if(ucs2body.test(d)){// match UCS2 at whole string length r.m = unUCS2(d) } else r.m = d } } else if(m = r.m.match(smss)){ r.mid = parseInt(m[1] ,10) if(m = r.m.match(das)){ d = m[1].match(gsmd)// poslan Date() r.p = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6]) d = m[2].match(gsmd)// dostavlen Date() r.d = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6]) //if (status){// sms status reports and check of mid //no deletes reports.push(r) //} } } if(/^>/.test(db_runs)) if (!/,"REC UNREAD",/.test(r.m)) continue tain.save(r ,function(e){ if(e) _err('db GSM save err: ' + e) }) } tain.ensureIndex({mid: 1} ,{sparse: true}) db_runs = db_runs.replace(/>/,'')// for every module if(reports.length > 0){ for(i in reports){ // arg is send by value, not by ref, thus update is safe in loop fix_mid_date_update(reports[i]) } } } function fix_mid_date_update(d){ var dl ,dh = (15*60*1000)// 15 minutes // local send date +- possible delta with GSM time, can be in config dl = new Date(d.p.getTime() - dh) dh = new Date(d.p.getTime() + dh) taout.find({mid: d.mid, dateS: {$gt: dl ,$lt: dh }} ,function(e ,r){ if(e) { _err('db.taout mid err: ' + e) return } if (r.length == 1){ _log('4updater found: ' + inspect(r)) //real i.e. GSM send and receive time taout.update({ _id: r[0]._id }, { $set: { mid: -d.mid, dateS: d.p, dateR: d.d } }) } }) } // init try { // third party modules better to try{} db_runs = false var mongo = require('mongojs'), db ,tain ,taout } catch (e) { cerr("[error] mongojs init: " + e) process.exit(1) } function db_run_check(){ if (!process.env.MONGODS) { _log("db: `process.env.MONGODS` is null, no db set") return } // mongodb-native or mongojs needs to redo connection on error db = mongo.connect(process.env.MONGODS + '/test') db.admin( function(aerr, a) { if(aerr){ _err("db.admin(): " + aerr) setTimeout(db_run_check ,4096) return } a.command({buildInfo: 1}, function(e ,d) { if(e){ setTimeout(db_run_check ,4096) _err('db.admin.command():' + e) return } app.db = "mongodb v" + d.documents[0]['version'] _log("telsms DB server: " + app.db + "@" + process.env.MONGODS + "\n") db_runs = _date() app_srv.listen(process.env.JSAPPJOBPORT, function(){ _log( "telsms Express server is listening on port " + process.env.JSAPPJOBPORT + " in " + app.settings.env + " mode\n"+ "controlling channel is http://127.0.0.1:" + process.env.JSAPPCTLPORT + "\n") app.os = process.platform + '@' + process.arch app.server = 'nodeJS v' + process.versions['node'] //setting up link with gsm app.gsm = 'connecting....' 
gsmtel_setup() } ) } ) }//cb admin ) }// once per app run, make init of global its parts ctl.listen(process.env.JSAPPCTLPORT, '127.0.0.1', db_run_check) })( require ,process ,console.log ,console.error ,eval ,setTimeout ,clearTimeout ,RegExp ,Math ,String) //olecom: telsms.js ends here
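The UCS2()/unUCS2() pair defined in telsms.js above does the hex packing for +CMGS in UCS2 mode. Restated standalone for a quick round-trip check (the sample hex string is taken from the SMS example embedded in the file's own comments; note the '00'/'0' padding only yields four hex digits for code points below 0x1000, which covers the ASCII and Cyrillic ranges this app deals with):

function UCS2(text) { // same body as in the file above
  return text.replace(/[\s\S]/g, function (ch) {
    ch = ch.charCodeAt(0)
    return (128 > ch ? '00' : '0') + ch.toString(16)
  }).toUpperCase()
}
function unUCS2(hext) {
  return hext.replace(/..../g, function (ch) {
    return String.fromCharCode(parseInt(ch, 16))
  })
}
console.log(UCS2('hi'))                     // "00680069"
console.log(unUCS2('04220435043A04410442')) // "Текст" -- start of the sample message above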
enjsms/_app/telsms.js
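app_sms() in the file above normalises free-form recipient input with a chain of replaces before splitting on spaces; its own comment lists the formats it accepts. Tracing that chain on one of those formats (a sketch only; the number comes from the code's comment, not real data):

var smsnum = '+375 29 8077782 (29) 80-777-82'
var smsnums = smsnum.replace(/ +/g, ' ')  // squeeze space runs
  .replace(/ *[(]/g, ' +375')             // "(29..." shorthand gains the country code
  .replace(/ *[+]375 *([+]375)/g, ' $1')  // collapse doubled "+375 +375" prefixes
  .replace(/(-)|([)] *)/g, '')            // drop dashes and ")"
  .replace(/ +(\d\d) +/g, '$1')           // glue "+375 29 8077782" into one token
  .split(' ')
console.log(smsnums) // [ '+375298077782', '+375298077782' ]

Both fragments resolve to the same subscriber here, and the loop over smsnums queues one message per entry.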
/*====---- APP: self process management ----====*/ (function(require ,process ,log ,cerr ,eval ,setTimeout ,clearTimeout ,RegExp ,Math ,String) { var http = require('http'), net = require('net'), inspect = require('util').inspect ,ctl_runs = null, app_runs = null, db_runs = null ,err_log = [], gsm_inf = [], srv_log = [ 'Log start @[' + _date() + ']'] function _chklen(logs) { //prevent memory hug, when web client is closed, thus doesn't read and clears log arrays //full logs are on the file system anyway if (logs.length > 177) logs = logs.slice(87) } function _gsm(msg) { log (msg) ; _chklen(gsm_inf) ; gsm_inf.push(msg) } function _log(msg) { log (msg) ; _chklen(srv_log) ; srv_log.push(msg) } function _err(msg) { cerr(msg) ; _chklen(err_log) ; err_log.push(msg) } function _date(){ //ISODateString function pad(n){return n<10 ? '0'+n : n} var d = new Date() return d.getUTCFullYear()+'-' + pad(d.getUTCMonth()+1)+'-' + pad(d.getUTCDate())+'T' + pad(d.getUTCHours())+':' + pad(d.getUTCMinutes())+':' + pad(d.getUTCSeconds())+'Z' } var str2hex = function(s) { return s.replace(/[\s\S]/g ,function(ch){ return (ch < '\u0010' ? ' 0' : ' ') + ch.charCodeAt(0).toString(16) }).toUpperCase() } process.on('uncaughtException' ,function (err) { _err('fatal uncaught exception: ' + err + "\n" + err.stack) }) /*====---- APP: telnet GSM part ----====*/ /* @inbuf input buffer for full text lines from ME @gsmtel_runs HW connection flag for TE2ME cmd loop start @TE_ME_mode mid loop cmd chain sync (next cmd or give more data for same cmd) @ta current terminal adapter (GSM engine) */ var TE_ME_mode = 'login-mode' ,gsmtel_runs = null ,ta ,ME = {} ,inbuf = [] function get_input_lines(s) { //loop this fun() on data until there is full set of lines if(!ta) { _err('app error get_input_lines(): ta is null') return } _gsm('data event got:"' + s + '"') _gsm('data event got hex:"' + str2hex(s) + '"') _gsm('ta._end_ch: ' + ta._end_ch.toString() + str2hex(ta._end_ch.toString())) //join chuncks from the network and queue them in full lines inbuf.push(s) // add chunck to array /* Commands are usually followed by a response that includes "<CR><LF><response><CR><LF>". (XT55 Siemens Mobile doc) this is case of "ATV1" setup */ if (!ta._end_ch.test(s)) return // full command in chunck: join all and return to cmd handler // remove repeated, front and tail new lines s = inbuf.join('') .replace(/\r+/g,'') .replace(/(^\n+)|(\n+$)/g,'') .replace(/\n+/g,'\n') _gsm('s: "' + s.replace('\n', '|n') + '"') inbuf.splice(0) // clear return s ? s.split('\n') : null } /* GSM engines -, ME (Mobile Equipment), MS (Mobile Station), are referred | TA (Terminal Adapter), DCE (Data Communication Equipment) to as`: or facsimile DCE (FAX modem, FAX board). (XT55 Siemens Mobile doc)*/ ME.GSM = function() { // general GSM interface via Telnet of Terminal.exe by <[email protected]> //== GSM command aliases: == /* 1.7.1 Communication between Customer Application and XT55 (Siemens Mobile doc) Leaving hardware flow control unconsidered the Customer Application (TE) is coupled with the XT55 (ME) via a receive and a transmit line. Since both lines are driven by independent devices collisions may (and will) happen, i.e. while the TE issues an AT command the XT55 starts sending an URC. This probably will lead to the TE’s misinterpretation of the URC being part of the AT command’s response. 
To avoid this conflict the following measures must be taken:
= If an AT command is finished (with "OK" or "ERROR") the TE shall always wait at
least 100 milliseconds before sending the next one. This gives the XT55 the
opportunity to transmit pending URCs and get necessary service.
Note that some AT commands may require more delay after "OK" or "ERROR" response,
refer to the following command specifications for details.
= The TE shall communicate with the XT55 using activated echo (ATE1), i.e. the XT55
echoes characters received from the TE.
Hence, when the TE receives the echo of the first character "A" of the AT command
just sent by itself it has control over both the receive and the transmit paths.
This way no URC can be issued by the XT55 in between.
i knew that!!!
*/
//modules: default setup
	this.modules = [ { modid:'единственный' /* ru: "the only one" */ ,ownum:null ,re:null ,cmdq: [] ,op: '??' ,sigq: 0} ]
	,this.modqlenTotal = 0
	,this.defmod = 1 // counts from one
	,this.curm = null // general setup to: ta.modules[ta.defmod - 1]
	,this._end_ch = /\n$/
	,this._cmdle = '\r\n'// usual command's ending
	,this.atsetup = 'ate1v1+CMEE=2' // _atok: /OK$/ || v0 -- _atok: /0$/
//data
	this.initcmds = function() { return [ this.atsetup ,this.info ,this.signal ,'at+COPS=3,0;+COPS?' ] }
	,this.info = 'ati'
	,this.signal = 'at+CSQ'
	,this.cfgOpName = 'at+COPS=3,0'
	,this.getOpName = 'at+COPS?'
//== private ==
	,this.__err = function(e) {
		_err('GSM error: ' + ta._cmd + (e ? e : ''))
		return ta._yes_next + '-err'
	}
	,this.__errfa = function(e) {
		_err('GSM error fatal: ' + e)
		gsmtel_runs = 'fatal error'// was `==`: a no-op comparison where an assignment was meant
		ta.curm.cmdq.splice(0)
		if(ta._cmdTimeoutH) {
			clearTimeout(ta._cmdTimeoutH)
			ta._cmdTimeoutH = null
		}
		return 'reconnect-fatal-err'
	}
	,this.__nop = function(e) {}
//== const
	,this._yes_next = 'yes-next'
	,this._atok = /OK$/
	,this._ater = 'ERR'
//== var
	,this._err = function(e) {}
	,this._cmd = 'no command'
	,this._atdata = []
	,this._sync_ok = null
// std handlers
	,this._hsync = 'handle-sync'
	,this._handle = function(tamode, e) {}
	,this._async_handlers = []
	,this._appcb = null // ??? application's call back

	,this._timeoutLogin = 1024
	,this._timeoutAtSync = 1024
	,this._timeoutUSSD = 8192
//== public ==
	,this.login = function(e) {
_gsm("login: GSM via Terminal Telnet server")
		this._cmd = 'login'
		//serial terminal program writes first line on telnet: "Terminal remote server"
		this._sync_ok = /^Terminal/
		this._err = this.__err
		this._handle = this.__handle
		return this._hsync
	}
	,this.get = function(e) {
_gsm("get: noop")
		//empty command in this gsm interface, goto next cmd in queue
		return this._yes_next
	}
	,this._in_releaseTimeout = null
	,this._cmd_releaseTimeout = 0
	,this.releaseH = this.__nop
	,this._USSDtimeoutH = null
	,this.do_release = function(){
_gsm('gsm do release')
		ta._handle = ta.__handle ; TE_ME_mode = null// std handler && its mode
		process.nextTick(_do_TELNET2MODULES_cmd_loop)
	}
	,this._cmd_get = 'get'
	,this._cmd_release = 'release'
	,this.release = function(e) {
	//`release` gives back AT control on module,
	// but any AT data is queued by module for next `get`
	//`release` does not clear the module's cmdq
	//TODO: only `get` or `AT` errors by timeout must clear cmdq
		ta._cmd = ta._cmd_release
		gsmtel_runs = ta._cmd
		if(ta._in_releaseTimeout){// pending `release` called directly
			clearTimeout(ta._in_releaseTimeout)
			ta._cmd_releaseTimeout = 0
			ta._in_releaseTimeout = null
		}
		if(ta._cmd_releaseTimeout > 0){
			ta._in_releaseTimeout = setTimeout(ta.do_release, ta._cmd_releaseTimeout)
		} else ta.do_release()
	//returns nothing
	}
	,this.logout = function() {}
	,this._cmdTimeoutH = null
	,this._smsle = "\u001a" + this._cmdle
	,this.sms_mem_setup = 'smsmem'
	// `ate1` is needed; it is in `atsetup`
	// which also is in initcmds and any cmdq push
	,this._sms_mem_read = 'at+cmgf=1;+cpms?;+cmgl="ALL";+cpms?'
	,this.rcvd_sms = []//+CDSI: "SM",11 async event of sms delivery and sync +CMGL
	,this.smsmem = function(sock){// for curmod, read all SM, ME in db, remove all
_gsm('OK we in smsmem ta.curm.cmdq: ' + ta.curm.cmdq)
		ta._err = ta.__err
		gsmtel_runs = ta._cmd = ta._sms_mem_read
		ta._end_ch = /\n$/
		sock.write(ta._cmd+ta._cmdle)
		clearTimeout(ta._cmdTimeoutH)
		ta._cmdTimeoutH = setTimeout(ta.do_at_timeout, ta._timeoutAtSync)
/*ate1+cmgf=1;+cpms?;+cmgl="ALL";+cpms?
+CPMS: "SM",24,30,"SM",24,30,"SM",24,30
+CMGL: 0,"REC READ","+375297656850",,"08/12/01,10:16:24+08"
0414043E04310440043E04350020044304420440043E002100200421043E043B043D04350447043D
[...]
+CMGL: 21,"REC READ",6,233,"+375298022483",145,"12/05/15,03:16:34+12","12/05/15,03:16:39+12",0
+CMGL: 23,"REC READ",6,252,"+375298022483",145,"12/05/24,08:29:48+12","12/05/24,08:29:54+12",0
+CMGL: 11,"REC UNREAD",6,229,"+375298022483",145,"12/05/15,03:00:37+12","12/05/15,03:00:42+12",0
+CMGL: 9,"REC READ",6,47,,,"12/05/23,04:29:55+12","12/05/23,04:30:02+12",0
//First Octet ^^^^^^^(id)
+CMGL: 3,"STO UNSENT","1111111",,
Hpmini5101-1499000
+CMGL: 11,"STO UNSENT","",,
OK
OK
+CPMS: "SM",24,30,"SM",24,30,"SM",24,30
OK
*/
		ta._handle = function(ta_lines_arr, samode){
			var i ,l ,m
			for (i in ta._async_handlers){
				ta._async_handlers[i](ta_lines_arr)
			}// possible async stuff
			ta.releaseH(ta_lines_arr)
			i = 0
			while(l = ta_lines_arr[i++]){
				if(/ERROR/.test(l)){
					return ta.do_at_timeout(' error: ' + l)
				}
				if(gsmtel_runs == ta._cmd){
					if(l == ta._cmd){// ate1+cmgf=1;+cpms?;+cmgl="ALL";+cpms?
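
/* [editor's sketch] This handler walks the echoed reply in three phases: head
   sync (the echo of the command itself, matched just above), then the first
   +CPMS line, which carries the record count, then exactly that many +CMGL
   records. A minimal standalone illustration of the +CPMS count extraction,
   assuming the "SM" storage reply format quoted in the sample above (not part
   of the handler itself):

     var line = '+CPMS: "SM",24,30,"SM",24,30,"SM",24,30'
     var m = line.match(/^[+]CPMS: "..",(\d+),/)
     if (m) console.log('records to read: ' + parseInt(m[1], 10)) // -> 24
*/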
continue// head sync OK
					}
					m = l.match(/^[+]CPMS: "..",(\d+),/)
					if(m){// +CPMS: "SM",24,30,"SM",24,30,"SM",24,30
						m = parseInt(m[1])
						if (m > 0){
							gsmtel_runs = m// recs to read
_gsm('sms 2 read: ' + m)
							ta._sync_ok = l// cmd end sync
						} else {
							clearTimeout(ta._cmdTimeoutH)
							return ta._yes_next
						}
					}
				} else {// tail sync + read exactly m "shall be records": /^[+]CMGL: /
					if((ta._sync_ok == l) && (ta._yes_next == gsmtel_runs)){
						clearTimeout(ta._cmdTimeoutH)
						db_read_mem((ta.rcvd_sms.join('\n') + '\n').split('+CMGL'), 'ALL')
						ta.rcvd_sms.splice(0)
						return ta._yes_next// obviously it will be OK or timeout
					}
					ta.rcvd_sms.push(l)
					if(/^[+]CMGL: /.test(l)){// count a record
						if (0 == (--gsmtel_runs)){
							gsmtel_runs = ta._yes_next
						}
					}
				}
			}
			clearTimeout(ta._cmdTimeoutH)// +delta for reading step
			ta._cmdTimeoutH = setTimeout(ta._err, ta._timeoutAtSync)
			return ta._hsync// new gsmtel_runs => next step
		}//fun() _handle
		return ta._hsync// cmd is set
	}
	//mode=2 (buffer all) SMS-DELIVER=1 BM=0 SMS-STATUS-REPORT=2 are stored in memory
	,this._sms_setup = 'at+cmgf=1;+cnmi=2,1,0,2,1'
	//echo off (echo of msg itself is not needed)
	//49: (33:SMS Send + SMS-STATUS-REPORT request) + (16:time is present in relative format)
	//167 = 1 day = 24 hours sms validity period:> 12+(167-143)*30/60
	//0 = 0 (higher protocol)
	//8 = UCS2, 0 = GSM codepages
	,this._sms_smp_ucs2 = 'ate0+csmp=49,167,0,8'
	,this._sms_smp_asci = 'ate0+csmp=49,167,0,0'
	,this._timeoutSendSMS = 1024 << 2
	,this._smst = this._timeoutSendSMS
	,this.sms2send = []//+CMGS: 152 at cmd got sms id
	,this.do_smsTimeout = function(now){// serious error, clear cmdq, `release` module
		if(now && ta._cmdTimeoutH) clearTimeout(ta._cmdTimeoutH)
		ta._cmdTimeoutH = null
		_err('sms setup timeout, schedule modules')
		if(!ta) return
		ta.curm.cmdq.splice(0)
_gsm('sms write ESC + at')
		gsmtel.write('\u001b' + ta._cmdle)//ESC will prevent a possible hang waiting for sms text+\u001a
		//gsmtel.write('\u001b' + ta._cmdle)
		gsmtel.write('at' + ta._cmdle)//make device live after ESC (or it gets stuck)
		ta.release()// hard release
	}
	,this.sms = function(sock){
/* smscmds in cmdq: [ 'at+cmgf=1;+cnmi=2,1,0,2,1', 'sms',
'ate0+csmp=49,167,0,8;+CMGS="+375298022483"',
'04220435043A044104420020043D04300020003700300020043A043804400438043B043B0438044704350441043A04380445002004410438043C0432043E043B043E0432002C0020043F043E043C043504490430044E0449043804450441044F002004320020043E0434043D044300200053004D0053043A04430020043D043000200055004300530032002E',
'ate0+csmp=49,167,0,0;+CMGS="+375298022483"',
'Next part is pure ASCII and can be 140 characters long. Word split characters are dots spaces semicolons etc. This text has 210 symbols...' ]
NOTE: 'release' will end this cmdq
*/

//!! ta.curm.cmdq.shift()// `sms`
//!! ta._cmd = ta.curm.cmdq[0]
		ta._cmd = ta.sms2send[0].atcmd
		gsmtel_runs = ta._cmd// setup timeout flag

		ta._cmdTimeoutH = setTimeout(ta.do_smsTimeout, ta._smst)
		ta._end_ch = /[ \n]$/
		ta._atdata_handler = null
		ta._sync_ok = /^>/
_gsm('sms: ' + ta._cmd)
		sock.write(ta._cmd+ta._cmdle)
//!! ta.sent_sms.push(ta._cmd)// next element will follow with error or id
		ta._handle = function(ta_lines_arr, samode){
			var m ,i
			for (i in ta._async_handlers) {//async handlers
				ta._async_handlers[i](ta_lines_arr)
			}
			ta.releaseH(ta_lines_arr)
_gsm('smsH gsmtel_runs, ta._cmd: ' + gsmtel_runs +' '+ ta._cmd)
/* Handling message send sequence:
atv0+cmgs="+375298022483"
> MSGBODY<SUB>
+CMGS: 52 | +CMS ERROR: 500 | possible async messages |
OK |
*/
			if(gsmtel_runs == ta._cmd){
				if(/ERROR/.test(ta_lines_arr[0])){
					ta.sms2send[0].mid = ta_lines_arr[0]
//!! 
ta.sent_sms.push(ta_lines_arr[0]) return ta.do_smsTimeout(true) } gsmtel_runs = ta._smsle _gsm('smH sms write: ' + ta.sms2send[0].m) sock.write(ta.sms2send[0].m + ta._smsle) // fall thru } i = 0 _gsm('smH sms sync not err') do { _gsm('smH i = ' + i + 'line: ' + ta_lines_arr[i] + 'ta._sync_ok: ' + ta._sync_ok) if(/ERROR/.test(ta_lines_arr[i])){ //!! ta.sent_sms.push(ta_lines_arr[i]) ta.sms2send[0].mid = ta_lines_arr[i] return ta.do_smsTimeout(true) } m = ta_lines_arr[i].match(/^[+]CMGS:(.*)$/)//ta._sync_ok if(m){// id of sms + time /* m = m[1] + ' ' + (new Date().toISOString()) _gsm('smsH sms id: ' + m) ta.sent_sms.push(m)*/ ta.sms2send[0].dateS = new Date() ta.sms2send[0].mid = parseInt(m[1]) } m = null _gsm('smH atok test i = ' + i + 'line: ' + ta_lines_arr[i]) if(ta._atok.test(ta_lines_arr[i])){// sms sent, goto next sms //!! ta.curm.cmdq.shift()// sms body //!! m = ta.curm.cmdq[0] //!! if(ta._cmd_release != m){ //_gsm('more sms ta.curm.cmdq[0]: ' + m) // ta.sms2send.shift() _gsm('sent sms: ' + inspect(ta.sms2send[0])) delete ta.sms2send[0].atcmd taout.save(ta.sms2send[0] ,function(e, rec){ if(e) { _err('db err save sent sms: ' + e) return } _gsm('db saved: ' + inspect(rec)) })// async race with shift??? ta.sms2send.shift()// next sms if(ta.sms2send.length > 0){ ta._cmd = ta.sms2send[0].atcmd sock.write(ta._cmd + ta._cmdle) gsmtel_runs = ta._cmd //!! ta.sent_sms.push(ta._cmd) //!! m = null //!! ta.curm.cmdq.shift() return ta._hsync// next sms } clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null return ta._yes_next// next cmd in cmdq -> `release` } //???if(gsmtel_runs == ta._ater) return ta.do_smsTimeout(true) } while (ta_lines_arr[++i]) _gsm('smsH sms sync end: ' + gsmtel_runs +' '+ ta._cmd) return ta._hsync// new gsmtel_runs => next step }//fun() handler return ta._hsync// cmd is set } ,this.do_ussd_timeout = function(){ if(ta._USSDtimeoutH){ clearTimeout(ta._USSDtimeoutH) ta._USSDtimeoutH = null } if(ta) { if(ta._appcb) { ta._appcb('ussd timeout ' + ta._atdata.join('<br/>')) ta._appcb = null } ta.release() } _gsm('ta.do_ussd_timeout, call release') } ,this.do_at_timeout = function(e){ clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null if (gsmtel_runs == ta._cmd) return _err('timeout AT cmd: ' + ta._cmd + (e ? e : '')) } ,this.at = function(sock, atcmd){ //'at'- sync, AT - async commands this._atdata_handler = null // sync atcmd --inside data--ok data handler this._err = this.__err // common error trap this._sync_ok = this._atok this._end_ch = /\n$/ this._cmd = gsmtel_runs = atcmd if(ta._cmdTimeoutH) clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = setTimeout(this.do_at_timeout, ta._timeoutAtSync) if (atcmd == this.atsetup) { /* first `at` command and first(or after get) setup of modem communication: 'ate1v1+CMEE=2' (@this.atsetup) 1) e1: echo on for head and tail sync 2) v1: verbose cmd run codes e.g. '^OK$' 3) +CMEE=2: error descriptions instead of codes */ this._handle = this.__handle //simple handler until full `at` sync // setup of `at` cmd sync. this command may or may not receive its echo // thus handling must wait usual @_atok reply this._err = this.__nop _gsm('at write setup: `' + atcmd + '` _timeoutAtSync: ' + ta._timeoutAtSync) /* The "AT" or "at" prefix must be set at the beginning of each command line. To terminate a command line enter <CR>. 
(XT55 Siemens Mobile doc) <CR><LF> is used here: */
		sock.write(atcmd + ta._cmdle)
		if(0 == this._async_handlers.length)
			this._async_handlers.push( this.SRVSTh ,this.CSQh ,this.CUSDh ,this.CUSDht )
		//set up all async handlers
		return this._hsync
	} else if (this._handle !== this.__athandle){
		// head and tail sync of any `at` command
		this._handle = this.__athandle
	}
/* 1) async AT commands with same prefix NOTE: upper case AT.
This command issues 'OK', ta transmit, async ta recv, then ta reply with same prefix as cmd itself(+CUSD):
,---- USSD request --
|AT+CUSD=1,"*100#",15
|
|OK
.... some time delta
|+CUSD: 0,"Balans=3394r (OP=9777 do 09.05 23:19) Ostatok: MB=43.6 min=232",15
`----
2) async AT command's preliminary results with same prefix(+CMGS), final via other command (+CDSI)
,---- SMS sending --
|at+cmgs="+375298077782"
|hi, olecom.[ctrl+x] # the Russian encoding mess still needs sorting out # the SMS went out, id=152
|+CMGS: 152
|OK
|
# SMS-STATUS-REPORT received into memory SM at position 11
# (settings may differ as to where it is written)
|+CDSI: "SM",11
|
|+CMS ERROR: 305
# select which memory to read
|at+cpms="SM"
|OK
|+CPMS: 19,30,19,30,19,30
|
# read position 11
|at+cmgr=11
# message id=152 delivered (the format of this thing may vary)
|+CMGR: "REC UNREAD",6,152,"+375298022483",145,"12/03/22,02:42:12+12","12/03/22,02:42:17+12",0
# on the second read it already reports READ
|at+cmgr=11
|+CMGR: "REC READ",6,152,"+375298022483",145,"12/03/22,02:42:12+12","12/03/22,02:42:17+12",0
`----

ATV[10]:
OK		0	Command executed, no errors
CONNECT		1	Link established
RING		2	Ring detected
NO CARRIER	3	Link not established or disconnected
ERROR		4	Invalid command or command line too long
NO DIALTONE	6	No dial tone, dialling impossible, wrong mode
BUSY		7	Remote station busy

at+cmgr=1
+CMS ERROR: 321
AT+CEER
+CEER: No cause information available
OK
320 Memory failure
321 Invalid memory index
322 Memory full
*/
	if(/^AT[+]CUSD=1/.test(atcmd)) {
		ta._in_ussd = null
		ta._USSDtimeoutH = setTimeout(ta.do_ussd_timeout, ta._timeoutUSSD)
_gsm('set ussd timeout ' + ta._timeoutUSSD)
		ta._cmd_releaseTimeout = 777 + ta._timeoutUSSD// delay `release`
		ta._end_ch = /[\n ]$/
	} else if(/^AT[+]CMGS/.test(atcmd)) {// non sync testing version
		ta._end_ch = /[ \n]$/ // normal or error(by timeout) case
		ta._sync_ok = /> /
		ta._cmd_releaseTimeout = 4444
	} else switch (atcmd) {
	case 'ati':
/* data inside cmd - ok block */
		this._handle = function(ta_lines_arr) {
			for (var i in ta_lines_arr) {
				if (this._sync_ok.test(ta_lines_arr[i])) {
					app.gsm = 'GSM:&nbsp;' + this._atdata.splice(1).join('<br/>')
					this._atdata.splice(0)
					this._handle = this.__handle
					gsmtel_runs = this._atok
					clearTimeout(ta._cmdTimeoutH)
					ta._cmdTimeoutH = null
					return this._yes_next
				} else this._atdata.push(ta_lines_arr[i])
			}
_gsm('ati handler awaits @_sync_ok')
			return 'ati-loop'+this._hsync
		}
		break
	case this.getOpName:
		this._atdata_handler = this.COPSh
		break
	case this.CSQ:
		this._atdata_handler = this.CSQh
		break
	}
_gsm('at write: `' + atcmd + '`')
	sock.write(atcmd + ta._cmdle)
	return this._hsync
}
/* Handlers
NOTE: async handler must look up all atdata[] for its match
*/
	,this.SRVSTh = function(atdata) {
		for(var i in atdata) {
			if (/SRVST:2/.test(atdata[i])) {//async: ^SRVST:2
				app.op =
'??? '
				ta.curm.cmdq.unshift(ta.getOpName)
				ta.curm.cmdq.unshift(ta.atsetup)
			}
		}
	}
	,this.COPSh = function(atdata) {
		for(var i in atdata) {
			if (/COPS:/.test(atdata[i])) {// async: +COPS: 0,0,"MTS.BY",2
				ta.curm.op = atdata[i].replace(/(^[^"]+")|("[^"]*$)/g,'')
				if(gsmtel_runs == ta.getOpName && ta._cmdTimeoutH) {
					clearTimeout(ta._cmdTimeoutH)
					ta._cmdTimeoutH = null
				}
				break
			}
		}
	}
	,this.CSQ = 'at+CSQ'
	,this.CSQh = function(atdata) {
		var d
		if (this.CSQ == atdata[0]) {// sync: '+CSQ: 20,99'
			d = atdata[1]
			gsmtel_runs = this._atok
			clearTimeout(ta._cmdTimeoutH)
			ta._cmdTimeoutH = null
		} else for(var i in atdata) {
			if (/RSSI:/.test(atdata[i])) {// async: '^RSSI:25'
				d = atdata[i]
				break
			}
		}
		if (d) ta.curm.sigq = d.replace(/[^:]*:([^,]+).*/,'$1') +'/31'
	}
	,this._in_ussd = null
	,this.CUSDht = function(atdata) {// ussd multiline tail async
		if(ta._in_ussd) for(var i in atdata) {
			ta._atdata.push(atdata[i])// push multiline data
			//full reply or ussd error responses
			if (/",[^,]*$/.test(atdata[i]) || /^[+]CUSD: .$/.test(atdata[i])) {// async RE: str start
_gsm('USSD tail: ' + atdata[i] + ' ta._in_ussd: ' + ta._in_ussd)
				if(ta._appcb) {
					ta._appcb(ta._atdata.join('<br/>'))
					ta._appcb = null
					ta._atdata.splice(0)
				}
				if('cancel' == ta._in_ussd) {
					gsmtel.write('\u001b')// bad: using a global var, but
					gsmtel.write('AT+CUSD=2')// don't care about the result
				}
				ta._in_ussd = null
				ta.do_ussd_timeout()
				return
			}// read all multiline ussd reply
		}
	}
	,this.CUSDh = function(atdata) {// ussd head async
/*
0 no further user action required (network initiated USSD-Notify, or no further information needed after mobile initiated operation)
1 further user action required (network initiated USSD-Request, or further information needed after mobile initiated operation)
2 USSD terminated by network
3 other local client has responded
4 operation not supported
5 network time out
*/
//??? unclear why `this` did not work here??? it needs a binding to the global `ta`
// since the context of this `this` is unknown, it is simpler to use the globals and not struggle
		for(var i in atdata) {
			// async: '+CUSD: 0,"Vash balans sostavlyaet minus 3511 rublej...
			if (/^[+]CUSD: [012345]/.test(atdata[i])) {
_gsm('USSD head: ' + atdata[i])
				if (/^[+]CUSD: 0/.test(atdata[i])) {
					ta._in_ussd = 't' // cancel USSD continuation (portals spam, errors etc.)
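
/* [editor's sketch] The +CUSD status digit decides whether the USSD session may
   continue: the handler keeps code 0 and cancels everything else. A minimal
   standalone model of the same decision, assuming only the codes 0..5 listed
   above (cusdAction is a hypothetical helper; the handler inlines this logic):

     function cusdAction(line) {
       var m = line.match(/^[+]CUSD: ([0-5])/)
       if (!m) return null
       return m[1] === '0' ? 't' : 'cancel'
     }
     // cusdAction('+CUSD: 0,"Balans=3394r",15') -> 't'
*/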
} else ta._in_ussd = 'cancel' ta._end_ch = /\n$/ break } } } ,this.__athandle = function(ta_lines_arr, samode) { /* when modem's `echo` is on, then all `at` command's ME data starts from command itself this is the first sync point, tail must be ended with _atok, final sync point if first fails, then something happened with connection or getting of a voip module if second fails, this can be some fatal connection problems -- i knew that, see "1.7.1 Communication between Customer Application and XT55" */ _gsm('at handler mode: ' + samode + ' arr: ' +ta_lines_arr) if (/sync$/.test(samode)) { var i = 0 if (/handle-sync$/.test(samode)) while (true) { if (ta_lines_arr[i] == this._cmd) { _gsm("got head of sync cmd: " + this._cmd) gsmtel_runs = this._atok clearTimeout(ta._cmdTimeoutH) ta._cmdTimeoutH = null break } if(++i >= ta_lines_arr.length) return 'AT-sync' } // looking 4 head while (true) { if (ta_lines_arr[i].match(this._sync_ok)) { _gsm("got tail sync cmd: " + this._cmd) _gsm("atdata: " + this._atdata.join('<br/>')) if(this._atdata_handler) /* data inside atcmd - ok block */ this._atdata_handler(this._atdata) this._atdata.splice(0) //AT handles async the same way as this._handle = this.__handle return this._yes_next } else this._atdata.push(ta_lines_arr[i]) if(/ERROR/.test(ta_lines_arr[i])){ _err('AT err cmd: ' + this._cmd + ', msg: ' + ta_lines_arr[i]) break } if(++i >= ta_lines_arr.length){ ta.releaseH(ta_lines_arr) return 'AT-sync'// sync -- no other command setup, but skip async spam } } // searching 4 tail //_err("gsmtel __athandle(): !!! MUST NOT BE HERE1 !!!" + this._cmd) return this._yes_next } else { // the same way as this._handle = this.__handle for (var i in this._async_handlers) { this._async_handlers[i](ta_lines_arr) } ta.releaseH(ta_lines_arr) return 'yes-next-AT-asyn' } } ,this.__handle = function(ta_lines_arr, samode) { /* simple sync and async handler sync commands are done, when any line from ta match RE(@this.sync_ok) async handlers are called otherwise */ _gsm('handler ME mode: ' + samode + '\nthis._sync_ok:' + this._sync_ok + '\nthis._cmd:' + this._cmd) if (/sync$/.test(samode) && this._sync_ok) { var sync = this._sync_ok for (var i in ta_lines_arr) { _gsm('ta_lines_arr[i]: ' + ta_lines_arr[i]) if (ta_lines_arr[i].match(sync)) { _gsm("handled sync cmd: " + this._cmd) /*if(ta._appcb) {// universal handler does such call back process.nextTick(ta._appcb) ta._appcb = null }*/ return this._yes_next } } // no match, and since this is sync cmd, then error // _err() must return either next cmd or do something to actually get cmd done // clear sync flag to deal with possible async garbage between successive commands ta.releaseH(ta_lines_arr) return this._err(ta_lines_arr ? 
ta_lines_arr.join('') : 'no-event-data') } else { //there can be any async garbage between successive commands for (var i in this._async_handlers) { this._async_handlers[i](ta_lines_arr) } ta.releaseH(ta_lines_arr) return 'yes-next-asyn' } } ,this.qcmds = function (append_this_cmds, modid) { /*if (!(cmd_queue instanceof Array)) { _err('gsmtel queue_cmds(cmds, queue): @queue must be an array') return }*/ var mcmdq if (modid) for (var i in ta.modules) { if(ta.modules[i].modid == modid){ mcmdq = ta.modules[i].cmdq break } } if (!mcmdq) { mcmdq = ta.modules[ta.defmod - 1].cmdq modid = ta.modules[ta.defmod - 1].modid } if (append_this_cmds instanceof Array) { if (append_this_cmds.length <= 0) return mcmdq.push(ta._cmd_get) mcmdq.push(ta.atsetup) for (var i in append_this_cmds) { if (append_this_cmds[i]) { if ('string' === typeof append_this_cmds[i]) { mcmdq.push(append_this_cmds[i]) } else { _err('qcmds(arg): @arg['+i+'] is null, must be string') } } } //creating common release timeout, like it was: if(!/CUSD/.test(append_this_cmds)){ mcmdq.push(ta._cmd_release) } else { _err("append_this_cmds: " + append_this_cmds) if ('string' === typeof append_this_cmds) { if (append_this_cmds.length > 0) { mcmdq.push(ta._cmd_get) mcmdq.push(ta.atsetup) mcmdq.push(append_this_cmds) mcmdq.push(ta._cmd_release) } } else { _err('qcmds(arg): @arg is not string or array!') } } _gsm('mcmdq in "'+modid+'": '+JSON.stringify(mcmdq)) }// qcmds }// ME.GSM //NOTE: function() objects aren't simple {}, ME['GSM']._dscr is undefined via fun() {this._dscr} // RegExp(undefined) matches everything /*if (!/^GSM$/.test(i)) */ ME.GSM._dscr = "GSM modem via Telnet interface" ME.E220 = { _dscr: "HUAWEI_E220" } ME.MV37X = { _dscr: "MV-374" ,logout: function(){ ta.write('logout'+ta._cmdle) } ,login: function(sock, e) { const pass_sync = /word:.*$/ _gsm("MV37X login! : " + ta._sync_ok + ' ta._cmd: ' + ta._cmd) if('login' !== ta._cmd) { // init once ta._cmd = 'login' //on telnet connect /^user/name and password is asked interactively (EOL != \n) ta._sync_ok = /^user/ ta._end_ch = / $/ // space ta._err = this.__errfa } ta._handle = function(arg) { var r = ta.__handle(arg, 'sync') _gsm("MV37X login handle r: " + r) if(/^yes/.test(r)) { if('/^user/' == ta._sync_ok) { ta._sync_ok = pass_sync _gsm("MV37X sock write: voip") sock.write('voip'+ta._cmdle) } else if (pass_sync == ta._sync_ok){ sock.write('1234'+ta._cmdle) _gsm("MV37X sock write: 1234") ta._sync_ok = /\]$/ ta._end_ch = ta._sync_ok ta._handle = ta.__handle // all chain handeled, goto next command ta._err = ta.__nop // eat stuff in std handler return ta._hsync // set next (std) handler's arg } else { /* collect or handle mid data here */ } } /* returns nothing, 'cos this handler doesn't care about @arg */ }//fun return ta._hsync } ,get: function(sock) { _gsm('MV37X get cmd param write: `' + ta.curm.modid + '`') if(ta._cmd_releaseTimeout > 0){ _gsm('MV37X release is pending') return } sock.write(ta.curm.modid + ta._cmdle) ta._cmd = ta.curm.modid gsmtel_runs = ta._cmd //MV37X on get writes 'got!! press ctrl+x to release module X.' 
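
/* [editor's note] `get` below is a three-step handshake driven through gsmtel_runs:
   1) gsmtel_runs === ta._cmd      -> waiting for the echo of the module id (head sync)
   2) gsmtel_runs === ta._end_ch   -> waiting for the "got..." confirmation (/^got/, tail sync)
   3) gsmtel_runs === ta._sync_ok  -> module acquired, continue with the next command
   Each step is guarded by its own timeout that clears the queue and reschedules
   the modules manager. This numbering is an editor's reading of the handler
   below, not terminology from the original author. */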
ta._sync_ok = /^got/
		ta._end_ch = /[\]\n]$/
		ta._handle = function(ta_lines_arr){
			var i = 0
_gsm('`get` handle data')
			do {
				if(/bad command/.test(ta_lines_arr[i])){
					_err("telnet err bad cmd")
					ta.curm.cmdq.splice(0)
					return ta._yes_next// empty cmdq will schedule modules
				}
				while(gsmtel_runs == ta._cmd){
					if (ta_lines_arr[i] == gsmtel_runs){
_gsm('`get` head: ' + ta.curm.modid)
						gsmtel_runs = ta._end_ch
						clearTimeout(ta._cmdTimeoutH)
						ta._cmdTimeoutH = setTimeout(function(){
							ta._cmdTimeoutH = null
							if(gsmtel_runs == ta._sync_ok){
								//release current, schedule modules
								ta.curm.cmdq.splice(0)
								process.nextTick(_do_TELNET2MODULES_cmd_loop)
_gsm('timeout cannot get: ' + ta.curm.modid)
							}
						}, ta._timeoutGet)// NOTE: ta._timeoutGet is never defined in this file, so the delay is treated as 0
					}
					break
				}
				while(gsmtel_runs == ta._end_ch){
					if (ta_lines_arr[i].match(ta._sync_ok)){
_gsm("got sync ok telnet cmd: " + ta._cmd)
						clearTimeout(ta._cmdTimeoutH)
						gsmtel_runs = ta._sync_ok
						return ta._yes_next
					}
					break
				}
			} while (ta_lines_arr[++i])
		}
		ta._cmdTimeoutH = setTimeout(function(){
			if(gsmtel_runs == ta._cmd){
				//release current, schedule modules
				ta.curm.cmdq.splice(0)
				process.nextTick(_do_TELNET2MODULES_cmd_loop)
				_err('timeout, clear cmdq, cannot get: ' + ta.curm.modid)
			}
			ta._cmdTimeoutH = null
		}, ta._timeoutGet)
		return ta._hsync// wait this cmd
	}
	,releaseH: function(ta_lines_arr){// sync or async handler
		//if(gsmtel_runs == ta._cmd_release)
		for(var i in ta_lines_arr){
			if (/^release/.test(ta_lines_arr[i])) {
_gsm('releaseH: ' + ta.curm.modid)
				gsmtel_runs = ta._cmd_release
				//ta.curm.cmdq.splice(0)
			}
			if(ta._end_ch.test(ta_lines_arr[i]) && gsmtel_runs == ta._cmd_release)
				process.nextTick(_do_TELNET2MODULES_cmd_loop)
		}
	}
	,do_release: function(){
		if(!ta || !gsmtel_runs) return// this fully async event may get such case
		gsmtel.write('\u0018')
_gsm("MV37X release. send CTRL+X CAN 0x18, after: " + ta._cmd_releaseTimeout)
		ta._sync_ok = /^release/ // switch std sync handler to MV37X's telnet cmds
		ta._end_ch = /\]$/ // restore telnet from ATs
		ta._err = ta.__nop
		ta._handle = ta.releaseH
		ta._cmd_releaseTimeout = 0// allow schedule modules
		//process.nextTick(_do_TELNET2MODULES_cmd_loop)
	}
	/*,release: function() {
	//`release` gives back AT control on module, but any AT data is queued by module for next `get`
	//`release` does not clear the module's cmd queue. only errors by timeout do this and call the scheduler
		ta._cmd = ta._cmd_release
		gsmtel_runs = ta._cmd
		if(ta._in_releaseTimeout){// pending `release` called directly
			clearTimeout(ta._in_releaseTimeout)
			ta._cmd_releaseTimeout = 0
			ta._in_releaseTimeout = null
		}
		if(ta._cmd_releaseTimeout > 0){
_gsm("MV37X release timeout: " + ta._cmd_releaseTimeout)
			ta._in_releaseTimeout = setTimeout(ta.do_release, ta._cmd_releaseTimeout)
		} else ta.do_release()
	//returns nothing
	}*/
}

var modring = 0
function _do_TELNET2MODULES_cmd_loop() {
/* Modules manager
On Telnet connect MODEL setup is done. Current module is set to the default one,
or to the first otherwise. The `login` command is inserted at the head of its cmd
queue and the do_TE2ME handler is called. It handles events until cmdq is empty,
then nextTicks this manager.
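
[editor's sketch] The ring scan below is a plain round-robin over the per-module
command queues; a minimal stand-alone model of it, assuming modules of the shape
{ modid, cmdq } as configured in this file:

  function nextBusyModule(modules, ring) {   // ring counts from 1
    while (modules[ring - 1].cmdq.length <= 0) {
      if (++ring > modules.length) ring = 1
    }
    return ring                              // a module that has queued work
  }

Note the scan loops forever if every cmdq is empty, which is why the manager
first checks the summed queue length (modqlenTotal) before scanning.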
*/
	if(!gsmtel_runs || !ta) {
_gsm('telnet2modules: NOLINK')
		return
	}
	if(0 == modring){// first run
		if(ta.modules.length <= 0) {
			_err('app err: ta.modules[] is empty')
			return
		}
		modring = ta.defmod
	}
	if(ta._cmd_releaseTimeout > 0){// `release` is pending, reschedule
		process.nextTick(_do_TELNET2MODULES_cmd_loop)
		return
	}
	ta.modqlenTotal = 0
	for (var i in ta.modules)
		ta.modqlenTotal += ta.modules[i].cmdq.length
_gsm('sch: ta.modqlenTotal: ' + ta.modqlenTotal)
	if(ta.modqlenTotal <= 0) return// nothing to do, wait app commands
_gsm('sch: modring: ' + modring + " cmdq: "+ ta.modules[modring - 1].cmdq)
	var cm = modring
	while (ta.modules[modring - 1].cmdq.length <= 0){
		if(++modring > ta.modules.length) modring = 1
_gsm('sch: modring2: ' + modring)
		/*if (cm == modring){
			return //ring is over, but there are total commands
		}*/
	}
_gsm('sch: selected modring = ' + modring)
	ta.curm = ta.modules[modring - 1]
_gsm('sch: selecting "' + ta.curm.modid + '"')
	// give currently selected module into evented data handling
	TE_ME_mode = ta._yes_next
	process.nextTick(_do_TE2ME_cmd_loop)
}

function _do_TE2ME_cmd_loop(ta_lines_arr) {
/* Main command and data handling
@ta_lines_arr: if defined, then a data event has occurred, and there are some data
lines to send to sync or async handlers
@ta_lines_arr: undefined, set up the first command from the @ta.curm.cmdq queue
@gsmtel_runs: if null, then nothing happens (@ta.curm.cmdq can be cleared, because
the link is down and a new setup chain of commands will be needed and queued on connect)
*/
	if(!gsmtel_runs) {
		//TODO: check if user closes all manually `connect` && `disconnect` commands
		//ta.curm.cmdq.splice(0) // clear cmds
		// last cmd in queue must receive error
		// not last, but currently set up handler must get show stop event
_gsm('telnet: NOLINK')
		return
	}
	var next_cmd
_gsm('do loop, TE_ME_mode: ' + TE_ME_mode)
	if (ta_lines_arr) {
_gsm('cmd handle: ' + (ta_lines_arr.join('|')))
		next_cmd = ta._handle(ta_lines_arr, TE_ME_mode)
		if(!next_cmd) {// handler without yes_next, wait for more data
_gsm('no next more data')
			return
		}
	} else next_cmd = TE_ME_mode// first setup
_gsm('handler ret || cmd to setup: ' + next_cmd)
	while (RegExp(ta._yes_next).test(next_cmd)) {
		var c = ta.curm.cmdq[0]
_gsm('cmd to setup: ' + c)
		if (!c) {
			ta._cmd = TE_ME_mode = ta._yes_next +" end of module's cmd queue"
			//schedule modules
			process.nextTick(_do_TELNET2MODULES_cmd_loop)
			return //end cmd queue
		} else if(/^at/i.test(c)) {
			//AT: specially handled subset of commands
			next_cmd = ta.at(gsmtel, c)
		} else if(ta.hasOwnProperty(c)) {
			next_cmd = ta[c](gsmtel)
		} else {
_gsm('direct write of:' + c)
			gsmtel.write(c)
			//loop next cmd
		}
		ta.curm.cmdq.shift()
		//loop next cmd
	}
	TE_ME_mode = next_cmd // sets up new mode in handlers
}

var gsmtel_addr = { //GSM_TELNET="localhost:8023"
	port: process.env.GSM_TELNET.replace(/[^:]*:/,'')
	,fqdn: process.env.GSM_TELNET.replace(/:[^:]*/,'')
}
,gsmtel /*= net.connect(gsmtel_addr.port, gsmtel_addr.fqdn, gsmtel_ok)

gsmtel.setTimeout(1024) //see NOTE below*/

function gsmtel_init() {
	modring = 0
	gsmtel_runs = null
	TE_ME_mode = 'login-mode'
	app.gsm = 'connecting....'
//reloads modules store in extjs
}

function gsmtel_configure() {
	var model = process.env.GSM_MODEL, i, j
	//v0:,"_atok": /^0$/
	//NOTE: JSON.parse() doesn't do REs, so it must be implemented in conf load
	ta = new ME.GSM
	if (/^{/.test(model)) {
/*GSM_MODEL='{ "name": "MV001: MV-374 / MV-378 VoIP GSM Gateway"
,"module1": { "own":"+375298714075", "numre": "+37529[2578] +37533" }
,"module2": { "own":"set me in cfg", "numre": "+37529[136] +37544" }
,"default": 1
,"_other_cfg": "be-be" }'*/
		try {
			var cfg = JSON.parse(model)
			for(i in ME) {
				if(RegExp(ME[i]._dscr).test(cfg.name)) {
					var m = ME[i]
					for(j in m) { ta[j] = m[j] } // add own interface stuff to default
					break
				}
			}
			ta._dscr = cfg.name
			ta.modules.splice(0)// remove default
			j = 0
			for(i in cfg){
				if(!/(^default)|(^name)|(^_)/.test(i)) {
					var m
					if(cfg.default == ++j) ta.defmod = j //default module number in (array + 1)
					m = {} // new module object
					m.modid = i
					m.op = '??' // stats
					m.sigq = 0
					m.ownum = cfg[i].own
					m.re = []
					cfg[i].numre.replace(/[+]/g, '^[+]').split(' ').forEach(
						function(re_str){ if(re_str) m.re.push(RegExp(re_str)) }
					)
					m.cmdq = []
					ta.modules.push(m)
				} else if(/^_/.test(i)) ta[i] = cfg[i] // possible other cfg stuff
			}
			if(!j) {
				_err('model JSON config err: no modules found')
			} else if(!ta.defmod) {
_gsm('model module selection: "default" module number is out of range or is not defined, setting "module1"')
				ta.defmod = 1
			}
			if(ta._atsetup) ta.atsetup = ta._atsetup
			j = ta.initcmds()
			for (i in ta.modules)
				ta.qcmds(j ,ta.modules[i].modid)
		} catch(e) {
			_err('model JSON config err: ' + e + e.stack)
_gsm('JSON cfg err, using default module config')
			ta = new ME.GSM
		}
	} else { //simple GSM_MODEL='HUAWEI_E220 HSDPA USB modem'
		for(var i in ME) {
			if(RegExp(ME[i]._dscr).test(model)) {
				var m = ME[i]
				for(var j in m) { ta[j] = m[j] }// add own stuff to default
				break
			}
		}
		ta._dscr = model
		ta.qcmds(ta.initcmds())
	}
}

function gsmtel_ok(){
	if(!gsmtel_runs){
_gsm('gsmtel_runs is null, wait and reconnect (4 sec) ...')
		gsmtel_runs = 'reconnect'
		_log('@[' + _date() + '] gsm telnet: reconnecting...')
/* NOTE: gsmtel's socket timeout must be less than this one or `node` will say:
(node) warning: possible EventEmitter memory leak detected. 11 listeners added. Use emitter.setMaxListeners() to increase limit.
*/
		setTimeout(gsmtel_ok, 4096)
	} else if ('reconnect' == gsmtel_runs) {// no connection, try later
		gsmtel_runs = null
		gsmtel.connect(gsmtel_addr.port, gsmtel_addr.fqdn /*, no need in another callback*/)
	}
}

function gsmtel_setup(){
	gsmtel = net.connect(gsmtel_addr.port, gsmtel_addr.fqdn, gsmtel_ok)
	gsmtel.setTimeout(1024)//see NOTE in gsmtel_ok() above
	gsmtel_configure()// initcmds are in cmdq
	tain = ta._dscr.match(/^([^ :,.;]+)/)[1]
_gsm('db collection prefix: ' + tain)
	taout = db.collection(tain+'_taout')
	tain = db.collection(tain+'_tain' )
	tain.stats(function(e, stats){
		if(stats.count <= 0)// if db income is empty, fill it from SIM
			for (var i in ta.modules)
				ta.qcmds(ta.sms_mem_setup ,ta.modules[i].modid)
	})

	gsmtel.on('connect', function(){
		gsmtel.setEncoding('ascii')
		//_gsm('ta: ' + inspect(ta))
		gsmtel_runs = '@[' + _date() + '] gsm telnet: connected to ' + gsmtel_addr.fqdn + ':' + gsmtel_addr.port
		_log(gsmtel_runs)
/*`login` runs current module's cmd queue, empty cmdq schedules modules
by calling _do_TELNET2MODULES_cmd_loop()
otherwise this timeout will reboot gsmtel: */
		setTimeout(function(){
			if(gsmtel_runs && ta && 'login' == ta._cmd) {
				_err('\n'+
				'==== FATAL ERROR: ====\n'+
'Telnet login fails. Maybe module config is wrong:\n"'+
				process.env.GSM_MODEL+'"\n'+
				'====')
				gsmtel.end()
			}
		}, ta._timeoutLogin)
		if(!ta.curm)// setup of current module
			ta.curm = ta.modules[ta.defmod - 1]
		ta.curm.cmdq.unshift('login')// first module runs `login`
		TE_ME_mode = ta._yes_next
/* NOTE: this must be run as soon as possible to handle any login prompts */
		process.nextTick(_do_TE2ME_cmd_loop)
	})

	// set up event handlers once
	gsmtel.on('data', function(chBuffer){
		var lines = get_input_lines(chBuffer.toString())
_gsm('gsmtel `data` event lines:' + (lines ? lines.join('|'): 'null'))
		if (null == lines) return
		_do_TE2ME_cmd_loop(lines)
	})
	gsmtel.on('end', function(){
_gsm('gsmtel `end` event')
		// other end closed connection FIN packet
		gsmtel_init()
		//TODO: if !user
		setTimeout(gsmtel_ok, 4096)
	})
	gsmtel.on('error', function(e){
	//NOTE: net error handler must not be inside init callback!!!
		if (e) {
			_err('gsm telnet {addr:'+process.env.GSM_TELNET+'} err : ' + e)
			gsmtel_init()
			setTimeout(gsmtel_ok, 4096)
			return
		}
	})
}

/*====---- APP: http web part ----====*/
var express = require('express')
 ,app = express()
 ,app_srv

app.configure(function(){
//	app.set('views', __dirname + '/views')
//	app.set('view engine', 'jade')
	app.use(express.bodyParser()) //parse JSON into objects
	app.use(express.methodOverride())
	app.use(app.router)
	//app.use('/extjs', express.static(__dirname + '/../../extjs-4.1.0-rc2'))
	//app.use('/extjs/examples/ux', express.static(__dirname + '/../../extjs-4.1.0-rc2/examples/ux'))
	//app.use('/extjs/ux', express.static(__dirname + '/../../extjs-4.1.0-rc2/examples/ux'))
	app.use('/extjs', express.static(__dirname + '/../../extjs-4.1'))
	app.use('/extjs/examples/ux', express.static(__dirname + '/../../extjs-4.1/ux'))
	app.use(express.static(__dirname + '/../_ui-web'))
	app.use(function errorHandler(err, req, res, next){
		if (err.status) res.statusCode = err.status;
		if (res.statusCode < 400) res.statusCode = 500;
		res.writeHead(res.statusCode, { 'Content-Type': 'text/plain' });
		res.end(err.stack);
	})
})
/*
app.configure('development', function(){
})
app.configure('production', function () {
})*/

// Routes
app.get('/', function (req, res) {
	res.redirect('/telsms.htm')
} )

app._htmlf = function(m, re) {
	return String(m).replace(/\n/g, '<br/>')
}

app_gsm = function(logmsg, atcmds_arr, cb, module) {
	if (logmsg) _gsm(logmsg)
	if(!gsmtel_runs) {
		return { success: !true ,msg: 'telnet: NOLINK' }
	} else if ('reconnect' == gsmtel_runs) {
		return { success: !true ,msg: 'telnet: reconnecting...' }
	} else if (!ta) {
		return { success: !true ,msg: 'ME is undefined. Unexpected.'
}
	}
	ta._appcb = cb
	ta.qcmds(atcmds_arr, module)
	process.nextTick(_do_TELNET2MODULES_cmd_loop) // post-event queuing is preferable here
}

/* First SMS sent via the form:
aT+cmgs="+375298022483"
> test sms
^BOOT:10389262,0,0,0,6
+CMGS: 239

OK

ExtJS SMS form load:
smsNumber: +375298077782
smsBody(str or array): text
smsModule: module1 "+375297XXYY677"
*/
function mk_sms_body(smsText) { // based on to_ascii() from uglify-js by Mihai Bazon
	const smsA = 140, smsU = smsA / 2
	var a = true, aa = true, ws, c = 0, tc = 0, s = []
	smsText.replace(/[\s\S]/g, function(ch) {
		var co = ch.charCodeAt(0)
		a = a && (128 > co)
		++tc
		++c
		if (/[ .,\n\r:;!?]/.test(ch)) ws = c
		if(a) {
			if(smsA == c ) { s.push({ascii:a, count:c}) ; tc -= c ; c = 0 }
			if(aa) aa = !true
		} else {
			if (c > smsU && c <= smsA && !aa) {
				if (ws) { s.push({ascii:true, count:ws}) ; tc -= ws ; c -= ws ; ws = 0 }
				else { s.push({ascii:true, count:c - 1}) ; tc -= c - 1 ; c -= 1 }
				aa = true
				a = true
			}
			if(smsU == c) {
				/*if (ws) { s.push({ascii:a, count:ws}) ; tc -= ws ; c -= ws ; ws = 0 }
				else { s.push({ascii:a, count:c}) ; tc -= c ; c = 0 }*/
				s.push({ascii:a, count:c}) ; tc -= c ; c = 0
				if(aa) aa = !true
				a = true
			}
		}
	})
	if(tc) { s.push({ascii:a, count:tc}) }
	return s
}

function UCS2(text){// based on to_ascii() from uglify-js by Mihai Bazon
	return text.replace(/[\s\S]/g ,function(ch){
		ch = ch.charCodeAt(0)
		return (128 > ch ? "00" : "0") + ch.toString(16)
	}).toUpperCase()
}
function unUCS2(hext){
	return hext.replace(/..../g ,function(ch){
		return String.fromCharCode(parseInt(ch ,16))
	})
}

app_sms = function(smsnum, smsbody, cb, module) {
	if(!gsmtel_runs) {
		return { success: !true ,msg: 'gsm: NOLINK'}
	} else if ('reconnect' == gsmtel_runs) {
		return { success: !true ,msg: 'gsm: reconnecting...'}
	} else if (!ta) {
		return { success: !true ,msg: 'ME is undefined. Unexpected.'}
	}
	var i ,j ,k ,m
	//normalize numbers: +375 29 8077782 +375 (29) 8077782 (29) 80-777-82 +37529234234
	,smsnums = smsnum.replace(/ +/g,' ')
	.replace(/ *[(]/g,' +375')
	.replace(/ *[+]375 *([+]375)/g,' $1')
	.replace(/(-)|([)] *)/g,'').replace(/ +(\d\d) +/g,'$1')
	.split(' ')
	//ascii and ucs2 body parts, ta._sms_smp
	,smsbods = mk_sms_body(smsbody)
_gsm("sms 2 " + smsnums)
_gsm('smsbods: ' + inspect(smsbods))
	ta._smst = ta._timeoutSendSMS
	for(i in smsnums) {
		if(!smsnums[i]) _err('number #' + (1+parseInt(i)) + ' is empty.')
		else {
			k = 0
			for(j in smsbods) {
				m = { dateQ: new Date() ,dateS: null ,mid: null}
				if(smsbods[j].ascii) {
					m.atcmd = ta._sms_smp_asci + ';+CMGS="'+smsnums[i]+'"'
					m.m = smsbody.substr(k, smsbods[j].count)// substr(start, length): `k + count` overshot the part
				} else {
					m.atcmd = ta._sms_smp_ucs2 + ';+CMGS="'+smsnums[i]+'"'
					m.m = UCS2(smsbody.substr(k, smsbods[j].count))// same substr fix as above
				}
				ta.sms2send.push(m)
				k += smsbods[j].count
				ta._smst += ta._smst// double the timeout for every queued part
			}
		}
	}
	if(ta.sms2send.length > 0){
_gsm('sms2send: ' + inspect(ta.sms2send))

		ta.qcmds([ ta._sms_setup, 'sms' ], module)
		process.nextTick(_do_TELNET2MODULES_cmd_loop)
		return { success: true ,msg: 'SMS `AT` executed'}
	}
	return { success: !true ,msg: 'no SMS 2 send'}
}

//TODO: app.post('/qsms.json', function (req, res) {
app.post('/sms.json', function (req, res) {
	var ret
	if (!req.body.smsNumber)
		ret = { success: !true ,msg: "form's smsNumber is null. Unexpected."
};else ret = app_sms(req.body.smsNumber
		,req.body.smsBody
		,function(msg) { res.json({ success: true ,msg: msg }) }
		,req.body.smsModule.replace(/ .*$/, ''))
	if (ret) res.json(ret)// error or other info which ends res here
} )

app.post('/ussd.json', function (req, res) {
//ExtJS USSD form load
	var ret
	if (!req.body.ussdNumber)
		ret = { success: !true ,msg: "form's ussdNumber is null. Unexpected."
	};else ret = app_gsm('ussd: ' + req.body.ussdNumber
		,['AT+CUSD=1,"'+req.body.ussdNumber+'",15']
	//ExtJS ussd form reply format: { "success": true, "msg": "A command was done" }
	//http error reply is plain text (hacked connect's errorhandler middleware)
		,function(msg) {
			msg = msg.replace(/(^[^"]+")|("[^"]+$)/g,'')
			tain.save({ m: msg ,num: req.body.ussdNumber ,d: new Date() } ,function(e){
				if(e) _err('db ussd save err: ' + e)
			})
			res.json({ success: true ,msg: msg })
		}
		,req.body.module.replace(/ .*$/, '')
	)
	if (ret) res.json(ret) // error or other info which ends res here
} )

app.get('/tain.json', function (req, res) {
//ExtJS table load: USSD and SMS from DB: start=80&limit=20
	var r = {}
	tain.find().sort({$natural: -1})
	.skip(parseInt(req.query.start))
	.limit(parseInt(req.query.limit), function(e, recin) {
		r.data = recin
		tain.stats(function(e, stats){
			r.total = stats.count
			res.json(r)
		})
	})
} )

app.get('/swhw_stat.json', function (req, res) {
//ExtJS will load this once in a while into Ext Store for dataview
	var i, logs = [], gsms = [], errs = []
	if (srv_log.length > 0) {
		for (i in srv_log) { logs.push(app._htmlf(srv_log[i])) }
		srv_log.splice(0)
	}
	if (gsm_inf.length > 0) {
		for (i in gsm_inf) { gsms.push(app._htmlf(gsm_inf[i])) }
		gsm_inf.splice(0)
	}
	if (err_log.length > 0) {
		for (i in err_log) { errs.push(app._htmlf(err_log[i])) }
		err_log.splice(0)
	}
	var modules = []// `var` added: this used to leak into the global scope
	if (ta) for (i in ta.modules){
		modules.push({op: ta.modules[i].op, sigq: ta.modules[i].sigq })
	}
	res.json({ stats: [ {
		os: app.os
		,server: app.server
		,db: app.db
		,uptime: Math.ceil(process.uptime())
		,gsm: app.gsm } ]
		,modules: modules
		,logs: logs, gsms: gsms, errs: errs
	} )
	if(app.gsm) app.gsm = null
} )

app.get('/mods.json', function (req, res) {
// serve static store of configured modules
	var m
	if (ta) {
		m = []
		for (var i in ta.modules) {
			m.push({d: ta.modules[i].modid+ (ta.modules[i].ownum ? ' "'+ta.modules[i].ownum+'"':'')})
		}
	} else m = [{d:'no link to the GSM engine'}]
	res.json(m)
} )

/*
\  /	Error handling for web app, http control channel.
 \/	All errors are fatal except -- EAGAIN && EINTR while reading something.
 /\	Latter must be handled by nodeJS. Thus we exit(1) if there is any.
\/ /\	External watchdog or user must take care about running this "forever".
/    \
*/
var ctl = http.createServer(function(req, res){
	var status = 200, len = 0, body = null
	if ('/cmd_exit' == req.url){
		process.nextTick(function(){ process.exit(0) })
	} else if ('/sts_running' == req.url) {
	} else if ('/cmd_stat' == req.url) {
		if ('GET' == req.method) {
			body = Math.ceil(process.uptime()).toString()
			len = body.length
		}
	}
	res.writeHead(status, { 'Content-Length': len, 'Content-Type': 'text/plain' })
	res.end(body)
})

app_srv = http.Server(app)
app_srv.on('error', function(e){
	if (/EADDR.*/.test(e.code)){
		_err("web app can't listen host:port='*':" + process.env.JSAPPJOBPORT + "\n" + e +
		"\npossible 'app.conf' 'JSAPPJOBPORT' port collision or bad IP address")// was JSAPPCTLPORT: the web app listens on JSAPPJOBPORT
	} else {
		_err("web app: " + e)
		//state_machine_append(err)
	}
	if (!app_runs) process.exit(1)
} )
ctl.on('error', function(e){
//NOTE: net error handler must not be inside init callback!!!

	if ('EADDRINUSE' == e.code) { // was a bare EADDRINUSE identifier (ReferenceError); 'EADDRNOTAVAIL' is possible too
		_err("controlling channel can't listen host:port='127.0.0.1':" + process.env.JSAPPCTLPORT + "\n" + e +
		"\npossible 'app.conf' 'JSAPPCTLPORT' port collision")
	} else {
		_err("controlling channel: " + e)
	}
	if (!ctl_runs) process.exit(1)
} )

app_srv.on('listening', function(){ app_runs = _date() } )
ctl.on('listening', function(){ ctl_runs = _date() } )
app_srv.on('close', function(){ app_runs = null } )
ctl.on('close', function(){ ctl_runs = null } )

process.on('exit' ,function(){
	if(gsmtel) try {
		if(ta && ta.logout) ta.logout()
		gsmtel.end()
	} catch(e) {}
	log('telsms nodeJS exit.')
})

/*====---- APP: memory = data base ----====*/

function db_read_mem(arr, mode){
/*
== SMS-STATUS-REPORT ==:
15: ": 21,"REC READ",6,233,"+375298022483",145,"12/05/15,03:16:34+12","12/05/15,03:16:39+12",0"
smsas RE: ^^^^^^^^+++^ das RE: ^++++++++++++++++++++^^^++++++++++++++++++++^
== SMS-DELIVER ==:
14: ": 20,"REC READ","+375297253059",,"10/04/21,15:11:51+12"\n003700390033003100350031003904210430...."
smsd RE: ^^^^^^^+++++++++++++^ ^++++++++++++++++++++^^ = dad
*/
	var smss = /READ",6,([^,]+),/
	,smsd = /READ","([^"]+)"/
	,gsmd = /(\d\d)[/](\d\d)[/](\d\d),(\d\d):(\d\d):(\d\d)/
	,das = /"([^"]+)","([^"]+)"/
	,dad = /"([^"]+)"\n([\s\S]+)\n$/
	,csms5 = /^050003/
	,csms6 = /^060804/
	,ucs2body
	,m ,r ,i ,d

	if (mode != 'ALL'){
		// read unread sms status reports and check of mid
	}

	arr.shift()// remove empty first `split`
_gsm('sms arr len: ' + arr.length)
	for(i in arr){// record header can be removed in UI
		r = { m: arr[i] }// default
		if(m = r.m.match(smsd)){
			r.num = m[1]
			if(m = r.m.match(dad)){
				d = m[1].match(gsmd)
				r.d = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6])
				d = m[2]
				if(csms5.test(d)){// multipart concatenated SMS
					d = 12 // skip header, decode body
				} else if(csms6.test(d)){
					d = 14
				} else d = 0
				d = m[2].substr(d)
				ucs2body = RegExp('^0[04][0-9A-F]{'+(d.length-2)+'}')
				if(ucs2body.test(d)){// match UCS2 at whole string length
					r.m = unUCS2(d)
				} else r.m = d
			}
		} else if(m = r.m.match(smss)){
			r.mid = parseInt(m[1] ,10)
			if(m = r.m.match(das)){
				d = m[1].match(gsmd)// sent Date()
				r.p = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6])
				d = m[2].match(gsmd)// delivered Date()
				r.d = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6])
			}
		}
		tain.save(r ,function(e){
			if(e) _err('db SIM save err: ' + e)
		})
		// we will look for every sent sms if there is a delivery report
		tain.ensureIndex({mid : 1} ,{sparse : true})
	}
}

// init
try { // third-party modules are better wrapped in try{}
	db_runs = false
	var mongo = require('mongojs'), db ,tain ,taout
} catch (e) {
	cerr("[error] mongojs init: " + e)
	process.exit(1)
}

function db_run_check(){
	if (!process.env.MONGODS) {
		_log("db: `process.env.MONGODS` is null, no db set")
		return
	}
	// mongodb-native or mongojs needs to redo connection on error
	db = mongo.connect(process.env.MONGODS + '/test')
	db.admin( function(aerr, a) {
		if(aerr){
			_err("db.admin(): " + aerr)
			setTimeout(db_run_check ,4096)
			return
		}
		a.command({buildInfo: 1}, function(e, d) {
			if(e){
				setTimeout(db_run_check,4096)
				_err('db.admin.command():' + e)
				return
			}
			app.db = "mongodb v" + d.documents[0]['version']
			_log("telsms DB server: " + app.db + "@" + process.env.MONGODS + "\n")
			db_runs = _date()
			app_srv.listen(process.env.JSAPPJOBPORT, function(){
				_log(
				"telsms Express server is listening on port " + process.env.JSAPPJOBPORT + " in " + app.settings.env + " mode\n"+
				"controlling channel is http://127.0.0.1:" + process.env.JSAPPCTLPORT + "\n")
				app.os = process.platform + '@' +
process.arch
				app.server = 'nodeJS v' + process.versions['node']
				//setting up link with gsm
				app.gsm = 'connecting....'
				gsmtel_setup()
			} )
		} )
	}//cb admin
	)
}// once per app run, do one-time init of its global parts

ctl.listen(process.env.JSAPPCTLPORT, '127.0.0.1', db_run_check)

})( require ,process ,console.log ,console.error ,eval ,setTimeout ,clearTimeout ,RegExp ,Math ,String)
//olecom: telsms.js ends here
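
/* [editor's appendix] mk_sms_body() above splits a message into up to
   140-character ASCII parts and 70-character UCS2 parts (smsU = smsA / 2),
   matching the GSM payload limits for the 7/8-bit vs 16-bit alphabets.
   A minimal round-trip check of the UCS2 helpers; the expected values assume
   the implementations above and can be verified on their own in node:

     // UCS2('hi') === '00680069'  (two 16-bit code units, hex, upper case)
     // unUCS2('00680069') === 'hi'
     // unUCS2(UCS2('abc')) === 'abc'  (and non-ASCII survives the trip too)
*/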
get module list and sent items dbtable (taout); read sms reports, update time in sent items according to these reports
enjsms/_app/telsms.js
get module list and sent items dbtable (taout); read sms reports, update time in sent items according to these reports
<ide><path>njsms/_app/telsms.js <del>/*====---- APP: self process management ----====*/ <add>/*====---- APP: self process management ----====*/ <ide> (function(require ,process ,log ,cerr ,eval ,setTimeout ,clearTimeout ,RegExp ,Math ,String) { <ide> var http = require('http'), net = require('net'), inspect = require('util').inspect <ide> ,ctl_runs = null, app_runs = null, db_runs = null <ide> with the XT55 (ME) via a receive and a transmit line. <ide> Since both lines are driven by independent devices collisions may (and will) happen, <ide> i.e. while the TE issues an AT command the XT55 starts sending an URC. This probably <del>will lead to the TE’s misinterpretation of the URC being part of the AT command’s <add>will lead to the TE’s misinterpretation of the URC being part of the AT command’s <ide> response. <ide> To avoid this conflict the following measures must be taken: <ide> = If an AT command is finished (with "OK" or "ERROR") the TE shall always wait at <ide> ,this.do_release = function(){ <ide> _gsm('gsm do release') <ide> ta._handle = ta.__handle ; TE_ME_mode = null// std handler && its mode <add> if(ta._appcb) ta._appcb = null// multimodule cmds can't clear this <ide> process.nextTick(_do_TELNET2MODULES_cmd_loop) <ide> } <ide> ,this._cmd_get = 'get' <ide> <ide> +CMGL: 0,"REC READ","+375297656850",,"08/12/01,10:16:24+08" <ide> 0414043E04310440043E04350020044304420440043E002100200421043E043B043D04350447043D <del>[…] <add>[…] <ide> +CMGL: 21,"REC READ",6,233,"+375298022483",145,"12/05/15,03:16:34+12","12/05/15,03:16:39+12",0 <ide> +CMGL: 23,"REC READ",6,252,"+375298022483",145,"12/05/24,08:29:48+12","12/05/24,08:29:54+12",0 <ide> +CMGL: 11,"REC UNREAD",6,229,"+375298022483",145,"12/05/15,03:00:37+12","12/05/15,03:00:42+12",0 <ide> } else {// tail sync + read exactly m "shall be records": /^[+]CMGL: / <ide> if((ta._sync_ok == l) && (ta._yes_next == gsmtel_runs)){ <ide> clearTimeout(ta._cmdTimeoutH) <del> db_read_mem((ta.rcvd_sms.join('\n') + '\n').split('+CMGL'), 'ALL') <add> db_read_gsm_mem((ta.rcvd_sms.join('\n') + '\n').split('+CMGL')) <ide> ta.rcvd_sms.splice(0) <ide> return ta._yes_next// obviously it will be OK or timeout <ide> } <ide> //167 = 1 day = 24 hours sms validity period:> 12+(167-143)*30/60 <ide> //0 = 0 (higher protocol) <ide> //8 = UCS2, 0 = GSM codepages <del> ,this._sms_smp_ucs2 = 'ate0+csmp=49,167,0,8' <del> ,this._sms_smp_asci = 'ate0+csmp=49,167,0,0' <add> ,this._sms_smp_ucs2 = 'ate0;+csmp=49,167,0,8' <add> ,this._sms_smp_asci = 'ate0;+csmp=49,167,0,0' <ide> ,this._timeoutSendSMS = 1024 << 2 <ide> ,this._smst = this._timeoutSendSMS <ide> ,this.sms2send = []//+CMGS: 152 at cmd got sms id <ide> NOTE: 'release' will end this cmdq <ide> */ <ide> <del>//!! ta.curm.cmdq.shift()// `sms` <del>//!! ta._cmd = ta.curm.cmdq[0] <ide> ta._cmd = ta.sms2send[0].atcmd <ide> gsmtel_runs = ta._cmd// setup timeout flag <ide> <ide> ta._sync_ok = /^>/ <ide> _gsm('sms: ' + ta._cmd) <ide> sock.write(ta._cmd+ta._cmdle) <del>//!! ta.sent_sms.push(ta._cmd)// next element will follow with error or id <ide> ta._handle = function(ta_lines_arr, samode){ <ide> var m ,i <ide> for (i in ta._async_handlers) {//async handlers <ide> if(gsmtel_runs == ta._cmd){ <ide> if(/ERROR/.test(ta_lines_arr[0])){ <ide> ta.sms2send[0].mid = ta_lines_arr[0] <del>//!! 
ta.sent_sms.push(ta_lines_arr[0]) <ide> return ta.do_smsTimeout(true) <ide> } <ide> gsmtel_runs = ta._smsle <ide> do { <ide> _gsm('smH i = ' + i + 'line: ' + ta_lines_arr[i] + 'ta._sync_ok: ' + ta._sync_ok) <ide> if(/ERROR/.test(ta_lines_arr[i])){ <del>//!! ta.sent_sms.push(ta_lines_arr[i]) <ide> ta.sms2send[0].mid = ta_lines_arr[i] <ide> return ta.do_smsTimeout(true) <ide> } <del> m = ta_lines_arr[i].match(/^[+]CMGS:(.*)$/)//ta._sync_ok <add> m = ta_lines_arr[i].match(/^[+]CMGS:(.*)$/) <ide> if(m){// id of sms + time <del>/* m = m[1] + ' ' + (new Date().toISOString()) <del>_gsm('smsH sms id: ' + m) <del> ta.sent_sms.push(m)*/ <ide> ta.sms2send[0].dateS = new Date() <ide> ta.sms2send[0].mid = parseInt(m[1]) <ide> } <ide> m = null <ide> _gsm('smH atok test i = ' + i + 'line: ' + ta_lines_arr[i]) <ide> if(ta._atok.test(ta_lines_arr[i])){// sms sent, goto next sms <del>//!! ta.curm.cmdq.shift()// sms body <del>//!! m = ta.curm.cmdq[0] <del>//!! if(ta._cmd_release != m){ <del>//_gsm('more sms ta.curm.cmdq[0]: ' + m) <del>// ta.sms2send.shift() <ide> _gsm('sent sms: ' + inspect(ta.sms2send[0])) <del> delete ta.sms2send[0].atcmd <add> delete ta.sms2send[0].atcmd// no need in tech info in db <add> ta.sms2send[0].module = ta.curm.ownum + ' ' + ta.curm.modid <ide> taout.save(ta.sms2send[0] ,function(e, rec){ <ide> if(e) { <ide> _err('db err save sent sms: ' + e) <ide> ta._cmd = ta.sms2send[0].atcmd <ide> sock.write(ta._cmd + ta._cmdle) <ide> gsmtel_runs = ta._cmd <del>//!! ta.sent_sms.push(ta._cmd) <del>//!! m = null <del>//!! ta.curm.cmdq.shift() <ide> return ta._hsync// next sms <ide> } <ide> clearTimeout(ta._cmdTimeoutH) <ide> ta._end_ch = /\]$/ // restore telnet from ATs <ide> ta._err = ta.__nop <ide> ta._handle = ta.releaseH <add> if(ta._appcb) ta._appcb = null <ide> ta._cmd_releaseTimeout = 0// allow schedule modules <ide> //process.nextTick(_do_TELNET2MODULES_cmd_loop) <ide> } <del> /*,release: function() { <del>//`release` gives back AT control on module, but any AT data is queued by module for next `get` <del>//`release` does not clear modules's cmd queue. 
only errors by timeout do this and call scheduler <del> ta._cmd = ta._cmd_release <del> gsmtel_runs = ta._cmd <del> if(ta._in_releaseTimeout){// pending `release` called directly <del> clearTimeout(ta._in_releaseTimeout) <del> ta._cmd_releaseTimeout = 0 <del> ta._in_releaseTimeout = null <del> } <del> if(ta._cmd_releaseTimeout > 0){ <del>_gsm("MV37X release timeout: " + ta._cmd_releaseTimeout) <del> ta._in_releaseTimeout = setTimeout(ta.do_release, ta._cmd_releaseTimeout) <del> } else ta.do_release() <del>//returns nothing <del> }*/ <ide> } <ide> <ide> var modring = 0 <ide> port: process.env.GSM_TELNET.replace(/[^:]*:/,'') <ide> ,fqdn: process.env.GSM_TELNET.replace(/:[^:]*/,'') <ide> } <del> ,gsmtel /*= net.connect(gsmtel_addr.port, gsmtel_addr.fqdn, gsmtel_ok) <del> <del>gsmtel.setTimeout(1024) //see NOTE below*/ <add> ,gsmtel <ide> <ide> function gsmtel_init() { <ide> modring = 0 <ide> taout = db.collection(tain+'_taout') <ide> tain = db.collection(tain+'_tain' ) <ide> tain.stats(function(e, stats){ <del> if(stats.count <= 0)// if db income is empty, fill it from SIM <add> if(stats.count <= 0){// if db income is empty, fill it from SIM <ide> for (var i in ta.modules) <ide> ta.qcmds(ta.sms_mem_setup ,ta.modules[i].modid) <add> db_runs = 'init' <add> } <ide> }) <ide> <ide> gsmtel.on('connect', function(){ <ide> else { <ide> k = 0 <ide> for(j in smsbods) { <del> m = { dateQ: new Date() ,dateS: null ,mid: null} <add> m = { num: smsnums[i] ,dateQ: new Date() ,dateS: null ,mid: null ,module: null } <ide> if(smsbods[j].ascii) { <ide> m.atcmd = ta._sms_smp_asci + ';+CMGS="'+smsnums[i]+'"' <ide> m.m = smsbody.substr(k, k + smsbods[j].count) <ide> <ide> if(ta.sms2send.length > 0){ <ide> _gsm('sms2send: ' + inspect(ta.sms2send)) <del> <ide> ta.qcmds([ ta._sms_setup, 'sms' ], module) <ide> process.nextTick(_do_TELNET2MODULES_cmd_loop) <ide> return { success: true ,msg: 'SMS `AT` executed'} <ide> } <ide> ) <ide> <add>app.get('/gsmemr.json', function (req, res) { <add>//ExtJS table load: USSD and SMS from DB: start=80&limit=20 <add> db_runs = '' <add> for (var i in ta.modules){ <add> ta.qcmds(ta.sms_mem_setup ,ta.modules[i].modid) <add> db_runs += '>' <add> } <add> db_runs += 'E' <add> process.nextTick(_do_TELNET2MODULES_cmd_loop) <add> res.json({ success: true }) <add> } <add>) <add> <ide> app.get('/tain.json', function (req, res) { <ide> //ExtJS table load: USSD and SMS from DB: start=80&limit=20 <ide> var r = {} <ide> } <ide> ) <ide> <add>app.get('/taout.json', function (req, res) { <add>//ExtJS table load: sent SMS <add> var r = {} <add> taout.find().sort({$natural: -1}) <add> .skip(parseInt(req.query.start)) <add> .limit(parseInt(req.query.limit), function(e, recout) { <add> r.data = recout <add> taout.stats(function(e, stats){ <add> r.total = stats.count <add> res.json(r) <add> }) <add> }) <add> } <add>) <add> <ide> app.get('/swhw_stat.json', function (req, res) { <ide> //ExtJS will load this once in a while into Ext Store for dataview <ide> var i, logs = [], gsms = [], errs = [] <ide> <ide> /*====---- APP: memory = data base ----====*/ <ide> <del>function db_read_mem(arr, mode){ <add>function db_read_gsm_mem(arr){ <ide> /* <ide> == SMS-STATUS-REPORT ==: <ide> 15: ": 21,"REC READ",6,233,"+375298022483",145,"12/05/15,03:16:34+12","12/05/15,03:16:39+12",0" <ide> smsas RE: ^^^^^^^^+++^ das RE: ^++++++++++++++++++++^^^++++++++++++++++++++^ <ide> == SMS-DELIVER ==: <del>14: ": 20,"REC READ","+375297253059",,"10/04/21,15:11:51+12"\n003700390033003100350031003904210430…." 
<add>14: ": 20,"REC READ","+375297253059",,"10/04/21,15:11:51+12"\n003700390033003100350031003904210430…." <ide> smsd RE: ^^^^^^^+++++++++++++^ ^++++++++++++++++++++^^ = dad <ide> */ <ide> var smss = /READ",6,([^,]+),/ <ide> ,dad = /"([^"]+)"\n([\s\S]+)\n$/ <ide> ,csms5 = /^050003/ <ide> ,csms6 = /^060804/ <add> ,reports = [] <ide> ,ucs2body <ide> ,m ,r ,i ,d <del> <del> if (mode != 'ALL'){ <del> // read unread sms status reports and check of mid <del> } <ide> <ide> arr.shift()// remove empty first `split` <ide> _gsm('sms arr len: ' + arr.length) <ide> r.p = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6]) <ide> d = m[2].match(gsmd)// dostavlen Date() <ide> r.d = new Date('20'+d[1],parseInt(d[2])-1,d[3],d[4],d[5],d[6]) <del> } <del> } <add> //if (status){// sms status reports and check of mid <add> //no deletes <add> reports.push(r) <add> //} <add> } <add> } <add> if(/^>/.test(db_runs)) <add> if (!/,"REC UNREAD",/.test(r.m)) <add> continue <add> <ide> tain.save(r ,function(e){ <del> if(e) _err('db SIM save err: ' + e) <add> if(e) _err('db GSM save err: ' + e) <ide> }) <del> // we will look for every sent sms if there is delivery report <del> tain.ensureIndex({mid : 1} ,{sparse : true}) <del> } <add> } <add> tain.ensureIndex({mid: 1} ,{sparse: true}) <add> db_runs = db_runs.replace(/>/,'')// for every module <add> if(reports.length > 0){ <add> for(i in reports){ <add> // arg is send by value, not by ref, thus update is safe in loop <add> fix_mid_date_update(reports[i]) <add> } <add> } <add>} <add> <add>function fix_mid_date_update(d){ <add>var dl ,dh = (15*60*1000)// 15 minutes <add> // local send date +- possible delta with GSM time, can be in config <add> dl = new Date(d.p.getTime() - dh) <add> dh = new Date(d.p.getTime() + dh) <add> taout.find({mid: d.mid, dateS: {$gt: dl ,$lt: dh }} ,function(e ,r){ <add> if(e) { <add> _err('db.taout mid err: ' + e) <add> return <add> } <add> if (r.length == 1){ <add>_log('4updater found: ' + inspect(r)) <add> //real i.e. GSM send and receive time <add> taout.update({ _id: r[0]._id }, { <add> $set: { mid: -d.mid, dateS: d.p, dateR: d.d } <add> }) <add> } <add> }) <ide> } <ide> <ide> // init <ide> setTimeout(db_run_check ,4096) <ide> return <ide> } <del> a.command({buildInfo: 1}, function(e, d) { <add> a.command({buildInfo: 1}, function(e ,d) { <ide> if(e){ <del> setTimeout(db_run_check,4096) <add> setTimeout(db_run_check ,4096) <ide> _err('db.admin.command():' + e) <ide> return <ide> }
Java
apache-2.0
fecfb098b3e67183ebefd548b0b441bddd83ce5f
0
neo4j-contrib/neo4j-apoc-procedures,neo4j-contrib/neo4j-apoc-procedures,neo4j-contrib/neo4j-apoc-procedures,neo4j-contrib/neo4j-apoc-procedures
package apoc.custom; import apoc.ApocConfig; import apoc.SystemLabels; import apoc.SystemPropertyKeys; import apoc.util.JsonUtil; import apoc.util.Util; import org.neo4j.collection.RawIterator; import org.neo4j.function.ThrowingFunction; import org.neo4j.graphdb.Entity; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.Path; import org.neo4j.graphdb.QueryExecutionException; import org.neo4j.graphdb.Result; import org.neo4j.graphdb.Transaction; import org.neo4j.internal.helpers.collection.Iterators; import org.neo4j.internal.helpers.collection.Pair; import org.neo4j.internal.kernel.api.exceptions.ProcedureException; import org.neo4j.internal.kernel.api.procs.DefaultParameterValue; import org.neo4j.internal.kernel.api.procs.FieldSignature; import org.neo4j.internal.kernel.api.procs.Neo4jTypes; import org.neo4j.internal.kernel.api.procs.ProcedureSignature; import org.neo4j.internal.kernel.api.procs.QualifiedName; import org.neo4j.internal.kernel.api.procs.UserFunctionSignature; import org.neo4j.kernel.api.ResourceTracker; import org.neo4j.kernel.api.procedure.CallableProcedure; import org.neo4j.kernel.api.procedure.CallableUserFunction; import org.neo4j.kernel.api.procedure.Context; import org.neo4j.kernel.api.procedure.GlobalProcedures; import org.neo4j.kernel.availability.AvailabilityListener; import org.neo4j.kernel.impl.util.ValueUtils; import org.neo4j.kernel.internal.GraphDatabaseAPI; import org.neo4j.kernel.lifecycle.LifecycleAdapter; import org.neo4j.logging.Log; import org.neo4j.procedure.Mode; import org.neo4j.procedure.Name; import org.neo4j.procedure.impl.GlobalProceduresRegistry; import org.neo4j.scheduler.Group; import org.neo4j.scheduler.JobHandle; import org.neo4j.scheduler.JobScheduler; import org.neo4j.values.AnyValue; import org.neo4j.values.ValueMapper; import org.neo4j.values.storable.Values; import org.neo4j.values.virtual.MapValueBuilder; import org.neo4j.values.virtual.VirtualValues; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static apoc.ApocConfig.apocConfig; import static java.util.Collections.emptySet; import static java.util.Collections.singletonList; import static org.neo4j.internal.helpers.collection.MapUtil.map; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.AnyType; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTAny; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTBoolean; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTDate; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTDateTime; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTDuration; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTFloat; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTGeometry; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTInteger; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTList; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTLocalDateTime; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTLocalTime; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTMap; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTNode; import static 
org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTNumber; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTPath; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTPoint; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTRelationship; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTString; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTTime; public class CypherProceduresHandler extends LifecycleAdapter implements AvailabilityListener { public static final String PREFIX = "custom"; public static final String FUNCTION = "function"; public static final String PROCEDURE = "procedure"; public static final String CUSTOM_PROCEDURES_REFRESH = "apoc.custom.procedures.refresh"; public static final List<FieldSignature> DEFAULT_INPUTS = singletonList(FieldSignature.inputField("params", NTMap, DefaultParameterValue.ntMap(Collections.emptyMap()))); public static final List<FieldSignature> DEFAULT_MAP_OUTPUT = singletonList(FieldSignature.inputField("row", NTMap)); private final GraphDatabaseAPI api; private final Log log; private final GraphDatabaseService systemDb; private final GlobalProcedures globalProceduresRegistry; private final JobScheduler jobScheduler; private long lastUpdate; private final ThrowingFunction<Context, Transaction, ProcedureException> transactionComponentFunction; private Set<ProcedureSignature> registeredProcedureSignatures = emptySet(); private Set<UserFunctionSignature> registeredUserFunctionSignatures = emptySet(); private static Group REFRESH_GROUP = Group.STORAGE_MAINTENANCE; private JobHandle restoreProceduresHandle; public CypherProceduresHandler(GraphDatabaseAPI db, JobScheduler jobScheduler, ApocConfig apocConfig, Log userLog, GlobalProcedures globalProceduresRegistry) { this.api = db; this.log = userLog; this.jobScheduler = jobScheduler; this.systemDb = apocConfig.getSystemDb(); this.globalProceduresRegistry = globalProceduresRegistry; transactionComponentFunction = globalProceduresRegistry.lookupComponentProvider(Transaction.class, true); } @Override public void available() { restoreProceduresAndFunctions(); long refreshInterval = apocConfig().getInt(CUSTOM_PROCEDURES_REFRESH, 60000); restoreProceduresHandle = jobScheduler.scheduleRecurring(REFRESH_GROUP, () -> { if (getLastUpdate() > lastUpdate) { restoreProceduresAndFunctions(); } }, refreshInterval, refreshInterval, TimeUnit.MILLISECONDS); } @Override public void unavailable() { if (restoreProceduresHandle != null) { restoreProceduresHandle.cancel(); } } public Mode mode(String s) { return s == null ? 
Mode.READ : Mode.valueOf(s.toUpperCase()); } public Stream<ProcedureOrFunctionDescriptor> readSignatures() { List<ProcedureOrFunctionDescriptor> descriptors; try (Transaction tx = systemDb.beginTx()) { descriptors = tx.findNodes(SystemLabels.ApocCypherProcedures, SystemPropertyKeys.database.name(), api.databaseName()).stream().map(node -> { if (node.hasLabel(SystemLabels.Procedure)) { return procedureDescriptor(node); } else if (node.hasLabel(SystemLabels.Function)) { return userFunctionDescriptor(node); } else { throw new IllegalStateException("don't know what to do with systemdb node " + node); } }).collect(Collectors.toList()); tx.commit(); } return descriptors.stream(); } private ProcedureDescriptor procedureDescriptor(Node node) { String statement = (String) node.getProperty(SystemPropertyKeys.statement.name()); String name = (String) node.getProperty(SystemPropertyKeys.name.name()); String description = (String) node.getProperty(SystemPropertyKeys.description.name(), null); String property = (String) node.getProperty(SystemPropertyKeys.inputs.name()); List<FieldSignature> inputs = deserializeSignatures(property); List<FieldSignature> outputSignature = deserializeSignatures((String) node.getProperty(SystemPropertyKeys.outputs.name())); return new ProcedureDescriptor(Signatures.createProcedureSignature( new QualifiedName(new String[]{PREFIX}, name), inputs, outputSignature, Mode.valueOf((String) node.getProperty(SystemPropertyKeys.mode.name())), false, null, new String[0], description, null, false, false, false, false ), statement); } private UserFunctionDescriptor userFunctionDescriptor(Node node) { String statement = (String) node.getProperty(SystemPropertyKeys.statement.name()); String name = (String) node.getProperty(SystemPropertyKeys.name.name()); String description = (String) node.getProperty(SystemPropertyKeys.description.name(), null); String property = (String) node.getProperty(SystemPropertyKeys.inputs.name()); List<FieldSignature> inputs = deserializeSignatures(property); boolean forceSingle = (boolean) node.getProperty(SystemPropertyKeys.forceSingle.name(), false); return new UserFunctionDescriptor(new UserFunctionSignature( new QualifiedName(new String[]{PREFIX}, name), inputs, typeof((String) node.getProperty(SystemPropertyKeys.output.name())), null, new String[0], description, "apoc.custom", false ), statement, forceSingle); } public void restoreProceduresAndFunctions() { lastUpdate = System.currentTimeMillis(); Set<ProcedureSignature> currentProcedureSignatures = Collections.synchronizedSet(new HashSet<>()); Set<UserFunctionSignature> currentUserFunctionSignatures = Collections.synchronizedSet(new HashSet<>()); readSignatures().forEach(descriptor -> { descriptor.register(); if (descriptor instanceof ProcedureDescriptor) { ProcedureSignature signature = ((ProcedureDescriptor) descriptor).getSignature(); currentProcedureSignatures.add(signature); registeredProcedureSignatures.remove(signature); } else { UserFunctionSignature signature = ((UserFunctionDescriptor) descriptor).getSignature(); currentUserFunctionSignatures.add(signature); registeredUserFunctionSignatures.remove(signature); } }); // de-register removed procs/functions registeredProcedureSignatures.forEach(signature -> registerProcedure(signature, null)); registeredUserFunctionSignatures.forEach(signature -> registerFunction(signature, null, false)); registeredProcedureSignatures = currentProcedureSignatures; registeredUserFunctionSignatures = currentUserFunctionSignatures; api.executeTransactionally("call 
db.clearQueryCaches()"); } private <T> T withSystemDb(Function<Transaction, T> action) { try (Transaction tx = systemDb.beginTx()) { T result = action.apply(tx); tx.commit(); return result; } } public void storeFunction(UserFunctionSignature signature, String statement, boolean forceSingle) { withSystemDb(tx -> { Node node = Util.mergeNode(tx, SystemLabels.ApocCypherProcedures, SystemLabels.Function, Pair.of(SystemPropertyKeys.database.name(), api.databaseName()), Pair.of(SystemPropertyKeys.name.name(), signature.name().name()) ); node.setProperty(SystemPropertyKeys.description.name(), signature.description().orElse(null)); node.setProperty(SystemPropertyKeys.statement.name(), statement); node.setProperty(SystemPropertyKeys.inputs.name(), serializeSignatures(signature.inputSignature())); node.setProperty(SystemPropertyKeys.output.name(), signature.outputType().toString()); node.setProperty(SystemPropertyKeys.forceSingle.name(), forceSingle); setLastUpdate(tx); registerFunction(signature, statement, forceSingle); return null; }); } public void storeProcedure(ProcedureSignature signature, String statement) { withSystemDb(tx -> { Node node = Util.mergeNode(tx, SystemLabels.ApocCypherProcedures, SystemLabels.Procedure, Pair.of(SystemPropertyKeys.database.name(), api.databaseName()), Pair.of(SystemPropertyKeys.name.name(), signature.name().name()) ); node.setProperty(SystemPropertyKeys.description.name(), signature.description().orElse(null)); node.setProperty(SystemPropertyKeys.statement.name(), statement); node.setProperty(SystemPropertyKeys.inputs.name(), serializeSignatures(signature.inputSignature())); node.setProperty(SystemPropertyKeys.outputs.name(), serializeSignatures(signature.outputSignature())); node.setProperty(SystemPropertyKeys.mode.name(), signature.mode().name()); setLastUpdate(tx); registerProcedure(signature, statement); return null; }); } private String serializeSignatures(List<FieldSignature> signatures) { List<Map<String, Object>> mapped = signatures.stream().map(fs -> map( "name", fs.name(), "type", fs.neo4jType().toString(), "default", fs.defaultValue().orElse(DefaultParameterValue.nullValue(new Neo4jTypes.AnyType())).value() )).collect(Collectors.toList()); return Util.toJson(mapped); } private List<FieldSignature> deserializeSignatures(String s) { List<Map<String, Object>> mapped = Util.fromJson(s, List.class); return mapped.stream().map(map -> { String typeString = (String) map.get("type"); if (typeString.endsWith("?")) { typeString = typeString.substring(0, typeString.length() - 1); } AnyType type = typeof(typeString); Object deflt = map.get("default"); if (deflt == null) { return FieldSignature.inputField((String) map.get("name"), type); } else { return FieldSignature.inputField((String) map.get("name"), type, new DefaultParameterValue(deflt, type)); } }).collect(Collectors.toList()); } private void setLastUpdate(Transaction tx) { Node node = tx.findNode(SystemLabels.ApocCypherProceduresMeta, SystemPropertyKeys.database.name(), api.databaseName()); if (node == null) { node = tx.createNode(SystemLabels.ApocCypherProceduresMeta); node.setProperty(SystemPropertyKeys.database.name(), api.databaseName()); } node.setProperty(SystemPropertyKeys.lastUpdated.name(), System.currentTimeMillis()); } private long getLastUpdate() { return withSystemDb( tx -> { Node node = tx.findNode(SystemLabels.ApocCypherProceduresMeta, SystemPropertyKeys.database.name(), api.databaseName()); return node == null ? 
0L : (long) node.getProperty(SystemPropertyKeys.lastUpdated.name()); }); } public ProcedureSignature procedureSignature(String name, String mode, List<List<String>> outputs, List<List<String>> inputs, String description) { boolean admin = false; // TODO return new ProcedureSignature(qualifiedName(name), inputSignatures(inputs), outputSignatures(outputs), Mode.valueOf(mode.toUpperCase()), admin, null, new String[0], description, null, false, false, true, false, false ); } public UserFunctionSignature functionSignature(String name, String output, List<List<String>> inputs, String description) { AnyType outType = typeof(output.isEmpty() ? "LIST OF MAP" : output); return new UserFunctionSignature(qualifiedName(name), inputSignatures(inputs), outType, null, new String[0], description, "apoc.custom",false); } /** * * @param signature * @param statement null indicates a removed procedure * @return */ public boolean registerProcedure(ProcedureSignature signature, String statement) { try { globalProceduresRegistry.register(new CallableProcedure.BasicProcedure(signature) { @Override public RawIterator<AnyValue[], ProcedureException> apply(org.neo4j.kernel.api.procedure.Context ctx, AnyValue[] input, ResourceTracker resourceTracker) throws ProcedureException { if (statement == null) { final String error = String.format("There is no procedure with the name `%s` registered for this database instance. " + "Please ensure you've spelled the procedure name correctly and that the procedure is properly deployed.", signature.name()); throw new QueryExecutionException(error, null, "Neo.ClientError.Statement.SyntaxError"); } else { Map<String, Object> params = params(input, signature.inputSignature(), ctx.valueMapper()); Transaction tx = transactionComponentFunction.apply(ctx); Result result = tx.execute(statement, params); resourceTracker.registerCloseableResource(result); List<FieldSignature> outputs = signature.outputSignature(); String[] names = outputs == null ? 
null : outputs.stream().map(FieldSignature::name).toArray(String[]::new); boolean defaultOutputs = outputs == null || outputs.equals(DEFAULT_MAP_OUTPUT); Stream<AnyValue[]> stream = result.stream().map(row -> toResult(row, names, defaultOutputs)); return Iterators.asRawIterator(stream); } } }, true); registeredProcedureSignatures.add(signature); return true; } catch (Exception e) { log.error("Could not register procedure: " + signature.name() + " with " + statement + "\n accepting" + signature.inputSignature() + " resulting in " + signature.outputSignature() + " mode " + signature.mode(), e); return false; } } public boolean registerFunction(UserFunctionSignature signature, String statement, boolean forceSingle) { try { globalProceduresRegistry.register(new CallableUserFunction.BasicUserFunction(signature) { @Override public AnyValue apply(org.neo4j.kernel.api.procedure.Context ctx, AnyValue[] input) throws ProcedureException { if (statement == null) { final String error = String.format("Unknown function '%s'", signature.name()); throw new QueryExecutionException(error, null, "Neo.ClientError.Statement.SyntaxError"); } else { Map<String, Object> params = params(input, signature.inputSignature(), ctx.valueMapper()); AnyType outType = signature.outputType(); Transaction tx = transactionComponentFunction.apply(ctx); try (Result result = tx.execute(statement, params)) { // resourceTracker.registerCloseableResource(result); // TODO if (!result.hasNext()) return null; if (outType.equals(NTAny)) { return ValueUtils.of(result.stream().collect(Collectors.toList())); } List<String> cols = result.columns(); if (cols.isEmpty()) return null; if (!forceSingle && outType instanceof Neo4jTypes.ListType) { Neo4jTypes.ListType listType = (Neo4jTypes.ListType) outType; Neo4jTypes.AnyType innerType = listType.innerType(); if (innerType instanceof Neo4jTypes.MapType) return ValueUtils.of(result.stream().collect(Collectors.toList())); if (cols.size() == 1) return ValueUtils.of(result.stream().map(row -> row.get(cols.get(0))).collect(Collectors.toList())); } else { Map<String, Object> row = result.next(); if (outType instanceof Neo4jTypes.MapType) return ValueUtils.of(row); if (cols.size() == 1) return ValueUtils.of(row.get(cols.get(0))); } throw new IllegalStateException("Result mismatch " + cols + " output type is " + outType); } } } }, true); registeredUserFunctionSignatures.add(signature); return true; } catch (Exception e) { log.error("Could not register function: " + signature + "\nwith: " + statement + "\n single result " + forceSingle, e); return false; } } public static QualifiedName qualifiedName(@Name("name") String name) { String[] names = name.split("\\."); List<String> namespace = new ArrayList<>(names.length); namespace.add(PREFIX); namespace.addAll(Arrays.asList(names)); return new QualifiedName(namespace.subList(0, namespace.size() - 1), names[names.length - 1]); } public List<FieldSignature> inputSignatures(@Name(value = "inputs", defaultValue = "null") List<List<String>> inputs) { List<FieldSignature> inputSignature = inputs == null ? singletonList(FieldSignature.inputField("params", NTMap, DefaultParameterValue.ntMap(Collections.emptyMap()))) : inputs.stream().map(pair -> { DefaultParameterValue defaultValue = defaultValue(pair.get(1), pair.size() > 2 ? pair.get(2) : null); return defaultValue == null ? 
FieldSignature.inputField(pair.get(0), typeof(pair.get(1))) : FieldSignature.inputField(pair.get(0), typeof(pair.get(1)), defaultValue); }).collect(Collectors.toList()); return inputSignature; } public List<FieldSignature> outputSignatures(@Name(value = "outputs", defaultValue = "null") List<List<String>> outputs) { return outputs == null ? singletonList(FieldSignature.inputField("row", NTMap)) : outputs.stream().map(pair -> FieldSignature.outputField(pair.get(0), typeof(pair.get(1)))).collect(Collectors.toList()); } private Neo4jTypes.AnyType typeof(String typeName) { typeName = typeName.replaceAll("\\?", ""); typeName = typeName.toUpperCase(); if (typeName.startsWith("LIST OF ")) return NTList(typeof(typeName.substring(8))); if (typeName.startsWith("LIST ")) return NTList(typeof(typeName.substring(5))); switch (typeName) { case "ANY": return NTAny; case "MAP": return NTMap; case "NODE": return NTNode; case "REL": return NTRelationship; case "RELATIONSHIP": return NTRelationship; case "EDGE": return NTRelationship; case "PATH": return NTPath; case "NUMBER": return NTNumber; case "LONG": return NTInteger; case "INT": return NTInteger; case "INTEGER": return NTInteger; case "FLOAT": return NTFloat; case "DOUBLE": return NTFloat; case "BOOL": return NTBoolean; case "BOOLEAN": return NTBoolean; case "DATE": return NTDate; case "TIME": return NTTime; case "LOCALTIME": return NTLocalTime; case "DATETIME": return NTDateTime; case "LOCALDATETIME": return NTLocalDateTime; case "DURATION": return NTDuration; case "POINT": return NTPoint; case "GEO": return NTGeometry; case "GEOMETRY": return NTGeometry; case "STRING": return NTString; case "TEXT": return NTString; default: return NTString; } } private DefaultParameterValue defaultValue(String typeName, String stringValue) { if (stringValue == null) return null; Object value = JsonUtil.parse(stringValue, null, Object.class); if (value == null) return null; typeName = typeName.toUpperCase(); if (typeName.startsWith("LIST ")) return DefaultParameterValue.ntList((List<?>) value, typeof(typeName.substring(5))); switch (typeName) { case "MAP": return DefaultParameterValue.ntMap((Map<String, Object>) value); case "NODE": case "REL": case "RELATIONSHIP": case "EDGE": case "PATH": return null; case "NUMBER": return value instanceof Float || value instanceof Double ? DefaultParameterValue.ntFloat(((Number) value).doubleValue()) : DefaultParameterValue.ntInteger(((Number) value).longValue()); case "LONG": case "INT": case "INTEGER": return DefaultParameterValue.ntInteger(((Number) value).longValue()); case "FLOAT": case "DOUBLE": return DefaultParameterValue.ntFloat(((Number) value).doubleValue()); case "BOOL": case "BOOLEAN": return DefaultParameterValue.ntBoolean((Boolean) value); case "DATE": case "TIME": case "LOCALTIME": case "DATETIME": case "LOCALDATETIME": case "DURATION": case "POINT": case "GEO": case "GEOMETRY": return null; case "STRING": case "TEXT": return DefaultParameterValue.ntString(value.toString()); default: return null; } } private AnyValue[] toResult(Map<String, Object> row, String[] names, boolean defaultOutputs) { if (defaultOutputs) { return new AnyValue[]{convertToValueRecursive(row)}; } else { AnyValue[] result = new AnyValue[names.length]; for (int i = 0; i < names.length; i++) { result[i] = convertToValueRecursive(row.get(names[i])); } return result; } } private AnyValue convertToValueRecursive(Object... 
toConverts) { switch (toConverts.length) { case 0: return Values.NO_VALUE; case 1: Object toConvert = toConverts[0]; if (toConvert instanceof List) { List list = (List) toConvert; AnyValue[] objects = ((Stream<AnyValue>) list.stream().map(x -> convertToValueRecursive(x))).toArray(AnyValue[]::new); return VirtualValues.list(objects); } else if (toConvert instanceof Map) { Map<String, Object> map = (Map) toConvert; MapValueBuilder builder = new MapValueBuilder(map.size()); map.entrySet().stream().forEach(e -> { builder.add(e.getKey(), convertToValueRecursive(e.getValue())); }); return builder.build(); } else if (toConvert instanceof Entity || toConvert instanceof Path){ return ValueUtils.asAnyValue(toConvert); } else { return Values.of(toConvert); } default: AnyValue[] values = Arrays.stream(toConverts).map(c -> convertToValueRecursive(c)).toArray(AnyValue[]::new); return VirtualValues.list(values); } } public Map<String, Object> params(AnyValue[] input, List<FieldSignature> fieldSignatures, ValueMapper valueMapper) { if (input == null || input.length == 0) return Collections.emptyMap(); if (fieldSignatures == null || fieldSignatures.isEmpty() || fieldSignatures.equals(DEFAULT_INPUTS)) return (Map<String, Object>) input[0].map(valueMapper); Map<String, Object> params = new HashMap<>(input.length); for (int i = 0; i < input.length; i++) { params.put(fieldSignatures.get(i).name(), input[i].map(valueMapper)); } return params; } public void removeProcedure(String name) { withSystemDb(tx -> { Node node = Iterators.single(tx.findNodes(SystemLabels.ApocCypherProcedures, SystemPropertyKeys.database.name(), api.databaseName(), SystemPropertyKeys.name.name(), name ).stream().filter(n -> n.hasLabel(SystemLabels.Procedure)).iterator()); ProcedureDescriptor descriptor = procedureDescriptor(node); registerProcedure(descriptor.getSignature(), null); registeredProcedureSignatures.remove(descriptor.getSignature()); node.delete(); setLastUpdate(tx); return null; }); } public void removeFunction(String name) { withSystemDb(tx -> { Node node = Iterators.single(tx.findNodes(SystemLabels.ApocCypherProcedures, SystemPropertyKeys.database.name(), api.databaseName(), SystemPropertyKeys.name.name(), name ).stream().filter(n -> n.hasLabel(SystemLabels.Function)).iterator()); UserFunctionDescriptor descriptor = userFunctionDescriptor(node); registerFunction(descriptor.getSignature(), null, false); registeredUserFunctionSignatures.remove(descriptor.getSignature()); node.delete(); setLastUpdate(tx); return null; }); } public abstract class ProcedureOrFunctionDescriptor { private final String statement; protected ProcedureOrFunctionDescriptor(String statement) { this.statement = statement; } public String getStatement() { return statement; } abstract public void register(); } public class ProcedureDescriptor extends ProcedureOrFunctionDescriptor { private final ProcedureSignature signature; public ProcedureDescriptor(ProcedureSignature signature, String statement) { super(statement); this.signature = signature; } public ProcedureSignature getSignature() { return signature; } @Override public void register() { registerProcedure(getSignature(), getStatement()); } } public class UserFunctionDescriptor extends ProcedureOrFunctionDescriptor { private final UserFunctionSignature signature; private final boolean forceSingle; public UserFunctionDescriptor(UserFunctionSignature signature, String statement, boolean forceSingle) { super(statement); this.signature = signature; this.forceSingle = forceSingle; } public 
UserFunctionSignature getSignature() { return signature; } public boolean isForceSingle() { return forceSingle; } @Override public void register() { registerFunction(getSignature(), getStatement(), isForceSingle()); } } }
full/src/main/java/apoc/custom/CypherProceduresHandler.java
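A hedged usage sketch of the public API defined above: registering one custom procedure through the handler's own procedureSignature/storeProcedure helpers. The handler instance needs a live Neo4j database, scheduler, config and log, so it is assumed here rather than constructed; the procedure name, statement and columns are illustrative:

package apoc.custom;

import java.util.List;

import org.neo4j.internal.kernel.api.procs.ProcedureSignature;

// Hypothetical caller, not part of APOC; `handler` must already be wired up.
class CustomProcedureRegistrationSketch {

    static void registerAnswerProcedure(CypherProceduresHandler handler) {
        // procedureSignature(name, mode, outputs, inputs, description); outputs and
        // inputs are [name, type(, default-as-JSON)] string lists, parsed by the
        // outputSignatures/inputSignatures/typeof helpers shown above.
        ProcedureSignature sig = handler.procedureSignature(
                "answer",                                // exposed as custom.answer
                "read",                                  // mapped to Mode.READ
                List.of(List.of("value", "long")),       // one LONG output column
                List.of(List.of("input", "long", "42")), // one LONG input, default 42
                "echoes its input");
        // Persists the definition in the system database and registers it live.
        handler.storeProcedure(sig, "RETURN $input AS value");
    }
}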
package apoc.custom; import apoc.ApocConfig; import apoc.SystemLabels; import apoc.SystemPropertyKeys; import apoc.util.JsonUtil; import apoc.util.Util; import org.neo4j.collection.RawIterator; import org.neo4j.function.ThrowingFunction; import org.neo4j.graphdb.Entity; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.Path; import org.neo4j.graphdb.QueryExecutionException; import org.neo4j.graphdb.Result; import org.neo4j.graphdb.Transaction; import org.neo4j.internal.helpers.collection.Iterators; import org.neo4j.internal.helpers.collection.Pair; import org.neo4j.internal.kernel.api.exceptions.ProcedureException; import org.neo4j.internal.kernel.api.procs.DefaultParameterValue; import org.neo4j.internal.kernel.api.procs.FieldSignature; import org.neo4j.internal.kernel.api.procs.Neo4jTypes; import org.neo4j.internal.kernel.api.procs.ProcedureSignature; import org.neo4j.internal.kernel.api.procs.QualifiedName; import org.neo4j.internal.kernel.api.procs.UserFunctionSignature; import org.neo4j.kernel.api.ResourceTracker; import org.neo4j.kernel.api.procedure.CallableProcedure; import org.neo4j.kernel.api.procedure.CallableUserFunction; import org.neo4j.kernel.api.procedure.Context; import org.neo4j.kernel.api.procedure.GlobalProcedures; import org.neo4j.kernel.availability.AvailabilityListener; import org.neo4j.kernel.impl.util.ValueUtils; import org.neo4j.kernel.internal.GraphDatabaseAPI; import org.neo4j.kernel.lifecycle.LifecycleAdapter; import org.neo4j.logging.Log; import org.neo4j.procedure.Mode; import org.neo4j.procedure.Name; import org.neo4j.procedure.impl.GlobalProceduresRegistry; import org.neo4j.scheduler.Group; import org.neo4j.scheduler.JobHandle; import org.neo4j.scheduler.JobScheduler; import org.neo4j.values.AnyValue; import org.neo4j.values.ValueMapper; import org.neo4j.values.storable.Values; import org.neo4j.values.virtual.MapValueBuilder; import org.neo4j.values.virtual.VirtualValues; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static apoc.ApocConfig.apocConfig; import static java.util.Collections.emptySet; import static java.util.Collections.singletonList; import static org.neo4j.internal.helpers.collection.MapUtil.map; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.AnyType; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTAny; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTBoolean; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTDate; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTDateTime; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTDuration; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTFloat; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTGeometry; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTInteger; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTList; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTLocalDateTime; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTLocalTime; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTMap; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTNode; import static 
org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTNumber; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTPath; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTPoint; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTRelationship; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTString; import static org.neo4j.internal.kernel.api.procs.Neo4jTypes.NTTime; public class CypherProceduresHandler extends LifecycleAdapter implements AvailabilityListener { public static final String PREFIX = "custom"; public static final String FUNCTION = "function"; public static final String PROCEDURE = "procedure"; public static final String CUSTOM_PROCEDURES_REFRESH = "apoc.custom.procedures.refresh"; public static final List<FieldSignature> DEFAULT_INPUTS = singletonList(FieldSignature.inputField("params", NTMap, DefaultParameterValue.ntMap(Collections.emptyMap()))); public static final List<FieldSignature> DEFAULT_MAP_OUTPUT = singletonList(FieldSignature.inputField("row", NTMap)); private final GraphDatabaseAPI api; private final Log log; private final GraphDatabaseService systemDb; private final GlobalProcedures globalProceduresRegistry; private final JobScheduler jobScheduler; private long lastUpdate; private final ThrowingFunction<Context, Transaction, ProcedureException> transactionComponentFunction; private Set<ProcedureSignature> registeredProcedureSignatures = emptySet(); private Set<UserFunctionSignature> registeredUserFunctionSignatures = emptySet(); private static Group REFRESH_GROUP = Group.STORAGE_MAINTENANCE; private JobHandle restoreProceduresHandle; public CypherProceduresHandler(GraphDatabaseAPI db, JobScheduler jobScheduler, ApocConfig apocConfig, Log userLog, GlobalProcedures globalProceduresRegistry) { this.api = db; this.log = userLog; this.jobScheduler = jobScheduler; this.systemDb = apocConfig.getSystemDb(); this.globalProceduresRegistry = globalProceduresRegistry; transactionComponentFunction = globalProceduresRegistry.lookupComponentProvider(Transaction.class, true); } @Override public void available() { restoreProceduresAndFunctions(); long refreshInterval = apocConfig().getInt(CUSTOM_PROCEDURES_REFRESH, 60000); restoreProceduresHandle = jobScheduler.scheduleRecurring(REFRESH_GROUP, () -> { if (getLastUpdate() > lastUpdate) { restoreProceduresAndFunctions(); } }, refreshInterval, refreshInterval, TimeUnit.MILLISECONDS); } @Override public void unavailable() { if (restoreProceduresHandle != null) { restoreProceduresHandle.cancel(); } } public Mode mode(String s) { return s == null ? 
Mode.READ : Mode.valueOf(s.toUpperCase()); } public Stream<ProcedureOrFunctionDescriptor> readSignatures() { List<ProcedureOrFunctionDescriptor> descriptors; try (Transaction tx = systemDb.beginTx()) { descriptors = tx.findNodes(SystemLabels.ApocCypherProcedures, SystemPropertyKeys.database.name(), api.databaseName()).stream().map(node -> { if (node.hasLabel(SystemLabels.Procedure)) { return procedureDescriptor(node); } else if (node.hasLabel(SystemLabels.Function)) { return userFunctionDescriptor(node); } else { throw new IllegalStateException("don't know what to do with systemdb node " + node); } }).collect(Collectors.toList()); tx.commit(); } return descriptors.stream(); } private ProcedureDescriptor procedureDescriptor(Node node) { String statement = (String) node.getProperty(SystemPropertyKeys.statement.name()); String name = (String) node.getProperty(SystemPropertyKeys.name.name()); String description = (String) node.getProperty(SystemPropertyKeys.description.name(), null); String property = (String) node.getProperty(SystemPropertyKeys.inputs.name()); List<FieldSignature> inputs = deserializeSignatures(property); List<FieldSignature> outputSignature = deserializeSignatures((String) node.getProperty(SystemPropertyKeys.outputs.name())); return new ProcedureDescriptor(Signatures.createProcedureSignature( new QualifiedName(new String[]{PREFIX}, name), inputs, outputSignature, Mode.valueOf((String) node.getProperty(SystemPropertyKeys.mode.name())), false, null, new String[0], description, null, false, false, false, false ), statement); } private UserFunctionDescriptor userFunctionDescriptor(Node node) { String statement = (String) node.getProperty(SystemPropertyKeys.statement.name()); String name = (String) node.getProperty(SystemPropertyKeys.name.name()); String description = (String) node.getProperty(SystemPropertyKeys.description.name(), null); String property = (String) node.getProperty(SystemPropertyKeys.inputs.name()); List<FieldSignature> inputs = deserializeSignatures(property); boolean forceSingle = (boolean) node.getProperty(SystemPropertyKeys.forceSingle.name(), false); return new UserFunctionDescriptor(new UserFunctionSignature( new QualifiedName(new String[]{PREFIX}, name), inputs, typeof((String) node.getProperty(SystemPropertyKeys.output.name())), null, new String[0], description, "apoc.custom", false ), statement, forceSingle); } public void restoreProceduresAndFunctions() { lastUpdate = System.currentTimeMillis(); Set<ProcedureSignature> currentProcedureSignatures = Collections.synchronizedSet(new HashSet<>()); Set<UserFunctionSignature> currentUserFunctionSignatures = Collections.synchronizedSet(new HashSet<>()); readSignatures().forEach(descriptor -> { descriptor.register(); if (descriptor instanceof ProcedureDescriptor) { ProcedureSignature signature = ((ProcedureDescriptor) descriptor).getSignature(); currentProcedureSignatures.add(signature); registeredProcedureSignatures.remove(signature); } else { UserFunctionSignature signature = ((UserFunctionDescriptor) descriptor).getSignature(); currentUserFunctionSignatures.add(signature); registeredUserFunctionSignatures.remove(signature); } }); // de-register removed procs/functions registeredProcedureSignatures.forEach(signature -> registerProcedure(signature, null)); registeredUserFunctionSignatures.forEach(signature -> registerFunction(signature, null, false)); registeredProcedureSignatures = currentProcedureSignatures; registeredUserFunctionSignatures = currentUserFunctionSignatures; api.executeTransactionally("call 
db.clearQueryCaches()"); } private <T> T withSystemDb(Function<Transaction, T> action) { try (Transaction tx = systemDb.beginTx()) { T result = action.apply(tx); tx.commit(); return result; } } public void storeFunction(UserFunctionSignature signature, String statement, boolean forceSingle) { withSystemDb(tx -> { Node node = Util.mergeNode(tx, SystemLabels.ApocCypherProcedures, SystemLabels.Function, Pair.of(SystemPropertyKeys.database.name(), api.databaseName()), Pair.of(SystemPropertyKeys.name.name(), signature.name().name()) ); node.setProperty(SystemPropertyKeys.description.name(), signature.description().orElse(null)); node.setProperty(SystemPropertyKeys.statement.name(), statement); node.setProperty(SystemPropertyKeys.inputs.name(), serializeSignatures(signature.inputSignature())); node.setProperty(SystemPropertyKeys.output.name(), signature.outputType().toString()); node.setProperty(SystemPropertyKeys.forceSingle.name(), forceSingle); setLastUpdate(tx); registerFunction(signature, statement, forceSingle); return null; }); } public void storeProcedure(ProcedureSignature signature, String statement) { withSystemDb(tx -> { Node node = Util.mergeNode(tx, SystemLabels.ApocCypherProcedures, SystemLabels.Procedure, Pair.of(SystemPropertyKeys.database.name(), api.databaseName()), Pair.of(SystemPropertyKeys.name.name(), signature.name().name()) ); node.setProperty(SystemPropertyKeys.description.name(), signature.description().orElse(null)); node.setProperty(SystemPropertyKeys.statement.name(), statement); node.setProperty(SystemPropertyKeys.inputs.name(), serializeSignatures(signature.inputSignature())); node.setProperty(SystemPropertyKeys.outputs.name(), serializeSignatures(signature.outputSignature())); node.setProperty(SystemPropertyKeys.mode.name(), signature.mode().name()); setLastUpdate(tx); registerProcedure(signature, statement); return null; }); } private String serializeSignatures(List<FieldSignature> signatures) { List<Map<String, Object>> mapped = signatures.stream().map(fs -> map( "name", fs.name(), "type", fs.neo4jType().toString(), "default", fs.defaultValue().orElse(DefaultParameterValue.nullValue(new Neo4jTypes.AnyType())).value() )).collect(Collectors.toList()); return Util.toJson(mapped); } private List<FieldSignature> deserializeSignatures(String s) { List<Map<String, Object>> mapped = Util.fromJson(s, List.class); return mapped.stream().map(map -> { String typeString = (String) map.get("type"); if (typeString.endsWith("?")) { typeString = typeString.substring(0, typeString.length() - 1); } AnyType type = typeof(typeString); Object deflt = map.get("default"); if (deflt == null) { return FieldSignature.inputField((String) map.get("name"), type); } else { return FieldSignature.inputField((String) map.get("name"), type, new DefaultParameterValue(deflt, type)); } }).collect(Collectors.toList()); } private void setLastUpdate(Transaction tx) { Node node = tx.findNode(SystemLabels.ApocCypherProceduresMeta, SystemPropertyKeys.database.name(), api.databaseName()); if (node == null) { node = tx.createNode(SystemLabels.ApocCypherProceduresMeta); node.setProperty(SystemPropertyKeys.database.name(), api.databaseName()); } node.setProperty(SystemPropertyKeys.lastUpdated.name(), System.currentTimeMillis()); } private long getLastUpdate() { return withSystemDb( tx -> { Node node = tx.findNode(SystemLabels.ApocCypherProceduresMeta, SystemPropertyKeys.database.name(), api.databaseName()); return node == null ? 
0L : (long) node.getProperty(SystemPropertyKeys.lastUpdated.name()); }); } public ProcedureSignature procedureSignature(String name, String mode, List<List<String>> outputs, List<List<String>> inputs, String description) { boolean admin = false; // TODO return new ProcedureSignature(qualifiedName(name), inputSignatures(inputs), outputSignatures(outputs), Mode.valueOf(mode.toUpperCase()), admin, null, new String[0], description, null, false, false, true, false ); } public UserFunctionSignature functionSignature(String name, String output, List<List<String>> inputs, String description) { AnyType outType = typeof(output.isEmpty() ? "LIST OF MAP" : output); return new UserFunctionSignature(qualifiedName(name), inputSignatures(inputs), outType, null, new String[0], description, "apoc.custom",false); } /** * * @param signature * @param statement null indicates a removed procedure * @return */ public boolean registerProcedure(ProcedureSignature signature, String statement) { try { globalProceduresRegistry.register(new CallableProcedure.BasicProcedure(signature) { @Override public RawIterator<AnyValue[], ProcedureException> apply(org.neo4j.kernel.api.procedure.Context ctx, AnyValue[] input, ResourceTracker resourceTracker) throws ProcedureException { if (statement == null) { final String error = String.format("There is no procedure with the name `%s` registered for this database instance. " + "Please ensure you've spelled the procedure name correctly and that the procedure is properly deployed.", signature.name()); throw new QueryExecutionException(error, null, "Neo.ClientError.Statement.SyntaxError"); } else { Map<String, Object> params = params(input, signature.inputSignature(), ctx.valueMapper()); Transaction tx = transactionComponentFunction.apply(ctx); Result result = tx.execute(statement, params); resourceTracker.registerCloseableResource(result); List<FieldSignature> outputs = signature.outputSignature(); String[] names = outputs == null ? 
null : outputs.stream().map(FieldSignature::name).toArray(String[]::new); boolean defaultOutputs = outputs == null || outputs.equals(DEFAULT_MAP_OUTPUT); Stream<AnyValue[]> stream = result.stream().map(row -> toResult(row, names, defaultOutputs)); return Iterators.asRawIterator(stream); } } }, true); registeredProcedureSignatures.add(signature); return true; } catch (Exception e) { log.error("Could not register procedure: " + signature.name() + " with " + statement + "\n accepting" + signature.inputSignature() + " resulting in " + signature.outputSignature() + " mode " + signature.mode(), e); return false; } } public boolean registerFunction(UserFunctionSignature signature, String statement, boolean forceSingle) { try { globalProceduresRegistry.register(new CallableUserFunction.BasicUserFunction(signature) { @Override public AnyValue apply(org.neo4j.kernel.api.procedure.Context ctx, AnyValue[] input) throws ProcedureException { if (statement == null) { final String error = String.format("Unknown function '%s'", signature.name()); throw new QueryExecutionException(error, null, "Neo.ClientError.Statement.SyntaxError"); } else { Map<String, Object> params = params(input, signature.inputSignature(), ctx.valueMapper()); AnyType outType = signature.outputType(); Transaction tx = transactionComponentFunction.apply(ctx); try (Result result = tx.execute(statement, params)) { // resourceTracker.registerCloseableResource(result); // TODO if (!result.hasNext()) return null; if (outType.equals(NTAny)) { return ValueUtils.of(result.stream().collect(Collectors.toList())); } List<String> cols = result.columns(); if (cols.isEmpty()) return null; if (!forceSingle && outType instanceof Neo4jTypes.ListType) { Neo4jTypes.ListType listType = (Neo4jTypes.ListType) outType; Neo4jTypes.AnyType innerType = listType.innerType(); if (innerType instanceof Neo4jTypes.MapType) return ValueUtils.of(result.stream().collect(Collectors.toList())); if (cols.size() == 1) return ValueUtils.of(result.stream().map(row -> row.get(cols.get(0))).collect(Collectors.toList())); } else { Map<String, Object> row = result.next(); if (outType instanceof Neo4jTypes.MapType) return ValueUtils.of(row); if (cols.size() == 1) return ValueUtils.of(row.get(cols.get(0))); } throw new IllegalStateException("Result mismatch " + cols + " output type is " + outType); } } } }, true); registeredUserFunctionSignatures.add(signature); return true; } catch (Exception e) { log.error("Could not register function: " + signature + "\nwith: " + statement + "\n single result " + forceSingle, e); return false; } } public static QualifiedName qualifiedName(@Name("name") String name) { String[] names = name.split("\\."); List<String> namespace = new ArrayList<>(names.length); namespace.add(PREFIX); namespace.addAll(Arrays.asList(names)); return new QualifiedName(namespace.subList(0, namespace.size() - 1), names[names.length - 1]); } public List<FieldSignature> inputSignatures(@Name(value = "inputs", defaultValue = "null") List<List<String>> inputs) { List<FieldSignature> inputSignature = inputs == null ? singletonList(FieldSignature.inputField("params", NTMap, DefaultParameterValue.ntMap(Collections.emptyMap()))) : inputs.stream().map(pair -> { DefaultParameterValue defaultValue = defaultValue(pair.get(1), pair.size() > 2 ? pair.get(2) : null); return defaultValue == null ? 
FieldSignature.inputField(pair.get(0), typeof(pair.get(1))) : FieldSignature.inputField(pair.get(0), typeof(pair.get(1)), defaultValue); }).collect(Collectors.toList()); return inputSignature; } public List<FieldSignature> outputSignatures(@Name(value = "outputs", defaultValue = "null") List<List<String>> outputs) { return outputs == null ? singletonList(FieldSignature.inputField("row", NTMap)) : outputs.stream().map(pair -> FieldSignature.outputField(pair.get(0), typeof(pair.get(1)))).collect(Collectors.toList()); } private Neo4jTypes.AnyType typeof(String typeName) { typeName = typeName.replaceAll("\\?", ""); typeName = typeName.toUpperCase(); if (typeName.startsWith("LIST OF ")) return NTList(typeof(typeName.substring(8))); if (typeName.startsWith("LIST ")) return NTList(typeof(typeName.substring(5))); switch (typeName) { case "ANY": return NTAny; case "MAP": return NTMap; case "NODE": return NTNode; case "REL": return NTRelationship; case "RELATIONSHIP": return NTRelationship; case "EDGE": return NTRelationship; case "PATH": return NTPath; case "NUMBER": return NTNumber; case "LONG": return NTInteger; case "INT": return NTInteger; case "INTEGER": return NTInteger; case "FLOAT": return NTFloat; case "DOUBLE": return NTFloat; case "BOOL": return NTBoolean; case "BOOLEAN": return NTBoolean; case "DATE": return NTDate; case "TIME": return NTTime; case "LOCALTIME": return NTLocalTime; case "DATETIME": return NTDateTime; case "LOCALDATETIME": return NTLocalDateTime; case "DURATION": return NTDuration; case "POINT": return NTPoint; case "GEO": return NTGeometry; case "GEOMETRY": return NTGeometry; case "STRING": return NTString; case "TEXT": return NTString; default: return NTString; } } private DefaultParameterValue defaultValue(String typeName, String stringValue) { if (stringValue == null) return null; Object value = JsonUtil.parse(stringValue, null, Object.class); if (value == null) return null; typeName = typeName.toUpperCase(); if (typeName.startsWith("LIST ")) return DefaultParameterValue.ntList((List<?>) value, typeof(typeName.substring(5))); switch (typeName) { case "MAP": return DefaultParameterValue.ntMap((Map<String, Object>) value); case "NODE": case "REL": case "RELATIONSHIP": case "EDGE": case "PATH": return null; case "NUMBER": return value instanceof Float || value instanceof Double ? DefaultParameterValue.ntFloat(((Number) value).doubleValue()) : DefaultParameterValue.ntInteger(((Number) value).longValue()); case "LONG": case "INT": case "INTEGER": return DefaultParameterValue.ntInteger(((Number) value).longValue()); case "FLOAT": case "DOUBLE": return DefaultParameterValue.ntFloat(((Number) value).doubleValue()); case "BOOL": case "BOOLEAN": return DefaultParameterValue.ntBoolean((Boolean) value); case "DATE": case "TIME": case "LOCALTIME": case "DATETIME": case "LOCALDATETIME": case "DURATION": case "POINT": case "GEO": case "GEOMETRY": return null; case "STRING": case "TEXT": return DefaultParameterValue.ntString(value.toString()); default: return null; } } private AnyValue[] toResult(Map<String, Object> row, String[] names, boolean defaultOutputs) { if (defaultOutputs) { return new AnyValue[]{convertToValueRecursive(row)}; } else { AnyValue[] result = new AnyValue[names.length]; for (int i = 0; i < names.length; i++) { result[i] = convertToValueRecursive(row.get(names[i])); } return result; } } private AnyValue convertToValueRecursive(Object... 
toConverts) { switch (toConverts.length) { case 0: return Values.NO_VALUE; case 1: Object toConvert = toConverts[0]; if (toConvert instanceof List) { List list = (List) toConvert; AnyValue[] objects = ((Stream<AnyValue>) list.stream().map(x -> convertToValueRecursive(x))).toArray(AnyValue[]::new); return VirtualValues.list(objects); } else if (toConvert instanceof Map) { Map<String, Object> map = (Map) toConvert; MapValueBuilder builder = new MapValueBuilder(map.size()); map.entrySet().stream().forEach(e -> { builder.add(e.getKey(), convertToValueRecursive(e.getValue())); }); return builder.build(); } else if (toConvert instanceof Entity || toConvert instanceof Path){ return ValueUtils.asAnyValue(toConvert); } else { return Values.of(toConvert); } default: AnyValue[] values = Arrays.stream(toConverts).map(c -> convertToValueRecursive(c)).toArray(AnyValue[]::new); return VirtualValues.list(values); } } public Map<String, Object> params(AnyValue[] input, List<FieldSignature> fieldSignatures, ValueMapper valueMapper) { if (input == null || input.length == 0) return Collections.emptyMap(); if (fieldSignatures == null || fieldSignatures.isEmpty() || fieldSignatures.equals(DEFAULT_INPUTS)) return (Map<String, Object>) input[0].map(valueMapper); Map<String, Object> params = new HashMap<>(input.length); for (int i = 0; i < input.length; i++) { params.put(fieldSignatures.get(i).name(), input[i].map(valueMapper)); } return params; } public void removeProcedure(String name) { withSystemDb(tx -> { Node node = Iterators.single(tx.findNodes(SystemLabels.ApocCypherProcedures, SystemPropertyKeys.database.name(), api.databaseName(), SystemPropertyKeys.name.name(), name ).stream().filter(n -> n.hasLabel(SystemLabels.Procedure)).iterator()); ProcedureDescriptor descriptor = procedureDescriptor(node); registerProcedure(descriptor.getSignature(), null); registeredProcedureSignatures.remove(descriptor.getSignature()); node.delete(); setLastUpdate(tx); return null; }); } public void removeFunction(String name) { withSystemDb(tx -> { Node node = Iterators.single(tx.findNodes(SystemLabels.ApocCypherProcedures, SystemPropertyKeys.database.name(), api.databaseName(), SystemPropertyKeys.name.name(), name ).stream().filter(n -> n.hasLabel(SystemLabels.Function)).iterator()); UserFunctionDescriptor descriptor = userFunctionDescriptor(node); registerFunction(descriptor.getSignature(), null, false); registeredUserFunctionSignatures.remove(descriptor.getSignature()); node.delete(); setLastUpdate(tx); return null; }); } public abstract class ProcedureOrFunctionDescriptor { private final String statement; protected ProcedureOrFunctionDescriptor(String statement) { this.statement = statement; } public String getStatement() { return statement; } abstract public void register(); } public class ProcedureDescriptor extends ProcedureOrFunctionDescriptor { private final ProcedureSignature signature; public ProcedureDescriptor(ProcedureSignature signature, String statement) { super(statement); this.signature = signature; } public ProcedureSignature getSignature() { return signature; } @Override public void register() { registerProcedure(getSignature(), getStatement()); } } public class UserFunctionDescriptor extends ProcedureOrFunctionDescriptor { private final UserFunctionSignature signature; private final boolean forceSingle; public UserFunctionDescriptor(UserFunctionSignature signature, String statement, boolean forceSingle) { super(statement); this.signature = signature; this.forceSingle = forceSingle; } public 
UserFunctionSignature getSignature() { return signature; } public boolean isForceSingle() { return forceSingle; } @Override public void register() { registerFunction(getSignature(), getStatement(), isForceSingle()); } } }
update based on API change to ProcedureSignature
full/src/main/java/apoc/custom/CypherProceduresHandler.java
update based on API change to ProcedureSignature
<ide><path>full/src/main/java/apoc/custom/CypherProceduresHandler.java
<ide> public ProcedureSignature procedureSignature(String name, String mode, List<List<String>> outputs, List<List<String>> inputs, String description) {
<ide> boolean admin = false; // TODO
<ide> return new ProcedureSignature(qualifiedName(name), inputSignatures(inputs), outputSignatures(outputs),
<del> Mode.valueOf(mode.toUpperCase()), admin, null, new String[0], description, null, false, false, true, false
<add> Mode.valueOf(mode.toUpperCase()), admin, null, new String[0], description, null, false, false, true, false, false
<ide> );
<ide> }
<ide>
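The diff above is the whole fix: the Neo4j ProcedureSignature constructor gained one more trailing boolean, so the direct new ProcedureSignature(...) call had to grow an argument. A hedged sketch of one way to insulate call sites from that kind of arity churn — a small factory in the spirit of the Signatures.createProcedureSignature(...) helper the file already routes one call through; the class and method names here are illustrative, not part of APOC:

package apoc.custom;

import java.util.List;

import org.neo4j.internal.kernel.api.procs.FieldSignature;
import org.neo4j.internal.kernel.api.procs.ProcedureSignature;
import org.neo4j.internal.kernel.api.procs.QualifiedName;
import org.neo4j.procedure.Mode;

// Hypothetical helper: every direct ProcedureSignature construction goes through
// here, so an upstream arity change (like the extra trailing `false` in this
// commit) is a one-line fix instead of a search across call sites.
final class ProcedureSignatureFactory {

    private ProcedureSignatureFactory() {
    }

    static ProcedureSignature custom(QualifiedName name,
                                     List<FieldSignature> inputs,
                                     List<FieldSignature> outputs,
                                     Mode mode,
                                     boolean admin,
                                     String description) {
        // Argument order copied verbatim from the call shown in the diff above;
        // only the trailing boolean flags vary between Neo4j versions.
        return new ProcedureSignature(name, inputs, outputs, mode, admin,
                null, new String[0], description, null,
                false, false, true, false, false);
    }
}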
Java
apache-2.0
aeb773b96dc3f82c4943c6bf4f95a0cb49733ba9
0
jayxue/YouTubeUploader
package com.wms.youtubeuploader.sdk.activity; import android.accounts.AccountManager; import android.app.Activity; import android.content.ActivityNotFoundException; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.MediaStore; import android.view.View; import android.view.WindowManager; import android.widget.EditText; import android.widget.ImageButton; import android.widget.MediaController; import android.widget.ProgressBar; import android.widget.TextView; import android.widget.Toast; import android.widget.VideoView; import com.google.android.gms.common.AccountPicker; import com.wms.youtubeuploader.sdk.R; import com.wms.youtubeuploader.sdk.dialog.ConfirmUploadVideoDialogBuilder; import com.wms.youtubeuploader.sdk.handler.FetchTokenHandler; import com.wms.youtubeuploader.sdk.handler.UploadProgressHandler; import com.wms.youtubeuploader.sdk.listener.ImageButtonBackgroundSelector; import com.wms.youtubeuploader.sdk.task.FetchYouTubeTokenTask; import com.wms.youtubeuploader.sdk.util.DialogUtil; import com.wms.youtubeuploader.sdk.util.FileUtil; import com.wms.youtubeuploader.sdk.task.YouTubeUploadTask; import java.io.File; import java.util.UUID; /** * In order to upload videos to YouTube, you need to create a Google API project in the Google Developers Console (https://console.developers.google.com). In the API project, under * APIs & Auth and Credentials, create two OAuth client IDs for each individual app, one for the dev version and one for the release version. Enter the package name and SHA1 fingerprint for each. * You'll need to wait for 15 minutes or longer until the ID takes effect. */ public class UploadVideoActivity extends Activity { private ProgressBar progressBarUploadVideo = null; private EditText editTextVideoTitle = null; private EditText editTextVideoDescription = null; private TextView textViewFilePath = null; private TextView textViewVideoUrl = null; private TextView textViewProgress = null; private ImageButton imageButtonTakeVideo = null; private ImageButton imageButtonGallery = null; private ImageButton imageButtonUploadVideo = null; private VideoView videoViewPreview = null; private String videoFileName = ""; private String selectedGoogleAccount; private FetchTokenHandler fetchTokenHandler = null; private UploadProgressHandler uploadProgressHandler = null; private YouTubeUploadTask youtubeUploadTask = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.upload_video); editTextVideoTitle = (EditText) findViewById(R.id.editTextTitle); editTextVideoDescription = (EditText) findViewById(R.id.editTextDescription); textViewFilePath = (TextView) findViewById(R.id.textViewFilePath); textViewVideoUrl = (TextView) findViewById(R.id.textViewVideoUrl); textViewProgress = (TextView) findViewById(R.id.textViewProgress); videoViewPreview = (VideoView) findViewById(R.id.videoViewPreview); imageButtonUploadVideo = (ImageButton) findViewById(R.id.imageButtonUploadVideo); imageButtonUploadVideo.setOnClickListener(new ImageButtonUploadVideoOnClickListener()); imageButtonUploadVideo.setOnTouchListener(new ImageButtonBackgroundSelector()); imageButtonUploadVideo.setEnabled(false); imageButtonTakeVideo = (ImageButton) findViewById(R.id.imageButtonTakeVideo); imageButtonTakeVideo.setOnClickListener(new ImageButtonTakeVideoOnClickListener()); imageButtonTakeVideo.setOnTouchListener(new ImageButtonBackgroundSelector()); imageButtonGallery = (ImageButton) 
findViewById(R.id.imageButtonGallery); imageButtonGallery.setOnClickListener(new ImageButtonGalleryOnClickListener()); imageButtonGallery.setOnTouchListener(new ImageButtonBackgroundSelector()); progressBarUploadVideo = (ProgressBar) findViewById(R.id.progressBarUploadVideo); uploadProgressHandler = new UploadProgressHandler(this); fetchTokenHandler = new FetchTokenHandler(this); // Do not show the soft keyboard this.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN); } @Override public void onActivityResult(int requestCode, int resultCode,Intent data) { if (requestCode == IntentRequestCode.TAKE_VIDEO && resultCode == RESULT_OK) { // videoFileName has been prepared for taking video File file = new File(videoFileName); // On Android 2.2, the file may not be created, therefore we need to check the returned URI. if (!file.exists()) { if (data.getData() != null) { videoFileName = getRealPathFromURI(data.getData()); if(videoFileName != null) { onVideoReady(); } } else { videoFileName = null; Toast.makeText(this, getString(R.string.videoNotAvailable), Toast.LENGTH_LONG).show(); } } else { onVideoReady(); } } else if (requestCode == IntentRequestCode.PICK_UP_VIDEO && resultCode == RESULT_OK) { Uri selectedVideo = data.getData(); videoFileName = getRealPathFromURI(selectedVideo); if(videoFileName != null) { onVideoReady(); } else { Toast.makeText(this, getString(R.string.videoNotAvailable), Toast.LENGTH_LONG).show(); } } else if (requestCode == IntentRequestCode.REQUEST_ACCOUNT_PICKER &&resultCode == Activity.RESULT_OK) { if(data != null && data.getExtras() != null) { String accountName = data.getExtras().getString(AccountManager.KEY_ACCOUNT_NAME); if (accountName != null) { selectedGoogleAccount = accountName; fetchToken(); } else { DialogUtil.showExceptionAlertDialog(this, getString(R.string.googleAccountNotSelected), getString(R.string.googleAccountNotSupported)); } } } else if(requestCode == IntentRequestCode.REQUEST_AUTHORIZATION && resultCode == Activity.RESULT_OK) { // Account has been chosen and permissions have been granted. 
You can upload video Toast.makeText(this, getString(R.string.appAuthorized), Toast.LENGTH_LONG).show(); } super.onActivityResult(requestCode, resultCode, data); } private String getRealPathFromURI(Uri contentUri) { String filePath = null; String[] projection = { MediaStore.Video.Media.DATA }; Cursor cursor = getContentResolver().query(contentUri, projection, null, null, null); if(cursor.moveToFirst()) { int columnIndex = cursor.getColumnIndexOrThrow(projection[0]); filePath = cursor.getString(columnIndex); } cursor.close(); return filePath; } private class ImageButtonUploadVideoOnClickListener implements ImageButton.OnClickListener { @Override public void onClick(View v) { // Title must be provided for a YouTube video if(editTextVideoTitle.getText().toString().trim().isEmpty()) { DialogUtil.showDialog(UploadVideoActivity.this, getString(R.string.enterVideoTitle)); return; } new ConfirmUploadVideoDialogBuilder(UploadVideoActivity.this, uploadProgressHandler).create().show(); } } private File getTempVideoFile() { // It will return a file path like: /mnt/sdcard/com.company.app videoFileName = FileUtil.getAppExternalStoragePath(this); File file = new File(videoFileName); if (!file.exists()) { // Create the folder if it does not exist file.mkdir(); } // Generate a UUID as file name videoFileName += "/" + UUID.randomUUID().toString() + ".3gp"; file = new File(videoFileName); return file; } private class ImageButtonTakeVideoOnClickListener implements ImageButton.OnClickListener { @Override public void onClick(View v) { startTakeVideo(); } } private class ImageButtonGalleryOnClickListener implements ImageButton.OnClickListener { @Override public void onClick(View v) { startPickVideo(); } } private void startTakeVideo() { resetProgress(); Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE); intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(getTempVideoFile())); intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1); intent.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 300); startActivityForResult(intent, IntentRequestCode.TAKE_VIDEO); } private void startPickVideo() { resetProgress(); Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Video.Media.EXTERNAL_CONTENT_URI); try { startActivityForResult(intent, IntentRequestCode.PICK_UP_VIDEO); } catch (ActivityNotFoundException e) { // On Android 2.2, the above method may cause exception due to not finding an activity to handle the intent. Use the method below instead. 
Intent mediaChooser = new Intent(Intent.ACTION_GET_CONTENT); mediaChooser.setType("video/*"); startActivityForResult(mediaChooser, IntentRequestCode.PICK_UP_VIDEO); } catch (SecurityException e) { // When picking up videos, there may be an exception like: // java.lang.SecurityException: // Permission Denial: // starting Intent { act=android.intent.action.PICK // dat=content://media/external/video/media // cmp=com.android.music/.VideoBrowserActivity } from ProcessRecord // Try another way to start the intent intent = new Intent(Intent.ACTION_PICK, null); intent.setType("video/*"); try { startActivityForResult(intent, IntentRequestCode.PICK_UP_VIDEO); } catch (Exception ex) { DialogUtil.showExceptionAlertDialog(UploadVideoActivity.this, getString(R.string.cannotPickUpVideo), getString(R.string.notSupportedOnDevice)); } } } private void onVideoReady() { MediaController mediaController = new MediaController(this); videoViewPreview.setVisibility(View.VISIBLE); videoViewPreview.setVideoPath(videoFileName); videoViewPreview.setMediaController(mediaController); videoViewPreview.requestFocus(); videoViewPreview.start(); videoViewPreview.pause(); imageButtonUploadVideo.setEnabled(true); imageButtonUploadVideo.setImageResource(R.drawable.upload); textViewFilePath.setText(videoFileName); editTextVideoTitle.setText(""); editTextVideoDescription.setText(""); textViewVideoUrl.setText(getString(R.string.noUrlYet)); Toast.makeText(this, R.string.pressVideoToPreview, Toast.LENGTH_LONG).show(); } private void resetProgress() { progressBarUploadVideo.setProgress(0); textViewProgress.setText(" 00%"); } private void fetchToken() { new FetchYouTubeTokenTask(this, selectedGoogleAccount, fetchTokenHandler).execute(); } public void preventUploadingSameVideo() { imageButtonUploadVideo.setEnabled(false); imageButtonUploadVideo.setImageResource(R.drawable.upload_disabled); } /** * Pick up a Google account from the device. See http://developer.android.com/google/auth/http-auth.html. */ public void chooseAccount() { String[] accountTypes = new String[]{"com.google"}; Intent intent = AccountPicker.newChooseAccountIntent(null, null, accountTypes, false, null, null, null, null); startActivityForResult(intent, IntentRequestCode.REQUEST_ACCOUNT_PICKER); } /** * Uploads user selected video to the user's YouTube account using OAuth2 for authentication. */ public void uploadYouTubeVideo() { youtubeUploadTask = new YouTubeUploadTask(this, videoFileName, getString(R.string.app_name), editTextVideoTitle.getText().toString(), editTextVideoDescription.getText().toString(), selectedGoogleAccount, uploadProgressHandler); youtubeUploadTask.execute(); } public TextView getTextViewProgress() { return textViewProgress; } public ProgressBar getProgressBarUploadVideo() { return progressBarUploadVideo; } public TextView getTextViewVideoUrl() { return textViewVideoUrl; } public void setSelectedGoogleAccount(String account) { selectedGoogleAccount = account; } }
YouTubeUploaderSDK/src/main/java/com/wms/youtubeuploader/sdk/activity/UploadVideoActivity.java
package com.wms.youtubeuploader.sdk.activity; import android.accounts.AccountManager; import android.app.Activity; import android.content.ActivityNotFoundException; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.MediaStore; import android.view.View; import android.view.WindowManager; import android.widget.EditText; import android.widget.ImageButton; import android.widget.MediaController; import android.widget.ProgressBar; import android.widget.TextView; import android.widget.Toast; import android.widget.VideoView; import com.google.android.gms.common.AccountPicker; import com.wms.youtubeuploader.sdk.R; import com.wms.youtubeuploader.sdk.dialog.ConfirmUploadVideoDialogBuilder; import com.wms.youtubeuploader.sdk.handler.FetchTokenHandler; import com.wms.youtubeuploader.sdk.handler.UploadProgressHandler; import com.wms.youtubeuploader.sdk.listener.ImageButtonBackgroundSelector; import com.wms.youtubeuploader.sdk.task.FetchYouTubeTokenTask; import com.wms.youtubeuploader.sdk.util.DialogUtil; import com.wms.youtubeuploader.sdk.util.FileUtil; import com.wms.youtubeuploader.sdk.task.YouTubeUploadTask; import java.io.File; import java.util.UUID; /** * In order to upload videos to YouTube, you need to create a Google API project in Google Developers Console (https://console.developers.google.com). In the API project, under * APIs & Auth and Credentials, create two OAuth client IDs for each individual app, one for dev version and one for release version. Enter package name and SHA1 code for it. * You'll need to wait for 15 minutes or longer until the ID takes effect. */ public class UploadVideoActivity extends Activity { private ProgressBar progressBarUploadVideo = null; private EditText editTextVideoTitle = null; private EditText editTextVideoDescription = null; private TextView textViewFilePath = null; private TextView textViewVideoUrl = null; private TextView textViewProgress = null; private ImageButton imageButtonTakeVideo = null; private ImageButton imageButtonGallery = null; private ImageButton imageButtonUploadVideo = null; private VideoView videoViewPreview = null; private String videoFileName = ""; private String selectedGoogleAccount; private FetchTokenHandler fetchTokenHandler = null; private UploadProgressHandler uploadProgressHandler = null; private YouTubeUploadTask youtubeUploadTask = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.upload_video); editTextVideoTitle = (EditText) findViewById(R.id.editTextTitle); editTextVideoDescription = (EditText) findViewById(R.id.editTextDescription); textViewFilePath = (TextView) findViewById(R.id.textViewFilePath); textViewVideoUrl = (TextView) findViewById(R.id.textViewVideoUrl); textViewProgress = (TextView) findViewById(R.id.textViewProgress); videoViewPreview = (VideoView) findViewById(R.id.videoViewPreview); imageButtonUploadVideo = (ImageButton) findViewById(R.id.imageButtonUploadVideo); imageButtonUploadVideo.setOnClickListener(new ImageButtonUploadVideoOnClickListener()); imageButtonUploadVideo.setOnTouchListener(new ImageButtonBackgroundSelector()); imageButtonUploadVideo.setEnabled(false); imageButtonTakeVideo = (ImageButton) findViewById(R.id.imageButtonTakeVideo); imageButtonTakeVideo.setOnClickListener(new ImageButtonTakeVideoOnClickListener()); imageButtonTakeVideo.setOnTouchListener(new ImageButtonBackgroundSelector()); imageButtonGallery = (ImageButton) 
findViewById(R.id.imageButtonGallery); imageButtonGallery.setOnClickListener(new ImageButtonGalleryOnClickListener()); imageButtonGallery.setOnTouchListener(new ImageButtonBackgroundSelector()); progressBarUploadVideo = (ProgressBar) findViewById(R.id.progressBarUploadVideo); uploadProgressHandler = new UploadProgressHandler(this); fetchTokenHandler = new FetchTokenHandler(this); // Do not show the soft keyboard this.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN); } @Override public void onActivityResult(int requestCode, int resultCode,Intent data) { if (requestCode == IntentRequestCode.TAKE_VIDEO && resultCode == RESULT_OK) { // videoFileName has been prepared for taking video File file = new File(videoFileName); // On Android 2.2, the file may not be created, therefore we need to check the returned URI. if (!file.exists()) { if (data.getData() != null) { videoFileName = getRealPathFromURI(data.getData()); if(videoFileName != null) { onVideoReady(); } } else { videoFileName = null; Toast.makeText(this, getString(R.string.videoNotAvailable), Toast.LENGTH_LONG).show(); } } else { onVideoReady(); } } else if (requestCode == IntentRequestCode.PICK_UP_VIDEO && resultCode == RESULT_OK) { Uri selectedVideo = data.getData(); videoFileName = getRealPathFromURI(selectedVideo); if(videoFileName != null) { onVideoReady(); } else { Toast.makeText(this, getString(R.string.videoNotAvailable), Toast.LENGTH_LONG).show(); } } else if (requestCode == IntentRequestCode.REQUEST_ACCOUNT_PICKER &&resultCode == Activity.RESULT_OK) { if(data != null && data.getExtras() != null) { String accountName = data.getExtras().getString(AccountManager.KEY_ACCOUNT_NAME); if (accountName != null) { selectedGoogleAccount = accountName; fetchToken(); } else { DialogUtil.showExceptionAlertDialog(this, getString(R.string.googleAccountNotSelected), getString(R.string.googleAccountNotSupported)); } } } else if(requestCode == IntentRequestCode.REQUEST_AUTHORIZATION && resultCode == Activity.RESULT_OK) { // Account has been chosen and permissions have been granted. 
You can upload video Toast.makeText(this, getString(R.string.appAuthorized), Toast.LENGTH_LONG).show(); } super.onActivityResult(requestCode, resultCode, data); } private String getRealPathFromURI(Uri contentUri) { String filePath = null; String[] projection = { MediaStore.Video.Media.DATA }; Cursor cursor = getContentResolver().query(contentUri, projection, null, null, null); if(cursor.moveToFirst()) { int columnIndex = cursor.getColumnIndexOrThrow(projection[0]); filePath = cursor.getString(columnIndex); } cursor.close(); return filePath; } private class ImageButtonUploadVideoOnClickListener implements ImageButton.OnClickListener { @Override public void onClick(View v) { // Title must be provided for a YouTube video if(editTextVideoTitle.getText().toString().trim().isEmpty()) { DialogUtil.showDialog(UploadVideoActivity.this, getString(R.string.enterVideoTitle)); return; } new ConfirmUploadVideoDialogBuilder(UploadVideoActivity.this, uploadProgressHandler).create().show(); } } private File getTempVideoFile() { // It will return a file path like: /mnt/sdcard/com.company.app videoFileName = FileUtil.getAppExternalStoragePath(this); File file = new File(videoFileName); if (!file.exists()) { // Create the folder if it does not exist file.mkdir(); } // Generate a UUID as file name videoFileName += "/" + UUID.randomUUID().toString() + ".3gp"; file = new File(videoFileName); return file; } private class ImageButtonTakeVideoOnClickListener implements ImageButton.OnClickListener { @Override public void onClick(View v) { startTakeVideo(); } } private class ImageButtonGalleryOnClickListener implements ImageButton.OnClickListener { @Override public void onClick(View v) { startPickVideo(); } } private void startTakeVideo() { resetProgress(); Intent intent = new Intent(MediaStore.ACTION_VIDEO_CAPTURE); intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(getTempVideoFile())); intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1); intent.putExtra(MediaStore.EXTRA_DURATION_LIMIT, 300); startActivityForResult(intent, IntentRequestCode.TAKE_VIDEO); } private void startPickVideo() { resetProgress(); Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Video.Media.EXTERNAL_CONTENT_URI); try { startActivityForResult(intent, IntentRequestCode.PICK_UP_VIDEO); } catch (ActivityNotFoundException e) { // On Android 2.2, the above method may cause exception due to not finding an activity to handle the intent. Use the method below instead. 
Intent mediaChooser = new Intent(Intent.ACTION_GET_CONTENT); mediaChooser.setType("video/*"); startActivityForResult(mediaChooser, IntentRequestCode.PICK_UP_VIDEO); } catch (SecurityException e) { // When picking up videos, there may be an exception: // java.lang.SecurityException: // Permission Denial: // starting Intent { act=android.intent.action.PICK // dat=content://media/external/video/media // cmp=com.android.music/.VideoBrowserActivity } from ProcessRecord // Try another way to start the intent intent = new Intent(Intent.ACTION_PICK, null); intent.setType("video/*"); try { startActivityForResult(intent, IntentRequestCode.PICK_UP_VIDEO); } catch (Exception ex) { DialogUtil.showExceptionAlertDialog(UploadVideoActivity.this, getString(R.string.cannotPickUpVideo), getString(R.string.notSupportedOnDevice)); } } } private void onVideoReady() { MediaController mediaController = new MediaController(this); videoViewPreview.setVisibility(View.VISIBLE); videoViewPreview.setVideoPath(videoFileName); videoViewPreview.setMediaController(mediaController); videoViewPreview.requestFocus(); videoViewPreview.start(); videoViewPreview.pause(); imageButtonUploadVideo.setEnabled(true); imageButtonUploadVideo.setImageResource(R.drawable.upload); textViewFilePath.setText(videoFileName); editTextVideoTitle.setText(""); editTextVideoDescription.setText(""); textViewVideoUrl.setText(getString(R.string.noUrlYet)); Toast.makeText(this, R.string.pressVideoToPreview, Toast.LENGTH_LONG).show(); } private void resetProgress() { progressBarUploadVideo.setProgress(0); textViewProgress.setText(" 00%"); } private void fetchToken() { new FetchYouTubeTokenTask(this, selectedGoogleAccount, fetchTokenHandler).execute(); } public void preventUploadingSameVideo() { imageButtonUploadVideo.setEnabled(false); imageButtonUploadVideo.setImageResource(R.drawable.upload_disabled); } /** * Pick up a Google account from the device. See http://developer.android.com/google/auth/http-auth.html. */ public void chooseAccount() { String[] accountTypes = new String[]{"com.google"}; Intent intent = AccountPicker.newChooseAccountIntent(null, null, accountTypes, false, null, null, null, null); startActivityForResult(intent, IntentRequestCode.REQUEST_ACCOUNT_PICKER); } /** * Uploads user selected video to the user's YouTube account using OAuth2 for authentication. */ public void uploadYouTubeVideo() { youtubeUploadTask = new YouTubeUploadTask(this, videoFileName, getString(R.string.app_name), editTextVideoTitle.getText().toString(), editTextVideoDescription.getText().toString(), selectedGoogleAccount, uploadProgressHandler); youtubeUploadTask.execute(); } public TextView getTextViewProgress() { return textViewProgress; } public ProgressBar getProgressBarUploadVideo() { return progressBarUploadVideo; } public TextView getTextViewVideoUrl() { return textViewVideoUrl; } public void setSelectedGoogleAccount(String account) { selectedGoogleAccount = account; } }
Update a comment
YouTubeUploaderSDK/src/main/java/com/wms/youtubeuploader/sdk/activity/UploadVideoActivity.java
Update a comment
<ide><path>YouTubeUploaderSDK/src/main/java/com/wms/youtubeuploader/sdk/activity/UploadVideoActivity.java <ide> startActivityForResult(mediaChooser, IntentRequestCode.PICK_UP_VIDEO); <ide> } <ide> catch (SecurityException e) { <del> // When picking up videos, there may be an exception: <add> // When picking up videos, there may be an exception like: <ide> // java.lang.SecurityException: <ide> // Permission Denial: <ide> // starting Intent { act=android.intent.action.PICK
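The startPickVideo method in this record layers three strategies for launching a video picker: ACTION_PICK against MediaStore.Video.Media.EXTERNAL_CONTENT_URI, a generic ACTION_GET_CONTENT chooser when no activity handles the pick, and a type-only ACTION_PICK when the concrete picker denies access. A minimal sketch of that fallback chain, assuming a hypothetical REQUEST_PICK constant in place of IntentRequestCode.PICK_UP_VIDEO:

import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.net.Uri;
import android.provider.MediaStore;

class VideoPickerSketch {
    static final int REQUEST_PICK = 1001; // hypothetical request code, not part of the SDK above

    static void pickVideoWithFallbacks(Activity activity) {
        Intent pick = new Intent(Intent.ACTION_PICK, MediaStore.Video.Media.EXTERNAL_CONTENT_URI);
        try {
            activity.startActivityForResult(pick, REQUEST_PICK);
        } catch (ActivityNotFoundException e) {
            // No activity handles ACTION_PICK on this device: fall back to a generic chooser.
            Intent chooser = new Intent(Intent.ACTION_GET_CONTENT);
            chooser.setType("video/*");
            activity.startActivityForResult(chooser, REQUEST_PICK);
        } catch (SecurityException e) {
            // The concrete picker refused the caller: retry with a type-only pick intent.
            Intent typed = new Intent(Intent.ACTION_PICK, (Uri) null);
            typed.setType("video/*");
            activity.startActivityForResult(typed, REQUEST_PICK);
        }
    }
}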
Java
apache-2.0
436613ad079de78ce460fd206ecb205af9aa2265
0
imageprocessor/cv4j,imageprocessor/cv4j
package com.cv4j.image.util;

import com.cv4j.core.datamodel.CV4JImage;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import edu.uthscsa.ric.volume.formats.jpeg.JPEGLosslessDecoderWrapper;

/**
 * Created by gloomy fish on 2017/7/8.
 */
public class ImageCodecs {

    public static CV4JImage read(String filePath) {
        CV4JImage image = null;
        if (filePath.endsWith(".jpg") || filePath.endsWith(".JPG") || filePath.endsWith(".JPEG") || filePath.endsWith(".jpeg")) {
            try {
                image = JPEGLosslessDecoderWrapper.readImage(getBytesFromFile(new File(filePath)));
            } catch (IOException ioe) {
            }
        } else if (filePath.endsWith(".png") || filePath.endsWith(".PNG")) {
            // TODO: zhigang
        }
        return image;
    }

    private static byte[] getBytesFromFile(File file) {
        if (file == null) return null;

        byte[] ret = null;

        FileInputStream in = null;
        ByteArrayOutputStream out = null;
        try {
            in = new FileInputStream(file);
            out = new ByteArrayOutputStream(4096);
            byte[] b = new byte[4096];
            int n;
            while ((n = in.read(b)) != -1) {
                out.write(b, 0, n);
            }
            ret = out.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
        }
        return ret;
    }
}
cv4j/src/main/java/com/cv4j/image/util/ImageCodecs.java
package com.cv4j.image.util;

import com.cv4j.core.datamodel.CV4JImage;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import edu.uthscsa.ric.volume.formats.jpeg.JPEGLosslessDecoderWrapper;

/**
 * Created by gloomy fish on 2017/7/8.
 */
public class ImageCodecs {

    public static CV4JImage read(String filePath) {
        CV4JImage image = null;
        if (filePath.endsWith(".jpg") || filePath.endsWith(".JPG") || filePath.endsWith(".JPEG") || filePath.endsWith(".jpeg")) {
            try {
                image = JPEGLosslessDecoderWrapper.readImage(getBytesFromFile(new File(filePath)));
            } catch (IOException ioe) {
            }
        } else if (filePath.endsWith(".png") || filePath.endsWith(".PNG")) {
            // TODO: zhigang
        }
        return image;
    }

    private static byte[] getBytesFromFile(File file) {
        byte[] ret = null;

        FileInputStream in = null;
        ByteArrayOutputStream out = null;
        try {
            if (file == null) {
                return null;
            }

            in = new FileInputStream(file);
            out = new ByteArrayOutputStream(4096);
            byte[] b = new byte[4096];
            int n;
            while ((n = in.read(b)) != -1) {
                out.write(b, 0, n);
            }
            ret = out.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
        }
        return ret;
    }
}
update ImageCodecs
cv4j/src/main/java/com/cv4j/image/util/ImageCodecs.java
update ImageCodecs
<ide><path>cv4j/src/main/java/com/cv4j/image/util/ImageCodecs.java <ide> } <ide> <ide> private static byte[] getBytesFromFile(File file) { <add> <add> if (file == null) return null; <add> <ide> byte[] ret = null; <ide> <ide> FileInputStream in = null; <ide> ByteArrayOutputStream out = null; <ide> try { <del> if (file == null) { <del> return null; <del> } <del> <ide> in = new FileInputStream(file); <ide> out = new ByteArrayOutputStream(4096); <ide> byte[] b = new byte[4096];
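The diff hoists the file == null guard out of the try block, so the method now bails out before any stream state is created. A sketch of the same guard-first shape written with try-with-resources, which also removes the need for the closeQuietly calls in the finally block; readAllBytesOrNull is an illustrative name, not part of the cv4j API:

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

class GuardFirstSketch {
    static byte[] readAllBytesOrNull(File file) {
        if (file == null) return null; // guard before any resource is opened
        try (FileInputStream in = new FileInputStream(file);
             ByteArrayOutputStream out = new ByteArrayOutputStream(4096)) {
            byte[] buffer = new byte[4096];
            int n;
            while ((n = in.read(buffer)) != -1) {
                out.write(buffer, 0, n);
            }
            return out.toByteArray(); // both streams are closed automatically
        } catch (IOException e) {
            e.printStackTrace(); // mirrors the record's lenient error handling
            return null;
        }
    }
}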
Java
mit
d681c652cba0c0bab2fc1dfdb764681e0a74b1ed
0
lemmy/tlaplus,lemmy/tlaplus,tlaplus/tlaplus,lemmy/tlaplus,tlaplus/tlaplus,lemmy/tlaplus,tlaplus/tlaplus,tlaplus/tlaplus
// Copyright (c) 2012 Microsoft Corporation.  All rights reserved.
package tlc2.tool.fp;

import java.io.IOException;
import java.util.Random;
import java.util.concurrent.CountDownLatch;

public abstract class MultiThreadedFPSetTeset extends AbstractFPSetTest {

    private static final int NUM_THREADS = Integer.getInteger(MultiThreadedFPSetTeset.class.getName() + ".numThreads", 2);
    private static final long INSERTIONS = Long.getLong(MultiThreadedFPSetTeset.class.getName() + ".insertions", Integer.MAX_VALUE + 2L);

    /**
     * Test filling a {@link FPSet} with max int + 2L random fingerprints using
     * multiple threads
     *
     * @throws IOException
     * @throws InterruptedException
     */
    public void testMaxFPSetSizeRnd() throws IOException, InterruptedException {
        final FPSet fpSet = getFPSetInitialized();
        final CountDownLatch latch = new CountDownLatch(NUM_THREADS);

        long seed = 15041980L;
        final FingerPrintGenerator[] fpgs = new FingerPrintGenerator[NUM_THREADS];
        for (int i = 0; i < NUM_THREADS; i++) {
            fpgs[i] = new FingerPrintGenerator(i, fpSet, latch, seed++, INSERTIONS);
            Thread thread = new Thread(fpgs[i], "Producer#" + i);
            thread.start();
        }

        // wait for runnables/fpg to tear down the latch
        latch.await();

        // print stats
        for (int i = 0; i < fpgs.length; i++) {
            final FingerPrintGenerator fpg = fpgs[i];
            System.out.println("Producer: " + fpg.getId() + " puts: " + fpg.getPuts());
            System.out.println("puts/collisions: " + (double) (fpg.getPuts() / fpg.getCollisions()));
        }

        assertEquals(INSERTIONS - 1, fpSet.size());
    }

    public class FingerPrintGenerator implements Runnable {

        private final long insertions;
        private final Random rnd;
        private final FPSet fpSet;
        private final CountDownLatch latch;
        private final int id;
        private long puts = 0L;
        private long collisions = 0L;

        public FingerPrintGenerator(int id, FPSet fpSet, CountDownLatch latch, long seed, long insertions) {
            this.id = id;
            this.fpSet = fpSet;
            this.latch = latch;
            this.rnd = new Random(seed);
            this.insertions = insertions;
        }

        /* (non-Javadoc)
         * @see java.lang.Runnable#run()
         */
        public void run() {
            long predecessor = 0L;
            while (fpSet.size() < insertions) {
                try {
                    // make sure set still contains predecessor
                    if (predecessor != 0L) {
                        assertTrue(fpSet.contains(predecessor));
                    }

                    predecessor = rnd.nextLong();

                    boolean put = fpSet.put(predecessor);
                    if (put == false) {
                        puts++;
                    } else {
                        collisions++;
                    }

                    // First producer prints stats
                    if (id == 0) {
                        printInsertionSpeed(fpSet.size());
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    fail("Unexpected");
                }
            }
            latch.countDown();
        }

        public int getId() {
            return id;
        }

        /**
         * @return the puts
         */
        public long getPuts() {
            return puts;
        }

        /**
         * @return the collisions
         */
        public long getCollisions() {
            return collisions == 0 ? 1 : collisions;
        }
    }
}
tlatools/test-long/tlc2/tool/fp/MultiThreadedFPSetTeset.java
// Copyright (c) 2012 Microsoft Corporation.  All rights reserved.
package tlc2.tool.fp;

import java.io.IOException;
import java.util.Random;
import java.util.concurrent.CountDownLatch;

public abstract class MultiThreadedFPSetTeset extends AbstractFPSetTest {

    private static final int NUM_THREADS = Integer.getInteger(MultiThreadedFPSetTeset.class.getName() + ".numThreads", 2);
    private static final long INSERTIONS = Long.getLong(MultiThreadedFPSetTeset.class.getName() + ".insertions", Integer.MAX_VALUE + 2L);

    /**
     * Test filling a {@link FPSet} with max int + 2L random fingerprints using
     * multiple threads
     *
     * @throws IOException
     * @throws InterruptedException
     */
    public void testMaxFPSetSizeRnd() throws IOException, InterruptedException {
        final FPSet fpSet = getFPSetInitialized();
        final CountDownLatch latch = new CountDownLatch(NUM_THREADS);

        long seed = 15041980L;
        final FingerPrintGenerator[] fpgs = new FingerPrintGenerator[NUM_THREADS];
        for (int i = 0; i < NUM_THREADS; i++) {
            fpgs[i] = new FingerPrintGenerator(i, fpSet, latch, seed++, INSERTIONS);
            Thread thread = new Thread(fpgs[i], "Producer#" + i);
            thread.start();
        }

        // wait for runnables/fpg to tear down the latch
        latch.await();

        // print stats
        for (int i = 0; i < fpgs.length; i++) {
            final FingerPrintGenerator fpg = fpgs[i];
            System.out.println("Producer: " + fpg.getId() + " puts: " + fpg.getPuts());
            System.out.println("puts/collisions: " + (double) (fpg.getPuts() / fpg.getCollisions()));
        }

        assertEquals(INSERTIONS - 1, fpSet.size());
    }

    public class FingerPrintGenerator implements Runnable {

        private final long insertions;
        private final Random rnd;
        private final FPSet fpSet;
        private final CountDownLatch latch;
        private final int id;
        private long puts = 0L;
        private long collisions = 0L;

        public FingerPrintGenerator(int id, FPSet fpSet, CountDownLatch latch, long seed, long insertions) {
            this.id = id;
            this.fpSet = fpSet;
            this.latch = latch;
            this.rnd = new Random(seed);
            this.insertions = insertions;
        }

        /* (non-Javadoc)
         * @see java.lang.Runnable#run()
         */
        public void run() {
            long predecessor = 0L;
            while (fpSet.size() < insertions) {
                try {
                    // make sure set still contains predecessor
                    if (predecessor != 0L) {
                        assertTrue(fpSet.contains(predecessor));
                    }

                    predecessor = rnd.nextLong();

                    boolean put = fpSet.put(predecessor);
                    if (put == false) {
                        puts++;
                    } else {
                        collisions++;
                    }

                    // First producer prints stats
                    if (id == 0) {
                        printInsertionSpeed(fpSet.size());
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    fail("Unexpected");
                }
            }
            latch.countDown();
        }

        public int getId() {
            return id;
        }

        /**
         * @return the puts
         */
        public long getPuts() {
            return puts;
        }

        /**
         * @return the collisions
         */
        public long getCollisions() {
            return collisions;
        }
    }
}
Fix div/0 bug
tlatools/test-long/tlc2/tool/fp/MultiThreadedFPSetTeset.java
Fix div/0 bug
<ide><path>tlatools/test-long/tlc2/tool/fp/MultiThreadedFPSetTeset.java <ide> * @return the collisions <ide> */ <ide> public long getCollisions() { <del> return collisions; <add> return collisions == 0 ? 1 : collisions; <ide> } <ide> } <ide> }
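The fix matters because getPuts() / getCollisions() is long division performed before the cast to double, and dividing a long by zero throws ArithmeticException instead of producing Infinity. A self-contained check of that behavior; clamping zero collisions to one slightly distorts the printed ratio, which the commit accepts as the cost of not crashing the stats loop:

public class DivByZeroDemo {
    public static void main(String[] args) {
        long puts = 42L;
        long collisions = 0L;
        long safe = collisions == 0 ? 1 : collisions;      // the guard from the diff
        System.out.println((double) (puts / safe));        // prints 42.0
        System.out.println(1.0 * puts / collisions);       // prints Infinity: double division never throws
        System.out.println((double) (puts / collisions));  // throws ArithmeticException: / by zero
    }
}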
Java
mit
6adfa4b1b573eacf2ed2034a0985feb5ea68330e
0
douggie/XChange
package org.knowm.xchange.coinbasepro; import java.math.BigDecimal; import java.math.MathContext; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import java.util.stream.Collectors; import org.knowm.xchange.coinbasepro.dto.CoinbaseProTransfer; import org.knowm.xchange.coinbasepro.dto.account.CoinbaseProAccount; import org.knowm.xchange.coinbasepro.dto.marketdata.*; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProFill; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProOrder; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProOrderFlags; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProPlaceLimitOrder; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProPlaceMarketOrder; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProPlaceOrder; import org.knowm.xchange.currency.Currency; import org.knowm.xchange.currency.CurrencyPair; import org.knowm.xchange.dto.Order; import org.knowm.xchange.dto.Order.OrderStatus; import org.knowm.xchange.dto.Order.OrderType; import org.knowm.xchange.dto.account.Balance; import org.knowm.xchange.dto.account.FundingRecord; import org.knowm.xchange.dto.account.Wallet; import org.knowm.xchange.dto.marketdata.OrderBook; import org.knowm.xchange.dto.marketdata.Ticker; import org.knowm.xchange.dto.marketdata.Trade; import org.knowm.xchange.dto.marketdata.Trades; import org.knowm.xchange.dto.marketdata.Trades.TradeSortType; import org.knowm.xchange.dto.meta.CurrencyMetaData; import org.knowm.xchange.dto.meta.CurrencyPairMetaData; import org.knowm.xchange.dto.meta.ExchangeMetaData; import org.knowm.xchange.dto.meta.WalletHealth; import org.knowm.xchange.dto.trade.LimitOrder; import org.knowm.xchange.dto.trade.MarketOrder; import org.knowm.xchange.dto.trade.OpenOrders; import org.knowm.xchange.dto.trade.StopOrder; import org.knowm.xchange.dto.trade.UserTrade; import org.knowm.xchange.dto.trade.UserTrades; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class CoinbaseProAdapters { private static final Logger logger = LoggerFactory.getLogger(CoinbaseProAdapters.class); private CoinbaseProAdapters() {} protected static Date parseDate(final String rawDate) { String modified; if (rawDate.length() > 23) { modified = rawDate.substring(0, 23); } else if (rawDate.endsWith("Z")) { switch (rawDate.length()) { case 20: modified = rawDate.substring(0, 19) + ".000"; break; case 22: modified = rawDate.substring(0, 21) + "00"; break; case 23: modified = rawDate.substring(0, 22) + "0"; break; default: modified = rawDate; break; } } else { switch (rawDate.length()) { case 19: modified = rawDate + ".000"; break; case 21: modified = rawDate + "00"; break; case 22: modified = rawDate + "0"; break; default: modified = rawDate; break; } } try { SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); return dateFormat.parse(modified); } catch (ParseException e) { logger.warn("unable to parse rawDate={} modified={}", rawDate, modified, e); return null; } } public static CurrencyPair toCurrencyPair(final String productId) { final String[] parts = productId.split("-"); return new CurrencyPair(parts[0], parts[1]); } public static Ticker adaptTicker( CoinbaseProProductTicker ticker, CoinbaseProProductStats stats, CurrencyPair currencyPair) { return new Ticker.Builder() .instrument(currencyPair) .last(ticker.getPrice()) .open(stats.getOpen()) .high(stats.getHigh()) .low(stats.getLow()) .bid(ticker.getBid()) 
.ask(ticker.getAsk()) .volume(ticker.getVolume()) .timestamp(parseDate(ticker.getTime())) .build(); } public static List<Ticker> adaptTickers(Map<String, CoinbaseProStats> stats) { List<Ticker> tickers = new LinkedList<>(); for (String pair : stats.keySet()) { CoinbaseProStats pairStats = stats.get(pair); tickers.add( new Ticker.Builder() .instrument(new CurrencyPair(pair)) .last(pairStats.getLast()) .open(pairStats.getOpen()) .high(pairStats.getHigh()) .low(pairStats.getLow()) .volume(pairStats.getVolume()) .build()); } return tickers; } public static OrderBook adaptOrderBook( CoinbaseProProductBook book, CurrencyPair currencyPair, Date date) { List<LimitOrder> asks = toLimitOrderList(book.getAsks(), OrderType.ASK, currencyPair); List<LimitOrder> bids = toLimitOrderList(book.getBids(), OrderType.BID, currencyPair); return new OrderBook(date, asks, bids); } public static OrderBook adaptOrderBook(CoinbaseProProductBook book, CurrencyPair currencyPair) { return adaptOrderBook(book, currencyPair, null); } private static List<LimitOrder> toLimitOrderList( CoinbaseProProductBookEntry[] levels, OrderType orderType, CurrencyPair currencyPair) { List<LimitOrder> allLevels = new ArrayList<>(); if (levels != null) { for (CoinbaseProProductBookEntry ask : levels) { allLevels.add( new LimitOrder(orderType, ask.getVolume(), currencyPair, "0", null, ask.getPrice())); } } return allLevels; } public static Wallet adaptAccountInfo(CoinbaseProAccount[] coinbaseProAccounts) { List<Balance> balances = new ArrayList<>(coinbaseProAccounts.length); for (CoinbaseProAccount coinbaseProAccount : coinbaseProAccounts) { balances.add( new Balance( Currency.getInstance(coinbaseProAccount.getCurrency()), coinbaseProAccount.getBalance(), coinbaseProAccount.getAvailable(), coinbaseProAccount.getHold())); } return Wallet.Builder.from(balances).id(coinbaseProAccounts[0].getProfile_id()).build(); } @SuppressWarnings("unchecked") public static OpenOrders adaptOpenOrders(CoinbaseProOrder[] coinbaseExOpenOrders) { final Map<Boolean, List<Order>> twoTypes = Arrays.stream(coinbaseExOpenOrders) .map(CoinbaseProAdapters::adaptOrder) .collect(Collectors.partitioningBy(t -> t instanceof LimitOrder)); @SuppressWarnings("rawtypes") List limitOrders = twoTypes.get(true); return new OpenOrders(limitOrders, twoTypes.get(false)); } public static Order adaptOrder(CoinbaseProOrder order) { OrderType type = "buy".equals(order.getSide()) ? 
OrderType.BID : OrderType.ASK; CurrencyPair currencyPair = new CurrencyPair(order.getProductId().replace('-', '/')); Order.Builder builder = null; if (order.getType() == null) { return null; } switch (order.getType()) { case "market": builder = new MarketOrder.Builder(type, currencyPair); break; case "limit": if (order.getStop() == null) { builder = new LimitOrder.Builder(type, currencyPair).limitPrice(order.getPrice()); } else { builder = new StopOrder.Builder(type, currencyPair).stopPrice(order.getStopPrice()); } break; } if (builder == null) { return null; } builder .orderStatus(adaptOrderStatus(order)) .originalAmount(order.getSize()) .id(order.getId()) .timestamp(parseDate(order.getCreatedAt())) .cumulativeAmount(order.getFilledSize()) .fee(order.getFillFees()); BigDecimal averagePrice; if (order.getFilledSize().signum() != 0 && order.getExecutedvalue().signum() != 0) { averagePrice = order.getExecutedvalue().divide(order.getFilledSize(), MathContext.DECIMAL32); } else { averagePrice = BigDecimal.ZERO; } return builder.averagePrice(averagePrice).build(); } public static OrderStatus[] adaptOrderStatuses(CoinbaseProOrder[] orders) { OrderStatus[] orderStatuses = new OrderStatus[orders.length]; int i = 0; for (CoinbaseProOrder coinbaseProOrder : orders) { orderStatuses[i++] = adaptOrderStatus(coinbaseProOrder); } return orderStatuses; } /** The status from the CoinbaseProOrder object converted to xchange status */ public static OrderStatus adaptOrderStatus(CoinbaseProOrder order) { if (order.getStatus() == null) { return OrderStatus.UNKNOWN; } switch (order.getStatus()) { case "pending": return OrderStatus.PENDING_NEW; case "done": case "settled": if (order.getDoneReason() == null) { return OrderStatus.UNKNOWN; } switch (order.getDoneReason()) { case "filled": return OrderStatus.FILLED; case "canceled": return OrderStatus.CANCELED; } return OrderStatus.UNKNOWN; } if (order.getFilledSize().signum() == 0) { if ("open".equals(order.getStatus()) && order.getStop() != null) { // This is a massive edge case of a stop triggering but not immediately // fulfilling. STOPPED status is only currently used by the HitBTC and // YoBit implementations and in both cases it looks like a // misunderstanding and those should return CANCELLED. Should we just // remove this status? return OrderStatus.STOPPED; } return OrderStatus.NEW; } if (order.getFilledSize().compareTo(BigDecimal.ZERO) > 0 // if size >= filledSize order should be partially filled && order.getSize().compareTo(order.getFilledSize()) >= 0) return OrderStatus.PARTIALLY_FILLED; return OrderStatus.UNKNOWN; } public static Trades adaptTrades( List<CoinbaseProTrade> coinbaseProTradesList, CurrencyPair currencyPair) { CoinbaseProTrade[] tradeArray = new CoinbaseProTrade[coinbaseProTradesList.size()]; coinbaseProTradesList.toArray(tradeArray); return CoinbaseProAdapters.adaptTrades(tradeArray, currencyPair); } public static UserTrades adaptTradeHistory(CoinbaseProFill[] coinbaseExFills) { List<UserTrade> trades = new ArrayList<>(coinbaseExFills.length); for (CoinbaseProFill fill : coinbaseExFills) { CurrencyPair currencyPair = new CurrencyPair(fill.getProductId().replace('-', '/')); trades.add( new UserTrade.Builder() .type("buy".equals(fill.getSide()) ? 
OrderType.BID : OrderType.ASK) .originalAmount(fill.getSize()) .currencyPair(currencyPair) .price(fill.getPrice()) .timestamp(parseDate(fill.getCreatedAt())) .id(String.valueOf(fill.getTradeId())) .orderId(fill.getOrderId()) .feeAmount(fill.getFee()) .feeCurrency(currencyPair.counter) .build()); } return new UserTrades(trades, TradeSortType.SortByID); } public static Trades adaptTrades(CoinbaseProTrade[] coinbaseExTrades, CurrencyPair currencyPair) { List<Trade> trades = new ArrayList<>(coinbaseExTrades.length); for (CoinbaseProTrade trade : coinbaseExTrades) { // yes, sell means buy for coinbasePro reported trades.. OrderType type = "sell".equals(trade.getSide()) ? OrderType.BID : OrderType.ASK; trades.add( new Trade.Builder() .type(type) .originalAmount(trade.getSize()) .price(trade.getPrice()) .instrument(currencyPair) .timestamp(parseDate(trade.getTimestamp())) .id(String.valueOf(trade.getTradeId())) .makerOrderId(trade.getMakerOrderId()) .takerOrderId(trade.getTakerOrderId()) .build()); } return new Trades(trades, coinbaseExTrades[0].getTradeId(), TradeSortType.SortByID); } public static CurrencyPair adaptCurrencyPair(CoinbaseProProduct product) { return new CurrencyPair(product.getBaseCurrency(), product.getTargetCurrency()); } private static Currency adaptCurrency(CoinbaseProCurrency currency) { return new Currency(currency.getId()); } private static int numberOfDecimals(BigDecimal value) { double d = value.doubleValue(); return -(int) Math.round(Math.log10(d)); } public static ExchangeMetaData adaptToExchangeMetaData( ExchangeMetaData exchangeMetaData, CoinbaseProProduct[] products, CoinbaseProCurrency[] cbCurrencies) { Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = exchangeMetaData == null ? new HashMap<>() : exchangeMetaData.getCurrencyPairs(); Map<Currency, CurrencyMetaData> currencies = exchangeMetaData == null ? new HashMap<>() : exchangeMetaData.getCurrencies(); for (CoinbaseProProduct product : products) { if (!"online".equals(product.getStatus())) { continue; } CurrencyPair pair = adaptCurrencyPair(product); CurrencyPairMetaData staticMetaData = currencyPairs.get(pair); int baseScale = numberOfDecimals(product.getBaseIncrement()); int priceScale = numberOfDecimals(product.getQuoteIncrement()); boolean marketOrderAllowed = !product.isLimitOnly(); currencyPairs.put( pair, new CurrencyPairMetaData( new BigDecimal("0.50"), // Trading fee at Coinbase is 0.5 % product.getBaseMinSize(), product.getBaseMaxSize(), product.getMinMarketFunds(), product.getMaxMarketFunds(), baseScale, priceScale, null, staticMetaData != null ? staticMetaData.getFeeTiers() : null, null, pair.counter, marketOrderAllowed)); } Arrays.stream(cbCurrencies) .forEach( currency -> currencies.put( adaptCurrency(currency), new CurrencyMetaData( numberOfDecimals(currency.getMaxPrecision()), BigDecimal.ZERO, currency.getDetails().getMinWithdrawalAmount(), "online".equals(currency.getStatus()) ? WalletHealth.ONLINE : WalletHealth.OFFLINE))); return new ExchangeMetaData( currencyPairs, currencies, exchangeMetaData == null ? null : exchangeMetaData.getPublicRateLimits(), exchangeMetaData == null ? null : exchangeMetaData.getPrivateRateLimits(), true); } public static String adaptProductID(CurrencyPair currencyPair) { return currencyPair.base.getCurrencyCode() + "-" + currencyPair.counter.getCurrencyCode(); } public static CoinbaseProPlaceOrder.Side adaptSide(OrderType orderType) { return orderType == OrderType.ASK ? 
CoinbaseProPlaceOrder.Side.sell : CoinbaseProPlaceOrder.Side.buy; } public static CoinbaseProPlaceOrder.Stop adaptStop(OrderType orderType) { return orderType == OrderType.ASK ? CoinbaseProPlaceOrder.Stop.loss : CoinbaseProPlaceOrder.Stop.entry; } public static CoinbaseProPlaceLimitOrder adaptCoinbaseProPlaceLimitOrder(LimitOrder limitOrder) { CoinbaseProPlaceLimitOrder.Builder builder = new CoinbaseProPlaceLimitOrder.Builder() .clientOid(limitOrder.getUserReference()) .price(limitOrder.getLimitPrice()) .type(CoinbaseProPlaceOrder.Type.limit) .productId(adaptProductID(limitOrder.getCurrencyPair())) .side(adaptSide(limitOrder.getType())) .size(limitOrder.getOriginalAmount()); if (limitOrder.getOrderFlags().contains(CoinbaseProOrderFlags.POST_ONLY)) builder.postOnly(true); if (limitOrder.getOrderFlags().contains(CoinbaseProOrderFlags.FILL_OR_KILL)) builder.timeInForce(CoinbaseProPlaceLimitOrder.TimeInForce.FOK); if (limitOrder.getOrderFlags().contains(CoinbaseProOrderFlags.IMMEDIATE_OR_CANCEL)) builder.timeInForce(CoinbaseProPlaceLimitOrder.TimeInForce.IOC); return builder.build(); } public static CoinbaseProPlaceMarketOrder adaptCoinbaseProPlaceMarketOrder( MarketOrder marketOrder) { return new CoinbaseProPlaceMarketOrder.Builder() .productId(adaptProductID(marketOrder.getCurrencyPair())) .clientOid(marketOrder.getUserReference()) .type(CoinbaseProPlaceOrder.Type.market) .side(adaptSide(marketOrder.getType())) .funds(marketOrder.getType() == OrderType.BID ? marketOrder.getOriginalAmount() : null) .size(marketOrder.getType() == OrderType.ASK ? marketOrder.getOriginalAmount() : null) .build(); } /** * Creates a 'stop' order. Stop limit order converts to a limit order when the stop amount is * triggered. The limit order can have a different price than the stop price. 
* * <p>If the stop order has no limit price it will execute as a market order once the stop price * is broken * * @param stopOrder * @return */ public static CoinbaseProPlaceOrder adaptCoinbaseProStopOrder(StopOrder stopOrder) { // stop orders can also execute as 'stop limit' orders, that is converting to // a limit order, but a traditional 'stop' order converts to a market order if (stopOrder.getLimitPrice() == null) { return new CoinbaseProPlaceMarketOrder.Builder() .productId(adaptProductID(stopOrder.getCurrencyPair())) .clientOid(stopOrder.getUserReference()) .type(CoinbaseProPlaceOrder.Type.market) .side(adaptSide(stopOrder.getType())) .size(stopOrder.getOriginalAmount()) .stop(adaptStop(stopOrder.getType())) .stopPrice(stopOrder.getStopPrice()) .build(); } return new CoinbaseProPlaceLimitOrder.Builder() .productId(adaptProductID(stopOrder.getCurrencyPair())) .clientOid(stopOrder.getUserReference()) .type(CoinbaseProPlaceOrder.Type.limit) .side(adaptSide(stopOrder.getType())) .size(stopOrder.getOriginalAmount()) .stop(adaptStop(stopOrder.getType())) .stopPrice(stopOrder.getStopPrice()) .price(stopOrder.getLimitPrice()) .build(); } public static FundingRecord adaptFundingRecord( Currency currency, CoinbaseProTransfer coinbaseProTransfer) { FundingRecord.Status status = FundingRecord.Status.PROCESSING; Date processedAt = coinbaseProTransfer.processedAt(); Date canceledAt = coinbaseProTransfer.canceledAt(); if (canceledAt != null) status = FundingRecord.Status.CANCELLED; else if (processedAt != null) status = FundingRecord.Status.COMPLETE; String address = coinbaseProTransfer.getDetails().getCryptoAddress(); if (address == null) address = coinbaseProTransfer.getDetails().getSentToAddress(); String cryptoTransactionHash = coinbaseProTransfer.getDetails().getCryptoTransactionHash(); String transactionHash = adaptTransactionHash(currency.getSymbol(), cryptoTransactionHash); return new FundingRecord( address, coinbaseProTransfer.getDetails().getDestinationTag(), coinbaseProTransfer.createdAt(), currency, coinbaseProTransfer.amount(), coinbaseProTransfer.getId(), transactionHash, coinbaseProTransfer.type(), status, null, null, null); } // crypto_transaction_link: "https://etherscan.io/tx/0x{{txId}}" private static String adaptTransactionHash(String currency, String transactionHash) { switch (currency) { case "ZRX": case "BAT": case "LOOM": case "CVC": case "DNT": case "MANA": case "GNT": case "REP": case "LINK": case "ETH": case "ETC": case "USDC": case "DAI": case "ZIL": case "MKR": transactionHash = transactionHash != null ? "0x" + transactionHash : null; break; } return transactionHash; } }
xchange-coinbasepro/src/main/java/org/knowm/xchange/coinbasepro/CoinbaseProAdapters.java
package org.knowm.xchange.coinbasepro; import java.math.BigDecimal; import java.math.MathContext; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import java.util.stream.Collectors; import org.knowm.xchange.coinbasepro.dto.CoinbaseProTransfer; import org.knowm.xchange.coinbasepro.dto.account.CoinbaseProAccount; import org.knowm.xchange.coinbasepro.dto.marketdata.*; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProFill; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProOrder; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProOrderFlags; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProPlaceLimitOrder; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProPlaceMarketOrder; import org.knowm.xchange.coinbasepro.dto.trade.CoinbaseProPlaceOrder; import org.knowm.xchange.currency.Currency; import org.knowm.xchange.currency.CurrencyPair; import org.knowm.xchange.dto.Order; import org.knowm.xchange.dto.Order.OrderStatus; import org.knowm.xchange.dto.Order.OrderType; import org.knowm.xchange.dto.account.Balance; import org.knowm.xchange.dto.account.FundingRecord; import org.knowm.xchange.dto.account.Wallet; import org.knowm.xchange.dto.marketdata.OrderBook; import org.knowm.xchange.dto.marketdata.Ticker; import org.knowm.xchange.dto.marketdata.Trade; import org.knowm.xchange.dto.marketdata.Trades; import org.knowm.xchange.dto.marketdata.Trades.TradeSortType; import org.knowm.xchange.dto.meta.CurrencyMetaData; import org.knowm.xchange.dto.meta.CurrencyPairMetaData; import org.knowm.xchange.dto.meta.ExchangeMetaData; import org.knowm.xchange.dto.meta.WalletHealth; import org.knowm.xchange.dto.trade.LimitOrder; import org.knowm.xchange.dto.trade.MarketOrder; import org.knowm.xchange.dto.trade.OpenOrders; import org.knowm.xchange.dto.trade.StopOrder; import org.knowm.xchange.dto.trade.UserTrade; import org.knowm.xchange.dto.trade.UserTrades; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class CoinbaseProAdapters { private static final Logger logger = LoggerFactory.getLogger(CoinbaseProAdapters.class); private CoinbaseProAdapters() {} protected static Date parseDate(final String rawDate) { String modified; if (rawDate.length() > 23) { modified = rawDate.substring(0, 23); } else if (rawDate.endsWith("Z")) { switch (rawDate.length()) { case 20: modified = rawDate.substring(0, 19) + ".000"; break; case 22: modified = rawDate.substring(0, 21) + "00"; break; case 23: modified = rawDate.substring(0, 22) + "0"; break; default: modified = rawDate; break; } } else { switch (rawDate.length()) { case 19: modified = rawDate + ".000"; break; case 21: modified = rawDate + "00"; break; case 22: modified = rawDate + "0"; break; default: modified = rawDate; break; } } try { SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); return dateFormat.parse(modified); } catch (ParseException e) { logger.warn("unable to parse rawDate={} modified={}", rawDate, modified, e); return null; } } public static CurrencyPair toCurrencyPair(final String productId) { final String[] parts = productId.split("-"); return new CurrencyPair(parts[0], parts[1]); } public static Ticker adaptTicker( CoinbaseProProductTicker ticker, CoinbaseProProductStats stats, CurrencyPair currencyPair) { return new Ticker.Builder() .instrument(currencyPair) .last(ticker.getPrice()) .open(stats.getOpen()) .high(stats.getHigh()) .low(stats.getLow()) .bid(ticker.getBid()) 
.ask(ticker.getAsk()) .volume(ticker.getVolume()) .timestamp(parseDate(ticker.getTime())) .build(); } public static List<Ticker> adaptTickers(Map<String, CoinbaseProStats> stats) { List<Ticker> tickers = new LinkedList<>(); for (String pair : stats.keySet()) { CoinbaseProStats pairStats = stats.get(pair); tickers.add( new Ticker.Builder() .instrument(new CurrencyPair(pair)) .last(pairStats.getLast()) .open(pairStats.getOpen()) .high(pairStats.getHigh()) .low(pairStats.getLow()) .volume(pairStats.getVolume()) .build()); } return tickers; } public static OrderBook adaptOrderBook( CoinbaseProProductBook book, CurrencyPair currencyPair, Date date) { List<LimitOrder> asks = toLimitOrderList(book.getAsks(), OrderType.ASK, currencyPair); List<LimitOrder> bids = toLimitOrderList(book.getBids(), OrderType.BID, currencyPair); return new OrderBook(date, asks, bids); } public static OrderBook adaptOrderBook(CoinbaseProProductBook book, CurrencyPair currencyPair) { return adaptOrderBook(book, currencyPair, null); } private static List<LimitOrder> toLimitOrderList( CoinbaseProProductBookEntry[] levels, OrderType orderType, CurrencyPair currencyPair) { List<LimitOrder> allLevels = new ArrayList<>(); if (levels != null) { for (CoinbaseProProductBookEntry ask : levels) { allLevels.add( new LimitOrder(orderType, ask.getVolume(), currencyPair, "0", null, ask.getPrice())); } } return allLevels; } public static Wallet adaptAccountInfo(CoinbaseProAccount[] coinbaseProAccounts) { List<Balance> balances = new ArrayList<>(coinbaseProAccounts.length); for (CoinbaseProAccount coinbaseProAccount : coinbaseProAccounts) { balances.add( new Balance( Currency.getInstance(coinbaseProAccount.getCurrency()), coinbaseProAccount.getBalance(), coinbaseProAccount.getAvailable(), coinbaseProAccount.getHold())); } return Wallet.Builder.from(balances).id(coinbaseProAccounts[0].getProfile_id()).build(); } @SuppressWarnings("unchecked") public static OpenOrders adaptOpenOrders(CoinbaseProOrder[] coinbaseExOpenOrders) { final Map<Boolean, List<Order>> twoTypes = Arrays.stream(coinbaseExOpenOrders) .map(CoinbaseProAdapters::adaptOrder) .collect(Collectors.partitioningBy(t -> t instanceof LimitOrder)); @SuppressWarnings("rawtypes") List limitOrders = twoTypes.get(true); return new OpenOrders(limitOrders, twoTypes.get(false)); } public static Order adaptOrder(CoinbaseProOrder order) { OrderType type = "buy".equals(order.getSide()) ? 
OrderType.BID : OrderType.ASK; CurrencyPair currencyPair = new CurrencyPair(order.getProductId().replace('-', '/')); Order.Builder builder = null; if (order.getType() == null) { return null; } switch (order.getType()) { case "market": builder = new MarketOrder.Builder(type, currencyPair); break; case "limit": if (order.getStop() == null) { builder = new LimitOrder.Builder(type, currencyPair).limitPrice(order.getPrice()); } else { builder = new StopOrder.Builder(type, currencyPair).stopPrice(order.getStopPrice()); } break; } if (builder == null) { return null; } builder .orderStatus(adaptOrderStatus(order)) .originalAmount(order.getSize()) .id(order.getId()) .timestamp(parseDate(order.getCreatedAt())) .cumulativeAmount(order.getFilledSize()) .fee(order.getFillFees()); BigDecimal averagePrice; if (order.getFilledSize().signum() != 0 && order.getExecutedvalue().signum() != 0) { averagePrice = order.getExecutedvalue().divide(order.getFilledSize(), MathContext.DECIMAL32); } else { averagePrice = BigDecimal.ZERO; } return builder.averagePrice(averagePrice).build(); } public static OrderStatus[] adaptOrderStatuses(CoinbaseProOrder[] orders) { OrderStatus[] orderStatuses = new OrderStatus[orders.length]; int i = 0; for (CoinbaseProOrder coinbaseProOrder : orders) { orderStatuses[i++] = adaptOrderStatus(coinbaseProOrder); } return orderStatuses; } /** The status from the CoinbaseProOrder object converted to xchange status */ public static OrderStatus adaptOrderStatus(CoinbaseProOrder order) { if (order.getStatus() == null) { return OrderStatus.UNKNOWN; } switch (order.getStatus()) { case "pending": return OrderStatus.PENDING_NEW; case "done": case "settled": if (order.getDoneReason() == null) { return OrderStatus.UNKNOWN; } switch (order.getDoneReason()) { case "filled": return OrderStatus.FILLED; case "canceled": return OrderStatus.CANCELED; } return OrderStatus.UNKNOWN; } if (order.getFilledSize().signum() == 0) { if ("open".equals(order.getStatus()) && order.getStop() != null) { // This is a massive edge case of a stop triggering but not immediately // fulfilling. STOPPED status is only currently used by the HitBTC and // YoBit implementations and in both cases it looks like a // misunderstanding and those should return CANCELLED. Should we just // remove this status? return OrderStatus.STOPPED; } return OrderStatus.NEW; } if (order.getFilledSize().compareTo(BigDecimal.ZERO) > 0 // if size >= filledSize order should be partially filled && order.getSize().compareTo(order.getFilledSize()) >= 0) return OrderStatus.PARTIALLY_FILLED; return OrderStatus.UNKNOWN; } public static Trades adaptTrades( List<CoinbaseProTrade> coinbaseProTradesList, CurrencyPair currencyPair) { CoinbaseProTrade[] tradeArray = new CoinbaseProTrade[coinbaseProTradesList.size()]; coinbaseProTradesList.toArray(tradeArray); return CoinbaseProAdapters.adaptTrades(tradeArray, currencyPair); } public static UserTrades adaptTradeHistory(CoinbaseProFill[] coinbaseExFills) { List<UserTrade> trades = new ArrayList<>(coinbaseExFills.length); for (CoinbaseProFill fill : coinbaseExFills) { CurrencyPair currencyPair = new CurrencyPair(fill.getProductId().replace('-', '/')); trades.add( new UserTrade.Builder() .type("buy".equals(fill.getSide()) ? 
OrderType.BID : OrderType.ASK) .originalAmount(fill.getSize()) .currencyPair(currencyPair) .price(fill.getPrice()) .timestamp(parseDate(fill.getCreatedAt())) .id(String.valueOf(fill.getTradeId())) .orderId(fill.getOrderId()) .feeAmount(fill.getFee()) .feeCurrency(currencyPair.counter) .build()); } return new UserTrades(trades, TradeSortType.SortByID); } public static Trades adaptTrades(CoinbaseProTrade[] coinbaseExTrades, CurrencyPair currencyPair) { List<Trade> trades = new ArrayList<>(coinbaseExTrades.length); for (CoinbaseProTrade trade : coinbaseExTrades) { // yes, sell means buy for coinbasePro reported trades.. OrderType type = "sell".equals(trade.getSide()) ? OrderType.BID : OrderType.ASK; trades.add( new Trade.Builder() .type(type) .originalAmount(trade.getSize()) .price(trade.getPrice()) .instrument(currencyPair) .timestamp(parseDate(trade.getTimestamp())) .id(String.valueOf(trade.getTradeId())) .makerOrderId(trade.getMakerOrderId()) .takerOrderId(trade.getTakerOrderId()) .build()); } return new Trades(trades, coinbaseExTrades[0].getTradeId(), TradeSortType.SortByID); } public static CurrencyPair adaptCurrencyPair(CoinbaseProProduct product) { return new CurrencyPair(product.getBaseCurrency(), product.getTargetCurrency()); } private static Currency adaptCurrency(CoinbaseProCurrency currency) { return new Currency(currency.getId()); } private static int numberOfDecimals(BigDecimal value) { double d = value.doubleValue(); return -(int) Math.round(Math.log10(d)); } public static ExchangeMetaData adaptToExchangeMetaData( ExchangeMetaData exchangeMetaData, CoinbaseProProduct[] products, CoinbaseProCurrency[] cbCurrencies) { Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = exchangeMetaData == null ? new HashMap<>() : exchangeMetaData.getCurrencyPairs(); Map<Currency, CurrencyMetaData> currencies = exchangeMetaData == null ? new HashMap<>() : exchangeMetaData.getCurrencies(); for (CoinbaseProProduct product : products) { if (!"online".equals(product.getStatus())) { continue; } CurrencyPair pair = adaptCurrencyPair(product); CurrencyPairMetaData staticMetaData = currencyPairs.get(pair); int baseScale = numberOfDecimals(product.getBaseIncrement()); int priceScale = numberOfDecimals(product.getQuoteIncrement()); boolean marketOrderAllowed = !product.isLimitOnly(); currencyPairs.put( pair, new CurrencyPairMetaData( new BigDecimal("0.25"), // Trading fee at Coinbase is 0.25 % product.getBaseMinSize(), product.getBaseMaxSize(), product.getMinMarketFunds(), product.getMaxMarketFunds(), baseScale, priceScale, null, staticMetaData != null ? staticMetaData.getFeeTiers() : null, null, pair.counter, marketOrderAllowed)); } Arrays.stream(cbCurrencies) .forEach( currency -> currencies.put( adaptCurrency(currency), new CurrencyMetaData( numberOfDecimals(currency.getMaxPrecision()), BigDecimal.ZERO, currency.getDetails().getMinWithdrawalAmount(), "online".equals(currency.getStatus()) ? WalletHealth.ONLINE : WalletHealth.OFFLINE))); return new ExchangeMetaData( currencyPairs, currencies, exchangeMetaData == null ? null : exchangeMetaData.getPublicRateLimits(), exchangeMetaData == null ? null : exchangeMetaData.getPrivateRateLimits(), true); } public static String adaptProductID(CurrencyPair currencyPair) { return currencyPair.base.getCurrencyCode() + "-" + currencyPair.counter.getCurrencyCode(); } public static CoinbaseProPlaceOrder.Side adaptSide(OrderType orderType) { return orderType == OrderType.ASK ? 
CoinbaseProPlaceOrder.Side.sell : CoinbaseProPlaceOrder.Side.buy; } public static CoinbaseProPlaceOrder.Stop adaptStop(OrderType orderType) { return orderType == OrderType.ASK ? CoinbaseProPlaceOrder.Stop.loss : CoinbaseProPlaceOrder.Stop.entry; } public static CoinbaseProPlaceLimitOrder adaptCoinbaseProPlaceLimitOrder(LimitOrder limitOrder) { CoinbaseProPlaceLimitOrder.Builder builder = new CoinbaseProPlaceLimitOrder.Builder() .clientOid(limitOrder.getUserReference()) .price(limitOrder.getLimitPrice()) .type(CoinbaseProPlaceOrder.Type.limit) .productId(adaptProductID(limitOrder.getCurrencyPair())) .side(adaptSide(limitOrder.getType())) .size(limitOrder.getOriginalAmount()); if (limitOrder.getOrderFlags().contains(CoinbaseProOrderFlags.POST_ONLY)) builder.postOnly(true); if (limitOrder.getOrderFlags().contains(CoinbaseProOrderFlags.FILL_OR_KILL)) builder.timeInForce(CoinbaseProPlaceLimitOrder.TimeInForce.FOK); if (limitOrder.getOrderFlags().contains(CoinbaseProOrderFlags.IMMEDIATE_OR_CANCEL)) builder.timeInForce(CoinbaseProPlaceLimitOrder.TimeInForce.IOC); return builder.build(); } public static CoinbaseProPlaceMarketOrder adaptCoinbaseProPlaceMarketOrder( MarketOrder marketOrder) { return new CoinbaseProPlaceMarketOrder.Builder() .productId(adaptProductID(marketOrder.getCurrencyPair())) .clientOid(marketOrder.getUserReference()) .type(CoinbaseProPlaceOrder.Type.market) .side(adaptSide(marketOrder.getType())) .funds(marketOrder.getType() == OrderType.BID ? marketOrder.getOriginalAmount() : null) .size(marketOrder.getType() == OrderType.ASK ? marketOrder.getOriginalAmount() : null) .build(); } /** * Creates a 'stop' order. Stop limit order converts to a limit order when the stop amount is * triggered. The limit order can have a different price than the stop price. 
* * <p>If the stop order has no limit price it will execute as a market order once the stop price * is crossed. * * @param stopOrder the stop order to convert * @return the corresponding CoinbasePro order to place */ public static CoinbaseProPlaceOrder adaptCoinbaseProStopOrder(StopOrder stopOrder) { // stop orders can also execute as 'stop limit' orders, that is, converting to // a limit order, but a traditional 'stop' order converts to a market order if (stopOrder.getLimitPrice() == null) { return new CoinbaseProPlaceMarketOrder.Builder() .productId(adaptProductID(stopOrder.getCurrencyPair())) .clientOid(stopOrder.getUserReference()) .type(CoinbaseProPlaceOrder.Type.market) .side(adaptSide(stopOrder.getType())) .size(stopOrder.getOriginalAmount()) .stop(adaptStop(stopOrder.getType())) .stopPrice(stopOrder.getStopPrice()) .build(); } return new CoinbaseProPlaceLimitOrder.Builder() .productId(adaptProductID(stopOrder.getCurrencyPair())) .clientOid(stopOrder.getUserReference()) .type(CoinbaseProPlaceOrder.Type.limit) .side(adaptSide(stopOrder.getType())) .size(stopOrder.getOriginalAmount()) .stop(adaptStop(stopOrder.getType())) .stopPrice(stopOrder.getStopPrice()) .price(stopOrder.getLimitPrice()) .build(); } public static FundingRecord adaptFundingRecord( Currency currency, CoinbaseProTransfer coinbaseProTransfer) { FundingRecord.Status status = FundingRecord.Status.PROCESSING; Date processedAt = coinbaseProTransfer.processedAt(); Date canceledAt = coinbaseProTransfer.canceledAt(); if (canceledAt != null) status = FundingRecord.Status.CANCELLED; else if (processedAt != null) status = FundingRecord.Status.COMPLETE; String address = coinbaseProTransfer.getDetails().getCryptoAddress(); if (address == null) address = coinbaseProTransfer.getDetails().getSentToAddress(); String cryptoTransactionHash = coinbaseProTransfer.getDetails().getCryptoTransactionHash(); String transactionHash = adaptTransactionHash(currency.getSymbol(), cryptoTransactionHash); return new FundingRecord( address, coinbaseProTransfer.getDetails().getDestinationTag(), coinbaseProTransfer.createdAt(), currency, coinbaseProTransfer.amount(), coinbaseProTransfer.getId(), transactionHash, coinbaseProTransfer.type(), status, null, null, null); } // crypto_transaction_link: "https://etherscan.io/tx/0x{{txId}}" private static String adaptTransactionHash(String currency, String transactionHash) { switch (currency) { case "ZRX": case "BAT": case "LOOM": case "CVC": case "DNT": case "MANA": case "GNT": case "REP": case "LINK": case "ETH": case "ETC": case "USDC": case "DAI": case "ZIL": case "MKR": transactionHash = transactionHash != null ? "0x" + transactionHash : null; break; } return transactionHash; } }
[CoinbasePro] Default fee is 0.50%
xchange-coinbasepro/src/main/java/org/knowm/xchange/coinbasepro/CoinbaseProAdapters.java
[CoinbasePro] Default fee is 0.50%
<ide><path>xchange-coinbasepro/src/main/java/org/knowm/xchange/coinbasepro/CoinbaseProAdapters.java <ide> currencyPairs.put( <ide> pair, <ide> new CurrencyPairMetaData( <del> new BigDecimal("0.25"), // Trading fee at Coinbase is 0.25 % <add> new BigDecimal("0.50"), // Trading fee at Coinbase is 0.5 % <ide> product.getBaseMinSize(), <ide> product.getBaseMaxSize(), <ide> product.getMinMarketFunds(),
Java
apache-2.0
aaaa33550e852bfb12acfcad4009898bd2f5f6a3
0
virtualdataset/metagen-java,virtualdataset/metagen-java
package io.virtdata.core; import io.virtdata.api.DataMapper; import io.virtdata.api.DataMapperLibrary; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.stream.Collectors; public class AllDataMapperLibraries implements DataMapperLibrary { private static AllDataMapperLibraries instance = new AllDataMapperLibraries(); private List<DataMapperLibrary> libraries = DataMapperLibraryFinder.getAll(); private final Map<String,DataMapper<?>> threadSafeCache = new HashMap<>(); private final static Logger logger = LoggerFactory.getLogger(AllDataMapperLibraries.class); private AllDataMapperLibraries() { } public static AllDataMapperLibraries get() { return instance; } @Override public String getLibraryName() { return "ALL"; } private <T> Optional<DataMapper<T>> getDataMapperUnsynced(String spec) { List<ResolvedFunction> resolvedFunctions = resolveFunctions(spec); if (resolvedFunctions.size()==0) { throw new RuntimeException("Unable to resolve a mapping function for " + spec); } if (resolvedFunctions.size()>1) { logger.warn("Found " + resolvedFunctions.size() + " resolved functions for '" + spec +"'. This library " + "expects there to be exactly 1"); } Optional<ResolvedFunction> optionallyResolvedFunction = Optional.ofNullable(resolvedFunctions.get(0)); if (optionallyResolvedFunction.isPresent()) { ResolvedFunction resolvedFunction = optionallyResolvedFunction.get(); } Optional<DataMapper<T>> dataMapper = optionallyResolvedFunction .map(ResolvedFunction::getFunctionObject) .map(DataMapperFunctionMapper::map); return dataMapper; } /** * This method modifies the usual logic of finding data mapping functions. This is to allow only libraries which * can parse the spec to have a chance to map the function objects internally. * * @param spec A specifier that describes the type and or parameterization of a new data mapper instance. * @param <T> result type of a data mapper * @return an optional data mapper instance */ @Override public <T> Optional<DataMapper<T>> getDataMapper(String spec) { if (!canParseSpec(spec)) { throw new RuntimeException("No libraries could parse: " + spec); } synchronized (this) { if (threadSafeCache.containsKey(spec)) { DataMapper<T> dataMapper = (DataMapper<T>) threadSafeCache.get(spec); if (dataMapper != null) { return Optional.ofNullable(dataMapper); } // else known to be not marked as threadsafe } else { // don't know if it is threadsafe or not, so must compute in critical section Optional<ResolvedFunction> optionallyResolvedFunction = resolveFunction(spec); if (optionallyResolvedFunction.isPresent()) { ResolvedFunction resolvedFunction = optionallyResolvedFunction.get(); DataMapper<T> mapper = DataMapperFunctionMapper.map(resolvedFunction.getFunctionObject()); if (resolvedFunction.isThreadSafe()) { logger.debug("Function " + spec + " is marked as thread safe. Caching and sharing."); threadSafeCache.put(spec,mapper); } else { logger.debug("Function " + spec + " is not thread safe."); threadSafeCache.put(spec,null); } } else { return Optional.empty(); } Optional<DataMapper<Object>> newlyResolved = getDataMapperUnsynced(spec); } } return getDataMapperUnsynced(spec); // List<ResolvedFunction> resolvedFunctions = resolveFunctions(spec); // // if (resolvedFunctions.size()==0) { // throw new RuntimeException("Unable to resolve a mapping function for " + spec); // } // // if (resolvedFunctions.size()>1) { // logger.warn("Found " + resolvedFunctions.size() + " resolved functions for '" + spec +"'. 
This library " + // "expects there to be exactly 1"); // } // // Optional<ResolvedFunction> optionallyResolvedFunction = Optional.ofNullable(resolvedFunctions.get(0)); // if (optionallyResolvedFunction.isPresent()) { // ResolvedFunction resolvedFunction = optionallyResolvedFunction.get(); // } // Optional<DataMapper<T>> dataMapper = optionallyResolvedFunction // .map(ResolvedFunction::getFunctionObject) // .map(DataMapperFunctionMapper::map); // // return dataMapper; // } /** * If any composed libraries can parse the spec, we just return that one. * @param spec a data mapping function specifier * @return true, if this spec is at least parsable by this library */ @Override public boolean canParseSpec(String spec) { return libraries.stream().map(gl -> gl.canParseSpec(spec)).anyMatch(l-> l); } /** * This method modifies the usual logic of finding data mappers. This is to allow only libraries which * can parse the spec to have a chance to map the function objects internally. * @param spec A specifier that describes the type and or parameterization of a new data mapper. * @return a list of resolved functions */ @Override public List<ResolvedFunction> resolveFunctions(String spec) { List<ResolvedFunction> resolvedFunctions = new ArrayList<>(); int parsingLibs=0; for (DataMapperLibrary library : libraries) { if (library.canParseSpec(spec)) { parsingLibs++; List<ResolvedFunction> resolvedFunctions1 = library.resolveFunctions(spec); resolvedFunctions.addAll(resolvedFunctions1); } } if (parsingLibs==0) { throw new RuntimeException("No library could parse: " + spec); } return resolvedFunctions; } public Optional<ResolvedFunction> resolveFunction(String spec) { List<ResolvedFunction> resolvedFunctionList = resolveFunctions(spec); if (resolvedFunctionList.size() == 0) { logger.warn("Unable to find data mapper for spec '" + spec + "' in any libimpl, searched in " + toString()); return Optional.empty(); } if (resolvedFunctionList.size() > 1) { String resolvedNames = resolvedFunctionList.stream() .map(r -> r.getClass().getCanonicalName()) .collect(Collectors.joining()); logger.warn("Found more than one matching data mapper for spec '" + spec + "' : " + resolvedNames); } return Optional.of(resolvedFunctionList.get(0)); } @Override public List<String> getDataMapperNames() { List<String> genNames = new ArrayList<>(); for (DataMapperLibrary library : libraries) { List<String> libGenNames = library.getDataMapperNames().stream() .map(genName -> library.getLibraryName() + "::" + genName) .collect(Collectors.toList()); genNames.addAll(libGenNames); } genNames.sort(Comparator.naturalOrder()); return genNames; } public String toString() { return AllDataMapperLibraries.class.getSimpleName() + ":" + libraries.stream().map(DataMapperLibrary::getLibraryName).collect(Collectors.joining(",", "[", "]")); } }
metagen-api/src/main/java/io/virtdata/core/AllDataMapperLibraries.java
package io.virtdata.core; import io.virtdata.api.DataMapper; import io.virtdata.api.DataMapperLibrary; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.stream.Collectors; public class AllDataMapperLibraries implements DataMapperLibrary { private static AllDataMapperLibraries instance = new AllDataMapperLibraries(); private List<DataMapperLibrary> libraries = DataMapperLibraryFinder.getAll(); private final Map<String,DataMapper<?>> threadSafeCache = new HashMap<>(); private final static Logger logger = LoggerFactory.getLogger(AllDataMapperLibraries.class); private AllDataMapperLibraries() { } public static AllDataMapperLibraries get() { return instance; } @Override public String getLibraryName() { return "ALL"; } private <T> Optional<DataMapper<T>> getDataMapperUnsynced(String spec) { List<ResolvedFunction> resolvedFunctions = resolveFunctions(spec); if (resolvedFunctions.size()==0) { throw new RuntimeException("Unable to resolve a mapping function for " + spec); } if (resolvedFunctions.size()>1) { logger.warn("Found " + resolvedFunctions.size() + " resolved functions for '" + spec +"'. This library " + "expects there to be exactly 1"); } Optional<ResolvedFunction> optionallyResolvedFunction = Optional.ofNullable(resolvedFunctions.get(0)); if (optionallyResolvedFunction.isPresent()) { ResolvedFunction resolvedFunction = optionallyResolvedFunction.get(); } Optional<DataMapper<T>> dataMapper = optionallyResolvedFunction .map(ResolvedFunction::getFunctionObject) .map(DataMapperFunctionMapper::map); return dataMapper; } /** * This method modifies the usual logic of finding data mapping functions. This is to allow only libraries which * can parse the spec to have a chance to map the function objects internally. * * @param spec A specifier that describes the type and or parameterization of a new data mapper instance. * @param <T> result type of a data mapper * @return an optional data mapper instance */ @Override public <T> Optional<DataMapper<T>> getDataMapper(String spec) { if (!canParseSpec(spec)) { throw new RuntimeException("No libraries could parse: " + spec); } synchronized (this) { if (threadSafeCache.containsKey(spec)) { DataMapper<T> dataMapper = (DataMapper<T>) threadSafeCache.get(spec); if (dataMapper != null) { return Optional.ofNullable(dataMapper); } // else known to be not marked as threadsafe } else { // don't know if it is threadsafe or not, so must compute in critical section Optional<ResolvedFunction> optionallyResolvedFunction = resolveFunction(spec); if (optionallyResolvedFunction.isPresent()) { ResolvedFunction resolvedFunction = optionallyResolvedFunction.get(); DataMapper<T> mapper = DataMapperFunctionMapper.map(resolvedFunction.getFunctionObject()); if (resolvedFunction.isThreadSafe()) { logger.debug("Function " + spec + " is marked as thread safe. Caching and sharing."); threadSafeCache.put(spec,mapper); } else { logger.debug("Function " + spec + " is not thread safe."); threadSafeCache.put(spec,null); } } else { return Optional.empty(); } Optional<DataMapper<Object>> newlyResolved = getDataMapperUnsynced(spec); } } return getDataMapperUnsynced(spec); // List<ResolvedFunction> resolvedFunctions = resolveFunctions(spec); // // if (resolvedFunctions.size()==0) { // throw new RuntimeException("Unable to resolve a mapping function for " + spec); // } // // if (resolvedFunctions.size()>1) { // logger.warn("Found " + resolvedFunctions.size() + " resolved functions for '" + spec +"'. 
This library " + // "expects there to be exactly 1"); // } // // Optional<ResolvedFunction> optionallyResolvedFunction = Optional.ofNullable(resolvedFunctions.get(0)); // if (optionallyResolvedFunction.isPresent()) { // ResolvedFunction resolvedFunction = optionallyResolvedFunction.get(); // } // Optional<DataMapper<T>> dataMapper = optionallyResolvedFunction // .map(ResolvedFunction::getFunctionObject) // .map(DataMapperFunctionMapper::map); // // return dataMapper; // } /** * If any composed libraries can parse the spec, we just return that one. * @param spec a data mapping function specifier * @return true, if this spec is at least parsable by this library */ @Override public boolean canParseSpec(String spec) { return libraries.stream().map(gl -> gl.canParseSpec(spec)).anyMatch(l-> l); } /** * This method modifies the usual logic of finding data mappers. This is to allow only libraries which * can parse the spec to have a chance to map the function objects internally. * @param spec A specifier that describes the type and or parameterization of a new data mapper. * @return a list of resolved functions */ @Override public List<ResolvedFunction> resolveFunctions(String spec) { List<ResolvedFunction> resolvedFunctions = new ArrayList<>(); int parsingLibs=0; for (DataMapperLibrary library : libraries) { if (library.canParseSpec(spec)) { parsingLibs++; List<ResolvedFunction> resolvedFunctions1 = library.resolveFunctions(spec); resolvedFunctions.addAll(resolvedFunctions1); // Optional<ResolvedFunction> resolvedFunction = library.resolveFunction(spec); // if (resolvedFunction.isPresent()) { // resolvedFunctions.add(resolvedFunction.get()); // } } } if (parsingLibs==0) { throw new RuntimeException("No library could parse: " + spec); } return resolvedFunctions; } public Optional<ResolvedFunction> resolveFunction(String spec) { List<ResolvedFunction> resolvedFunctionList = resolveFunctions(spec); if (resolvedFunctionList.size() == 0) { logger.warn("Unable to find data mapper for spec '" + spec + "' in any libimpl, searched in " + toString()); return Optional.empty(); } if (resolvedFunctionList.size() > 1) { String resolvedNames = resolvedFunctionList.stream() .map(r -> r.getClass().getCanonicalName()) .collect(Collectors.joining()); logger.warn("Found more than one matching data mapper for spec '" + spec + "' : " + resolvedNames); } return Optional.of(resolvedFunctionList.get(0)); } @Override public List<String> getDataMapperNames() { List<String> genNames = new ArrayList<>(); for (DataMapperLibrary library : libraries) { List<String> libGenNames = library.getDataMapperNames().stream() .map(genName -> library.getLibraryName() + "::" + genName) .collect(Collectors.toList()); genNames.addAll(libGenNames); } genNames.sort(Comparator.naturalOrder()); return genNames; } public String toString() { return AllDataMapperLibraries.class.getSimpleName() + ":" + libraries.stream().map(DataMapperLibrary::getLibraryName).collect(Collectors.joining(",", "[", "]")); } }
removing dead code
metagen-api/src/main/java/io/virtdata/core/AllDataMapperLibraries.java
removing dead code
<ide><path>metagen-api/src/main/java/io/virtdata/core/AllDataMapperLibraries.java <ide> parsingLibs++; <ide> List<ResolvedFunction> resolvedFunctions1 = library.resolveFunctions(spec); <ide> resolvedFunctions.addAll(resolvedFunctions1); <del>// Optional<ResolvedFunction> resolvedFunction = library.resolveFunction(spec); <del>// if (resolvedFunction.isPresent()) { <del>// resolvedFunctions.add(resolvedFunction.get()); <del>// } <ide> } <ide> } <ide> if (parsingLibs==0) {
Java
mit
e3dfc191192cc6cdfcc1739ce473fb792b2086f4
0
glezo1/commonlibs
package com.glezo.mac; public class Mac { private String iso_mac; //01:23:45:67:89:AB [capital] //----------------------------------------------------------------------------------------- public Mac(String mac) throws UnparseableMacException { if(mac.length()==17) { this.iso_mac=""; boolean separator_matches= mac.charAt(2)==mac.charAt(5) && mac.charAt(2)==mac.charAt(8) && mac.charAt(2)==mac.charAt(11) && mac.charAt(2)==mac.charAt(14); if(!separator_matches) { throw new UnparseableMacException("Unparseable mac: "+mac+". Separators don't unify"); } else { for(int i=0;i<mac.length();i++) { boolean is_separator=(i==2 || i==5 ||i==8 ||i==11 ||i==14); if(!is_separator) { char c_upper=mac.toUpperCase().charAt(i); if(Character.digit(c_upper,16)==-1) { throw new UnparseableMacException("Unparseable mac: "+mac+". Wrong character at position "+i); } this.iso_mac+=c_upper; } else { this.iso_mac+=":"; } } } } else if(mac.length()==12) { this.iso_mac=""; for(int i=0;i<mac.length();i++) { if(i==2 || i==4 ||i==6 ||i==8 ||i==10) { this.iso_mac+=":"; } char c_upper=mac.toUpperCase().charAt(i); if(Character.digit(c_upper,16)==-1) { throw new UnparseableMacException("Unparseable mac: "+mac+". Wrong character at position "+i); } this.iso_mac+=c_upper; } } else { throw new UnparseableMacException("Unparseable mac: "+mac+". Length must be 12 or 17"); } } //----------------------------------------------------------------------------------------- public String get_mac(String separator,boolean uppercase) { String sep=""; sep+=separator; String result=this.iso_mac; if(uppercase) {result=result.toUpperCase();} else {result=result.toLowerCase();} result=result.replaceAll(":",sep); return result; } //----------------------------------------------------------------------------------------- public String get_mac_oui(String separator,boolean uppercase) { String sep=""; sep+=separator; String result=this.iso_mac.substring(0,8); if(uppercase) {result=result.toUpperCase();} else {result=result.toLowerCase();} result=result.replaceAll(":",sep); return result; } //----------------------------------------------------------------------------------------- public String toString() { return this.iso_mac; } //----------------------------------------------------------------------------------------- public static boolean is_valid_mac(String m) { try { new Mac(m); return true; } catch(UnparseableMacException e) { return false; } } //----------------------------------------------------------------------------------------- public boolean equals(Object o) { if(o==null) { return false; } else { if(this.getClass()!=o.getClass()) { return false; } Mac oo=(Mac)o; return this.iso_mac.equals(oo.iso_mac); } } //----------------------------------------------------------------------------------------- }
com/glezo/mac/Mac.java
package com.glezo.mac; public class Mac { private String iso_mac; //01:23:45:67:89:AB [capital] //----------------------------------------------------------------------------------------- public Mac(String mac) throws UnparseableMacException { if(mac.length()==17) { this.iso_mac=""; boolean separator_matches= mac.charAt(2)==mac.charAt(5) && mac.charAt(2)==mac.charAt(8) && mac.charAt(2)==mac.charAt(11) && mac.charAt(2)==mac.charAt(14); if(!separator_matches) { throw new UnparseableMacException("Unparseable mac: "+mac+". Separators don't unify"); } else { for(int i=0;i<mac.length();i++) { boolean is_separator=(i==2 || i==5 ||i==8 ||i==11 ||i==14); if(!is_separator) { char c_upper=mac.toUpperCase().charAt(i); if(Character.digit(c_upper,16)==-1) { throw new UnparseableMacException("Unparseable mac: "+mac+". Wrong character at position "+i); } this.iso_mac+=c_upper; } else { this.iso_mac+=":"; } } } } else if(mac.length()==12) { this.iso_mac=""; for(int i=0;i<mac.length();i++) { if(i==2 || i==4 ||i==6 ||i==8 ||i==10) { this.iso_mac+=":"; } char c_upper=mac.toUpperCase().charAt(i); if(Character.digit(c_upper,16)==-1) { throw new UnparseableMacException("Unparseable mac: "+mac+". Wrong character at position "+i); } this.iso_mac+=c_upper; } } else { throw new UnparseableMacException("Unparseable mac: "+mac+". Length must be 12 or 17"); } } //----------------------------------------------------------------------------------------- public String get_mac(String separator,boolean uppercase) { String sep=""; sep+=separator; String result=this.iso_mac; if(uppercase) {result=result.toUpperCase();} else {result=result.toLowerCase();} result=result.replaceAll(":",sep); return result; } //----------------------------------------------------------------------------------------- public String get_mac_oui(String separator,boolean uppercase) { String sep=""; sep+=separator; String result=this.iso_mac.substring(0,8); if(uppercase) {result=result.toUpperCase();} else {result=result.toLowerCase();} result=result.replaceAll(":",sep); return result; } //----------------------------------------------------------------------------------------- public String toString() { return this.iso_mac; } //----------------------------------------------------------------------------------------- public static boolean is_valid_mac(String m) { try { new Mac(m); return true; } catch(UnparseableMacException e) { return false; } } //----------------------------------------------------------------------------------------- public boolean equals(Object o) { if(o==null) { return false; } else { if(this.getClass()!=o.getClass()) { return false; } Mac oo=(Mac)o; return oo.iso_mac.equals(o.iso_mac); } } //----------------------------------------------------------------------------------------- }
Update Mac.java
com/glezo/mac/Mac.java
Update Mac.java
<ide><path>com/glezo/mac/Mac.java <ide> return false; <ide> } <ide> Mac oo=(Mac)o; <del> return oo.iso_mac.equals(o.iso_mac); <add> return this.iso_mac.equals(oo.iso_mac); <ide> } <ide> } <ide> //-----------------------------------------------------------------------------------------
JavaScript
isc
6a6085dc0ac1fc9bff752fe5a88930f0d8ca2a5d
0
evanx/redexutil,ionteamza/redexutil,ionteamza/redexutil,evanx/redexutil
// Copyright (c) 2015, Evan Summers (twitter.com/evanxsummers) // ISC license, see http://github.com/evanx/redexutil/LICENSE import assert from 'assert'; import bunyan from 'bunyan'; import lodash from 'lodash'; const defaultLevel = 'info'; const levels = ['debug', 'info', 'warn', 'error']; const extraLevels = ['state', 'digest', 'child']; const allLevels = levels.concat(extraLevels); const digestLimit = 100; const state = { limit: 10, stats: {}, logging: { error: [], warn: [], info: [], debug: [], digest: [] } }; function getStats(name) { let stats = state.stats[name]; if (!stats) { stats = { counts: {}, averages: {}, peaks: {}, }; state.stats[name] = stats; } return stats; } function increment(logger, name, prop) { let stats = getStats(name); let count = stats.counts[prop] || 0; count += 1; stats.counts[prop] = count; return count; } function peak(logger, name, prop, value) { let stats = getStats(name); let count = stats.counts[prop] || 0; count += 1; stats.counts[prop] = count; // let average = stats.averages[prop]; if (!average) { average = value; } else if (count > 1) { average = ((count - 1) * average + value)/count; } stats.averages[prop] = average; // let peak = stats.peaks[prop]; if (!stats.peaks.hasOwnProperty(prop)) { stats.peaks[prop] = value; return value; } if (value > peak) { stats.peaks[prop] = value; peak = value; logger.info('peak', prop, value, count, average); } return peak; } function basename(file) { var matcher = file.match(/([^\/]+)\.[a-z]+$/); if (matcher) { return matcher[1]; } else { return file; } } module.exports = { pub() { return Object.assign({}, state.stats, state.logging); }, counters() { // TODO deprecate return {}; }, create(name, level) { name = basename(name); level = level || global.loggerLevel || process.env.loggerLevel || defaultLevel; if (lodash.includes(levels, level)) { let logger = bunyan.createLogger({name, level}); return decorate(logger, name, level); } else { assert(lodash.includes(extraLevels, level), 'level: ' + level); return decorate(null, name, level); } } }; function logging(logger, name, loggerLevel, context, level, args, count) { increment(logger, name, level); args = [].slice.call(args); // convert arguments to array if (!lodash.isEmpty(context)) { if (lodash.isArray(context)) { args = context.concat(args); } } if (!state.logging.hasOwnProperty(level)) { args.splice(0, 0, 'Invalid level: ' + level); level = 'warn'; } if (logger) { if (level === 'digest') { if (count < digestLimit/10 && count % digestLimit === 0) { logger.info('digest', count, ...args); } } else if (lodash.includes(levels, level)) { if (levels.indexOf(level) >= levels.indexOf(loggerLevel)) { logger[level].call(logger, ...args); let error = findArgsError(args); if (error) { if (error.code && error.code === 'ETIMEOUT') { } else { logger[level].call(logger, error); } } } } else { } } let date = new Date().toISOString().substring(0, 16); let message = [date, name, ...args]; state.logging[level].splice(0, 0, message); if (state.logging[level].length > state.limit) { // trim state.logging[level].length = state.limit; } } function findArgsError(args) { if (lodash.isError(args[0])) { return args[0]; } else if (lodash.isError(args[1])) { return args[1]; } else if (lodash.isError(args[args.length - 1])) { return args[args.length - 1]; } } function decorate(logger, name, level) { let count = 0; let context = []; const those = { get name() { return name; }, verbose() { }, vdebug() { }, debug() { if (level === 'debug') { logging(logger, name, level, context, 'debug', arguments); 
} }, info() { if (level !== 'warn') { logging(logger, name, level, context, 'info', arguments); } }, dev() { logging(logger, name, level, context, 'warn', arguments); }, wverbose() { logging(logger, name, level, context, 'warn', arguments); }, wdebug() { logging(logger, name, level, context, 'warn', arguments); }, winfo() { logging(logger, name, level, context, 'warn', arguments); }, dverbose() { logging(logger, name, level, context, 'warn', arguments); }, ddebug() { logging(logger, name, level, context, 'warn', arguments); }, dinfo() { logging(logger, name, level, context, 'warn', arguments); }, tdebug() { if (process.env.envType === 'test') { logging(logger, name, level, context, 'info', arguments); } }, warn() { logging(logger, name, level, context, 'warn', arguments); }, error() { logging(logger, name, level, context, 'error', arguments); }, state() { logging(logger, name, level, context, 'state', arguments); }, digest() { if (level === 'debug') { count += 1; logging(logger, name, level, context, 'digest', arguments, count); } }, context() { context = [].slice.call(arguments); }, child() { let childName = [name].concat([].slice.call(arguments)).join('.'); return module.exports.create(childName, level); // 'Loggers' is not defined in this module }, increment(prop) { return increment(logger, name, prop); }, peak(prop, value) { return peak(logger, name, prop, value); }, timer(prop, time) { return peak(logger, name, prop, new Date().getTime() - time); } }; return those; }
Loggers.js
// Copyright (c) 2015, Evan Summers (twitter.com/evanxsummers) // ISC license, see http://github.com/evanx/redexutil/LICENSE import assert from 'assert'; import bunyan from 'bunyan'; import lodash from 'lodash'; const defaultLevel = 'info'; const levels = ['debug', 'info', 'warn', 'error']; const extraLevels = ['state', 'digest', 'child']; const allLevels = levels.concat(extraLevels); const digestLimit = 100; const state = { limit: 10, stats: {}, logging: { error: [], warn: [], info: [], debug: [], digest: [] } }; function getStats(name) { let stats = state.stats[name]; if (!stats) { stats = { counts: {}, averages: {}, peaks: {}, }; state.stats[name] = stats; } return stats; } function increment(logger, name, prop) { let stats = getStats(name); let count = stats.counts[prop] || 0; count += 1; stats.counts[prop] = count; return count; } function peak(logger, name, prop, value) { let stats = getStats(name); let count = stats.counts[prop] || 0; count += 1; stats.counts[prop] = count; // let average = stats.averages[prop]; if (!average) { average = value; } else if (count > 1) { average = ((count - 1) * average + value)/count; } stats.averages[prop] = average; // let peak = stats.peaks[prop]; if (!stats.peaks.hasOwnProperty(prop)) { stats.peaks[prop] = value; return value; } if (value > peak) { stats.peaks[prop] = value; peak = value; logger.info(name, 'peak', prop, value, count, average); } return peak; } function basename(file) { var matcher = file.match(/([^\/]+)\.[a-z]+$/); if (matcher) { return matcher[1]; } else { return file; } } module.exports = { pub() { return Object.assign({}, state.stats, state.logging); }, counters() { // TODO deprecate return {}; }, create(name, level) { name = basename(name); level = level || global.loggerLevel || process.env.loggerLevel || defaultLevel; if (lodash.includes(levels, level)) { let logger = bunyan.createLogger({name, level}); return decorate(logger, name, level); } else { assert(lodash.includes(extraLevels, level), 'level: ' + level); return decorate(null, name, level); } } }; function logging(logger, name, loggerLevel, context, level, args, count) { increment(logger, name, level); args = [].slice.call(args); // convert arguments to array if (!lodash.isEmpty(context)) { if (lodash.isArray(context)) { args = context.concat(args); } } if (!state.logging.hasOwnProperty(level)) { args.splice(0, 0, 'Invalid level: ' + level); level = 'warn'; } if (logger) { if (level === 'digest') { if (count < digestLimit/10 && count % digestLimit === 0) { logger.info('digest', count, ...args); } } else if (lodash.includes(levels, level)) { if (levels.indexOf(level) >= levels.indexOf(loggerLevel)) { logger[level].call(logger, ...args); let error = findArgsError(args); if (error) { if (error.code && error.code === 'ETIMEOUT') { } else { logger[level].call(logger, error); } } } } else { } } let date = new Date().toISOString().substring(0, 16); let message = [date, name, ...args]; state.logging[level].splice(0, 0, message); if (state.logging[level].length > state.limit) { // trim state.logging[level].length = state.limit; } } function findArgsError(args) { if (lodash.isError(args[0])) { return args[0]; } else if (lodash.isError(args[1])) { return args[1]; } else if (lodash.isError(args[args.length - 1])) { return args[args.length - 1]; } } function decorate(logger, name, level) { let count = 0; let context = []; const those = { get name() { return name; }, verbose() { }, vdebug() { }, debug() { if (level === 'debug') { logging(logger, name, level, context, 'debug', 
arguments); } }, info() { if (level !== 'warn') { logging(logger, name, level, context, 'info', arguments); } }, dev() { logging(logger, name, level, context, 'warn', arguments); }, wverbose() { logging(logger, name, level, context, 'warn', arguments); }, wdebug() { logging(logger, name, level, context, 'warn', arguments); }, winfo() { logging(logger, name, level, context, 'warn', arguments); }, dverbose() { logging(logger, name, level, context, 'warn', arguments); }, ddebug() { logging(logger, name, level, context, 'warn', arguments); }, dinfo() { logging(logger, name, level, context, 'warn', arguments); }, tdebug() { if (process.env.envType === 'test') { logging(logger, name, level, context, 'info', arguments); } }, warn() { logging(logger, name, level, context, 'warn', arguments); }, error() { logging(logger, name, level, context, 'error', arguments); }, state() { logging(logger, name, level, context, 'state', arguments); }, digest() { if (level === 'debug') { count += 1; logging(logger, name, level, context, 'digest', arguments, count); } }, context() { context = [].slice.call(arguments); }, child() { let childName = [name].concat([].slice.call(arguments)).join('.'); return module.exports.create(childName, level); // 'Loggers' is not defined in this module }, increment(prop) { return increment(logger, name, prop); }, peak(prop, value) { return peak(logger, name, prop, value); }, timer(prop, time) { return peak(logger, name, prop, new Date().getTime() - time); } }; return those; }
admin ips
Loggers.js
admin ips
<ide><path>Loggers.js <ide> if (value > peak) { <ide> stats.peaks[prop] = value; <ide> peak = value; <del> logger.info(name, 'peak', prop, value, count, average); <add> logger.info('peak', prop, value, count, average); <ide> } <ide> return peak; <ide> }
Java
bsd-3-clause
44064c25de1805fa5bfd3eae2b689b1a05b2c56e
0
Nutomic/controldlna,carhero/controldlna
/* Copyright (c) 2013, Felix Ableitner All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the <organization> nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.github.nutomic.controldlna.mediarouter; import android.app.Notification; import android.app.PendingIntent; import android.app.Service; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.os.IBinder; import android.preference.PreferenceManager; import android.support.v4.app.NotificationCompat; import android.support.v7.media.MediaControlIntent; import android.support.v7.media.MediaItemStatus; import android.support.v7.media.MediaRouteSelector; import android.support.v7.media.MediaRouter; import android.support.v7.media.MediaRouter.ControlRequestCallback; import android.support.v7.media.MediaRouter.RouteInfo; import android.telephony.PhoneStateListener; import android.telephony.TelephonyManager; import android.util.Log; import com.github.nutomic.controldlna.R; import com.github.nutomic.controldlna.gui.MainActivity; import com.github.nutomic.controldlna.gui.PreferencesActivity; import com.github.nutomic.controldlna.gui.RouteFragment; import com.github.nutomic.controldlna.utility.LoadImageTask; import org.teleal.cling.support.contentdirectory.DIDLParser; import org.teleal.cling.support.model.DIDLContent; import org.teleal.cling.support.model.DIDLObject; import org.teleal.cling.support.model.item.Item; import org.teleal.cling.support.model.item.MusicTrack; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.List; import java.util.Random; /** * Background service that handles media playback to a single UPNP media renderer. * * @author Felix Ableitner * */ public class MediaRouterPlayService extends Service { private static final String TAG = "PlayService"; private static final int NOTIFICATION_ID = 1; private final MediaRouterPlayServiceBinder mBinder = new MediaRouterPlayServiceBinder(this); private MediaRouter mMediaRouter; /** * Media items that should be played. */ private List<Item> mPlaylist = new ArrayList<Item>(); /** * The track that is currently being played. 
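* Set to -1 while no track has been started yet.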
*/ private int mCurrentTrack = -1; private boolean mShuffle = false; private boolean mRepeat = false; private String mItemId; private String mSessionId; private WeakReference<RouteFragment> mRouterFragment = new WeakReference<RouteFragment>(null); private boolean mPollingStatus = false; private boolean mBound; /** * Route that is currently being played to. May be invalid. */ private RouteInfo mCurrentRoute; /* * Stops foreground mode and notification if the current route * has been removed. If the service is not bound, stops it. */ private MediaRouter.Callback mRouteRemovedCallback = new MediaRouter.Callback() { @Override public void onRouteRemoved(MediaRouter router, RouteInfo route) { if (route.equals(mCurrentRoute)) { stopForeground(true); } if (!mBound && !mPollingStatus) { stopSelf(); } } @Override public void onRouteAdded(MediaRouter router, RouteInfo route) { if (route.getId().equals(mCurrentRoute.getId())) { selectRoute(route); if (mCurrentTrack >= 0 && mCurrentTrack < mPlaylist.size()) { new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); } } } }; /** * Creates a notification after the icon bitmap is loaded. */ private class CreateNotificationTask extends LoadImageTask { @Override protected void onPostExecute(Bitmap result) { String title = ""; String artist = ""; if (mCurrentTrack < mPlaylist.size()) { title = mPlaylist.get(mCurrentTrack).getTitle(); if (mPlaylist.get(mCurrentTrack) instanceof MusicTrack) { MusicTrack track = (MusicTrack) mPlaylist.get(mCurrentTrack); if (track.getArtists().length > 0) { artist = track.getArtists()[0].getName(); } } } Intent intent = new Intent(MediaRouterPlayService.this, MainActivity.class); intent.setAction("showRouteFragment"); Notification notification = new NotificationCompat.Builder(MediaRouterPlayService.this) .setContentIntent(PendingIntent.getActivity(MediaRouterPlayService.this, 0, intent, 0)) .setContentTitle(title) .setContentText(artist) .setLargeIcon(result) .setSmallIcon(R.drawable.ic_launcher) .build(); notification.flags |= Notification.FLAG_ONGOING_EVENT; startForeground(NOTIFICATION_ID, notification); } } /** * Listens for incoming phone calls and pauses playback while a call is active. */ private class PhoneCallListener extends PhoneStateListener { private boolean mPausedForCall = false; @Override public void onCallStateChanged(int state, String incomingNumber) { if (!PreferenceManager.getDefaultSharedPreferences(MediaRouterPlayService.this) .getBoolean(PreferencesActivity.KEY_INCOMING_PHONE_CALL_PAUSE, true)) { return; } if (TelephonyManager.CALL_STATE_RINGING == state || TelephonyManager.CALL_STATE_OFFHOOK == state) { // phone ringing or call active pause(); mPausedForCall = true; } if (mPausedForCall && TelephonyManager.CALL_STATE_IDLE == state) { // the call we paused for has ended, so resume playback resume(); mPausedForCall = false; } } } @Override public void onCreate() { super.onCreate(); mMediaRouter = MediaRouter.getInstance(this); pollStatus(); PhoneCallListener phoneListener = new PhoneCallListener(); TelephonyManager telephonyManager = (TelephonyManager) this.getSystemService(Context.TELEPHONY_SERVICE); telephonyManager.listen(phoneListener, PhoneStateListener.LISTEN_CALL_STATE); } @Override public IBinder onBind(Intent intent) { mBound = true; return mBinder; } /** * Stops service after a delay if no media is playing (delay in case the * fragment is recreated for screen rotation). 
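* While media is playing (mPollingStatus is true), the service keeps running so playback continues in the background.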
*/ @Override public boolean onUnbind(Intent intent) { if (!mPollingStatus) { stopSelf(); } mBound = false; return super.onUnbind(intent); } public void setRouterFragment(RouteFragment rf) { mRouterFragment = new WeakReference<RouteFragment>(rf); } public void selectRoute(RouteInfo route) { mMediaRouter.removeCallback(mRouteRemovedCallback); mMediaRouter.selectRoute(route); MediaRouteSelector selector = new MediaRouteSelector.Builder() .addControlCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK) .build(); mMediaRouter.addCallback(selector, mRouteRemovedCallback, 0); mCurrentRoute = route; } public void sendControlRequest(Intent intent) { mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); } /** * Sets current track in renderer to specified item in playlist, then * starts playback. */ public void play(int trackNumber) { if (trackNumber < 0 || trackNumber >= mPlaylist.size()) return; mCurrentTrack = trackNumber; Item track = mPlaylist.get(trackNumber); DIDLParser parser = new DIDLParser(); DIDLContent didl = new DIDLContent(); didl.addItem(track); String metadata = ""; try { metadata = parser.generate(didl, true); } catch (Exception e) { Log.w(TAG, "Metadata generation failed", e); } Intent intent = new Intent(MediaControlIntent.ACTION_PLAY); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.setData(Uri.parse(track.getFirstResource().getValue())); intent.putExtra(MediaControlIntent.EXTRA_ITEM_METADATA, metadata); mMediaRouter.getSelectedRoute().sendControlRequest(intent, new ControlRequestCallback() { @Override public void onResult(Bundle data) { mSessionId = data.getString(MediaControlIntent.EXTRA_SESSION_ID); mItemId = data.getString(MediaControlIntent.EXTRA_ITEM_ID); mPollingStatus = true; new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); if (mRouterFragment.get() != null) { mRouterFragment.get().scrollToCurrent(); } } }); } /** * Sends 'pause' signal to current renderer. */ public void pause() { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_PAUSE); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); mPollingStatus = false; stopForeground(true); } /** * Sends 'resume' signal to current renderer. */ public void resume() { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_RESUME); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); mPollingStatus = true; new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); } /** * Sends 'stop' signal to current renderer. 
*/ public void stop() { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_STOP); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); mPollingStatus = false; stopForeground(true); } public void seek(int seconds) { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_SEEK); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); intent.putExtra(MediaControlIntent.EXTRA_ITEM_ID, mItemId); intent.putExtra(MediaControlIntent.EXTRA_ITEM_CONTENT_POSITION, (long) seconds * 1000); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); } /** * Sets a new playlist. Playback does not start until play() is called. * * @param playlist The media files in the playlist. */ public void setPlaylist(List<Item> playlist) { mPlaylist = playlist; } /** * Plays the track after current in the playlist. * * @return True if another item is played, false if the end * of the playlist is reached. */ public boolean playNext() { if (mCurrentTrack == -1) return false; if (mShuffle) { // Play random item. play(new Random().nextInt(mPlaylist.size())); return true; } else if (mCurrentTrack + 1 < mPlaylist.size()) { // Playlist not over, play next item. play(mCurrentTrack + 1); return true; } else if (mRepeat) { // Playlist over, repeat it. play(0); return true; } else { // Playlist over, stop playback. stop(); if (!mBound) { stopSelf(); } mPollingStatus = false; return false; } } /** * Plays the track before current in the playlist. */ public void playPrevious() { if (mCurrentTrack == -1) return; if (mShuffle) { // Play random item. play(new Random().nextInt(mPlaylist.size())); } else { play(mCurrentTrack - 1); } } /** * Returns index of the track that is currently played (zero-based). * @return the current track index, or -1 if playback has not started */ public int getCurrentTrack() { return mCurrentTrack; } /** * Requests playback information every second, as long as RendererFragment * is attached or media is playing. 
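* Also advances to the next playlist item once the renderer reports the current one as finished or canceled.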
*/ private void pollStatus() { if (mPollingStatus && mSessionId != null && mItemId != null) { Intent i = new Intent(); i.setAction(MediaControlIntent.ACTION_GET_STATUS); i.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); i.putExtra(MediaControlIntent.EXTRA_ITEM_ID, mItemId); mMediaRouter.getSelectedRoute().sendControlRequest(i, new ControlRequestCallback() { @Override public void onResult(Bundle data) { MediaItemStatus status = MediaItemStatus.fromBundle(data); if (status == null) return; if (mRouterFragment.get() != null) { mRouterFragment.get().receivePlaybackStatus(status); } if (status.getPlaybackState() != MediaItemStatus.PLAYBACK_STATE_PENDING && status.getPlaybackState() != MediaItemStatus.PLAYBACK_STATE_BUFFERING && status.getPlaybackState() != MediaItemStatus.PLAYBACK_STATE_PLAYING) { stopForeground(true); } if (status.getPlaybackState() == MediaItemStatus.PLAYBACK_STATE_FINISHED || status.getPlaybackState() == MediaItemStatus.PLAYBACK_STATE_CANCELED) { playNext(); } } }); } new Handler().postDelayed(new Runnable() { @Override public void run() { pollStatus(); } }, 1000); } public void increaseVolume() { mMediaRouter.getSelectedRoute().requestUpdateVolume(1); } public void decreaseVolume() { mMediaRouter.getSelectedRoute().requestUpdateVolume(-1); } public List<Item> getPlaylist() { return mPlaylist; } public void toggleShuffleEnabled() { mShuffle = !mShuffle; } public boolean getShuffleEnabled() { return mShuffle; } public void toggleRepeatEnabled() { mRepeat = !mRepeat; } public boolean getRepeatEnabled() { return mRepeat; } public RouteInfo getCurrentRoute() { return mCurrentRoute; } }
src/main/java/com/github/nutomic/controldlna/mediarouter/MediaRouterPlayService.java
/* Copyright (c) 2013, Felix Ableitner All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the <organization> nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.github.nutomic.controldlna.mediarouter; import android.app.Notification; import android.app.PendingIntent; import android.app.Service; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.os.IBinder; import android.preference.PreferenceManager; import android.support.v4.app.NotificationCompat; import android.support.v7.media.MediaControlIntent; import android.support.v7.media.MediaItemStatus; import android.support.v7.media.MediaRouteSelector; import android.support.v7.media.MediaRouter; import android.support.v7.media.MediaRouter.ControlRequestCallback; import android.support.v7.media.MediaRouter.RouteInfo; import android.telephony.PhoneStateListener; import android.telephony.TelephonyManager; import android.util.Log; import com.github.nutomic.controldlna.R; import com.github.nutomic.controldlna.gui.MainActivity; import com.github.nutomic.controldlna.gui.PreferencesActivity; import com.github.nutomic.controldlna.gui.RouteFragment; import com.github.nutomic.controldlna.utility.LoadImageTask; import org.teleal.cling.support.contentdirectory.DIDLParser; import org.teleal.cling.support.model.DIDLContent; import org.teleal.cling.support.model.DIDLObject; import org.teleal.cling.support.model.item.Item; import org.teleal.cling.support.model.item.MusicTrack; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.List; import java.util.Random; /** * Background service that handles media playback to a single UPNP media renderer. * * @author Felix Ableitner * */ public class MediaRouterPlayService extends Service { private static final String TAG = "PlayService"; private static final int NOTIFICATION_ID = 1; private final MediaRouterPlayServiceBinder mBinder = new MediaRouterPlayServiceBinder(this); private MediaRouter mMediaRouter; /** * Media items that should be played. */ private List<Item> mPlaylist = new ArrayList<Item>(); /** * The track that is currently being played. 
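* Set to -1 while no track has been started yet.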
*/ private int mCurrentTrack = -1; private boolean mShuffle = false; private boolean mRepeat = false; private String mItemId; private String mSessionId; private WeakReference<RouteFragment> mRouterFragment = new WeakReference<RouteFragment>(null); private boolean mPollingStatus = false; private boolean mBound; /** * Route that is currently being played to. May be invalid. */ private RouteInfo mCurrentRoute; /* * Stops foreground mode and notification if the current route * has been removed. If the service is not bound, stops it. */ private MediaRouter.Callback mRouteRemovedCallback = new MediaRouter.Callback() { @Override public void onRouteRemoved(MediaRouter router, RouteInfo route) { if (route.equals(mCurrentRoute)) { stopForeground(true); } if (!mBound && !mPollingStatus) { stopSelf(); } } @Override public void onRouteAdded(MediaRouter router, RouteInfo route) { if (route.getId().equals(mCurrentRoute.getId())) { selectRoute(route); new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); } } }; /** * Creates a notification after the icon bitmap is loaded. */ private class CreateNotificationTask extends LoadImageTask { @Override protected void onPostExecute(Bitmap result) { String title = ""; String artist = ""; if (mCurrentTrack < mPlaylist.size()) { title = mPlaylist.get(mCurrentTrack).getTitle(); if (mPlaylist.get(mCurrentTrack) instanceof MusicTrack) { MusicTrack track = (MusicTrack) mPlaylist.get(mCurrentTrack); if (track.getArtists().length > 0) { artist = track.getArtists()[0].getName(); } } } Intent intent = new Intent(MediaRouterPlayService.this, MainActivity.class); intent.setAction("showRouteFragment"); Notification notification = new NotificationCompat.Builder(MediaRouterPlayService.this) .setContentIntent(PendingIntent.getActivity(MediaRouterPlayService.this, 0, intent, 0)) .setContentTitle(title) .setContentText(artist) .setLargeIcon(result) .setSmallIcon(R.drawable.ic_launcher) .build(); notification.flags |= Notification.FLAG_ONGOING_EVENT; startForeground(NOTIFICATION_ID, notification); } } /** * Listens for incoming phone calls and pauses playback while a call is active. */ private class PhoneCallListener extends PhoneStateListener { private boolean mPausedForCall = false; @Override public void onCallStateChanged(int state, String incomingNumber) { if (!PreferenceManager.getDefaultSharedPreferences(MediaRouterPlayService.this) .getBoolean(PreferencesActivity.KEY_INCOMING_PHONE_CALL_PAUSE, true)) { return; } if (TelephonyManager.CALL_STATE_RINGING == state || TelephonyManager.CALL_STATE_OFFHOOK == state) { // phone ringing or call active pause(); mPausedForCall = true; } if (mPausedForCall && TelephonyManager.CALL_STATE_IDLE == state) { // the call we paused for has ended, so resume playback resume(); mPausedForCall = false; } } } @Override public void onCreate() { super.onCreate(); mMediaRouter = MediaRouter.getInstance(this); pollStatus(); PhoneCallListener phoneListener = new PhoneCallListener(); TelephonyManager telephonyManager = (TelephonyManager) this.getSystemService(Context.TELEPHONY_SERVICE); telephonyManager.listen(phoneListener, PhoneStateListener.LISTEN_CALL_STATE); } @Override public IBinder onBind(Intent intent) { mBound = true; return mBinder; } /** * Stops service after a delay if no media is playing (delay in case the * fragment is recreated for screen rotation). 
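* While media is playing (mPollingStatus is true), the service keeps running so playback continues in the background.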
*/ @Override public boolean onUnbind(Intent intent) { if (!mPollingStatus) { stopSelf(); } mBound = false; return super.onUnbind(intent); } public void setRouterFragment(RouteFragment rf) { mRouterFragment = new WeakReference<RouteFragment>(rf); } public void selectRoute(RouteInfo route) { mMediaRouter.removeCallback(mRouteRemovedCallback); mMediaRouter.selectRoute(route); MediaRouteSelector selector = new MediaRouteSelector.Builder() .addControlCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK) .build(); mMediaRouter.addCallback(selector, mRouteRemovedCallback, 0); mCurrentRoute = route; } public void sendControlRequest(Intent intent) { mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); } /** * Sets current track in renderer to specified item in playlist, then * starts playback. */ public void play(int trackNumber) { if (trackNumber < 0 || trackNumber >= mPlaylist.size()) return; mCurrentTrack = trackNumber; Item track = mPlaylist.get(trackNumber); DIDLParser parser = new DIDLParser(); DIDLContent didl = new DIDLContent(); didl.addItem(track); String metadata = ""; try { metadata = parser.generate(didl, true); } catch (Exception e) { Log.w(TAG, "Metadata generation failed", e); } Intent intent = new Intent(MediaControlIntent.ACTION_PLAY); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.setData(Uri.parse(track.getFirstResource().getValue())); intent.putExtra(MediaControlIntent.EXTRA_ITEM_METADATA, metadata); mMediaRouter.getSelectedRoute().sendControlRequest(intent, new ControlRequestCallback() { @Override public void onResult(Bundle data) { mSessionId = data.getString(MediaControlIntent.EXTRA_SESSION_ID); mItemId = data.getString(MediaControlIntent.EXTRA_ITEM_ID); mPollingStatus = true; new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); if (mRouterFragment.get() != null) { mRouterFragment.get().scrollToCurrent(); } } }); } /** * Sends 'pause' signal to current renderer. */ public void pause() { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_PAUSE); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); mPollingStatus = false; stopForeground(true); } /** * Sends 'resume' signal to current renderer. */ public void resume() { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_RESUME); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); mPollingStatus = true; new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); } /** * Sends 'stop' signal to current renderer. 
*/ public void stop() { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_STOP); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); mPollingStatus = false; stopForeground(true); } public void seek(int seconds) { if (mPlaylist.isEmpty()) return; Intent intent = new Intent(MediaControlIntent.ACTION_SEEK); intent.addCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK); intent.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); intent.putExtra(MediaControlIntent.EXTRA_ITEM_ID, mItemId); intent.putExtra(MediaControlIntent.EXTRA_ITEM_CONTENT_POSITION, (long) seconds * 1000); mMediaRouter.getSelectedRoute().sendControlRequest(intent, null); } /** * Sets a new playlist and starts playing. * * @param playlist The media files in the playlist. */ public void setPlaylist(List<Item> playlist) { mPlaylist = playlist; } /** * Plays the track after current in the playlist. * * @return True if another item is played, false if the end * of the playlist is reached. */ public boolean playNext() { if (mCurrentTrack == -1) return false; if (mShuffle) { // Play random item. play(new Random().nextInt(mPlaylist.size())); return true; } else if (mCurrentTrack + 1 < mPlaylist.size()) { // Playlist not over, play next item. play(mCurrentTrack + 1); return true; } else if (mRepeat) { // Playlist over, repeat it. play(0); return true; } else { // Playlist over, stop playback. stop(); if (!mBound) { stopSelf(); } mPollingStatus = false; return false; } } /** * Plays the track before current in the playlist. */ public void playPrevious() { if (mCurrentTrack == -1) return; if (mShuffle) { // Play random item. play(new Random().nextInt(mPlaylist.size())); } else { play(mCurrentTrack - 1); } } /** * Returns index of the track that is currently played (zero-based). * @return */ public int getCurrentTrack() { return mCurrentTrack; } /** * Requests playback information every second, as long as RendererFragment * is attached or media is playing. 
*/ private void pollStatus() { if (mPollingStatus && mSessionId != null && mItemId != null) { Intent i = new Intent(); i.setAction(MediaControlIntent.ACTION_GET_STATUS); i.putExtra(MediaControlIntent.EXTRA_SESSION_ID, mSessionId); i.putExtra(MediaControlIntent.EXTRA_ITEM_ID, mItemId); mMediaRouter.getSelectedRoute().sendControlRequest(i, new ControlRequestCallback() { @Override public void onResult(Bundle data) { MediaItemStatus status = MediaItemStatus.fromBundle(data); if (status == null) return; if (mRouterFragment.get() != null) { mRouterFragment.get().receivePlaybackStatus(status); } if (status.getPlaybackState() != MediaItemStatus.PLAYBACK_STATE_PENDING && status.getPlaybackState() != MediaItemStatus.PLAYBACK_STATE_BUFFERING && status.getPlaybackState() != MediaItemStatus.PLAYBACK_STATE_PLAYING) { stopForeground(true); } if (status.getPlaybackState() == MediaItemStatus.PLAYBACK_STATE_FINISHED || status.getPlaybackState() == MediaItemStatus.PLAYBACK_STATE_CANCELED) { playNext(); } } }); } new Handler().postDelayed(new Runnable() { @Override public void run() { pollStatus(); } }, 1000); } public void increaseVolume() { mMediaRouter.getSelectedRoute().requestUpdateVolume(1); } public void decreaseVolume() { mMediaRouter.getSelectedRoute().requestUpdateVolume(-1); } public List<Item> getPlaylist() { return mPlaylist; } public void toggleShuffleEnabled() { mShuffle = !mShuffle; } public boolean getShuffleEnabled() { return mShuffle; } public void toggleRepeatEnabled() { mRepeat = !mRepeat; } public boolean getRepeatEnabled() { return mRepeat; } public RouteInfo getCurrentRoute() { return mCurrentRoute; } }
Fixed crash when route is added.
src/main/java/com/github/nutomic/controldlna/mediarouter/MediaRouterPlayService.java
Fixed crash when route is added.
<ide><path>src/main/java/com/github/nutomic/controldlna/mediarouter/MediaRouterPlayService.java <ide> public void onRouteAdded(MediaRouter router, RouteInfo route) { <ide> if (route.getId().equals(mCurrentRoute.getId())) { <ide> selectRoute(route); <del> new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) <del> .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); <add> if (mCurrentTrack >= 0 && mCurrentTrack < mPlaylist.size()) { <add> new CreateNotificationTask().execute(mPlaylist.get(mCurrentTrack) <add> .getFirstPropertyValue(DIDLObject.Property.UPNP.ALBUM_ART_URI.class)); <add> } <ide> } <ide> };
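A minimal sketch of the bounds-check guard the diff above introduces, assuming the track index may be -1 when nothing is selected; the class and method names below are hypothetical stand-ins, not the app's API:

import java.util.List;

// Sketch of the guard from the diff: verify both bounds before get(),
// since the current-track index can be -1 or stale after a route change.
public class GuardSketch {
    static String safeTitle(List<String> playlist, int currentTrack) {
        if (currentTrack >= 0 && currentTrack < playlist.size()) {
            return playlist.get(currentTrack); // index verified, safe to read
        }
        return null; // out of range: caller skips the notification update
    }

    public static void main(String[] args) {
        System.out.println(safeTitle(List.of("a", "b"), 1));  // prints b
        System.out.println(safeTitle(List.of("a", "b"), -1)); // prints null
    }
}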
Java
lgpl-2.1
5f2ff676f9d44a0e22942736717326c96e655864
0
justincc/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,tomck/intermine,elsiklab/intermine,kimrutherford/intermine,joshkh/intermine,zebrafishmine/intermine,JoeCarlson/intermine
package org.intermine.dataloader; /* * Copyright (C) 2002-2005 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import org.intermine.metadata.AttributeDescriptor; import org.intermine.metadata.ClassDescriptor; import org.intermine.metadata.CollectionDescriptor; import org.intermine.metadata.FieldDescriptor; import org.intermine.metadata.MetaDataException; import org.intermine.metadata.Model; import org.intermine.metadata.PrimaryKey; import org.intermine.metadata.PrimaryKeyUtil; import org.intermine.metadata.ReferenceDescriptor; import org.intermine.model.InterMineObject; import org.intermine.objectstore.proxy.ProxyReference; import org.intermine.objectstore.query.ConstraintOp; import org.intermine.objectstore.query.ConstraintSet; import org.intermine.objectstore.query.ContainsConstraint; import org.intermine.objectstore.query.Query; import org.intermine.objectstore.query.QueryClass; import org.intermine.objectstore.query.QueryField; import org.intermine.objectstore.query.QueryValue; import org.intermine.objectstore.query.QueryObjectReference; import org.intermine.objectstore.query.SimpleConstraint; import org.intermine.objectstore.query.SubqueryConstraint; import org.intermine.util.DynamicUtil; import org.intermine.util.IntToIntMap; import org.intermine.util.PropertiesUtil; import org.intermine.util.TypeUtil; import org.apache.log4j.Logger; /** * Class providing utility methods to help with primary key and data source priority configuration * * @author Andrew Varley * @author Mark Woodbridge * @author Richard Smith * @author Matthew Wakeling */ public class DataLoaderHelper { private static final Logger LOG = Logger.getLogger(DataLoaderHelper.class); protected static Map sourceKeys = new HashMap(); protected static Map modelDescriptors = new HashMap(); /** * Compare the priorities of two sources over a field. * * @param fd FieldDescriptor for the field * @param src1 the first Source * @param src2 the second Source * @return a positive integer if src1 is of higher priority than src2, a negative integer if * src2 is of higher priority than src1 or zero if the sources are equal. * @throws IllegalArgumentException if the class is not in the file, or both of the sources * are not listed for that class */ public static int comparePriority(FieldDescriptor fd, Source src1, Source src2) { if (src1.equals(src2)) { return 0; } if (src1.getName().equals(src2.getName())) { if (src1.getSkeleton() && (!src2.getSkeleton())) { return -1; } else if (src2.getSkeleton() && (!src1.getSkeleton())) { return 1; } else { return 0; } } ClassDescriptor cld = fd.getClassDescriptor(); String cldName = TypeUtil.unqualifiedName(cld.getName()); Map descriptorSources = getDescriptors(cld.getModel()); List srcs = (List) descriptorSources.get(cldName + "." 
+ fd.getName()); if (srcs == null) { srcs = (List) descriptorSources.get(cldName); } if (srcs != null && srcs.contains(src1.getName()) && srcs.contains(src2.getName())) { return srcs.indexOf(src2.getName()) - srcs.indexOf(src1.getName()); } else { throw new IllegalArgumentException("Could not determine priorities for sources " + src1.getName() + " and " + src2.getName() + " for field " + fd.getClassDescriptor().getName() + "." + fd.getName() + " - is the config file set up correctly?"); } } /** * Build a map from class and field names to a priority-ordered List of source name Strings. * * @param model the Model * @return the Map */ protected static Map getDescriptors(Model model) { Map descriptorSources = null; synchronized (modelDescriptors) { descriptorSources = (Map) modelDescriptors.get(model); if (descriptorSources == null) { descriptorSources = new HashMap(); Properties priorities = PropertiesUtil.loadProperties(model.getName() + "_priorities.properties"); for (Iterator i = priorities.entrySet().iterator(); i.hasNext();) { Map.Entry entry = (Map.Entry) i.next(); String descriptorName = (String) entry.getKey(); String sourceNames = (String) entry.getValue(); List sources = new ArrayList(); String[] tokens = sourceNames.split(","); for (int o = 0; o < tokens.length; o++) { String token = tokens[o].trim(); sources.add(token); } descriptorSources.put(descriptorName, sources); } modelDescriptors.put(model, descriptorSources); } } return descriptorSources; } /** * Return a Set of PrimaryKeys relevant to a given Source for a ClassDescriptor. The Set * contains all the primary keys that exist on a particular class that are used by the * source, without performing any recursion. The Model.getClassDescriptorsForClass() * method is recommended if you wish for all the primary keys of the class' parents * as well. * * @param cld the ClassDescriptor * @param source the Source * @return a Set of PrimaryKeys */ public static Set getPrimaryKeys(ClassDescriptor cld, Source source) { Set keySet = new HashSet(); Properties keys = getKeyProperties(source); if (keys != null) { Map map = PrimaryKeyUtil.getPrimaryKeys(cld); String cldName = TypeUtil.unqualifiedName(cld.getName()); String keyList = (String) keys.get(cldName); if (keyList != null) { String[] tokens = keyList.split(","); for (int i = 0; i < tokens.length; i++) { String token = tokens[i].trim(); keySet.add(map.get(token)); } } } else { throw new IllegalArgumentException("Unable to find keys for source: " + source.getName()); } return keySet; } /** * Return the Properties that enumerate the keys for this Source * * @param source the Source * @return the relevant Properties */ protected static Properties getKeyProperties(Source source) { Properties keys = null; synchronized (sourceKeys) { keys = (Properties) sourceKeys.get(source); if (keys == null) { keys = PropertiesUtil.loadProperties(source.getName() + "_keys.properties"); sourceKeys.put(source, keys); } } return keys; } /** * Generates a query that searches for all objects in the database equivalent to a given * example object according to the primary keys defined for the given source. * * @param model a Model * @param obj the Object to take as an example * @param source the Source database * @param idMap an IntToIntMap from source IDs to destination IDs * nulls. 
If false the Query will constrain only those keys that have a value in the template * obj * @return a new Query (or null if all the primary keys from obj contain a null) * @throws MetaDataException if anything goes wrong */ public static Query createPKQuery(Model model, InterMineObject obj, Source source, IntToIntMap idMap) throws MetaDataException { return createPKQuery(model, obj, source, idMap, true); } /** * Generates a query that searches for all objects in the database equivalent to a given * example object according to the primary keys defined for the given source. * * @param model a Model * @param obj the Object to take as an example * @param source the Source database * @param idMap an IntToIntMap from source IDs to destination IDs * @param queryNulls if true allow primary keys to contain null values if the template obj has * nulls. If false the Query will constrain only those keys that have a value in the template * obj * @return a new Query (or null if all the primary keys from obj contain a null) * @throws MetaDataException if anything goes wrong */ public static Query createPKQuery(Model model, InterMineObject obj, Source source, IntToIntMap idMap, boolean queryNulls) throws MetaDataException { int subCount = 0; Query q = new Query(); q.setDistinct(false); QueryClass qcIMO = new QueryClass(InterMineObject.class); q.addFrom(qcIMO); q.addToSelect(qcIMO); ConstraintSet where = new ConstraintSet(ConstraintOp.OR); Query subQ = null; Set classDescriptors = model.getClassDescriptorsForClass(obj.getClass()); Iterator cldIter = classDescriptors.iterator(); while (cldIter.hasNext()) { ClassDescriptor cld = (ClassDescriptor) cldIter.next(); Set classQueries = createPKQueriesForClass(model, obj, source, idMap, queryNulls, cld); Iterator classQueriesIter = classQueries.iterator(); while (classQueriesIter.hasNext()) { subQ = (Query) classQueriesIter.next(); where.addConstraint(new SubqueryConstraint(qcIMO, ConstraintOp.IN, subQ)); subCount++; } } q.setConstraint(where); switch (subCount) { case 0: if (queryNulls) { return q; } else { return null; } case 1: return subQ; default: return q; } } /** * Generates a query that searches for all objects in the database equivalent to a given * example object, considering only one of it's classes. * @param model a Model * @param obj the Object to take as an example * @param source the Source database * @param idMap an IntToIntMap from source IDs to destination IDs * @param queryNulls if true allow primary keys to contain null values if the template obj has * nulls. If false the Query will constrain only those keys that have a value in the template * obj * @param cld one of the classes that obj is. 
Only primary keys for this classes will be * considered * @return a new Query (or null if all the primary keys from obj contain a null) * @throws MetaDataException if anything goes wrong */ private static Set createPKQueriesForClass(Model model, InterMineObject obj, Source source, IntToIntMap idMap, boolean queryNulls, ClassDescriptor cld) throws MetaDataException { Set primaryKeys; if (source == null) { primaryKeys = new HashSet(PrimaryKeyUtil.getPrimaryKeys(cld).values()); } else { primaryKeys = DataLoaderHelper.getPrimaryKeys(cld, source); } LOG.debug("primary keys for class " + cld.getName() + " = " + primaryKeys); Set returnSet = new LinkedHashSet(); Iterator pkSetIter = primaryKeys.iterator(); while (pkSetIter.hasNext()) { PrimaryKey pk = (PrimaryKey) pkSetIter.next(); if (!queryNulls && !objectPrimaryKeyNotNull(model, obj, cld, pk, source)) { continue; } Query query = new Query(); query.setDistinct(false); QueryClass qc = new QueryClass(cld.getType()); query.addFrom(qc); query.addToSelect(qc); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); Iterator pkIter = pk.getFieldNames().iterator(); PK: while (pkIter.hasNext()) { String fieldName = (String) pkIter.next(); FieldDescriptor fd = cld.getFieldDescriptorByName(fieldName); if (fd instanceof AttributeDescriptor) { Object value; try { value = TypeUtil.getFieldValue(obj, fieldName); } catch (IllegalAccessException e) { throw new RuntimeException("failed to get field value for field name: " + fieldName + " in " + obj, e); } if (value == null) { cs.addConstraint(new SimpleConstraint(new QueryField(qc, fieldName), ConstraintOp.IS_NULL)); } else { cs.addConstraint(new SimpleConstraint(new QueryField(qc, fieldName), ConstraintOp.EQUALS, new QueryValue(value))); } } else if (fd instanceof CollectionDescriptor) { throw new MetaDataException("A collection cannot be part of" + " a primary key"); } else if (fd instanceof ReferenceDescriptor) { InterMineObject refObj; try { refObj = (InterMineObject) TypeUtil.getFieldProxy(obj, fieldName); } catch (IllegalAccessException e) { throw new RuntimeException("failed to get field proxy for field name: " + fieldName + " in " + obj, e); } if (refObj == null) { QueryObjectReference queryObjectReference = new QueryObjectReference(qc, fieldName); cs.addConstraint(new ContainsConstraint(queryObjectReference, ConstraintOp.IS_NULL)); continue PK; } Integer destId = null; if (refObj.getId() != null) { destId = idMap.get(refObj.getId()); } if (destId == null) { if (refObj instanceof ProxyReference) { refObj = ((ProxyReference) refObj).getObject(); } Query refSubQuery = createPKQuery(model, refObj, source, idMap, queryNulls); ClassDescriptor referencedClassDescriptor = ((ReferenceDescriptor) fd).getReferencedClassDescriptor(); QueryClass qc2 = new QueryClass(referencedClassDescriptor.getType()); query.addFrom(qc2); QueryObjectReference fieldQOF = new QueryObjectReference(qc, fieldName); cs.addConstraint(new ContainsConstraint(fieldQOF, ConstraintOp.CONTAINS, qc2)); cs.addConstraint(new SubqueryConstraint(qc2, ConstraintOp.IN, refSubQuery)); } else { InterMineObject destObj = (InterMineObject) DynamicUtil.createObject(Collections.singleton(InterMineObject.class)); destObj.setId(destId); cs.addConstraint(new ContainsConstraint(new QueryObjectReference(qc, fieldName), ConstraintOp.CONTAINS, destObj)); } } } query.setConstraint(cs); returnSet.add(query); } return returnSet; } /** * Look a the values of the given primary key in the object and return true if and only if some * part of the primary key is null. 
If the primary key contains a reference it is sufficient * for any of the primary keys of the referenced object to be non-null (ie * objectPrimaryKeyIsNull() returning true). * @param model the Model in which to find ClassDescriptors * @param obj the Object to check * @param cld one of the classes that obj is. Only primary keys for this classes will be * checked * @param pk the primary key to check * @param source the Source database * @return true if the the given primary key is non-null for the given object * @throws MetaDataException if anything goes wrong */ public static boolean objectPrimaryKeyNotNull(Model model, InterMineObject obj, ClassDescriptor cld, PrimaryKey pk, Source source) throws MetaDataException { Iterator pkFieldIter = pk.getFieldNames().iterator(); PK: while (pkFieldIter.hasNext()) { String fieldName = (String) pkFieldIter.next(); FieldDescriptor fd = cld.getFieldDescriptorByName(fieldName); if (fd instanceof AttributeDescriptor) { Object value; try { value = TypeUtil.getFieldValue(obj, fieldName); } catch (IllegalAccessException e) { throw new MetaDataException("Failed to get field " + fieldName + " for key " + pk + " from " + obj, e); } if (value == null) { return false; } } else if (fd instanceof CollectionDescriptor) { throw new MetaDataException("A collection cannot be part of" + " a primary key"); } else if (fd instanceof ReferenceDescriptor) { InterMineObject refObj; try { refObj = (InterMineObject) TypeUtil.getFieldProxy(obj, fieldName); } catch (IllegalAccessException e) { throw new MetaDataException("Failed to get field " + fieldName + " for key " + pk + " from " + obj, e); } if (refObj == null) { return false; } if (refObj instanceof ProxyReference) { refObj = ((ProxyReference) refObj).getObject(); } boolean foundNonNullKey = false; Set classDescriptors = model.getClassDescriptorsForClass(refObj.getClass()); Iterator cldIter = classDescriptors.iterator(); CLDS: while (cldIter.hasNext()) { ClassDescriptor refCld = (ClassDescriptor) cldIter.next(); Set primaryKeys; if (source == null) { primaryKeys = new HashSet(PrimaryKeyUtil.getPrimaryKeys(refCld).values()); } else { primaryKeys = DataLoaderHelper.getPrimaryKeys(refCld, source); } Iterator pkSetIter = primaryKeys.iterator(); while (pkSetIter.hasNext()) { PrimaryKey refPK = (PrimaryKey) pkSetIter.next(); if (objectPrimaryKeyNotNull(model, refObj, refCld, refPK, source)) { foundNonNullKey = true; break CLDS; } } } if (!foundNonNullKey) { return false; } } } return true; } /** * Returns true if the given field is a member of any primary key on the given class, for the * given source. * * @param model the Model in which to find ClassDescriptors * @param clazz the Class in which to look * @param fieldName the name of the field to check * @param source the Source that the keys belong to * @return true if the field is a primary key */ public static boolean fieldIsPrimaryKey(Model model, Class clazz, String fieldName, Source source) { Set classDescriptors = model.getClassDescriptorsForClass(clazz); Iterator cldIter = classDescriptors.iterator(); while (cldIter.hasNext()) { ClassDescriptor cld = (ClassDescriptor) cldIter.next(); Set primaryKeys = DataLoaderHelper.getPrimaryKeys(cld, source); Iterator pkIter = primaryKeys.iterator(); while (pkIter.hasNext()) { PrimaryKey pk = (PrimaryKey) pkIter.next(); if (pk.getFieldNames().contains(fieldName)) { return true; } } } return false; } }
intermine/integrate/main/src/org/intermine/dataloader/DataLoaderHelper.java
package org.intermine.dataloader; /* * Copyright (C) 2002-2005 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import org.intermine.metadata.AttributeDescriptor; import org.intermine.metadata.ClassDescriptor; import org.intermine.metadata.CollectionDescriptor; import org.intermine.metadata.FieldDescriptor; import org.intermine.metadata.MetaDataException; import org.intermine.metadata.Model; import org.intermine.metadata.PrimaryKey; import org.intermine.metadata.PrimaryKeyUtil; import org.intermine.metadata.ReferenceDescriptor; import org.intermine.model.InterMineObject; import org.intermine.objectstore.proxy.ProxyReference; import org.intermine.objectstore.query.ConstraintOp; import org.intermine.objectstore.query.ConstraintSet; import org.intermine.objectstore.query.ContainsConstraint; import org.intermine.objectstore.query.Query; import org.intermine.objectstore.query.QueryClass; import org.intermine.objectstore.query.QueryField; import org.intermine.objectstore.query.QueryValue; import org.intermine.objectstore.query.QueryObjectReference; import org.intermine.objectstore.query.SimpleConstraint; import org.intermine.objectstore.query.SubqueryConstraint; import org.intermine.util.DynamicUtil; import org.intermine.util.IntToIntMap; import org.intermine.util.PropertiesUtil; import org.intermine.util.TypeUtil; import org.apache.log4j.Logger; /** * Class providing utility methods to help with primary key and data source priority configuration * * @author Andrew Varley * @author Mark Woodbridge * @author Richard Smith * @author Matthew Wakeling */ public class DataLoaderHelper { private static final Logger LOG = Logger.getLogger(DataLoaderHelper.class); protected static Map sourceKeys = new HashMap(); protected static Map modelDescriptors = new HashMap(); /** * Compare the priorities of two sources over a field. * * @param fd FieldDescriptor for the field * @param src1 the first Source * @param src2 the second Source * @return a positive integer if src1 is of higher priority than src2, a negative integer if * src2 is of higher priority than src1 or zero if the sources are equal. * @throws IllegalArgumentException if the class is not in the file, or both of the sources * are not listed for that class */ public static int comparePriority(FieldDescriptor fd, Source src1, Source src2) { if (src1.equals(src2)) { return 0; } if (src1.getName().equals(src2.getName())) { if (src1.getSkeleton() && (!src2.getSkeleton())) { return -1; } else if (src2.getSkeleton() && (!src1.getSkeleton())) { return 1; } else { return 0; } } ClassDescriptor cld = fd.getClassDescriptor(); String cldName = TypeUtil.unqualifiedName(cld.getName()); Map descriptorSources = getDescriptors(cld.getModel()); List srcs = (List) descriptorSources.get(cldName + "." 
+ fd.getName()); if (srcs == null) { srcs = (List) descriptorSources.get(cldName); } if (srcs != null && srcs.contains(src1.getName()) && srcs.contains(src2.getName())) { return srcs.indexOf(src2.getName()) - srcs.indexOf(src1.getName()); } else { throw new IllegalArgumentException("Could not determine priorities for sources " + src1.getName() + " and " + src2.getName() + " for field " + fd.getClassDescriptor().getName() + "." + fd.getName() + " - is the config file set up correctly?"); } } /** * Build a map from class and field names to a priority-ordered List of source name Strings. * * @param model the Model * @return the Map */ protected static Map getDescriptors(Model model) { Map descriptorSources = null; synchronized (modelDescriptors) { descriptorSources = (Map) modelDescriptors.get(model); if (descriptorSources == null) { descriptorSources = new HashMap(); Properties priorities = PropertiesUtil.loadProperties(model.getName() + "_priorities.properties"); for (Iterator i = priorities.entrySet().iterator(); i.hasNext();) { Map.Entry entry = (Map.Entry) i.next(); String descriptorName = (String) entry.getKey(); String sourceNames = (String) entry.getValue(); List sources = new ArrayList(); String[] tokens = sourceNames.split(","); for (int o = 0; o < tokens.length; o++) { String token = tokens[o].trim(); sources.add(token); } descriptorSources.put(descriptorName, sources); } modelDescriptors.put(model, descriptorSources); } } return descriptorSources; } /** * Return a Set of PrimaryKeys relevant to a given Source for a ClassDescriptor. The Set * contains all the primary keys that exist on a particular class that are used by the * source, without performing any recursion. The Model.getClassDescriptorsForClass() * method is recommended if you wish for all the primary keys of the class' parents * as well. * * @param cld the ClassDescriptor * @param source the Source * @return a Set of PrimaryKeys */ public static Set getPrimaryKeys(ClassDescriptor cld, Source source) { Set keySet = new HashSet(); Properties keys = getKeyProperties(source); if (keys != null) { Map map = PrimaryKeyUtil.getPrimaryKeys(cld); String cldName = TypeUtil.unqualifiedName(cld.getName()); String keyList = (String) keys.get(cldName); if (keyList != null) { String[] tokens = keyList.split(","); for (int i = 0; i < tokens.length; i++) { String token = tokens[i].trim(); keySet.add(map.get(token)); } } } else { throw new IllegalArgumentException("Unable to find keys for source: " + source.getName()); } return keySet; } /** * Return the Properties that enumerate the keys for this Source * * @param source the Source * @return the relevant Properties */ protected static Properties getKeyProperties(Source source) { Properties keys = null; synchronized (sourceKeys) { keys = (Properties) sourceKeys.get(source); if (keys == null) { keys = PropertiesUtil.loadProperties(source.getName() + "_keys.properties"); sourceKeys.put(source, keys); } } return keys; } /** * Generates a query that searches for all objects in the database equivalent to a given * example object according to the primary keys defined for the given source. * * @param model a Model * @param obj the Object to take as an example * @param source the Source database * @param idMap an IntToIntMap from source IDs to destination IDs * nulls. 
If false the Query will constrain only those keys that have a value in the template * obj * @return a new Query (or null if all the primary keys from obj contain a null) * @throws MetaDataException if anything goes wrong */ public static Query createPKQuery(Model model, InterMineObject obj, Source source, IntToIntMap idMap) throws MetaDataException { return createPKQuery(model, obj, source, idMap, true); } /** * Generates a query that searches for all objects in the database equivalent to a given * example object according to the primary keys defined for the given source. * * @param model a Model * @param obj the Object to take as an example * @param source the Source database * @param idMap an IntToIntMap from source IDs to destination IDs * @param queryNulls if true allow primary keys to contain null values if the template obj has * nulls. If false the Query will constrain only those keys that have a value in the template * obj * @return a new Query (or null if all the primary keys from obj contain a null) * @throws MetaDataException if anything goes wrong */ public static Query createPKQuery(Model model, InterMineObject obj, Source source, IntToIntMap idMap, boolean queryNulls) throws MetaDataException { int subCount = 0; Query q = new Query(); q.setDistinct(false); QueryClass qcIMO = new QueryClass(InterMineObject.class); q.addFrom(qcIMO); q.addToSelect(qcIMO); ConstraintSet where = new ConstraintSet(ConstraintOp.OR); Query subQ = null; Set classDescriptors = model.getClassDescriptorsForClass(obj.getClass()); Iterator cldIter = classDescriptors.iterator(); while (cldIter.hasNext()) { ClassDescriptor cld = (ClassDescriptor) cldIter.next(); Set classQueries = createPKQueriesForClass(model, obj, source, idMap, queryNulls, cld); Iterator classQueriesIter = classQueries.iterator(); while (classQueriesIter.hasNext()) { subQ = (Query) classQueriesIter.next(); where.addConstraint(new SubqueryConstraint(qcIMO, ConstraintOp.IN, subQ)); subCount++; } } q.setConstraint(where); switch (subCount) { case 0: if (queryNulls) { return q; } else { return null; } case 1: return subQ; default: return q; } } /** * Generates a query that searches for all objects in the database equivalent to a given * example object, considering only one of it's classes. * @param model a Model * @param obj the Object to take as an example * @param source the Source database * @param idMap an IntToIntMap from source IDs to destination IDs * @param queryNulls if true allow primary keys to contain null values if the template obj has * nulls. If false the Query will constrain only those keys that have a value in the template * obj * @param cld one of the classes that obj is. 
Only primary keys for this classes will be * considered * @return a new Query (or null if all the primary keys from obj contain a null) * @throws MetaDataException if anything goes wrong */ private static Set createPKQueriesForClass(Model model, InterMineObject obj, Source source, IntToIntMap idMap, boolean queryNulls, ClassDescriptor cld) throws MetaDataException { Set primaryKeys; if (source == null) { primaryKeys = new HashSet(PrimaryKeyUtil.getPrimaryKeys(cld).values()); } else { primaryKeys = DataLoaderHelper.getPrimaryKeys(cld, source); } LOG.info("primary keys for class " + cld.getName() + " = " + primaryKeys); Set returnSet = new LinkedHashSet(); Iterator pkSetIter = primaryKeys.iterator(); while (pkSetIter.hasNext()) { PrimaryKey pk = (PrimaryKey) pkSetIter.next(); if (!queryNulls && !objectPrimaryKeyNotNull(model, obj, cld, pk, source)) { continue; } Query query = new Query(); query.setDistinct(false); QueryClass qc = new QueryClass(cld.getType()); query.addFrom(qc); query.addToSelect(qc); ConstraintSet cs = new ConstraintSet(ConstraintOp.AND); Iterator pkIter = pk.getFieldNames().iterator(); PK: while (pkIter.hasNext()) { String fieldName = (String) pkIter.next(); FieldDescriptor fd = cld.getFieldDescriptorByName(fieldName); if (fd instanceof AttributeDescriptor) { Object value; try { value = TypeUtil.getFieldValue(obj, fieldName); } catch (IllegalAccessException e) { throw new RuntimeException("failed to get field value for field name: " + fieldName + " in " + obj, e); } if (value == null) { cs.addConstraint(new SimpleConstraint(new QueryField(qc, fieldName), ConstraintOp.IS_NULL)); } else { cs.addConstraint(new SimpleConstraint(new QueryField(qc, fieldName), ConstraintOp.EQUALS, new QueryValue(value))); } } else if (fd instanceof CollectionDescriptor) { throw new MetaDataException("A collection cannot be part of" + " a primary key"); } else if (fd instanceof ReferenceDescriptor) { InterMineObject refObj; try { refObj = (InterMineObject) TypeUtil.getFieldProxy(obj, fieldName); } catch (IllegalAccessException e) { throw new RuntimeException("failed to get field proxy for field name: " + fieldName + " in " + obj, e); } if (refObj == null) { QueryObjectReference queryObjectReference = new QueryObjectReference(qc, fieldName); cs.addConstraint(new ContainsConstraint(queryObjectReference, ConstraintOp.IS_NULL)); continue PK; } Integer destId = null; if (refObj.getId() != null) { destId = idMap.get(refObj.getId()); } if (destId == null) { if (refObj instanceof ProxyReference) { refObj = ((ProxyReference) refObj).getObject(); } Query refSubQuery = createPKQuery(model, refObj, source, idMap, queryNulls); ClassDescriptor referencedClassDescriptor = ((ReferenceDescriptor) fd).getReferencedClassDescriptor(); QueryClass qc2 = new QueryClass(referencedClassDescriptor.getType()); query.addFrom(qc2); QueryObjectReference fieldQOF = new QueryObjectReference(qc, fieldName); cs.addConstraint(new ContainsConstraint(fieldQOF, ConstraintOp.CONTAINS, qc2)); cs.addConstraint(new SubqueryConstraint(qc2, ConstraintOp.IN, refSubQuery)); } else { InterMineObject destObj = (InterMineObject) DynamicUtil.createObject(Collections.singleton(InterMineObject.class)); destObj.setId(destId); cs.addConstraint(new ContainsConstraint(new QueryObjectReference(qc, fieldName), ConstraintOp.CONTAINS, destObj)); } } } query.setConstraint(cs); returnSet.add(query); } return returnSet; } /** * Look a the values of the given primary key in the object and return true if and only if some * part of the primary key is null. 
If the primary key contains a reference it is sufficient * for any of the primary keys of the referenced object to be non-null (ie * objectPrimaryKeyIsNull() returning true). * @param model the Model in which to find ClassDescriptors * @param obj the Object to check * @param cld one of the classes that obj is. Only primary keys for this classes will be * checked * @param pk the primary key to check * @param source the Source database * @return true if the the given primary key is non-null for the given object * @throws MetaDataException if anything goes wrong */ public static boolean objectPrimaryKeyNotNull(Model model, InterMineObject obj, ClassDescriptor cld, PrimaryKey pk, Source source) throws MetaDataException { Iterator pkFieldIter = pk.getFieldNames().iterator(); PK: while (pkFieldIter.hasNext()) { String fieldName = (String) pkFieldIter.next(); FieldDescriptor fd = cld.getFieldDescriptorByName(fieldName); if (fd instanceof AttributeDescriptor) { Object value; try { value = TypeUtil.getFieldValue(obj, fieldName); } catch (IllegalAccessException e) { throw new MetaDataException("Failed to get field " + fieldName + " for key " + pk + " from " + obj, e); } if (value == null) { return false; } } else if (fd instanceof CollectionDescriptor) { throw new MetaDataException("A collection cannot be part of" + " a primary key"); } else if (fd instanceof ReferenceDescriptor) { InterMineObject refObj; try { refObj = (InterMineObject) TypeUtil.getFieldProxy(obj, fieldName); } catch (IllegalAccessException e) { throw new MetaDataException("Failed to get field " + fieldName + " for key " + pk + " from " + obj, e); } if (refObj == null) { return false; } if (refObj instanceof ProxyReference) { refObj = ((ProxyReference) refObj).getObject(); } boolean foundNonNullKey = false; Set classDescriptors = model.getClassDescriptorsForClass(refObj.getClass()); Iterator cldIter = classDescriptors.iterator(); CLDS: while (cldIter.hasNext()) { ClassDescriptor refCld = (ClassDescriptor) cldIter.next(); Set primaryKeys; if (source == null) { primaryKeys = new HashSet(PrimaryKeyUtil.getPrimaryKeys(refCld).values()); } else { primaryKeys = DataLoaderHelper.getPrimaryKeys(refCld, source); } Iterator pkSetIter = primaryKeys.iterator(); while (pkSetIter.hasNext()) { PrimaryKey refPK = (PrimaryKey) pkSetIter.next(); if (objectPrimaryKeyNotNull(model, refObj, refCld, refPK, source)) { foundNonNullKey = true; break CLDS; } } } if (!foundNonNullKey) { return false; } } } return true; } /** * Returns true if the given field is a member of any primary key on the given class, for the * given source. * * @param model the Model in which to find ClassDescriptors * @param clazz the Class in which to look * @param fieldName the name of the field to check * @param source the Source that the keys belong to * @return true if the field is a primary key */ public static boolean fieldIsPrimaryKey(Model model, Class clazz, String fieldName, Source source) { Set classDescriptors = model.getClassDescriptorsForClass(clazz); Iterator cldIter = classDescriptors.iterator(); while (cldIter.hasNext()) { ClassDescriptor cld = (ClassDescriptor) cldIter.next(); Set primaryKeys = DataLoaderHelper.getPrimaryKeys(cld, source); Iterator pkIter = primaryKeys.iterator(); while (pkIter.hasNext()) { PrimaryKey pk = (PrimaryKey) pkIter.next(); if (pk.getFieldNames().contains(fieldName)) { return true; } } } return false; } }
changed log message level down to debug
intermine/integrate/main/src/org/intermine/dataloader/DataLoaderHelper.java
changed log message level down to debug
<ide><path>intermine/integrate/main/src/org/intermine/dataloader/DataLoaderHelper.java <ide> public class DataLoaderHelper <ide> { <ide> private static final Logger LOG = Logger.getLogger(DataLoaderHelper.class); <del> <add> <ide> protected static Map sourceKeys = new HashMap(); <ide> protected static Map modelDescriptors = new HashMap(); <ide> <ide> default: <ide> return q; <ide> } <del> <add> <ide> } <ide> <ide> /** <ide> primaryKeys = DataLoaderHelper.getPrimaryKeys(cld, source); <ide> } <ide> <del> LOG.info("primary keys for class " + cld.getName() + " = " + primaryKeys); <del> <add> LOG.debug("primary keys for class " + cld.getName() + " = " + primaryKeys); <add> <ide> Set returnSet = new LinkedHashSet(); <ide> <ide> Iterator pkSetIter = primaryKeys.iterator();
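A hedged sketch of the logging change the commit above makes, using the log4j 1.x API the file already imports; the isDebugEnabled() guard is an optional extra, shown so the message string is not built at all when DEBUG is off:

import org.apache.log4j.Logger;

// The commit demotes a chatty per-class primary-key dump from info() to debug().
public class LogLevelSketch {
    private static final Logger LOG = Logger.getLogger(LogLevelSketch.class);

    static void reportKeys(String className, Object primaryKeys) {
        if (LOG.isDebugEnabled()) { // skip the concatenation unless DEBUG is enabled
            LOG.debug("primary keys for class " + className + " = " + primaryKeys);
        }
    }
}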
Java
apache-2.0
99fd4d31232ac00fd8fdc38a4b667bb35f546a22
0
FITeagle/adapters
package org.fiteagle.adapters.OpenBaton; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.ejb.EJB; import javax.ejb.Init; import javax.ejb.PostActivate; import javax.enterprise.concurrent.ManagedThreadFactory; import javax.naming.InitialContext; import javax.naming.NamingException; import org.fiteagle.abstractAdapter.AbstractAdapter; import org.fiteagle.adapters.OpenBaton.Model.BenchmarkingTool; import org.fiteagle.adapters.OpenBaton.Model.Control; import org.fiteagle.adapters.OpenBaton.Model.DomainNameSystem; import org.fiteagle.adapters.OpenBaton.Model.ENodeB; import org.fiteagle.adapters.OpenBaton.Model.FiveGCore; import org.fiteagle.adapters.OpenBaton.Model.Gateway; import org.fiteagle.adapters.OpenBaton.Model.HomeSubscriberService; import org.fiteagle.adapters.OpenBaton.Model.MME; import org.fiteagle.adapters.OpenBaton.Model.OpenBatonGeneric; import org.fiteagle.adapters.OpenBaton.Model.OpenBatonService; import org.fiteagle.adapters.OpenBaton.Model.ServiceContainer; import org.fiteagle.adapters.OpenBaton.Model.SgwuPgwu; import org.fiteagle.adapters.OpenBaton.Model.Switch; import org.fiteagle.adapters.OpenBaton.Model.Topology; import org.fiteagle.adapters.OpenBaton.Model.UE; import org.fiteagle.adapters.OpenBaton.dm.OpenBatonAdapterMDBSender; import org.fiteagle.api.core.Config; import org.fiteagle.api.core.IMessageBus; import org.fiteagle.api.core.MessageBusOntologyModel; import org.fiteagle.api.core.MessageUtil; import org.fiteagle.api.core.OntologyModelUtil; import org.fiteagle.api.tripletStoreAccessor.TripletStoreAccessor; import org.openbaton.catalogue.mano.common.Ip; import org.openbaton.catalogue.mano.descriptor.NetworkServiceDescriptor; import org.openbaton.catalogue.mano.descriptor.VirtualLinkDescriptor; import org.openbaton.catalogue.mano.descriptor.VirtualNetworkFunctionDescriptor; import org.openbaton.catalogue.mano.record.NetworkServiceRecord; import org.openbaton.catalogue.mano.record.VirtualNetworkFunctionRecord; import org.openbaton.catalogue.security.Project; import com.hp.hpl.jena.ontology.Ontology; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.NodeIterator; import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.rdf.model.RDFNode; import com.hp.hpl.jena.rdf.model.ResIterator; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.rdf.model.Statement; import com.hp.hpl.jena.rdf.model.StmtIterator; import com.hp.hpl.jena.rdf.model.impl.StatementImpl; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.RDF; import com.hp.hpl.jena.vocabulary.RDFS; import info.openmultinet.ontology.vocabulary.Omn; import info.openmultinet.ontology.vocabulary.Omn_federation; import info.openmultinet.ontology.vocabulary.Omn_lifecycle; import info.openmultinet.ontology.vocabulary.Omn_resource; import info.openmultinet.ontology.vocabulary.Omn_service; import info.openmultinet.ontology.vocabulary.OpenBaton; import info.openmultinet.ontology.vocabulary.Osco; public final class OpenBatonAdapter extends AbstractAdapter { private static final Logger LOGGER = Logger.getLogger(OpenBatonAdapter.class.toString()); // protected OpenBatonClient openBatonClient ; protected OpenBatonClient adminClient ; private OpenBatonAdapterMDBSender listener; @EJB OpenBatonAdapterControl openBatonAdapterControler; 
private String username; private String password; private String nfvoIp; private String nfvoPort; private String version; private String vpnIP; private String vpnPort; private String adminProjectId; private String debugString; private VirtualNetworkFunctionDescriptor createdDebugMME; private Resource debugTopologyResource; private String debugProjectId = "d28a8a82-d503-42c5-80e5-899469e9255d"; // private String debugProjectId = null; private transient final HashMap<String, OpenBatonGeneric> instanceList = new HashMap<String, OpenBatonGeneric>(); private HashMap<String,OpenBatonClient> clientList = new HashMap<String,OpenBatonClient>(); public OpenBatonAdapter(final Model adapterModel, Resource adapterABox) { super(); this.uuid = UUID.randomUUID().toString(); this.adapterTBox = adapterModel; this.adapterABox = adapterABox; final Resource adapterType = this.getAdapterClass(); this.adapterABox.addProperty(RDF.type, adapterType); this.adapterABox.addProperty(RDFS.label, this.adapterABox.getLocalName()); this.adapterABox.addProperty(RDFS.comment, "OpenBaton Adapter"); this.adapterABox.addLiteral(MessageBusOntologyModel.maxInstances, 100); // this.adapterABox.addProperty(Omn_lifecycle.canImplement, // Omn_domain_pc.PC); /** * Looking up all Resources that belongs to the Adapter and will be * shown in SFA as Nodes. */ Model implementables = OntologyModelUtil.loadModel("ontologies/openBaton-adapter.ttl", IMessageBus.SERIALIZATION_TURTLE); final NodeIterator resourceIterator = implementables.listObjectsOfProperty(Omn_lifecycle.implements_); while (resourceIterator.hasNext()) { final Resource resource = resourceIterator.next().asResource(); this.adapterABox.addProperty(Omn_lifecycle.canImplement, resource); this.adapterABox.getModel().add(resource.getModel()); final ResIterator propIterator = this.adapterTBox.listSubjectsWithProperty(RDFS.domain, resource); while (propIterator.hasNext()) { final Property property = this.adapterTBox.getProperty(propIterator.next().getURI()); } } } // @PostConstruct public void init() { // openBatonClient.init(); // adminClient = new OpenBatonClient(this, adminProjectId); adminClient = findClient(adminProjectId); // TODO CHANGE WHEN DEBUG IS OVER OpenBatonClient initClient = adminClient; // Refresh the adapterABox Model with infos from Database Model newImplementables = TripletStoreAccessor.getResource(adapterABox.getURI()); NodeIterator iterator = newImplementables.listObjectsOfProperty(Omn_lifecycle.canImplement); //If Adapter has no "canImplement" Resources check on OpenBaton-Server while(iterator.hasNext()){ RDFNode statement = iterator.next(); Resource resource = statement.asResource(); this.adapterABox.addProperty(Omn_lifecycle.canImplement, resource); this.adapterABox.getModel().add(resource.getModel()); } try{ List<VirtualLinkDescriptor> vnfdList = initClient.getAllVnfDescriptor(); for(VirtualLinkDescriptor v : vnfdList){ Resource newResource = this.adapterABox.getModel().createResource(Omn.NAMESPACE + v.getName()); newResource.addProperty(RDFS.label, v.getName()); newResource.addProperty(RDFS.subClassOf, Omn.Resource); newResource.addProperty(Omn_lifecycle.hasID, v.getId()); this.adapterABox.addProperty(Omn_lifecycle.canImplement, newResource); this.adapterABox.getModel().add(newResource.getModel()); } listener.publishModelUpdate(this.adapterABox.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); }catch(Exception e){ e.printStackTrace(); } } @Override public void updateAdapterDescription() throws ProcessingException { // TODO 
Auto-generated method stub } @Override public Model updateInstance(final String instanceURI, final Model configureModel) { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "updateInstance instanceURI: " + instanceURI); LOGGER.log(Level.INFO, "updateInstance configureModel: " + MessageUtil.serializeModel(configureModel, IMessageBus.SERIALIZATION_TURTLE)); } // if the instance is in the list of instances in the adapter if (this.getInstanceList().containsKey(instanceURI)) { final OpenBatonGeneric currentFiveG = this.getInstanceList().get(instanceURI); Resource fivegResource = configureModel.getResource(instanceURI); currentFiveG.updateInstance(fivegResource); final Model newModel = this.parseToModel(currentFiveG); if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Returning updated fiveg: " + newModel); } return newModel; } else { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Instance list does not contain key."); } } if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Creating new instance"); } return ModelFactory.createDefaultModel(); } @Override public Model createInstances(Model model) throws ProcessingException, InvalidRequestException { Model createdInstancesModel = super.createInstances(model); LOGGER.warning("createInstances override method."); String topologyUri = null; NodeIterator objects = model.listObjectsOfProperty(Omn.isResourceOf); if (objects.hasNext()) { RDFNode object = objects.next(); topologyUri = object.asResource().getURI(); } // Uncomment this method to make creation at OpenSDNCore automatically // occur upon SFA provision call // try { // HelperMethods.createTopologyAtOpenSDNCore(this, "FiveGAdapter-1", // topologyUri, 10); // } catch (Exception e) { // e.printStackTrace(); // } return createdInstancesModel; } @Override public Model createInstance(String instanceURI, Model newInstanceModel) { Resource resource = newInstanceModel.getResource(instanceURI); OpenBatonClient client =null; // check if already created for (Map.Entry<String, OpenBatonGeneric> entry : this.getInstanceList().entrySet()) { String key = entry.getKey(); OpenBatonGeneric value = entry.getValue(); if (instanceURI.equals(key)) { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Instance already exists: " + instanceURI); } return this.parseToModel(value); } } // check if topology exists, otherwise create it String topologyUri = null; Resource topologyResource = null; Topology topology = null; NetworkServiceDescriptor nsd = null; if (resource.hasProperty(Omn.isResourceOf)) { topologyResource = resource.getProperty(Omn.isResourceOf).getObject().asResource(); topologyUri = topologyResource.getURI().toString(); // topologyResource = ModelFactory.createDefaultModel().getResource(topologyUri); if (this.getInstanceList().get(topologyUri) == null) { topology = new Topology(this, topologyUri); } else { topology = (Topology) this.getInstanceList().get(topologyUri); } this.getInstanceList().put(topologyUri, topology); } // Check which Ressource should be created if(resource.hasProperty(Omn_resource.hasInterface)){ if(debugProjectId != null){ client = findClient(adminProjectId); nsd = client.getNetworkServiceDescriptor(); if(nsd == null){ nsd = client.createLocalNetworkServiceDescriptor(); topologyResource.addProperty(Omn_resource.hasHardwareType, ModelFactory.createDefaultModel().createResource(adapterABox.getNameSpace() + nsd.getName())); topologyResource.addProperty(Omn_service.username, getExperimenterUsername(newInstanceModel)); 
topologyResource.addProperty(Omn.hasAttribute, debugProjectId); //Adding the Resource we are now starting to create topologyResource.addProperty(Omn.hasResource,resource); listener.publishModelUpdate(topologyResource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); }else{ if(nsd.getId() != null){ nsd = client.getNetworkServiceDescriptor(nsd.getId()); if(nsd == null){ nsd = client.createLocalNetworkServiceDescriptor(); topologyResource.addProperty(Omn_resource.hasHardwareType, ModelFactory.createDefaultModel().createResource(adapterABox.getNameSpace() + nsd.getName())); topologyResource.addProperty(Omn_service.username, getExperimenterUsername(newInstanceModel)); topologyResource.addProperty(Omn.hasAttribute, debugProjectId); //Adding the Resource we are now starting to create topologyResource.addProperty(Omn.hasResource,resource); listener.publishModelUpdate(topologyResource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); } } } //Adding the Resource we are now starting to create topologyResource.addProperty(Omn.hasResource,resource); }else{ //If NSR allready exists, add this instance to it. Else create one and add it // if(topologyResource.hasProperty(Omn_resource.hasHardwareType)){ // client = findClient(topologyResource.getProperty(Omn.hasAttribute).getString()); if(topology.getProjectId() != null){ client = findClient(topology.getProjectId()); nsd = client.getNetworkServiceDescriptor(); //Adding the Resource we are now starting to create // topologyResource.addProperty(Omn.hasResource,resource); }else{ LOGGER.log(Level.WARNING, "ProjectId of Topology was NULL - Creating new Project/Client"); // String projectId = adminClient.createNewProjectOnServer(); String experimenterUsername = getExperimenterUsername(resource.getModel()); String projectId = null; for(Project p : adminClient.getAllProjectsFromServer()){ if(p.getName().equals(experimenterUsername)){ projectId = p.getId(); } } if(projectId == null){ projectId = adminClient.createNewProjectOnServer(experimenterUsername); } client = findClient(projectId); topology.setProjectId(projectId); topology.setProjectClient(client); nsd = client.createLocalNetworkServiceDescriptor(); // Add the NSR-Name, Experimenter username und project ID to the related Topology topologyResource.addProperty(Omn_resource.hasHardwareType, ModelFactory.createDefaultModel().createResource(adapterABox.getNameSpace() + nsd.getName())); topologyResource.addProperty(Omn_service.username, getExperimenterUsername(newInstanceModel)); topologyResource.addProperty(Omn.hasAttribute, projectId); //Adding the Resource we are now starting to create // topologyResource.addProperty(Omn.hasResource,resource); listener.publishModelUpdate(topologyResource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); } } } if (resource.hasProperty(RDF.type, OpenBaton.Gateway)) { final Gateway openBaton = new Gateway(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); VirtualNetworkFunctionDescriptor gateway = client.createGateway(openBaton, null); client.addVnfdToNsd(gateway); return this.parseToModel(openBaton); } else if (resource.hasProperty(RDF.type, OpenBaton.DomainNameSystem)) { final DomainNameSystem openBaton = new DomainNameSystem(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); // VirtualNetworkFunctionDescriptor dns = client.createDomainNameSystem(openBaton, 
null); // client.addVnfdToNsd(openBaton); return this.parseToModel(openBaton); } else if (resource.hasProperty(RDF.type, OpenBaton.ENodeB)) { final ENodeB openBaton = new ENodeB(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); client.createENodeB(openBaton, null); return this.parseToModel(openBaton); } else if (resource.hasProperty(RDF.type, OpenBaton.Switch)) { final Switch fiveg = new Switch(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.BenchmarkingTool)) { final BenchmarkingTool fiveg = new BenchmarkingTool(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.MME)) { final MME openBaton = new MME(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); Model tmpModel = this.parseToModel(openBaton); client.createMME(openBaton, null); return tmpModel; } else if (resource.hasProperty(RDF.type, OpenBaton.Control)) { final Control fiveg = new Control(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.HomeSubscriberServer)) { final HomeSubscriberService fiveg = new HomeSubscriberService(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.FiveGCore)) { FiveGCore fiveg = new FiveGCore(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); client.createFiveGCore(fiveg); Property property = resource.getModel().createProperty(Omn_lifecycle.hasState.getNameSpace(), Omn_lifecycle.hasState.getLocalName()); property.addProperty(RDF.type, (RDFNode)OWL.FunctionalProperty); try { CreateNSR createNsr = new CreateNSR(resource, fiveg, property, this.listener,client); ManagedThreadFactory threadFactory = (ManagedThreadFactory)new InitialContext().lookup("java:jboss/ee/concurrency/factory/default"); Thread createVMThread = threadFactory.newThread((Runnable)createNsr); createVMThread.start(); } catch (NamingException e) { e.printStackTrace(); } Model model2 = this.parseToModel((OpenBatonGeneric)fiveg); return model2; } else if (resource.hasProperty(RDF.type, OpenBaton.UE)) { final UE openBaton = new UE(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); client.createUe(openBaton, null); Model model = this.parseToModel(openBaton); return model; } else if (resource.hasProperty(RDF.type, OpenBaton.SgwuPgwu)) { final SgwuPgwu openBaton = new SgwuPgwu(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); client.createSgwuPgwu(openBaton, null); Model model = this.parseToModel(openBaton); return model; } else if (resource.hasProperty(RDF.type, Osco.ServiceContainer)) { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("createInstance: Creating ServiceContainer " + instanceURI); } // need to check if already created OpenBatonGeneric fiveg = 
this.getInstanceObject(instanceURI); ServiceContainer sc = null; if (fiveg == null) { sc = new ServiceContainer(this, instanceURI); this.getInstanceList().put(instanceURI, sc); topology.getServiceContainers().add(sc); sc.setTopology(topology); this.updateInstance(instanceURI, newInstanceModel); } else { sc = (ServiceContainer) fiveg; } return this.parseToModel(sc); } else if (resource.hasProperty(RDF.type) && !resource.hasProperty(RDF.type, Omn_resource.Link)) { OpenBatonService fiveg = new OpenBatonService(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); client.addVnfdToNsd(resource); Model model = this.parseToModel(fiveg); return model; } if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Couldn't recognize type, so returning original model."); } return newInstanceModel; } private String getExperimenterUsername(Model newInstanceModel) { return newInstanceModel.listObjectsOfProperty(newInstanceModel.getProperty("http://open-multinet.info/ontology/omn-service#username")).next().asLiteral().getString(); } Model parseToModel(final OpenBatonGeneric fivegGeneric) { LOGGER.warning("Calling parse to model..."); final Resource resource = ModelFactory.createDefaultModel().createResource(fivegGeneric.getInstanceUri()); final Property property = resource.getModel().createProperty(Omn_lifecycle.hasState.getNameSpace(), Omn_lifecycle.hasState.getLocalName()); property.addProperty(RDF.type, OWL.FunctionalProperty); if (!(fivegGeneric instanceof Topology)) { resource.addProperty(property, Omn_lifecycle.Uncompleted); final Property propertyLabel = resource.getModel().createProperty(RDFS.label.getNameSpace(), RDFS.label.getLocalName()); propertyLabel.addProperty(RDF.type, OWL.FunctionalProperty); } if (fivegGeneric instanceof Gateway) { Gateway gw = (Gateway) fivegGeneric; gw.parseToModel(resource); } else if (fivegGeneric instanceof Switch) { Switch sw = (Switch) fivegGeneric; sw.parseToModel(resource); } else if (fivegGeneric instanceof ENodeB) { ENodeB eNodeB = (ENodeB) fivegGeneric; eNodeB.parseToModel(resource); } else if (fivegGeneric instanceof Control) { Control control = (Control) fivegGeneric; control.parseToModel(resource); } else if (fivegGeneric instanceof HomeSubscriberService) { HomeSubscriberService hss = (HomeSubscriberService) fivegGeneric; hss.parseToModel(resource); } else if (fivegGeneric instanceof BenchmarkingTool) { BenchmarkingTool bt = (BenchmarkingTool) fivegGeneric; bt.parseToModel(resource); } else if (fivegGeneric instanceof DomainNameSystem) { DomainNameSystem dns = (DomainNameSystem) fivegGeneric; dns.parseToModel(resource); } else if (fivegGeneric instanceof ServiceContainer) { ServiceContainer sc = (ServiceContainer) fivegGeneric; sc.parseToModel(resource); } else if (fivegGeneric instanceof Topology) { Topology topology = (Topology) fivegGeneric; topology.parseToModel(resource); } else if (fivegGeneric instanceof MME) { MME mme = (MME) fivegGeneric; mme.parseToModel(resource); } else if (fivegGeneric instanceof UE) { UE ue = (UE) fivegGeneric; ue.parseToModel(resource); } else if (fivegGeneric instanceof SgwuPgwu) { SgwuPgwu sgwuPgwu = (SgwuPgwu) fivegGeneric; sgwuPgwu.parseToModel(resource); } else if (fivegGeneric instanceof FiveGCore) { FiveGCore fiveG = (FiveGCore) fivegGeneric; fiveG.parseToModel(resource); } else { // Fallback for any other OpenBatonGeneric subtype fivegGeneric.parseToModel(resource); } if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "CONTENT parse to model: " + resource.getModel().toString()); } return resource.getModel(); } @Override public void startNSR(Model createdInstances){ Property property = adapterABox.getModel().createProperty(Omn_lifecycle.hasState.getNameSpace(), Omn_lifecycle.hasState.getLocalName()); property.addProperty(RDF.type, (RDFNode)OWL.FunctionalProperty); try { CreateNSR createNsr = new CreateNSR(createdInstances, property, this.listener,findClient(adminProjectId)); ManagedThreadFactory threadFactory = (ManagedThreadFactory)new InitialContext().lookup("java:jboss/ee/concurrency/factory/default"); Thread createVMThread = threadFactory.newThread((Runnable)createNsr); createVMThread.start(); } catch (NamingException e) { e.printStackTrace(); } } public class CreateNSR implements Runnable { private Resource resource; private Model createdInstances; // private OpenBatonGeneric fiveG; private NetworkServiceRecord fivegNSR; private Property property; private OpenBatonClient client; private String nsrID; private int counter; private OpenBatonAdapterMDBSender parent; public CreateNSR(Resource resource, OpenBatonGeneric openBatonGeneric, Property property, OpenBatonAdapterMDBSender parent,OpenBatonClient client) { this.resource = resource; this.parent = parent; // this.fiveG = openBatonGeneric; this.property = property; this.client = client; this.counter = 0; LOGGER.log(Level.SEVERE, "Thread Created"); } public CreateNSR(Model model, Property property, OpenBatonAdapterMDBSender parent,OpenBatonClient client) { this.createdInstances = model; this.parent = parent; // this.fiveG = openBatonGeneric; this.property = property; this.client = client; this.counter = 0; LOGGER.log(Level.SEVERE, "Thread Created"); } @Override public void run() { while (!Thread.currentThread().isInterrupted() && this.counter < 10) { LOGGER.log(Level.SEVERE, "Starting run method now"); try { try { if (fivegNSR == null) { fivegNSR = client.createNetworkServiceRecord(); } } catch (Exception e) { LOGGER.log(Level.SEVERE, "Exception while creating the NetworkServiceRecord"); } ++counter; // Check if the NSR is already started and ready; checkIfNsrIsActive() refreshes the record from the NFVO and only returns true once its status is ACTIVE.
if (checkIfNsrIsActive()) { // Check whether we are working on the Model object (more than one node) or on the Resource object (a single instance/node) if(resource == null){ LOGGER.log(Level.SEVERE, "Adding LoginServices to the started resources"); LOGGER.log(Level.SEVERE, "-------------------------------------------"); ResIterator resIterator = createdInstances.listResourcesWithProperty(Omn_lifecycle.hasState); Model updatedInstances = ModelFactory.createDefaultModel(); HashMap<String,Ip> ipMap = getIpsFromNsr(); for (Resource r : resIterator.toList()){ try{ Resource loginService = createdInstances.createResource(OntologyModelUtil.getResourceNamespace() + "LoginService" + UUID.randomUUID().toString()); loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); loginService.addProperty((Property)Omn_service.port, "22"); String username = r.getProperty(Omn_service.username).getObject().asLiteral().getString(); loginService.addProperty((Property)Omn_service.username, username); String ip = ipMap.keySet().iterator().next(); loginService.addProperty((Property)Omn_service.hostname, ipMap.get(ip).getIp()); ipMap.remove(ip); Statement stm2 = new StatementImpl(r, Omn.hasService, loginService); updatedInstances.add(stm2); updatedInstances.add(loginService.listProperties().toList()); }catch (Exception e) { // No floating IP (or no username) could be resolved for this resource, so it gets no LoginService. } Statement stm = new StatementImpl(r, property, Omn_lifecycle.Started); Statement stm3 = new StatementImpl(property, RDF.type, OWL.FunctionalProperty); updatedInstances.add(stm); updatedInstances.add(stm3); LOGGER.log(Level.SEVERE, "Added LoginService to Resource"); } parent.publishModelUpdate(updatedInstances, UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); LOGGER.log(Level.SEVERE, "Killing Thread now"); Thread.currentThread().interrupt(); }else{
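// Single-resource case: attach one LoginService to the provisioned resource, pointing it at the configured VPN endpoint when one is set, otherwise at 127.0.0.1.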
getIpsFromNsr(); // Throws when the NSR exposes no floating IPs; the surrounding catch then retries instead of publishing a LoginService. LOGGER.log(Level.SEVERE, "Adding LoginService to Resource"); LOGGER.log(Level.SEVERE, "-------------------------------------------"); Resource loginService = this.resource.getModel().createResource(OntologyModelUtil.getResourceNamespace() + "LoginService" + UUID.randomUUID().toString()); if (OpenBatonAdapter.this.vpnIP == null || OpenBatonAdapter.this.vpnIP.equals("") || OpenBatonAdapter.this.vpnPort == null || OpenBatonAdapter.this.vpnPort.equals("")) { loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); loginService.addProperty((Property)Omn_service.username, "home"); loginService.addProperty((Property)Omn_service.hostname, "127.0.0.1"); loginService.addProperty((Property)Omn_service.port, "22"); } else { loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); loginService.addProperty((Property)Omn_service.username, "home"); loginService.addProperty((Property)Omn_service.hostname, OpenBatonAdapter.this.vpnIP); loginService.addProperty((Property)Omn_service.port, OpenBatonAdapter.this.vpnPort); } this.resource.addProperty((Property)Omn.hasService, (RDFNode)loginService); Statement stateStatement = this.resource.getProperty(this.property); stateStatement.changeObject((RDFNode)Omn_lifecycle.Started); this.resource.addProperty(this.property, (RDFNode)Omn_lifecycle.Started); this.resource.addProperty((Property)Omn_lifecycle.hasOriginalID, this.fivegNSR.getId()); LOGGER.log(Level.SEVERE, "Added LoginService to Resource"); this.parent.publishModelUpdate(this.resource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); LOGGER.log(Level.SEVERE, "Killing Thread now"); Thread.currentThread().interrupt(); continue; } } Thread.sleep(30000); } catch (Exception e) { ++this.counter; e.printStackTrace(); if (this.counter >= 9) { try { Thread.sleep(30000); } catch (InterruptedException e1) { e1.printStackTrace(); } continue; } Thread.currentThread().interrupt(); } } } public HashMap<String, Ip> getIpsFromNsr() { // Maps each VNF record name to the first floating IP of its first VNFC instance. HashMap<String, Ip> ipMap = new HashMap<>(); for(VirtualNetworkFunctionRecord v : fivegNSR.getVnfr()){ Ip ip = v.getVdu().iterator().next().getVnfc_instance().iterator().next().getFloatingIps().iterator().next(); ipMap.put(v.getName(), ip); } return ipMap; } public boolean checkIfNsrIsActive() throws InterruptedException { fivegNSR = client.updateNetworkServiceRecord(fivegNSR); String status = fivegNSR.getStatus().toString(); LOGGER.log(Level.SEVERE, "STATUS of NSR: " + status); switch (status) { case "NULL": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord is NULL at the moment. Will check again later"); return false; } case "INITIALIZED": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord is INITIALIZED at the moment. Will check again later"); return false; } case "ERROR": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord ERRORED while starting. Please check the logs"); return false; } case "ACTIVE": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord is ACTIVE now. Will try to get floating IPs now"); return true; } } LOGGER.log(Level.SEVERE, "NetworkServiceRecord is not ready at the moment.
Will check again later"); return false; } } @Override public void deleteInstance(String instanceURI) throws InstanceNotFoundException, InvalidRequestException, ProcessingException { try{ OpenBatonClient client = findClient(adminProjectId); client.stopNetworkServiceRecord(); client.deleteNetworkServiceDescriptor(); }catch(Exception e){ e.printStackTrace(); } } @Override public Model getInstance(String instanceURI) throws InstanceNotFoundException, ProcessingException, InvalidRequestException { // TODO Auto-generated method stub return null; } @Override public Model getAllInstances() throws InstanceNotFoundException, ProcessingException { // TODO Auto-generated method stub return null; } @Override public void refreshConfig() throws ProcessingException { // TODO Auto-generated method stub } @Override public void shutdown() { // TODO Auto-generated method stub } @Override public void configure(Config configuration) { // TODO Auto-generated method stub } public void setListener(OpenBatonAdapterMDBSender mdbSender) { // TODO Auto-generated method stub this.listener = mdbSender; } protected String getUsername() { return username; } protected void setUsername(String username) { this.username = username; } protected String getPassword() { return password; } protected void setPassword(String password) { this.password = password; } protected String getNfvoIp() { return nfvoIp; } protected void setNfvoIp(String nfvoIp) { this.nfvoIp = nfvoIp; } protected String getNfvoPort() { return nfvoPort; } protected void setNfvoPort(String nfvoPort) { this.nfvoPort = nfvoPort; } protected String getVersion() { return version; } protected void setVersion(String version) { this.version = version; } protected String getVpnIP() { return vpnIP; } protected void setVpnIP(String vpnIP) { this.vpnIP = vpnIP; } protected String getVpnPort() { return vpnPort; } protected void setVpnPort(String vpnPort) { this.vpnPort = vpnPort; } public HashMap<String, OpenBatonGeneric> getInstanceList() { return instanceList; } public Topology getTopologyObject(final String topologyURI) { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Get topology: " + topologyURI); } OpenBatonGeneric fiveG = this.getInstanceList().get(topologyURI); Topology topology = null; if (fiveG instanceof Topology) { topology = (Topology) fiveG; } return topology; } public String parseConfig(Resource resource, String parameter) { Model model = ModelFactory.createDefaultModel(); return resource.getProperty(model.createProperty(OpenBaton.getURI(), parameter)).getLiteral().getString(); } public String getInstanceUri(OpenBatonGeneric OpenBatonGeneric) { return OpenBatonGeneric.getInstanceUri(); } @Override public Resource getAdapterABox() { // TODO Auto-generated method stub return adapterABox; } @Override public Model getAdapterDescriptionModel() { // TODO Auto-generated method stub return adapterTBox; } public OpenBatonGeneric getInstanceObject(final String instanceURI) { final OpenBatonGeneric fiveg = this.getInstanceList().get(instanceURI); if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Get instance: " + instanceURI); } return fiveg; } public void updateOldVnfPackage() { // TODO Auto-generated method stub } public void addUploadedPackageToDatabase(String id, String fileName,String projectId) { // Resource resourceToCreate = ModelFactory.createDefaultModel().createResource(adapterABox.getLocalName()+"/" +fileName); Resource resourceToCreate = ModelFactory.createDefaultModel().createResource(Omn.NAMESPACE +fileName); 
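// Describe the uploaded package as an implementable resource (id, label, owning project), advertise it via canImplement on the adapter ABox, and broadcast both updated models.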
resourceToCreate.addProperty(Omn_lifecycle.hasID,id); resourceToCreate.addProperty(RDFS.label,fileName); resourceToCreate.addProperty(RDFS.subClassOf, Omn.Resource); resourceToCreate.addProperty(Omn.isAttributeOf, projectId); adapterABox.addProperty(Omn_lifecycle.canImplement, resourceToCreate); listener.publishModelUpdate(resourceToCreate.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); listener.publishModelUpdate(adapterABox.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); } public String uploadPackageToDatabase(String projectId,String fileDirectory) { OpenBatonClient client = findClient(projectId); return client.uploadPackageToDatabase(fileDirectory); } private OpenBatonClient findClient(String projectId) { if(clientList.containsKey(projectId)){ return clientList.get(projectId); }else{ clientList.put(projectId,new OpenBatonClient(this,projectId)); return clientList.get(projectId); } } public OpenBatonClient getAdminClient() { return adminClient; } public String getAdminProjectId() { return adminProjectId; } public void setAdminProjectId(String adminProjectId) { this.adminProjectId = adminProjectId; } // public void createNewVnfPackage() { // String mmeID; // MME mme = new MME(this, "http://TEST.OPENBATON.MME"); // this.createdDebugMME = this.admin.createMME(mme); // this.debugString = mmeID = this.createdDebugMME.getId(); // Model newmModel = ModelFactory.createDefaultModel(); // Resource newResource = newmModel.createResource("http://TEST.OPENBATON.RESOURCE"); // newResource.addProperty(RDF.type, OWL.Class); // newResource.addProperty((Property)Omn_lifecycle.hasID, mmeID); // newResource.addProperty(RDFS.subClassOf, Omn.Resource); // this.adapterABox.addProperty((Property)Omn_lifecycle.canImplement, newResource); // this.adapterABox.getModel().add(newResource.getModel()); // ResIterator propIterator = this.adapterTBox.listSubjectsWithProperty(RDFS.domain, newResource); // while (propIterator.hasNext()) { // Property property = this.adapterTBox.getProperty(((Resource)propIterator.next()).getURI()); // } // this.listener.publishModelUpdate(this.adapterABox.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); //} }
OpenBaton/src/main/java/org/fiteagle/adapters/OpenBaton/OpenBatonAdapter.java
package org.fiteagle.adapters.OpenBaton; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.ejb.EJB; import javax.ejb.Init; import javax.ejb.PostActivate; import javax.enterprise.concurrent.ManagedThreadFactory; import javax.naming.InitialContext; import javax.naming.NamingException; import org.fiteagle.abstractAdapter.AbstractAdapter; import org.fiteagle.adapters.OpenBaton.Model.BenchmarkingTool; import org.fiteagle.adapters.OpenBaton.Model.Control; import org.fiteagle.adapters.OpenBaton.Model.DomainNameSystem; import org.fiteagle.adapters.OpenBaton.Model.ENodeB; import org.fiteagle.adapters.OpenBaton.Model.FiveGCore; import org.fiteagle.adapters.OpenBaton.Model.Gateway; import org.fiteagle.adapters.OpenBaton.Model.HomeSubscriberService; import org.fiteagle.adapters.OpenBaton.Model.MME; import org.fiteagle.adapters.OpenBaton.Model.OpenBatonGeneric; import org.fiteagle.adapters.OpenBaton.Model.OpenBatonService; import org.fiteagle.adapters.OpenBaton.Model.ServiceContainer; import org.fiteagle.adapters.OpenBaton.Model.SgwuPgwu; import org.fiteagle.adapters.OpenBaton.Model.Switch; import org.fiteagle.adapters.OpenBaton.Model.Topology; import org.fiteagle.adapters.OpenBaton.Model.UE; import org.fiteagle.adapters.OpenBaton.dm.OpenBatonAdapterMDBSender; import org.fiteagle.api.core.Config; import org.fiteagle.api.core.IMessageBus; import org.fiteagle.api.core.MessageBusOntologyModel; import org.fiteagle.api.core.MessageUtil; import org.fiteagle.api.core.OntologyModelUtil; import org.fiteagle.api.tripletStoreAccessor.TripletStoreAccessor; import org.openbaton.catalogue.mano.common.Ip; import org.openbaton.catalogue.mano.descriptor.NetworkServiceDescriptor; import org.openbaton.catalogue.mano.descriptor.VirtualLinkDescriptor; import org.openbaton.catalogue.mano.descriptor.VirtualNetworkFunctionDescriptor; import org.openbaton.catalogue.mano.record.NetworkServiceRecord; import org.openbaton.catalogue.mano.record.VirtualNetworkFunctionRecord; import org.openbaton.catalogue.security.Project; import com.hp.hpl.jena.ontology.Ontology; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.NodeIterator; import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.rdf.model.RDFNode; import com.hp.hpl.jena.rdf.model.ResIterator; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.rdf.model.Statement; import com.hp.hpl.jena.rdf.model.StmtIterator; import com.hp.hpl.jena.rdf.model.impl.StatementImpl; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.RDF; import com.hp.hpl.jena.vocabulary.RDFS; import info.openmultinet.ontology.vocabulary.Omn; import info.openmultinet.ontology.vocabulary.Omn_federation; import info.openmultinet.ontology.vocabulary.Omn_lifecycle; import info.openmultinet.ontology.vocabulary.Omn_resource; import info.openmultinet.ontology.vocabulary.Omn_service; import info.openmultinet.ontology.vocabulary.OpenBaton; import info.openmultinet.ontology.vocabulary.Osco; public final class OpenBatonAdapter extends AbstractAdapter { private static final Logger LOGGER = Logger.getLogger(OpenBatonAdapter.class.toString()); // protected OpenBatonClient openBatonClient ; protected OpenBatonClient adminClient ; private OpenBatonAdapterMDBSender listener; @EJB OpenBatonAdapterControl openBatonAdapterControler; 
private String username; private String password; private String nfvoIp; private String nfvoPort; private String version; private String vpnIP; private String vpnPort; private String adminProjectId; private String debugString; private VirtualNetworkFunctionDescriptor createdDebugMME; private Resource debugTopologyResource; private String debugProjectId = "d28a8a82-d503-42c5-80e5-899469e9255d"; // private String debugProjectId = null; private transient final HashMap<String, OpenBatonGeneric> instanceList = new HashMap<String, OpenBatonGeneric>(); private HashMap<String,OpenBatonClient> clientList = new HashMap<String,OpenBatonClient>(); public OpenBatonAdapter(final Model adapterModel, Resource adapterABox) { super(); this.uuid = UUID.randomUUID().toString(); this.adapterTBox = adapterModel; this.adapterABox = adapterABox; final Resource adapterType = this.getAdapterClass(); this.adapterABox.addProperty(RDF.type, adapterType); this.adapterABox.addProperty(RDFS.label, this.adapterABox.getLocalName()); this.adapterABox.addProperty(RDFS.comment, "OpenBaton Adapter"); this.adapterABox.addLiteral(MessageBusOntologyModel.maxInstances, 100); // this.adapterABox.addProperty(Omn_lifecycle.canImplement, // Omn_domain_pc.PC); /** * Looking up all Resources that belongs to the Adapter and will be * shown in SFA as Nodes. */ Model implementables = OntologyModelUtil.loadModel("ontologies/openBaton-adapter.ttl", IMessageBus.SERIALIZATION_TURTLE); final NodeIterator resourceIterator = implementables.listObjectsOfProperty(Omn_lifecycle.implements_); while (resourceIterator.hasNext()) { final Resource resource = resourceIterator.next().asResource(); this.adapterABox.addProperty(Omn_lifecycle.canImplement, resource); this.adapterABox.getModel().add(resource.getModel()); final ResIterator propIterator = this.adapterTBox.listSubjectsWithProperty(RDFS.domain, resource); while (propIterator.hasNext()) { final Property property = this.adapterTBox.getProperty(propIterator.next().getURI()); } } } // @PostConstruct public void init() { // openBatonClient.init(); // adminClient = new OpenBatonClient(this, adminProjectId); adminClient = findClient(adminProjectId); // TODO CHANGE WHEN DEBUG IS OVER OpenBatonClient initClient = adminClient; // Refresh the adapterABox Model with infos from Database Model newImplementables = TripletStoreAccessor.getResource(adapterABox.getURI()); NodeIterator iterator = newImplementables.listObjectsOfProperty(Omn_lifecycle.canImplement); //If Adapter has no "canImplement" Resources check on OpenBaton-Server while(iterator.hasNext()){ RDFNode statement = iterator.next(); Resource resource = statement.asResource(); this.adapterABox.addProperty(Omn_lifecycle.canImplement, resource); this.adapterABox.getModel().add(resource.getModel()); } try{ List<VirtualLinkDescriptor> vnfdList = initClient.getAllVnfDescriptor(); for(VirtualLinkDescriptor v : vnfdList){ Resource newResource = this.adapterABox.getModel().createResource(Omn.NAMESPACE + v.getName()); newResource.addProperty(RDFS.label, v.getName()); newResource.addProperty(RDFS.subClassOf, Omn.Resource); newResource.addProperty(Omn_lifecycle.hasID, v.getId()); this.adapterABox.addProperty(Omn_lifecycle.canImplement, newResource); this.adapterABox.getModel().add(newResource.getModel()); } listener.publishModelUpdate(this.adapterABox.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); }catch(Exception e){ e.printStackTrace(); } } @Override public void updateAdapterDescription() throws ProcessingException { // TODO 
Auto-generated method stub } @Override public Model updateInstance(final String instanceURI, final Model configureModel) { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "updateInstance instanceURI: " + instanceURI); LOGGER.log(Level.INFO, "updateInstance configureModel: " + MessageUtil.serializeModel(configureModel, IMessageBus.SERIALIZATION_TURTLE)); } // if the instance is in the list of instances in the adapter if (this.getInstanceList().containsKey(instanceURI)) { final OpenBatonGeneric currentFiveG = this.getInstanceList().get(instanceURI); Resource fivegResource = configureModel.getResource(instanceURI); currentFiveG.updateInstance(fivegResource); final Model newModel = this.parseToModel(currentFiveG); if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Returning updated fiveg: " + newModel); } return newModel; } else { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Instance list does not contain key."); } } if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Creating new instance"); } return ModelFactory.createDefaultModel(); } @Override public Model createInstances(Model model) throws ProcessingException, InvalidRequestException { Model createdInstancesModel = super.createInstances(model); LOGGER.warning("createInstances override method."); String topologyUri = null; NodeIterator objects = model.listObjectsOfProperty(Omn.isResourceOf); if (objects.hasNext()) { RDFNode object = objects.next(); topologyUri = object.asResource().getURI(); } // Uncomment this method to make creation at OpenSDNCore automatically // occur upon SFA provision call // try { // HelperMethods.createTopologyAtOpenSDNCore(this, "FiveGAdapter-1", // topologyUri, 10); // } catch (Exception e) { // e.printStackTrace(); // } return createdInstancesModel; } @Override public Model createInstance(String instanceURI, Model newInstanceModel) { Resource resource = newInstanceModel.getResource(instanceURI); OpenBatonClient client =null; // check if already created for (Map.Entry<String, OpenBatonGeneric> entry : this.getInstanceList().entrySet()) { String key = entry.getKey(); OpenBatonGeneric value = entry.getValue(); if (instanceURI.equals(key)) { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Instance already exists: " + instanceURI); } return this.parseToModel(value); } } // check if topology exists, otherwise create it String topologyUri = null; Resource topologyResource = null; Topology topology = null; NetworkServiceDescriptor nsd = null; if (resource.hasProperty(Omn.isResourceOf)) { topologyResource = resource.getProperty(Omn.isResourceOf).getObject().asResource(); topologyUri = topologyResource.getURI().toString(); // topologyResource = ModelFactory.createDefaultModel().getResource(topologyUri); if (this.getInstanceList().get(topologyUri) == null) { topology = new Topology(this, topologyUri); } else { topology = (Topology) this.getInstanceList().get(topologyUri); } this.getInstanceList().put(topologyUri, topology); } // Check which Ressource should be created if(resource.hasProperty(Omn_resource.hasInterface)){ if(debugProjectId != null){ client = findClient(adminProjectId); nsd = client.getNetworkServiceDescriptor(); if(nsd == null){ nsd = client.createLocalNetworkServiceDescriptor(); topologyResource.addProperty(Omn_resource.hasHardwareType, ModelFactory.createDefaultModel().createResource(adapterABox.getNameSpace() + nsd.getName())); topologyResource.addProperty(Omn_service.username, getExperimenterUsername(newInstanceModel)); 
topologyResource.addProperty(Omn.hasAttribute, debugProjectId); //Adding the Resource we are now starting to create topologyResource.addProperty(Omn.hasResource,resource); listener.publishModelUpdate(topologyResource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); }else{ if(nsd.getId() != null){ nsd = client.getNetworkServiceDescriptor(nsd.getId()); if(nsd == null){ nsd = client.createLocalNetworkServiceDescriptor(); topologyResource.addProperty(Omn_resource.hasHardwareType, ModelFactory.createDefaultModel().createResource(adapterABox.getNameSpace() + nsd.getName())); topologyResource.addProperty(Omn_service.username, getExperimenterUsername(newInstanceModel)); topologyResource.addProperty(Omn.hasAttribute, debugProjectId); //Adding the Resource we are now starting to create topologyResource.addProperty(Omn.hasResource,resource); listener.publishModelUpdate(topologyResource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); } } } //Adding the Resource we are now starting to create topologyResource.addProperty(Omn.hasResource,resource); }else{ //If NSR allready exists, add this instance to it. Else create one and add it // if(topologyResource.hasProperty(Omn_resource.hasHardwareType)){ // client = findClient(topologyResource.getProperty(Omn.hasAttribute).getString()); if(topology.getProjectId() != null){ client = findClient(topology.getProjectId()); nsd = client.getNetworkServiceDescriptor(); //Adding the Resource we are now starting to create // topologyResource.addProperty(Omn.hasResource,resource); }else{ LOGGER.log(Level.WARNING, "ProjectId of Topology was NULL - Creating new Project/Client"); // String projectId = adminClient.createNewProjectOnServer(); String experimenterUsername = getExperimenterUsername(resource.getModel()); String projectId = null; for(Project p : adminClient.getAllProjectsFromServer()){ if(p.getName().equals(experimenterUsername)){ projectId = p.getId(); } } if(projectId == null){ projectId = adminClient.createNewProjectOnServer(experimenterUsername); } client = findClient(projectId); topology.setProjectId(projectId); topology.setProjectClient(client); nsd = client.createLocalNetworkServiceDescriptor(); // Add the NSR-Name, Experimenter username und project ID to the related Topology topologyResource.addProperty(Omn_resource.hasHardwareType, ModelFactory.createDefaultModel().createResource(adapterABox.getNameSpace() + nsd.getName())); topologyResource.addProperty(Omn_service.username, getExperimenterUsername(newInstanceModel)); topologyResource.addProperty(Omn.hasAttribute, projectId); //Adding the Resource we are now starting to create // topologyResource.addProperty(Omn.hasResource,resource); listener.publishModelUpdate(topologyResource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); } } } if (resource.hasProperty(RDF.type, OpenBaton.Gateway)) { final Gateway openBaton = new Gateway(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); VirtualNetworkFunctionDescriptor gateway = client.createGateway(openBaton, null); client.addVnfdToNsd(gateway); return this.parseToModel(openBaton); } else if (resource.hasProperty(RDF.type, OpenBaton.DomainNameSystem)) { final DomainNameSystem openBaton = new DomainNameSystem(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); // VirtualNetworkFunctionDescriptor dns = client.createDomainNameSystem(openBaton, 
null); // client.addVnfdToNsd(openBaton); return this.parseToModel(openBaton); } else if (resource.hasProperty(RDF.type, OpenBaton.ENodeB)) { final ENodeB openBaton = new ENodeB(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); client.createENodeB(openBaton, null); return this.parseToModel(openBaton); } else if (resource.hasProperty(RDF.type, OpenBaton.Switch)) { final Switch fiveg = new Switch(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.BenchmarkingTool)) { final BenchmarkingTool fiveg = new BenchmarkingTool(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.MME)) { final MME openBaton = new MME(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); Model tmpModel = this.parseToModel(openBaton); client.createMME(openBaton, null); return tmpModel; } else if (resource.hasProperty(RDF.type, OpenBaton.Control)) { final Control fiveg = new Control(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.HomeSubscriberServer)) { final HomeSubscriberService fiveg = new HomeSubscriberService(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); return this.parseToModel(fiveg); } else if (resource.hasProperty(RDF.type, OpenBaton.FiveGCore)) { FiveGCore fiveg = new FiveGCore(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); client.createFiveGCore(fiveg); Property property = resource.getModel().createProperty(Omn_lifecycle.hasState.getNameSpace(), Omn_lifecycle.hasState.getLocalName()); property.addProperty(RDF.type, (RDFNode)OWL.FunctionalProperty); try { CreateNSR createNsr = new CreateNSR(resource, fiveg, property, this.listener,client); ManagedThreadFactory threadFactory = (ManagedThreadFactory)new InitialContext().lookup("java:jboss/ee/concurrency/factory/default"); Thread createVMThread = threadFactory.newThread((Runnable)createNsr); createVMThread.start(); } catch (NamingException e) { e.printStackTrace(); } Model model2 = this.parseToModel((OpenBatonGeneric)fiveg); return model2; } else if (resource.hasProperty(RDF.type, OpenBaton.UE)) { final UE openBaton = new UE(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); client.createUe(openBaton, null); Model model = this.parseToModel(openBaton); return model; } else if (resource.hasProperty(RDF.type, OpenBaton.SgwuPgwu)) { final SgwuPgwu openBaton = new SgwuPgwu(this, instanceURI); this.getInstanceList().put(instanceURI, openBaton); this.updateInstance(instanceURI, newInstanceModel); client.createSgwuPgwu(openBaton, null); Model model = this.parseToModel(openBaton); return model; } else if (resource.hasProperty(RDF.type, Osco.ServiceContainer)) { if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("createInstance: Creating ServiceContainer " + instanceURI); } // need to check if already created OpenBatonGeneric fiveg = 
this.getInstanceObject(instanceURI); ServiceContainer sc = null; if (fiveg == null) { sc = new ServiceContainer(this, instanceURI); this.getInstanceList().put(instanceURI, sc); topology.getServiceContainers().add(sc); sc.setTopology(topology); this.updateInstance(instanceURI, newInstanceModel); } else { sc = (ServiceContainer) fiveg; } return this.parseToModel(sc); } else if (resource.hasProperty(RDF.type) && !resource.hasProperty(RDF.type, Omn_resource.Link)) { OpenBatonService fiveg = new OpenBatonService(this, instanceURI); this.getInstanceList().put(instanceURI, fiveg); this.updateInstance(instanceURI, newInstanceModel); client.addVnfdToNsd(resource); Model model = this.parseToModel(fiveg); return model; } if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Couldn't recognize type, so returning original model."); } return newInstanceModel; } private String getExperimenterUsername(Model newInstanceModel) { return newInstanceModel.listObjectsOfProperty(newInstanceModel.getProperty("http://open-multinet.info/ontology/omn-service#username")).next().asLiteral().getString(); } Model parseToModel(final OpenBatonGeneric fivegGeneric) { LOGGER.warning("Calling parse to model..."); final Resource resource = ModelFactory.createDefaultModel().createResource(fivegGeneric.getInstanceUri()); final Property property = resource.getModel().createProperty(Omn_lifecycle.hasState.getNameSpace(), Omn_lifecycle.hasState.getLocalName()); property.addProperty(RDF.type, OWL.FunctionalProperty); if (!(fivegGeneric instanceof Topology)) { resource.addProperty(property, Omn_lifecycle.Uncompleted); final Property propertyLabel = resource.getModel().createProperty(RDFS.label.getNameSpace(), RDFS.label.getLocalName()); propertyLabel.addProperty(RDF.type, OWL.FunctionalProperty); } if (fivegGeneric instanceof Gateway) { Gateway gw = (Gateway) fivegGeneric; gw.parseToModel(resource); } else if (fivegGeneric instanceof Switch) { Switch sw = (Switch) fivegGeneric; sw.parseToModel(resource); } else if (fivegGeneric instanceof ENodeB) { ENodeB eNodeB = (ENodeB) fivegGeneric; eNodeB.parseToModel(resource); } else if (fivegGeneric instanceof Control) { Control control = (Control) fivegGeneric; control.parseToModel(resource); } else if (fivegGeneric instanceof HomeSubscriberService) { HomeSubscriberService hss = (HomeSubscriberService) fivegGeneric; hss.parseToModel(resource); } else if (fivegGeneric instanceof BenchmarkingTool) { BenchmarkingTool bt = (BenchmarkingTool) fivegGeneric; bt.parseToModel(resource); } else if (fivegGeneric instanceof DomainNameSystem) { DomainNameSystem dns = (DomainNameSystem) fivegGeneric; dns.parseToModel(resource); } else if (fivegGeneric instanceof ServiceContainer) { ServiceContainer sc = (ServiceContainer) fivegGeneric; sc.parseToModel(resource); } else if (fivegGeneric instanceof Topology) { Topology topology = (Topology) fivegGeneric; topology.parseToModel(resource); } else if (fivegGeneric instanceof MME) { MME mme = (MME) fivegGeneric; mme.parseToModel(resource); } else if (fivegGeneric instanceof UE) { UE ue = (UE) fivegGeneric; ue.parseToModel(resource); } else if (fivegGeneric instanceof SgwuPgwu) { SgwuPgwu sgwuPgwu = (SgwuPgwu) fivegGeneric; sgwuPgwu.parseToModel(resource); } else if (fivegGeneric instanceof FiveGCore) { FiveGCore fiveG = (FiveGCore) fivegGeneric; fiveG.parseToModel(resource); }else if (fivegGeneric instanceof OpenBatonGeneric) { OpenBatonGeneric fiveG = fivegGeneric; fiveG.parseToModel(resource); } if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, 
"CONTENT parse to model: " + resource.getModel().toString()); } return resource.getModel(); } @Override public void startNSR(Model createdInstances){ Property property = adapterABox.getModel().createProperty(Omn_lifecycle.hasState.getNameSpace(), Omn_lifecycle.hasState.getLocalName()); property.addProperty(RDF.type, (RDFNode)OWL.FunctionalProperty); try { CreateNSR createNsr = new CreateNSR(createdInstances, property, this.listener,findClient(adminProjectId)); ManagedThreadFactory threadFactory = (ManagedThreadFactory)new InitialContext().lookup("java:jboss/ee/concurrency/factory/default"); Thread createVMThread = threadFactory.newThread((Runnable)createNsr); createVMThread.start(); } catch (NamingException e) { e.printStackTrace(); } } public class CreateNSR implements Runnable { private Resource resource; private Model createdInstances; // private OpenBatonGeneric fiveG; private NetworkServiceRecord fivegNSR; private Property property; private OpenBatonClient client; private String nsrID; private int counter; private OpenBatonAdapterMDBSender parent; public CreateNSR(Resource resource, OpenBatonGeneric openBatonGeneric, Property property, OpenBatonAdapterMDBSender parent,OpenBatonClient client) { this.resource = resource; this.parent = parent; // this.fiveG = openBatonGeneric; this.property = property; this.client = client; this.counter = 0; LOGGER.log(Level.SEVERE, "Thread Created"); } public CreateNSR(Model model, Property property, OpenBatonAdapterMDBSender parent,OpenBatonClient client) { this.createdInstances = model; this.parent = parent; // this.fiveG = openBatonGeneric; this.property = property; this.client = client; this.counter = 0; LOGGER.log(Level.SEVERE, "Thread Created"); } @Override public void run() { while (!Thread.currentThread().isInterrupted() && this.counter < 10) { LOGGER.log(Level.SEVERE, "Starting RUN Methode now"); try { try { if (fivegNSR == null) { fivegNSR = client.createNetworkServiceRecord(); } } catch (Exception e) { LOGGER.log(Level.SEVERE, "Exception in getting All NSRs"); } ++counter; // Check if the NSR is allready started and Ready. 
if (checkIfNsrIsActive()) { //Check if we are working on the Model-Object(More than 1 Nodes) or the Resource-Object(1 Instance/Node) if(resource == null){ LOGGER.log(Level.SEVERE, "Adding LoginResource to Resource"); LOGGER.log(Level.SEVERE, "-------------------------------------------"); // Resource loginService = createdInstances.createResource(OntologyModelUtil.getResourceNamespace() + "LoginService" + UUID.randomUUID().toString()); // loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); // loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); // loginService.addProperty((Property)Omn_service.port, "22"); // // String username = resource.getProperty(Omn_service.username).getObject().asLiteral().getString(); // loginService.addProperty((Property)Omn_service.username, username); // if (OpenBatonAdapter.this.vpnIP == null || OpenBatonAdapter.this.vpnIP.equals("") || OpenBatonAdapter.this.vpnPort == null || OpenBatonAdapter.this.vpnPort.equals("")) { // loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); // loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); // loginService.addProperty((Property)Omn_service.username, "home"); // loginService.addProperty((Property)Omn_service.hostname, "127.0.0.1"); // loginService.addProperty((Property)Omn_service.port, "22"); // } else { // loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); // loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); // loginService.addProperty((Property)Omn_service.username, "home"); // loginService.addProperty((Property)Omn_service.hostname, OpenBatonAdapter.this.vpnIP); // loginService.addProperty((Property)Omn_service.port, OpenBatonAdapter.this.vpnPort); // } ResIterator resIterator = createdInstances.listResourcesWithProperty(Omn_lifecycle.hasState); Model updatedInstances = ModelFactory.createDefaultModel(); HashMap<String,Ip> ipMap = getIpsFromNsr(); for (Resource r : resIterator.toList()){ Resource loginService = createdInstances.createResource(OntologyModelUtil.getResourceNamespace() + "LoginService" + UUID.randomUUID().toString()); loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); loginService.addProperty((Property)Omn_service.port, "22"); String username = r.getProperty(Omn_service.username).getObject().asLiteral().getString(); loginService.addProperty((Property)Omn_service.username, username); try{ String ip = ipMap.keySet().iterator().next(); loginService.addProperty((Property)Omn_service.hostname, ipMap.get(ip).getIp()); ipMap.remove(ip); }catch (Exception e) { loginService.addProperty((Property)Omn_service.hostname, "127.0.0.1"); } Statement stm = new StatementImpl(r, property, Omn_lifecycle.Started); Statement stm2 = new StatementImpl(r, Omn.hasService, loginService); Statement stm3 = new StatementImpl(property, RDF.type, OWL.FunctionalProperty); updatedInstances.add(stm); updatedInstances.add(stm2); updatedInstances.add(loginService.listProperties().toList()); updatedInstances.add(stm3); LOGGER.log(Level.SEVERE, "Added LoginService to Resource"); // updatedInstances.add(r.getModel()); // parent.publishModelUpdate(r.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); } parent.publishModelUpdate(updatedInstances, UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); LOGGER.log(Level.SEVERE, "Killing Thread now"); Thread.currentThread().interrupt(); }else{ 
getIpsFromNsr(); LOGGER.log(Level.SEVERE, "Adding LoginResource to Resource"); LOGGER.log(Level.SEVERE, "-------------------------------------------"); Resource loginService = this.resource.getModel().createResource(OntologyModelUtil.getResourceNamespace() + "LoginService" + UUID.randomUUID().toString()); if (OpenBatonAdapter.this.vpnIP == null || OpenBatonAdapter.this.vpnIP.equals("") || OpenBatonAdapter.this.vpnPort == null || OpenBatonAdapter.this.vpnPort.equals("")) { loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); loginService.addProperty((Property)Omn_service.username, "home"); loginService.addProperty((Property)Omn_service.hostname, "127.0.0.1"); loginService.addProperty((Property)Omn_service.port, "22"); } else { loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); loginService.addProperty((Property)Omn_service.username, "home"); loginService.addProperty((Property)Omn_service.hostname, OpenBatonAdapter.this.vpnIP); loginService.addProperty((Property)Omn_service.port, OpenBatonAdapter.this.vpnPort); } this.resource.addProperty((Property)Omn.hasService, (RDFNode)loginService); Statement blub = this.resource.getProperty(this.property); blub.changeObject((RDFNode)Omn_lifecycle.Started); this.resource.addProperty(this.property, (RDFNode)Omn_lifecycle.Started); this.resource.addProperty((Property)Omn_lifecycle.hasOriginalID, this.fivegNSR.getId()); LOGGER.log(Level.SEVERE, "Added LoginService to Resource"); this.parent.publishModelUpdate(this.resource.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); LOGGER.log(Level.SEVERE, "Killing Thread now"); Thread.currentThread().interrupt(); continue; } } Thread.currentThread(); Thread.sleep(30000); } catch (Exception e) { ++this.counter; e.printStackTrace(); if (this.counter >= 9) { try { Thread.currentThread(); Thread.sleep(30000); } catch (InterruptedException e1) { e1.printStackTrace(); } continue; } Thread.currentThread().interrupt(); } } } public HashMap<String, Ip> getIpsFromNsr() { HashMap<String, Ip> ipMap = new HashMap<>(); for(VirtualNetworkFunctionRecord v : fivegNSR.getVnfr()){ Ip ip = v.getVdu().iterator().next().getVnfc_instance().iterator().next().getFloatingIps().iterator().next(); ipMap.put(v.getName(), ip); } return ipMap; } public boolean checkIfNsrIsActive() throws InterruptedException { fivegNSR = client.updateNetworkServiceRecord(fivegNSR); String status = fivegNSR.getStatus().toString(); LOGGER.log(Level.SEVERE, "STATUS of NSR: " + status); switch (status) { case "NULL": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord is NULL at the moment. Will check again later"); return false; } case "INITIALIZED": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord is INITIALIZED at the moment. Will check again later"); return false; } case "ERROR": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord ERRORED while starting. Pls check the Logs"); return false; } case "ACTIVE": { LOGGER.log(Level.SEVERE, "NetworkServiceRecord is ACTIVE now. Will try to get Floating Ips now"); return true; } } LOGGER.log(Level.SEVERE, "NetworkServiceRecord is not ready at the moment. 
Will check again later"); return false; } } @Override public void deleteInstance(String instanceURI) throws InstanceNotFoundException, InvalidRequestException, ProcessingException { try{ OpenBatonClient client = findClient(adminProjectId); client.stopNetworkServiceRecord(); client.deleteNetworkServiceDescriptor(); }catch(Exception e){ e.printStackTrace(); } } @Override public Model getInstance(String instanceURI) throws InstanceNotFoundException, ProcessingException, InvalidRequestException { // TODO Auto-generated method stub return null; } @Override public Model getAllInstances() throws InstanceNotFoundException, ProcessingException { // TODO Auto-generated method stub return null; } @Override public void refreshConfig() throws ProcessingException { // TODO Auto-generated method stub } @Override public void shutdown() { // TODO Auto-generated method stub } @Override public void configure(Config configuration) { // TODO Auto-generated method stub } public void setListener(OpenBatonAdapterMDBSender mdbSender) { // TODO Auto-generated method stub this.listener = mdbSender; } protected String getUsername() { return username; } protected void setUsername(String username) { this.username = username; } protected String getPassword() { return password; } protected void setPassword(String password) { this.password = password; } protected String getNfvoIp() { return nfvoIp; } protected void setNfvoIp(String nfvoIp) { this.nfvoIp = nfvoIp; } protected String getNfvoPort() { return nfvoPort; } protected void setNfvoPort(String nfvoPort) { this.nfvoPort = nfvoPort; } protected String getVersion() { return version; } protected void setVersion(String version) { this.version = version; } protected String getVpnIP() { return vpnIP; } protected void setVpnIP(String vpnIP) { this.vpnIP = vpnIP; } protected String getVpnPort() { return vpnPort; } protected void setVpnPort(String vpnPort) { this.vpnPort = vpnPort; } public HashMap<String, OpenBatonGeneric> getInstanceList() { return instanceList; } public Topology getTopologyObject(final String topologyURI) { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, "Get topology: " + topologyURI); } OpenBatonGeneric fiveG = this.getInstanceList().get(topologyURI); Topology topology = null; if (fiveG instanceof Topology) { topology = (Topology) fiveG; } return topology; } public String parseConfig(Resource resource, String parameter) { Model model = ModelFactory.createDefaultModel(); return resource.getProperty(model.createProperty(OpenBaton.getURI(), parameter)).getLiteral().getString(); } public String getInstanceUri(OpenBatonGeneric OpenBatonGeneric) { return OpenBatonGeneric.getInstanceUri(); } @Override public Resource getAdapterABox() { // TODO Auto-generated method stub return adapterABox; } @Override public Model getAdapterDescriptionModel() { // TODO Auto-generated method stub return adapterTBox; } public OpenBatonGeneric getInstanceObject(final String instanceURI) { final OpenBatonGeneric fiveg = this.getInstanceList().get(instanceURI); if (LOGGER.isLoggable(Level.WARNING)) { LOGGER.warning("Get instance: " + instanceURI); } return fiveg; } public void updateOldVnfPackage() { // TODO Auto-generated method stub } public void addUploadedPackageToDatabase(String id, String fileName,String projectId) { // Resource resourceToCreate = ModelFactory.createDefaultModel().createResource(adapterABox.getLocalName()+"/" +fileName); Resource resourceToCreate = ModelFactory.createDefaultModel().createResource(Omn.NAMESPACE +fileName); 
resourceToCreate.addProperty(Omn_lifecycle.hasID,id); resourceToCreate.addProperty(RDFS.label,fileName); resourceToCreate.addProperty(RDFS.subClassOf, Omn.Resource); resourceToCreate.addProperty(Omn.isAttributeOf, projectId); adapterABox.addProperty(Omn_lifecycle.canImplement, resourceToCreate); listener.publishModelUpdate(resourceToCreate.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); listener.publishModelUpdate(adapterABox.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); } public String uploadPackageToDatabase(String projectId,String fileDirectory) { OpenBatonClient client = findClient(projectId); return client.uploadPackageToDatabase(fileDirectory); } private OpenBatonClient findClient(String projectId) { if(clientList.containsKey(projectId)){ return clientList.get(projectId); }else{ clientList.put(projectId,new OpenBatonClient(this,projectId)); return clientList.get(projectId); } } public OpenBatonClient getAdminClient() { return adminClient; } public String getAdminProjectId() { return adminProjectId; } public void setAdminProjectId(String adminProjectId) { this.adminProjectId = adminProjectId; } // public void createNewVnfPackage() { // String mmeID; // MME mme = new MME(this, "http://TEST.OPENBATON.MME"); // this.createdDebugMME = this.admin.createMME(mme); // this.debugString = mmeID = this.createdDebugMME.getId(); // Model newmModel = ModelFactory.createDefaultModel(); // Resource newResource = newmModel.createResource("http://TEST.OPENBATON.RESOURCE"); // newResource.addProperty(RDF.type, OWL.Class); // newResource.addProperty((Property)Omn_lifecycle.hasID, mmeID); // newResource.addProperty(RDFS.subClassOf, Omn.Resource); // this.adapterABox.addProperty((Property)Omn_lifecycle.canImplement, newResource); // this.adapterABox.getModel().add(newResource.getModel()); // ResIterator propIterator = this.adapterTBox.listSubjectsWithProperty(RDFS.domain, newResource); // while (propIterator.hasNext()) { // Property property = this.adapterTBox.getProperty(((Resource)propIterator.next()).getURI()); // } // this.listener.publishModelUpdate(this.adapterABox.getModel(), UUID.randomUUID().toString(), "INFORM", "TARGET_ORCHESTRATOR"); //} }
Only create a LoginService for instances with floating IPs
OpenBaton/src/main/java/org/fiteagle/adapters/OpenBaton/OpenBatonAdapter.java
Only create a LoginService for instances with floating IPs
<ide><path>OpenBaton/src/main/java/org/fiteagle/adapters/OpenBaton/OpenBatonAdapter.java <ide> HashMap<String,Ip> ipMap = getIpsFromNsr(); <ide> for (Resource r : resIterator.toList()){ <ide> <del> Resource loginService = createdInstances.createResource(OntologyModelUtil.getResourceNamespace() + "LoginService" + UUID.randomUUID().toString()); <del> loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); <del> loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); <del> loginService.addProperty((Property)Omn_service.port, "22"); <del> <del> String username = r.getProperty(Omn_service.username).getObject().asLiteral().getString(); <del> loginService.addProperty((Property)Omn_service.username, username); <add> <ide> try{ <add> Resource loginService = createdInstances.createResource(OntologyModelUtil.getResourceNamespace() + "LoginService" + UUID.randomUUID().toString()); <add> loginService.addProperty(RDF.type, (RDFNode)Omn_service.LoginService); <add> loginService.addProperty((Property)Omn_service.authentication, "ssh-keys"); <add> loginService.addProperty((Property)Omn_service.port, "22"); <add> <add> String username = r.getProperty(Omn_service.username).getObject().asLiteral().getString(); <add> loginService.addProperty((Property)Omn_service.username, username); <ide> String ip = ipMap.keySet().iterator().next(); <ide> loginService.addProperty((Property)Omn_service.hostname, ipMap.get(ip).getIp()); <ide> ipMap.remove(ip); <add> Statement stm2 = new StatementImpl(r, Omn.hasService, loginService); <add> <add> updatedInstances.add(stm2); <add> updatedInstances.add(loginService.listProperties().toList()); <ide> }catch (Exception e) { <del> loginService.addProperty((Property)Omn_service.hostname, "127.0.0.1"); <add>// loginService.addProperty((Property)Omn_service.hostname, "127.0.0.1"); <ide> } <ide> <ide> <ide> Statement stm = new StatementImpl(r, property, Omn_lifecycle.Started); <del> Statement stm2 = new StatementImpl(r, Omn.hasService, loginService); <ide> Statement stm3 = new StatementImpl(property, RDF.type, OWL.FunctionalProperty); <ide> <ide> <ide> updatedInstances.add(stm); <del> updatedInstances.add(stm2); <del> updatedInstances.add(loginService.listProperties().toList()); <add>; <ide> updatedInstances.add(stm3); <ide> <ide> LOGGER.log(Level.SEVERE, "Added LoginService to Resource");
Java
apache-2.0
d46a50348d0720d50f3c038f3aaf7b9f1e7015cb
0
jay-hodgson/SynapseWebClient,jay-hodgson/SynapseWebClient,Sage-Bionetworks/SynapseWebClient,Sage-Bionetworks/SynapseWebClient,jay-hodgson/SynapseWebClient,jay-hodgson/SynapseWebClient,Sage-Bionetworks/SynapseWebClient,Sage-Bionetworks/SynapseWebClient
package org.sagebionetworks.web.client.widget.entity; import static org.sagebionetworks.web.client.ServiceEntryPointUtils.fixServiceEntryPoint; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.function.Consumer; import org.sagebionetworks.repo.model.Entity; import org.sagebionetworks.repo.model.VersionInfo; import org.sagebionetworks.repo.model.VersionableEntity; import org.sagebionetworks.repo.model.entitybundle.v2.EntityBundle; import org.sagebionetworks.repo.model.table.Table; import org.sagebionetworks.web.client.DisplayConstants; import org.sagebionetworks.web.client.GlobalApplicationState; import org.sagebionetworks.web.client.SynapseClientAsync; import org.sagebionetworks.web.client.SynapseJavascriptClient; import org.sagebionetworks.web.client.place.Synapse; import org.sagebionetworks.web.client.place.Synapse.EntityArea; import org.sagebionetworks.web.client.utils.Callback; import org.sagebionetworks.web.client.widget.entity.controller.PreflightController; import org.sagebionetworks.web.client.widget.entity.controller.SynapseAlert; import com.google.gwt.http.client.Request; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.IsWidget; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; /** * This widget shows the properties and annotations as a non-editable table grid. * * @author jayhodgson */ public class VersionHistoryWidget implements VersionHistoryWidgetView.Presenter, IsWidget { private VersionHistoryWidgetView view; private EntityBundle bundle; private SynapseClientAsync synapseClient; private GlobalApplicationState globalApplicationState; public static final Integer VERSION_LIMIT = 100; public PreflightController preflightController; private SynapseAlert synAlert; private boolean canEdit; private Long versionNumber; private SynapseJavascriptClient jsClient; int currentOffset; private Request currentRequest; private List<Consumer<Boolean>> visibilityChangeListeners = new ArrayList<>(); @Inject public VersionHistoryWidget(VersionHistoryWidgetView view, SynapseClientAsync synapseClient, SynapseJavascriptClient jsClient, GlobalApplicationState globalApplicationState, PreflightController preflightController, SynapseAlert synAlert) { super(); this.synapseClient = synapseClient; fixServiceEntryPoint(synapseClient); this.jsClient = jsClient; this.view = view; this.globalApplicationState = globalApplicationState; this.preflightController = preflightController; this.view.setPresenter(this); this.synAlert = synAlert; view.setSynAlert(synAlert); } public void setEntityBundle(EntityBundle bundle, Long versionNumber) { this.bundle = bundle; this.versionNumber = versionNumber; this.canEdit = bundle.getPermissions().getCanCertifiedUserEdit(); refreshFileHistory(); } @Override public void updateVersionInfo(String newLabel, String newComment) { editCurrentVersionInfo(bundle.getEntity(), newLabel, newComment); } private void editCurrentVersionInfo(Entity entity, String version, String comment) { if (entity instanceof VersionableEntity) { final VersionableEntity vb = (VersionableEntity) entity; if (Objects.equals(version, vb.getVersionLabel()) && Objects.equals(comment, vb.getVersionComment())) { // no-op view.hideEditVersionInfo(); return; } String versionLabel = null; if (version != null) versionLabel = version.toString(); vb.setVersionLabel(versionLabel); vb.setVersionComment(comment); synAlert.clear(); jsClient.updateEntity(vb, null, null, new AsyncCallback<Entity>() { @Override 
public void onFailure(Throwable caught) { synAlert.handleException(caught); } @Override public void onSuccess(Entity result) { view.hideEditVersionInfo(); view.showInfo(DisplayConstants.VERSION_INFO_UPDATED + ": " + vb.getName()); globalApplicationState.refreshPage(); } }); } } @Override public void deleteVersion(final Long versionNumber) { synAlert.clear(); synapseClient.deleteEntityVersionById(bundle.getEntity().getId(), versionNumber, new AsyncCallback<Void>() { @Override public void onFailure(Throwable caught) { synAlert.handleException(caught); } @Override public void onSuccess(Void result) { view.showInfo("Version " + versionNumber + " of " + bundle.getEntity().getId() + " " + DisplayConstants.LABEL_DELETED); // SWC-4002: if deleting the version that we're looking at, go to the latest version if (versionNumber.equals(VersionHistoryWidget.this.versionNumber)) { gotoCurrentVersion(); } else { refreshFileHistory(); } } }); } @Override public Widget asWidget() { // The view is the real widget. return view.asWidget(); } public void refreshFileHistory() { synAlert.clear(); view.clearVersions(); currentOffset = 0; if (currentRequest != null) { currentRequest.cancel(); } onMore(); } public void gotoCurrentVersion() { Long targetVersion = null; Synapse synapse = new Synapse(bundle.getEntity().getId(), targetVersion, EntityArea.FILES, null); globalApplicationState.getPlaceChanger().goTo(synapse); } public void onMore() { currentRequest = jsClient.getEntityVersions(bundle.getEntity().getId(), currentOffset, VERSION_LIMIT, new AsyncCallback<List<VersionInfo>>() { @Override public void onSuccess(List<VersionInfo> results) { view.setMoreButtonVisible(results.size() == VERSION_LIMIT); if (currentOffset == 0) { // TODO: Use `isLatestVersion` when PLFM-6583 is complete. 
boolean isCurrentVersion = versionNumber == null; // we know the current version based on this, unless we're looking at a Table if (!(bundle.getEntity() instanceof Table)) { Long currentVersion = results.get(0).getVersionNumber(); isCurrentVersion = isCurrentVersion || currentVersion.equals(versionNumber); } view.setEntityBundle(bundle.getEntity(), !isCurrentVersion); view.setEditVersionInfoButtonVisible(isCurrentVersion && canEdit && !(bundle.getEntity() instanceof Table)); if (results.size() == 0) { view.showNoResults(); } } if (versionNumber == null && currentOffset == 0 && results.size() > 0) { // if not a table, then the first row represents the current version if (!(bundle.getEntity() instanceof Table)) { versionNumber = results.get(0).getVersionNumber(); } } for (VersionInfo versionInfo : results) { view.addVersion(bundle.getEntity().getId(), versionInfo, canEdit, versionInfo.getVersionNumber().equals(versionNumber)); } currentOffset += VERSION_LIMIT; } @Override public void onFailure(Throwable caught) { view.showErrorMessage(caught.getMessage()); } }); } /** * For testing purposes only * * @return */ public Long getVersionNumber() { return versionNumber; } @Override public void onEditVersionInfoClicked() { preflightController.checkUploadToEntity(bundle, new Callback() { @Override public void invoke() { final VersionableEntity vb = (VersionableEntity) bundle.getEntity(); view.showEditVersionInfo(vb.getVersionLabel(), vb.getVersionComment()); } }); } public void setVisible(boolean visible) { view.setVisible(visible); invokeVisibilityChangeListeners(); } public boolean isVisible() { return view.isVisible(); } public void registerVisibilityChangeListener(Consumer<Boolean> callback) { visibilityChangeListeners.add(callback); invokeVisibilityChangeListeners(); } private void invokeVisibilityChangeListeners() { for (Consumer<Boolean> cb : visibilityChangeListeners) { cb.accept(this.isVisible()); } } }
src/main/java/org/sagebionetworks/web/client/widget/entity/VersionHistoryWidget.java
package org.sagebionetworks.web.client.widget.entity; import static org.sagebionetworks.web.client.ServiceEntryPointUtils.fixServiceEntryPoint; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.function.Consumer; import org.sagebionetworks.repo.model.Entity; import org.sagebionetworks.repo.model.VersionInfo; import org.sagebionetworks.repo.model.VersionableEntity; import org.sagebionetworks.repo.model.entitybundle.v2.EntityBundle; import org.sagebionetworks.repo.model.table.Table; import org.sagebionetworks.web.client.DisplayConstants; import org.sagebionetworks.web.client.GlobalApplicationState; import org.sagebionetworks.web.client.SynapseClientAsync; import org.sagebionetworks.web.client.SynapseJavascriptClient; import org.sagebionetworks.web.client.place.Synapse; import org.sagebionetworks.web.client.place.Synapse.EntityArea; import org.sagebionetworks.web.client.utils.Callback; import org.sagebionetworks.web.client.widget.entity.controller.PreflightController; import org.sagebionetworks.web.client.widget.entity.controller.SynapseAlert; import com.google.gwt.http.client.Request; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.IsWidget; import com.google.gwt.user.client.ui.Widget; import com.google.inject.Inject; /** * This widget shows the properties and annotations as a non-editable table grid. * * @author jayhodgson */ public class VersionHistoryWidget implements VersionHistoryWidgetView.Presenter, IsWidget { private VersionHistoryWidgetView view; private EntityBundle bundle; private SynapseClientAsync synapseClient; private GlobalApplicationState globalApplicationState; public static final Integer VERSION_LIMIT = 100; public PreflightController preflightController; private SynapseAlert synAlert; private boolean canEdit; private Long versionNumber; private SynapseJavascriptClient jsClient; int currentOffset; private Request currentRequest; private List<Consumer<Boolean>> visibilityChangeListeners = new ArrayList<>(); @Inject public VersionHistoryWidget(VersionHistoryWidgetView view, SynapseClientAsync synapseClient, SynapseJavascriptClient jsClient, GlobalApplicationState globalApplicationState, PreflightController preflightController, SynapseAlert synAlert) { super(); this.synapseClient = synapseClient; fixServiceEntryPoint(synapseClient); this.jsClient = jsClient; this.view = view; this.globalApplicationState = globalApplicationState; this.preflightController = preflightController; this.view.setPresenter(this); this.synAlert = synAlert; view.setSynAlert(synAlert); } public void setEntityBundle(EntityBundle bundle, Long versionNumber) { this.bundle = bundle; this.versionNumber = versionNumber; this.canEdit = bundle.getPermissions().getCanCertifiedUserEdit(); refreshFileHistory(); } @Override public void updateVersionInfo(String newLabel, String newComment) { editCurrentVersionInfo(bundle.getEntity(), newLabel, newComment); } private void editCurrentVersionInfo(Entity entity, String version, String comment) { if (entity instanceof VersionableEntity) { final VersionableEntity vb = (VersionableEntity) entity; if (Objects.equals(version, vb.getVersionLabel()) && Objects.equals(comment, vb.getVersionComment())) { // no-op view.hideEditVersionInfo(); return; } String versionLabel = null; if (version != null) versionLabel = version.toString(); vb.setVersionLabel(versionLabel); vb.setVersionComment(comment); synAlert.clear(); jsClient.updateEntity(vb, null, null, new AsyncCallback<Entity>() { @Override 
public void onFailure(Throwable caught) { synAlert.handleException(caught); } @Override public void onSuccess(Entity result) { view.hideEditVersionInfo(); view.showInfo(DisplayConstants.VERSION_INFO_UPDATED + ": " + vb.getName()); globalApplicationState.refreshPage(); } }); } } @Override public void deleteVersion(final Long versionNumber) { synAlert.clear(); synapseClient.deleteEntityVersionById(bundle.getEntity().getId(), versionNumber, new AsyncCallback<Void>() { @Override public void onFailure(Throwable caught) { synAlert.handleException(caught); } @Override public void onSuccess(Void result) { view.showInfo("Version " + versionNumber + " of " + bundle.getEntity().getId() + " " + DisplayConstants.LABEL_DELETED); // SWC-4002: if deleting the version that we're looking at, go to the latest version if (versionNumber.equals(VersionHistoryWidget.this.versionNumber)) { gotoCurrentVersion(); } else { refreshFileHistory(); } } }); } @Override public Widget asWidget() { // The view is the real widget. return view.asWidget(); } public void refreshFileHistory() { synAlert.clear(); view.clearVersions(); currentOffset = 0; if (currentRequest != null) { currentRequest.cancel(); } onMore(); } public void gotoCurrentVersion() { Long targetVersion = null; Synapse synapse = new Synapse(bundle.getEntity().getId(), targetVersion, EntityArea.FILES, null); globalApplicationState.getPlaceChanger().goTo(synapse); } public void onMore() { currentRequest = jsClient.getEntityVersions(bundle.getEntity().getId(), currentOffset, VERSION_LIMIT, new AsyncCallback<List<VersionInfo>>() { @Override public void onSuccess(List<VersionInfo> results) { view.setMoreButtonVisible(results.size() == VERSION_LIMIT); if (currentOffset == 0) { // TODO: Use `isLatestVersion` when PLFM-6583 is complete. 
boolean isCurrentVersion = versionNumber == null; // we know the current version based on this, unless we're looking at a Table if (!(bundle.getEntity() instanceof Table)) { Long currentVersion = results.get(0).getVersionNumber(); isCurrentVersion = isCurrentVersion || currentVersion.equals(versionNumber); } view.setEntityBundle(bundle.getEntity(), !isCurrentVersion); view.setEditVersionInfoButtonVisible(isCurrentVersion && canEdit && !(bundle.getEntity() instanceof Table)); if (results.size() == 0) { view.showNoResults(); } } if (versionNumber == null && currentOffset == 0 && results.size() > 0) { // if not a table, then the first row represents the current version if (!(bundle.getEntity() instanceof Table)) { versionNumber = results.get(0).getVersionNumber(); } } for (VersionInfo versionInfo : results) { view.addVersion(bundle.getEntity().getId(), versionInfo, canEdit, versionInfo.getVersionNumber().equals(versionNumber)); } currentOffset += VERSION_LIMIT; } @Override public void onFailure(Throwable caught) { view.showErrorMessage(caught.getMessage()); } }); } /** * For testing purposes only * * @return */ public Long getVersionNumber() { return versionNumber; } @Override public void onEditVersionInfoClicked() { preflightController.checkUploadToEntity(bundle, new Callback() { @Override public void invoke() { final VersionableEntity vb = (VersionableEntity) bundle.getEntity(); view.showEditVersionInfo(vb.getVersionLabel(), vb.getVersionComment()); } }); } public void setVisible(boolean visible) { view.setVisible(visible); invokeChangeListeners(); } public boolean isVisible() { return view.isVisible(); } public void registerVisibilityChangeListener(Consumer<Boolean> callback) { visibilityChangeListeners.add(callback); invokeChangeListeners(); } private void invokeChangeListeners() { for (Consumer<Boolean> cb : visibilityChangeListeners) { cb.accept(this.isVisible()); } } }
Rename method from code review
src/main/java/org/sagebionetworks/web/client/widget/entity/VersionHistoryWidget.java
Rename method from code review
<ide><path>rc/main/java/org/sagebionetworks/web/client/widget/entity/VersionHistoryWidget.java <ide> <ide> public void setVisible(boolean visible) { <ide> view.setVisible(visible); <del> invokeChangeListeners(); <add> invokeVisibilityChangeListeners(); <ide> } <ide> <ide> public boolean isVisible() { <ide> <ide> public void registerVisibilityChangeListener(Consumer<Boolean> callback) { <ide> visibilityChangeListeners.add(callback); <del> invokeChangeListeners(); <del> } <del> <del> private void invokeChangeListeners() { <add> invokeVisibilityChangeListeners(); <add> } <add> <add> private void invokeVisibilityChangeListeners() { <ide> for (Consumer<Boolean> cb : visibilityChangeListeners) { <ide> cb.accept(this.isVisible()); <ide> }
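Beyond the rename, the widget implements a small observer pattern: registering a visibility listener immediately pushes the current state, and every later setVisible call re-notifies all registered listeners. A self-contained sketch of that pattern (VisibilityNotifier is a hypothetical stand-in for the GWT widget):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

public class VisibilityNotifier {
    private final List<Consumer<Boolean>> listeners = new ArrayList<>();
    private boolean visible;

    public void setVisible(boolean visible) {
        this.visible = visible;
        invokeVisibilityChangeListeners();
    }

    public void registerVisibilityChangeListener(Consumer<Boolean> cb) {
        listeners.add(cb);
        invokeVisibilityChangeListeners(); // push the current state right away
    }

    private void invokeVisibilityChangeListeners() {
        for (Consumer<Boolean> cb : listeners) {
            cb.accept(visible);
        }
    }

    public static void main(String[] args) {
        VisibilityNotifier n = new VisibilityNotifier();
        n.registerVisibilityChangeListener(v -> System.out.println("visible=" + v));
        n.setVisible(true); // prints visible=false on registration, then visible=true
    }
}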
Java
mit
d0a73d402b1ad8ebe5cdd82215714d8ba0443cad
0
owwlo/WebSearchEngine,owwlo/WebSearchEngine,owwlo/WebSearchEngine
package edu.nyu.cs.cs2580; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.math.BigInteger; import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import edu.nyu.cs.cs2580.SearchEngine.Options; import edu.nyu.cs.cs2580.utils.ClickLoggingManager; import edu.nyu.cs.cs2580.utils.ScoredDocumentComparator; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.PriorityQueue; import java.util.Set; import java.util.Vector; /** * Handles each incoming query, students do not need to change this class except * to provide more query time CGI arguments and the HTML output. N.B. This class * is not thread-safe. * * @author congyu * @author fdiaz */ public class QueryHandler implements HttpHandler { /** * CGI arguments provided by the user through the URL. This will determine * which Ranker to use and what output format to adopt. For simplicity, all * arguments are publicly accessible. */ public static Set<String> stopWords = new HashSet<String>(Arrays.asList(new String[]{ "i", "me", "my", "myself", "we", "our", "ours", "ourselves", "you", "your", "yours", "yourself", "yourselves", "he", "him", "his", "himself", "she", "her", "hers", "herself", "it", "its", "itself", "they", "them", "their", "theirs", "themselves", "what", "which", "who", "whom", "this", "that", "these", "those", "am", "is", "are", "was", "were", "be", "been", "being", "have", "has", "had", "having", "do", "does", "did", "doing", "a", "an", "the", "and", "but", "if", "or", "because", "as", "until", "while", "of", "at", "by", "for", "with", "about", "against", "between", "into", "through", "during", "before", "after", "above", "below", "to", "from", "up", "down", "in", "out", "on", "off", "over", "under", "again", "further", "then", "once", "here", "there", "when", "where", "why", "how", "all", "any", "both", "each", "few", "more", "most", "other", "some", "such", "no", "nor", "not", "only", "own", "same", "so", "than", "too", "very", "s", "t", "can", "will", "just", "don", "should", "now" })); public static class CgiArguments { // The raw user query public String _query = ""; // How many results to return private int _numResults = 10; // hw3 prf: top m terms from top k documents private int _numDocs = 10; private int _numTerms = 5; // The type of the ranker we will be using. public enum RankerType { NONE, FULLSCAN, CONJUNCTIVE, FAVORITE, COSINE, PHRASE, QL, LINEAR, } public RankerType _rankerType = RankerType.NONE; // The output format. public enum OutputFormat { TEXT, HTML, } public OutputFormat _outputFormat = OutputFormat.TEXT; public CgiArguments(String uriQuery) { String[] params = uriQuery.split("&"); for (String param : params) { String[] keyval = param.split("=", 2); if (keyval.length < 2) { continue; } String key = keyval[0].toLowerCase(); String val = keyval[1]; if (key.equals("query")) { _query = val; } else if (key.equals("num")) { try { _numResults = Integer.parseInt(val); } catch (NumberFormatException e) { // Ignored, search engine should never fail upon invalid // user input. 
} } else if (key.equals("ranker")) { try { _rankerType = RankerType.valueOf(val.toUpperCase()); } catch (IllegalArgumentException e) { // Ignored, search engine should never fail upon invalid // user input. } } else if (key.equals("format")) { try { _outputFormat = OutputFormat.valueOf(val.toUpperCase()); } catch (IllegalArgumentException e) { // Ignored, search engine should never fail upon invalid // user input. } } else if (key.equals("numdocs")) { // hw3 prf try { _numDocs = Integer.parseInt(val); } catch (NumberFormatException e) { // Ignored } } else if (key.equals("numterms")) { // hw3 prf try { _numTerms = Integer.parseInt(val); } catch (NumberFormatException e) { // Ignored } } } // End of iterating over params } } // hw3 prf: for priority queue of mapentry<term, occ> private class CompareByValue implements Comparator<Map.Entry<String, Double>> { @Override public int compare(Map.Entry<String, Double> lhs, Map.Entry<String, Double> rhs) { return lhs.getValue().compareTo(rhs.getValue()); } } // For accessing the underlying documents to be used by the Ranker. Since // we are not worried about thread-safety here, the Indexer class must take // care of thread-safety. private Indexer _indexer; public QueryHandler(Options options, Indexer indexer) { _indexer = indexer; } private void respondWithMsg(HttpExchange exchange, final String message) throws IOException { Headers responseHeaders = exchange.getResponseHeaders(); responseHeaders.set("Content-Type", "text/plain"); exchange.sendResponseHeaders(200, 0); // arbitrary number of bytes OutputStream responseBody = exchange.getResponseBody(); responseBody.write(message.getBytes()); responseBody.close(); } private void constructTextOutput(final Vector<ScoredDocument> docs, StringBuffer response) { for (ScoredDocument doc : docs) { response.append(response.length() > 0 ? "\n" : ""); response.append(doc.asTextResult()); } response.append(response.length() > 0 ? "\n" : ""); } // hw3 prf: query representation output private void constructTermOutput(final Map.Entry<String, Double>[] terms, StringBuffer response) { for (int i = terms.length - 1; i >= 0; ++i) { response.append(response.length() > 0 ? "\n" : ""); response.append(terms[i].getKey() + "\t" + terms[i].getValue()); } response.append(response.length() > 0 ? "\n" : ""); } public static Map<String, String> getQueryMap(String query) { String[] params = query.split("&"); Map<String, String> map = new HashMap<String, String>(); for (String param : params) { String name = param.split("=")[0]; String value = param.split("=")[1]; map.put(name, value); } return map; } public void handle(HttpExchange exchange) throws IOException { String requestMethod = exchange.getRequestMethod(); if (!requestMethod.equalsIgnoreCase("GET")) { // GET requests only. return; } // Print the user request header. 
Headers requestHeaders = exchange.getRequestHeaders(); System.out.print("Incoming request: "); for (String key : requestHeaders.keySet()) { System.out.print(key + ":" + requestHeaders.get(key) + "; "); } String uriQuery = exchange.getRequestURI().getQuery(); String uriPath = exchange.getRequestURI().getPath(); if (uriPath == null || uriQuery == null) { respondWithMsg(exchange, "Something wrong with the URI!"); } System.out.println("Query: " + uriQuery); if (uriPath.equals("/click_loging")) { Map<String, String> query_map = getQueryMap(uriQuery); Set<String> keys = query_map.keySet(); if (keys.contains("did") && keys.contains("query") && keys.contains("ranker") && keys.contains("action")) { ClickLoggingManager clm = ClickLoggingManager.getInstance(); // Ensure the session will be the same when open multiple // threads synchronized (clm) { String session = (String) exchange.getAttribute("session"); if (session == null) { SecureRandom random = new SecureRandom(); session = new BigInteger(130, random).toString(32); exchange.setAttribute("session", session); } clm.writeToLog(session, query_map.get("query"), query_map.get("did"), query_map.get("action")); } respondWithMsg(exchange, "Success!"); } } else if (uriPath.equals("/search")) { // Process the CGI arguments. CgiArguments cgiArgs = new CgiArguments(uriQuery); if (cgiArgs._query.isEmpty()) { respondWithMsg(exchange, "No query is given!"); } // Create the ranker. Ranker ranker = Ranker.Factory.getRankerByArguments(cgiArgs, SearchEngine.OPTIONS, _indexer); if (ranker == null) { respondWithMsg(exchange, "Ranker " + cgiArgs._rankerType.toString() + " is not valid!"); } // Processing the query. Query processedQuery = new QueryPhrase(cgiArgs._query); processedQuery.processQuery(); // Ranking. Vector<ScoredDocument> scoredDocs = ranker.runQuery(processedQuery, cgiArgs._numResults); StringBuffer response = new StringBuffer(); switch (cgiArgs._outputFormat) { case TEXT: constructTextOutput(scoredDocs, response); break; case HTML: // Sort result of ScoredDocuments Collections.sort(scoredDocs, new ScoredDocumentComparator()); Map<String, String> query_map = getQueryMap(uriQuery); // Read initial html file File htmlFile = new File("./public/index.html"); if (!htmlFile.exists()) { htmlFile = new File("../public/index.html"); } InputStream fileIn = new FileInputStream(htmlFile); byte[] data = new byte[(int) htmlFile.length()]; fileIn.read(data); fileIn.close(); String htmlStr = new String(data, "UTF-8"); // Write parameters into it htmlStr = htmlStr.replace("{{query}}", query_map.get("query")); htmlStr = htmlStr .replace("{{ranker}}", query_map.get("ranker")); htmlStr = htmlStr.replace("{{count}}", String.valueOf(scoredDocs.size())); StringBuilder sb = new StringBuilder(); for (int i = 0; i < 10 && i < scoredDocs.size(); i++) { ScoredDocument sd = scoredDocs.get(i); sb.append(sd.asHtmlResult()); } htmlStr = htmlStr.replace("{{result}}", sb.toString()); Headers responseHeaders = exchange.getResponseHeaders(); exchange.sendResponseHeaders(200, 0); OutputStream responseBody = exchange.getResponseBody(); // Make browser this it is a HTML responseHeaders.set("Content-Type", "text/html"); responseBody.write(htmlStr.getBytes()); responseBody.close(); return; default: // nothing } respondWithMsg(exchange, response.toString()); System.out.println("Finished query: " + cgiArgs._query); } else if (uriPath.equals("/prf")) { // query representation // Process the CGI arguments. 
CgiArguments cgiArgs = new CgiArguments(uriQuery); if (cgiArgs._query.isEmpty()) { respondWithMsg(exchange, "No query is given!"); } // Create the ranker. Ranker ranker = Ranker.Factory.getRankerByArguments(cgiArgs, SearchEngine.OPTIONS, _indexer); if (ranker == null) { respondWithMsg(exchange, "Ranker " + cgiArgs._rankerType.toString() + " is not valid!"); } // Processing the query. Query processedQuery = new QueryPhrase(cgiArgs._query); processedQuery.processQuery(); // Ranking. Vector<ScoredDocument> scoredDocs = ranker.runQuery(processedQuery, cgiArgs._numDocs); StringBuffer response = new StringBuffer(); // get all the doc, generate a map: term->occ(prob) in all docs Map<String, Double> term_map = new HashMap<String, Double>(); int all_occ = 0; // all term occ in all k docs // aggregate over k documents for (ScoredDocument sdoc : scoredDocs) { Map<String, Integer> termInDoc = _indexer.documentTermFrequencyMap(sdoc._doc._docid); //new HashMap<String, Integer>();// _indexer.getTerms(sdoc._doc._docid); all_occ += termInDoc.size(); for (Map.Entry<String, Integer> entry : termInDoc.entrySet()) { String term = entry.getKey(); Integer occ = entry.getValue(); if (term_map.containsKey(term)) { term_map.put(term, term_map.get(term) + occ); } else { term_map.put(term, (double) occ); } } } if (all_occ == 0) { System.out.println("all_occ == 0 ???"); } // get the top m terms in ascending order PriorityQueue<Map.Entry<String, Double>> topTerms = new PriorityQueue<Map.Entry<String, Double>>( cgiArgs._numTerms, new CompareByValue()); for (Map.Entry<String, Double> entry : term_map.entrySet()) { if (!stopWords.contains(entry)){ topTerms.add(entry); if (topTerms.size() > cgiArgs._numTerms) { topTerms.poll(); } } } Map.Entry<String, Double>[] top_term = (Map.Entry<String, Double>[]) topTerms .toArray(); Arrays.sort(top_term); // divide by denominator double sum = 0.0; for (Map.Entry<String, Double> e : top_term) { double prob = e.getValue() / all_occ; e.setValue(prob); sum += prob; } if (sum == 0.0) { System.out.println("sum == 0 ???"); } else { System.out.println("Sum is " + sum); } // normalize for (Map.Entry<String, Double> e : top_term) { e.setValue(e.getValue() / sum); } constructTermOutput(top_term, response); respondWithMsg(exchange, response.toString()); } else { respondWithMsg(exchange, "No valid query is given."); } } }
src/edu/nyu/cs/cs2580/QueryHandler.java
package edu.nyu.cs.cs2580; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.math.BigInteger; import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import edu.nyu.cs.cs2580.SearchEngine.Options; import edu.nyu.cs.cs2580.utils.ClickLoggingManager; import edu.nyu.cs.cs2580.utils.ScoredDocumentComparator; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Map; import java.util.PriorityQueue; import java.util.Set; import java.util.Vector; /** * Handles each incoming query, students do not need to change this class except * to provide more query time CGI arguments and the HTML output. N.B. This class * is not thread-safe. * * @author congyu * @author fdiaz */ public class QueryHandler implements HttpHandler { /** * CGI arguments provided by the user through the URL. This will determine * which Ranker to use and what output format to adopt. For simplicity, all * arguments are publicly accessible. */ public static class CgiArguments { // The raw user query public String _query = ""; // How many results to return private int _numResults = 10; // hw3 prf: top m terms from top k documents private int _numDocs = 10; private int _numTerms = 5; // The type of the ranker we will be using. public enum RankerType { NONE, FULLSCAN, CONJUNCTIVE, FAVORITE, COSINE, PHRASE, QL, LINEAR, } public RankerType _rankerType = RankerType.NONE; // The output format. public enum OutputFormat { TEXT, HTML, } public OutputFormat _outputFormat = OutputFormat.TEXT; public CgiArguments(String uriQuery) { String[] params = uriQuery.split("&"); for (String param : params) { String[] keyval = param.split("=", 2); if (keyval.length < 2) { continue; } String key = keyval[0].toLowerCase(); String val = keyval[1]; if (key.equals("query")) { _query = val; } else if (key.equals("num")) { try { _numResults = Integer.parseInt(val); } catch (NumberFormatException e) { // Ignored, search engine should never fail upon invalid // user input. } } else if (key.equals("ranker")) { try { _rankerType = RankerType.valueOf(val.toUpperCase()); } catch (IllegalArgumentException e) { // Ignored, search engine should never fail upon invalid // user input. } } else if (key.equals("format")) { try { _outputFormat = OutputFormat.valueOf(val.toUpperCase()); } catch (IllegalArgumentException e) { // Ignored, search engine should never fail upon invalid // user input. } } else if (key.equals("numdocs")) { // hw3 prf try { _numDocs = Integer.parseInt(val); } catch (NumberFormatException e) { // Ignored } } else if (key.equals("numterms")) { // hw3 prf try { _numTerms = Integer.parseInt(val); } catch (NumberFormatException e) { // Ignored } } } // End of iterating over params } } // hw3 prf: for priority queue of mapentry<term, occ> private class CompareByValue implements Comparator<Map.Entry<String, Double>> { @Override public int compare(Map.Entry<String, Double> lhs, Map.Entry<String, Double> rhs) { return lhs.getValue().compareTo(rhs.getValue()); } } // For accessing the underlying documents to be used by the Ranker. Since // we are not worried about thread-safety here, the Indexer class must take // care of thread-safety. 
private Indexer _indexer; public QueryHandler(Options options, Indexer indexer) { _indexer = indexer; } private void respondWithMsg(HttpExchange exchange, final String message) throws IOException { Headers responseHeaders = exchange.getResponseHeaders(); responseHeaders.set("Content-Type", "text/plain"); exchange.sendResponseHeaders(200, 0); // arbitrary number of bytes OutputStream responseBody = exchange.getResponseBody(); responseBody.write(message.getBytes()); responseBody.close(); } private void constructTextOutput(final Vector<ScoredDocument> docs, StringBuffer response) { for (ScoredDocument doc : docs) { response.append(response.length() > 0 ? "\n" : ""); response.append(doc.asTextResult()); } response.append(response.length() > 0 ? "\n" : ""); } // hw3 prf: query representation output private void constructTermOutput(final Map.Entry<String, Double>[] terms, StringBuffer response) { for (int i = terms.length - 1; i >= 0; ++i) { response.append(response.length() > 0 ? "\n" : ""); response.append(terms[i].getKey() + "\t" + terms[i].getValue()); } response.append(response.length() > 0 ? "\n" : ""); } public static Map<String, String> getQueryMap(String query) { String[] params = query.split("&"); Map<String, String> map = new HashMap<String, String>(); for (String param : params) { String name = param.split("=")[0]; String value = param.split("=")[1]; map.put(name, value); } return map; } public void handle(HttpExchange exchange) throws IOException { String requestMethod = exchange.getRequestMethod(); if (!requestMethod.equalsIgnoreCase("GET")) { // GET requests only. return; } // Print the user request header. Headers requestHeaders = exchange.getRequestHeaders(); System.out.print("Incoming request: "); for (String key : requestHeaders.keySet()) { System.out.print(key + ":" + requestHeaders.get(key) + "; "); } String uriQuery = exchange.getRequestURI().getQuery(); String uriPath = exchange.getRequestURI().getPath(); if (uriPath == null || uriQuery == null) { respondWithMsg(exchange, "Something wrong with the URI!"); } System.out.println("Query: " + uriQuery); if (uriPath.equals("/click_loging")) { Map<String, String> query_map = getQueryMap(uriQuery); Set<String> keys = query_map.keySet(); if (keys.contains("did") && keys.contains("query") && keys.contains("ranker") && keys.contains("action")) { ClickLoggingManager clm = ClickLoggingManager.getInstance(); // Ensure the session will be the same when open multiple // threads synchronized (clm) { String session = (String) exchange.getAttribute("session"); if (session == null) { SecureRandom random = new SecureRandom(); session = new BigInteger(130, random).toString(32); exchange.setAttribute("session", session); } clm.writeToLog(session, query_map.get("query"), query_map.get("did"), query_map.get("action")); } respondWithMsg(exchange, "Success!"); } } else if (uriPath.equals("/search")) { // Process the CGI arguments. CgiArguments cgiArgs = new CgiArguments(uriQuery); if (cgiArgs._query.isEmpty()) { respondWithMsg(exchange, "No query is given!"); } // Create the ranker. Ranker ranker = Ranker.Factory.getRankerByArguments(cgiArgs, SearchEngine.OPTIONS, _indexer); if (ranker == null) { respondWithMsg(exchange, "Ranker " + cgiArgs._rankerType.toString() + " is not valid!"); } // Processing the query. Query processedQuery = new QueryPhrase(cgiArgs._query); processedQuery.processQuery(); // Ranking. 
Vector<ScoredDocument> scoredDocs = ranker.runQuery(processedQuery, cgiArgs._numResults); StringBuffer response = new StringBuffer(); switch (cgiArgs._outputFormat) { case TEXT: constructTextOutput(scoredDocs, response); break; case HTML: // Sort result of ScoredDocuments Collections.sort(scoredDocs, new ScoredDocumentComparator()); Map<String, String> query_map = getQueryMap(uriQuery); // Read initial html file File htmlFile = new File("./public/index.html"); if (!htmlFile.exists()) { htmlFile = new File("../public/index.html"); } InputStream fileIn = new FileInputStream(htmlFile); byte[] data = new byte[(int) htmlFile.length()]; fileIn.read(data); fileIn.close(); String htmlStr = new String(data, "UTF-8"); // Write parameters into it htmlStr = htmlStr.replace("{{query}}", query_map.get("query")); htmlStr = htmlStr .replace("{{ranker}}", query_map.get("ranker")); htmlStr = htmlStr.replace("{{count}}", String.valueOf(scoredDocs.size())); StringBuilder sb = new StringBuilder(); for (int i = 0; i < 10 && i < scoredDocs.size(); i++) { ScoredDocument sd = scoredDocs.get(i); sb.append(sd.asHtmlResult()); } htmlStr = htmlStr.replace("{{result}}", sb.toString()); Headers responseHeaders = exchange.getResponseHeaders(); exchange.sendResponseHeaders(200, 0); OutputStream responseBody = exchange.getResponseBody(); // Make browser this it is a HTML responseHeaders.set("Content-Type", "text/html"); responseBody.write(htmlStr.getBytes()); responseBody.close(); return; default: // nothing } respondWithMsg(exchange, response.toString()); System.out.println("Finished query: " + cgiArgs._query); } else if (uriPath.equals("/prf")) { // query representation // Process the CGI arguments. CgiArguments cgiArgs = new CgiArguments(uriQuery); if (cgiArgs._query.isEmpty()) { respondWithMsg(exchange, "No query is given!"); } // Create the ranker. Ranker ranker = Ranker.Factory.getRankerByArguments(cgiArgs, SearchEngine.OPTIONS, _indexer); if (ranker == null) { respondWithMsg(exchange, "Ranker " + cgiArgs._rankerType.toString() + " is not valid!"); } // Processing the query. Query processedQuery = new QueryPhrase(cgiArgs._query); processedQuery.processQuery(); // Ranking. 
Vector<ScoredDocument> scoredDocs = ranker.runQuery(processedQuery, cgiArgs._numDocs); StringBuffer response = new StringBuffer(); // get all the doc, generate a map: term->occ(prob) in all docs Map<String, Double> term_map = new HashMap<String, Double>(); int all_occ = 0; // all term occ in all k docs // aggregate over k documents for (ScoredDocument sdoc : scoredDocs) { Map<String, Integer> termInDoc = _indexer.documentTermFrequencyMap(sdoc._doc._docid); //new HashMap<String, Integer>();// _indexer.getTerms(sdoc._doc._docid); all_occ += termInDoc.size(); for (Map.Entry<String, Integer> entry : termInDoc.entrySet()) { String term = entry.getKey(); Integer occ = entry.getValue(); if (term_map.containsKey(term)) { term_map.put(term, term_map.get(term) + occ); } else { term_map.put(term, (double) occ); } } } if (all_occ == 0) { System.out.println("all_occ == 0 ???"); } // get the top m terms in ascending order PriorityQueue<Map.Entry<String, Double>> topTerms = new PriorityQueue<Map.Entry<String, Double>>( cgiArgs._numTerms, new CompareByValue()); for (Map.Entry<String, Double> entry : term_map.entrySet()) { topTerms.add(entry); if (topTerms.size() > cgiArgs._numTerms) { topTerms.poll(); } } Map.Entry<String, Double>[] top_term = (Map.Entry<String, Double>[]) topTerms .toArray(); Arrays.sort(top_term); // divide by denominator double sum = 0.0; for (Map.Entry<String, Double> e : top_term) { double prob = e.getValue() / all_occ; e.setValue(prob); sum += prob; } if (sum == 0.0) { System.out.println("sum == 0 ???"); } else { System.out.println("Sum is " + sum); } // normalize for (Map.Entry<String, Double> e : top_term) { e.setValue(e.getValue() / sum); } constructTermOutput(top_term, response); respondWithMsg(exchange, response.toString()); } else { respondWithMsg(exchange, "No valid query is given."); } } }
Skip stopwords when selecting top PRF terms
src/edu/nyu/cs/cs2580/QueryHandler.java
Skip stopwords when selecting top PRF terms
<ide><path>rc/edu/nyu/cs/cs2580/QueryHandler.java <ide> import java.util.Collections; <ide> import java.util.Comparator; <ide> import java.util.HashMap; <add>import java.util.HashSet; <ide> import java.util.Map; <ide> import java.util.PriorityQueue; <ide> import java.util.Set; <ide> * which Ranker to use and what output format to adopt. For simplicity, all <ide> * arguments are publicly accessible. <ide> */ <add> public static Set<String> stopWords = new HashSet<String>(Arrays.asList(new String[]{ <add> "i", "me", "my", "myself", "we", "our", "ours", "ourselves", "you", "your", "yours", <add> "yourself", "yourselves", "he", "him", "his", "himself", "she", "her", "hers", <add> "herself", "it", "its", "itself", "they", "them", "their", "theirs", "themselves", <add> "what", "which", "who", "whom", "this", "that", "these", "those", "am", "is", "are", <add> "was", "were", "be", "been", "being", "have", "has", "had", "having", "do", "does", <add> "did", "doing", "a", "an", "the", "and", "but", "if", "or", "because", "as", "until", <add> "while", "of", "at", "by", "for", "with", "about", "against", "between", "into", <add> "through", "during", "before", "after", "above", "below", "to", "from", "up", "down", <add> "in", "out", "on", "off", "over", "under", "again", "further", "then", "once", "here", <add> "there", "when", "where", "why", "how", "all", "any", "both", "each", "few", "more", <add> "most", "other", "some", "such", "no", "nor", "not", "only", "own", "same", "so", <add> "than", "too", "very", "s", "t", "can", "will", "just", "don", "should", "now" <add> })); <ide> public static class CgiArguments { <ide> // The raw user query <ide> public String _query = ""; <ide> PriorityQueue<Map.Entry<String, Double>> topTerms = new PriorityQueue<Map.Entry<String, Double>>( <ide> cgiArgs._numTerms, new CompareByValue()); <ide> for (Map.Entry<String, Double> entry : term_map.entrySet()) { <del> topTerms.add(entry); <del> if (topTerms.size() > cgiArgs._numTerms) { <del> topTerms.poll(); <add> if (!stopWords.contains(entry)){ <add> topTerms.add(entry); <add> if (topTerms.size() > cgiArgs._numTerms) { <add> topTerms.poll(); <add> } <ide> } <ide> } <ide> Map.Entry<String, Double>[] top_term = (Map.Entry<String, Double>[]) topTerms
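Three details in the committed /prf code are worth flagging: stopWords.contains(entry) tests the Map.Entry object against a Set of Strings, so it never matches and nothing is actually filtered; the loop in constructTermOutput increments i (++i) where it should count down (--i), which runs off the end of the array; and the unchecked cast of topTerms.toArray() to Map.Entry[] throws ClassCastException at runtime. A corrected, self-contained sketch of the intended bounded-heap selection of the top m non-stopword terms:

import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;

public class TopTerms {
    public static void main(String[] args) {
        Set<String> stopWords = new HashSet<>(Arrays.asList("the", "of"));
        Map<String, Double> termMap = Map.of("the", 9.0, "query", 4.0, "rank", 7.0, "of", 8.0);
        int m = 2;
        PriorityQueue<Map.Entry<String, Double>> top =
                new PriorityQueue<>(m, Map.Entry.comparingByValue()); // min-heap by weight
        for (Map.Entry<String, Double> e : termMap.entrySet()) {
            if (stopWords.contains(e.getKey())) {
                continue; // filter by the term string, not the Map.Entry object
            }
            top.add(new SimpleEntry<>(e.getKey(), e.getValue())); // copy: heap entries stay detached from the map
            if (top.size() > m) {
                top.poll(); // evict the current minimum so only the m largest survive
            }
        }
        while (!top.isEmpty()) {
            System.out.println(top.poll()); // prints query=4.0 then rank=7.0, ascending by weight
        }
    }
}

Sorting Map.Entry with Arrays.sort would likewise need an explicit comparator, since Map.Entry is not Comparable.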
Java
apache-2.0
ef7c381d1086fd22c8ac15b2520e544f3c8cef38
0
intrigus/VisEditor,piotr-j/VisEditor,StQuote/VisEditor,code-disaster/VisEditor,piotr-j/VisEditor,kotcrab/vis-editor,kotcrab/vis-editor,billy1380/VisEditor,kotcrab/VisEditor
/******************************************************************************* * Copyright 2014 Pawel Pastuszak * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package pl.kotcrab.vis.ui.test; import pl.kotcrab.vis.ui.TableUtils; import pl.kotcrab.vis.ui.components.VisLabel; import pl.kotcrab.vis.ui.components.VisTree; import pl.kotcrab.vis.ui.components.VisWindow; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.scenes.scene2d.ui.Tree.Node; public class TestTree extends VisWindow { public TestTree (Stage parent) { super(parent, "test tree"); TableUtils.setSpaceDefaults(this); columnDefaults(0).left(); VisTree tree = new VisTree(); Node item1 = new Node(new VisLabel("item 1")); Node item2 = new Node(new VisLabel("item 2")); Node item3 = new Node(new VisLabel("item 3")); item1.add(new Node(new VisLabel("item 1.1"))); item1.add(new Node(new VisLabel("item 1.2"))); item1.add(new Node(new VisLabel("item 1.3"))); item2.add(new Node(new VisLabel("item 2.1"))); item2.add(new Node(new VisLabel("item 2.2"))); item2.add(new Node(new VisLabel("item 2.3"))); item3.add(new Node(new VisLabel("item 3.1"))); item3.add(new Node(new VisLabel("item 3.2"))); item3.add(new Node(new VisLabel("item 3.3"))); item1.setExpanded(true); tree.add(item1); tree.add(item2); tree.add(item3); add(tree).expand().fill(); setSize(150, 380); setPositionToCenter(); setPosition(getX() + 380, getY()); } }
UI/test/pl/kotcrab/vis/ui/test/TestTree.java
/******************************************************************************* * Copyright 2014 Pawel Pastuszak * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package pl.kotcrab.vis.ui.test; import pl.kotcrab.vis.ui.TableUtils; import pl.kotcrab.vis.ui.components.VisLabel; import pl.kotcrab.vis.ui.components.VisTree; import pl.kotcrab.vis.ui.components.VisWindow; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.scenes.scene2d.ui.Tree.Node; public class TestTree extends VisWindow { public TestTree (Stage parent) { super(parent, "test tree"); TableUtils.setSpaceDefaults(this); columnDefaults(0).left(); VisTree tree = new VisTree(); Node item1 = new Node(new VisLabel("item 1")); Node item2 = new Node(new VisLabel("item 2")); Node item3 = new Node(new VisLabel("item 3")); item1.add(new Node(new VisLabel("item 1.1"))); item1.add(new Node(new VisLabel("item 1.2"))); item1.add(new Node(new VisLabel("item 1.3"))); item2.add(new Node(new VisLabel("item 2.1"))); item2.add(new Node(new VisLabel("item 2.2"))); item2.add(new Node(new VisLabel("item 2.3"))); item3.add(new Node(new VisLabel("item 3.1"))); item3.add(new Node(new VisLabel("item 3.2"))); item3.add(new Node(new VisLabel("item 3.3"))); tree.add(item1); tree.add(item2); tree.add(item3); add(tree).expand().fill(); setSize(150, 380); setPositionToCenter(); setPosition(getX() + 380, getY()); } }
Expand tree in demo
UI/test/pl/kotcrab/vis/ui/test/TestTree.java
Expand tree in demo
<ide><path>I/test/pl/kotcrab/vis/ui/test/TestTree.java <ide> item3.add(new Node(new VisLabel("item 3.2"))); <ide> item3.add(new Node(new VisLabel("item 3.3"))); <ide> <add> item1.setExpanded(true); <add> <ide> tree.add(item1); <ide> tree.add(item2); <ide> tree.add(item3);
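The one-line change calls setExpanded(true) on the root node so the demo opens with item 1's children visible. As a small refactoring sketch under the same libGDX/VisUI test classpath (labeledNode is a hypothetical helper, not part of the library, and a live scene2d stage is still needed to render), the repetitive node construction could be collapsed to:

import com.badlogic.gdx.scenes.scene2d.ui.Tree.Node;
import pl.kotcrab.vis.ui.components.VisLabel;

public class TreeNodes {
    // Hypothetical helper: builds a labeled node with labeled children,
    // optionally expanded like item1 in the commit above.
    static Node labeledNode(String text, boolean expanded, String... children) {
        Node node = new Node(new VisLabel(text));
        for (String child : children) {
            node.add(new Node(new VisLabel(child)));
        }
        node.setExpanded(expanded); // the commit's one-line change, applied per node
        return node;
    }
}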
Java
apache-2.0
00efa3fc995aff0a4f5e67009bf27117ea134f74
0
ServiceComb/java-chassis,ServiceComb/java-chassis
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.config; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.commons.configuration.AbstractConfiguration; import org.apache.servicecomb.config.spi.ConfigCenterConfigurationSource; import org.apache.servicecomb.foundation.common.utils.SPIServiceUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer; import org.springframework.context.EnvironmentAware; import org.springframework.core.Ordered; import org.springframework.core.env.CompositePropertySource; import org.springframework.core.env.ConfigurableEnvironment; import org.springframework.core.env.EnumerablePropertySource; import org.springframework.core.env.Environment; import org.springframework.core.env.MapPropertySource; import org.springframework.core.env.PropertySource; import org.springframework.util.StringUtils; import com.google.common.collect.Lists; import com.netflix.config.ConcurrentCompositeConfiguration; import com.netflix.config.ConfigurationManager; import com.netflix.config.DynamicPropertyFactory; /** * Adapt spring PropertySource and Archaius Configuration * spring vs archaius * (add) | dynamic(configcenter) * system property | system property * environment | environment * *properties/*.yml | (add) * (add) | microservice.yaml * * add dynamic configuration, microserive.yaml to spring, add *properties/*.yml to archaius * * NOTICE: we are not duplicate spring system property and environment property source, this will cause some problem * related to precedence of a KEY-VAlUE. That is cse.test in dynamic config may not override servicecomb.test in yml. * Users need to use the same key as what is in config file to override. */ public class ConfigurationSpringInitializer extends PropertyPlaceholderConfigurer implements EnvironmentAware { private static final Logger LOGGER = LoggerFactory.getLogger(ConfigurationSpringInitializer.class); public static final String EXTRA_CONFIG_SOURCE_PREFIX = "extraConfig-"; public ConfigurationSpringInitializer() { setOrder(Ordered.LOWEST_PRECEDENCE / 2); setIgnoreUnresolvablePlaceholders(true); } /** * Get configurations from Spring, merge them into the configurations of ServiceComb. * @param environment From which to get the extra config. 
*/ @Override public void setEnvironment(Environment environment) { String environmentName = generateNameForEnvironment(environment); LOGGER.info("Environment received, will get configurations from [{}].", environmentName); Map<String, Object> extraConfig = getAllProperties(environment); ConfigUtil.addExtraConfig(EXTRA_CONFIG_SOURCE_PREFIX + environmentName, extraConfig); ConfigUtil.installDynamicConfig(); setUpSpringPropertySource(environment); } private void setUpSpringPropertySource(Environment environment) { if (environment instanceof ConfigurableEnvironment) { ConfigurableEnvironment ce = (ConfigurableEnvironment) environment; ConfigCenterConfigurationSource configCenterConfigurationSource = SPIServiceUtils.getTargetService(ConfigCenterConfigurationSource.class); if (configCenterConfigurationSource != null) { try { ce.getPropertySources() .addFirst(new MapPropertySource("dynamic-source", configCenterConfigurationSource.getCurrentData())); } catch (Exception e) { LOGGER.warn("set up spring property source failed. msg: {}", e.getMessage()); } } ConcurrentCompositeConfiguration concurrentCompositeConfiguration = ConfigUtil.createLocalConfig(); ce.getPropertySources().addLast( new EnumerablePropertySource<ConcurrentCompositeConfiguration>("microservice.yaml", concurrentCompositeConfiguration) { private String[] propertyNames = null; @Override public String[] getPropertyNames() { if (propertyNames == null) { List<String> keyList = Lists.newArrayList(this.source.getKeys()); propertyNames = keyList.toArray(new String[keyList.size()]); } return propertyNames; } @Override public Object getProperty(String s) { return this.source.getProperty(s); } }); } } @Override protected Properties mergeProperties() throws IOException { Properties properties = super.mergeProperties(); AbstractConfiguration config = ConfigurationManager.getConfigInstance(); Iterator<String> iter = config.getKeys(); while (iter.hasNext()) { String key = iter.next(); Object value = config.getProperty(key); properties.put(key, value); } return properties; } @Override protected String resolvePlaceholder(String placeholder, Properties props) { String propertyValue = super.resolvePlaceholder(placeholder, props); if (propertyValue == null) { return DynamicPropertyFactory.getInstance().getStringProperty(placeholder, null).get(); } return propertyValue; } /** * Try to get a name for identifying the environment. * @param environment the target that the name is generated for. * @return The generated name for the environment. */ private String generateNameForEnvironment(Environment environment) { String environmentName = environment.getProperty("spring.config.name"); if (!StringUtils.isEmpty(environmentName)) { return environmentName; } environmentName = environment.getProperty("spring.application.name"); if (!StringUtils.isEmpty(environmentName)) { return environmentName; } return environment.getClass().getName() + "@" + environment.hashCode(); } /** * Traversal all {@link PropertySource} of {@link ConfigurableEnvironment}, and try to get all properties. 
*/ private Map<String, Object> getAllProperties(Environment environment) { Map<String, Object> configFromSpringBoot = new HashMap<>(); if (!(environment instanceof ConfigurableEnvironment)) { return configFromSpringBoot; } ConfigurableEnvironment configurableEnvironment = (ConfigurableEnvironment) environment; for (PropertySource<?> propertySource : configurableEnvironment.getPropertySources()) { getProperties(configurableEnvironment, propertySource, configFromSpringBoot); } return configFromSpringBoot; } /** * Get property names from {@link EnumerablePropertySource}, and get property value from {@link ConfigurableEnvironment#getProperty(String)} */ private void getProperties(ConfigurableEnvironment environment, PropertySource<?> propertySource, Map<String, Object> configFromSpringBoot) { if (propertySource instanceof CompositePropertySource) { // recursively get EnumerablePropertySource CompositePropertySource compositePropertySource = (CompositePropertySource) propertySource; compositePropertySource.getPropertySources().forEach(ps -> getProperties(environment, ps, configFromSpringBoot)); return; } if (propertySource instanceof EnumerablePropertySource) { EnumerablePropertySource<?> enumerablePropertySource = (EnumerablePropertySource<?>) propertySource; for (String propertyName : enumerablePropertySource.getPropertyNames()) { try { configFromSpringBoot.put(propertyName, environment.getProperty(propertyName, Object.class)); } catch (Exception e) { if (!getIfIgnoreEnvironment()) { throw new RuntimeException("set up spring property source failed.", e); } else { LOGGER.warn("set up spring property source failed.", e); } } } return; } LOGGER.debug("a none EnumerablePropertySource is ignored, propertySourceName = [{}]", propertySource.getName()); } private boolean getIfIgnoreEnvironment() { return (Boolean) ConfigUtil.createLocalConfig().getProperty("servicecomb.config.ignoreResolveFailure"); } }
foundations/foundation-config/src/main/java/org/apache/servicecomb/config/ConfigurationSpringInitializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.config; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.commons.configuration.AbstractConfiguration; import org.apache.servicecomb.config.spi.ConfigCenterConfigurationSource; import org.apache.servicecomb.foundation.common.utils.SPIServiceUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer; import org.springframework.context.EnvironmentAware; import org.springframework.core.Ordered; import org.springframework.core.env.CompositePropertySource; import org.springframework.core.env.ConfigurableEnvironment; import org.springframework.core.env.EnumerablePropertySource; import org.springframework.core.env.Environment; import org.springframework.core.env.MapPropertySource; import org.springframework.core.env.PropertySource; import org.springframework.util.StringUtils; import com.google.common.collect.Lists; import com.netflix.config.ConcurrentCompositeConfiguration; import com.netflix.config.ConfigurationManager; import com.netflix.config.DynamicPropertyFactory; /** * Adapt spring PropertySource and Archaius Configuration * spring vs archaius * (add) | dynamic(configcenter) * system property | system property * environment | environment * *properties/*.yml | (add) * (add) | microservice.yaml * * add dynamic configuration, microserive.yaml to spring, add *properties/*.yml to archaius * * NOTICE: we are not duplicate spring system property and environment property source, this will cause some problem * related to precedence of a KEY-VAlUE. That is cse.test in dynamic config may not override servicecomb.test in yml. * Users need to use the same key as what is in config file to override. */ public class ConfigurationSpringInitializer extends PropertyPlaceholderConfigurer implements EnvironmentAware { private static final Logger LOGGER = LoggerFactory.getLogger(ConfigurationSpringInitializer.class); public static final String EXTRA_CONFIG_SOURCE_PREFIX = "extraConfig-"; public ConfigurationSpringInitializer() { setOrder(Ordered.LOWEST_PRECEDENCE / 2); setIgnoreUnresolvablePlaceholders(true); } /** * Get configurations from Spring, merge them into the configurations of ServiceComb. * @param environment From which to get the extra config. 
*/ @Override public void setEnvironment(Environment environment) { String environmentName = generateNameForEnvironment(environment); LOGGER.info("Environment received, will get configurations from [{}].", environmentName); Map<String, Object> extraConfig = getAllProperties(environment); ConfigUtil.addExtraConfig(EXTRA_CONFIG_SOURCE_PREFIX + environmentName, extraConfig); ConfigUtil.installDynamicConfig(); setUpSpringPropertySource(environment); } private void setUpSpringPropertySource(Environment environment) { if (environment instanceof ConfigurableEnvironment) { ConfigurableEnvironment ce = (ConfigurableEnvironment) environment; ConfigCenterConfigurationSource configCenterConfigurationSource = SPIServiceUtils.getTargetService(ConfigCenterConfigurationSource.class); if (configCenterConfigurationSource != null) { try { ce.getPropertySources() .addFirst(new MapPropertySource("dynamic-source", configCenterConfigurationSource.getCurrentData())); } catch (Exception e) { LOGGER.warn("set up spring property source failed. msg: {}", e.getMessage()); } } ConcurrentCompositeConfiguration concurrentCompositeConfiguration = ConfigUtil.createLocalConfig(); ce.getPropertySources().addLast( new EnumerablePropertySource<ConcurrentCompositeConfiguration>("microservice.yaml", concurrentCompositeConfiguration) { private String[] propertyNames = null; @Override public String[] getPropertyNames() { if (propertyNames == null) { List<String> keyList = Lists.newArrayList(this.source.getKeys()); propertyNames = keyList.toArray(new String[keyList.size()]); } return propertyNames; } @Override public Object getProperty(String s) { return this.source.getProperty(s); } }); } } @Override protected Properties mergeProperties() throws IOException { Properties properties = super.mergeProperties(); AbstractConfiguration config = ConfigurationManager.getConfigInstance(); Iterator<String> iter = config.getKeys(); while (iter.hasNext()) { String key = iter.next(); Object value = config.getProperty(key); properties.put(key, value); } return properties; } @Override protected String resolvePlaceholder(String placeholder, Properties props) { String propertyValue = super.resolvePlaceholder(placeholder, props); if (propertyValue == null) { return DynamicPropertyFactory.getInstance().getStringProperty(placeholder, null).get(); } return propertyValue; } /** * Try to get a name for identifying the environment. * @param environment the target that the name is generated for. * @return The generated name for the environment. */ private String generateNameForEnvironment(Environment environment) { String environmentName = environment.getProperty("spring.config.name"); if (!StringUtils.isEmpty(environmentName)) { return environmentName; } environmentName = environment.getProperty("spring.application.name"); if (!StringUtils.isEmpty(environmentName)) { return environmentName; } return environment.getClass().getName() + "@" + environment.hashCode(); } /** * Traverse all {@link PropertySource} of {@link ConfigurableEnvironment}, and try to get all properties.
*/ private Map<String, Object> getAllProperties(Environment environment) { Map<String, Object> configFromSpringBoot = new HashMap<>(); if (!(environment instanceof ConfigurableEnvironment)) { return configFromSpringBoot; } ConfigurableEnvironment configurableEnvironment = (ConfigurableEnvironment) environment; for (PropertySource<?> propertySource : configurableEnvironment.getPropertySources()) { getProperties(configurableEnvironment, propertySource, configFromSpringBoot); } return configFromSpringBoot; } /** * Get property names from {@link EnumerablePropertySource}, and get property value from {@link ConfigurableEnvironment#getProperty(String)} */ private void getProperties(ConfigurableEnvironment environment, PropertySource<?> propertySource, Map<String, Object> configFromSpringBoot) { if (propertySource instanceof CompositePropertySource) { // recursively get EnumerablePropertySource CompositePropertySource compositePropertySource = (CompositePropertySource) propertySource; compositePropertySource.getPropertySources().forEach(ps -> getProperties(environment, ps, configFromSpringBoot)); return; } if (propertySource instanceof EnumerablePropertySource) { EnumerablePropertySource<?> enumerablePropertySource = (EnumerablePropertySource<?>) propertySource; for (String propertyName : enumerablePropertySource.getPropertyNames()) { configFromSpringBoot.put(propertyName, environment.getProperty(propertyName, Object.class)); } return; } LOGGER.debug("a none EnumerablePropertySource is ignored, propertySourceName = [{}]", propertySource.getName()); } }
[SCB-1297] Fix environment parsing failure
foundations/foundation-config/src/main/java/org/apache/servicecomb/config/ConfigurationSpringInitializer.java
[SCB-1297] Fix environment parsing failure
<ide><path>foundations/foundation-config/src/main/java/org/apache/servicecomb/config/ConfigurationSpringInitializer.java <ide> if (propertySource instanceof EnumerablePropertySource) { <ide> EnumerablePropertySource<?> enumerablePropertySource = (EnumerablePropertySource<?>) propertySource; <ide> for (String propertyName : enumerablePropertySource.getPropertyNames()) { <del> configFromSpringBoot.put(propertyName, environment.getProperty(propertyName, Object.class)); <add> try { <add> configFromSpringBoot.put(propertyName, environment.getProperty(propertyName, Object.class)); <add> } catch (Exception e) { <add> if (!getIfIgnoreEnvironment()) { <add> throw new RuntimeException("set up spring property source failed.", e); <add> } else { <add> LOGGER.warn("set up spring property source failed.", e); <add> } <add> } <ide> } <ide> return; <ide> } <ide> <ide> LOGGER.debug("a none EnumerablePropertySource is ignored, propertySourceName = [{}]", propertySource.getName()); <ide> } <add> <add> private boolean getIfIgnoreEnvironment() { <add> return (Boolean) ConfigUtil.createLocalConfig().getProperty("servicecomb.config.ignoreResolveFailure"); <add> } <ide> }
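The SCB-1297 change above guards each environment.getProperty(...) call because a property whose value contains an unresolvable placeholder (for example ${missing.key}) makes Spring throw while the initializer is merely enumerating sources. Below is a minimal, self-contained sketch of the same pattern using only plain Spring APIs; the boolean ignoreResolveFailure parameter is a stand-in for the servicecomb.config.ignoreResolveFailure switch added by the commit, and the class name is hypothetical.

import java.util.HashMap;
import java.util.Map;
import org.springframework.core.env.CompositePropertySource;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.core.env.PropertySource;

public class LenientPropertyCollector {
  /** Copies every enumerable property, optionally tolerating resolution failures. */
  public static Map<String, Object> collect(ConfigurableEnvironment env, boolean ignoreResolveFailure) {
    Map<String, Object> result = new HashMap<>();
    for (PropertySource<?> source : env.getPropertySources()) {
      collect(env, source, result, ignoreResolveFailure);
    }
    return result;
  }

  private static void collect(ConfigurableEnvironment env, PropertySource<?> source,
      Map<String, Object> result, boolean ignoreResolveFailure) {
    if (source instanceof CompositePropertySource) {
      // A composite wraps other sources; recurse until we reach enumerable ones.
      for (PropertySource<?> inner : ((CompositePropertySource) source).getPropertySources()) {
        collect(env, inner, result, ignoreResolveFailure);
      }
      return;
    }
    if (source instanceof EnumerablePropertySource) {
      for (String name : ((EnumerablePropertySource<?>) source).getPropertyNames()) {
        try {
          // getProperty resolves ${...} placeholders and may throw for broken ones.
          result.put(name, env.getProperty(name, Object.class));
        } catch (Exception e) {
          if (!ignoreResolveFailure) {
            throw new RuntimeException("failed to resolve property: " + name, e);
          }
          // Otherwise skip the broken property and keep going.
        }
      }
    }
  }
}

Failing fast stays the default here, mirroring the commit: silently dropping unresolvable properties is only safe when it is explicitly opted into.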
Java
apache-2.0
06ef962af2b1700dc8a002bbda1808d0b20042b7
0
arrahtec/osdq-core,arunwizz/osdq-core,arunwizz/osdq-core,arrahtec/osdq-core
package org.arrah.framework.analytics; /************************************************** * Copyright to Vivek Singh 2017 * * * * Any part of code or file can be changed, * * redistributed, modified with the copyright * * information intact * * * * Author$ : Vivek Singh * * * **************************************************/ /* * This class provides a chi-square test for * independence where the input is a ReportTableModel * */ import java.util.Vector; import org.arrah.framework.datagen.AggrCumRTM; import org.arrah.framework.ndtable.ReportTableModel; public class ChiSquareTest { ReportTableModel _rtm = null; private double _significanceLevel = 0.05D; // it can be customized; most people use this value private int degreeOfFree = 0; public ChiSquareTest ( ReportTableModel rtm) { _rtm = rtm; } public ChiSquareTest ( ReportTableModel rtm, float sigl) { _rtm = rtm; set_significanceLevel(sigl); } public double getChiSquare() { int rowN = _rtm.getModel().getRowCount(); int colN = _rtm.getModel().getColumnCount(); setDegreeOfFreedom((rowN -1 )* (colN -3)); // there are two extra cols in rtm Vector<Double> populationData = AggrCumRTM.getColumnNumberData(_rtm, colN -1); double population = AggrCumRTM.getSum(populationData); double chisquare = 0.000D; for (int i=1; i < (colN -1) ; i++) { // last column is total count Vector<Double> colData = AggrCumRTM.getColumnNumberData(_rtm, i); double colSum = AggrCumRTM.getSum(colData); for (int j=0; j < rowN; j++ ) { double rowSum = new Double(_rtm.getModel().getValueAt(j, colN -1).toString()); //last column value double expectedFreq = colSum * rowSum / population; //System.out.println(rowSum + ":"+colSum +":" +population); double observedFreq = new Double(_rtm.getModel().getValueAt(j, i).toString()); double singleChi = ((expectedFreq - observedFreq) * (expectedFreq - observedFreq))/expectedFreq; //System.out.println(expectedFreq + ":"+observedFreq ); chisquare += singleChi; } } return chisquare; } public int getDegreeOfFreedom() { return degreeOfFree; } public void setDegreeOfFreedom(int degreeOfFree) { this.degreeOfFree = degreeOfFree; } public double get_significanceLevel() { return _significanceLevel; } public void set_significanceLevel(double _significanceLevel) { this._significanceLevel = _significanceLevel; } } // end of ChiSquareTest
src/main/java/org/arrah/framework/analytics/ChiSquareTest.java
package org.arrah.framework.analytics; /************************************************** * Copyright to Vivek Singh 2017 * * * * Any part of code or file can be changed, * * redistributed, modified with the copyright * * information intact * * * * Author$ : Vivek Singh * * * **************************************************/ /* * This class provides a chi-square test for * independence where the input is a ReportTableModel * */ import java.util.Vector; import org.arrah.framework.datagen.AggrCumRTM; import org.arrah.framework.ndtable.ReportTableModel; public class ChiSquareTest { ReportTableModel _rtm = null; private double _significanceLevel = 0.05D; // it can be customized; most people use this value private int degreeOfFree = 0; public ChiSquareTest ( ReportTableModel rtm) { _rtm = rtm; } public ChiSquareTest ( ReportTableModel rtm, float sigl) { _rtm = rtm; set_significanceLevel(sigl); } public double getChiSquare() { int rowN = _rtm.getModel().getRowCount(); int colN = _rtm.getModel().getColumnCount(); setDegreeOfFreedom((rowN -1 )* (colN -3)); // there are two extra cols in rtm Vector<Double> populationData = AggrCumRTM.getColumnNumberData(_rtm, colN -1); double population = AggrCumRTM.getSum(populationData); double chisquare = 0.000D; for (int i=1; i < (colN -2) ; i++) { Vector<Double> colData = AggrCumRTM.getColumnNumberData(_rtm, i); double colSum = AggrCumRTM.getSum(colData); for (int j=0; j < rowN; j++ ) { double rowSum = new Double(_rtm.getModel().getValueAt(j, colN -1).toString()); //last column value double expectedFreq = colSum * rowSum / population; double observedFreq = new Double(_rtm.getModel().getValueAt(j, i).toString()); double singleChi = ((expectedFreq - observedFreq) * (expectedFreq - observedFreq))/expectedFreq; chisquare += singleChi; } } return chisquare; } public int getDegreeOfFreedom() { return degreeOfFree; } public void setDegreeOfFreedom(int degreeOfFree) { this.degreeOfFree = degreeOfFree; } public double get_significanceLevel() { return _significanceLevel; } public void set_significanceLevel(double _significanceLevel) { this._significanceLevel = _significanceLevel; } } // end of ChiSquareTest
bug fix
src/main/java/org/arrah/framework/analytics/ChiSquareTest.java
bug fix
<ide><path>src/main/java/org/arrah/framework/analytics/ChiSquareTest.java <ide> double population = AggrCumRTM.getSum(populationData); <ide> double chisquare = 0.000D; <ide> <del> for (int i=1; i < (colN -2) ; i++) { <add> for (int i=1; i < (colN -1) ; i++) { // last column is total count <ide> Vector<Double> colData = AggrCumRTM.getColumnNumberData(_rtm, i); <ide> double colSum = AggrCumRTM.getSum(colData); <ide> <ide> for (int j=0; j < rowN; j++ ) { <ide> double rowSum = new Double(_rtm.getModel().getValueAt(j, colN -1).toString()); //last column value <ide> double expectedFreq = colSum * rowSum / population; <add> //System.out.println(rowSum + ":"+colSum +":" +population); <ide> double observedFreq = new Double(_rtm.getModel().getValueAt(j, i).toString()); <ide> double singleChi = ((expectedFreq - observedFreq) * (expectedFreq - observedFreq))/expectedFreq; <add> //System.out.println(expectedFreq + ":"+observedFreq ); <ide> chisquare += singleChi; <ide> } <ide> }
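For reference, the statistic this fixed loop computes is the classic chi-square test of independence: for each cell, expected = rowTotal * columnTotal / grandTotal, and chi-square sums (observed - expected)^2 / expected over every data column; the off-by-one bug (colN -2 instead of colN -1) silently dropped the last data column. The sketch below reproduces the computation over a plain long[][] contingency table rather than the project's ReportTableModel, so the table layout here is an assumption for illustration only.

public class ChiSquareSketch {
  /** Chi-square statistic of independence for a rows x cols contingency table of observed counts. */
  public static double chiSquare(long[][] observed) {
    int rows = observed.length;
    int cols = observed[0].length;
    double[] rowTotals = new double[rows];
    double[] colTotals = new double[cols];
    double grandTotal = 0;
    for (int r = 0; r < rows; r++) {
      for (int c = 0; c < cols; c++) {
        rowTotals[r] += observed[r][c];
        colTotals[c] += observed[r][c];
        grandTotal += observed[r][c];
      }
    }
    double chi = 0;
    for (int r = 0; r < rows; r++) {
      for (int c = 0; c < cols; c++) { // every data column counts: this is where the off-by-one hid
        double expected = rowTotals[r] * colTotals[c] / grandTotal;
        double diff = observed[r][c] - expected;
        chi += diff * diff / expected;
      }
    }
    return chi; // compare against the chi-square distribution with (rows-1)*(cols-1) degrees of freedom
  }

  public static void main(String[] args) {
    long[][] table = { { 20, 30 }, { 30, 20 } };
    System.out.println(chiSquare(table)); // prints 4.0
  }
}

With a 2x2 table there is (2-1)*(2-1) = 1 degree of freedom, so the 4.0 above exceeds the 3.84 critical value at the 0.05 significance level and independence would be rejected.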
Java
mit
363f3d0c7fe8ab38d1acaec7b7edcab1d3f10d16
0
ben-gibson/jetbrains-open-in-git-host,ben-gibson/remote-repository-mapper
package uk.co.ben_gibson.git.link.UI.Action.Vcs; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.project.Project; import com.intellij.vcs.log.VcsFullCommitDetails; import com.intellij.vcs.log.VcsLog; import com.intellij.vcs.log.VcsLogDataKeys; import uk.co.ben_gibson.git.link.Container; import uk.co.ben_gibson.git.link.Git.Commit; import uk.co.ben_gibson.git.link.Url.Handler.UrlHandler; import uk.co.ben_gibson.git.link.UI.Action.Action; import java.util.List; /** * An action triggered from a VCS log toolbar. */ abstract class VcsLogAction extends Action { abstract UrlHandler urlHandler(); public void actionPerformed(Project project, AnActionEvent event) { VcsLog vcsLog = event.getData(VcsLogDataKeys.VCS_LOG); if (vcsLog == null) { return; } VcsFullCommitDetails vcsCommit = vcsLog.getSelectedDetails().get(0); Commit commit = new Commit(vcsCommit.getId().toShortString()); this.getManager().handleCommit(this.urlHandler(), project, commit, vcsCommit.getRoot()); } protected boolean shouldActionBeEnabled(AnActionEvent event) { VcsLog log = event.getData(VcsLogDataKeys.VCS_LOG); if (log == null) { return false; } List<VcsFullCommitDetails> commits = log.getSelectedDetails(); return commits.size() == 1; } }
src/uk/co/ben_gibson/git/link/UI/Action/Vcs/VcsLogAction.java
package uk.co.ben_gibson.git.link.UI.Action.Vcs; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.project.Project; import com.intellij.vcs.log.VcsFullCommitDetails; import com.intellij.vcs.log.VcsLog; import com.intellij.vcs.log.VcsLogDataKeys; import uk.co.ben_gibson.git.link.Container; import uk.co.ben_gibson.git.link.Git.Commit; import uk.co.ben_gibson.git.link.Url.Handler.UrlHandler; import uk.co.ben_gibson.git.link.UI.Action.Action; import java.util.List; /** * An action triggered from a VCS log toolbar. */ abstract class VcsLogAction extends Action { abstract UrlHandler urlHandler(); public void actionPerformed(Project project, AnActionEvent event) { VcsLog vcsLog = event.getData(VcsLogDataKeys.VCS_LOG); if (vcsLog == null) { return; } VcsFullCommitDetails vcsCommit = vcsLog.getSelectedDetails().get(0); Commit commit = new Commit(vcsCommit.toString()); this.getManager().handleCommit(this.urlHandler(), project, commit, vcsCommit.getRoot()); } protected boolean shouldActionBeEnabled(AnActionEvent event) { VcsLog log = event.getData(VcsLogDataKeys.VCS_LOG); if (log == null) { return false; } List<VcsFullCommitDetails> commits = log.getSelectedDetails(); return commits.size() == 1; } }
Regression in 2.3.0: Opening a commit from the VCS log fails. (#67) Use a proper way to get the revision number.
src/uk/co/ben_gibson/git/link/UI/Action/Vcs/VcsLogAction.java
Regression in 2.3.0: Opening a commit from the VCS log fails. (#67)
<ide><path>src/uk/co/ben_gibson/git/link/UI/Action/Vcs/VcsLogAction.java <ide> } <ide> <ide> VcsFullCommitDetails vcsCommit = vcsLog.getSelectedDetails().get(0); <del> Commit commit = new Commit(vcsCommit.toString()); <add> Commit commit = new Commit(vcsCommit.getId().toShortString()); <ide> <ide> this.getManager().handleCommit(this.urlHandler(), project, commit, vcsCommit.getRoot()); <ide> }
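The one-line fix above is a reminder that toString() is for diagnostics, not data: VcsFullCommitDetails.toString() yields an arbitrary debug rendering, while getId().toShortString() returns the actual abbreviated hash. The sketch below illustrates the distinction with hypothetical Hash/CommitDetails stand-ins rather than the IntelliJ API; only the getId().toShortString() call shape comes from the commit itself.

public class ToStringPitfall {
  // Hypothetical stand-ins for the IntelliJ types used above.
  static final class Hash {
    private final String hex;
    Hash(String hex) { this.hex = hex; }
    String toShortString() { return hex.substring(0, 7); } // explicit, stable accessor
  }

  static final class CommitDetails {
    private final Hash id;
    CommitDetails(Hash id) { this.id = id; }
    Hash getId() { return id; }
    @Override public String toString() { return "CommitDetails{id=" + id.hex + "}"; } // debug only
  }

  public static void main(String[] args) {
    CommitDetails commit = new CommitDetails(new Hash("8780cbc625151abbc9a7ba91b7e219b42bfc9bee"));
    System.out.println(commit.toString());              // debug text, not a usable revision id
    System.out.println(commit.getId().toShortString()); // 8780cbc -- what a URL handler actually needs
  }
}

The same reasoning explains the enable-guard in the action: it requires exactly one selected commit, because a multi-selection has no single revision to build a link from.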
Java
apache-2.0
8780cbc625151abbc9a7ba91b7e219b42bfc9bee
0
orientechnologies/orientdb,orientechnologies/orientdb,orientechnologies/orientdb,orientechnologies/orientdb
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.core.metadata.schema; import com.orientechnologies.common.exception.OException; import com.orientechnologies.common.listener.OProgressListener; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.util.OArrays; import com.orientechnologies.common.util.OCommonConst; import com.orientechnologies.orient.core.annotation.OBeforeSerialization; import com.orientechnologies.orient.core.command.OCommandResultListener; import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal; import com.orientechnologies.orient.core.db.ODatabaseInternal; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.db.OScenarioThreadLocal; import com.orientechnologies.orient.core.db.document.ODatabaseDocument; import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.db.record.ORecordElement; import com.orientechnologies.orient.core.exception.ODatabaseException; import com.orientechnologies.orient.core.exception.OSchemaException; import com.orientechnologies.orient.core.exception.OSecurityAccessException; import com.orientechnologies.orient.core.exception.OSecurityException; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.index.*; import com.orientechnologies.orient.core.metadata.schema.clusterselection.OClusterSelectionStrategy; import com.orientechnologies.orient.core.metadata.schema.clusterselection.ORoundRobinClusterSelectionStrategy; import com.orientechnologies.orient.core.metadata.security.ORole; import com.orientechnologies.orient.core.metadata.security.ORule; import com.orientechnologies.orient.core.metadata.security.OSecurityShared; import com.orientechnologies.orient.core.metadata.security.OSecurityUser; import com.orientechnologies.orient.core.record.ORecord; import com.orientechnologies.orient.core.record.ORecordInternal; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.serialization.serializer.record.ORecordSerializerFactory; import com.orientechnologies.orient.core.serialization.serializer.record.string.ORecordSerializerSchemaAware2CSV; import com.orientechnologies.orient.core.sharding.auto.OAutoShardingClusterSelectionStrategy; import com.orientechnologies.orient.core.sql.OCommandSQL; import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery; import com.orientechnologies.orient.core.storage.*; import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage; import com.orientechnologies.orient.core.type.ODocumentWrapper; import com.orientechnologies.orient.core.type.ODocumentWrapperNoClass; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.*; 
import java.util.concurrent.Callable; /** * Schema Class implementation. * * @author Luca Garulli (l.garulli--at--orientechnologies.com) */ @SuppressWarnings("unchecked") public class OClassImpl extends ODocumentWrapperNoClass implements OClass { private static final long serialVersionUID = 1L; private static final int NOT_EXISTENT_CLUSTER_ID = -1; final OSchemaShared owner; private final Map<String, OProperty> properties = new HashMap<String, OProperty>(); private int defaultClusterId = NOT_EXISTENT_CLUSTER_ID; private String name; private String description; private int[] clusterIds; private List<OClassImpl> superClasses = new ArrayList<OClassImpl>(); private int[] polymorphicClusterIds; private List<OClass> subclasses; private float overSize = 0f; private String shortName; private boolean strictMode = false; // @SINCE v1.0rc8 private boolean abstractClass = false; // @SINCE v1.2.0 private Map<String, String> customFields; private volatile OClusterSelectionStrategy clusterSelection; // @SINCE 1.7 private volatile int hashCode; private static Set<String> reserved = new HashSet<String>(); static { // reserved.add("select"); reserved.add("traverse"); reserved.add("insert"); reserved.add("update"); reserved.add("delete"); reserved.add("from"); reserved.add("where"); reserved.add("skip"); reserved.add("limit"); reserved.add("timeout"); } /** * Constructor used in unmarshalling. */ protected OClassImpl(final OSchemaShared iOwner, final String iName) { this(iOwner, new ODocument().setTrackingChanges(false), iName); } protected OClassImpl(final OSchemaShared iOwner, final String iName, final int[] iClusterIds) { this(iOwner, iName); setClusterIds(iClusterIds); defaultClusterId = iClusterIds[0]; if (defaultClusterId == NOT_EXISTENT_CLUSTER_ID) abstractClass = true; if (abstractClass) setPolymorphicClusterIds(OCommonConst.EMPTY_INT_ARRAY); else setPolymorphicClusterIds(iClusterIds); clusterSelection = owner.getClusterSelectionFactory().newInstanceOfDefaultClass(); } /** * Constructor used in unmarshalling. 
*/ protected OClassImpl(final OSchemaShared iOwner, final ODocument iDocument, final String iName) { name = iName; document = iDocument; owner = iOwner; } public static int[] readableClusters(final ODatabaseDocument iDatabase, final int[] iClusterIds) { List<Integer> listOfReadableIds = new ArrayList<Integer>(); boolean all = true; for (int clusterId : iClusterIds) { try { final String clusterName = iDatabase.getClusterNameById(clusterId); iDatabase.checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, clusterName); listOfReadableIds.add(clusterId); } catch (OSecurityAccessException securityException) { all = false; // if the cluster is inaccessible it's simply not processed in the list.add } } if (all) // JUST RETURN INPUT ARRAY (FASTER) return iClusterIds; final int[] readableClusterIds = new int[listOfReadableIds.size()]; int index = 0; for (int clusterId : listOfReadableIds) { readableClusterIds[index++] = clusterId; } return readableClusterIds; } @Override public OClusterSelectionStrategy getClusterSelection() { acquireSchemaReadLock(); try { return clusterSelection; } finally { releaseSchemaReadLock(); } } @Override public OClass setClusterSelection(final OClusterSelectionStrategy clusterSelection) { return setClusterSelection(clusterSelection.getName()); } @Override public OClass setClusterSelection(final String value) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` clusterselection '%s'", name, value); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` clusterselection '%s'", name, value); OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setClusterSelectionInternal(value); } else setClusterSelectionInternal(value); return this; } finally { releaseSchemaWriteLock(); } } @Override public <RET extends ODocumentWrapper> RET reload() { return (RET) owner.reload(); } public String getCustom(final String iName) { acquireSchemaReadLock(); try { if (customFields == null) return null; return customFields.get(iName); } finally { releaseSchemaReadLock(); } } public OClassImpl setCustom(final String name, final String value) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` custom %s=%s", getName(), name, value); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` custom %s=%s", getName(), name, value); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setCustomInternal(name, value); } else setCustomInternal(name, value); return this; } finally { releaseSchemaWriteLock(); } } public Map<String, String> getCustomInternal() { acquireSchemaReadLock(); try { if (customFields != null) return Collections.unmodifiableMap(customFields); return null; } finally { 
releaseSchemaReadLock(); } } public void removeCustom(final String name) { setCustom(name, null); } public void clearCustom() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` custom clear", getName()); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` custom clear", getName()); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); clearCustomInternal(); } else clearCustomInternal(); } finally { releaseSchemaWriteLock(); } } public Set<String> getCustomKeys() { acquireSchemaReadLock(); try { if (customFields != null) return Collections.unmodifiableSet(customFields.keySet()); return new HashSet<String>(); } finally { releaseSchemaReadLock(); } } @Override public boolean hasClusterId(final int clusterId) { return Arrays.binarySearch(clusterIds, clusterId) >= 0; } @Override public boolean hasPolymorphicClusterId(final int clusterId) { return Arrays.binarySearch(polymorphicClusterIds, clusterId) >= 0; } @Override @Deprecated public OClass getSuperClass() { acquireSchemaReadLock(); try { return superClasses.isEmpty() ? null : superClasses.get(0); } finally { releaseSchemaReadLock(); } } @Override @Deprecated public OClass setSuperClass(OClass iSuperClass) { setSuperClasses(iSuperClass != null ? Arrays.asList(iSuperClass) : Collections.EMPTY_LIST); return this; } public String getName() { acquireSchemaReadLock(); try { return name; } finally { releaseSchemaReadLock(); } } @Override public List<OClass> getSuperClasses() { acquireSchemaReadLock(); try { return Collections.unmodifiableList((List<? extends OClass>) superClasses); } finally { releaseSchemaReadLock(); } } @Override public boolean hasSuperClasses() { acquireSchemaReadLock(); try { return !superClasses.isEmpty(); } finally { releaseSchemaReadLock(); } } @Override public List<String> getSuperClassesNames() { acquireSchemaReadLock(); try { List<String> superClassesNames = new ArrayList<String>(superClasses.size()); for (OClassImpl superClass : superClasses) { superClassesNames.add(superClass.getName()); } return superClassesNames; } finally { releaseSchemaReadLock(); } } public OClass setSuperClassesByNames(List<String> classNames) { if (classNames == null) classNames = Collections.EMPTY_LIST; final List<OClass> classes = new ArrayList<OClass>(classNames.size()); final OSchema schema = getDatabase().getMetadata().getSchema(); for (String className : classNames) { classes.add(schema.getClass(decodeClassName(className))); } return setSuperClasses(classes); } @Override public OClass setSuperClasses(final List<? 
extends OClass> classes) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (classes != null) { List<OClass> toCheck = new ArrayList<OClass>(classes); toCheck.add(this); checkParametersConflict(toCheck); } acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); final StringBuilder sb = new StringBuilder(); if (classes != null && classes.size() > 0) { for (OClass superClass : classes) { sb.append('`').append(superClass.getName()).append("`,"); } sb.deleteCharAt(sb.length() - 1); } else sb.append("null"); final String cmd = String.format("alter class `%s` superclasses %s", name, sb); if (storage instanceof OStorageProxy) { database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setSuperClassesInternal(classes); } else setSuperClassesInternal(classes); } finally { releaseSchemaWriteLock(); } return this; } void setSuperClassesInternal(final List<? extends OClass> classes) { acquireSchemaWriteLock(); try { List<OClassImpl> newSuperClasses = new ArrayList<OClassImpl>(); OClassImpl cls; for (OClass superClass : classes) { if (superClass instanceof OClassAbstractDelegate) cls = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate; else cls = (OClassImpl) superClass; if (newSuperClasses.contains(cls)) { throw new OSchemaException("Duplicated superclass '" + cls.getName() + "'"); } newSuperClasses.add(cls); } List<OClassImpl> toAddList = new ArrayList<OClassImpl>(newSuperClasses); toAddList.removeAll(superClasses); List<OClassImpl> toRemoveList = new ArrayList<OClassImpl>(superClasses); toRemoveList.removeAll(newSuperClasses); for (OClassImpl toRemove : toRemoveList) { toRemove.removeBaseClassInternal(this); } for (OClassImpl addTo : toAddList) { addTo.addBaseClass(this); } superClasses.clear(); superClasses.addAll(newSuperClasses); } finally { releaseSchemaWriteLock(); } } @Override public OClass addSuperClass(final OClass superClass) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); checkParametersConflict(superClass); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String .format("alter class `%s` superclass +`%s`", name, superClass != null ? superClass.getName() : null); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String .format("alter class `%s` superclass +`%s`", name, superClass != null ? 
superClass.getName() : null); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); addSuperClassInternal(superClass); } else addSuperClassInternal(superClass); } finally { releaseSchemaWriteLock(); } return this; } void addSuperClassInternal(final OClass superClass) { acquireSchemaWriteLock(); try { final OClassImpl cls; if (superClass instanceof OClassAbstractDelegate) cls = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate; else cls = (OClassImpl) superClass; if (cls != null) { // CHECK THE USER HAS UPDATE PRIVILEGE AGAINST EXTENDING CLASS final OSecurityUser user = getDatabase().getUser(); if (user != null) user.allow(ORule.ResourceGeneric.CLASS, cls.getName(), ORole.PERMISSION_UPDATE); if (superClasses.contains(superClass)) { throw new OSchemaException( "Class: '" + this.getName() + "' already has the class '" + superClass.getName() + "' as superclass"); } cls.addBaseClass(this); superClasses.add(cls); } } finally { releaseSchemaWriteLock(); } } @Override public OClass removeSuperClass(OClass superClass) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String .format("alter class `%s` superclass -`%s`", name, superClass != null ? superClass.getName() : null); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String .format("alter class `%s` superclass -`%s`", name, superClass != null ? superClass.getName() : null); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); removeSuperClassInternal(superClass); } else removeSuperClassInternal(superClass); } finally { releaseSchemaWriteLock(); } return this; } void removeSuperClassInternal(final OClass superClass) { acquireSchemaWriteLock(); try { final OClassImpl cls; if (superClass instanceof OClassAbstractDelegate) cls = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate; else cls = (OClassImpl) superClass; if (superClasses.contains(cls)) { if (cls != null) cls.removeBaseClassInternal(this); superClasses.remove(superClass); } } finally { releaseSchemaWriteLock(); } } public OClass setName(final String name) { if (getName().equals(name)) return this; getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); final Character wrongCharacter = OSchemaShared.checkClassNameIfValid(name); OClass oClass = getDatabase().getMetadata().getSchema().getClass(name); if (oClass != null) { String error = String.format("Cannot rename class %s to %s. A Class with name %s exists", this.name, name, name); throw new OSchemaException(error); } if (wrongCharacter != null) throw new OSchemaException( "Invalid class name found. 
Character '" + wrongCharacter + "' cannot be used in class name '" + name + "'"); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` name `%s`", this.name, name); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` name `%s`", this.name, name); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setNameInternal(name); } else setNameInternal(name); } finally { releaseSchemaWriteLock(); } return this; } public long getSize() { acquireSchemaReadLock(); try { long size = 0; for (int clusterId : clusterIds) size += getDatabase().getClusterRecordSizeById(clusterId); return size; } finally { releaseSchemaReadLock(); } } public String getShortName() { acquireSchemaReadLock(); try { return shortName; } finally { releaseSchemaReadLock(); } } public OClass setShortName(String shortName) { if (shortName != null) { shortName = shortName.trim(); if (shortName.isEmpty()) shortName = null; } getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` shortname %s", name, shortName); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` shortname %s", name, shortName); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setShortNameInternal(shortName); } else setShortNameInternal(shortName); } finally { releaseSchemaWriteLock(); } return this; } public String getDescription() { acquireSchemaReadLock(); try { return description; } finally { releaseSchemaReadLock(); } } public OClass setDescription(String iDescription) { if (iDescription != null) { iDescription = iDescription.trim(); if (iDescription.isEmpty()) iDescription = null; } getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` description %s", name, shortName); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` description %s", name, shortName); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setDescriptionInternal(iDescription); } else setDescriptionInternal(iDescription); } finally { releaseSchemaWriteLock(); } return this; } public String getStreamableName() { acquireSchemaReadLock(); try { return shortName != null ? 
shortName : name; } finally { releaseSchemaReadLock(); } } public Collection<OProperty> declaredProperties() { acquireSchemaReadLock(); try { return Collections.unmodifiableCollection(properties.values()); } finally { releaseSchemaReadLock(); } } public Map<String, OProperty> propertiesMap() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_READ); acquireSchemaReadLock(); try { final Map<String, OProperty> props = new HashMap<String, OProperty>(20); propertiesMap(props, true); return props; } finally { releaseSchemaReadLock(); } } private void propertiesMap(Map<String, OProperty> propertiesMap, boolean keepCase) { for (OProperty p : properties.values()) { String propName = p.getName(); if (!keepCase) propName = propName.toLowerCase(); if (!propertiesMap.containsKey(propName)) propertiesMap.put(propName, p); } for (OClassImpl superClass : superClasses) { superClass.propertiesMap(propertiesMap, keepCase); } } public Collection<OProperty> properties() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_READ); acquireSchemaReadLock(); try { final Collection<OProperty> props = new ArrayList<OProperty>(); properties(props); return props; } finally { releaseSchemaReadLock(); } } private void properties(Collection<OProperty> properties) { properties.addAll(this.properties.values()); for (OClassImpl superClass : superClasses) { superClass.properties(properties); } } public void getIndexedProperties(Collection<OProperty> indexedProperties) { for (OProperty p : properties.values()) if (areIndexed(p.getName())) indexedProperties.add(p); for (OClassImpl superClass : superClasses) { superClass.getIndexedProperties(indexedProperties); } } @Override public Collection<OProperty> getIndexedProperties() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_READ); acquireSchemaReadLock(); try { Collection<OProperty> indexedProps = new HashSet<OProperty>(); getIndexedProperties(indexedProps); return indexedProps; } finally { releaseSchemaReadLock(); } } public OProperty getProperty(String propertyName) { acquireSchemaReadLock(); try { propertyName = propertyName.toLowerCase(); OProperty p = properties.get(propertyName); if (p != null) return p; for (int i = 0; i < superClasses.size() && p == null; i++) { p = superClasses.get(i).getProperty(propertyName); } return p; } finally { releaseSchemaReadLock(); } } public OProperty createProperty(final String iPropertyName, final OType iType) { return addProperty(iPropertyName, iType, null, null, false); } public OProperty createProperty(final String iPropertyName, final OType iType, final OClass iLinkedClass) { if (iLinkedClass == null) throw new OSchemaException("Missing linked class"); return addProperty(iPropertyName, iType, null, iLinkedClass, false); } public OProperty createProperty(final String iPropertyName, final OType iType, final OClass iLinkedClass, final boolean unsafe) { if (iLinkedClass == null) throw new OSchemaException("Missing linked class"); return addProperty(iPropertyName, iType, null, iLinkedClass, unsafe); } public OProperty createProperty(final String iPropertyName, final OType iType, final OType iLinkedType) { return addProperty(iPropertyName, iType, iLinkedType, null, false); } public OProperty createProperty(final String iPropertyName, final OType iType, final OType iLinkedType, final boolean unsafe) { return addProperty(iPropertyName, iType, iLinkedType, null, unsafe); } @Override public boolean existsProperty(String propertyName) { acquireSchemaReadLock(); try { 
propertyName = propertyName.toLowerCase(); boolean result = properties.containsKey(propertyName); if (result) return true; for (OClassImpl superClass : superClasses) { result = superClass.existsProperty(propertyName); if (result) return true; } return false; } finally { releaseSchemaReadLock(); } } public void dropProperty(final String propertyName) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot drop a property inside a transaction"); getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_DELETE); final String lowerName = propertyName.toLowerCase(); acquireSchemaWriteLock(); try { if (!properties.containsKey(lowerName)) throw new OSchemaException("Property '" + propertyName + "' not found in class " + name + "'"); final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { database.command(new OCommandSQL("drop property " + name + '.' + propertyName)).execute(); } else if (isDistributedCommand()) { final OCommandSQL commandSQL = new OCommandSQL("drop property " + name + '.' + propertyName); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { dropPropertyInternal(propertyName); return null; } }); } else OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { dropPropertyInternal(propertyName); return null; } }); } finally { releaseSchemaWriteLock(); } } @Override public void fromStream() { subclasses = null; superClasses.clear(); name = document.field("name"); if (document.containsField("shortName")) shortName = document.field("shortName"); else shortName = null; if (document.containsField("description")) description = document.field("description"); else description = null; defaultClusterId = document.field("defaultClusterId"); if (document.containsField("strictMode")) strictMode = document.field("strictMode"); else strictMode = false; if (document.containsField("abstract")) abstractClass = document.field("abstract"); else abstractClass = false; if (document.field("overSize") != null) overSize = document.field("overSize"); else overSize = 0f; final Object cc = document.field("clusterIds"); if (cc instanceof Collection<?>) { final Collection<Integer> coll = document.field("clusterIds"); clusterIds = new int[coll.size()]; int i = 0; for (final Integer item : coll) clusterIds[i++] = item; } else clusterIds = (int[]) cc; Arrays.sort(clusterIds); if (clusterIds.length == 1 && clusterIds[0] == -1) setPolymorphicClusterIds(OCommonConst.EMPTY_INT_ARRAY); else setPolymorphicClusterIds(clusterIds); // READ PROPERTIES OPropertyImpl prop; final Map<String, OProperty> newProperties = new HashMap<String, OProperty>(); final Collection<ODocument> storedProperties = document.field("properties"); if (storedProperties != null) for (OIdentifiable id : storedProperties) { ODocument p = id.getRecord(); prop = new OPropertyImpl(this, p); prop.fromStream(); if (properties.containsKey(prop.getName())) { prop = (OPropertyImpl) properties.get(prop.getName().toLowerCase()); prop.fromStream(p); } newProperties.put(prop.getName().toLowerCase(), prop); } properties.clear(); properties.putAll(newProperties); customFields = document.field("customFields", OType.EMBEDDEDMAP); clusterSelection = 
owner.getClusterSelectionFactory().getStrategy((String) document.field("clusterSelection")); } @Override @OBeforeSerialization public ODocument toStream() { document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING); try { document.field("name", name); document.field("shortName", shortName); document.field("description", description); document.field("defaultClusterId", defaultClusterId); document.field("clusterIds", clusterIds); document.field("clusterSelection", clusterSelection.getName()); document.field("overSize", overSize); document.field("strictMode", strictMode); document.field("abstract", abstractClass); final Set<ODocument> props = new LinkedHashSet<ODocument>(); for (final OProperty p : properties.values()) { props.add(((OPropertyImpl) p).toStream()); } document.field("properties", props, OType.EMBEDDEDSET); if (superClasses.isEmpty()) { // Single super class is deprecated! document.field("superClass", null, OType.STRING); document.field("superClasses", null, OType.EMBEDDEDLIST); } else { // Single super class is deprecated! document.field("superClass", superClasses.get(0).getName(), OType.STRING); List<String> superClassesNames = new ArrayList<String>(); for (OClassImpl superClass : superClasses) { superClassesNames.add(superClass.getName()); } document.field("superClasses", superClassesNames, OType.EMBEDDEDLIST); } document.field("customFields", customFields != null && customFields.size() > 0 ? customFields : null, OType.EMBEDDEDMAP); } finally { document.setInternalStatus(ORecordElement.STATUS.LOADED); } return document; } @Override public int getClusterForNewInstance(final ODocument doc) { acquireSchemaReadLock(); try { return clusterSelection.getCluster(this, doc); } finally { releaseSchemaReadLock(); } } public int getDefaultClusterId() { acquireSchemaReadLock(); try { return defaultClusterId; } finally { releaseSchemaReadLock(); } } public void setDefaultClusterId(final int defaultClusterId) { acquireSchemaWriteLock(); try { checkEmbedded(); this.defaultClusterId = defaultClusterId; } finally { releaseSchemaWriteLock(); } } public int[] getClusterIds() { acquireSchemaReadLock(); try { return clusterIds; } finally { releaseSchemaReadLock(); } } public int[] getPolymorphicClusterIds() { acquireSchemaReadLock(); try { return Arrays.copyOf(polymorphicClusterIds, polymorphicClusterIds.length); } finally { releaseSchemaReadLock(); } } private void setPolymorphicClusterIds(final int[] iClusterIds) { Set<Integer> set = new TreeSet<Integer>(); for (int iClusterId : iClusterIds) { set.add(iClusterId); } polymorphicClusterIds = new int[set.size()]; int i = 0; for (Integer clusterId : set) { polymorphicClusterIds[i] = clusterId; i++; } } public void renameProperty(final String iOldName, final String iNewName) { final OProperty p = properties.remove(iOldName.toLowerCase()); if (p != null) properties.put(iNewName.toLowerCase(), p); } public OClass addClusterId(final int clusterId) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (isAbstract()) { throw new OSchemaException("Impossible to associate a cluster to an abstract class class"); } acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` addcluster %d", name, clusterId); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` addcluster 
%d", name, clusterId); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); addClusterIdInternal(clusterId); } else addClusterIdInternal(clusterId); } finally { releaseSchemaWriteLock(); } return this; } public static OClass addClusters(final OClass cls, final int iClusters) { final String clusterBase = cls.getName().toLowerCase() + "_"; for (int i = 1; i < iClusters; ++i) { cls.addCluster(clusterBase + i); } return cls; } @Override public OClass addCluster(final String clusterNameOrId) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (isAbstract()) { throw new OSchemaException("Impossible to associate a cluster to an abstract class class"); } acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` addcluster `%s`", name, clusterNameOrId); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final int clusterId = owner.createClusterIfNeeded(clusterNameOrId); addClusterIdInternal(clusterId); final String cmd = String.format("alter class `%s` addcluster `%s`", name, clusterNameOrId); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); } else { final int clusterId = owner.createClusterIfNeeded(clusterNameOrId); addClusterIdInternal(clusterId); } } finally { releaseSchemaWriteLock(); } return this; } /** * {@inheritDoc} */ @Override public OClass truncateCluster(String clusterName) { getDatabase().checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_DELETE, name); acquireSchemaReadLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("truncate cluster %s", clusterName); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("truncate cluster %s", clusterName); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); truncateClusterInternal(clusterName, storage); } else truncateClusterInternal(clusterName, storage); } finally { releaseSchemaReadLock(); } return this; } private void truncateClusterInternal(final String clusterName, final OStorage storage) { final OCluster cluster = storage.getClusterByName(clusterName); if (cluster == null) { throw new ODatabaseException("Cluster with name " + clusterName + " does not exist"); } try { cluster.truncate(); } catch (IOException e) { throw OException.wrapException(new ODatabaseException("Error during truncate of cluster " + clusterName), e); } for (OIndex index : getIndexes()) { index.rebuild(); } } public OClass removeClusterId(final int clusterId) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (clusterIds.length == 1 && clusterId == clusterIds[0]) throw new ODatabaseException(" Impossible to remove the last cluster of class '" + getName() + "' drop the class instead"); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof 
OStorageProxy) { final String cmd = String.format("alter class `%s` removecluster %d", name, clusterId); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` removecluster %d", name, clusterId); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); removeClusterIdInternal(clusterId); } else removeClusterIdInternal(clusterId); } finally { releaseSchemaWriteLock(); } return this; } public Collection<OClass> getSubclasses() { acquireSchemaReadLock(); try { if (subclasses == null || subclasses.size() == 0) return Collections.emptyList(); return Collections.unmodifiableCollection(subclasses); } finally { releaseSchemaReadLock(); } } public Collection<OClass> getAllSubclasses() { acquireSchemaReadLock(); try { final Set<OClass> set = new HashSet<OClass>(); if (subclasses != null) { set.addAll(subclasses); for (OClass c : subclasses) set.addAll(c.getAllSubclasses()); } return set; } finally { releaseSchemaReadLock(); } } @Deprecated public Collection<OClass> getBaseClasses() { return getSubclasses(); } @Deprecated public Collection<OClass> getAllBaseClasses() { return getAllSubclasses(); } @Override public Collection<OClass> getAllSuperClasses() { Set<OClass> ret = new HashSet<OClass>(); getAllSuperClasses(ret); return ret; } private void getAllSuperClasses(Set<OClass> set) { set.addAll(superClasses); for (OClassImpl superClass : superClasses) { superClass.getAllSuperClasses(set); } } OClass removeBaseClassInternal(final OClass baseClass) { acquireSchemaWriteLock(); try { checkEmbedded(); if (subclasses == null) return this; if (subclasses.remove(baseClass)) removePolymorphicClusterIds((OClassImpl) baseClass); return this; } finally { releaseSchemaWriteLock(); } } public float getOverSize() { acquireSchemaReadLock(); try { if (overSize > 0) // CUSTOM OVERSIZE SET return overSize; // NO OVERSIZE by default float maxOverSize = 0; float thisOverSize; for (OClassImpl superClass : superClasses) { thisOverSize = superClass.getOverSize(); if (thisOverSize > maxOverSize) maxOverSize = thisOverSize; } return maxOverSize; } finally { releaseSchemaReadLock(); } } public OClass setOverSize(final float overSize) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { // FORMAT FLOAT LOCALE AGNOSTIC final String cmd = String.format("alter class `%s` oversize %s", name, new Float(overSize).toString()); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { // FORMAT FLOAT LOCALE AGNOSTIC final String cmd = String.format("alter class `%s` oversize %s", name, new Float(overSize).toString()); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setOverSizeInternal(overSize); } else setOverSizeInternal(overSize); } finally { releaseSchemaWriteLock(); } return this; } @Override public float getClassOverSize() { acquireSchemaReadLock(); try { return overSize; } finally { releaseSchemaReadLock(); } } public boolean isAbstract() { acquireSchemaReadLock(); try { return abstractClass; } finally { releaseSchemaReadLock(); } } public OClass setAbstract(boolean isAbstract) { 
getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` abstract %s", name, isAbstract); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` abstract %s", name, isAbstract); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setAbstractInternal(isAbstract); } else setAbstractInternal(isAbstract); } finally { releaseSchemaWriteLock(); } return this; } public boolean isStrictMode() { acquireSchemaReadLock(); try { return strictMode; } finally { releaseSchemaReadLock(); } } public OClass setStrictMode(final boolean isStrict) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` strictmode %s", name, isStrict); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` strictmode %s", name, isStrict); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setStrictModeInternal(isStrict); } else setStrictModeInternal(isStrict); } finally { releaseSchemaWriteLock(); } return this; } @Override public String toString() { acquireSchemaReadLock(); try { return name; } finally { releaseSchemaReadLock(); } } @Override public boolean equals(Object obj) { acquireSchemaReadLock(); try { if (this == obj) return true; if (obj == null) return false; if (!OClass.class.isAssignableFrom(obj.getClass())) return false; final OClass other = (OClass) obj; if (name == null) { if (other.getName() != null) return false; } else if (!name.equals(other.getName())) return false; return true; } finally { releaseSchemaReadLock(); } } @Override public int hashCode() { int sh = hashCode; if (sh != 0) return sh; acquireSchemaReadLock(); try { sh = hashCode; if (sh != 0) return sh; calculateHashCode(); return hashCode; } finally { releaseSchemaReadLock(); } } public int compareTo(final OClass o) { acquireSchemaReadLock(); try { return name.compareTo(o.getName()); } finally { releaseSchemaReadLock(); } } public long count() { return count(true); } public long count(final boolean isPolymorphic) { acquireSchemaReadLock(); try { if (isPolymorphic) return getDatabase().countClusterElements(readableClusters(getDatabase(), polymorphicClusterIds)); return getDatabase().countClusterElements(readableClusters(getDatabase(), clusterIds)); } finally { releaseSchemaReadLock(); } } /** * Truncates all the clusters the class uses. 
 *
 * @throws IOException if truncating one of the underlying clusters fails
 */
public void truncate() throws IOException {
  getDatabase().checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_UPDATE);

  if (isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME)) {
    throw new OSecurityException("Class '" + getName() + "' cannot be truncated because it has record level security enabled (extends '"
        + OSecurityShared.RESTRICTED_CLASSNAME + "')");
  }

  final OStorage storage = getDatabase().getStorage();
  acquireSchemaReadLock();
  try {
    for (int id : clusterIds)
      storage.getClusterById(id).truncate();

    for (OIndex<?> index : getClassIndexes())
      index.clear();

    Set<OIndex<?>> superclassIndexes = new HashSet<OIndex<?>>();
    superclassIndexes.addAll(getIndexes());
    superclassIndexes.removeAll(getClassIndexes());
    for (OIndex index : superclassIndexes) {
      index.rebuild();
    }
  } finally {
    releaseSchemaReadLock();
  }
}

/**
 * Check if the current instance extends the specified schema class.
 *
 * @param iClassName name of the class that should be checked
 * @return true if the current instance extends the passed schema class (iClass)
 * @see #isSuperClassOf(OClass)
 */
public boolean isSubClassOf(final String iClassName) {
  acquireSchemaReadLock();
  try {
    if (iClassName == null)
      return false;

    if (iClassName.equalsIgnoreCase(getName()) || iClassName.equalsIgnoreCase(getShortName()))
      return true;
    for (OClassImpl superClass : superClasses) {
      if (superClass.isSubClassOf(iClassName))
        return true;
    }
    return false;
  } finally {
    releaseSchemaReadLock();
  }
}

/**
 * Check if the current instance extends the specified schema class.
 *
 * @param clazz the class to check
 * @return true if the current instance extends the passed schema class (iClass)
 * @see #isSuperClassOf(OClass)
 */
public boolean isSubClassOf(final OClass clazz) {
  acquireSchemaReadLock();
  try {
    if (clazz == null)
      return false;
    if (equals(clazz))
      return true;
    for (OClassImpl superClass : superClasses) {
      if (superClass.isSubClassOf(clazz))
        return true;
    }
    return false;
  } finally {
    releaseSchemaReadLock();
  }
}

/**
 * Returns true if the passed schema class (iClass) extends the current instance.
 *
 * @param clazz the class to check
 * @return true if the passed schema class extends the current instance
 * @see #isSubClassOf(OClass)
 */
public boolean isSuperClassOf(final OClass clazz) {
  return clazz != null && clazz.isSubClassOf(this);
}

public Object get(final ATTRIBUTES iAttribute) {
  if (iAttribute == null)
    throw new IllegalArgumentException("attribute is null");

  switch (iAttribute) {
  case NAME:
    return getName();
  case SHORTNAME:
    return getShortName();
  case SUPERCLASS:
    return getSuperClass();
  case SUPERCLASSES:
    return getSuperClasses();
  case OVERSIZE:
    return getOverSize();
  case STRICTMODE:
    return isStrictMode();
  case ABSTRACT:
    return isAbstract();
  case CLUSTERSELECTION:
    return getClusterSelection();
  case CUSTOM:
    return getCustomInternal();
  case DESCRIPTION:
    return getDescription();
  }

  throw new IllegalArgumentException("Cannot find attribute '" + iAttribute + "'");
}

public OClass set(final ATTRIBUTES attribute, final Object iValue) {
  if (attribute == null)
    throw new IllegalArgumentException("attribute is null");

  final String stringValue = iValue != null ?
iValue.toString() : null; final boolean isNull = stringValue == null || stringValue.equalsIgnoreCase("NULL"); switch (attribute) { case NAME: setName(decodeClassName(stringValue)); break; case SHORTNAME: setShortName(decodeClassName(stringValue)); break; case SUPERCLASS: if (stringValue == null) throw new IllegalArgumentException("Superclass is null"); if (stringValue.startsWith("+")) { addSuperClass(getDatabase().getMetadata().getSchema().getClass(decodeClassName(stringValue.substring(1)))); } else if (stringValue.startsWith("-")) { removeSuperClass(getDatabase().getMetadata().getSchema().getClass(decodeClassName(stringValue.substring(1)))); } else { setSuperClass(getDatabase().getMetadata().getSchema().getClass(decodeClassName(stringValue))); } break; case SUPERCLASSES: setSuperClassesByNames(stringValue != null ? Arrays.asList(stringValue.split(",\\s*")) : null); break; case OVERSIZE: setOverSize(Float.parseFloat(stringValue)); break; case STRICTMODE: setStrictMode(Boolean.parseBoolean(stringValue)); break; case ABSTRACT: setAbstract(Boolean.parseBoolean(stringValue)); break; case ADDCLUSTER: { addCluster(stringValue); break; } case REMOVECLUSTER: int clId = owner.getClusterId(stringValue); if (clId == NOT_EXISTENT_CLUSTER_ID) throw new IllegalArgumentException("Cluster id '" + stringValue + "' cannot be removed"); removeClusterId(clId); break; case CLUSTERSELECTION: setClusterSelection(stringValue); break; case CUSTOM: int indx = stringValue != null ? stringValue.indexOf('=') : -1; if (indx < 0) { if (isNull || "clear".equalsIgnoreCase(stringValue)) { clearCustom(); } else throw new IllegalArgumentException("Syntax error: expected <name> = <value> or clear, instead found: " + iValue); } else { String customName = stringValue.substring(0, indx).trim(); String customValue = stringValue.substring(indx + 1).trim(); if (isQuoted(customValue)) { customValue = removeQuotes(customValue); } if (customValue.isEmpty()) removeCustom(customName); else setCustom(customName, customValue); } break; case DESCRIPTION: setDescription(stringValue); break; case ENCRYPTION: setEncryption(stringValue); break; } return this; } private String removeQuotes(String s) { s = s.trim(); return s.substring(1, s.length() - 1); } private boolean isQuoted(String s) { s = s.trim(); if (s.startsWith("\"") && s.endsWith("\"")) return true; if (s.startsWith("'") && s.endsWith("'")) return true; if (s.startsWith("`") && s.endsWith("`")) return true; return false; } public OClassImpl setEncryption(final String iValue) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` encryption %s", name, iValue); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` encryption %s", name, iValue); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setEncryptionInternal(iValue); } else setEncryptionInternal(iValue); } finally { releaseSchemaWriteLock(); } return this; } protected void setEncryptionInternal(final String iValue) { for (int cl : getClusterIds()) { final OCluster c = getDatabase().getStorage().getClusterById(cl); if (c != null) try { c.set(OCluster.ATTRIBUTES.ENCRYPTION, 
iValue);
        } catch (IOException e) {
          // intentionally ignored: a cluster that cannot persist the attribute keeps its previous encryption
        }
  }
}

public OPropertyImpl addPropertyInternal(final String name, final OType type, final OType linkedType, final OClass linkedClass,
    final boolean unsafe) {
  if (name == null || name.length() == 0)
    throw new OSchemaException("Property name is null or empty");

  if (!unsafe)
    checkPersistentPropertyType(getDatabase(), name, type);

  final String lowerName = name.toLowerCase();

  final OPropertyImpl prop;

  // these checks are duplicated here because this method is also invoked directly by the SQL commands
  if (linkedType != null)
    OPropertyImpl.checkLinkTypeSupport(type);

  if (linkedClass != null)
    OPropertyImpl.checkSupportLinkedClass(type);

  acquireSchemaWriteLock();
  try {
    checkEmbedded();

    if (properties.containsKey(lowerName))
      throw new OSchemaException("Class '" + this.name + "' already has property '" + name + "'");

    OGlobalProperty global = owner.findOrCreateGlobalProperty(name, type);

    prop = new OPropertyImpl(this, global);

    properties.put(lowerName, prop);

    if (linkedType != null)
      prop.setLinkedTypeInternal(linkedType);
    else if (linkedClass != null)
      prop.setLinkedClassInternal(linkedClass);
  } finally {
    releaseSchemaWriteLock();
  }

  if (prop != null && !unsafe)
    fireDatabaseMigration(getDatabase(), name, type);

  return prop;
}

public OIndex<?> createIndex(final String iName, final INDEX_TYPE iType, final String... fields) {
  return createIndex(iName, iType.name(), fields);
}

public OIndex<?> createIndex(final String iName, final String iType, final String... fields) {
  return createIndex(iName, iType, null, null, fields);
}

public OIndex<?> createIndex(final String iName, final INDEX_TYPE iType, final OProgressListener iProgressListener,
    final String... fields) {
  return createIndex(iName, iType.name(), iProgressListener, null, fields);
}

public OIndex<?> createIndex(String iName, String iType, OProgressListener iProgressListener, ODocument metadata,
    String... fields) {
  return createIndex(iName, iType, iProgressListener, metadata, null, fields);
}

public OIndex<?> createIndex(final String name, String type, final OProgressListener progressListener, ODocument metadata,
    String algorithm, final String... fields) {
  if (type == null)
    throw new IllegalArgumentException("Index type is null");

  type = type.toUpperCase();

  if (fields.length == 0) {
    throw new OIndexException("List of fields to index cannot be empty.");
  }

  final String localName = this.name;
  final int[] localPolymorphicClusterIds = polymorphicClusterIds;

  for (final String fieldToIndex : fields) {
    final String fieldName = decodeClassName(OIndexDefinitionFactory.extractFieldName(fieldToIndex));

    if (!fieldName.equals("@rid") && !existsProperty(fieldName))
      throw new OIndexException("Index with name '" + name + "' cannot be created on class '" + localName
          + "' because the field '" + fieldName + "' is absent from the class definition");
  }

  final OIndexDefinition indexDefinition = OIndexDefinitionFactory
      .createIndexDefinition(this, Arrays.asList(fields), extractFieldTypes(fields), null, type, algorithm);

  return getDatabase().getMetadata().getIndexManager()
      .createIndex(name, type, indexDefinition, localPolymorphicClusterIds, progressListener, metadata, algorithm);
}

public boolean areIndexed(final String...
fields) { return areIndexed(Arrays.asList(fields)); } public boolean areIndexed(final Collection<String> fields) { final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager(); acquireSchemaReadLock(); try { final boolean currentClassResult = indexManager.areIndexed(name, fields); if (currentClassResult) return true; for (OClassImpl superClass : superClasses) { if (superClass.areIndexed(fields)) return true; } return false; } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getInvolvedIndexes(final String... fields) { return getInvolvedIndexes(Arrays.asList(fields)); } public Set<OIndex<?>> getInvolvedIndexes(final Collection<String> fields) { acquireSchemaReadLock(); try { final Set<OIndex<?>> result = new HashSet<OIndex<?>>(getClassInvolvedIndexes(fields)); for (OClassImpl superClass : superClasses) { result.addAll(superClass.getInvolvedIndexes(fields)); } return result; } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getClassInvolvedIndexes(final Collection<String> fields) { final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager(); acquireSchemaReadLock(); try { return indexManager.getClassInvolvedIndexes(name, fields); } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getClassInvolvedIndexes(final String... fields) { return getClassInvolvedIndexes(Arrays.asList(fields)); } public OIndex<?> getClassIndex(final String name) { acquireSchemaReadLock(); try { return getDatabase().getMetadata().getIndexManager().getClassIndex(this.name, name); } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getClassIndexes() { acquireSchemaReadLock(); try { final OIndexManagerProxy idxManager = getDatabase().getMetadata().getIndexManager(); if (idxManager == null) return new HashSet<OIndex<?>>(); return idxManager.getClassIndexes(name); } finally { releaseSchemaReadLock(); } } @Override public void getClassIndexes(final Collection<OIndex<?>> indexes) { acquireSchemaReadLock(); try { final OIndexManagerProxy idxManager = getDatabase().getMetadata().getIndexManager(); if (idxManager == null) return; idxManager.getClassIndexes(name, indexes); } finally { releaseSchemaReadLock(); } } @Override public OIndex<?> getAutoShardingIndex() { final ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined(); return db != null ? 
db.getMetadata().getIndexManager().getClassAutoShardingIndex(name) : null; } @Override public boolean isEdgeType() { return isSubClassOf(EDGE_CLASS_NAME); } @Override public boolean isVertexType() { return isSubClassOf(VERTEX_CLASS_NAME); } public void onPostIndexManagement() { final OIndex<?> autoShardingIndex = getAutoShardingIndex(); if (autoShardingIndex != null) { if (!getDatabase().getStorage().isRemote()) { // OVERRIDE CLUSTER SELECTION acquireSchemaWriteLock(); try { this.clusterSelection = new OAutoShardingClusterSelectionStrategy(this, autoShardingIndex); } finally { releaseSchemaWriteLock(); } } } else if (clusterSelection instanceof OAutoShardingClusterSelectionStrategy) { // REMOVE AUTO SHARDING CLUSTER SELECTION acquireSchemaWriteLock(); try { this.clusterSelection = new ORoundRobinClusterSelectionStrategy(); } finally { releaseSchemaWriteLock(); } } } @Override public void getIndexes(final Collection<OIndex<?>> indexes) { acquireSchemaReadLock(); try { getClassIndexes(indexes); for (OClass superClass : superClasses) { superClass.getIndexes(indexes); } } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getIndexes() { final Set<OIndex<?>> indexes = new HashSet<OIndex<?>>(); getIndexes(indexes); return indexes; } public void acquireSchemaReadLock() { owner.acquireSchemaReadLock(); } public void releaseSchemaReadLock() { owner.releaseSchemaReadLock(); } public void acquireSchemaWriteLock() { owner.acquireSchemaWriteLock(); } public void releaseSchemaWriteLock() { releaseSchemaWriteLock(true); } public void releaseSchemaWriteLock(final boolean iSave) { calculateHashCode(); owner.releaseSchemaWriteLock(iSave); } public void checkEmbedded() { owner.checkEmbedded(getDatabase().getStorage().getUnderlying().getUnderlying()); } public void setClusterSelectionInternal(final String clusterSelection) { // AVOID TO CHECK THIS IN LOCK TO AVOID RE-GENERATION OF IMMUTABLE SCHEMAS if (this.clusterSelection.getName().equals(clusterSelection)) // NO CHANGES return; acquireSchemaWriteLock(); try { checkEmbedded(); this.clusterSelection = owner.getClusterSelectionFactory().newInstance(clusterSelection); } finally { releaseSchemaWriteLock(); } } public void setClusterSelectionInternal(final OClusterSelectionStrategy iClusterSelection) { // AVOID TO CHECK THIS IN LOCK TO AVOID RE-GENERATION OF IMMUTABLE SCHEMAS if (this.clusterSelection.getName().equals(iClusterSelection.getName())) // NO CHANGES return; acquireSchemaWriteLock(); try { checkEmbedded(); this.clusterSelection = iClusterSelection; } finally { releaseSchemaWriteLock(); } } public void fireDatabaseMigration(final ODatabaseDocument database, final String propertyName, final OType type) { final boolean strictSQL = ((ODatabaseInternal) database).getStorage().getConfiguration().isStrictSql(); database.query(new OSQLAsynchQuery<Object>( "select from " + getEscapedName(name, strictSQL) + " where " + getEscapedName(propertyName, strictSQL) + ".type() <> \"" + type.name() + "\"", new OCommandResultListener() { @Override public boolean result(Object iRecord) { final ODocument record = ((OIdentifiable) iRecord).getRecord(); record.field(propertyName, record.field(propertyName), type); database.save(record); return true; } @Override public void end() { } @Override public Object getResult() { return null; } })); } public void firePropertyNameMigration(final ODatabaseDocument database, final String propertyName, final String newPropertyName, final OType type) { final boolean strictSQL = ((ODatabaseInternal) 
database).getStorage().getConfiguration().isStrictSql(); database.query(new OSQLAsynchQuery<Object>( "select from " + getEscapedName(name, strictSQL) + " where " + getEscapedName(propertyName, strictSQL) + " is not null ", new OCommandResultListener() { @Override public boolean result(Object iRecord) { final ODocument record = ((OIdentifiable) iRecord).getRecord(); record.setFieldType(propertyName, type); record.field(newPropertyName, record.field(propertyName), type); database.save(record); return true; } @Override public void end() { } @Override public Object getResult() { return null; } })); } public void checkPersistentPropertyType(final ODatabaseInternal<ORecord> database, final String propertyName, final OType type) { final boolean strictSQL = database.getStorage().getConfiguration().isStrictSql(); final StringBuilder builder = new StringBuilder(256); builder.append("select count(*) from "); builder.append(getEscapedName(name, strictSQL)); builder.append(" where "); builder.append(getEscapedName(propertyName, strictSQL)); builder.append(".type() not in ["); final Iterator<OType> cur = type.getCastable().iterator(); while (cur.hasNext()) { builder.append('"').append(cur.next().name()).append('"'); if (cur.hasNext()) builder.append(","); } builder.append("] and ").append(getEscapedName(propertyName, strictSQL)).append(" is not null "); if (type.isMultiValue()) builder.append(" and ").append(getEscapedName(propertyName, strictSQL)).append(".size() <> 0 limit 1"); final List<ODocument> res = database.command(new OCommandSQL(builder.toString())).execute(); if (((Long) res.get(0).field("count")) > 0) throw new OSchemaException("The database contains some schema-less data in the property '" + name + "." + propertyName + "' that is not compatible with the type " + type + ". Fix those records and change the schema again"); } protected String getEscapedName(final String iName, final boolean iStrictSQL) { if (iStrictSQL) // ESCAPE NAME return "`" + iName + "`"; return iName; } public OSchemaShared getOwner() { return owner; } private void calculateHashCode() { int result = super.hashCode(); result = 31 * result + (name != null ? 
name.hashCode() : 0); hashCode = result; } private void setOverSizeInternal(final float overSize) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); this.overSize = overSize; } finally { releaseSchemaWriteLock(); } } private void setCustomInternal(final String name, final String value) { acquireSchemaWriteLock(); try { checkEmbedded(); if (customFields == null) customFields = new HashMap<String, String>(); if (value == null || "null".equalsIgnoreCase(value)) customFields.remove(name); else customFields.put(name, value); } finally { releaseSchemaWriteLock(); } } private void clearCustomInternal() { acquireSchemaWriteLock(); try { checkEmbedded(); customFields = null; } finally { releaseSchemaWriteLock(); } } private void setNameInternal(final String name) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); final String oldName = this.name; owner.changeClassName(this.name, name, this); this.name = name; ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (!database.getStorageVersions().classesAreDetectedByClusterId()) { for (int clusterId : clusterIds) { long[] range = storage.getClusterDataRange(clusterId); OPhysicalPosition[] positions = storage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition(range[0])); do { for (OPhysicalPosition position : positions) { final ORecordId identity = new ORecordId(clusterId, position.clusterPosition); final ORawBuffer record = storage.readRecord(identity, null, true, null).getResult(); if (record.recordType == ODocument.RECORD_TYPE) { final ORecordSerializerSchemaAware2CSV serializer = (ORecordSerializerSchemaAware2CSV) ORecordSerializerFactory .instance().getFormat(ORecordSerializerSchemaAware2CSV.NAME); String persName = new String(record.buffer, "UTF-8"); if (serializer.getClassName(persName).equalsIgnoreCase(name)) { final ODocument document = new ODocument(); document.setLazyLoad(false); document.fromStream(record.buffer); ORecordInternal.setVersion(document, record.version); ORecordInternal.setIdentity(document, identity); document.setClassName(name); document.setDirty(); document.save(); } } if (positions.length > 0) positions = storage.higherPhysicalPositions(clusterId, positions[positions.length - 1]); } } while (positions.length > 0); } } renameCluster(oldName, this.name); } catch (UnsupportedEncodingException e) { throw OException.wrapException(new OSchemaException("Error reading schema"), e); } finally { releaseSchemaWriteLock(); } } private void renameCluster(String oldName, String newName) { oldName = oldName.toLowerCase(); newName = newName.toLowerCase(); final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage.getClusterIdByName(newName) != -1) return; final int clusterId = storage.getClusterIdByName(oldName); if (clusterId == -1) return; if (!hasClusterId(clusterId)) return; database.command(new OCommandSQL("alter cluster `" + oldName + "` name `" + newName + "`")).execute(); } private void setShortNameInternal(final String iShortName) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); String oldName = null; if (this.shortName != null) oldName = this.shortName; owner.changeClassName(oldName, iShortName, this); this.shortName = iShortName; } finally { releaseSchemaWriteLock(); 
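      // releaseSchemaWriteLock() above first recomputes the cached hashCode, then delegates to the
      // owner with iSave = true by default (see releaseSchemaWriteLock(boolean)), so every mutator
      // ends with a consistent, saved schema snapshot.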
  }
}

private void setDescriptionInternal(final String iDescription) {
  acquireSchemaWriteLock();
  try {
    checkEmbedded();
    this.description = iDescription;
  } finally {
    releaseSchemaWriteLock();
  }
}

private void dropPropertyInternal(final String iPropertyName) {
  if (getDatabase().getTransaction().isActive())
    throw new IllegalStateException("Cannot drop a property inside a transaction");

  getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_DELETE);

  acquireSchemaWriteLock();
  try {
    checkEmbedded();

    final OProperty prop = properties.remove(iPropertyName.toLowerCase());

    if (prop == null)
      throw new OSchemaException("Property '" + iPropertyName + "' not found in class '" + name + "'");
  } finally {
    releaseSchemaWriteLock();
  }
}

private OClass addClusterIdInternal(final int clusterId) {
  acquireSchemaWriteLock();
  try {
    checkEmbedded();

    owner.checkClusterCanBeAdded(clusterId, this);

    for (int currId : clusterIds)
      if (currId == clusterId)
        // ALREADY ADDED
        return this;

    clusterIds = OArrays.copyOf(clusterIds, clusterIds.length + 1);
    clusterIds[clusterIds.length - 1] = clusterId;
    Arrays.sort(clusterIds);

    addPolymorphicClusterId(clusterId);

    if (defaultClusterId == NOT_EXISTENT_CLUSTER_ID)
      defaultClusterId = clusterId;

    owner.addClusterForClass(clusterId, this);
    return this;
  } finally {
    releaseSchemaWriteLock();
  }
}

private void addPolymorphicClusterId(int clusterId) {
  if (Arrays.binarySearch(polymorphicClusterIds, clusterId) >= 0)
    return;

  polymorphicClusterIds = OArrays.copyOf(polymorphicClusterIds, polymorphicClusterIds.length + 1);
  polymorphicClusterIds[polymorphicClusterIds.length - 1] = clusterId;
  Arrays.sort(polymorphicClusterIds);

  addClusterIdToIndexes(clusterId);

  for (OClassImpl superClass : superClasses) {
    superClass.addPolymorphicClusterId(clusterId);
  }
}

private OClass removeClusterIdInternal(final int clusterToRemove) {
  acquireSchemaWriteLock();
  try {
    checkEmbedded();

    boolean found = false;
    for (int clusterId : clusterIds) {
      if (clusterId == clusterToRemove) {
        found = true;
        break;
      }
    }

    if (found) {
      final int[] newClusterIds = new int[clusterIds.length - 1];
      for (int i = 0, k = 0; i < clusterIds.length; ++i) {
        if (clusterIds[i] == clusterToRemove)
          // JUMP IT
          continue;

        newClusterIds[k] = clusterIds[i];
        k++;
      }
      clusterIds = newClusterIds;

      removePolymorphicClusterId(clusterToRemove);
    }

    if (defaultClusterId == clusterToRemove) {
      if (clusterIds.length >= 1)
        defaultClusterId = clusterIds[0];
      else
        defaultClusterId = NOT_EXISTENT_CLUSTER_ID;
    }

    owner.removeClusterForClass(clusterToRemove, this);
  } finally {
    releaseSchemaWriteLock();
  }

  return this;
}

private void setAbstractInternal(final boolean isAbstract) {
  getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE);

  acquireSchemaWriteLock();
  try {
    if (isAbstract) {
      // SWITCH TO ABSTRACT
      if (defaultClusterId != NOT_EXISTENT_CLUSTER_ID) {
        // CHECK
        if (count() > 0)
          throw new IllegalStateException("Cannot set the class as abstract because it contains records.");

        tryDropCluster(defaultClusterId);
        for (int clusterId : getClusterIds()) {
          tryDropCluster(clusterId);
          removePolymorphicClusterId(clusterId);
          owner.removeClusterForClass(clusterId, this);
        }

        setClusterIds(new int[] { NOT_EXISTENT_CLUSTER_ID });

        defaultClusterId = NOT_EXISTENT_CLUSTER_ID;
      }
    } else {
      if (!abstractClass)
        return;

      int clusterId = getDatabase().getClusterIdByName(name);
      if (clusterId == -1)
        clusterId = getDatabase().addCluster(name);

      this.defaultClusterId = clusterId;
      this.clusterIds[0] = this.defaultClusterId;
      this.polymorphicClusterIds =
Arrays.copyOf(clusterIds, clusterIds.length); for (OClass clazz : getAllSubclasses()) { if (clazz instanceof OClassImpl) { addPolymorphicClusterIds((OClassImpl) clazz); } else { OLogManager.instance().warn(this, "Warning: cannot set polymorphic cluster IDs for class " + name); } } } this.abstractClass = isAbstract; } finally { releaseSchemaWriteLock(); } } private void setStrictModeInternal(final boolean iStrict) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); this.strictMode = iStrict; } finally { releaseSchemaWriteLock(); } } private OProperty addProperty(final String propertyName, final OType type, final OType linkedType, final OClass linkedClass, final boolean unsafe) { if (type == null) throw new OSchemaException("Property type not defined."); if (propertyName == null || propertyName.length() == 0) throw new OSchemaException("Property name is null or empty"); if (getDatabase().getStorage().getConfiguration().isStrictSql()) { validatePropertyName(propertyName); } if (getDatabase().getTransaction().isActive()) throw new OSchemaException("Cannot create property '" + propertyName + "' inside a transaction"); final ODatabaseDocumentInternal database = getDatabase(); database.checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (linkedType != null) OPropertyImpl.checkLinkTypeSupport(type); if (linkedClass != null) OPropertyImpl.checkSupportLinkedClass(type); acquireSchemaWriteLock(); try { final StringBuilder cmd = new StringBuilder("create property "); // CLASS.PROPERTY NAME if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append(name); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append('.'); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append(propertyName); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); // TYPE cmd.append(' '); cmd.append(type.name); if (linkedType != null) { // TYPE cmd.append(' '); cmd.append(linkedType.name); } else if (linkedClass != null) { // TYPE cmd.append(' '); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append(linkedClass.getName()); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); } if (unsafe) cmd.append(" unsafe "); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { database.command(new OCommandSQL(cmd.toString())).execute(); reload(); return getProperty(propertyName); } else if (isDistributedCommand()) { final OCommandSQL commandSQL = new OCommandSQL(cmd.toString()); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); return (OProperty) OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { return addPropertyInternal(propertyName, type, linkedType, linkedClass, unsafe); } }); } else return (OProperty) OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { return addPropertyInternal(propertyName, type, linkedType, linkedClass, unsafe); } }); } finally { releaseSchemaWriteLock(); } } private void validatePropertyName(final String propertyName) { } private int getClusterId(final String stringValue) { int clId; if (!stringValue.isEmpty() && Character.isDigit(stringValue.charAt(0))) try { clId = 
Integer.parseInt(stringValue);
    } catch (NumberFormatException e) {
      clId = getDatabase().getClusterIdByName(stringValue);
    }
  else
    clId = getDatabase().getClusterIdByName(stringValue);

  return clId;
}

private void addClusterIdToIndexes(int iId) {
  if (getDatabase().getStorage().getUnderlying() instanceof OAbstractPaginatedStorage) {
    final String clusterName = getDatabase().getClusterNameById(iId);
    final List<String> indexesToAdd = new ArrayList<String>();

    for (OIndex<?> index : getIndexes())
      indexesToAdd.add(index.getName());

    final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager();
    for (String indexName : indexesToAdd)
      indexManager.addClusterToIndex(clusterName, indexName);
  }
}

/**
 * Adds a base class to the current one. It also adds the base class cluster ids to the polymorphic cluster ids array.
 *
 * @param iBaseClass The base class to add.
 */
private OClass addBaseClass(final OClassImpl iBaseClass) {
  checkRecursion(iBaseClass);

  if (subclasses == null)
    subclasses = new ArrayList<OClass>();

  if (subclasses.contains(iBaseClass))
    return this;

  subclasses.add(iBaseClass);
  addPolymorphicClusterIdsWithInheritance(iBaseClass);
  return this;
}

private void checkParametersConflict(final OClass baseClass) {
  final Collection<OProperty> baseClassProperties = baseClass.properties();
  for (OProperty property : baseClassProperties) {
    OProperty thisProperty = getProperty(property.getName());
    if (thisProperty != null && !thisProperty.getType().equals(property.getType())) {
      throw new OSchemaException(
          "Cannot add base class '" + baseClass.getName() + "', because of property conflict: '" + thisProperty + "' vs '"
              + property + "'");
    }
  }
}

protected static void checkParametersConflict(List<OClass> classes) {
  final Map<String, OProperty> cumulative = new HashMap<String, OProperty>();
  final Map<String, OProperty> properties = new HashMap<String, OProperty>();

  for (OClass superClass : classes) {
    if (superClass == null)
      continue;
    OClassImpl impl;

    if (superClass instanceof OClassAbstractDelegate)
      impl = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate;
    else
      impl = (OClassImpl) superClass;
    impl.propertiesMap(properties, false);
    for (Map.Entry<String, OProperty> entry : properties.entrySet()) {
      if (cumulative.containsKey(entry.getKey())) {
        final String property = entry.getKey();
        final OProperty existingProperty = cumulative.get(property);
        if (!existingProperty.getType().equals(entry.getValue().getType())) {
          throw new OSchemaException("Properties conflict detected: [" + existingProperty + "] vs [" + entry.getValue() + "]");
        }
      }
    }

    cumulative.putAll(properties);
    properties.clear();
  }
}

private void checkRecursion(final OClass baseClass) {
  if (isSubClassOf(baseClass)) {
    throw new OSchemaException("Cannot add base class '" + baseClass.getName() + "', because of recursion");
  }
}

private void removePolymorphicClusterIds(final OClassImpl iBaseClass) {
  for (final int clusterId : iBaseClass.polymorphicClusterIds)
    removePolymorphicClusterId(clusterId);
}

private void removePolymorphicClusterId(final int clusterId) {
  final int index = Arrays.binarySearch(polymorphicClusterIds, clusterId);
  if (index < 0)
    return;

  if (index < polymorphicClusterIds.length - 1)
    System.arraycopy(polymorphicClusterIds, index + 1, polymorphicClusterIds, index, polymorphicClusterIds.length - (index + 1));

  polymorphicClusterIds = Arrays.copyOf(polymorphicClusterIds, polymorphicClusterIds.length - 1);

  removeClusterFromIndexes(clusterId);
  for (OClassImpl superClass : superClasses) {
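    // Propagate the removal up the inheritance chain so no ancestor keeps advertising a
    // polymorphic cluster id this hierarchy no longer owns (mirror of addPolymorphicClusterId).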
superClass.removePolymorphicClusterId(clusterId); } } private void removeClusterFromIndexes(final int iId) { if (getDatabase().getStorage().getUnderlying() instanceof OAbstractPaginatedStorage) { final String clusterName = getDatabase().getClusterNameById(iId); final List<String> indexesToRemove = new ArrayList<String>(); for (final OIndex<?> index : getIndexes()) indexesToRemove.add(index.getName()); final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager(); for (final String indexName : indexesToRemove) indexManager.removeClusterFromIndex(clusterName, indexName); } } private void tryDropCluster(final int defaultClusterId) { if (name.toLowerCase().equals(getDatabase().getClusterNameById(defaultClusterId))) { // DROP THE DEFAULT CLUSTER CALLED WITH THE SAME NAME ONLY IF EMPTY if (getDatabase().getClusterRecordSizeById(defaultClusterId) == 0) getDatabase().dropCluster(defaultClusterId, true); } } private ODatabaseDocumentInternal getDatabase() { return ODatabaseRecordThreadLocal.INSTANCE.get(); } /** * Add different cluster id to the "polymorphic cluster ids" array. */ private void addPolymorphicClusterIds(final OClassImpl iBaseClass) { Set<Integer> clusters = new TreeSet<Integer>(); for (int clusterId : polymorphicClusterIds) { clusters.add(clusterId); } for (int clusterId : iBaseClass.polymorphicClusterIds) { if (clusters.add(clusterId)) { try { addClusterIdToIndexes(clusterId); } catch (RuntimeException e) { OLogManager.instance().warn(this, "Error adding clusterId '%d' to index of class '%s'", e, clusterId, getName()); clusters.remove(clusterId); } } } polymorphicClusterIds = new int[clusters.size()]; int i = 0; for (Integer cluster : clusters) { polymorphicClusterIds[i] = cluster; i++; } } private void addPolymorphicClusterIdsWithInheritance(final OClassImpl iBaseClass) { addPolymorphicClusterIds(iBaseClass); for (OClassImpl superClass : superClasses) { superClass.addPolymorphicClusterIdsWithInheritance(iBaseClass); } } public List<OType> extractFieldTypes(final String[] fieldNames) { final List<OType> types = new ArrayList<OType>(fieldNames.length); for (String fieldName : fieldNames) { if (!fieldName.equals("@rid")) types.add(getProperty(decodeClassName(OIndexDefinitionFactory.extractFieldName(fieldName)).toLowerCase()).getType()); else types.add(OType.LINK); } return types; } private OClass setClusterIds(final int[] iClusterIds) { clusterIds = iClusterIds; Arrays.sort(clusterIds); return this; } private boolean isDistributedCommand() { return getDatabase().getStorage() instanceof OAutoshardedStorage && !OScenarioThreadLocal.INSTANCE.isRunModeDistributed(); } public static String decodeClassName(String s) { if (s == null) { return null; } s = s.trim(); if (s.startsWith("`") && s.endsWith("`")) { return s.substring(1, s.length() - 1); } return s; } }
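// Illustrative usage sketch (not part of the class): typical schema manipulation through this
// API, assuming an open document database "db"; the class, property, and custom-field names
// below are hypothetical.
//
//   OSchema schema = db.getMetadata().getSchema();
//   OClass customer = schema.createClass("Customer");
//   customer.createProperty("name", OType.STRING);
//   customer.createIndex("Customer.name", OClass.INDEX_TYPE.NOTUNIQUE, "name");
//   customer.setCustom("ui.label", "Customers");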
core/src/main/java/com/orientechnologies/orient/core/metadata/schema/OClassImpl.java
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.core.metadata.schema; import com.orientechnologies.common.exception.OException; import com.orientechnologies.common.listener.OProgressListener; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.util.OArrays; import com.orientechnologies.common.util.OCommonConst; import com.orientechnologies.orient.core.annotation.OBeforeSerialization; import com.orientechnologies.orient.core.command.OCommandResultListener; import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal; import com.orientechnologies.orient.core.db.ODatabaseInternal; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.db.OScenarioThreadLocal; import com.orientechnologies.orient.core.db.document.ODatabaseDocument; import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.db.record.ORecordElement; import com.orientechnologies.orient.core.exception.ODatabaseException; import com.orientechnologies.orient.core.exception.OSchemaException; import com.orientechnologies.orient.core.exception.OSecurityAccessException; import com.orientechnologies.orient.core.exception.OSecurityException; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.index.*; import com.orientechnologies.orient.core.metadata.schema.clusterselection.OClusterSelectionStrategy; import com.orientechnologies.orient.core.metadata.schema.clusterselection.ORoundRobinClusterSelectionStrategy; import com.orientechnologies.orient.core.metadata.security.ORole; import com.orientechnologies.orient.core.metadata.security.ORule; import com.orientechnologies.orient.core.metadata.security.OSecurityShared; import com.orientechnologies.orient.core.metadata.security.OSecurityUser; import com.orientechnologies.orient.core.record.ORecord; import com.orientechnologies.orient.core.record.ORecordInternal; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.serialization.serializer.record.ORecordSerializerFactory; import com.orientechnologies.orient.core.serialization.serializer.record.string.ORecordSerializerSchemaAware2CSV; import com.orientechnologies.orient.core.sharding.auto.OAutoShardingClusterSelectionStrategy; import com.orientechnologies.orient.core.sql.OCommandSQL; import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery; import com.orientechnologies.orient.core.storage.*; import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage; import com.orientechnologies.orient.core.type.ODocumentWrapper; import com.orientechnologies.orient.core.type.ODocumentWrapperNoClass; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.*; 
import java.util.concurrent.Callable; /** * Schema Class implementation. * * @author Luca Garulli (l.garulli--at--orientechnologies.com) */ @SuppressWarnings("unchecked") public class OClassImpl extends ODocumentWrapperNoClass implements OClass { private static final long serialVersionUID = 1L; private static final int NOT_EXISTENT_CLUSTER_ID = -1; final OSchemaShared owner; private final Map<String, OProperty> properties = new HashMap<String, OProperty>(); private int defaultClusterId = NOT_EXISTENT_CLUSTER_ID; private String name; private String description; private int[] clusterIds; private List<OClassImpl> superClasses = new ArrayList<OClassImpl>(); private int[] polymorphicClusterIds; private List<OClass> subclasses; private float overSize = 0f; private String shortName; private boolean strictMode = false; // @SINCE v1.0rc8 private boolean abstractClass = false; // @SINCE v1.2.0 private Map<String, String> customFields; private volatile OClusterSelectionStrategy clusterSelection; // @SINCE 1.7 private volatile int hashCode; private static Set<String> reserved = new HashSet<String>(); static { // reserved.add("select"); reserved.add("traverse"); reserved.add("insert"); reserved.add("update"); reserved.add("delete"); reserved.add("from"); reserved.add("where"); reserved.add("skip"); reserved.add("limit"); reserved.add("timeout"); } /** * Constructor used in unmarshalling. */ protected OClassImpl(final OSchemaShared iOwner, final String iName) { this(iOwner, new ODocument().setTrackingChanges(false), iName); } protected OClassImpl(final OSchemaShared iOwner, final String iName, final int[] iClusterIds) { this(iOwner, iName); setClusterIds(iClusterIds); defaultClusterId = iClusterIds[0]; if (defaultClusterId == NOT_EXISTENT_CLUSTER_ID) abstractClass = true; if (abstractClass) setPolymorphicClusterIds(OCommonConst.EMPTY_INT_ARRAY); else setPolymorphicClusterIds(iClusterIds); clusterSelection = owner.getClusterSelectionFactory().newInstanceOfDefaultClass(); } /** * Constructor used in unmarshalling. 
*/ protected OClassImpl(final OSchemaShared iOwner, final ODocument iDocument, final String iName) { name = iName; document = iDocument; owner = iOwner; } public static int[] readableClusters(final ODatabaseDocument iDatabase, final int[] iClusterIds) { List<Integer> listOfReadableIds = new ArrayList<Integer>(); boolean all = true; for (int clusterId : iClusterIds) { try { final String clusterName = iDatabase.getClusterNameById(clusterId); iDatabase.checkSecurity(ORule.ResourceGeneric.CLUSTER, ORole.PERMISSION_READ, clusterName); listOfReadableIds.add(clusterId); } catch (OSecurityAccessException securityException) { all = false; // if the cluster is inaccessible it's simply not processed in the list.add } } if (all) // JUST RETURN INPUT ARRAY (FASTER) return iClusterIds; final int[] readableClusterIds = new int[listOfReadableIds.size()]; int index = 0; for (int clusterId : listOfReadableIds) { readableClusterIds[index++] = clusterId; } return readableClusterIds; } @Override public OClusterSelectionStrategy getClusterSelection() { acquireSchemaReadLock(); try { return clusterSelection; } finally { releaseSchemaReadLock(); } } @Override public OClass setClusterSelection(final OClusterSelectionStrategy clusterSelection) { return setClusterSelection(clusterSelection.getName()); } @Override public OClass setClusterSelection(final String value) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` clusterselection '%s'", name, value); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` clusterselection '%s'", name, value); OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setClusterSelectionInternal(value); } else setClusterSelectionInternal(value); return this; } finally { releaseSchemaWriteLock(); } } @Override public <RET extends ODocumentWrapper> RET reload() { return (RET) owner.reload(); } public String getCustom(final String iName) { acquireSchemaReadLock(); try { if (customFields == null) return null; return customFields.get(iName); } finally { releaseSchemaReadLock(); } } public OClassImpl setCustom(final String name, final String value) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` custom %s=%s", getName(), name, value); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` custom %s=%s", getName(), name, value); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setCustomInternal(name, value); } else setCustomInternal(name, value); return this; } finally { releaseSchemaWriteLock(); } } public Map<String, String> getCustomInternal() { acquireSchemaReadLock(); try { if (customFields != null) return Collections.unmodifiableMap(customFields); return null; } finally { 
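      // All read accessors follow this same discipline: acquireSchemaReadLock() before touching
      // shared state, releaseSchemaReadLock() in a finally block, so no failure path can leave
      // the schema lock held.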
releaseSchemaReadLock(); } } public void removeCustom(final String name) { setCustom(name, null); } public void clearCustom() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` custom clear", getName()); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` custom clear", getName()); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); clearCustomInternal(); } else clearCustomInternal(); } finally { releaseSchemaWriteLock(); } } public Set<String> getCustomKeys() { acquireSchemaReadLock(); try { if (customFields != null) return Collections.unmodifiableSet(customFields.keySet()); return new HashSet<String>(); } finally { releaseSchemaReadLock(); } } @Override public boolean hasClusterId(final int clusterId) { return Arrays.binarySearch(clusterIds, clusterId) >= 0; } @Override public boolean hasPolymorphicClusterId(final int clusterId) { return Arrays.binarySearch(polymorphicClusterIds, clusterId) >= 0; } @Override @Deprecated public OClass getSuperClass() { acquireSchemaReadLock(); try { return superClasses.isEmpty() ? null : superClasses.get(0); } finally { releaseSchemaReadLock(); } } @Override @Deprecated public OClass setSuperClass(OClass iSuperClass) { setSuperClasses(iSuperClass != null ? Arrays.asList(iSuperClass) : Collections.EMPTY_LIST); return this; } public String getName() { acquireSchemaReadLock(); try { return name; } finally { releaseSchemaReadLock(); } } @Override public List<OClass> getSuperClasses() { acquireSchemaReadLock(); try { return Collections.unmodifiableList((List<? extends OClass>) superClasses); } finally { releaseSchemaReadLock(); } } @Override public boolean hasSuperClasses() { acquireSchemaReadLock(); try { return !superClasses.isEmpty(); } finally { releaseSchemaReadLock(); } } @Override public List<String> getSuperClassesNames() { acquireSchemaReadLock(); try { List<String> superClassesNames = new ArrayList<String>(superClasses.size()); for (OClassImpl superClass : superClasses) { superClassesNames.add(superClass.getName()); } return superClassesNames; } finally { releaseSchemaReadLock(); } } public OClass setSuperClassesByNames(List<String> classNames) { if (classNames == null) classNames = Collections.EMPTY_LIST; final List<OClass> classes = new ArrayList<OClass>(classNames.size()); final OSchema schema = getDatabase().getMetadata().getSchema(); for (String className : classNames) { classes.add(schema.getClass(decodeClassName(className))); } return setSuperClasses(classes); } @Override public OClass setSuperClasses(final List<? 
extends OClass> classes) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (classes != null) { List<OClass> toCheck = new ArrayList<OClass>(classes); toCheck.add(this); checkParametersConflict(toCheck); } acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); final StringBuilder sb = new StringBuilder(); if (classes != null && classes.size() > 0) { for (OClass superClass : classes) { sb.append('`').append(superClass.getName()).append("`,"); } sb.deleteCharAt(sb.length() - 1); } else sb.append("null"); final String cmd = String.format("alter class `%s` superclasses %s", name, sb); if (storage instanceof OStorageProxy) { database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setSuperClassesInternal(classes); } else setSuperClassesInternal(classes); } finally { releaseSchemaWriteLock(); } return this; } void setSuperClassesInternal(final List<? extends OClass> classes) { acquireSchemaWriteLock(); try { List<OClassImpl> newSuperClasses = new ArrayList<OClassImpl>(); OClassImpl cls; for (OClass superClass : classes) { if (superClass instanceof OClassAbstractDelegate) cls = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate; else cls = (OClassImpl) superClass; if (newSuperClasses.contains(cls)) { throw new OSchemaException("Duplicated superclass '" + cls.getName() + "'"); } newSuperClasses.add(cls); } List<OClassImpl> toAddList = new ArrayList<OClassImpl>(newSuperClasses); toAddList.removeAll(superClasses); List<OClassImpl> toRemoveList = new ArrayList<OClassImpl>(superClasses); toRemoveList.removeAll(newSuperClasses); for (OClassImpl toRemove : toRemoveList) { toRemove.removeBaseClassInternal(this); } for (OClassImpl addTo : toAddList) { addTo.addBaseClass(this); } superClasses.clear(); superClasses.addAll(newSuperClasses); } finally { releaseSchemaWriteLock(); } } @Override public OClass addSuperClass(final OClass superClass) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); checkParametersConflict(superClass); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String .format("alter class `%s` superclass +`%s`", name, superClass != null ? superClass.getName() : null); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String .format("alter class `%s` superclass +`%s`", name, superClass != null ? 
superClass.getName() : null); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); addSuperClassInternal(superClass); } else addSuperClassInternal(superClass); } finally { releaseSchemaWriteLock(); } return this; } void addSuperClassInternal(final OClass superClass) { acquireSchemaWriteLock(); try { final OClassImpl cls; if (superClass instanceof OClassAbstractDelegate) cls = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate; else cls = (OClassImpl) superClass; if (cls != null) { // CHECK THE USER HAS UPDATE PRIVILEGE AGAINST EXTENDING CLASS final OSecurityUser user = getDatabase().getUser(); if (user != null) user.allow(ORule.ResourceGeneric.CLASS, cls.getName(), ORole.PERMISSION_UPDATE); if (superClasses.contains(superClass)) { throw new OSchemaException( "Class: '" + this.getName() + "' already has the class '" + superClass.getName() + "' as superclass"); } cls.addBaseClass(this); superClasses.add(cls); } } finally { releaseSchemaWriteLock(); } } @Override public OClass removeSuperClass(OClass superClass) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String .format("alter class `%s` superclass -`%s`", name, superClass != null ? superClass.getName() : null); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String .format("alter class `%s` superclass -`%s`", name, superClass != null ? superClass.getName() : null); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); removeSuperClassInternal(superClass); } else removeSuperClassInternal(superClass); } finally { releaseSchemaWriteLock(); } return this; } void removeSuperClassInternal(final OClass superClass) { acquireSchemaWriteLock(); try { final OClassImpl cls; if (superClass instanceof OClassAbstractDelegate) cls = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate; else cls = (OClassImpl) superClass; if (superClasses.contains(cls)) { if (cls != null) cls.removeBaseClassInternal(this); superClasses.remove(superClass); } } finally { releaseSchemaWriteLock(); } } public OClass setName(final String name) { if (getName().equals(name)) return this; getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); final Character wrongCharacter = OSchemaShared.checkClassNameIfValid(name); OClass oClass = getDatabase().getMetadata().getSchema().getClass(name); if (oClass != null) { String error = String.format("Cannot rename class %s to %s. A Class with name %s exists", this.name, name, name); throw new OSchemaException(error); } if (wrongCharacter != null) throw new OSchemaException( "Invalid class name found. 
Character '" + wrongCharacter + "' cannot be used in class name '" + name + "'");

  acquireSchemaWriteLock();
  try {
    final ODatabaseDocumentInternal database = getDatabase();
    final OStorage storage = database.getStorage();

    if (storage instanceof OStorageProxy) {
      final String cmd = String.format("alter class `%s` name `%s`", this.name, name);
      database.command(new OCommandSQL(cmd)).execute();
    } else if (isDistributedCommand()) {
      final String cmd = String.format("alter class `%s` name `%s`", this.name, name);

      final OCommandSQL commandSQL = new OCommandSQL(cmd);
      commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId());
      database.command(commandSQL).execute();

      setNameInternal(name);
    } else
      setNameInternal(name);
  } finally {
    releaseSchemaWriteLock();
  }

  return this;
}

public long getSize() {
  acquireSchemaReadLock();
  try {
    long size = 0;
    for (int clusterId : clusterIds)
      size += getDatabase().getClusterRecordSizeById(clusterId);

    return size;
  } finally {
    releaseSchemaReadLock();
  }
}

public String getShortName() {
  acquireSchemaReadLock();
  try {
    return shortName;
  } finally {
    releaseSchemaReadLock();
  }
}

public OClass setShortName(String shortName) {
  if (shortName != null) {
    shortName = shortName.trim();
    if (shortName.isEmpty())
      shortName = null;
  }

  getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE);

  acquireSchemaWriteLock();
  try {
    final ODatabaseDocumentInternal database = getDatabase();
    final OStorage storage = database.getStorage();

    if (storage instanceof OStorageProxy) {
      final String cmd = String.format("alter class `%s` shortname %s", name, shortName);
      database.command(new OCommandSQL(cmd)).execute();
    } else if (isDistributedCommand()) {
      final String cmd = String.format("alter class `%s` shortname %s", name, shortName);

      final OCommandSQL commandSQL = new OCommandSQL(cmd);
      commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId());

      database.command(commandSQL).execute();

      setShortNameInternal(shortName);
    } else
      setShortNameInternal(shortName);
  } finally {
    releaseSchemaWriteLock();
  }

  return this;
}

public String getDescription() {
  acquireSchemaReadLock();
  try {
    return description;
  } finally {
    releaseSchemaReadLock();
  }
}

public OClass setDescription(String iDescription) {
  if (iDescription != null) {
    iDescription = iDescription.trim();
    if (iDescription.isEmpty())
      iDescription = null;
  }

  getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE);

  acquireSchemaWriteLock();
  try {
    final ODatabaseDocumentInternal database = getDatabase();
    final OStorage storage = database.getStorage();

    if (storage instanceof OStorageProxy) {
      final String cmd = String.format("alter class `%s` description %s", name, iDescription);
      database.command(new OCommandSQL(cmd)).execute();
    } else if (isDistributedCommand()) {
      final String cmd = String.format("alter class `%s` description %s", name, iDescription);

      final OCommandSQL commandSQL = new OCommandSQL(cmd);
      commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId());

      database.command(commandSQL).execute();

      setDescriptionInternal(iDescription);
    } else
      setDescriptionInternal(iDescription);
  } finally {
    releaseSchemaWriteLock();
  }

  return this;
}

public String getStreamableName() {
  acquireSchemaReadLock();
  try {
    return shortName != null ?
shortName : name; } finally { releaseSchemaReadLock(); } } public Collection<OProperty> declaredProperties() { acquireSchemaReadLock(); try { return Collections.unmodifiableCollection(properties.values()); } finally { releaseSchemaReadLock(); } } public Map<String, OProperty> propertiesMap() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_READ); acquireSchemaReadLock(); try { final Map<String, OProperty> props = new HashMap<String, OProperty>(20); propertiesMap(props, true); return props; } finally { releaseSchemaReadLock(); } } private void propertiesMap(Map<String, OProperty> propertiesMap, boolean keepCase) { for (OProperty p : properties.values()) { String propName = p.getName(); if (!keepCase) propName = propName.toLowerCase(); if (!propertiesMap.containsKey(propName)) propertiesMap.put(propName, p); } for (OClassImpl superClass : superClasses) { superClass.propertiesMap(propertiesMap, keepCase); } } public Collection<OProperty> properties() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_READ); acquireSchemaReadLock(); try { final Collection<OProperty> props = new ArrayList<OProperty>(); properties(props); return props; } finally { releaseSchemaReadLock(); } } private void properties(Collection<OProperty> properties) { properties.addAll(this.properties.values()); for (OClassImpl superClass : superClasses) { superClass.properties(properties); } } public void getIndexedProperties(Collection<OProperty> indexedProperties) { for (OProperty p : properties.values()) if (areIndexed(p.getName())) indexedProperties.add(p); for (OClassImpl superClass : superClasses) { superClass.getIndexedProperties(indexedProperties); } } @Override public Collection<OProperty> getIndexedProperties() { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_READ); acquireSchemaReadLock(); try { Collection<OProperty> indexedProps = new HashSet<OProperty>(); getIndexedProperties(indexedProps); return indexedProps; } finally { releaseSchemaReadLock(); } } public OProperty getProperty(String propertyName) { acquireSchemaReadLock(); try { propertyName = propertyName.toLowerCase(); OProperty p = properties.get(propertyName); if (p != null) return p; for (int i = 0; i < superClasses.size() && p == null; i++) { p = superClasses.get(i).getProperty(propertyName); } return p; } finally { releaseSchemaReadLock(); } } public OProperty createProperty(final String iPropertyName, final OType iType) { return addProperty(iPropertyName, iType, null, null, false); } public OProperty createProperty(final String iPropertyName, final OType iType, final OClass iLinkedClass) { if (iLinkedClass == null) throw new OSchemaException("Missing linked class"); return addProperty(iPropertyName, iType, null, iLinkedClass, false); } public OProperty createProperty(final String iPropertyName, final OType iType, final OClass iLinkedClass, final boolean unsafe) { if (iLinkedClass == null) throw new OSchemaException("Missing linked class"); return addProperty(iPropertyName, iType, null, iLinkedClass, unsafe); } public OProperty createProperty(final String iPropertyName, final OType iType, final OType iLinkedType) { return addProperty(iPropertyName, iType, iLinkedType, null, false); } public OProperty createProperty(final String iPropertyName, final OType iType, final OType iLinkedType, final boolean unsafe) { return addProperty(iPropertyName, iType, iLinkedType, null, unsafe); } @Override public boolean existsProperty(String propertyName) { acquireSchemaReadLock(); try { 
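    // Property lookup is case-insensitive: names are stored lower-cased in the properties map
    // (see addPropertyInternal), so the requested name is normalized before the lookup.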
propertyName = propertyName.toLowerCase(); boolean result = properties.containsKey(propertyName); if (result) return true; for (OClassImpl superClass : superClasses) { result = superClass.existsProperty(propertyName); if (result) return true; } return false; } finally { releaseSchemaReadLock(); } } public void dropProperty(final String propertyName) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot drop a property inside a transaction"); getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_DELETE); final String lowerName = propertyName.toLowerCase(); acquireSchemaWriteLock(); try { if (!properties.containsKey(lowerName)) throw new OSchemaException("Property '" + propertyName + "' not found in class '" + name + "'"); final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { database.command(new OCommandSQL("drop property " + name + '.' + propertyName)).execute(); } else if (isDistributedCommand()) { final OCommandSQL commandSQL = new OCommandSQL("drop property " + name + '.' + propertyName); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { dropPropertyInternal(propertyName); return null; } }); } else OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { dropPropertyInternal(propertyName); return null; } }); } finally { releaseSchemaWriteLock(); } } @Override public void fromStream() { subclasses = null; superClasses.clear(); name = document.field("name"); if (document.containsField("shortName")) shortName = document.field("shortName"); else shortName = null; if (document.containsField("description")) description = document.field("description"); else description = null; defaultClusterId = document.field("defaultClusterId"); if (document.containsField("strictMode")) strictMode = document.field("strictMode"); else strictMode = false; if (document.containsField("abstract")) abstractClass = document.field("abstract"); else abstractClass = false; if (document.field("overSize") != null) overSize = document.field("overSize"); else overSize = 0f; final Object cc = document.field("clusterIds"); if (cc instanceof Collection<?>) { final Collection<Integer> coll = document.field("clusterIds"); clusterIds = new int[coll.size()]; int i = 0; for (final Integer item : coll) clusterIds[i++] = item; } else clusterIds = (int[]) cc; Arrays.sort(clusterIds); if (clusterIds.length == 1 && clusterIds[0] == -1) setPolymorphicClusterIds(OCommonConst.EMPTY_INT_ARRAY); else setPolymorphicClusterIds(clusterIds); // READ PROPERTIES OPropertyImpl prop; final Map<String, OProperty> newProperties = new HashMap<String, OProperty>(); final Collection<ODocument> storedProperties = document.field("properties"); if (storedProperties != null) for (OIdentifiable id : storedProperties) { ODocument p = id.getRecord(); prop = new OPropertyImpl(this, p); prop.fromStream(); if (properties.containsKey(prop.getName())) { prop = (OPropertyImpl) properties.get(prop.getName().toLowerCase()); prop.fromStream(p); } newProperties.put(prop.getName().toLowerCase(), prop); } properties.clear(); properties.putAll(newProperties); customFields = document.field("customFields", OType.EMBEDDEDMAP); clusterSelection = 
owner.getClusterSelectionFactory().getStrategy((String) document.field("clusterSelection")); } @Override @OBeforeSerialization public ODocument toStream() { document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING); try { document.field("name", name); document.field("shortName", shortName); document.field("description", description); document.field("defaultClusterId", defaultClusterId); document.field("clusterIds", clusterIds); document.field("clusterSelection", clusterSelection.getName()); document.field("overSize", overSize); document.field("strictMode", strictMode); document.field("abstract", abstractClass); final Set<ODocument> props = new LinkedHashSet<ODocument>(); for (final OProperty p : properties.values()) { props.add(((OPropertyImpl) p).toStream()); } document.field("properties", props, OType.EMBEDDEDSET); if (superClasses.isEmpty()) { // Single super class is deprecated! document.field("superClass", null, OType.STRING); document.field("superClasses", null, OType.EMBEDDEDLIST); } else { // Single super class is deprecated! document.field("superClass", superClasses.get(0).getName(), OType.STRING); List<String> superClassesNames = new ArrayList<String>(); for (OClassImpl superClass : superClasses) { superClassesNames.add(superClass.getName()); } document.field("superClasses", superClassesNames, OType.EMBEDDEDLIST); } document.field("customFields", customFields != null && customFields.size() > 0 ? customFields : null, OType.EMBEDDEDMAP); } finally { document.setInternalStatus(ORecordElement.STATUS.LOADED); } return document; } @Override public int getClusterForNewInstance(final ODocument doc) { acquireSchemaReadLock(); try { return clusterSelection.getCluster(this, doc); } finally { releaseSchemaReadLock(); } } public int getDefaultClusterId() { acquireSchemaReadLock(); try { return defaultClusterId; } finally { releaseSchemaReadLock(); } } public void setDefaultClusterId(final int defaultClusterId) { acquireSchemaWriteLock(); try { checkEmbedded(); this.defaultClusterId = defaultClusterId; } finally { releaseSchemaWriteLock(); } } public int[] getClusterIds() { acquireSchemaReadLock(); try { return clusterIds; } finally { releaseSchemaReadLock(); } } public int[] getPolymorphicClusterIds() { acquireSchemaReadLock(); try { return Arrays.copyOf(polymorphicClusterIds, polymorphicClusterIds.length); } finally { releaseSchemaReadLock(); } } private void setPolymorphicClusterIds(final int[] iClusterIds) { Set<Integer> set = new TreeSet<Integer>(); for (int iClusterId : iClusterIds) { set.add(iClusterId); } polymorphicClusterIds = new int[set.size()]; int i = 0; for (Integer clusterId : set) { polymorphicClusterIds[i] = clusterId; i++; } } public void renameProperty(final String iOldName, final String iNewName) { final OProperty p = properties.remove(iOldName.toLowerCase()); if (p != null) properties.put(iNewName.toLowerCase(), p); } public OClass addClusterId(final int clusterId) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (isAbstract()) { throw new OSchemaException("Impossible to associate a cluster to an abstract class"); } acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` addcluster %d", name, clusterId); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` addcluster 
%d", name, clusterId); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); addClusterIdInternal(clusterId); } else addClusterIdInternal(clusterId); } finally { releaseSchemaWriteLock(); } return this; } public static OClass addClusters(final OClass cls, final int iClusters) { final String clusterBase = cls.getName().toLowerCase() + "_"; for (int i = 1; i < iClusters; ++i) { cls.addCluster(clusterBase + i); } return cls; } @Override public OClass addCluster(final String clusterNameOrId) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (isAbstract()) { throw new OSchemaException("Impossible to associate a cluster to an abstract class class"); } acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` addcluster `%s`", name, clusterNameOrId); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final int clusterId = owner.createClusterIfNeeded(clusterNameOrId); addClusterIdInternal(clusterId); final String cmd = String.format("alter class `%s` addcluster `%s`", name, clusterNameOrId); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); } else { final int clusterId = owner.createClusterIfNeeded(clusterNameOrId); addClusterIdInternal(clusterId); } } finally { releaseSchemaWriteLock(); } return this; } /** * {@inheritDoc} */ @Override public OClass truncateCluster(String clusterName) { getDatabase().checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_DELETE, name); acquireSchemaReadLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("truncate cluster %s", clusterName); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("truncate cluster %s", clusterName); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); truncateClusterInternal(clusterName, storage); } else truncateClusterInternal(clusterName, storage); } finally { releaseSchemaReadLock(); } return this; } private void truncateClusterInternal(final String clusterName, final OStorage storage) { final OCluster cluster = storage.getClusterByName(clusterName); if (cluster == null) { throw new ODatabaseException("Cluster with name " + clusterName + " does not exist"); } try { cluster.truncate(); } catch (IOException e) { throw OException.wrapException(new ODatabaseException("Error during truncate of cluster " + clusterName), e); } for (OIndex index : getIndexes()) { index.rebuild(); } } public OClass removeClusterId(final int clusterId) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (clusterIds.length == 1 && clusterId == clusterIds[0]) throw new ODatabaseException(" Impossible to remove the last cluster of class '" + getName() + "' drop the class instead"); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof 
OStorageProxy) { final String cmd = String.format("alter class `%s` removecluster %d", name, clusterId); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` removecluster %d", name, clusterId); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); removeClusterIdInternal(clusterId); } else removeClusterIdInternal(clusterId); } finally { releaseSchemaWriteLock(); } return this; } public Collection<OClass> getSubclasses() { acquireSchemaReadLock(); try { if (subclasses == null || subclasses.size() == 0) return Collections.emptyList(); return Collections.unmodifiableCollection(subclasses); } finally { releaseSchemaReadLock(); } } public Collection<OClass> getAllSubclasses() { acquireSchemaReadLock(); try { final Set<OClass> set = new HashSet<OClass>(); if (subclasses != null) { set.addAll(subclasses); for (OClass c : subclasses) set.addAll(c.getAllSubclasses()); } return set; } finally { releaseSchemaReadLock(); } } @Deprecated public Collection<OClass> getBaseClasses() { return getSubclasses(); } @Deprecated public Collection<OClass> getAllBaseClasses() { return getAllSubclasses(); } @Override public Collection<OClass> getAllSuperClasses() { Set<OClass> ret = new HashSet<OClass>(); getAllSuperClasses(ret); return ret; } private void getAllSuperClasses(Set<OClass> set) { set.addAll(superClasses); for (OClassImpl superClass : superClasses) { superClass.getAllSuperClasses(set); } } OClass removeBaseClassInternal(final OClass baseClass) { acquireSchemaWriteLock(); try { checkEmbedded(); if (subclasses == null) return this; if (subclasses.remove(baseClass)) removePolymorphicClusterIds((OClassImpl) baseClass); return this; } finally { releaseSchemaWriteLock(); } } public float getOverSize() { acquireSchemaReadLock(); try { if (overSize > 0) // CUSTOM OVERSIZE SET return overSize; // NO OVERSIZE by default float maxOverSize = 0; float thisOverSize; for (OClassImpl superClass : superClasses) { thisOverSize = superClass.getOverSize(); if (thisOverSize > maxOverSize) maxOverSize = thisOverSize; } return maxOverSize; } finally { releaseSchemaReadLock(); } } public OClass setOverSize(final float overSize) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { // FORMAT FLOAT LOCALE AGNOSTIC final String cmd = String.format("alter class `%s` oversize %s", name, new Float(overSize).toString()); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { // FORMAT FLOAT LOCALE AGNOSTIC final String cmd = String.format("alter class `%s` oversize %s", name, new Float(overSize).toString()); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setOverSizeInternal(overSize); } else setOverSizeInternal(overSize); } finally { releaseSchemaWriteLock(); } return this; } @Override public float getClassOverSize() { acquireSchemaReadLock(); try { return overSize; } finally { releaseSchemaReadLock(); } } public boolean isAbstract() { acquireSchemaReadLock(); try { return abstractClass; } finally { releaseSchemaReadLock(); } } public OClass setAbstract(boolean isAbstract) { 
getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` abstract %s", name, isAbstract); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` abstract %s", name, isAbstract); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setAbstractInternal(isAbstract); } else setAbstractInternal(isAbstract); } finally { releaseSchemaWriteLock(); } return this; } public boolean isStrictMode() { acquireSchemaReadLock(); try { return strictMode; } finally { releaseSchemaReadLock(); } } public OClass setStrictMode(final boolean isStrict) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` strictmode %s", name, isStrict); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` strictmode %s", name, isStrict); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setStrictModeInternal(isStrict); } else setStrictModeInternal(isStrict); } finally { releaseSchemaWriteLock(); } return this; } @Override public String toString() { acquireSchemaReadLock(); try { return name; } finally { releaseSchemaReadLock(); } } @Override public boolean equals(Object obj) { acquireSchemaReadLock(); try { if (this == obj) return true; if (obj == null) return false; if (!OClass.class.isAssignableFrom(obj.getClass())) return false; final OClass other = (OClass) obj; if (name == null) { if (other.getName() != null) return false; } else if (!name.equals(other.getName())) return false; return true; } finally { releaseSchemaReadLock(); } } @Override public int hashCode() { int sh = hashCode; if (sh != 0) return sh; acquireSchemaReadLock(); try { sh = hashCode; if (sh != 0) return sh; calculateHashCode(); return hashCode; } finally { releaseSchemaReadLock(); } } public int compareTo(final OClass o) { acquireSchemaReadLock(); try { return name.compareTo(o.getName()); } finally { releaseSchemaReadLock(); } } public long count() { return count(true); } public long count(final boolean isPolymorphic) { acquireSchemaReadLock(); try { if (isPolymorphic) return getDatabase().countClusterElements(readableClusters(getDatabase(), polymorphicClusterIds)); return getDatabase().countClusterElements(readableClusters(getDatabase(), clusterIds)); } finally { releaseSchemaReadLock(); } } /** * Truncates all the clusters the class uses. 
* * @throws IOException */ public void truncate() throws IOException { getDatabase().checkSecurity(ORule.ResourceGeneric.CLASS, ORole.PERMISSION_UPDATE); if (isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME)) { throw new OSecurityException( "Class '" + getName() + "' cannot be truncated because it has record level security enabled (extends '" + OSecurityShared.RESTRICTED_CLASSNAME + "')"); } final OStorage storage = getDatabase().getStorage(); acquireSchemaReadLock(); try { for (int id : clusterIds) storage.getClusterById(id).truncate(); for (OIndex<?> index : getClassIndexes()) index.clear(); Set<OIndex<?>> superclassIndexes = new HashSet<OIndex<?>>(); superclassIndexes.addAll(getIndexes()); superclassIndexes.removeAll(getClassIndexes()); for (OIndex index : superclassIndexes) { index.rebuild(); } } finally { releaseSchemaReadLock(); } } /** * Check if the current instance extends the specified schema class. * * @param iClassName name of the class that should be checked * @return Returns true if the current instance extends the passed schema class (iClass) * @see #isSuperClassOf(OClass) */ public boolean isSubClassOf(final String iClassName) { acquireSchemaReadLock(); try { if (iClassName == null) return false; if (iClassName.equalsIgnoreCase(getName()) || iClassName.equalsIgnoreCase(getShortName())) return true; for (OClassImpl superClass : superClasses) { if (superClass.isSubClassOf(iClassName)) return true; } return false; } finally { releaseSchemaReadLock(); } } /** * Check if the current instance extends the specified schema class. * * @param clazz to check * @return true if the current instance extends the passed schema class (iClass) * @see #isSuperClassOf(OClass) */ public boolean isSubClassOf(final OClass clazz) { acquireSchemaReadLock(); try { if (clazz == null) return false; if (equals(clazz)) return true; for (OClassImpl superClass : superClasses) { if (superClass.isSubClassOf(clazz)) return true; } return false; } finally { releaseSchemaReadLock(); } } /** * Returns true if the passed schema class (iClass) extends the current instance. * * @param clazz to check * @return Returns true if the passed schema class extends the current instance * @see #isSubClassOf(OClass) */ public boolean isSuperClassOf(final OClass clazz) { return clazz != null && clazz.isSubClassOf(this); } public Object get(final ATTRIBUTES iAttribute) { if (iAttribute == null) throw new IllegalArgumentException("attribute is null"); switch (iAttribute) { case NAME: return getName(); case SHORTNAME: return getShortName(); case SUPERCLASS: return getSuperClass(); case SUPERCLASSES: return getSuperClasses(); case OVERSIZE: return getOverSize(); case STRICTMODE: return isStrictMode(); case ABSTRACT: return isAbstract(); case CLUSTERSELECTION: return getClusterSelection(); case CUSTOM: return getCustomInternal(); case DESCRIPTION: return getDescription(); } throw new IllegalArgumentException("Cannot find attribute '" + iAttribute + "'"); } public OClass set(final ATTRIBUTES attribute, final Object iValue) { if (attribute == null) throw new IllegalArgumentException("attribute is null"); final String stringValue = iValue != null ? 
iValue.toString() : null; final boolean isNull = stringValue == null || stringValue.equalsIgnoreCase("NULL"); switch (attribute) { case NAME: setName(decodeClassName(stringValue)); break; case SHORTNAME: setShortName(decodeClassName(stringValue)); break; case SUPERCLASS: if (stringValue == null) throw new IllegalArgumentException("Superclass is null"); if (stringValue.startsWith("+")) { addSuperClass(getDatabase().getMetadata().getSchema().getClass(decodeClassName(stringValue.substring(1)))); } else if (stringValue.startsWith("-")) { removeSuperClass(getDatabase().getMetadata().getSchema().getClass(decodeClassName(stringValue.substring(1)))); } else { setSuperClass(getDatabase().getMetadata().getSchema().getClass(decodeClassName(stringValue))); } break; case SUPERCLASSES: setSuperClassesByNames(stringValue != null ? Arrays.asList(stringValue.split(",\\s*")) : null); break; case OVERSIZE: setOverSize(Float.parseFloat(stringValue)); break; case STRICTMODE: setStrictMode(Boolean.parseBoolean(stringValue)); break; case ABSTRACT: setAbstract(Boolean.parseBoolean(stringValue)); break; case ADDCLUSTER: { addCluster(stringValue); break; } case REMOVECLUSTER: int clId = owner.getClusterId(stringValue); if (clId == NOT_EXISTENT_CLUSTER_ID) throw new IllegalArgumentException("Cluster id '" + stringValue + "' cannot be removed"); removeClusterId(clId); break; case CLUSTERSELECTION: setClusterSelection(stringValue); break; case CUSTOM: int indx = stringValue != null ? stringValue.indexOf('=') : -1; if (indx < 0) { if (isNull || "clear".equalsIgnoreCase(stringValue)) { clearCustom(); } else throw new IllegalArgumentException("Syntax error: expected <name> = <value> or clear, instead found: " + iValue); } else { String customName = stringValue.substring(0, indx).trim(); String customValue = stringValue.substring(indx + 1).trim(); if (isQuoted(customValue)) { customValue = removeQuotes(customValue); } if (customValue.isEmpty()) removeCustom(customName); else setCustom(customName, customValue); } break; case DESCRIPTION: setDescription(stringValue); break; case ENCRYPTION: setEncryption(stringValue); break; } return this; } private String removeQuotes(String s) { s = s.trim(); return s.substring(1, s.length() - 1); } private boolean isQuoted(String s) { s = s.trim(); if (s.startsWith("\"") && s.endsWith("\"")) return true; if (s.startsWith("'") && s.endsWith("'")) return true; if (s.startsWith("`") && s.endsWith("`")) return true; return false; } public OClassImpl setEncryption(final String iValue) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { final String cmd = String.format("alter class `%s` encryption %s", name, iValue); database.command(new OCommandSQL(cmd)).execute(); } else if (isDistributedCommand()) { final String cmd = String.format("alter class `%s` encryption %s", name, iValue); final OCommandSQL commandSQL = new OCommandSQL(cmd); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); setEncryptionInternal(iValue); } else setEncryptionInternal(iValue); } finally { releaseSchemaWriteLock(); } return this; } protected void setEncryptionInternal(final String iValue) { for (int cl : getClusterIds()) { final OCluster c = getDatabase().getStorage().getClusterById(cl); if (c != null) try { c.set(OCluster.ATTRIBUTES.ENCRYPTION, 
iValue); } catch (IOException e) { /* ignored: encryption is propagated to each cluster on a best-effort basis */ } } } public OPropertyImpl addPropertyInternal(final String name, final OType type, final OType linkedType, final OClass linkedClass, final boolean unsafe) { if (name == null || name.length() == 0) throw new OSchemaException("Property name is null or empty"); if (!unsafe) checkPersistentPropertyType(getDatabase(), name, type); final String lowerName = name.toLowerCase(); final OPropertyImpl prop; // These checks are duplicated because they are also used by SQL commands if (linkedType != null) OPropertyImpl.checkLinkTypeSupport(type); if (linkedClass != null) OPropertyImpl.checkSupportLinkedClass(type); acquireSchemaWriteLock(); try { checkEmbedded(); if (properties.containsKey(lowerName)) throw new OSchemaException("Class '" + this.name + "' already has property '" + name + "'"); OGlobalProperty global = owner.findOrCreateGlobalProperty(name, type); prop = new OPropertyImpl(this, global); properties.put(lowerName, prop); if (linkedType != null) prop.setLinkedTypeInternal(linkedType); else if (linkedClass != null) prop.setLinkedClassInternal(linkedClass); } finally { releaseSchemaWriteLock(); } if (prop != null && !unsafe) fireDatabaseMigration(getDatabase(), name, type); return prop; } public OIndex<?> createIndex(final String iName, final INDEX_TYPE iType, final String... fields) { return createIndex(iName, iType.name(), fields); } public OIndex<?> createIndex(final String iName, final String iType, final String... fields) { return createIndex(iName, iType, null, null, fields); } public OIndex<?> createIndex(final String iName, final INDEX_TYPE iType, final OProgressListener iProgressListener, final String... fields) { return createIndex(iName, iType.name(), iProgressListener, null, fields); } public OIndex<?> createIndex(String iName, String iType, OProgressListener iProgressListener, ODocument metadata, String... fields) { return createIndex(iName, iType, iProgressListener, metadata, null, fields); } public OIndex<?> createIndex(final String name, String type, final OProgressListener progressListener, ODocument metadata, String algorithm, final String... fields) { if (type == null) throw new IllegalArgumentException("Index type is null"); type = type.toUpperCase(); if (fields.length == 0) { throw new OIndexException("List of fields to index cannot be empty."); } final String localName = this.name; final int[] localPolymorphicClusterIds = polymorphicClusterIds; for (final String fieldToIndex : fields) { final String fieldName = decodeClassName(OIndexDefinitionFactory.extractFieldName(fieldToIndex)); if (!fieldName.equals("@rid") && !existsProperty(fieldName)) throw new OIndexException( "Index with name '" + name + "' cannot be created on class '" + localName + "' because the field '" + fieldName + "' is absent in class definition"); } final OIndexDefinition indexDefinition = OIndexDefinitionFactory .createIndexDefinition(this, Arrays.asList(fields), extractFieldTypes(fields), null, type, algorithm); return getDatabase().getMetadata().getIndexManager() .createIndex(name, type, indexDefinition, localPolymorphicClusterIds, progressListener, metadata, algorithm); } public boolean areIndexed(final String... 
fields) { return areIndexed(Arrays.asList(fields)); } public boolean areIndexed(final Collection<String> fields) { final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager(); acquireSchemaReadLock(); try { final boolean currentClassResult = indexManager.areIndexed(name, fields); if (currentClassResult) return true; for (OClassImpl superClass : superClasses) { if (superClass.areIndexed(fields)) return true; } return false; } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getInvolvedIndexes(final String... fields) { return getInvolvedIndexes(Arrays.asList(fields)); } public Set<OIndex<?>> getInvolvedIndexes(final Collection<String> fields) { acquireSchemaReadLock(); try { final Set<OIndex<?>> result = new HashSet<OIndex<?>>(getClassInvolvedIndexes(fields)); for (OClassImpl superClass : superClasses) { result.addAll(superClass.getInvolvedIndexes(fields)); } return result; } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getClassInvolvedIndexes(final Collection<String> fields) { final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager(); acquireSchemaReadLock(); try { return indexManager.getClassInvolvedIndexes(name, fields); } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getClassInvolvedIndexes(final String... fields) { return getClassInvolvedIndexes(Arrays.asList(fields)); } public OIndex<?> getClassIndex(final String name) { acquireSchemaReadLock(); try { return getDatabase().getMetadata().getIndexManager().getClassIndex(this.name, name); } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getClassIndexes() { acquireSchemaReadLock(); try { final OIndexManagerProxy idxManager = getDatabase().getMetadata().getIndexManager(); if (idxManager == null) return new HashSet<OIndex<?>>(); return idxManager.getClassIndexes(name); } finally { releaseSchemaReadLock(); } } @Override public void getClassIndexes(final Collection<OIndex<?>> indexes) { acquireSchemaReadLock(); try { final OIndexManagerProxy idxManager = getDatabase().getMetadata().getIndexManager(); if (idxManager == null) return; idxManager.getClassIndexes(name, indexes); } finally { releaseSchemaReadLock(); } } @Override public OIndex<?> getAutoShardingIndex() { final ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined(); return db != null ? 
db.getMetadata().getIndexManager().getClassAutoShardingIndex(name) : null; } @Override public boolean isEdgeType() { return isSubClassOf(EDGE_CLASS_NAME); } @Override public boolean isVertexType() { return isSubClassOf(VERTEX_CLASS_NAME); } public void onPostIndexManagement() { final OIndex<?> autoShardingIndex = getAutoShardingIndex(); if (autoShardingIndex != null) { if (!getDatabase().getStorage().isRemote()) { // OVERRIDE CLUSTER SELECTION acquireSchemaWriteLock(); try { this.clusterSelection = new OAutoShardingClusterSelectionStrategy(this, autoShardingIndex); } finally { releaseSchemaWriteLock(); } } } else if (clusterSelection instanceof OAutoShardingClusterSelectionStrategy) { // REMOVE AUTO SHARDING CLUSTER SELECTION acquireSchemaWriteLock(); try { this.clusterSelection = new ORoundRobinClusterSelectionStrategy(); } finally { releaseSchemaWriteLock(); } } } @Override public void getIndexes(final Collection<OIndex<?>> indexes) { acquireSchemaReadLock(); try { getClassIndexes(indexes); for (OClass superClass : superClasses) { superClass.getIndexes(indexes); } } finally { releaseSchemaReadLock(); } } public Set<OIndex<?>> getIndexes() { final Set<OIndex<?>> indexes = new HashSet<OIndex<?>>(); getIndexes(indexes); return indexes; } public void acquireSchemaReadLock() { owner.acquireSchemaReadLock(); } public void releaseSchemaReadLock() { owner.releaseSchemaReadLock(); } public void acquireSchemaWriteLock() { owner.acquireSchemaWriteLock(); } public void releaseSchemaWriteLock() { releaseSchemaWriteLock(true); } public void releaseSchemaWriteLock(final boolean iSave) { calculateHashCode(); owner.releaseSchemaWriteLock(iSave); } public void checkEmbedded() { owner.checkEmbedded(getDatabase().getStorage().getUnderlying().getUnderlying()); } public void setClusterSelectionInternal(final String clusterSelection) { // AVOID TO CHECK THIS IN LOCK TO AVOID RE-GENERATION OF IMMUTABLE SCHEMAS if (this.clusterSelection.getName().equals(clusterSelection)) // NO CHANGES return; acquireSchemaWriteLock(); try { checkEmbedded(); this.clusterSelection = owner.getClusterSelectionFactory().newInstance(clusterSelection); } finally { releaseSchemaWriteLock(); } } public void setClusterSelectionInternal(final OClusterSelectionStrategy iClusterSelection) { // AVOID TO CHECK THIS IN LOCK TO AVOID RE-GENERATION OF IMMUTABLE SCHEMAS if (this.clusterSelection.getName().equals(iClusterSelection.getName())) // NO CHANGES return; acquireSchemaWriteLock(); try { checkEmbedded(); this.clusterSelection = iClusterSelection; } finally { releaseSchemaWriteLock(); } } public void fireDatabaseMigration(final ODatabaseDocument database, final String propertyName, final OType type) { final boolean strictSQL = ((ODatabaseInternal) database).getStorage().getConfiguration().isStrictSql(); database.query(new OSQLAsynchQuery<Object>( "select from " + getEscapedName(name, strictSQL) + " where " + getEscapedName(propertyName, strictSQL) + ".type() <> \"" + type.name() + "\"", new OCommandResultListener() { @Override public boolean result(Object iRecord) { final ODocument record = ((OIdentifiable) iRecord).getRecord(); record.field(propertyName, record.field(propertyName), type); database.save(record); return true; } @Override public void end() { } @Override public Object getResult() { return null; } })); } public void firePropertyNameMigration(final ODatabaseDocument database, final String propertyName, final String newPropertyName, final OType type) { final boolean strictSQL = ((ODatabaseInternal) 
database).getStorage().getConfiguration().isStrictSql(); database.query(new OSQLAsynchQuery<Object>( "select from " + getEscapedName(name, strictSQL) + " where " + getEscapedName(propertyName, strictSQL) + " is not null ", new OCommandResultListener() { @Override public boolean result(Object iRecord) { final ODocument record = ((OIdentifiable) iRecord).getRecord(); record.setFieldType(propertyName, type); record.field(newPropertyName, record.field(propertyName), type); database.save(record); return true; } @Override public void end() { } @Override public Object getResult() { return null; } })); } public void checkPersistentPropertyType(final ODatabaseInternal<ORecord> database, final String propertyName, final OType type) { final boolean strictSQL = database.getStorage().getConfiguration().isStrictSql(); final StringBuilder builder = new StringBuilder(256); builder.append("select count(*) from "); builder.append(getEscapedName(name, strictSQL)); builder.append(" where "); builder.append(getEscapedName(propertyName, strictSQL)); builder.append(".type() not in ["); final Iterator<OType> cur = type.getCastable().iterator(); while (cur.hasNext()) { builder.append('"').append(cur.next().name()).append('"'); if (cur.hasNext()) builder.append(","); } builder.append("] and ").append(getEscapedName(propertyName, strictSQL)).append(" is not null "); if (type.isMultiValue()) builder.append(" and ").append(getEscapedName(propertyName, strictSQL)).append(".size() <> 0 limit 1"); final List<ODocument> res = database.command(new OCommandSQL(builder.toString())).execute(); if (((Long) res.get(0).field("count")) > 0) throw new OSchemaException("The database contains some schema-less data in the property '" + name + "." + propertyName + "' that is not compatible with the type " + type + ". Fix those records and change the schema again"); } protected String getEscapedName(final String iName, final boolean iStrictSQL) { if (iStrictSQL) // ESCAPE NAME return "`" + iName + "`"; return iName; } public OSchemaShared getOwner() { return owner; } private void calculateHashCode() { int result = super.hashCode(); result = 31 * result + (name != null ? 
name.hashCode() : 0); hashCode = result; } private void setOverSizeInternal(final float overSize) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); this.overSize = overSize; } finally { releaseSchemaWriteLock(); } } private void setCustomInternal(final String name, final String value) { acquireSchemaWriteLock(); try { checkEmbedded(); if (customFields == null) customFields = new HashMap<String, String>(); if (value == null || "null".equalsIgnoreCase(value)) customFields.remove(name); else customFields.put(name, value); } finally { releaseSchemaWriteLock(); } } private void clearCustomInternal() { acquireSchemaWriteLock(); try { checkEmbedded(); customFields = null; } finally { releaseSchemaWriteLock(); } } private void setNameInternal(final String name) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); final String oldName = this.name; owner.changeClassName(this.name, name, this); this.name = name; ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (!database.getStorageVersions().classesAreDetectedByClusterId()) { for (int clusterId : clusterIds) { long[] range = storage.getClusterDataRange(clusterId); OPhysicalPosition[] positions = storage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition(range[0])); do { for (OPhysicalPosition position : positions) { final ORecordId identity = new ORecordId(clusterId, position.clusterPosition); final ORawBuffer record = storage.readRecord(identity, null, true, null).getResult(); if (record.recordType == ODocument.RECORD_TYPE) { final ORecordSerializerSchemaAware2CSV serializer = (ORecordSerializerSchemaAware2CSV) ORecordSerializerFactory .instance().getFormat(ORecordSerializerSchemaAware2CSV.NAME); String persName = new String(record.buffer, "UTF-8"); if (serializer.getClassName(persName).equalsIgnoreCase(name)) { final ODocument document = new ODocument(); document.setLazyLoad(false); document.fromStream(record.buffer); ORecordInternal.setVersion(document, record.version); ORecordInternal.setIdentity(document, identity); document.setClassName(name); document.setDirty(); document.save(); } } if (positions.length > 0) positions = storage.higherPhysicalPositions(clusterId, positions[positions.length - 1]); } } while (positions.length > 0); } } renameCluster(oldName, this.name); } catch (UnsupportedEncodingException e) { throw OException.wrapException(new OSchemaException("Error reading schema"), e); } finally { releaseSchemaWriteLock(); } } private void renameCluster(String oldName, String newName) { oldName = oldName.toLowerCase(); newName = newName.toLowerCase(); final ODatabaseDocumentInternal database = getDatabase(); final OStorage storage = database.getStorage(); if (storage.getClusterIdByName(newName) != -1) return; final int clusterId = storage.getClusterIdByName(oldName); if (clusterId == -1) return; if (!hasClusterId(clusterId)) return; database.command(new OCommandSQL("alter cluster `" + oldName + "` name `" + newName + "`")).execute(); } private void setShortNameInternal(final String iShortName) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); String oldName = null; if (this.shortName != null) oldName = this.shortName; owner.changeClassName(oldName, iShortName, this); this.shortName = iShortName; } finally { releaseSchemaWriteLock(); 
} } private void setDescriptionInternal(final String iDescription) { acquireSchemaWriteLock(); try { checkEmbedded(); this.description = iDescription; } finally { releaseSchemaWriteLock(); } } private void dropPropertyInternal(final String iPropertyName) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot drop a property inside a transaction"); getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_DELETE); acquireSchemaWriteLock(); try { checkEmbedded(); final OProperty prop = properties.remove(iPropertyName.toLowerCase()); if (prop == null) throw new OSchemaException("Property '" + iPropertyName + "' not found in class '" + name + "'"); } finally { releaseSchemaWriteLock(); } } private OClass addClusterIdInternal(final int clusterId) { acquireSchemaWriteLock(); try { checkEmbedded(); owner.checkClusterCanBeAdded(clusterId, this); for (int currId : clusterIds) if (currId == clusterId) // ALREADY ADDED return this; clusterIds = OArrays.copyOf(clusterIds, clusterIds.length + 1); clusterIds[clusterIds.length - 1] = clusterId; Arrays.sort(clusterIds); addPolymorphicClusterId(clusterId); if (defaultClusterId == NOT_EXISTENT_CLUSTER_ID) defaultClusterId = clusterId; owner.addClusterForClass(clusterId, this); return this; } finally { releaseSchemaWriteLock(); } } private void addPolymorphicClusterId(int clusterId) { if (Arrays.binarySearch(polymorphicClusterIds, clusterId) >= 0) return; polymorphicClusterIds = OArrays.copyOf(polymorphicClusterIds, polymorphicClusterIds.length + 1); polymorphicClusterIds[polymorphicClusterIds.length - 1] = clusterId; Arrays.sort(polymorphicClusterIds); addClusterIdToIndexes(clusterId); for (OClassImpl superClass : superClasses) { superClass.addPolymorphicClusterId(clusterId); } } private OClass removeClusterIdInternal(final int clusterToRemove) { acquireSchemaWriteLock(); try { checkEmbedded(); boolean found = false; for (int clusterId : clusterIds) { if (clusterId == clusterToRemove) { found = true; break; } } if (found) { final int[] newClusterIds = new int[clusterIds.length - 1]; for (int i = 0, k = 0; i < clusterIds.length; ++i) { if (clusterIds[i] == clusterToRemove) // JUMP IT continue; newClusterIds[k] = clusterIds[i]; k++; } clusterIds = newClusterIds; removePolymorphicClusterId(clusterToRemove); } if (defaultClusterId == clusterToRemove) { if (clusterIds.length >= 1) defaultClusterId = clusterIds[0]; else defaultClusterId = NOT_EXISTENT_CLUSTER_ID; } owner.removeClusterForClass(clusterToRemove, this); } finally { releaseSchemaWriteLock(); } return this; } private void setAbstractInternal(final boolean isAbstract) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { if (isAbstract) { // SWITCH TO ABSTRACT if (defaultClusterId != NOT_EXISTENT_CLUSTER_ID) { // CHECK if (count() > 0) throw new IllegalStateException("Cannot set the class as abstract because it contains records."); tryDropCluster(defaultClusterId); for (int clusterId : getClusterIds()) { tryDropCluster(clusterId); removePolymorphicClusterId(clusterId); owner.removeClusterForClass(clusterId, this); } setClusterIds(new int[] { NOT_EXISTENT_CLUSTER_ID }); defaultClusterId = NOT_EXISTENT_CLUSTER_ID; } } else { if (!abstractClass) return; int clusterId = getDatabase().getClusterIdByName(name); if (clusterId == -1) clusterId = getDatabase().addCluster(name); this.defaultClusterId = clusterId; this.clusterIds[0] = this.defaultClusterId; this.polymorphicClusterIds = 
Arrays.copyOf(clusterIds, clusterIds.length); for (OClass clazz : getAllSubclasses()) { if (clazz instanceof OClassImpl) { addPolymorphicClusterIds((OClassImpl) clazz); } else { OLogManager.instance().warn(this, "Warning: cannot set polymorphic cluster IDs for class " + name); } } } this.abstractClass = isAbstract; } finally { releaseSchemaWriteLock(); } } private void setStrictModeInternal(final boolean iStrict) { getDatabase().checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); acquireSchemaWriteLock(); try { checkEmbedded(); this.strictMode = iStrict; } finally { releaseSchemaWriteLock(); } } private OProperty addProperty(final String propertyName, final OType type, final OType linkedType, final OClass linkedClass, final boolean unsafe) { if (type == null) throw new OSchemaException("Property type not defined."); if (propertyName == null || propertyName.length() == 0) throw new OSchemaException("Property name is null or empty"); if (getDatabase().getStorage().getConfiguration().isStrictSql()) { validatePropertyName(propertyName); } if (getDatabase().getTransaction().isActive()) throw new OSchemaException("Cannot create property '" + propertyName + "' inside a transaction"); final ODatabaseDocumentInternal database = getDatabase(); database.checkSecurity(ORule.ResourceGeneric.SCHEMA, ORole.PERMISSION_UPDATE); if (linkedType != null) OPropertyImpl.checkLinkTypeSupport(type); if (linkedClass != null) OPropertyImpl.checkSupportLinkedClass(type); acquireSchemaWriteLock(); try { final StringBuilder cmd = new StringBuilder("create property "); // CLASS.PROPERTY NAME if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append(name); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append('.'); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append(propertyName); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); // TYPE cmd.append(' '); cmd.append(type.name); if (linkedType != null) { // TYPE cmd.append(' '); cmd.append(linkedType.name); } else if (linkedClass != null) { // TYPE cmd.append(' '); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); cmd.append(linkedClass.getName()); if (getDatabase().getStorage().getConfiguration().isStrictSql()) cmd.append('`'); } if (unsafe) cmd.append(" unsafe "); final OStorage storage = database.getStorage(); if (storage instanceof OStorageProxy) { database.command(new OCommandSQL(cmd.toString())).execute(); reload(); return getProperty(propertyName); } else if (isDistributedCommand()) { final OCommandSQL commandSQL = new OCommandSQL(cmd.toString()); commandSQL.addExcludedNode(((OAutoshardedStorage) storage).getNodeId()); database.command(commandSQL).execute(); return (OProperty) OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { return addPropertyInternal(propertyName, type, linkedType, linkedClass, unsafe); } }); } else return (OProperty) OScenarioThreadLocal.executeAsDistributed(new Callable<OProperty>() { @Override public OProperty call() throws Exception { return addPropertyInternal(propertyName, type, linkedType, linkedClass, unsafe); } }); } finally { releaseSchemaWriteLock(); } } private void validatePropertyName(final String propertyName) { } private int getClusterId(final String stringValue) { int clId; if (!stringValue.isEmpty() && Character.isDigit(stringValue.charAt(0))) try { clId = 
Integer.parseInt(stringValue); } catch (NumberFormatException e) { clId = getDatabase().getClusterIdByName(stringValue); } else clId = getDatabase().getClusterIdByName(stringValue); return clId; } private void addClusterIdToIndexes(int iId) { if (getDatabase().getStorage().getUnderlying() instanceof OAbstractPaginatedStorage) { final String clusterName = getDatabase().getClusterNameById(iId); final List<String> indexesToAdd = new ArrayList<String>(); for (OIndex<?> index : getIndexes()) indexesToAdd.add(index.getName()); final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager(); for (String indexName : indexesToAdd) indexManager.addClusterToIndex(clusterName, indexName); } } /** * Adds a base class to the current one. It also adds the base class cluster ids to the polymorphic cluster ids array. * * @param iBaseClass The base class to add. */ private OClass addBaseClass(final OClassImpl iBaseClass) { checkRecursion(iBaseClass); if (subclasses == null) subclasses = new ArrayList<OClass>(); if (subclasses.contains(iBaseClass)) return this; subclasses.add(iBaseClass); addPolymorphicClusterIdsWithInheritance(iBaseClass); return this; } private void checkParametersConflict(final OClass baseClass) { final Collection<OProperty> baseClassProperties = baseClass.properties(); for (OProperty property : baseClassProperties) { OProperty thisProperty = getProperty(property.getName()); if (thisProperty != null && !thisProperty.getType().equals(property.getType())) { throw new OSchemaException( "Cannot add base class '" + baseClass.getName() + "', because of property conflict: '" + thisProperty + "' vs '" + property + "'"); } } } protected static void checkParametersConflict(List<OClass> classes) { final Map<String, OProperty> comulative = new HashMap<String, OProperty>(); final Map<String, OProperty> properties = new HashMap<String, OProperty>(); for (OClass superClass : classes) { if (superClass == null) continue; OClassImpl impl; if (superClass instanceof OClassAbstractDelegate) impl = (OClassImpl) ((OClassAbstractDelegate) superClass).delegate; else impl = (OClassImpl) superClass; impl.propertiesMap(properties, false); for (Map.Entry<String, OProperty> entry : properties.entrySet()) { if (comulative.containsKey(entry.getKey())) { final String property = entry.getKey(); final OProperty existingProperty = comulative.get(property); if (!existingProperty.getType().equals(entry.getValue().getType())) { throw new OSchemaException("Properties conflict detected: [" + existingProperty + "] vs [" + entry.getValue() + "]"); } } } comulative.putAll(properties); properties.clear(); } } private void checkRecursion(final OClass baseClass) { if (isSubClassOf(baseClass)) { throw new OSchemaException("Cannot add base class '" + baseClass.getName() + "', because of recursion"); } } private void removePolymorphicClusterIds(final OClassImpl iBaseClass) { for (final int clusterId : iBaseClass.polymorphicClusterIds) removePolymorphicClusterId(clusterId); } private void removePolymorphicClusterId(final int clusterId) { final int index = Arrays.binarySearch(polymorphicClusterIds, clusterId); if (index < 0) return; if (index < polymorphicClusterIds.length - 1) System.arraycopy(polymorphicClusterIds, index + 1, polymorphicClusterIds, index, polymorphicClusterIds.length - (index + 1)); polymorphicClusterIds = Arrays.copyOf(polymorphicClusterIds, polymorphicClusterIds.length - 1); removeClusterFromIndexes(clusterId); for (OClassImpl superClass : superClasses) { 
superClass.removePolymorphicClusterId(clusterId); } } private void removeClusterFromIndexes(final int iId) { if (getDatabase().getStorage().getUnderlying() instanceof OAbstractPaginatedStorage) { final String clusterName = getDatabase().getClusterNameById(iId); final List<String> indexesToRemove = new ArrayList<String>(); for (final OIndex<?> index : getIndexes()) indexesToRemove.add(index.getName()); final OIndexManager indexManager = getDatabase().getMetadata().getIndexManager(); for (final String indexName : indexesToRemove) indexManager.removeClusterFromIndex(clusterName, indexName); } } private void tryDropCluster(final int defaultClusterId) { if (name.toLowerCase().equals(getDatabase().getClusterNameById(defaultClusterId))) { // DROP THE DEFAULT CLUSTER CALLED WITH THE SAME NAME ONLY IF EMPTY if (getDatabase().getClusterRecordSizeById(defaultClusterId) == 0) getDatabase().dropCluster(defaultClusterId, true); } } private ODatabaseDocumentInternal getDatabase() { return ODatabaseRecordThreadLocal.INSTANCE.get(); } /** * Add different cluster id to the "polymorphic cluster ids" array. */ private void addPolymorphicClusterIds(final OClassImpl iBaseClass) { Set<Integer> clusters = new TreeSet<Integer>(); for (int clusterId : polymorphicClusterIds) { clusters.add(clusterId); } for (int clusterId : iBaseClass.polymorphicClusterIds) { if (clusters.add(clusterId)) { try { addClusterIdToIndexes(clusterId); } catch (RuntimeException e) { OLogManager.instance().warn(this, "Error adding clusterId '%i' to index of class '%s'", e, clusterId, getName()); clusters.remove(clusterId); } } } polymorphicClusterIds = new int[clusters.size()]; int i = 0; for (Integer cluster : clusters) { polymorphicClusterIds[i] = cluster; i++; } } private void addPolymorphicClusterIdsWithInheritance(final OClassImpl iBaseClass) { addPolymorphicClusterIds(iBaseClass); for (OClassImpl superClass : superClasses) { superClass.addPolymorphicClusterIdsWithInheritance(iBaseClass); } } public List<OType> extractFieldTypes(final String[] fieldNames) { final List<OType> types = new ArrayList<OType>(fieldNames.length); for (String fieldName : fieldNames) { if (!fieldName.equals("@rid")) types.add(getProperty(decodeClassName(OIndexDefinitionFactory.extractFieldName(fieldName)).toLowerCase()).getType()); else types.add(OType.LINK); } return types; } private OClass setClusterIds(final int[] iClusterIds) { clusterIds = iClusterIds; Arrays.sort(clusterIds); return this; } private boolean isDistributedCommand() { return getDatabase().getStorage() instanceof OAutoshardedStorage && !OScenarioThreadLocal.INSTANCE.isRunModeDistributed(); } public static String decodeClassName(String s) { if (s == null) { return null; } s = s.trim(); if (s.startsWith("`") && s.endsWith("`")) { return s.substring(1, s.length() - 1); } return s; } }
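For orientation, here is a minimal usage sketch (not part of the commit contents above) of how the schema machinery implemented by OClassImpl is typically driven through OrientDB's public schema API; the in-memory database URL and the "Person" class/property names are illustrative assumptions, not taken from this repository:

import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OSchema;
import com.orientechnologies.orient.core.metadata.schema.OType;

public class SchemaSketch {
  public static void main(String[] args) {
    // Create a throwaway in-memory database for the demonstration
    ODatabaseDocumentTx db = new ODatabaseDocumentTx("memory:schemaSketch").create();
    try {
      OSchema schema = db.getMetadata().getSchema();
      OClass person = schema.createClass("Person");            // registers the class and its default cluster
      person.createProperty("name", OType.STRING);             // routed through addProperty(...) shown above
      person.createProperty("age", OType.INTEGER);
      person.createIndex("Person.name", OClass.INDEX_TYPE.UNIQUE, "name"); // createIndex(...) shown above
      person.setStrictMode(true);                              // routed through setStrictMode(...) shown above
    } finally {
      db.drop();
    }
  }
}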
Fixed issue on warning
core/src/main/java/com/orientechnologies/orient/core/metadata/schema/OClassImpl.java
Fixed issue on warning
<ide><path>core/src/main/java/com/orientechnologies/orient/core/metadata/schema/OClassImpl.java <ide> try { <ide> addClusterIdToIndexes(clusterId); <ide> } catch (RuntimeException e) { <del> OLogManager.instance().warn(this, "Error adding clusterId '%i' to index of class '%s'", e, clusterId, getName()); <add> OLogManager.instance().warn(this, "Error adding clusterId '%d' to index of class '%s'", e, clusterId, getName()); <ide> clusters.remove(clusterId); <ide> } <ide> }
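The one-character change above is the whole fix: java.util.Formatter, which backs String.format and format-style logging calls like the one in the diff, defines '%d' as the decimal integer conversion and has no '%i' conversion at all, so the old warning line threw instead of logging. A standalone illustration (not project code) of the failure mode:

public class FormatSketch {
  public static void main(String[] args) {
    // '%d' is the valid decimal conversion
    System.out.println(String.format("Error adding clusterId '%d'", 42)); // prints: Error adding clusterId '42'
    try {
      // '%i' is not a recognized conversion in java.util.Formatter
      String.format("Error adding clusterId '%i'", 42);
    } catch (java.util.UnknownFormatConversionException e) {
      System.out.println("'%i' is rejected: " + e.getMessage()); // Conversion = 'i'
    }
  }
}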
Java
apache-2.0
34e26206786547346545cd558b1a942d389f44c6
0
lisaglendenning/zookeeper-proxy
package edu.uw.zookeeper.proxy; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledExecutorService; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.MapMaker; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigUtil; import edu.uw.zookeeper.EnsembleView; import edu.uw.zookeeper.ZooKeeperApplication; import edu.uw.zookeeper.ServerInetAddressView; import edu.uw.zookeeper.client.ConnectionClientExecutorsService; import edu.uw.zookeeper.client.EnsembleViewFactory; import edu.uw.zookeeper.client.FixedClientConnectionFactory; import edu.uw.zookeeper.client.ServerViewFactory; import edu.uw.zookeeper.common.*; import edu.uw.zookeeper.net.ClientConnectionFactory; import edu.uw.zookeeper.net.CodecConnection; import edu.uw.zookeeper.protocol.ConnectMessage; import edu.uw.zookeeper.protocol.FourLetterRequest; import edu.uw.zookeeper.protocol.FourLetterResponse; import edu.uw.zookeeper.protocol.Message; import edu.uw.zookeeper.protocol.Operation; import edu.uw.zookeeper.protocol.ProtocolCodec; import edu.uw.zookeeper.protocol.ProtocolConnection; import edu.uw.zookeeper.protocol.client.ClientConnectionFactoryBuilder; import edu.uw.zookeeper.protocol.client.ClientProtocolConnection; import edu.uw.zookeeper.protocol.client.MessageClientExecutor; import edu.uw.zookeeper.protocol.client.ZxidTracker; import edu.uw.zookeeper.server.FourLetterRequestProcessor; import edu.uw.zookeeper.server.ProcessorTaskExecutor; import edu.uw.zookeeper.server.SimpleServerExecutor; public class ProxyServerExecutorBuilder extends ZooKeeperApplication.ForwardingBuilder<SimpleServerExecutor<ProxySessionExecutor>, ProxyServerExecutorBuilder.ClientBuilder, ProxyServerExecutorBuilder> { public static ProxyServerExecutorBuilder defaults() { return new ProxyServerExecutorBuilder(null, ClientBuilder.defaults()); } @Configurable(arg="servers", key="servers", value="127.0.0.1:2081", help="address:port,...") public static abstract class EnsembleViewConfiguration { public static EnsembleView<ServerInetAddressView> get(Configuration configuration) { Configurable configurable = getConfigurable(); String value = configuration.withConfigurable(configurable) .getConfigOrEmpty(configurable.path()) .getString(configurable.key()); return ServerInetAddressView.ensembleFromString(value); } public static Configurable getConfigurable() { return EnsembleViewConfiguration.class.getAnnotation(Configurable.class); } public static Configuration set(Configuration configuration, EnsembleView<ServerInetAddressView> value) { Configurable configurable = getConfigurable(); return configuration.withConfig(ConfigFactory.parseMap(ImmutableMap.<String,Object>builder().put(ConfigUtil.joinPath(configurable.path(), configurable.key()), EnsembleView.toString(value)).build())); } protected EnsembleViewConfiguration() {} } public static class FromRequestFactory<C extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> implements DefaultsFactory<ConnectMessage.Request, ListenableFuture<MessageClientExecutor<C>>> { public static <C extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> FromRequestFactory<C> create( Factory<? extends ListenableFuture<? 
extends C>> connections, ScheduledExecutorService executor) { return new FromRequestFactory<C>(connections, executor); } protected final Factory<? extends ListenableFuture<? extends C>> connections; protected final ScheduledExecutorService executor; public FromRequestFactory( Factory<? extends ListenableFuture<? extends C>> connections, ScheduledExecutorService executor) { this.connections = connections; this.executor = executor; } @Override public ListenableFuture<MessageClientExecutor<C>> get() { return get(ConnectMessage.Request.NewRequest.newInstance()); } @Override public ListenableFuture<MessageClientExecutor<C>> get(ConnectMessage.Request request) { return Futures.transform(connections.get(), new Constructor(request), SameThreadExecutor.getInstance()); } protected class Constructor implements Function<C, MessageClientExecutor<C>> { protected final ConnectMessage.Request task; public Constructor(ConnectMessage.Request task) { this.task = task; } @Override public MessageClientExecutor<C> apply(C input) { return MessageClientExecutor.newInstance( task, input, executor); } } } public static class ServerViewFactories implements ParameterizedFactory<ServerInetAddressView, ServerViewFactory<ConnectMessage.Request, ? extends MessageClientExecutor<?>>> { public static ServerViewFactories newInstance( ClientConnectionFactory<? extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> connections, ScheduledExecutorService executor) { return new ServerViewFactories(connections, executor); } protected final ClientConnectionFactory<? extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> connections; protected final ScheduledExecutorService executor; protected ServerViewFactories( ClientConnectionFactory<? extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> connections, ScheduledExecutorService executor) { this.connections = connections; this.executor = executor; } @Override public ServerViewFactory<ConnectMessage.Request, ? extends MessageClientExecutor<?>> get(ServerInetAddressView view) { return ServerViewFactory.create( view, FromRequestFactory.create( FixedClientConnectionFactory.create(view.get(), connections), executor), ZxidTracker.zero()); } } public static class ClientBuilder extends ConnectionClientExecutorsService.AbstractBuilder<ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>>, ClientBuilder> { public static ClientBuilder defaults() { return new ClientBuilder(null, null, null, null); } protected ClientBuilder( ClientConnectionFactoryBuilder connectionBuilder, ClientConnectionFactory<? extends ProtocolConnection<Message.ClientSession, Message.ServerSession,?,?,?>> clientConnectionFactory, ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> clientExecutors, RuntimeModule runtime) { super(connectionBuilder, clientConnectionFactory, clientExecutors, runtime); } @Override protected ClientBuilder newInstance( ClientConnectionFactoryBuilder connectionBuilder, ClientConnectionFactory<? 
extends ProtocolConnection<Message.ClientSession, Message.ServerSession,?,?,?>> clientConnectionFactory, ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> clientExecutors, RuntimeModule runtime) { return new ClientBuilder(connectionBuilder, clientConnectionFactory, clientExecutors, runtime); } @Override protected ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> getDefaultConnectionClientExecutorsService() { EnsembleView<ServerInetAddressView> ensemble = EnsembleViewConfiguration.get(getRuntimeModule().getConfiguration()); final EnsembleViewFactory<? extends ServerViewFactory<ConnectMessage.Request, ? extends MessageClientExecutor<?>>> ensembleFactory = EnsembleViewFactory.random( ensemble, ServerViewFactories.newInstance( clientConnectionFactory, getRuntimeModule().getExecutors().get(ScheduledExecutorService.class))); ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> service = ConnectionClientExecutorsService.newInstance( new DefaultsFactory<ConnectMessage.Request, ListenableFuture<? extends MessageClientExecutor<?>>>() { @Override public ListenableFuture<? extends MessageClientExecutor<?>> get(ConnectMessage.Request value) { return ensembleFactory.get().get(value); } @Override public ListenableFuture<? extends MessageClientExecutor<?>> get() { return ensembleFactory.get().get(); } }); return service; } } protected final Logger logger = LogManager.getLogger(getClass()); protected final NettyModule netModule; protected ProxyServerExecutorBuilder( NettyModule netModule, ClientBuilder clientBuilder) { super(clientBuilder); this.netModule = netModule; } public NettyModule getNetModule() { return netModule; } public ProxyServerExecutorBuilder setNetModule(NettyModule netModule) { if (this.netModule == netModule) { return this; } else { return newInstance( netModule, delegate); } } public ClientBuilder getClientBuilder() { return delegate; } public ProxyServerExecutorBuilder setClientBuilder( ClientBuilder clientBuilder) { if (this.delegate == clientBuilder) { return this; } else { return newInstance(netModule, clientBuilder); } } @Override public ProxyServerExecutorBuilder setDefaults() { if (getNetModule() == null) { return setNetModule(getDefaultNetModule()).setDefaults(); } ClientBuilder clientBuilder = getDefaultClientBuilder(); if (getClientBuilder() != clientBuilder) { return setClientBuilder(clientBuilder).setDefaults(); } return this; } @Override protected ProxyServerExecutorBuilder newInstance( ClientBuilder clientBuilder) { return newInstance(netModule, clientBuilder); } protected ProxyServerExecutorBuilder newInstance( NettyModule netModule, ClientBuilder clientBuilder) { return new ProxyServerExecutorBuilder(netModule, clientBuilder); } protected NettyModule getDefaultNetModule() { return NettyModule.newInstance(getRuntimeModule()); } protected ClientBuilder getDefaultClientBuilder() { ClientBuilder builder = getClientBuilder(); if (builder.getConnectionBuilder() == null) { builder = builder.setConnectionBuilder( ClientConnectionFactoryBuilder.defaults() .setClientModule(getNetModule().clients()) .setConnectionFactory( new ParameterizedFactory<CodecConnection<Message.ClientSession, Message.ServerSession, ProtocolCodec<Message.ClientSession,Message.ServerSession,Message.ClientSession,Message.ServerSession>,?>, ClientProtocolConnection<Message.ClientSession, Message.ServerSession,?,?>>() { @Override public 
ClientProtocolConnection<Message.ClientSession, Message.ServerSession,?,?> get(CodecConnection<Message.ClientSession, Message.ServerSession, ProtocolCodec<Message.ClientSession,Message.ServerSession,Message.ClientSession,Message.ServerSession>,?> value) { return ClientProtocolConnection.newInstance(value); } })); } return builder.setDefaults(); } @Override protected SimpleServerExecutor<ProxySessionExecutor> doBuild() { ConcurrentMap<Long, ProxySessionExecutor> sessions = new MapMaker().makeMap(); ProxyConnectExecutor connectExecutor = ProxyConnectExecutor.create( sessions, getClientBuilder().getConnectionClientExecutors()); return new SimpleServerExecutor<ProxySessionExecutor>( sessions, connectExecutor, getDefaultAnonymousExecutor()); } protected TaskExecutor<? super FourLetterRequest, ? extends FourLetterResponse> getDefaultAnonymousExecutor() { return ProcessorTaskExecutor.of(FourLetterRequestProcessor.newInstance()); } }
src/main/java/edu/uw/zookeeper/proxy/ProxyServerExecutorBuilder.java
package edu.uw.zookeeper.proxy; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledExecutorService; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.MapMaker; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigUtil; import edu.uw.zookeeper.EnsembleView; import edu.uw.zookeeper.ZooKeeperApplication; import edu.uw.zookeeper.ServerInetAddressView; import edu.uw.zookeeper.client.ConnectionClientExecutorsService; import edu.uw.zookeeper.client.EnsembleViewFactory; import edu.uw.zookeeper.client.FixedClientConnectionFactory; import edu.uw.zookeeper.client.ServerViewFactory; import edu.uw.zookeeper.common.*; import edu.uw.zookeeper.net.ClientConnectionFactory; import edu.uw.zookeeper.net.CodecConnection; import edu.uw.zookeeper.protocol.ConnectMessage; import edu.uw.zookeeper.protocol.FourLetterRequest; import edu.uw.zookeeper.protocol.FourLetterResponse; import edu.uw.zookeeper.protocol.Message; import edu.uw.zookeeper.protocol.Operation; import edu.uw.zookeeper.protocol.ProtocolCodec; import edu.uw.zookeeper.protocol.ProtocolConnection; import edu.uw.zookeeper.protocol.client.ClientConnectionFactoryBuilder; import edu.uw.zookeeper.protocol.client.ClientProtocolConnection; import edu.uw.zookeeper.protocol.client.MessageClientExecutor; import edu.uw.zookeeper.protocol.client.ZxidTracker; import edu.uw.zookeeper.server.FourLetterRequestProcessor; import edu.uw.zookeeper.server.ProcessorTaskExecutor; import edu.uw.zookeeper.server.SimpleServerExecutor; public class ProxyServerExecutorBuilder extends ZooKeeperApplication.ForwardingBuilder<SimpleServerExecutor<ProxySessionExecutor>, ProxyServerExecutorBuilder.ClientBuilder, ProxyServerExecutorBuilder> { public static ProxyServerExecutorBuilder defaults() { return new ProxyServerExecutorBuilder(null, ClientBuilder.defaults()); } @Configurable(arg="servers", key="servers", value="127.0.0.1:2081", help="address:port,...") public static abstract class EnsembleViewConfiguration { public static EnsembleView<ServerInetAddressView> get(Configuration configuration) { Configurable configurable = getConfigurable(); String value = configuration.withConfigurable(configurable) .getConfigOrEmpty(configurable.path()) .getString(configurable.key()); return ServerInetAddressView.ensembleFromString(value); } public static Configurable getConfigurable() { return EnsembleViewConfiguration.class.getAnnotation(Configurable.class); } public static Configuration set(Configuration configuration, EnsembleView<ServerInetAddressView> value) { Configurable configurable = getConfigurable(); return configuration.withConfig(ConfigFactory.parseMap(ImmutableMap.<String,Object>builder().put(ConfigUtil.joinPath(configurable.path(), configurable.key()), EnsembleView.toString(value)).build())); } protected EnsembleViewConfiguration() {} } public static class FromRequestFactory<C extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> implements DefaultsFactory<ConnectMessage.Request, ListenableFuture<MessageClientExecutor<C>>> { public static <C extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> FromRequestFactory<C> create( Factory<? extends ListenableFuture<? 
extends C>> connections, ScheduledExecutorService executor) { return new FromRequestFactory<C>(connections, executor); } protected final Factory<? extends ListenableFuture<? extends C>> connections; protected final ScheduledExecutorService executor; public FromRequestFactory( Factory<? extends ListenableFuture<? extends C>> connections, ScheduledExecutorService executor) { this.connections = connections; this.executor = executor; } @Override public ListenableFuture<MessageClientExecutor<C>> get() { return get(ConnectMessage.Request.NewRequest.newInstance()); } @Override public ListenableFuture<MessageClientExecutor<C>> get(ConnectMessage.Request request) { return Futures.transform(connections.get(), new Constructor(request), SameThreadExecutor.getInstance()); } protected class Constructor implements Function<C, MessageClientExecutor<C>> { protected final ConnectMessage.Request task; public Constructor(ConnectMessage.Request task) { this.task = task; } @Override public MessageClientExecutor<C> apply(C input) { return MessageClientExecutor.newInstance( task, input, executor); } } } public static class ServerViewFactories implements ParameterizedFactory<ServerInetAddressView, ServerViewFactory<ConnectMessage.Request, ? extends MessageClientExecutor<?>>> { public static ServerViewFactories newInstance( ClientConnectionFactory<? extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> connections, ScheduledExecutorService executor) { return new ServerViewFactories(connections, executor); } protected final ClientConnectionFactory<? extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> connections; protected final ScheduledExecutorService executor; protected ServerViewFactories( ClientConnectionFactory<? extends ProtocolConnection<? super Message.ClientSession,? extends Operation.Response,?,?,?>> connections, ScheduledExecutorService executor) { this.connections = connections; this.executor = executor; } @Override public ServerViewFactory<ConnectMessage.Request, ? extends MessageClientExecutor<?>> get(ServerInetAddressView view) { return ServerViewFactory.create( view, FromRequestFactory.create( FixedClientConnectionFactory.create(view.get(), connections), executor), ZxidTracker.create()); } } public static class ClientBuilder extends ConnectionClientExecutorsService.AbstractBuilder<ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>>, ClientBuilder> { public static ClientBuilder defaults() { return new ClientBuilder(null, null, null, null); } protected ClientBuilder( ClientConnectionFactoryBuilder connectionBuilder, ClientConnectionFactory<? extends ProtocolConnection<Message.ClientSession, Message.ServerSession,?,?,?>> clientConnectionFactory, ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> clientExecutors, RuntimeModule runtime) { super(connectionBuilder, clientConnectionFactory, clientExecutors, runtime); } @Override protected ClientBuilder newInstance( ClientConnectionFactoryBuilder connectionBuilder, ClientConnectionFactory<? 
extends ProtocolConnection<Message.ClientSession, Message.ServerSession,?,?,?>> clientConnectionFactory, ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> clientExecutors, RuntimeModule runtime) { return new ClientBuilder(connectionBuilder, clientConnectionFactory, clientExecutors, runtime); } @Override protected ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> getDefaultConnectionClientExecutorsService() { EnsembleView<ServerInetAddressView> ensemble = EnsembleViewConfiguration.get(getRuntimeModule().getConfiguration()); final EnsembleViewFactory<? extends ServerViewFactory<ConnectMessage.Request, ? extends MessageClientExecutor<?>>> ensembleFactory = EnsembleViewFactory.random( ensemble, ServerViewFactories.newInstance( clientConnectionFactory, getRuntimeModule().getExecutors().get(ScheduledExecutorService.class))); ConnectionClientExecutorsService<Message.ClientRequest<?>, ConnectMessage.Request, MessageClientExecutor<?>> service = ConnectionClientExecutorsService.newInstance( new DefaultsFactory<ConnectMessage.Request, ListenableFuture<? extends MessageClientExecutor<?>>>() { @Override public ListenableFuture<? extends MessageClientExecutor<?>> get(ConnectMessage.Request value) { return ensembleFactory.get().get(value); } @Override public ListenableFuture<? extends MessageClientExecutor<?>> get() { return ensembleFactory.get().get(); } }); return service; } } protected final Logger logger = LogManager.getLogger(getClass()); protected final NettyModule netModule; protected ProxyServerExecutorBuilder( NettyModule netModule, ClientBuilder clientBuilder) { super(clientBuilder); this.netModule = netModule; } public NettyModule getNetModule() { return netModule; } public ProxyServerExecutorBuilder setNetModule(NettyModule netModule) { if (this.netModule == netModule) { return this; } else { return newInstance( netModule, delegate); } } public ClientBuilder getClientBuilder() { return delegate; } public ProxyServerExecutorBuilder setClientBuilder( ClientBuilder clientBuilder) { if (this.delegate == clientBuilder) { return this; } else { return newInstance(netModule, clientBuilder); } } @Override public ProxyServerExecutorBuilder setDefaults() { if (getNetModule() == null) { return setNetModule(getDefaultNetModule()).setDefaults(); } ClientBuilder clientBuilder = getDefaultClientBuilder(); if (getClientBuilder() != clientBuilder) { return setClientBuilder(clientBuilder).setDefaults(); } return this; } @Override protected ProxyServerExecutorBuilder newInstance( ClientBuilder clientBuilder) { return newInstance(netModule, clientBuilder); } protected ProxyServerExecutorBuilder newInstance( NettyModule netModule, ClientBuilder clientBuilder) { return new ProxyServerExecutorBuilder(netModule, clientBuilder); } protected NettyModule getDefaultNetModule() { return NettyModule.newInstance(getRuntimeModule()); } protected ClientBuilder getDefaultClientBuilder() { ClientBuilder builder = getClientBuilder(); if (builder.getConnectionBuilder() == null) { builder = builder.setConnectionBuilder( ClientConnectionFactoryBuilder.defaults() .setClientModule(getNetModule().clients()) .setConnectionFactory( new ParameterizedFactory<CodecConnection<Message.ClientSession, Message.ServerSession, ProtocolCodec<Message.ClientSession,Message.ServerSession,Message.ClientSession,Message.ServerSession>,?>, ClientProtocolConnection<Message.ClientSession, Message.ServerSession,?,?>>() { @Override public 
ClientProtocolConnection<Message.ClientSession, Message.ServerSession,?,?> get(CodecConnection<Message.ClientSession, Message.ServerSession, ProtocolCodec<Message.ClientSession,Message.ServerSession,Message.ClientSession,Message.ServerSession>,?> value) { return ClientProtocolConnection.newInstance(value); } })); } return builder.setDefaults(); } @Override protected SimpleServerExecutor<ProxySessionExecutor> doBuild() { ConcurrentMap<Long, ProxySessionExecutor> sessions = new MapMaker().makeMap(); ProxyConnectExecutor connectExecutor = ProxyConnectExecutor.create( sessions, getClientBuilder().getConnectionClientExecutors()); return new SimpleServerExecutor<ProxySessionExecutor>( sessions, connectExecutor, getDefaultAnonymousExecutor()); } protected TaskExecutor<? super FourLetterRequest, ? extends FourLetterResponse> getDefaultAnonymousExecutor() { return ProcessorTaskExecutor.of(FourLetterRequestProcessor.newInstance()); } }
ProxyServerExecutorBuilder: use ZxidTracker.zero() instead of ZxidTracker.create()
src/main/java/edu/uw/zookeeper/proxy/ProxyServerExecutorBuilder.java
ProxyServerExecutorBuilder: use ZxidTracker.zero() instead of ZxidTracker.create()
<ide><path>src/main/java/edu/uw/zookeeper/proxy/ProxyServerExecutorBuilder.java <ide> FromRequestFactory.create( <ide> FixedClientConnectionFactory.create(view.get(), connections), <ide> executor), <del> ZxidTracker.create()); <add> ZxidTracker.zero()); <ide> } <ide> }
Java
apache-2.0
cd8375071cda70c97aed44efa08ef25cc1d66fb7
0
qtproject/qtqa-gerrit,WANdisco/gerrit,gerrit-review/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,WANdisco/gerrit,gerrit-review/gerrit
// Copyright (C) 2015 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.config; import com.google.common.base.CharMatcher; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.gerrit.common.data.GitwebType; import com.google.gerrit.extensions.config.CloneCommand; import com.google.gerrit.extensions.config.DownloadCommand; import com.google.gerrit.extensions.config.DownloadScheme; import com.google.gerrit.extensions.registration.DynamicItem; import com.google.gerrit.extensions.registration.DynamicMap; import com.google.gerrit.extensions.registration.DynamicSet; import com.google.gerrit.extensions.restapi.RestReadView; import com.google.gerrit.extensions.webui.WebUiPlugin; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.AuthType; import com.google.gerrit.server.EnableSignedPush; import com.google.gerrit.server.account.Realm; import com.google.gerrit.server.avatar.AvatarProvider; import com.google.gerrit.server.change.ArchiveFormat; import com.google.gerrit.server.change.GetArchive; import com.google.gerrit.server.change.Submit; import com.google.gerrit.server.documentation.QueryDocumentationExecutor; import com.google.gerrit.server.notedb.NotesMigration; import com.google.inject.Inject; import org.eclipse.jgit.lib.Config; import java.net.MalformedURLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; public class GetServerInfo implements RestReadView<ConfigResource> { private static final String URL_ALIAS = "urlAlias"; private static final String KEY_MATCH = "match"; private static final String KEY_TOKEN = "token"; private final Config config; private final AuthConfig authConfig; private final Realm realm; private final DynamicMap<DownloadScheme> downloadSchemes; private final DynamicMap<DownloadCommand> downloadCommands; private final DynamicMap<CloneCommand> cloneCommands; private final DynamicSet<WebUiPlugin> plugins; private final GetArchive.AllowedFormats archiveFormats; private final AllProjectsName allProjectsName; private final AllUsersName allUsersName; private final String anonymousCowardName; private final DynamicItem<AvatarProvider> avatar; private final boolean enableSignedPush; private final QueryDocumentationExecutor docSearcher; private final NotesMigration migration; @Inject public GetServerInfo( @GerritServerConfig Config config, AuthConfig authConfig, Realm realm, DynamicMap<DownloadScheme> downloadSchemes, DynamicMap<DownloadCommand> downloadCommands, DynamicMap<CloneCommand> cloneCommands, DynamicSet<WebUiPlugin> webUiPlugins, GetArchive.AllowedFormats archiveFormats, AllProjectsName allProjectsName, AllUsersName allUsersName, @AnonymousCowardName String anonymousCowardName, DynamicItem<AvatarProvider> avatar, 
@EnableSignedPush boolean enableSignedPush, QueryDocumentationExecutor docSearcher, NotesMigration migration) { this.config = config; this.authConfig = authConfig; this.realm = realm; this.downloadSchemes = downloadSchemes; this.downloadCommands = downloadCommands; this.cloneCommands = cloneCommands; this.plugins = webUiPlugins; this.archiveFormats = archiveFormats; this.allProjectsName = allProjectsName; this.allUsersName = allUsersName; this.anonymousCowardName = anonymousCowardName; this.avatar = avatar; this.enableSignedPush = enableSignedPush; this.docSearcher = docSearcher; this.migration = migration; } @Override public ServerInfo apply(ConfigResource rsrc) throws MalformedURLException { ServerInfo info = new ServerInfo(); info.auth = getAuthInfo(authConfig, realm); info.change = getChangeInfo(config); info.download = getDownloadInfo(downloadSchemes, downloadCommands, cloneCommands, archiveFormats); info.gerrit = getGerritInfo(config, allProjectsName, allUsersName); info.noteDbEnabled = isNoteDbEnabled(); info.plugin = getPluginInfo(); info.sshd = getSshdInfo(config); info.suggest = getSuggestInfo(config); Map<String, String> urlAliases = getUrlAliasesInfo(config); info.urlAliases = !urlAliases.isEmpty() ? urlAliases : null; info.user = getUserInfo(anonymousCowardName); info.receive = getReceiveInfo(); return info; } private AuthInfo getAuthInfo(AuthConfig cfg, Realm realm) { AuthInfo info = new AuthInfo(); info.authType = cfg.getAuthType(); info.useContributorAgreements = toBoolean(cfg.isUseContributorAgreements()); info.editableAccountFields = new ArrayList<>(realm.getEditableFields()); info.switchAccountUrl = cfg.getSwitchAccountUrl(); info.isGitBasicAuth = toBoolean(cfg.isGitBasicAuth()); switch (info.authType) { case LDAP: case LDAP_BIND: info.registerUrl = cfg.getRegisterUrl(); info.registerText = cfg.getRegisterText(); info.editFullNameUrl = cfg.getEditFullNameUrl(); break; case CUSTOM_EXTENSION: info.registerUrl = cfg.getRegisterUrl(); info.registerText = cfg.getRegisterText(); info.editFullNameUrl = cfg.getEditFullNameUrl(); info.httpPasswordUrl = cfg.getHttpPasswordUrl(); break; case HTTP: case HTTP_LDAP: info.loginUrl = cfg.getLoginUrl(); info.loginText = cfg.getLoginText(); break; case CLIENT_SSL_CERT_LDAP: case DEVELOPMENT_BECOME_ANY_ACCOUNT: case OAUTH: case OPENID: case OPENID_SSO: break; } return info; } private ChangeConfigInfo getChangeInfo(Config cfg) { ChangeConfigInfo info = new ChangeConfigInfo(); info.allowBlame = toBoolean(cfg.getBoolean("change", "allowBlame", true)); info.allowDrafts = toBoolean(cfg.getBoolean("change", "allowDrafts", true)); info.largeChange = cfg.getInt("change", "largeChange", 500); info.replyTooltip = Optional.fromNullable(cfg.getString("change", null, "replyTooltip")) .or("Reply and score") + " (Shortcut: a)"; info.replyLabel = Optional.fromNullable(cfg.getString("change", null, "replyLabel")) .or("Reply") + "\u2026"; info.updateDelay = (int) ConfigUtil.getTimeUnit( cfg, "change", null, "updateDelay", 30, TimeUnit.SECONDS); info.submitWholeTopic = Submit.wholeTopicEnabled(cfg); return info; } private DownloadInfo getDownloadInfo( DynamicMap<DownloadScheme> downloadSchemes, DynamicMap<DownloadCommand> downloadCommands, DynamicMap<CloneCommand> cloneCommands, GetArchive.AllowedFormats archiveFormats) { DownloadInfo info = new DownloadInfo(); info.schemes = new HashMap<>(); for (DynamicMap.Entry<DownloadScheme> e : downloadSchemes) { DownloadScheme scheme = e.getProvider().get(); if (scheme.isEnabled() && scheme.getUrl("${project}") != 
null) { info.schemes.put(e.getExportName(), getDownloadSchemeInfo(scheme, downloadCommands, cloneCommands)); } } info.archives = Lists.newArrayList(Iterables.transform( archiveFormats.getAllowed(), new Function<ArchiveFormat, String>() { @Override public String apply(ArchiveFormat in) { return in.getShortName(); } })); return info; } private DownloadSchemeInfo getDownloadSchemeInfo(DownloadScheme scheme, DynamicMap<DownloadCommand> downloadCommands, DynamicMap<CloneCommand> cloneCommands) { DownloadSchemeInfo info = new DownloadSchemeInfo(); info.url = scheme.getUrl("${project}"); info.isAuthRequired = toBoolean(scheme.isAuthRequired()); info.isAuthSupported = toBoolean(scheme.isAuthSupported()); info.commands = new HashMap<>(); for (DynamicMap.Entry<DownloadCommand> e : downloadCommands) { String commandName = e.getExportName(); DownloadCommand command = e.getProvider().get(); String c = command.getCommand(scheme, "${project}", "${ref}"); if (c != null) { info.commands.put(commandName, c); } } info.cloneCommands = new HashMap<>(); for (DynamicMap.Entry<CloneCommand> e : cloneCommands) { String commandName = e.getExportName(); CloneCommand command = e.getProvider().get(); String c = command.getCommand(scheme, "${project-path}/${project-base-name}"); if (c != null) { c = c.replaceAll("\\$\\{project-path\\}/\\$\\{project-base-name\\}", "\\$\\{project\\}"); info.cloneCommands.put(commandName, c); } } return info; } private GerritInfo getGerritInfo(Config cfg, AllProjectsName allProjectsName, AllUsersName allUsersName) { GerritInfo info = new GerritInfo(); info.allProjects = allProjectsName.get(); info.allUsers = allUsersName.get(); info.reportBugUrl = cfg.getString("gerrit", null, "reportBugUrl"); info.reportBugText = cfg.getString("gerrit", null, "reportBugText"); info.docUrl = getDocUrl(cfg); info.docSearch = docSearcher.isAvailable(); info.editGpgKeys = toBoolean(enableSignedPush && cfg.getBoolean("gerrit", null, "editGpgKeys", true)); return info; } private String getDocUrl(Config cfg) { String docUrl = cfg.getString("gerrit", null, "docUrl"); if (Strings.isNullOrEmpty(docUrl)) { return null; } return CharMatcher.is('/').trimTrailingFrom(docUrl) + '/'; } private boolean isNoteDbEnabled() { return migration.readChanges(); } private PluginConfigInfo getPluginInfo() { PluginConfigInfo info = new PluginConfigInfo(); info.hasAvatars = toBoolean(avatar.get() != null); info.jsResourcePaths = new ArrayList<>(); for (WebUiPlugin u : plugins) { info.jsResourcePaths.add(String.format("plugins/%s/%s", u.getPluginName(), u.getJavaScriptResourcePath())); } return info; } private Map<String, String> getUrlAliasesInfo(Config cfg) { Map<String, String> urlAliases = new HashMap<>(); for (String subsection : cfg.getSubsections(URL_ALIAS)) { urlAliases.put(cfg.getString(URL_ALIAS, subsection, KEY_MATCH), cfg.getString(URL_ALIAS, subsection, KEY_TOKEN)); } return urlAliases; } private SshdInfo getSshdInfo(Config cfg) { String[] addr = cfg.getStringList("sshd", null, "listenAddress"); if (addr.length == 1 && isOff(addr[0])) { return null; } return new SshdInfo(); } private static boolean isOff(String listenHostname) { return "off".equalsIgnoreCase(listenHostname) || "none".equalsIgnoreCase(listenHostname) || "no".equalsIgnoreCase(listenHostname); } private SuggestInfo getSuggestInfo(Config cfg) { SuggestInfo info = new SuggestInfo(); info.from = cfg.getInt("suggest", "from", 0); return info; } private UserConfigInfo getUserInfo(String anonymousCowardName) { UserConfigInfo info = new UserConfigInfo(); 
info.anonymousCowardName = anonymousCowardName; return info; } private ReceiveInfo getReceiveInfo() { ReceiveInfo info = new ReceiveInfo(); info.enableSignedPush = enableSignedPush; return info; } private static Boolean toBoolean(boolean v) { return v ? v : null; } public static class ServerInfo { public AuthInfo auth; public ChangeConfigInfo change; public DownloadInfo download; public GerritInfo gerrit; public Boolean noteDbEnabled; public PluginConfigInfo plugin; public SshdInfo sshd; public SuggestInfo suggest; public Map<String, String> urlAliases; public UserConfigInfo user; public ReceiveInfo receive; } public static class AuthInfo { public AuthType authType; public Boolean useContributorAgreements; public List<Account.FieldName> editableAccountFields; public String loginUrl; public String loginText; public String switchAccountUrl; public String registerUrl; public String registerText; public String editFullNameUrl; public String httpPasswordUrl; public Boolean isGitBasicAuth; } public static class ChangeConfigInfo { public Boolean allowBlame; public Boolean allowDrafts; public int largeChange; public String replyLabel; public String replyTooltip; public int updateDelay; public Boolean submitWholeTopic; } public static class DownloadInfo { public Map<String, DownloadSchemeInfo> schemes; public List<String> archives; } public static class DownloadSchemeInfo { public String url; public Boolean isAuthRequired; public Boolean isAuthSupported; public Map<String, String> commands; public Map<String, String> cloneCommands; } public static class GerritInfo { public String allProjects; public String allUsers; public Boolean docSearch; public String docUrl; public Boolean editGpgKeys; public String reportBugUrl; public String reportBugText; } public static class GitwebInfo { public String url; public GitwebType type; } public static class PluginConfigInfo { public Boolean hasAvatars; public List<String> jsResourcePaths; } public static class SshdInfo { } public static class SuggestInfo { public int from; } public static class UserConfigInfo { public String anonymousCowardName; } public static class ReceiveInfo { public Boolean enableSignedPush; } }
gerrit-server/src/main/java/com/google/gerrit/server/config/GetServerInfo.java
// Copyright (C) 2015 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.config; import com.google.common.base.CharMatcher; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.gerrit.common.data.GitwebType; import com.google.gerrit.extensions.config.CloneCommand; import com.google.gerrit.extensions.config.DownloadCommand; import com.google.gerrit.extensions.config.DownloadScheme; import com.google.gerrit.extensions.registration.DynamicItem; import com.google.gerrit.extensions.registration.DynamicMap; import com.google.gerrit.extensions.registration.DynamicSet; import com.google.gerrit.extensions.restapi.RestReadView; import com.google.gerrit.extensions.webui.WebUiPlugin; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.AuthType; import com.google.gerrit.server.EnableSignedPush; import com.google.gerrit.server.account.Realm; import com.google.gerrit.server.avatar.AvatarProvider; import com.google.gerrit.server.change.ArchiveFormat; import com.google.gerrit.server.change.GetArchive; import com.google.gerrit.server.change.Submit; import com.google.gerrit.server.documentation.QueryDocumentationExecutor; import com.google.gerrit.server.notedb.NotesMigration; import com.google.inject.Inject; import org.eclipse.jgit.lib.Config; import java.net.MalformedURLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; public class GetServerInfo implements RestReadView<ConfigResource> { private static final String URL_ALIAS = "urlAlias"; private static final String KEY_MATCH = "match"; private static final String KEY_TOKEN = "token"; private final Config config; private final AuthConfig authConfig; private final Realm realm; private final DynamicMap<DownloadScheme> downloadSchemes; private final DynamicMap<DownloadCommand> downloadCommands; private final DynamicMap<CloneCommand> cloneCommands; private final DynamicSet<WebUiPlugin> plugins; private final GetArchive.AllowedFormats archiveFormats; private final AllProjectsName allProjectsName; private final AllUsersName allUsersName; private final String anonymousCowardName; private final DynamicItem<AvatarProvider> avatar; private final boolean enableSignedPush; private final QueryDocumentationExecutor docSearcher; private final NotesMigration migration; @Inject public GetServerInfo( @GerritServerConfig Config config, AuthConfig authConfig, Realm realm, DynamicMap<DownloadScheme> downloadSchemes, DynamicMap<DownloadCommand> downloadCommands, DynamicMap<CloneCommand> cloneCommands, DynamicSet<WebUiPlugin> webUiPlugins, GetArchive.AllowedFormats archiveFormats, AllProjectsName allProjectsName, AllUsersName allUsersName, @AnonymousCowardName String anonymousCowardName, DynamicItem<AvatarProvider> avatar, 
@EnableSignedPush boolean enableSignedPush, QueryDocumentationExecutor docSearcher, NotesMigration migration) { this.config = config; this.authConfig = authConfig; this.realm = realm; this.downloadSchemes = downloadSchemes; this.downloadCommands = downloadCommands; this.cloneCommands = cloneCommands; this.plugins = webUiPlugins; this.archiveFormats = archiveFormats; this.allProjectsName = allProjectsName; this.allUsersName = allUsersName; this.anonymousCowardName = anonymousCowardName; this.avatar = avatar; this.enableSignedPush = enableSignedPush; this.docSearcher = docSearcher; this.migration = migration; } @Override public ServerInfo apply(ConfigResource rsrc) throws MalformedURLException { ServerInfo info = new ServerInfo(); info.auth = getAuthInfo(authConfig, realm); info.change = getChangeInfo(config); info.download = getDownloadInfo(downloadSchemes, downloadCommands, cloneCommands, archiveFormats); info.gerrit = getGerritInfo(config, allProjectsName, allUsersName); info.noteDbEnabled = isNoteDbEnabled(config); info.plugin = getPluginInfo(); info.sshd = getSshdInfo(config); info.suggest = getSuggestInfo(config); Map<String, String> urlAliases = getUrlAliasesInfo(config); info.urlAliases = !urlAliases.isEmpty() ? urlAliases : null; info.user = getUserInfo(anonymousCowardName); info.receive = getReceiveInfo(); return info; } private AuthInfo getAuthInfo(AuthConfig cfg, Realm realm) { AuthInfo info = new AuthInfo(); info.authType = cfg.getAuthType(); info.useContributorAgreements = toBoolean(cfg.isUseContributorAgreements()); info.editableAccountFields = new ArrayList<>(realm.getEditableFields()); info.switchAccountUrl = cfg.getSwitchAccountUrl(); info.isGitBasicAuth = toBoolean(cfg.isGitBasicAuth()); switch (info.authType) { case LDAP: case LDAP_BIND: info.registerUrl = cfg.getRegisterUrl(); info.registerText = cfg.getRegisterText(); info.editFullNameUrl = cfg.getEditFullNameUrl(); break; case CUSTOM_EXTENSION: info.registerUrl = cfg.getRegisterUrl(); info.registerText = cfg.getRegisterText(); info.editFullNameUrl = cfg.getEditFullNameUrl(); info.httpPasswordUrl = cfg.getHttpPasswordUrl(); break; case HTTP: case HTTP_LDAP: info.loginUrl = cfg.getLoginUrl(); info.loginText = cfg.getLoginText(); break; case CLIENT_SSL_CERT_LDAP: case DEVELOPMENT_BECOME_ANY_ACCOUNT: case OAUTH: case OPENID: case OPENID_SSO: break; } return info; } private ChangeConfigInfo getChangeInfo(Config cfg) { ChangeConfigInfo info = new ChangeConfigInfo(); info.allowBlame = toBoolean(cfg.getBoolean("change", "allowBlame", true)); info.allowDrafts = toBoolean(cfg.getBoolean("change", "allowDrafts", true)); info.largeChange = cfg.getInt("change", "largeChange", 500); info.replyTooltip = Optional.fromNullable(cfg.getString("change", null, "replyTooltip")) .or("Reply and score") + " (Shortcut: a)"; info.replyLabel = Optional.fromNullable(cfg.getString("change", null, "replyLabel")) .or("Reply") + "\u2026"; info.updateDelay = (int) ConfigUtil.getTimeUnit( cfg, "change", null, "updateDelay", 30, TimeUnit.SECONDS); info.submitWholeTopic = Submit.wholeTopicEnabled(cfg); return info; } private DownloadInfo getDownloadInfo( DynamicMap<DownloadScheme> downloadSchemes, DynamicMap<DownloadCommand> downloadCommands, DynamicMap<CloneCommand> cloneCommands, GetArchive.AllowedFormats archiveFormats) { DownloadInfo info = new DownloadInfo(); info.schemes = new HashMap<>(); for (DynamicMap.Entry<DownloadScheme> e : downloadSchemes) { DownloadScheme scheme = e.getProvider().get(); if (scheme.isEnabled() && scheme.getUrl("${project}") 
!= null) { info.schemes.put(e.getExportName(), getDownloadSchemeInfo(scheme, downloadCommands, cloneCommands)); } } info.archives = Lists.newArrayList(Iterables.transform( archiveFormats.getAllowed(), new Function<ArchiveFormat, String>() { @Override public String apply(ArchiveFormat in) { return in.getShortName(); } })); return info; } private DownloadSchemeInfo getDownloadSchemeInfo(DownloadScheme scheme, DynamicMap<DownloadCommand> downloadCommands, DynamicMap<CloneCommand> cloneCommands) { DownloadSchemeInfo info = new DownloadSchemeInfo(); info.url = scheme.getUrl("${project}"); info.isAuthRequired = toBoolean(scheme.isAuthRequired()); info.isAuthSupported = toBoolean(scheme.isAuthSupported()); info.commands = new HashMap<>(); for (DynamicMap.Entry<DownloadCommand> e : downloadCommands) { String commandName = e.getExportName(); DownloadCommand command = e.getProvider().get(); String c = command.getCommand(scheme, "${project}", "${ref}"); if (c != null) { info.commands.put(commandName, c); } } info.cloneCommands = new HashMap<>(); for (DynamicMap.Entry<CloneCommand> e : cloneCommands) { String commandName = e.getExportName(); CloneCommand command = e.getProvider().get(); String c = command.getCommand(scheme, "${project-path}/${project-base-name}"); if (c != null) { c = c.replaceAll("\\$\\{project-path\\}/\\$\\{project-base-name\\}", "\\$\\{project\\}"); info.cloneCommands.put(commandName, c); } } return info; } private GerritInfo getGerritInfo(Config cfg, AllProjectsName allProjectsName, AllUsersName allUsersName) { GerritInfo info = new GerritInfo(); info.allProjects = allProjectsName.get(); info.allUsers = allUsersName.get(); info.reportBugUrl = cfg.getString("gerrit", null, "reportBugUrl"); info.reportBugText = cfg.getString("gerrit", null, "reportBugText"); info.docUrl = getDocUrl(cfg); info.docSearch = docSearcher.isAvailable(); info.editGpgKeys = toBoolean(enableSignedPush && cfg.getBoolean("gerrit", null, "editGpgKeys", true)); return info; } private String getDocUrl(Config cfg) { String docUrl = cfg.getString("gerrit", null, "docUrl"); if (Strings.isNullOrEmpty(docUrl)) { return null; } return CharMatcher.is('/').trimTrailingFrom(docUrl) + '/'; } private boolean isNoteDbEnabled(Config cfg) { return migration.readChanges(); } private PluginConfigInfo getPluginInfo() { PluginConfigInfo info = new PluginConfigInfo(); info.hasAvatars = toBoolean(avatar.get() != null); info.jsResourcePaths = new ArrayList<>(); for (WebUiPlugin u : plugins) { info.jsResourcePaths.add(String.format("plugins/%s/%s", u.getPluginName(), u.getJavaScriptResourcePath())); } return info; } private Map<String, String> getUrlAliasesInfo(Config cfg) { Map<String, String> urlAliases = new HashMap<>(); for (String subsection : cfg.getSubsections(URL_ALIAS)) { urlAliases.put(cfg.getString(URL_ALIAS, subsection, KEY_MATCH), cfg.getString(URL_ALIAS, subsection, KEY_TOKEN)); } return urlAliases; } private SshdInfo getSshdInfo(Config cfg) { String[] addr = cfg.getStringList("sshd", null, "listenAddress"); if (addr.length == 1 && isOff(addr[0])) { return null; } return new SshdInfo(); } private static boolean isOff(String listenHostname) { return "off".equalsIgnoreCase(listenHostname) || "none".equalsIgnoreCase(listenHostname) || "no".equalsIgnoreCase(listenHostname); } private SuggestInfo getSuggestInfo(Config cfg) { SuggestInfo info = new SuggestInfo(); info.from = cfg.getInt("suggest", "from", 0); return info; } private UserConfigInfo getUserInfo(String anonymousCowardName) { UserConfigInfo info = new 
UserConfigInfo(); info.anonymousCowardName = anonymousCowardName; return info; } private ReceiveInfo getReceiveInfo() { ReceiveInfo info = new ReceiveInfo(); info.enableSignedPush = enableSignedPush; return info; } private static Boolean toBoolean(boolean v) { return v ? v : null; } public static class ServerInfo { public AuthInfo auth; public ChangeConfigInfo change; public DownloadInfo download; public GerritInfo gerrit; public Boolean noteDbEnabled; public PluginConfigInfo plugin; public SshdInfo sshd; public SuggestInfo suggest; public Map<String, String> urlAliases; public UserConfigInfo user; public ReceiveInfo receive; } public static class AuthInfo { public AuthType authType; public Boolean useContributorAgreements; public List<Account.FieldName> editableAccountFields; public String loginUrl; public String loginText; public String switchAccountUrl; public String registerUrl; public String registerText; public String editFullNameUrl; public String httpPasswordUrl; public Boolean isGitBasicAuth; } public static class ChangeConfigInfo { public Boolean allowBlame; public Boolean allowDrafts; public int largeChange; public String replyLabel; public String replyTooltip; public int updateDelay; public Boolean submitWholeTopic; } public static class DownloadInfo { public Map<String, DownloadSchemeInfo> schemes; public List<String> archives; } public static class DownloadSchemeInfo { public String url; public Boolean isAuthRequired; public Boolean isAuthSupported; public Map<String, String> commands; public Map<String, String> cloneCommands; } public static class GerritInfo { public String allProjects; public String allUsers; public Boolean docSearch; public String docUrl; public Boolean editGpgKeys; public String reportBugUrl; public String reportBugText; } public static class GitwebInfo { public String url; public GitwebType type; } public static class PluginConfigInfo { public Boolean hasAvatars; public List<String> jsResourcePaths; } public static class SshdInfo { } public static class SuggestInfo { public int from; } public static class UserConfigInfo { public String anonymousCowardName; } public static class ReceiveInfo { public Boolean enableSignedPush; } }
GetServerInfo#isNoteDbEnabled: Remove unused Config parameter Change-Id: I2f002a4579e7f5b26e7ec0b983deddbacdf42cb3
gerrit-server/src/main/java/com/google/gerrit/server/config/GetServerInfo.java
GetServerInfo#isNoteDbEnabled: Remove unused Config parameter
<ide><path>gerrit-server/src/main/java/com/google/gerrit/server/config/GetServerInfo.java <ide> getDownloadInfo(downloadSchemes, downloadCommands, cloneCommands, <ide> archiveFormats); <ide> info.gerrit = getGerritInfo(config, allProjectsName, allUsersName); <del> info.noteDbEnabled = isNoteDbEnabled(config); <add> info.noteDbEnabled = isNoteDbEnabled(); <ide> info.plugin = getPluginInfo(); <ide> info.sshd = getSshdInfo(config); <ide> info.suggest = getSuggestInfo(config); <ide> return CharMatcher.is('/').trimTrailingFrom(docUrl) + '/'; <ide> } <ide> <del> private boolean isNoteDbEnabled(Config cfg) { <add> private boolean isNoteDbEnabled() { <ide> return migration.readChanges(); <ide> }
Java
agpl-3.0
7fa57f4167c832dfac0c6468f9215cbbf531c97e
0
evenh/IForgotThat
package com.ehpefi.iforgotthat; import android.app.AlertDialog; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.location.LocationManager; import android.provider.Settings; import android.provider.Settings.SettingNotFoundException; import android.util.Log; /** * A collection of helping methods * * @author Even Holthe * @since 1.0.0 */ public class Utils { private static final String TAG = "Utils"; /** * Checks whether location services is enabled. If not, the method displays a dialog prompting the user to enable location services * * @param context The calling activity * @return True if location services is enabled, false otherwise * @since 1.0.0 */ public static boolean isLocationEnabled(final Context context) { if (android.os.Build.VERSION.SDK_INT >= 19) { int locationProviders = Settings.Secure.LOCATION_MODE_OFF; // Fetch the location mode try { locationProviders = Settings.Secure.getInt(context.getContentResolver(), Settings.Secure.LOCATION_MODE); } catch (SettingNotFoundException e) { Log.e(TAG, "LOCATION_MODE not found!", e); } // If we have location enabled, return true if (locationProviders != Settings.Secure.LOCATION_MODE_OFF) { return true; } } else { // Below API level 19 LocationManager lm = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE); try { if (lm.isProviderEnabled(LocationManager.GPS_PROVIDER) || lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER)) { // We have location providers return true; } } catch (Exception ex) { Log.d(TAG, "Got exception while trying to fetch location providers in legacy mode", ex); } } // Show a dialog that location has to be enabled Dialog dialog = new AlertDialog.Builder(context).setTitle(R.string.no_loc_providers_enabled_title).setMessage(R.string.no_loc_providers_enabled_message) .setPositiveButton(R.string.no_loc_providers_enabled_ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { context.startActivity(new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS)); } }).setNegativeButton(R.string.close, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }).create(); dialog.show(); return false; } }
IForgotThat/src/com/ehpefi/iforgotthat/Utils.java
package com.ehpefi.iforgotthat; import android.app.AlertDialog; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.provider.Settings; import android.provider.Settings.SettingNotFoundException; import android.util.Log; /** * A collection of helping methods * * @author Even Holthe * @since 1.0.0 */ public class Utils { private static final String TAG = "Utils"; /** * Checks whether location services is enabled. If not, the method displays a dialog prompting the user to enable location services * * @param context The calling activity * @return True if location services is enabled, false otherwise * @since 1.0.0 */ public static boolean isLocationEnabled(final Context context) { int locationProviders = Settings.Secure.LOCATION_MODE_OFF; // Fetch the location mode try { locationProviders = Settings.Secure.getInt(context.getContentResolver(), Settings.Secure.LOCATION_MODE); } catch (SettingNotFoundException e) { Log.e(TAG, "LOCATION_MODE not found!"); } // If we have location enabled, return true if (locationProviders != Settings.Secure.LOCATION_MODE_OFF) { return true; } // Show a dialog that location has to be enabled Dialog dialog = new AlertDialog.Builder(context).setTitle(R.string.no_loc_providers_enabled_title).setMessage(R.string.no_loc_providers_enabled_message) .setPositiveButton(R.string.no_loc_providers_enabled_ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { context.startActivity(new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS)); } }).setNegativeButton(R.string.close, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }).create(); dialog.show(); return false; } }
Fixes the isLocationEnabled() check to work on API level <19
IForgotThat/src/com/ehpefi/iforgotthat/Utils.java
Fixes the isLocationEnabled() check to work on API level <19
<ide><path>IForgotThat/src/com/ehpefi/iforgotthat/Utils.java <ide> import android.content.Context; <ide> import android.content.DialogInterface; <ide> import android.content.Intent; <add>import android.location.LocationManager; <ide> import android.provider.Settings; <ide> import android.provider.Settings.SettingNotFoundException; <ide> import android.util.Log; <ide> * @since 1.0.0 <ide> */ <ide> public static boolean isLocationEnabled(final Context context) { <del> int locationProviders = Settings.Secure.LOCATION_MODE_OFF; <add> if (android.os.Build.VERSION.SDK_INT >= 19) { <add> int locationProviders = Settings.Secure.LOCATION_MODE_OFF; <ide> <del> // Fetch the location mode <del> try { <del> locationProviders = Settings.Secure.getInt(context.getContentResolver(), Settings.Secure.LOCATION_MODE); <del> } catch (SettingNotFoundException e) { <del> Log.e(TAG, "LOCATION_MODE not found!"); <add> // Fetch the location mode <add> try { <add> locationProviders = Settings.Secure.getInt(context.getContentResolver(), Settings.Secure.LOCATION_MODE); <add> } catch (SettingNotFoundException e) { <add> Log.e(TAG, "LOCATION_MODE not found!", e); <add> } <add> <add> // If we have location enabled, return true <add> if (locationProviders != Settings.Secure.LOCATION_MODE_OFF) { <add> return true; <add> } <add> } else { <add> // Below API level 19 <add> LocationManager lm = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE); <add> try { <add> if (lm.isProviderEnabled(LocationManager.GPS_PROVIDER) || lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER)) { <add> // We have location providers <add> return true; <add> } <add> } catch (Exception ex) { <add> Log.d(TAG, "Got exception while trying to fetch location providers in legacy mode", ex); <add> } <ide> } <del> <del> // If we have location enabled, return true <del> if (locationProviders != Settings.Secure.LOCATION_MODE_OFF) { <del> return true; <del> } <del> <ide> // Show a dialog that location has to be enabled <ide> Dialog dialog = new AlertDialog.Builder(context).setTitle(R.string.no_loc_providers_enabled_title).setMessage(R.string.no_loc_providers_enabled_message) <ide> .setPositiveButton(R.string.no_loc_providers_enabled_ok, new DialogInterface.OnClickListener() {
Java
apache-2.0
ab7a72b9ed90f083f0fa4e4fa11baa7f2840a569
0
adriens/liquibase,adriens/liquibase,C0mmi3/liquibase,cleiter/liquibase,liquibase/liquibase,instantdelay/liquibase,CoderPaulK/liquibase,C0mmi3/liquibase,talklittle/liquibase,evigeant/liquibase,syncron/liquibase,instantdelay/liquibase,danielkec/liquibase,jimmycd/liquibase,lazaronixon/liquibase,vast-engineering/liquibase,dbmanul/dbmanul,hbogaards/liquibase,ArloL/liquibase,evigeant/liquibase,vbekiaris/liquibase,jimmycd/liquibase,maberle/liquibase,Datical/liquibase,mattbertolini/liquibase,Datical/liquibase,gquintana/liquibase,NSIT/liquibase,mbreslow/liquibase,Willem1987/liquibase,foxel/liquibase,FreshGrade/liquibase,CoderPaulK/liquibase,dyk/liquibase,mattbertolini/liquibase,Vampire/liquibase,cleiter/liquibase,fbiville/liquibase,dprguard2000/liquibase,vbekiaris/liquibase,OculusVR/shanghai-liquibase,danielkec/liquibase,foxel/liquibase,gquintana/liquibase,russ-p/liquibase,lazaronixon/liquibase,mattbertolini/liquibase,adriens/liquibase,evigeant/liquibase,iherasymenko/liquibase,ZEPowerGroup/liquibase,balazs-zsoldos/liquibase,ZEPowerGroup/liquibase,fossamagna/liquibase,Datical/liquibase,iherasymenko/liquibase,vbekiaris/liquibase,instantdelay/liquibase,foxel/liquibase,NSIT/liquibase,mwaylabs/liquibase,dprguard2000/liquibase,CoderPaulK/liquibase,OculusVR/shanghai-liquibase,talklittle/liquibase,OpenCST/liquibase,maberle/liquibase,tjardo83/liquibase,dyk/liquibase,C0mmi3/liquibase,AlisonSouza/liquibase,maberle/liquibase,dprguard2000/liquibase,liquibase/liquibase,ArloL/liquibase,NSIT/liquibase,AlisonSouza/liquibase,balazs-zsoldos/liquibase,rkrzewski/liquibase,foxel/liquibase,FreshGrade/liquibase,russ-p/liquibase,Willem1987/liquibase,dbmanul/dbmanul,pellcorp/liquibase,cbotiza/liquibase,vfpfafrf/liquibase,danielkec/liquibase,mortegac/liquibase,cleiter/liquibase,fbiville/liquibase,dbmanul/dbmanul,mortegac/liquibase,klopfdreh/liquibase,maberle/liquibase,vfpfafrf/liquibase,russ-p/liquibase,dyk/liquibase,jimmycd/liquibase,hbogaards/liquibase,klopfdreh/liquibase,vast-engineering/liquibase,EVODelavega/liquibase,Vampire/liquibase,iherasymenko/liquibase,evigeant/liquibase,tjardo83/liquibase,fbiville/liquibase,ivaylo5ev/liquibase,syncron/liquibase,C0mmi3/liquibase,lazaronixon/liquibase,vast-engineering/liquibase,hbogaards/liquibase,mbreslow/liquibase,pellcorp/liquibase,ivaylo5ev/liquibase,klopfdreh/liquibase,vast-engineering/liquibase,dprguard2000/liquibase,vfpfafrf/liquibase,balazs-zsoldos/liquibase,syncron/liquibase,iherasymenko/liquibase,pellcorp/liquibase,dyk/liquibase,mwaylabs/liquibase,jimmycd/liquibase,liquibase/liquibase,fossamagna/liquibase,EVODelavega/liquibase,tjardo83/liquibase,dbmanul/dbmanul,talklittle/liquibase,EVODelavega/liquibase,AlisonSouza/liquibase,AlisonSouza/liquibase,instantdelay/liquibase,mattbertolini/liquibase,cbotiza/liquibase,klopfdreh/liquibase,rkrzewski/liquibase,pellcorp/liquibase,ArloL/liquibase,fossamagna/liquibase,mbreslow/liquibase,cleiter/liquibase,gquintana/liquibase,Vampire/liquibase,Datical/liquibase,cbotiza/liquibase,balazs-zsoldos/liquibase,russ-p/liquibase,NSIT/liquibase,fbiville/liquibase,EVODelavega/liquibase,cbotiza/liquibase,ZEPowerGroup/liquibase,Willem1987/liquibase,tjardo83/liquibase,mwaylabs/liquibase,vfpfafrf/liquibase,OculusVR/shanghai-liquibase,FreshGrade/liquibase,OpenCST/liquibase,hbogaards/liquibase,rkrzewski/liquibase,mortegac/liquibase,CoderPaulK/liquibase,syncron/liquibase,danielkec/liquibase,mbreslow/liquibase,vbekiaris/liquibase,OpenCST/liquibase,OculusVR/shanghai-liquibase,lazaronixon/liquibase,FreshGrade/liquibase,mortegac/liquibase,gquintana/liquibase,ta
lklittle/liquibase,Willem1987/liquibase,mwaylabs/liquibase,OpenCST/liquibase
package liquibase.sqlgenerator.core;

import liquibase.database.Database;
import liquibase.database.core.*;
import liquibase.database.typeconversion.TypeConverter;
import liquibase.database.typeconversion.TypeConverterFactory;
import liquibase.exception.DatabaseException;
import liquibase.exception.ValidationErrors;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGenerator;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.sqlgenerator.SqlGeneratorFactory;
import liquibase.statement.core.SetNullableStatement;
import liquibase.statement.core.ReorganizeTableStatement;

import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;

public class SetNullableGenerator extends AbstractSqlGenerator<SetNullableStatement> {

    @Override
    public boolean supports(SetNullableStatement statement, Database database) {
        return !(database instanceof FirebirdDatabase || database instanceof SQLiteDatabase);
    }

    public ValidationErrors validate(SetNullableStatement setNullableStatement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        ValidationErrors validationErrors = new ValidationErrors();

        validationErrors.checkRequiredField("tableName", setNullableStatement.getTableName());
        validationErrors.checkRequiredField("columnName", setNullableStatement.getColumnName());

        if (database instanceof MSSQLDatabase || database instanceof MySQLDatabase || database instanceof InformixDatabase || database instanceof H2Database) {
            validationErrors.checkRequiredField("columnDataType", setNullableStatement.getColumnDataType());
        }

        try {
            if ((database instanceof DB2Database) && (database.getDatabaseMajorVersion() < 9)) {
                validationErrors.addError("DB2 versions less than 9 do not support modifying null constraints");
            }
        } catch (DatabaseException ignore) {
            //cannot check
        }
        return validationErrors;
    }

    public Sql[] generateSql(SetNullableStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        String sql;

        TypeConverter typeConverter = TypeConverterFactory.getInstance().findTypeConverter(database);

        String nullableString;
        if (statement.isNullable()) {
            nullableString = " NULL";
        } else {
            nullableString = " NOT NULL";
        }

        if (database instanceof OracleDatabase || database instanceof SybaseDatabase || database instanceof SybaseASADatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + nullableString;
        } else if (database instanceof MSSQLDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString;
        } else if (database instanceof MySQLDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString;
        } else if (database instanceof DerbyDatabase || database instanceof CacheDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + nullableString;
        } else if (database instanceof HsqlDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " SET" + nullableString;
        } else if (database instanceof H2Database) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString;
        } else if (database instanceof MaxDBDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + (statement.isNullable() ? " DEFAULT NULL" : " NOT NULL");
        } else if (database instanceof InformixDatabase) {
            // Informix simply omits the null for nullables
            if (statement.isNullable()) {
                nullableString = "";
            }
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY (" + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString + ")";
        } else {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + (statement.isNullable() ? " DROP NOT NULL" : " SET NOT NULL");
        }

        List<Sql> returnList = new ArrayList<Sql>();
        returnList.add(new UnparsedSql(sql));

        if (database instanceof DB2Database) {
            returnList.addAll(Arrays.asList(SqlGeneratorFactory.getInstance().generateSql(new ReorganizeTableStatement(statement.getSchemaName(), statement.getTableName()), database)));
        }

        return returnList.toArray(new Sql[returnList.size()]);
    }
}
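For reference, the dialect branching above can be exercised outside Liquibase. The sketch below is a standalone, simplified re-implementation of the same branch logic, not Liquibase API; the Dialect enum and method names are illustrative assumptions.

// Standalone sketch of the dialect branching above (illustrative names only).
public class SetNullableSqlSketch {

    enum Dialect { ORACLE, MSSQL, MYSQL, ANSI }

    static String setNullableSql(Dialect dialect, String table, String column,
                                 String dataType, boolean nullable) {
        String nullability = nullable ? " NULL" : " NOT NULL";
        switch (dialect) {
            case ORACLE: // MODIFY, without repeating the column type
                return "ALTER TABLE " + table + " MODIFY " + column + nullability;
            case MSSQL:  // ALTER COLUMN requires restating the full type
                return "ALTER TABLE " + table + " ALTER COLUMN " + column + " " + dataType + nullability;
            case MYSQL:  // MODIFY requires restating the full type
                return "ALTER TABLE " + table + " MODIFY " + column + " " + dataType + nullability;
            default:     // ANSI-style fallback, mirroring the generator's else branch
                return "ALTER TABLE " + table + " ALTER COLUMN " + column
                        + (nullable ? " DROP NOT NULL" : " SET NOT NULL");
        }
    }

    public static void main(String[] args) {
        for (Dialect d : Dialect.values()) {
            System.out.println(d + ": " + setNullableSql(d, "person", "name", "VARCHAR(255)", false));
        }
    }
}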
liquibase-core/src/main/java/liquibase/sqlgenerator/core/SetNullableGenerator.java
package liquibase.sqlgenerator.core;

import liquibase.database.Database;
import liquibase.database.core.*;
import liquibase.exception.DatabaseException;
import liquibase.exception.ValidationErrors;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGenerator;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.sqlgenerator.SqlGeneratorFactory;
import liquibase.statement.core.SetNullableStatement;
import liquibase.statement.core.ReorganizeTableStatement;

import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;

public class SetNullableGenerator extends AbstractSqlGenerator<SetNullableStatement> {

    @Override
    public boolean supports(SetNullableStatement statement, Database database) {
        return !(database instanceof FirebirdDatabase || database instanceof SQLiteDatabase);
    }

    public ValidationErrors validate(SetNullableStatement setNullableStatement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        ValidationErrors validationErrors = new ValidationErrors();

        validationErrors.checkRequiredField("tableName", setNullableStatement.getTableName());
        validationErrors.checkRequiredField("columnName", setNullableStatement.getColumnName());

        if (database instanceof MSSQLDatabase || database instanceof MySQLDatabase || database instanceof InformixDatabase || database instanceof H2Database) {
            validationErrors.checkRequiredField("columnDataType", setNullableStatement.getColumnDataType());
        }

        try {
            if ((database instanceof DB2Database) && (database.getDatabaseMajorVersion() < 9)) {
                validationErrors.addError("DB2 versions less than 9 do not support modifying null constraints");
            }
        } catch (DatabaseException ignore) {
            //cannot check
        }
        return validationErrors;
    }

    public Sql[] generateSql(SetNullableStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        String sql;

        String nullableString;
        if (statement.isNullable()) {
            nullableString = " NULL";
        } else {
            nullableString = " NOT NULL";
        }

        if (database instanceof OracleDatabase || database instanceof SybaseDatabase || database instanceof SybaseASADatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + nullableString;
        } else if (database instanceof MSSQLDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString;
        } else if (database instanceof MySQLDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString;
        } else if (database instanceof DerbyDatabase || database instanceof CacheDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + nullableString;
        } else if (database instanceof HsqlDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " SET" + nullableString;
        } else if (database instanceof H2Database) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString;
        } else if (database instanceof MaxDBDatabase) {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + (statement.isNullable() ? " DEFAULT NULL" : " NOT NULL");
        } else if (database instanceof InformixDatabase) {
            // Informix simply omits the null for nullables
            if (statement.isNullable()) {
                nullableString = "";
            }
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY (" + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString + ")";
        } else {
            sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + (statement.isNullable() ? " DROP NOT NULL" : " SET NOT NULL");
        }

        List<Sql> returnList = new ArrayList<Sql>();
        returnList.add(new UnparsedSql(sql));

        if (database instanceof DB2Database) {
            returnList.addAll(Arrays.asList(SqlGeneratorFactory.getInstance().generateSql(new ReorganizeTableStatement(statement.getSchemaName(), statement.getTableName()), database)));
        }

        return returnList.toArray(new Sql[returnList.size()]);
    }
}
convert types correctly

git-svn-id: a91d99a4c51940524e539abe295d6ea473345dd2@1735 e6edf6fb-f266-4316-afb4-e53d95876a76
liquibase-core/src/main/java/liquibase/sqlgenerator/core/SetNullableGenerator.java
convert types correctly
<ide><path>liquibase-core/src/main/java/liquibase/sqlgenerator/core/SetNullableGenerator.java
<ide>
<ide> import liquibase.database.Database;
<ide> import liquibase.database.core.*;
<add>import liquibase.database.typeconversion.TypeConverter;
<add>import liquibase.database.typeconversion.TypeConverterFactory;
<ide> import liquibase.exception.DatabaseException;
<ide> import liquibase.exception.ValidationErrors;
<ide> import liquibase.sql.Sql;
<ide>     public Sql[] generateSql(SetNullableStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
<ide>         String sql;
<ide>
<add>        TypeConverter typeConverter = TypeConverterFactory.getInstance().findTypeConverter(database);
<add>
<ide>         String nullableString;
<ide>         if (statement.isNullable()) {
<ide>             nullableString = " NULL";
<ide>         if (database instanceof OracleDatabase || database instanceof SybaseDatabase || database instanceof SybaseASADatabase) {
<ide>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + nullableString;
<ide>         } else if (database instanceof MSSQLDatabase) {
<del>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString;
<add>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString;
<ide>         } else if (database instanceof MySQLDatabase) {
<del>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString;
<add>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString;
<ide>         } else if (database instanceof DerbyDatabase || database instanceof CacheDatabase) {
<ide>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + nullableString;
<ide>         } else if (database instanceof HsqlDatabase) {
<ide>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " SET" + nullableString;
<ide>         } else if (database instanceof H2Database) {
<del>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString;
<add>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString;
<ide>         } else if (database instanceof MaxDBDatabase) {
<ide>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + (statement.isNullable() ? " DEFAULT NULL" : " NOT NULL");
<ide>         } else if (database instanceof InformixDatabase) {
<ide>             if (statement.isNullable()) {
<ide>                 nullableString = "";
<ide>             }
<del>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY (" + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + statement.getColumnDataType() + nullableString + ")";
<add>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " MODIFY (" + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + " " + typeConverter.getDataType(statement.getColumnDataType(), false) + nullableString + ")";
<ide>         } else {
<ide>             sql = "ALTER TABLE " + database.escapeTableName(statement.getSchemaName(), statement.getTableName()) + " ALTER COLUMN " + database.escapeColumnName(statement.getSchemaName(), statement.getTableName(), statement.getColumnName()) + (statement.isNullable() ? " DROP NOT NULL" : " SET NOT NULL");
<ide>         }
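The motivation for routing the declared type through a converter, as this commit does, is that a changelog's generic type names map to different native types on each database. The sketch below illustrates the idea with a hand-rolled lookup table; it is not Liquibase's TypeConverter API, and the mappings shown are example assumptions, not an authoritative list.

import java.util.HashMap;
import java.util.Map;

// Illustrative type-mapping sketch (not Liquibase's TypeConverter API).
public class TypeMappingSketch {

    private static final Map<String, Map<String, String>> NATIVE_TYPES = new HashMap<>();
    static {
        // generic type -> per-dialect native spelling (example values only)
        NATIVE_TYPES.put("boolean", Map.of("mysql", "TINYINT(1)", "mssql", "BIT", "postgresql", "BOOLEAN"));
        NATIVE_TYPES.put("datetime", Map.of("mysql", "DATETIME", "mssql", "DATETIME2", "postgresql", "TIMESTAMP"));
    }

    static String toNativeType(String genericType, String dialect) {
        // Pass the declared type through unchanged when no mapping is known.
        return NATIVE_TYPES.getOrDefault(genericType.toLowerCase(), Map.of())
                .getOrDefault(dialect, genericType);
    }

    public static void main(String[] args) {
        System.out.println(toNativeType("boolean", "mysql"));      // TINYINT(1)
        System.out.println(toNativeType("boolean", "mssql"));      // BIT
        System.out.println(toNativeType("varchar(50)", "mysql"));  // varchar(50), pass-through
    }
}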