src_fm_fc_ms_ff
stringlengths 43
86.8k
| target
stringlengths 20
276k
|
---|---|
WordUtils { public static boolean isNumber(String word) { if (word == null) { return false; } String wordToParse = new String(word); if (wordToParse.startsWith("-")) { wordToParse = wordToParse.replaceFirst("-", ""); } if (wordToParse.length() == 0) { return false; } for (char c : wordToParse.toCharArray()) { if (!Character.isDigit(c)) { return false; } } return true; } static String getLastWord(final String sentence); static String capitalize(final String str); static boolean isNumber(String word); } | @Test public void testIsNumber() { assertFalse(isNumber(null)); assertFalse(isNumber("")); assertFalse(isNumber("a")); assertFalse(isNumber("-a")); assertFalse(isNumber("23a")); assertTrue(isNumber("-1")); assertTrue(isNumber("0")); assertTrue(isNumber("12345")); } |
Configuration { static String getConfigValue(final String propertyName) { return getConfiguration().getProperty(propertyName); } private Configuration(); static final String CONFIG_FILE; static final String REDIS_HOST; static final String REDIS_PORT; static final String REDIS_PASSWORD; static final String FALLBACK_RHYMES; static final String DEFAULT_RHYMES; } | @Test public void testGetConfigValue() { assertNull(getConfigValue("unexisting")); assertEquals(getConfigValue(REDIS_HOST), "localhost"); } |
Configuration { static String getRequiredConfigValue(final String propertyName) { String value = getConfiguration().getProperty(propertyName); if (value == null) { throw new ConfigurationException("Te requested property [" + propertyName + "] was not set."); } return value; } private Configuration(); static final String CONFIG_FILE; static final String REDIS_HOST; static final String REDIS_PORT; static final String REDIS_PASSWORD; static final String FALLBACK_RHYMES; static final String DEFAULT_RHYMES; } | @Test public void testGetRequiredConfigValue() { assertEquals(getRequiredConfigValue(REDIS_HOST), "localhost"); }
@Test(expectedExceptions = ConfigurationException.class) public void testGetUnexistingRequiredConfigValue() { getRequiredConfigValue("unexisting"); } |
RedisStore { public Set<String> findAll() throws IOException { Set<String> rhymes = new HashSet<String>(); connect(); try { String lastId = getLastId(sentencens); if (lastId != null) { Integer n = Integer.parseInt(getLastId(sentencens)); for (int i = 1; i <= n; i++) { String id = sentencens.build(String.valueOf(i)).toString(); if (redis.exists(id) == 1) { rhymes.add(URLDecoder.decode(redis.get(id), encoding.displayName())); } } } } finally { disconnect(); } return rhymes; } @Inject RedisStore(@Named("sentence") final Keymaker sentencens,
@Named("index") final Keymaker indexns, final WordParser wordParser, final Jedis redis,
final @Named(REDIS_PASSWORD) Optional<String> redisPassword, final Charset encoding); void add(final String sentence); void delete(final String sentence); Set<String> findAll(); String getRhyme(final String sentence); } | @Test public void testFindAll() throws IOException { assertEquals(store.findAll().size(), 2); } |
RedisStore { public String getRhyme(final String sentence) throws IOException { String lastWord = WordUtils.getLastWord(sentence); String rhymepart = wordParser.phoneticRhymePart(lastWord); StressType type = wordParser.stressType(lastWord); LOGGER.debug("Finding rhymes for {}", sentence); Set<String> rhymes = Sets.newHashSet(); connect(); try { rhymes.addAll(search(rhymepart, type)); } finally { disconnect(); } if (rhymes.isEmpty()) { return null; } else { List<String> rhymeList = new ArrayList<String>(rhymes); Random random = new Random(System.currentTimeMillis()); int index = random.nextInt(rhymeList.size()); return rhymeList.get(index); } } @Inject RedisStore(@Named("sentence") final Keymaker sentencens,
@Named("index") final Keymaker indexns, final WordParser wordParser, final Jedis redis,
final @Named(REDIS_PASSWORD) Optional<String> redisPassword, final Charset encoding); void add(final String sentence); void delete(final String sentence); Set<String> findAll(); String getRhyme(final String sentence); } | @Test public void testGetRhyme() throws IOException { assertNull(store.getRhyme("no hay rima")); assertEquals(store.getRhyme("¿Hay algo que rime con tres?"), "Me escondo y no me ves"); assertEquals(store.getRhyme("Nada rima con dos"), "Ya son veintidós!!"); } |
BatchScheduler { public static SchedulerDecisions schedule(List<ActiveJob> activeJobsAllSites, PartitionStatistics partitionStatistics, int userQuotaOde, int userQuotaPde, VCellServerID systemID) { Hashtable<User, UserQuotaInfo> userQuotaInfoMap = new Hashtable<User, UserQuotaInfo>(); int numPendingJobsAllSites = 0; for (ActiveJob activeJob : activeJobsAllSites) { UserQuotaInfo userQuotaInfo = userQuotaInfoMap.get(activeJob.simulationOwner); if (userQuotaInfo==null) { userQuotaInfo = new UserQuotaInfo(activeJob.simulationOwner); userQuotaInfoMap.put(activeJob.simulationOwner, userQuotaInfo); } if (!activeJob.schedulerStatus.isActive()) { continue; } if (activeJob.schedulerStatus.isWaiting()) { continue; } if (activeJob.schedulerStatus.isDispatched() || activeJob.schedulerStatus.isQueued()) { numPendingJobsAllSites++; } if(activeJob.isPDE) { userQuotaInfo.numPdeRunningJobsAllSites++; } else { userQuotaInfo.numOdeRunningJobsAllSites++; } } SchedulerDecisions schedulerDecisions = new SchedulerDecisions(activeJobsAllSites); ArrayList<ActiveJob> prioritizedJobList = new ArrayList<ActiveJob>(activeJobsAllSites); for (ActiveJob activeJob : activeJobsAllSites) { if (!activeJob.schedulerStatus.isActive()) { schedulerDecisions.setInactive(activeJob); prioritizedJobList.remove(activeJob); continue; } if (!activeJob.schedulerStatus.isWaiting()) { schedulerDecisions.setAlreadyRunningOrQueued(activeJob); prioritizedJobList.remove(activeJob); continue; } } Collections.sort(prioritizedJobList,new Comparator<ActiveJob>(){ @Override public int compare(ActiveJob o1, ActiveJob o2) { UserQuotaInfo userQuotaInfo1 = userQuotaInfoMap.get(o1.simulationOwner); UserQuotaInfo userQuotaInfo2 = userQuotaInfoMap.get(o2.simulationOwner); if (userQuotaInfo1.getNumRunningJobs() != userQuotaInfo2.getNumRunningJobs()){ return Integer.compare(userQuotaInfo1.getNumRunningJobs(), userQuotaInfo2.getNumRunningJobs()); } if (o1.isPDE != o2.isPDE){ if (o1.isPDE){ return 1; }else{ return -1; } } return 
Long.compare(o1.submitTimestamp, o2.submitTimestamp); } }); int index=0; for (ActiveJob activeJob : prioritizedJobList) { schedulerDecisions.setOrdinal(activeJob, index); index++; } HashSet<User> users = new HashSet<User>(); users.addAll(userQuotaInfoMap.keySet()); for (User user : users){ UserQuotaInfo userQuotaInfo = userQuotaInfoMap.get(user); int numDesiredRunningPDEsAllSites = userQuotaInfo.numPdeRunningJobsAllSites; int numDesiredRunningODEsAllSites = userQuotaInfo.numOdeRunningJobsAllSites; Iterator<ActiveJob> prioritizedJobIter = prioritizedJobList.iterator(); while (prioritizedJobIter.hasNext()){ ActiveJob waitingJob = prioritizedJobIter.next(); if (waitingJob.simulationOwner.equals(user)){ if (waitingJob.isPDE){ if (numDesiredRunningPDEsAllSites < userQuotaPde){ numDesiredRunningPDEsAllSites++; }else{ schedulerDecisions.setHeldUserQuotaPDE(waitingJob); prioritizedJobIter.remove(); } }else{ if (numDesiredRunningODEsAllSites < userQuotaOde){ numDesiredRunningODEsAllSites++; }else{ schedulerDecisions.setHeldUserQuotaODE(waitingJob); prioritizedJobIter.remove(); } } } } } int inUseCPUs = partitionStatistics.numCpusAllocated; int cpusAvailable = Math.max(0, partitionStatistics.numCpusTotal - inUseCPUs); int numJobsSlotsAvailable = Math.max(0, cpusAvailable - numPendingJobsAllSites); for (int i=0;i<prioritizedJobList.size();i++) { ActiveJob nextWaitingJob = prioritizedJobList.get(i); if (i<numJobsSlotsAvailable) { if (nextWaitingJob.serverId.equals(systemID)){ schedulerDecisions.setRunnableThisSite(nextWaitingJob); }else { schedulerDecisions.setRunnableOtherSite(nextWaitingJob); } }else { schedulerDecisions.setHeldClusterResources(nextWaitingJob); } } schedulerDecisions.verify(partitionStatistics); return schedulerDecisions; } BatchScheduler(); static final int getMaxOdeJobsPerUser(); static final int getMaxPdeJobsPerUser(); static SchedulerDecisions schedule(List<ActiveJob> activeJobsAllSites, PartitionStatistics partitionStatistics, int userQuotaOde, int 
userQuotaPde, VCellServerID systemID); } | @Test public void test() { ArrayList<BatchScheduler.ActiveJob> activeJobs = new ArrayList<BatchScheduler.ActiveJob>(); activeJobs.addAll(Arrays.asList(job1,job2,job3,job4,job5,job6,job7,job8,job9,job10)); int numCpusAllocated = 5; int numCpusTotal = 10; double load = 0.4; PartitionStatistics partitionStatistics = new PartitionStatistics(numCpusAllocated, numCpusTotal, load); int userQuotaOde = 2; int userQuotaPde = 2; VCellServerID systemID = relSite; SchedulerDecisions schedulerDecisions = BatchScheduler.schedule(activeJobs, partitionStatistics, userQuotaOde, userQuotaPde, systemID); schedulerDecisions.show(); } |
DimensionalIndex extends ISize { public int rollup(int xValue, int yValue, int zValue) { check(xValue,super.getX(),'x'); check(yValue,super.getY(),'y'); check(zValue,super.getZ(),'z'); return xValue + yValue * getX( ) + zValue * xy; } DimensionalIndex(int newX, int newY, int newZ); DimensionalIndex(String newX, String newY, String newZ); DimensionalIndex(ISize source); int rollup(int xValue, int yValue, int zValue); } | @Test public void validateIndexes( ) { boolean punched[] = new boolean[index.getXYZ()]; for (int x = 0; x < mX; x++) for (int y = 0; y < mY; y++) for (int z = 0; z < mZ; z++) { final int idx = index.rollup(x,y,z); assertFalse(punched[idx]); punched[idx] = true; } for (int p = 0; p < punched.length; ++p) { assertTrue(punched[p]); } }
@Test(expected = RuntimeException.class) public void badX( ) { index.rollup(mX, 0,0); }
@Test(expected = RuntimeException.class) public void badY( ) { index.rollup(0, mY,0); }
@Test(expected = RuntimeException.class) public void badZ( ) { index.rollup(0,0,mZ); }
@Test public void badZmsg( ) { try { index.rollup(0,0,mZ); Assert.fail("should have thrown an exception"); } catch (RuntimeException re) { } } |
EventRateLimiter { public boolean isOkayToFireEventNow(){ long totalElapsedTimeNow = timeRightNow()-startTime; int currentTimeRegimeIndex = 0; for (int i=0; i<intervals.length; i++) { currentTimeRegimeIndex = i; if (totalElapsedTimeNow < intervals[i][TIME_INTERVAL_REGIME]){ break; } } if (timeSinceLastApprovedEvent() < intervals[currentTimeRegimeIndex][TIME_INTERVAL_PER_REGIME]){ return false; } else { timeOfLastEvent = timeRightNow(); return true; } } EventRateLimiter(long[][] specifiedIntervals); EventRateLimiter(); boolean isOkayToFireEventNow(); static void main(String[] args); } | @Test public void test() { try { EventRateLimiter eventRateLimiter = new EventRateLimiter(); long testStartTime = System.currentTimeMillis(); System.out.println("Starting at " + testStartTime); int approvedEventCount = 0; while (System.currentTimeMillis() < testStartTime + 60000) { if (eventRateLimiter.isOkayToFireEventNow()) { approvedEventCount++; System.out.println("Now it is "+ System.currentTimeMillis()); } } if ((approvedEventCount > 67) && (approvedEventCount < 73)) { System.out.println(String.valueOf(approvedEventCount)+ " events approved in one minute"); } else { System.out.println(String.valueOf(approvedEventCount)+" events approved in one minute is out of expected bounds"); fail(String.valueOf(approvedEventCount)+" events approved in one minute is out of expected bounds"); } } catch (Exception e) { fail("Exception occured: "+e.getMessage()); } } |
PropertyLoader { public static File getSystemTemporaryDirectory( ) throws IOException { if (systemTemporaryDirectory != null) { return systemTemporaryDirectory; } File query = File.createTempFile("PropertyLoaderQuery",null); systemTemporaryDirectory = query.getParentFile(); query.delete(); return systemTemporaryDirectory; } PropertyLoader(); static void sendErrorsToMongo( ); static File getSystemTemporaryDirectory( ); final static boolean getBooleanProperty(String propertyName, boolean defaultValue); final static int getIntProperty(String propertyName, int defaultValue); final static long getLongProperty(String propertyName, long defaultValue); final static String getProperty(String propertyName, String defaultValue); final static File getRequiredDirectory(String propertyName); final static File getOptionalDirectory(String propertyName); final static synchronized String getRequiredProperty(String propertyName); final static void loadProperties(String[] required); @Deprecated final static void loadProperties(boolean throwException, boolean validate); final static void loadProperties(); final static void show(); static String getSecretValue(String secretValueProperty, String secretFileProperty); static final String ADMINISTRATOR_ACCOUNT; static final String ADMINISTRATOR_ID; static final String propertyFileProperty; static final String vcellServerIDProperty; static final String simPerUserMemoryLimitFile; static final String primarySimDataDirInternalProperty; static final String secondarySimDataDirInternalProperty; static final String primarySimDataDirExternalProperty; static final String secondarySimDataDirExternalProperty; static final String simDataDirArchiveHost; static final String PARALLEL_DATA_DIR_EXTERNAL; static final String jobMemoryOverheadMB; static final String htcBatchSystemQueue; static final String htcLogDirExternal; static final String htcLogDirInternal; static final String htcUser; static final String htcPbsHome; static final String htcSgeHome; 
static final String htcNodeList; static final String slurm_cmd_sbatch; static final String slurm_cmd_scancel; static final String slurm_cmd_sacct; static final String slurm_cmd_squeue; static final String slurm_cmd_scontrol; static final String slurm_cmd_sinfo; static final String slurm_partition; static final String slurm_reservation; static final String slurm_partition_pu; static final String slurm_reservation_pu; static final String slurm_tmpdir; static final String slurm_local_singularity_dir; static final String slurm_central_singularity_dir; static final String sgeModulePath; static final String pbsModulePath; static final String MPI_HOME_INTERNAL; static final String MPI_HOME_EXTERNAL; static final String nativeSolverDir_External; static final String comsolRootDir; static final String comsolJarDir; static final String vcellServerHost; static final String pythonExe; static final String vcellapiKeystoreFile; static final String vcellapiKeystorePswd; static final String vcellapiKeystorePswdFile; static final String bioformatsJarFileName; static final String bioformatsJarDownloadURL; static final String COPASI_WEB_URL; static final String SMOLDYN_WEB_URL; static final String BIONETGEN_WEB_URL; static final String NFSIM_WEB_URL; static final String ACKNOWLEGE_PUB__WEB_URL; static final String VCELL_URL; static final String VC_BNG_INDEX_URL; static final String VC_BNG_FAQ_URL; static final String VC_BNG_TUTORIAL_URL; static final String VC_BNG_SAMPLES_URL; static final String VC_SUPPORT_URL; static final String VC_GOOGLE_DISCUSS_URL; static final String VC_TUT_PERMISSION_URL; static final String BMDB_URL; static final String CONTINUUM_URL; static final String DOI_URL; static final String BMDB_DOWNLOAD_URL; static final String PATHWAY_QUERY_URL; static final String PATHWAY_WEB_DO_URL; static final String SABIO_SRCH_KINETIC_URL; static final String SABIO_DIRECT_IFRAME_URL; static final String COPASI_TIKI_URL; static final String BIONUMBERS_SRCH1_URL; static final 
String BIONUMBERS_SRCH2_URL; static final String SIGNALLING_QUERY_URL; static final String BIOPAX_RSABIO12_URL; static final String BIOPAX_RSABIO11452_URL; static final String BIOPAX_RSABIO65_URL; static final String BIOPAX_RKEGGR01026_URL; static final String COMSOL_URL; static final String databaseThreadsProperty; static final String exportdataThreadsProperty; static final String simdataThreadsProperty; static final String htcworkerThreadsProperty; static final String databaseCacheSizeProperty; static final String simdataCacheSizeProperty; static final String exportBaseURLProperty; static final String exportBaseDirInternalProperty; static final String exportMaxInMemoryLimit; static final String dbDriverName; static final String dbConnectURL; static final String dbUserid; static final String dbPasswordValue; static final String dbPasswordFile; static final String jmsIntHostInternal; static final String jmsIntPortInternal; static final String jmsSimHostInternal; static final String jmsSimPortInternal; static final String jmsSimRestPortInternal; static final String jmsSimHostExternal; static final String jmsSimPortExternal; static final String jmsSimRestPortExternal; static final String jmsUser; static final String jmsPasswordValue; static final String jmsPasswordFile; static final String jmsRestPasswordFile; static final String jmsSimReqQueue; static final String jmsDataRequestQueue; static final String jmsDbRequestQueue; static final String jmsSimJobQueue; static final String jmsWorkerEventQueue; static final String jmsServiceControlTopic; static final String jmsDaemonControlTopic; static final String jmsClientStatusTopic; static final String jmsBlobMessageMinSize; static final String jmsBlobMessageTempDir; static final String jmsBlobMessageUseMongo; static final String vcellClientTimeoutMS; static final String maxOdeJobsPerUser; static final String maxPdeJobsPerUser; static final String maxJobsPerScan; static final String vcellSoftwareVersion; static final String 
vcellThirdPartyLicense; static final String vcellPublicationHost; static final String vcellSMTPHostName; static final String vcellSMTPPort; static final String vcellSMTPEmailAddress; static final String vcellbatch_docker_name; static final String vcellbatch_singularity_image; static final String javaSimulationExecutable; static final String simulationPreprocessor; static final String simulationPostprocessor; final static String mongodbHostInternal; final static String mongodbPortInternal; final static String mongodbHostExternal; final static String mongodbPortExternal; final static String mongodbDatabase; final static String mongodbLoggingCollection; final static String mongodbThreadSleepMS; static final String amplistorVCellServiceURL; static final String amplistorVCellServiceUser; static final String amplistorVCellServicePassword; static final String installationRoot; static final String vcellDownloadDir; static final String autoflushStandardOutAndErr; static final String suppressQStatStandardOutLogging; static final String nagiosMonitorPort; static final String imageJVcellPluginURL; static final String NATIVE_LIB_DIR; static final String USE_CURRENT_WORKING_DIRECTORY; } | @Test public void testSystemTempDir( ) throws IOException { File tempDir = PropertyLoader.getSystemTemporaryDirectory(); System.out.println(tempDir); File secondCall = PropertyLoader.getSystemTemporaryDirectory(); assertTrue(tempDir == secondCall); } |
DataSetControllerImpl implements SimDataConstants,DataJobListenerHolder { public DataOperationResults doDataOperation(DataOperation dataOperation) throws DataAccessException{ VCDataJobID vcDataJobID = null; try{ if(dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP){ vcDataJobID = ((DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation).getTimeSeriesJobSpec().getVcDataJobID(); } if(!(getVCData(dataOperation.getVCDataIdentifier()) instanceof SimulationData)){ return null; } File dataProcessingOutputFileHDF5 = ((SimulationData)getVCData(dataOperation.getVCDataIdentifier())).getDataProcessingOutputSourceFileHDF5(); DataOperationResults dataOperationResults = getDataProcessingOutput(dataOperation,dataProcessingOutputFileHDF5); if(vcDataJobID != null){ fireDataJobEventIfNecessary(vcDataJobID,MessageEvent.DATA_COMPLETE, dataOperation.getVCDataIdentifier(), new Double(0), ((DataOperationResults.DataProcessingOutputTimeSeriesValues)dataOperationResults).getTimeSeriesJobResults(),null); } return dataOperationResults; }catch(Exception e){ if(vcDataJobID != null){ fireDataJobEventIfNecessary(vcDataJobID,MessageEvent.DATA_FAILURE, dataOperation.getVCDataIdentifier(), new Double(0), null,e); } if(e instanceof DataAccessException){ throw (DataAccessException)e; }else{ throw new DataAccessException("Datasetcontrollerimpl.doDataOperation error: "+e.getMessage(),e); } } } DataSetControllerImpl(Cachetable aCacheTable, File primaryDir, File secondDir); void addDataJobListener(DataJobListener newListener); DataOperationResults doDataOperation(DataOperation dataOperation); static DataOperationResults getDataProcessingOutput(DataOperation dataOperation,File dataProcessingOutputFileHDF5); FieldDataFileOperationResults fieldDataFileOperation(FieldDataFileOperationSpec fieldDataFileOperationSpec); DataIdentifier[] getDataIdentifiers(OutputContext outputContext, VCDataIdentifier vcdID); double[] getDataSetTimes(VCDataIdentifier vcdID); AnnotatedFunction 
getFunction(OutputContext outputContext,VCDataIdentifier vcdID,String variableName); AnnotatedFunction[] getFunctions(OutputContext outputContext,VCDataIdentifier vcdID); PlotData getLineScan(OutputContext outputContext, VCDataIdentifier vcdID, String varName, double time, SpatialSelection spatialSelection); CartesianMesh getMesh(VCDataIdentifier vcdID); ODEDataBlock getODEDataBlock(VCDataIdentifier vcdID); ParticleDataBlock getParticleDataBlock(VCDataIdentifier vcdID, double time); boolean getParticleDataExists(VCDataIdentifier vcdID); SimDataBlock getSimDataBlock(OutputContext outputContext, VCDataIdentifier vcdID, String varName, double time); TimeSeriesJobResults getTimeSeriesValues(OutputContext outputContext,final VCDataIdentifier vcdID,final TimeSeriesJobSpec timeSeriesJobSpec); VCData getVCData(VCDataIdentifier vcdID); void removeDataJobListener(DataJobListener djListener); void setAllowOptimizedTimeDataRetrieval(boolean bAllowOptimizedTimeDataRetrieval); boolean isAllowOptimizedTimeDataRetrieval(); void writeFieldFunctionData(
OutputContext outputContext,
FieldDataIdentifierSpec[] argFieldDataIDSpecs,
boolean[] bResampleFlags,
CartesianMesh newMesh,
SimResampleInfoProvider simResampleInfoProvider,
int simResampleMembraneDataLength,
int handleExistingResampleMode); DataSetMetadata getDataSetMetadata(VCDataIdentifier vcdataID); DataSetTimeSeries getDataSetTimeSeries(VCDataIdentifier vcdataID, String[] variableNames); boolean getIsChombo(VCDataIdentifier vcdataID); boolean getIsMovingBoundary(VCDataIdentifier vcdataID); boolean getIsComsol(VCDataIdentifier vcdataID); ChomboFiles getChomboFiles(VCDataIdentifier vcdataID); VCellSimFiles getVCellSimFiles(VCDataIdentifier vcdataID); MovingBoundarySimFiles getMovingBoundarySimFiles(VCDataIdentifier vcdataID); ComsolSimFiles getComsolSimFiles(VCDataIdentifier vcdataID); VtuFileContainer getEmptyVtuMeshFiles(ComsolSimFiles comsolSimFiles, VCDataIdentifier vcdataID, int timeIndex); VtuFileContainer getEmptyVtuMeshFiles(ChomboFiles chomboFiles, VCDataIdentifier vcdataID, int timeIndex); VtuFileContainer getEmptyVtuMeshFiles(VCellSimFiles vcellSimFiles, VCDataIdentifier vcdataID, int timeIndex); VtuFileContainer getEmptyVtuMeshFiles(MovingBoundarySimFiles movingBoundarySimFiles, VCDataIdentifier vcdataID, int timeIndex); double[] getVtuTimes(ComsolSimFiles comsolSimFiles, VCDataIdentifier vcdataID); double[] getVtuMeshData(ComsolSimFiles comsolSimFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); double[] getVtuMeshData(ChomboFiles chomboFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); double[] getVtuMeshData(MovingBoundarySimFiles movingBoundarySimFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); double[] getVtuMeshData(VCellSimFiles vcellFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); VtuVarInfo[] getVtuVarInfos(VCellSimFiles vcellFiles, OutputContext outputContext, VCDataIdentifier vcdataID); VtuVarInfo[] getVtuVarInfos(ChomboFiles chomboFiles, OutputContext outputContext, VCDataIdentifier vcdataID); VtuVarInfo[] getVtuVarInfos(MovingBoundarySimFiles movingBoundaryFiles, 
OutputContext outputContext, VCDataIdentifier vcdataID); VtuVarInfo[] getVtuVarInfos(ComsolSimFiles comsolFiles, OutputContext outputContext, VCDataIdentifier vcdataID); NFSimMolecularConfigurations getNFSimMolecularConfigurations(VCDataIdentifier vcdID); static final Logger lg; } | @Test public void testDoDataOperation() throws DataAccessException { boolean bIncludeStartValuesInfo = true; DataOperation dataOperation = new DataOperation.DataProcessingOutputInfoOP(vcDataIdentifier,bIncludeStartValuesInfo, outputContext); DataProcessingOutputInfo results = (DataProcessingOutputInfo)dsc.doDataOperation(dataOperation); String[] varNames = results.getVariableNames(); String[] expectedVarNames = { "C_cyt_average", "C_cyt_total", "C_cyt_min", "C_cyt_max", "Ran_cyt_average", "Ran_cyt_total", "Ran_cyt_min", "Ran_cyt_max", "RanC_cyt_average", "RanC_cyt_total", "RanC_cyt_min", "RanC_cyt_max", "RanC_nuc_average", "RanC_nuc_total", "RanC_nuc_min", "RanC_nuc_max", "s2_average", "s2_total", "s2_min", "s2_max"}; String[] expectedUnits = { "uM", "molecules", "uM", "uM", "uM", "molecules", "uM", "uM", "uM", "molecules", "uM", "uM", "uM", "molecules", "uM", "uM", "molecules.um-2", "molecules", "uM", "uM"}; double[] expectedTimePoints = {0.0, 0.5, 1.0}; double[] expectedStatistics_RanC_cyt_max = {8.9, 3.5890337679723476, 3.057119332620108}; Assert.assertArrayEquals(varNames,expectedVarNames); Assert.assertArrayEquals(expectedTimePoints, results.getVariableTimePoints(), 1e-8); for (int i = 0; i<expectedVarNames.length; i++){ Assert.assertEquals(results.getVariableUnits(expectedVarNames[i]), expectedUnits[i]); } Assert.assertArrayEquals(expectedStatistics_RanC_cyt_max, results.getVariableStatValues().get("RanC_cyt_max"), 1e-8); } |
DataSetControllerImpl implements SimDataConstants,DataJobListenerHolder { public DataIdentifier[] getDataIdentifiers(OutputContext outputContext, VCDataIdentifier vcdID) throws DataAccessException, IOException, FileNotFoundException { if (lg.isTraceEnabled()) lg.trace("DataSetControllerImpl.getDataIdentifiers("+vcdID.getID()+")"); VCData simData = getVCData(vcdID); DataIdentifier[] dataIdentifiersIncludingOutsideAndInside = simData.getVarAndFunctionDataIdentifiers(outputContext); Vector<DataIdentifier> v = new Vector<DataIdentifier>(); for (int i = 0; i < dataIdentifiersIncludingOutsideAndInside.length; i++){ DataIdentifier di = dataIdentifiersIncludingOutsideAndInside[i]; if (!di.getName().endsWith(InsideVariable.INSIDE_VARIABLE_SUFFIX) && !di.getName().endsWith(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX)) { if (di.getVariableType() == null || di.getVariableType().equals(VariableType.UNKNOWN)) { if (di.isFunction()) { AnnotatedFunction f = getFunction(outputContext,vcdID,di.getName()); VariableType varType = getVariableTypeForFieldFunction(outputContext,vcdID, f); di = new DataIdentifier(di.getName(), varType, di.getDomain(), di.isFunction(), f.getDisplayName()); } } v.addElement(di); } } DataIdentifier[] ids = new DataIdentifier[v.size()]; v.copyInto(ids); return ids; } DataSetControllerImpl(Cachetable aCacheTable, File primaryDir, File secondDir); void addDataJobListener(DataJobListener newListener); DataOperationResults doDataOperation(DataOperation dataOperation); static DataOperationResults getDataProcessingOutput(DataOperation dataOperation,File dataProcessingOutputFileHDF5); FieldDataFileOperationResults fieldDataFileOperation(FieldDataFileOperationSpec fieldDataFileOperationSpec); DataIdentifier[] getDataIdentifiers(OutputContext outputContext, VCDataIdentifier vcdID); double[] getDataSetTimes(VCDataIdentifier vcdID); AnnotatedFunction getFunction(OutputContext outputContext,VCDataIdentifier vcdID,String variableName); AnnotatedFunction[] 
getFunctions(OutputContext outputContext,VCDataIdentifier vcdID); PlotData getLineScan(OutputContext outputContext, VCDataIdentifier vcdID, String varName, double time, SpatialSelection spatialSelection); CartesianMesh getMesh(VCDataIdentifier vcdID); ODEDataBlock getODEDataBlock(VCDataIdentifier vcdID); ParticleDataBlock getParticleDataBlock(VCDataIdentifier vcdID, double time); boolean getParticleDataExists(VCDataIdentifier vcdID); SimDataBlock getSimDataBlock(OutputContext outputContext, VCDataIdentifier vcdID, String varName, double time); TimeSeriesJobResults getTimeSeriesValues(OutputContext outputContext,final VCDataIdentifier vcdID,final TimeSeriesJobSpec timeSeriesJobSpec); VCData getVCData(VCDataIdentifier vcdID); void removeDataJobListener(DataJobListener djListener); void setAllowOptimizedTimeDataRetrieval(boolean bAllowOptimizedTimeDataRetrieval); boolean isAllowOptimizedTimeDataRetrieval(); void writeFieldFunctionData(
OutputContext outputContext,
FieldDataIdentifierSpec[] argFieldDataIDSpecs,
boolean[] bResampleFlags,
CartesianMesh newMesh,
SimResampleInfoProvider simResampleInfoProvider,
int simResampleMembraneDataLength,
int handleExistingResampleMode); DataSetMetadata getDataSetMetadata(VCDataIdentifier vcdataID); DataSetTimeSeries getDataSetTimeSeries(VCDataIdentifier vcdataID, String[] variableNames); boolean getIsChombo(VCDataIdentifier vcdataID); boolean getIsMovingBoundary(VCDataIdentifier vcdataID); boolean getIsComsol(VCDataIdentifier vcdataID); ChomboFiles getChomboFiles(VCDataIdentifier vcdataID); VCellSimFiles getVCellSimFiles(VCDataIdentifier vcdataID); MovingBoundarySimFiles getMovingBoundarySimFiles(VCDataIdentifier vcdataID); ComsolSimFiles getComsolSimFiles(VCDataIdentifier vcdataID); VtuFileContainer getEmptyVtuMeshFiles(ComsolSimFiles comsolSimFiles, VCDataIdentifier vcdataID, int timeIndex); VtuFileContainer getEmptyVtuMeshFiles(ChomboFiles chomboFiles, VCDataIdentifier vcdataID, int timeIndex); VtuFileContainer getEmptyVtuMeshFiles(VCellSimFiles vcellSimFiles, VCDataIdentifier vcdataID, int timeIndex); VtuFileContainer getEmptyVtuMeshFiles(MovingBoundarySimFiles movingBoundarySimFiles, VCDataIdentifier vcdataID, int timeIndex); double[] getVtuTimes(ComsolSimFiles comsolSimFiles, VCDataIdentifier vcdataID); double[] getVtuMeshData(ComsolSimFiles comsolSimFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); double[] getVtuMeshData(ChomboFiles chomboFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); double[] getVtuMeshData(MovingBoundarySimFiles movingBoundarySimFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); double[] getVtuMeshData(VCellSimFiles vcellFiles, OutputContext outputContext, VCDataIdentifier vcdataID, VtuVarInfo var, double time); VtuVarInfo[] getVtuVarInfos(VCellSimFiles vcellFiles, OutputContext outputContext, VCDataIdentifier vcdataID); VtuVarInfo[] getVtuVarInfos(ChomboFiles chomboFiles, OutputContext outputContext, VCDataIdentifier vcdataID); VtuVarInfo[] getVtuVarInfos(MovingBoundarySimFiles movingBoundaryFiles, 
OutputContext outputContext, VCDataIdentifier vcdataID); VtuVarInfo[] getVtuVarInfos(ComsolSimFiles comsolFiles, OutputContext outputContext, VCDataIdentifier vcdataID); NFSimMolecularConfigurations getNFSimMolecularConfigurations(VCDataIdentifier vcdID); static final Logger lg; } | @Test public void testGetDataIdentifiers() throws FileNotFoundException, DataAccessException, IOException { DataIdentifier[] dataIdentifiers = dsc.getDataIdentifiers(outputContext, vcDataIdentifier); String[] varNames = Arrays.asList(dataIdentifiers) .stream() .map (a -> a.getName()) .collect(Collectors.toList()).toArray(new String[0]); String[] expectedVarNames = { "C_cyt","Ran_cyt","RanC_cyt","RanC_nuc","s2", "vcRegionVolume","vcRegionArea","vcRegionVolume_subdomain1","vcRegionVolume_subdomain0","J_flux0", "J_r0","KFlux_nm_cyt","KFlux_nm_nuc","RanC_cyt_init_uM","Size_cyt", "Size_EC","Size_nm","Size_nuc","Size_pm","sobj_subdomain11_subdomain00_size", "vobj_subdomain00_size","vobj_subdomain11_size" }; Assert.assertArrayEquals(expectedVarNames, varNames); } |
SimulationData extends VCData { public static String createCanonicalSmoldynOutputFileName(KeyValue fieldDataKey,int jobIndex,int timeIndex){ if (timeIndex > 0) { String rval = createSimIDWithJobIndex(fieldDataKey,jobIndex,false) + String.format("_%03d",timeIndex) + SimDataConstants.SMOLDYN_OUTPUT_FILE_EXTENSION; return rval; } throw new IllegalArgumentException("smoldyn output index must be > 0"); } SimulationData(VCDataIdentifier argVCDataID, File primaryUserDir, File secondaryUserDir,SimDataAmplistorInfo simDataAmplistorInfo); static VCDataIdentifier createScanFriendlyVCDataID(VCDataIdentifier inVCDID); AnnotatedFunction simplifyFunction(AnnotatedFunction function); synchronized long getDataBlockTimeStamp(int dataType, double timepoint); synchronized double[] getDataTimesPostProcess(OutputContext outputContext); synchronized double[] getDataTimes(); SymbolTableEntry getEntry(String identifier); AnnotatedFunction getFunction(OutputContext outputContext,String identifier); AnnotatedFunction[] getFunctions(OutputContext outputContext); synchronized boolean getIsODEData(); File getLogFilePrivate(); synchronized CartesianMesh getPostProcessingMesh(String varName,OutputContext outputContext); synchronized CartesianMesh getMesh(); synchronized ODEDataBlock getODEDataBlock(); synchronized ParticleDataBlock getParticleDataBlock(double time); synchronized boolean getParticleDataExists(); synchronized VCDataIdentifier getResultsInfoObject(); synchronized SimDataBlock getSimDataBlock(OutputContext outputContext, String varName, double time); synchronized long getSizeInBytes(); synchronized DataIdentifier[] getVarAndFunctionDataIdentifiers(OutputContext outputContext); boolean isChombo(); @Override boolean isMovingBoundary(); @Override boolean isComsol(); synchronized void removeAllResults(); static String createCanonicalSimFilePathName(KeyValue fieldDataKey,int timeIndex,int jobIndex,boolean isOldStyle); static String createCanonicalSimTaskXMLFilePathName(KeyValue 
fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalFieldDataLogFileName(KeyValue fieldDataKey); static String createCanonicalFieldFunctionSyntax(String externalDataIdentifierName,String varName,double beginTime,String extDataIdVariableTypeName); static String createCanonicalSimZipFileName(KeyValue fieldDataKey,Integer zipIndex,int jobIndex,boolean isOldStyle,boolean bHDF5); static String createCanonicalSimLogFileName(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalSmoldynOutputFileName(KeyValue fieldDataKey,int jobIndex,int timeIndex); static String createCanonicalPostProcessFileName(VCDataIdentifier vcdID); static String createCanonicalNFSimOutputFileName(KeyValue fieldDataKey,int jobIndex); static String createCanonicalMeshFileName(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalMeshMetricsFileName(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalMovingBoundaryOutputFileName(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalComsolOutputFileName(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalFunctionsFileName(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalSubdomainFileName(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); static String createCanonicalResampleFileName(SimResampleInfoProvider simResampleInfoProvider,FieldFunctionArguments fieldFuncArgs); static String createSimIDWithJobIndex(KeyValue fieldDataKey,int jobIndex,boolean isOldStyle); void getEntries(Map<String, SymbolTableEntry> entryMap); boolean isPostProcessing(OutputContext outputContext,String varName); File getDataProcessingOutputSourceFileHDF5(); File getFieldDataFile(SimResampleInfoProvider simResampleInfoProvider,FieldFunctionArguments fieldFunctionArguments); File getMeshFile(boolean bHDF5); File getFunctionsFile(boolean bFirst); File 
getZipFile(boolean bHDF5,Integer zipIndex); File getSubdomainFile(); File getLogFile(); File getSmoldynOutputFile(int timeIndex); @Override ChomboFiles getChomboFiles(); @Override VCellSimFiles getVCellSimFiles(); @Override MovingBoundarySimFiles getMovingBoundarySimFiles(); @Override ComsolSimFiles getComsolSimFiles(); VCDataIdentifier getVcDataId(); synchronized SimDataBlock getChomboExtrapolatedValues(String varName, double time); static final VariableType getVariableTypeFromLength(CartesianMesh mesh, int dataLength); static String getDefaultFieldDataFileNameForSimulation(FieldFunctionArguments fieldFuncArgs); } | @Test(expected = IllegalArgumentException.class) public void noZero( ) { KeyValue kv = new KeyValue("8675904"); String now = SimulationData.createCanonicalSmoldynOutputFileName(kv, 0, 0); System.out.println("never see " + now); } |
HtcProxy { public static SimTaskInfo getSimTaskInfoFromSimJobName(String simJobName) throws HtcException{ StringTokenizer tokens = new StringTokenizer(simJobName,"_"); String PREFIX = null; if (tokens.hasMoreTokens()){ PREFIX = tokens.nextToken(); } String SITE = null; if (tokens.hasMoreTokens()){ SITE = tokens.nextToken(); } String simIdString = null; if (tokens.hasMoreTokens()){ simIdString = tokens.nextToken(); } String jobIndexString = null; if (tokens.hasMoreTokens()){ jobIndexString = tokens.nextToken(); } String taskIdString = null; if (tokens.hasMoreTokens()){ taskIdString = tokens.nextToken(); } if (PREFIX.equals("V") && SITE!=null && simIdString!=null && jobIndexString!=null && taskIdString!=null){ KeyValue simId = new KeyValue(simIdString); int jobIndex = Integer.valueOf(jobIndexString); int taskId = Integer.valueOf(taskIdString); return new SimTaskInfo(simId,jobIndex,taskId); }else{ throw new HtcException("simJobName : "+simJobName+" not in expected format for a simulation job name"); } } HtcProxy(CommandService commandService, String htcUser); static boolean isMyJob(HtcJobInfo htcJobInfo); abstract void killJobSafe(HtcJobInfo htcJobInfo); abstract void killJobUnsafe(HtcJobID htcJobId); abstract void killJobs(String htcJobSubstring); abstract Map<HtcJobInfo,HtcJobStatus> getJobStatus(List<HtcJobInfo> requestedHtcJobInfos); abstract HtcJobID submitJob(String jobName, File sub_file_internal, File sub_file_external, ExecutableCommand.Container commandSet,
int ncpus, double memSize, Collection<PortableCommand> postProcessingCommands, SimulationTask simTask,File primaryUserDirExternal); abstract HtcJobID submitOptimizationJob(String jobName, File sub_file_internal, File sub_file_external,File optProblemInput,File optProblemOutput); abstract HtcProxy cloneThreadsafe(); abstract Map<HtcJobInfo,HtcJobStatus> getRunningJobs(); abstract PartitionStatistics getPartitionStatistics(); final CommandService getCommandService(); final String getHtcUser(); static SimTaskInfo getSimTaskInfoFromSimJobName(String simJobName); static String createHtcSimJobName(SimTaskInfo simTaskInfo); static void writeUnixStyleTextFile(File file, String javaString); abstract String getSubmissionFileExtension(); static MemLimitResults getMemoryLimit(String vcellUserid,KeyValue simID,SolverDescription solverDescription,double estimatedMemSizeMB); static boolean isStochMultiTrial(SimulationTask simTask); static final Logger LG; final static String HTC_SIMULATION_JOB_NAME_PREFIX; static final boolean bDebugMemLimit; } | @Test public void test_getSimTaskInfoFromSimJobName() throws NumberFormatException, HtcException { System.setProperty(PropertyLoader.vcellServerIDProperty,"ALPHA"); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_ALPHA_115785823_0_0")); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_BETA_115785823_0_0")); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_REL_115785823_0_0")); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_TEST_115785823_0_0")); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_TEST2_115785823_0_0")); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), 
HtcProxy.getSimTaskInfoFromSimJobName("V_TEST3_115785823_0_0")); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_TEST4_115785823_0_0")); Assert.assertEquals(new SimTaskInfo(new KeyValue("115785823"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_JUNK_115785823_0_0")); Assert.assertNotEquals(new SimTaskInfo(new KeyValue("555"),0,0), HtcProxy.getSimTaskInfoFromSimJobName("V_ALPHA_115785823_0_0")); try { HtcProxy.getSimTaskInfoFromSimJobName("V_ALPHA_115785823_0"); Assert.fail("SimTaskInfo "+"V_ALPHA_115785823_0"+" should have failed"); }catch (HtcException e){ } try { HtcProxy.getSimTaskInfoFromSimJobName("ALPHA_115785823_0_0"); Assert.fail("SimTaskInfo "+"V_ALPHA_115785823_0"+" should have failed"); }catch (HtcException e){ } } |
User implements java.io.Serializable, Matchable, Immutable { public boolean isPublisher() { return Arrays.asList(publishers).contains(userName); } User(String userid, KeyValue key); static String createGuestErrorMessage(String theOffendingOp); static boolean isGuest(String checkThisName); boolean compareEqual(Matchable obj); boolean equals(Object obj); KeyValue getID(); String getName(); int hashCode(); boolean isPublisher(); boolean isTestAccount(); static boolean isTestAccount(String accountName); String toString(); static final String[] publishers; static final User tempUser; static final String VCELL_GUEST; } | @Test public void publisherTest( ) { User u = new User("schaff", testKey( )); Assert.assertTrue(u.isPublisher()); u = new User("fido", testKey( )); Assert.assertFalse(u.isPublisher()); } |
User implements java.io.Serializable, Matchable, Immutable { public boolean isTestAccount() { return isTestAccount(getName( )); } User(String userid, KeyValue key); static String createGuestErrorMessage(String theOffendingOp); static boolean isGuest(String checkThisName); boolean compareEqual(Matchable obj); boolean equals(Object obj); KeyValue getID(); String getName(); int hashCode(); boolean isPublisher(); boolean isTestAccount(); static boolean isTestAccount(String accountName); String toString(); static final String[] publishers; static final User tempUser; static final String VCELL_GUEST; } | @Test public void testAcctTest( ) { User u = new User("vcelltestaccount", testKey( )); Assert.assertTrue(u.isTestAccount()); u = new User("fido", testKey( )); Assert.assertFalse(u.isTestAccount()); } |
VCCollections { public static <T> boolean equal(Collection<T> a, Collection<T> b, Comparator<T> cmp) { return equal(a,b,cmp,null); } static boolean equal(Collection<T> a, Collection<T> b, Comparator<T> cmp); static boolean equal(Collection<T> a, Collection<T> b, Comparator<T> cmp , Collection<Delta<T> > diffs); } | @Test public <T> void ctest( ) { ArrayList<Delta<Integer>> dt = new ArrayList<VCCollections.Delta<Integer>>( ); b.addAll(a); assertTrue(VCCollections.equal(a, b, cmp, null)); assertTrue(VCCollections.equal(a, b, cmp, dt)); for (int i = 0; i < 10 ;i++) { Collections.shuffle(b); assertTrue(VCCollections.equal(a, b, cmp, null)); assertTrue(VCCollections.equal(a, b, cmp, dt)); } b.add(7); assertFalse(VCCollections.equal(a, b, cmp, null)); assertFalse(VCCollections.equal(a, b, cmp, dt)); a.add(8); assertFalse(VCCollections.equal(a, b, cmp, null)); assertFalse(VCCollections.equal(a, b, cmp, dt)); } |
CircularList extends AbstractCollection<E> { @Override public boolean add(E e) { storage.add(e); while (storage.size() > capacity()) { storage.pop(); } return true; } CircularList(int capacity); int capacity(); @Override boolean add(E e); @Override Iterator<E> iterator(); @Override int size(); } | @Test public void tryIt( ) { insert(7,10); validate(7,8,9,10); list.clear(); insert(1,23); validate(19,20,21,22,23); list.add(7); validate(20,21,22,23,7); } |
CachedDataBaseReferenceReader { public static CachedDataBaseReferenceReader getCachedReader( ) { CachedDataBaseReferenceReader r = dbReader.get(); if (r != null) { return r; } synchronized(dbReader) { r = dbReader.get(); if (r != null) { return r; } r = new CachedDataBaseReferenceReader(); dbReader = new SoftReference<CachedDataBaseReferenceReader>(r); } return r; } private CachedDataBaseReferenceReader( ); static CachedDataBaseReferenceReader getCachedReader( ); String getMoleculeDataBaseReference(String molId); String getChEBIName(String chebiId); String getGOTerm(String goId); String getMoleculeDataBaseReference(String db, String id); } | @Ignore @Test public void fetchAndConsume( ) { ReferenceQueue<CachedDataBaseReferenceReader> rq = new ReferenceQueue<CachedDataBaseReferenceReader>(); WeakReference<CachedDataBaseReferenceReader> weakR = new WeakReference<CachedDataBaseReferenceReader>( CachedDataBaseReferenceReader.getCachedReader(),rq); boolean outOfMem = false; ArrayList<int[]> pig = new ArrayList<int[]>( ); for (int size = 10;!outOfMem;size*=10) { try { assertFalse(weakR.isEnqueued()); pig.add(new int[size]); CachedDataBaseReferenceReader w = weakR.get( ); assertTrue(w == CachedDataBaseReferenceReader.getCachedReader()); } catch(OutOfMemoryError error) { assertTrue(weakR.isEnqueued()); assertTrue(weakR.get( ) == null); outOfMem = true; } } pig.clear(); CachedDataBaseReferenceReader dbReader = CachedDataBaseReferenceReader.getCachedReader(); assertTrue(dbReader != null); } |
HtcProxy { public static boolean isMyJob(HtcJobInfo htcJobInfo){ return htcJobInfo.getJobName().startsWith(jobNamePrefix()); } HtcProxy(CommandService commandService, String htcUser); static boolean isMyJob(HtcJobInfo htcJobInfo); abstract void killJobSafe(HtcJobInfo htcJobInfo); abstract void killJobUnsafe(HtcJobID htcJobId); abstract void killJobs(String htcJobSubstring); abstract Map<HtcJobInfo,HtcJobStatus> getJobStatus(List<HtcJobInfo> requestedHtcJobInfos); abstract HtcJobID submitJob(String jobName, File sub_file_internal, File sub_file_external, ExecutableCommand.Container commandSet,
int ncpus, double memSize, Collection<PortableCommand> postProcessingCommands, SimulationTask simTask,File primaryUserDirExternal); abstract HtcJobID submitOptimizationJob(String jobName, File sub_file_internal, File sub_file_external,File optProblemInput,File optProblemOutput); abstract HtcProxy cloneThreadsafe(); abstract Map<HtcJobInfo,HtcJobStatus> getRunningJobs(); abstract PartitionStatistics getPartitionStatistics(); final CommandService getCommandService(); final String getHtcUser(); static SimTaskInfo getSimTaskInfoFromSimJobName(String simJobName); static String createHtcSimJobName(SimTaskInfo simTaskInfo); static void writeUnixStyleTextFile(File file, String javaString); abstract String getSubmissionFileExtension(); static MemLimitResults getMemoryLimit(String vcellUserid,KeyValue simID,SolverDescription solverDescription,double estimatedMemSizeMB); static boolean isStochMultiTrial(SimulationTask simTask); static final Logger LG; final static String HTC_SIMULATION_JOB_NAME_PREFIX; static final boolean bDebugMemLimit; } | @Test public void test_isMyJob(){ System.setProperty(PropertyLoader.vcellServerIDProperty,"ALPHA"); Assert.assertTrue(HtcProxy.isMyJob(new HtcJobInfo(new HtcJobID("1200725", BatchSystemType.SLURM),"V_ALPHA_115785823_0_0"))); Assert.assertFalse(HtcProxy.isMyJob(new HtcJobInfo(new HtcJobID("1200725", BatchSystemType.SLURM),"V_BETA_115785823_0_0"))); System.setProperty(PropertyLoader.vcellServerIDProperty,"BETA"); Assert.assertTrue(HtcProxy.isMyJob(new HtcJobInfo(new HtcJobID("1200725", BatchSystemType.SLURM),"V_BETA_115785823_0_0"))); } |
UnitSymbol implements Serializable { public String getUnitSymbolAsInfix() { return rootNode.toInfix(); } UnitSymbol(String unitStr); String getUnitSymbolAsInfix(); String getUnitSymbolAsInfixWithoutFloatScale(); double getNumericScale(); String getUnitSymbol(); String getUnitSymbolUnicode(); String getUnitSymbolHtml(); } | @Test public void example( ) { Assert.assertEquals("uM*s^-1", new UnitSymbol("uM.s-1").getUnitSymbolAsInfix()); } |
VCellThreadChecker { public static void checkCpuIntensiveInvocation() { if (guiThreadChecker == null){ System.out.println("!!!!!!!!!!!!!! --VCellThreadChecker.setGUIThreadChecker() not set"); Thread.dumpStack(); }else if (guiThreadChecker.isEventDispatchThread() && cpuSuppressed.get() == 0) { System.out.println("!!!!!!!!!!!!!! --calling cpu intensive method from swing thread-----"); Thread.dumpStack(); } } static void setGUIThreadChecker(GUIThreadChecker argGuiThreadChecker); static void checkRemoteInvocation(); static void checkSwingInvocation(); static void checkCpuIntensiveInvocation(); } | @Test public void notSwingCheck( ) { VCellThreadChecker.checkCpuIntensiveInvocation(); checkQuiet( ); }
@Test public void swingCheckSupp( ) throws InvocationTargetException, InterruptedException { SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { try (VCellThreadChecker.SuppressIntensive si = new VCellThreadChecker.SuppressIntensive()) { subCall( ); VCellThreadChecker.checkCpuIntensiveInvocation(); } } }); checkQuiet( ); }
@Test(expected=IllegalStateException.class) public void swingCheckNotSupp( ) throws InvocationTargetException, InterruptedException { SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { try (VCellThreadChecker.SuppressIntensive si = new VCellThreadChecker.SuppressIntensive()) { } VCellThreadChecker.checkCpuIntensiveInvocation(); } }); checkQuiet( ); } |
VCellThreadChecker { public static void checkSwingInvocation() { if (guiThreadChecker == null){ System.out.println("!!!!!!!!!!!!!! --VCellThreadChecker.setGUIThreadChecker() not set"); Thread.dumpStack(); }else if (!guiThreadChecker.isEventDispatchThread()) { System.out.println("!!!!!!!!!!!!!! --calling swing from non-swing thread-----"); Thread.dumpStack(); } } static void setGUIThreadChecker(GUIThreadChecker argGuiThreadChecker); static void checkRemoteInvocation(); static void checkSwingInvocation(); static void checkCpuIntensiveInvocation(); } | @Test public void swingCheck( ) throws InvocationTargetException, InterruptedException { SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { VCellThreadChecker.checkSwingInvocation(); } }); checkQuiet( ); } |
GenericUtils { @SuppressWarnings("unchecked") public static <T> java.util.List<T> convert(java.util.List<?> list, Class<T> clzz) { for (Object o : list) { if (!clzz.isAssignableFrom(o.getClass()) ) { throw new RuntimeException("invalid list conversion, " + clzz.getName() + " list contains " + o.getClass()); } } return (List<T>) list; } @SuppressWarnings("unchecked") static java.util.List<T> convert(java.util.List<?> list, Class<T> clzz); } | @Test public void goodConvert( ) { ArrayList<Object> al = new ArrayList<Object>( ); for (int i = 0; i < 5; i++) { al.add(Integer.valueOf(i)); } List<Integer> asIntArray = GenericUtils.convert(al, Integer.class); for (int i = 0; i < 5; i++) { assertTrue(asIntArray.get(i) == i); } }
@Test(expected = RuntimeException.class) public void badConvert( ) { ArrayList<Object> al = new ArrayList<Object>( ); for (int i = 0; i < 5; i++) { al.add(Integer.valueOf(i)); } List<String> asIntArray = GenericUtils.convert(al, String.class); System.out.println(asIntArray); } |
StreamingResultSet implements Iterator<Map<String, Node>>, Closeable { public StreamingResultSet(BufferedReader in) throws IOException { this.in = in; tsvParser = new TsvParser(in); if ((currentTuple = tsvParser.getTuple()) == null) { hasNext = false; } } StreamingResultSet(BufferedReader in); StreamingResultSet(HttpURLConnection conn); @Override Map<String, Node> next(); String[] getResultVars(); @Override void close(); @Override boolean hasNext(); } | @Test public void testStreamingResultSet() throws IOException { BufferedReader bufferedReader = new BufferedReader(new StringReader("?s\t?p\t?o\n" + "<http: + "<http: + "<http: List<Map<String, Node>> result = new ArrayList<>(); try (StreamingResultSet resultSet = new StreamingResultSet(bufferedReader)) { assertArrayEquals(new String[] {"s", "p", "o"}, resultSet.getResultVars()); int rowsRead = 0; assertEquals(rowsRead, resultSet.getRowNumber()); while (resultSet.hasNext()) { Map<String, Node> rs = resultSet.next(); System.out.println(rs); result.add(rs); rowsRead++; assertEquals(rowsRead, resultSet.getRowNumber()); } } assertEquals(3, result.size()); } |
TsvParser { private static Optional<Node> parseNode(String nodeString) { try { return Optional.of(NodeFactoryExtra.parseNode(nodeString)); } catch (RiotException e) { log.error("Parsing of value '{}' failed: {}", nodeString, e.getMessage()); return Optional.empty(); } } TsvParser(BufferedReader in); Map<String, Node> getTuple(); } | @Test public void testResourceEnding() throws IOException { Map<String, Node> result = parseSingleTuple("?s\n" + "<http: assertEquals(NodeFactoryExtra.parseNode("<http: }
@Test public void testEmptyVariablesInResult() throws IOException { Map<String, Node> result = parseSingleTuple("?s\t?p\t?o\n" + "<http: assertEquals(NodeFactoryExtra.parseNode("<http: assertFalse(result.containsKey("p")); assertEquals(NodeFactoryExtra.parseNode("\"o2\""), result.get("o")); }
@Test public void testMultipleEmptyVariablesInResult() throws IOException { Map<String, Node> result; result = parseSingleTuple("?s\t?p\t?o\n" + "\t\t\"o2\"\n"); assertFalse(result.containsKey("s")); assertFalse(result.containsKey("p")); assertEquals(NodeFactoryExtra.parseNode("\"o2\""), result.get("o")); result = parseSingleTuple("?s\t?p\t?o\n" + "<http: assertEquals(NodeFactoryExtra.parseNode("<http: assertFalse(result.containsKey("p")); assertFalse(result.containsKey("o")); result = parseSingleTuple("?s\t?p\t?o\n" + "\t\t\t\n"); assertEquals(0, result.size()); }
@Test public void testNodeParser() { Node parseNode = NodeFactoryExtra.parseNode("\"Hallo \\\"Echo\\\"!\"@de"); assertEquals("de", parseNode.getLiteralLanguage()); assertEquals("Hallo \"Echo\"!", parseNode.getLiteralValue()); } |
FeaturePluginToMavenDependencyConverter { static Set<Dependency> convert(Set<IFeaturePlugin> featurePlugins) { return featurePlugins.stream().map(FeaturePluginToMavenDependencyConverter::convert).collect(Collectors.toSet()); } } | @Test public void testConversionForFXDependency() { IFeaturePlugin featurePlugin = Mockito.mock(IFeaturePlugin.class); Mockito.when(featurePlugin.getId()).thenReturn(FEATURE_PLUGIN_ID_FX); Mockito.when(featurePlugin.getVersion()).thenReturn(FEATURE_PLUGIN_VERSION_FX); Set<Dependency> dependencies = FeaturePluginToMavenDependencyConverter.convert(new HashSet<>(Arrays.asList(featurePlugin))); assertThat(dependencies.size(), equalTo(1)); if (dependencies.size() == 1) { Dependency dependency = dependencies.iterator().next(); assertThat(dependency.getGroupId(), equalTo("at.bestsolution.efxclipse.rt")); assertThat(dependency.getArtifactId(), equalTo(FEATURE_PLUGIN_ID_FX)); assertThat(dependency.getVersion(), equalTo("3.0.0")); } }
@Test public void testConversionForNonFXDependency() { IFeaturePlugin featurePlugin = Mockito.mock(IFeaturePlugin.class); Mockito.when(featurePlugin.getId()).thenReturn(FEATURE_PLUGIN_ID_NON_FX); Mockito.when(featurePlugin.getVersion()).thenReturn(FEATURE_PLUGIN_VERSION_NON_FX); Set<Dependency> dependencies = FeaturePluginToMavenDependencyConverter.convert(new HashSet<>(Arrays.asList(featurePlugin))); assertThat(dependencies.size(), equalTo(1)); if (dependencies.size() == 1) { Dependency dependency = dependencies.iterator().next(); assertThat(dependency.getGroupId(), equalTo("at.bestsolution.efxclipse.eclipse")); assertThat(dependency.getArtifactId(), equalTo(FEATURE_PLUGIN_ID_NON_FX)); assertThat(dependency.getVersion(), equalTo("0.10.0")); } } |
JarAccessor { static InputStream readEntry(String jarUrl, String entryName) { try { URL url = new URL(convertToJarUrl(jarUrl)); JarURLConnection jarConnection = (JarURLConnection) url.openConnection(); JarFile jarFile = jarConnection.getJarFile(); return jarFile.getInputStream(jarFile.getEntry(entryName)); } catch (IOException e) { LoggingSupport.logErrorMessage(e.getMessage(), e); } return null; } } | @Test public void testRead() { try { InputStream inputStream = JarAccessor.readEntry(JarAccessorTest.class.getResource(FEATURE_TEST_JAR).toURI().toString(), FEATURE_TEST_XML); assertNotNull(inputStream); } catch (URISyntaxException e) { e.printStackTrace(); fail(); } } |
AdditionalDependencyProvider { static Set<Dependency> readAdditionalDependencies(InputStream additionalDependenciesFile) { Set<Dependency> additionalDependencies = new HashSet<>(); try (Scanner sc = new Scanner(additionalDependenciesFile)) { while (sc.hasNextLine()) { handleDependency(additionalDependencies, sc.nextLine(), additionalDependenciesFile.toString()); } } return additionalDependencies; } } | @Test public void testDependencyAdder() { InputStream resource = FeaturePluginFilter.class.getResourceAsStream(ADDITIONAL_DEPENDENCIES_FILE); Set<Dependency> dependencies = AdditionalDependencyProvider.readAdditionalDependencies(resource); assertThat(dependencies.size(), equalTo(1)); if (dependencies.size() == 1) { Dependency dependency = dependencies.iterator().next(); assertThat(dependency.getGroupId(), equalTo(GROUP_ID)); assertThat(dependency.getArtifactId(), equalTo(ARTIFACT_ID)); assertThat(dependency.getVersion(), equalTo(VERSION)); } } |
FeaturePluginExtractor { static Set<IFeaturePlugin> extractFeaturePlugins(InputStream featureInputStream) { WorkspaceFeatureModel fmodel = new WorkspaceFeatureModel(new FileWrapper(featureInputStream)); fmodel.load(); Map<String, IFeaturePlugin> map = new HashMap<>(); Arrays.stream(fmodel.getFeature().getPlugins()).forEach(p -> map.put(p.getId(), p)); return new HashSet<>(map.values()); } } | @Test public void testExtractFeaturePlugins() { Set<IFeaturePlugin> featurePlugins = FeaturePluginExtractor .extractFeaturePlugins(getClass().getResourceAsStream(FEATURE_TEST_XML)); assertThat(featurePlugins.size(), equalTo(1)); if (featurePlugins.size() == 1) { IFeaturePlugin featurePlugin = featurePlugins.iterator().next(); assertThat(featurePlugin.getId(), equalTo(FEATURE_ID)); assertThat(featurePlugin.getVersion(), equalTo(FEATURE_VERSION)); } } |
UpdateSiteAccessor { static String extractRelativeTargetPlatformFeatureJarUrl(InputStream siteInputStream, String urlPrefix) { WorkspaceSiteModel model = new WorkspaceSiteModel(new FileWrapper(siteInputStream)); model.load(); for (ISiteFeature f : model.getSite().getFeatures()) { if (f.getURL().startsWith(urlPrefix)) { return f.getURL(); } } return null; } } | @Test public void testExctractRelativeUrl() { String jarPath = UpdateSiteAccessor.extractRelativeTargetPlatformFeatureJarUrl(getClass().getResourceAsStream(SITE_TEST_XML), JAR_PREFIX); assertThat(jarPath, equalTo(JAR_PATH)); } |
UpdateSiteAccessor { static String readRelativeTargetPlatformFeatureJarUrl(String siteUrl, String targetJarUrlPrefix, Proxy proxy) { try { URL url = new URL(siteUrl); if (proxy != null) { LoggingSupport.logInfoMessage("Using proxy (" + proxy.address() + ") for getting the targetplatform feature at URL " + siteUrl); } URLConnection connection = proxy != null ? url.openConnection(proxy) : url.openConnection(); InputStream siteInputStream = connection.getInputStream(); return extractRelativeTargetPlatformFeatureJarUrl(siteInputStream, targetJarUrlPrefix); } catch (IOException e) { LoggingSupport.logErrorMessage(e.getMessage(), e); return null; } } } | @Test public void testReadRelativeUrl() { try { URL resource = getClass().getResource(SITE_TEST_XML); String jarPath = UpdateSiteAccessor.readRelativeTargetPlatformFeatureJarUrl(resource.toURI().toString(), JAR_PREFIX, null); assertThat(jarPath, equalTo(JAR_PATH)); } catch (URISyntaxException e) { fail(e.getMessage()); } } |
PomWriter { static void writePom(File file, String groupId, String artifactId, String version, Set<Dependency> dependencies) { Model model = new Model(); model.setModelVersion(MAVEN_MODEL_VERSION); model.setArtifactId(artifactId); model.setGroupId(groupId); model.setVersion(version); model.setPackaging(PACKAGING_TYPE); model.setDependencies(new ArrayList<>(dependencies)); DefaultModelWriter writer = new DefaultModelWriter(); try { writer.write(file, (Map<String, Object>) null, model); } catch (IOException e) { LoggingSupport.logErrorMessage(e.getMessage(), e); } } } | @Test public void testPomGeneration() throws IOException { File pomFile = createDestinationFile(); Set<Dependency> dependencies = generateDependencies(); PomWriter.writePom(pomFile, GROUP_ID, ARTIFACT_ID, VERSION, dependencies); Scanner scanner = new Scanner(pomFile); StringWriter writer = new StringWriter(); String generatedFileContent = scanner.useDelimiter(DELIMITER).next(); IOUtils.copy(getClass().getResourceAsStream(POM_CMP_XML), writer); String expectedFileContent = writer.toString(); scanner.close(); assertThat(generatedFileContent.replaceAll("\\s+", ""), equalTo(expectedFileContent.replaceAll("\\s+", ""))); pomFile.delete(); } |
RxActivityResults {
    /**
     * Returns a transformer that, on each upstream emission, starts {@code intent}
     * and emits the resulting {@link ActivityResult}.
     */
    public <T> ObservableTransformer<T, ActivityResult> composer(final Intent intent) {
        // ObservableTransformer is a single-method interface, so a lambda suffices.
        return upstream -> request(upstream, intent);
    }
    RxActivityResults(@NonNull Activity activity); ObservableTransformer<T, ActivityResult> composer(final Intent intent); ObservableTransformer<T, Boolean> ensureOkResult(final Intent intent); Observable<ActivityResult> start(Intent intent); void setLogging(boolean logging); }
RxActivityResults {
    /**
     * Like {@link #composer} but maps each {@link ActivityResult} to whether its
     * result code matched the expected OK code.
     */
    public <T> ObservableTransformer<T, Boolean> ensureOkResult(final Intent intent) {
        return upstream -> request(upstream, intent).map(ActivityResult::isOk);
    }
    RxActivityResults(@NonNull Activity activity); ObservableTransformer<T, ActivityResult> composer(final Intent intent); ObservableTransformer<T, Boolean> ensureOkResult(final Intent intent); Observable<ActivityResult> start(Intent intent); void setLogging(boolean logging); }
RxActivityResults {
    /**
     * Fires {@code intent} immediately by pushing a single trigger element
     * through {@link #composer}.
     */
    public Observable<ActivityResult> start(Intent intent) {
        return Observable.just(TRIGGER).compose(composer(intent));
    }
    RxActivityResults(@NonNull Activity activity); ObservableTransformer<T, ActivityResult> composer(final Intent intent); ObservableTransformer<T, Boolean> ensureOkResult(final Intent intent); Observable<ActivityResult> start(Intent intent); void setLogging(boolean logging); }
ActivityResult {
    /**
     * Equality is based on the result code and on <em>reference</em> identity of
     * the Intent payload (the tests compare against the same Intent instance;
     * NOTE(review): Intent does not appear to define value equality — confirm).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ActivityResult)) {
            return false;
        }
        ActivityResult other = (ActivityResult) obj;
        return other.data == data && other.resultCode == resultCode;
    }

    /**
     * FIX: equals() was overridden without hashCode(), violating the Object
     * contract (equal instances could hash differently). Equal instances share
     * the same {@code data} reference, so combining its default hash with the
     * result code keeps the contract.
     */
    @Override
    public int hashCode() {
        int result = 31 * resultCode + (data == null ? 0 : data.hashCode());
        return result;
    }
    ActivityResult(int okResultCode, int resultCode, Intent data); Intent getData(); int getResultCode(); boolean isOk(); @Override boolean equals(Object obj); @Override String toString(); }
ActivityResult {
    /** @return true when the actual result code equals the expected OK code. */
    public boolean isOk() {
        return okResultCode == resultCode;
    }
    ActivityResult(int okResultCode, int resultCode, Intent data); Intent getData(); int getResultCode(); boolean isOk(); @Override boolean equals(Object obj); @Override String toString(); }
ActivityResult {
    /** Human-readable summary; format is pinned by tests — do not change the text. */
    @Override
    public String toString() {
        return String.format("ActivityResult { ResultCode = %d, Data = %s }", resultCode, data);
    }
    ActivityResult(int okResultCode, int resultCode, Intent data); Intent getData(); int getResultCode(); boolean isOk(); @Override boolean equals(Object obj); @Override String toString(); }
ActivityResult {
    /** @return the Intent returned by the launched activity (may be null). */
    public Intent getData() {
        return this.data;
    }
    ActivityResult(int okResultCode, int resultCode, Intent data); Intent getData(); int getResultCode(); boolean isOk(); @Override boolean equals(Object obj); @Override String toString(); }
ActivityResult {
    /** @return the raw activity result code. */
    public int getResultCode() {
        return this.resultCode;
    }
    ActivityResult(int okResultCode, int resultCode, Intent data); Intent getData(); int getResultCode(); boolean isOk(); @Override boolean equals(Object obj); @Override String toString(); }
DefaultAlertsPoliciesApi extends ApiBase implements AlertsPoliciesApi {
    /**
     * Looks up an alerts policy by exact name. The server-side {@code filter[name]}
     * may match loosely, so results are filtered again client-side before picking one.
     */
    @Override
    public Optional<AlertsPolicy> getByName(String alertsPolicyName) {
        Invocation.Builder request = client
                .target(POLICIES_URL)
                .queryParam("filter[name]", alertsPolicyName)
                .request(APPLICATION_JSON_TYPE);
        return getPageable(request, AlertsPolicyList.class)
                .filter(policy -> policy.getName().equals(alertsPolicyName))
                .getSingle();
    }
    DefaultAlertsPoliciesApi(NewRelicClient client); @Override Optional<AlertsPolicy> getByName(String alertsPolicyName); @Override AlertsPolicy create(AlertsPolicy policy); @Override AlertsPolicy delete(int policyId); @Override AlertsPolicyChannels updateChannels(AlertsPolicyChannels channels); }
DefaultApplicationsApi extends ApiBase implements ApplicationsApi {
    /**
     * Looks up an application by exact name; server-side filtering may be loose,
     * so the result list is re-filtered client-side.
     * NOTE(review): unlike the servers/dashboards APIs, the name is not passed
     * through UriComponent.encode here — confirm names with spaces work.
     */
    @Override
    public Optional<Application> getByName(String applicationName) {
        Invocation.Builder request = client
                .target(APPLICATIONS_URL)
                .queryParam("filter[name]", applicationName)
                .request(APPLICATION_JSON_TYPE);
        return getPageable(request, ApplicationList.class)
                .filter(app -> app.getName().equals(applicationName))
                .getSingle();
    }
    DefaultApplicationsApi(NewRelicClient client); @Override Optional<Application> getByName(String applicationName); @Override Application update(int applicationId, Application application); }
Configurator {
    /**
     * Runs every configured synchronization: all application configurations first,
     * then each policy configuration through the full chain of item configurators.
     */
    public void sync() {
        applicationConfigurations.forEach(applicationConfigurator::sync);
        for (PolicyConfiguration policyConfig : policyConfigurations) {
            // Order matters: the policy itself is synced before its attached items.
            policyConfigurator.sync(policyConfig);
            conditionConfigurator.sync(policyConfig);
            externalServiceConditionConfigurator.sync(policyConfig);
            nrqlConditionConfigurator.sync(policyConfig);
            syntheticsConditionConfigurator.sync(policyConfig);
            channelConfigurator.sync(policyConfig);
        }
    }
    Configurator(@NonNull String apiKey); Configurator(ApplicationConfigurator applicationConfigurator,
            PolicyConfigurator policyConfigurator,
            ConditionConfigurator conditionConfigurator,
            ExternalServiceConditionConfigurator externalServiceConditionConfigurator,
            NrqlConditionConfigurator nrqlConditionConfigurator,
            SyntheticsConditionConfigurator syntheticsConditionConfigurator,
            ChannelConfigurator channelConfigurator); void sync(); void setApplicationConfigurations(@NonNull Collection<ApplicationConfiguration> applicationConfigurations); void setPolicyConfigurations(@NonNull Collection<PolicyConfiguration> policyConfigurations); }
ChannelConfigurator {
    /**
     * Aligns the alerts channels attached to the policy named in {@code config}
     * with the channels declared there: required channels are created/updated and
     * attached, and channels no longer referenced are cleaned up.
     * Skips silently when the configuration declares no channels at all.
     *
     * @throws NewRelicSyncException if the referenced policy does not exist
     */
    void sync(@NonNull PolicyConfiguration config) {
        String policyName = config.getPolicyName();
        if (!config.getChannels().isPresent()) {
            LOG.info("No alerts channels for policy {} - skipping...", policyName);
            return;
        }
        LOG.info("Synchronizing alerts channels for policy {}...", policyName);
        AlertsPolicy policy = api.getAlertsPoliciesApi()
                .getByName(policyName)
                .orElseThrow(() -> new NewRelicSyncException(format("Policy %s does not exist", policyName)));
        // The create/update pass reports which currently-attached channels became obsolete.
        Set<Integer> obsoleteChannelIds = createOrUpdatePolicyAlertsChannels(policy, config.getChannels().get());
        cleanupPolicyAlertsChannels(policy, obsoleteChannelIds);
        LOG.info("Alerts channels for policy {} synchronized", policyName);
    }
    ChannelConfigurator(@NonNull NewRelicApi api); }
ApplicationConfigurator {
    /**
     * Pushes the apdex/RUM settings from {@code config} to the NewRelic application
     * of the same name.
     *
     * @throws NewRelicSyncException if the application does not exist
     */
    void sync(@NonNull ApplicationConfiguration config) {
        String appName = config.getApplicationName();
        LOG.info("Synchronizing application {}...", appName);
        Application application = api.getApplicationsApi()
                .getByName(appName)
                .orElseThrow(() -> new NewRelicSyncException(format("Application %s does not exist", appName)));
        ApplicationSettings settings = ApplicationSettings.builder()
                .appApdexThreshold(config.getAppApdexThreshold())
                .endUserApdexThreshold(config.getEndUserApdexThreshold())
                .enableRealUserMonitoring(config.isEnableRealUserMonitoring())
                .build();
        Application update = Application.builder()
                .name(appName)
                .settings(settings)
                .build();
        api.getApplicationsApi().update(application.getId(), update);
        LOG.info("Application {} synchronized", appName);
    }
    ApplicationConfigurator(@NonNull NewRelicApi api); }
AbstractPolicyItemConfigurator implements PolicyItemConfigurator {
    /**
     * Synchronizes one kind of policy item (conditions, NRQL conditions, ...):
     * creates/updates every item declared in {@code config} and removes items
     * that exist remotely but are no longer configured. Skips silently when the
     * configuration declares no items of this kind.
     *
     * @throws NewRelicSyncException if the referenced policy does not exist
     */
    @Override
    public void sync(@NonNull PolicyConfiguration config) {
        if (!getConfigItems(config).isPresent()) {
            LOG.info("No items for policy {} - skipping...", config.getPolicyName());
            return;
        }
        LOG.info("Synchronizing items for policy {}...", config.getPolicyName());
        AlertsPolicy policy = api.getAlertsPoliciesApi()
                .getByName(config.getPolicyName())
                .orElseThrow(() -> new NewRelicSyncException(
                        format("Policy %s does not exist", config.getPolicyName())));
        List<T> existingItems = getItemsApi().list(policy.getId());
        // Items touched by the create/update pass are kept; everything else is stale.
        List<Integer> retainedItemIds =
                createOrUpdateAlertsNrqlConditions(policy, getConfigItems(config).get(), existingItems);
        cleanupOldItems(policy, existingItems, retainedItemIds);
        LOG.info("Items for policy {} synchronized", config.getPolicyName());
    }
    AbstractPolicyItemConfigurator(@NonNull NewRelicApi api); @Override void sync(@NonNull PolicyConfiguration config); }
PolicyConfigurator {
    /**
     * Ensures an alerts policy matching {@code config} exists. When the incident
     * preference differs, the policy is deleted and recreated rather than patched
     * (NOTE(review): presumably the API cannot change the preference in place —
     * confirm). When the policy is absent it is created; when it matches, nothing
     * is done.
     */
    void sync(@NonNull PolicyConfiguration config) {
        LOG.info("Synchronizing policy {}...", config.getPolicyName());
        AlertsPolicy desiredPolicy = AlertsPolicy.builder()
                .name(config.getPolicyName())
                .incidentPreference(config.getIncidentPreference().name())
                .build();
        Optional<AlertsPolicy> existingPolicy = api.getAlertsPoliciesApi().getByName(config.getPolicyName());
        if (existingPolicy.isPresent()) {
            AlertsPolicy currentPolicy = existingPolicy.get();
            if (!StringUtils.equals(desiredPolicy.getIncidentPreference(), currentPolicy.getIncidentPreference())) {
                api.getAlertsPoliciesApi().delete(currentPolicy.getId());
                api.getAlertsPoliciesApi().create(desiredPolicy);
                // FIX: was LOG.info(format("Policy %s updated", ...)) — eagerly formatted
                // and inconsistent with the parameterized SLF4J style used everywhere else.
                LOG.info("Policy {} updated", config.getPolicyName());
            }
        } else {
            api.getAlertsPoliciesApi().create(desiredPolicy);
            LOG.info("Policy {} created", config.getPolicyName());
        }
        LOG.info("Policy {} synchronized", config.getPolicyName());
    }
    PolicyConfigurator(@NonNull NewRelicApi api); }
DefaultDashboardsApi extends ApiBase implements DashboardsApi {
    /**
     * Lists dashboards whose title matches the server-side {@code filter[title]}.
     * Note: no client-side exact-match filtering — partial matches are returned too.
     */
    @Override
    public List<Dashboard> getByTitle(String dashboardTitle) {
        // Spaces must be query-encoded explicitly for this endpoint.
        String encodedTitle = UriComponent.encode(dashboardTitle, QUERY_PARAM_SPACE_ENCODED);
        Invocation.Builder request = client
                .target(DASHBOARDS_URL)
                .queryParam("filter[title]", encodedTitle)
                .request(APPLICATION_JSON_TYPE);
        return getPageable(request, DashboardList.class).getList();
    }
    DefaultDashboardsApi(NewRelicClient client); @Override Dashboard getById(int dashboardId); @Override List<Dashboard> getByTitle(String dashboardTitle); @Override Dashboard create(Dashboard dashboard); @Override Dashboard update(Dashboard dashboard); @Override Dashboard delete(int dashboardId); }
DefaultServersApi extends ApiBase implements ServersApi {
    /**
     * Looks up a server by exact name; the server-side filter may match loosely,
     * so results are re-filtered client-side.
     */
    @Override
    public Optional<Server> getByName(String serverName) {
        // Spaces must be query-encoded explicitly for this endpoint.
        String encodedName = UriComponent.encode(serverName, QUERY_PARAM_SPACE_ENCODED);
        Invocation.Builder request = client
                .target(SERVERS_URL)
                .queryParam("filter[name]", encodedName)
                .request(APPLICATION_JSON_TYPE);
        // Lambda parameter renamed from the misleading "application" to "server".
        return getPageable(request, ServerList.class)
                .filter(server -> server.getName().equals(serverName))
                .getSingle();
    }
    DefaultServersApi(NewRelicClient client); @Override Optional<Server> getByName(String serverName); @Override Server getById(int serverId); }
EmbeddedJMSBrokerHolder implements AutoCloseable, ConnectionFactoryAccessor, ActiveMQConnectionFactoryAccessor, BrokerURIAccessor {
    /**
     * Builds a holder around a freshly configured (not yet started) embedded broker
     * backed by a new temporary data directory.
     */
    public static EmbeddedJMSBrokerHolder create(final String name, boolean marshal, boolean persistent) {
        final File dataDir = Files.createTempDir();
        LOGGER.debug("Created temporary directory: \"{}\"", dataDir.getAbsolutePath());
        final BrokerSettings settings = new BrokerSettings(name, marshal, persistent, dataDir);
        return new EmbeddedJMSBrokerHolder(createAndConfigureBrokerService(settings), dataDir);
    }
    EmbeddedJMSBrokerHolder(final BrokerService brokerService, final File tempDir); BrokerService getBrokerService(); @Override ConnectionFactory getConnectionFactory(); @Override ActiveMQConnectionFactory getActiveMQConnectionFactory(); @Override URI getBrokerUri(); static EmbeddedJMSBrokerHolder create(final String name, boolean marshal, boolean persistent); void start(); @Override void close(); }
EmbeddedJmsRuleImpl implements EmbeddedJmsRule { @Override public ConnectionFactory connectionFactory() { return activeMqConnectionFactory(); } EmbeddedJmsRuleImpl(final String predefinedName, final boolean marshal, final boolean persistent); @Override ConnectionFactory connectionFactory(); @Override ActiveMQConnectionFactory activeMqConnectionFactory(); @Override URI brokerUri(); @Override Statement apply(final Statement base, final Description description); } | @Test(expected = IllegalStateException.class) public void connectionFactory() throws Exception { final EmbeddedJmsRuleImpl rule = new EmbeddedJmsRuleImpl("predefined", true, false); rule.connectionFactory(); } |
EmbeddedJmsRuleImpl implements EmbeddedJmsRule { @Override public ActiveMQConnectionFactory activeMqConnectionFactory() { if (jmsBrokerHolder == null) { throw new IllegalStateException("Can not create ConnectionFactory before the broker has started"); } else { return jmsBrokerHolder.getActiveMQConnectionFactory(); } } EmbeddedJmsRuleImpl(final String predefinedName, final boolean marshal, final boolean persistent); @Override ConnectionFactory connectionFactory(); @Override ActiveMQConnectionFactory activeMqConnectionFactory(); @Override URI brokerUri(); @Override Statement apply(final Statement base, final Description description); } | @Test(expected = IllegalStateException.class) public void activeMqConnectionFactory() throws Exception { final EmbeddedJmsRuleImpl rule = new EmbeddedJmsRuleImpl("predefined", true, false); rule.activeMqConnectionFactory(); } |
EmbeddedJmsRuleImpl implements EmbeddedJmsRule { @Override public URI brokerUri() { if (jmsBrokerHolder == null) { throw new IllegalStateException("Can not create broker URI before the broker has started"); } else { return jmsBrokerHolder.getBrokerUri(); } } EmbeddedJmsRuleImpl(final String predefinedName, final boolean marshal, final boolean persistent); @Override ConnectionFactory connectionFactory(); @Override ActiveMQConnectionFactory activeMqConnectionFactory(); @Override URI brokerUri(); @Override Statement apply(final Statement base, final Description description); } | @Test(expected = IllegalStateException.class) public void brokerUri() throws Exception { final EmbeddedJmsRuleImpl rule = new EmbeddedJmsRuleImpl("predefined", true, false); rule.brokerUri(); } |
BrokerConfiguration { @Override public int hashCode() { return Objects.hash(name, marshal, persistenceEnabled); } BrokerConfiguration(final String name, final Boolean marshal, final Boolean persistenceEnabled); String getName(); Boolean getMarshal(); Boolean getPersistenceEnabled(); @Override boolean equals(final Object o); @Override int hashCode(); @Override String toString(); static final BrokerConfiguration DEFAULT; } | @Test void testHashCode() { assertEquals(BrokerConfigurationBuilder.instance().build().hashCode(), BrokerConfigurationBuilder.instance().build().hashCode()); } |
BrokerConfiguration { @Override public String toString() { final StringBuilder sb = new StringBuilder("BrokerConfiguration{"); sb.append("name='").append(name).append('\''); sb.append(", marshal=").append(marshal); sb.append(", persistenceEnabled=").append(persistenceEnabled); sb.append('}'); return sb.toString(); } BrokerConfiguration(final String name, final Boolean marshal, final Boolean persistenceEnabled); String getName(); Boolean getMarshal(); Boolean getPersistenceEnabled(); @Override boolean equals(final Object o); @Override int hashCode(); @Override String toString(); static final BrokerConfiguration DEFAULT; } | @Test void testToString() { assertEquals(BrokerConfigurationBuilder.instance().build().toString(), BrokerConfigurationBuilder.instance().build().toString()); } |
ZeebeExpressionResolver implements BeanFactoryAware { @SuppressWarnings("unchecked") public <T> T resolve(final String value) { final String resolvedValue = resolve.apply(value); if (!(resolvedValue.startsWith("#{") && value.endsWith("}"))) { return (T) resolvedValue; } return (T) this.resolver.evaluate(resolvedValue, this.expressionContext); } @Override void setBeanFactory(final BeanFactory beanFactory); @SuppressWarnings("unchecked") T resolve(final String value); } | @Test public void resolveNetworkClientPort() throws Exception { final String port = resolver.resolve("${zeebe.network.client.port}"); assertThat(port).isEqualTo("123"); }
@Test public void useValueIfNoExpression() throws Exception { final String normalString = resolver.resolve("normalString"); assertThat(normalString).isEqualTo("normalString"); } |
DeploymentPostProcessor extends BeanInfoPostProcessor { @Override public Consumer<ZeebeClient> apply(final ClassInfo beanInfo) { final ZeebeDeploymentValue value = reader.applyOrThrow(beanInfo); log.info("deployment: {}", value); return client -> { DeployWorkflowCommandStep1 deployWorkflowCommand = client .newDeployCommand(); DeploymentEvent deploymentResult = value.getClassPathResources() .stream() .map(deployWorkflowCommand::addResourceFromClasspath) .reduce((first, second) -> second) .orElseThrow(() -> new IllegalArgumentException("Requires at least one resource to deploy")) .send() .join(); log.info( "Deployed: {}", deploymentResult .getWorkflows() .stream() .map(wf -> String.format("<%s:%d>", wf.getBpmnProcessId(), wf.getVersion())) .collect(Collectors.joining(","))); }; } @Override boolean test(final ClassInfo beanInfo); @Override Consumer<ZeebeClient> apply(final ClassInfo beanInfo); } | @Test public void shouldDeploySingleResourceTest() { ClassInfo classInfo = ClassInfo.builder() .build(); ZeebeDeploymentValue zeebeDeploymentValue = ZeebeDeploymentValue.builder() .classPathResources(Collections.singletonList("/1.bpmn")) .build(); when(reader.applyOrThrow(classInfo)).thenReturn(zeebeDeploymentValue); when(client.newDeployCommand()).thenReturn(deployStep1); when(deployStep1.addResourceFromClasspath(anyString())).thenReturn(deployStep2); when(deployStep2.send()).thenReturn(zeebeFuture); when(zeebeFuture.join()).thenReturn(deploymentEvent); when(deploymentEvent.getWorkflows()).thenReturn(Collections.singletonList(getWorkFlow())); deploymentPostProcessor.apply(classInfo).accept(client); verify(deployStep1).addResourceFromClasspath(eq("/1.bpmn")); verify(deployStep2).send(); verify(zeebeFuture).join(); }
@Test public void shouldDeployMultipleResourcesTest() { ClassInfo classInfo = ClassInfo.builder() .build(); ZeebeDeploymentValue zeebeDeploymentValue = ZeebeDeploymentValue.builder() .classPathResources(Arrays.asList("/1.bpmn", "/2.bpmn")) .build(); when(reader.applyOrThrow(classInfo)).thenReturn(zeebeDeploymentValue); when(client.newDeployCommand()).thenReturn(deployStep1); when(deployStep1.addResourceFromClasspath(anyString())).thenReturn(deployStep2); when(deployStep2.send()).thenReturn(zeebeFuture); when(zeebeFuture.join()).thenReturn(deploymentEvent); when(deploymentEvent.getWorkflows()).thenReturn(Collections.singletonList(getWorkFlow())); deploymentPostProcessor.apply(classInfo).accept(client); verify(deployStep1).addResourceFromClasspath(eq("/1.bpmn")); verify(deployStep1).addResourceFromClasspath(eq("/2.bpmn")); verify(deployStep2).send(); verify(zeebeFuture).join(); }
@Test(expected = IllegalArgumentException.class) public void shouldThrowExceptionOnNoResourcesToDeploy() { ClassInfo classInfo = ClassInfo.builder() .build(); ZeebeDeploymentValue zeebeDeploymentValue = ZeebeDeploymentValue.builder() .classPathResources(Collections.emptyList()) .build(); when(reader.applyOrThrow(classInfo)).thenReturn(zeebeDeploymentValue); when(client.newDeployCommand()).thenReturn(deployStep1); when(deployStep1.addResourceFromClasspath(anyString())).thenReturn(deployStep2); deploymentPostProcessor.apply(classInfo).accept(client); } |
ReadZeebeDeploymentValue extends ReadAnnotationValue<ClassInfo, ZeebeDeployment, ZeebeDeploymentValue> { @Override public Optional<ZeebeDeploymentValue> apply(final ClassInfo classInfo) { return classInfo .getAnnotation(annotationType) .map( annotation -> ZeebeDeploymentValue.builder() .beanInfo(classInfo) .classPathResources( resolveResources(annotation.classPathResources()) ) .build()); } ReadZeebeDeploymentValue(final ZeebeExpressionResolver resolver); @Override Optional<ZeebeDeploymentValue> apply(final ClassInfo classInfo); } | @Test public void shouldReadSingleClassPathResourceTest() { ClassInfo classInfo = ClassInfo.builder() .bean(new WithSingleClassPathResource()) .build(); when(expressionResolver.resolve(anyString())).thenAnswer(inv -> inv.getArgument(0)); ZeebeDeploymentValue expectedDeploymentValue = ZeebeDeploymentValue.builder() .beanInfo(classInfo) .classPathResources(Collections.singletonList("/1.bpmn")) .build(); Optional<ZeebeDeploymentValue> valueForClass = readZeebeDeploymentValue.apply(classInfo); assertTrue(valueForClass.isPresent()); assertEquals(expectedDeploymentValue, valueForClass.get()); }
@Test public void shouldReadMultipleClassPathResourcesTest() { ClassInfo classInfo = ClassInfo.builder() .bean(new WithMultipleClassPathResource()) .build(); when(expressionResolver.resolve(anyString())).thenAnswer(inv -> inv.getArgument(0)); ZeebeDeploymentValue expectedDeploymentValue = ZeebeDeploymentValue.builder() .beanInfo(classInfo) .classPathResources(Arrays.asList("/1.bpmn", "/2.bpmn")) .build(); Optional<ZeebeDeploymentValue> valueForClass = readZeebeDeploymentValue.apply(classInfo); assertTrue(valueForClass.isPresent()); assertEquals(expectedDeploymentValue, valueForClass.get()); }
@Test public void shouldReadNoClassPathResourcesTest() { ClassInfo classInfo = ClassInfo.builder() .bean(new WithoutAnnotation()) .build(); when(expressionResolver.resolve(anyString())).thenAnswer(inv -> inv.getArgument(0)); Optional<ZeebeDeploymentValue> valueForClass = readZeebeDeploymentValue.apply(classInfo); assertFalse(valueForClass.isPresent()); } |
Experiment { public <I, D> Experiment(LearningAlgorithm<? extends A, I, D> learningAlgorithm, EquivalenceOracle<? super A, I, D> equivalenceAlgorithm, Alphabet<I> inputs) { this.impl = new ExperimentImpl<>(learningAlgorithm, equivalenceAlgorithm, inputs); } <I, D> Experiment(LearningAlgorithm<? extends A, I, D> learningAlgorithm,
EquivalenceOracle<? super A, I, D> equivalenceAlgorithm,
Alphabet<I> inputs); A run(); A getFinalHypothesis(); void setLogModels(boolean logModels); void setProfile(boolean profile); Counter getRounds(); static final String LEARNING_PROFILE_KEY; static final String COUNTEREXAMPLE_PROFILE_KEY; } | @Test public void testExperiment() { final Alphabet<Character> alphabet = Alphabets.characters('a', 'c'); final CompactDFA<Character> target = RandomAutomata.randomDFA(RANDOM, 5, alphabet); final CompactDFA<Character> intermediateTarget = RandomAutomata.randomDFA(RANDOM, target.size() - 1, alphabet); final MockUpLearner<Character> learner = new MockUpLearner<>(target, intermediateTarget); final DFAEquivalenceOracle<Character> eq = new MockUpOracle<>(intermediateTarget); DFAExperiment<Character> experiment = new DFAExperiment<>(learner, eq, alphabet); experiment.setProfile(true); Assert.assertThrows(experiment::getFinalHypothesis); experiment.run(); Assert.assertThrows(experiment::run); DFA<?, Character> finalModel = experiment.getFinalHypothesis(); Assert.assertNotNull(experiment.getFinalHypothesis()); Assert.assertSame(finalModel, target); Assert.assertTrue(learner.startLearningCalled); Assert.assertEquals(learner.refinementSteps, REFINEMENT_STEPS); Assert.assertNotNull(SimpleProfiler.cumulated(Experiment.LEARNING_PROFILE_KEY)); Assert.assertNotNull(SimpleProfiler.cumulated(Experiment.COUNTEREXAMPLE_PROFILE_KEY)); } |
AbstractBFInclusionOracle extends AbstractBFOracle<A, I, D> implements InclusionOracle<A, I, D> { @Override public @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs) { return super.findCounterExample(hypothesis, inputs); } AbstractBFInclusionOracle(MembershipOracle<I, D> membershipOracle, double multiplier); @Override boolean isCounterExample(A hypothesis, Iterable<? extends I> inputs, D output); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testFindCounterExample() { final DefaultQuery<Character, D> cex = bfio.findCounterExample(automaton, ALPHABET); Assert.assertEquals(cex, query); } |
CExFirstOracle implements BlackBoxOracle<A, I, D> { @Override public List<PropertyOracle<I, ? super A, ?, D>> getPropertyOracles() { return propertyOracles; } CExFirstOracle(); CExFirstOracle(PropertyOracle<I, A, ?, D> propertyOracle); CExFirstOracle(Collection<? extends PropertyOracle<I, ? super A, ?, D>> propertyOracles); @Override List<PropertyOracle<I, ? super A, ?, D>> getPropertyOracles(); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testGetPropertyOracles() { Assert.assertEquals(oracle.getPropertyOracles().size(), 2); } |
CExFirstOracle implements BlackBoxOracle<A, I, D> { @Override public @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs) { for (PropertyOracle<I, ? super A, ?, D> propertyOracle : propertyOracles) { final DefaultQuery<I, D> result = propertyOracle.findCounterExample(hypothesis, inputs); if (result != null) { assert isCounterExample(hypothesis, result.getInput(), result.getOutput()); return result; } } return null; } CExFirstOracle(); CExFirstOracle(PropertyOracle<I, A, ?, D> propertyOracle); CExFirstOracle(Collection<? extends PropertyOracle<I, ? super A, ?, D>> propertyOracles); @Override List<PropertyOracle<I, ? super A, ?, D>> getPropertyOracles(); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testFindCounterExample() { final DefaultQuery<Boolean, Boolean> ce = oracle.findCounterExample(automaton, inputs); Assert.assertEquals(ce, query); Mockito.verify(po1).disprove(automaton, inputs); Mockito.verify(po2, Mockito.never()).disprove(automaton, inputs); Mockito.verify(po2, Mockito.never()).findCounterExample(automaton, inputs); } |
RandomWalkEQOracle implements MealyEquivalenceOracle<I, O> { @Override public @Nullable DefaultQuery<I, Word<O>> findCounterExample(MealyMachine<?, I, ?, O> hypothesis, Collection<? extends I> inputs) { return doFindCounterExample(hypothesis, inputs); } RandomWalkEQOracle(SUL<I, O> sul,
double restartProbability,
long maxSteps,
boolean resetStepCount,
Random random); RandomWalkEQOracle(SUL<I, O> sul, double restartProbability, long maxSteps, Random random); @Override @Nullable DefaultQuery<I, Word<O>> findCounterExample(MealyMachine<?, I, ?, O> hypothesis,
Collection<? extends I> inputs); } | @Test public void testOracle() { final DummySUL dummySUL = new DummySUL(); final MealyEquivalenceOracle<Character, Character> mOracle = new RandomWalkEQOracle<>(dummySUL, 0.01, MAX_LENGTH, new Random(42)); final DefaultQuery<Character, Word<Character>> ce = mOracle.findCounterExample(new DummyMealy(ALPHABET), ALPHABET); Assert.assertNull(ce); Assert.assertTrue(dummySUL.isCalledPost()); } |
SULSymbolQueryOracle implements SymbolQueryOracle<I, O> { @Override public void reset() { if (postRequired) { this.sul.post(); this.postRequired = false; } this.preRequired = true; } SULSymbolQueryOracle(final SUL<I, O> sul); @Override O query(I i); @Override void reset(); } | @Test public void testResetIdempotency() { final SUL<Character, Integer> mock = Mockito.spy(sul); final SULSymbolQueryOracle<Character, Integer> oracle = new SULSymbolQueryOracle<>(mock); Mockito.verify(mock, Mockito.times(0)).pre(); Mockito.verify(mock, Mockito.times(0)).post(); oracle.reset(); oracle.reset(); oracle.reset(); Mockito.verify(mock, Mockito.times(0)).pre(); Mockito.verify(mock, Mockito.times(0)).post(); }
@Test public void testQueriesAndCleanUp() { final SUL<Character, Integer> mock = Mockito.spy(sul); final SULSymbolQueryOracle<Character, Integer> oracle = new SULSymbolQueryOracle<>(mock); Mockito.verify(mock, Mockito.times(0)).pre(); Mockito.verify(mock, Mockito.times(0)).post(); final Word<Character> i1 = Word.fromCharSequence("abcabcabc"); final Word<Integer> o1 = oracle.answerQuery(i1); oracle.reset(); Assert.assertEquals(o1, example.getReferenceAutomaton().computeOutput(i1)); Mockito.verify(mock, Mockito.times(1)).pre(); Mockito.verify(mock, Mockito.times(1)).post(); Mockito.verify(mock, Mockito.times(i1.size())).step(Mockito.anyChar()); final Word<Character> i2 = Word.fromCharSequence("cba"); final Word<Integer> o2 = oracle.answerQuery(i2); oracle.reset(); oracle.reset(); Assert.assertEquals(o2, example.getReferenceAutomaton().computeOutput(i2)); Mockito.verify(mock, Mockito.times(2)).pre(); Mockito.verify(mock, Mockito.times(2)).post(); Mockito.verify(mock, Mockito.times(i1.size() + i2.size())).step(Mockito.anyChar()); } |
SimulatorOmegaOracle implements SingleQueryOmegaOracle<S, I, D> { @Override public void processQueries(Collection<? extends OmegaQuery<I, D>> queries) { MQUtil.answerOmegaQueries(this, queries); } <A extends SuffixOutput<I, D> & SimpleDTS<S, I>> SimulatorOmegaOracle(A automaton, SimulatorOracle<I, D> simulatorOracle); @Override MembershipOracle<I, D> getMembershipOracle(); @Override boolean isSameState(Word<I> input1, S s1, Word<I> input2, S s2); @Override void processQueries(Collection<? extends OmegaQuery<I, D>> queries); @Override Pair<D, Integer> answerQuery(Word<I> prefix, Word<I> loop, int repeat); } | @Test public void testDFASimulatorOmegaOracle() { DFA<Integer, Symbol> dfa = ExamplePaulAndMary.constructMachine(); DFASimulatorOmegaOracle<Integer, Symbol> oracle = new DFASimulatorOmegaOracle<>(dfa); List<OmegaQuery<Symbol, Boolean>> queries = new ArrayList<>(); OmegaQuery<Symbol, Boolean> q1 = new OmegaQuery<>(Word.epsilon(), Word.fromSymbols(ExamplePaulAndMary.IN_PAUL, ExamplePaulAndMary.IN_LOVES, ExamplePaulAndMary.IN_MARY), 1); OmegaQuery<Symbol, Boolean> q2 = new OmegaQuery<>(Word.fromSymbols(ExamplePaulAndMary.IN_MARY), Word.fromSymbols(ExamplePaulAndMary.IN_MARY, ExamplePaulAndMary.IN_LOVES, ExamplePaulAndMary.IN_PAUL), 1); queries.add(q1); queries.add(q2); Assert.assertEquals(queries.get(0).getLoop().size(), 3); Assert.assertEquals(queries.get(1).getLoop().size(), 3); oracle.processQueries(queries); Assert.assertFalse(queries.get(0).isUltimatelyPeriodic()); Assert.assertTrue(queries.get(1).isUltimatelyPeriodic()); Assert.assertEquals(queries.get(1).getOutput(), Boolean.FALSE); } |
SimulatorOracle implements SingleQueryOracle<I, D> { @Override public void processQueries(Collection<? extends Query<I, D>> queries) { MQUtil.answerQueries(this, queries); } SimulatorOracle(SuffixOutput<I, D> automaton); @Override D answerQuery(Word<I> prefix, Word<I> suffix); @Override void processQueries(Collection<? extends Query<I, D>> queries); } | @Test public void testDFASimulatorOracle() { DFA<?, Symbol> dfa = ExamplePaulAndMary.constructMachine(); SimulatorOracle<Symbol, Boolean> oracle = new SimulatorOracle<>(dfa); List<DefaultQuery<Symbol, Boolean>> queries = new ArrayList<>(); DefaultQuery<Symbol, Boolean> q1 = new DefaultQuery<>(Word.fromSymbols(ExamplePaulAndMary.IN_PAUL, ExamplePaulAndMary.IN_LOVES, ExamplePaulAndMary.IN_MARY)); DefaultQuery<Symbol, Boolean> q2 = new DefaultQuery<>(Word.fromSymbols(ExamplePaulAndMary.IN_MARY, ExamplePaulAndMary.IN_LOVES, ExamplePaulAndMary.IN_PAUL)); queries.add(q1); queries.add(q2); Assert.assertEquals(queries.get(0).getInput().size(), 3); Assert.assertEquals(queries.get(1).getInput().size(), 3); oracle.processQueries(queries); Assert.assertEquals(queries.get(0).getOutput(), Boolean.TRUE); Assert.assertEquals(queries.get(1).getOutput(), Boolean.FALSE); } |
StateLocalInputSULSymbolQueryOracle extends SULSymbolQueryOracle<I, O> implements SymbolQueryOracle<I, O> { @Override public void reset() { super.reset(); this.fetchRequired = true; } StateLocalInputSULSymbolQueryOracle(StateLocalInputSUL<I, O> sul, O undefinedOutput); @Override void reset(); } | @Test public void testResetIdempotency() { final StateLocalInputSUL<Character, Integer> mock = Mockito.spy(sul); Mockito.doAnswer(invocation -> Collections.singleton('a')).when(mock).currentlyEnabledInputs(); final SULSymbolQueryOracle<Character, Integer> oracle = new StateLocalInputSULSymbolQueryOracle<>(mock, example.getUndefinedOutput()); Mockito.verify(mock, Mockito.times(0)).pre(); Mockito.verify(mock, Mockito.times(0)).post(); Mockito.verify(mock, Mockito.times(0)).currentlyEnabledInputs(); oracle.reset(); oracle.reset(); oracle.reset(); Mockito.verify(mock, Mockito.times(0)).pre(); Mockito.verify(mock, Mockito.times(0)).post(); Mockito.verify(mock, Mockito.times(0)).currentlyEnabledInputs(); }
@Test public void testQueriesAndCleanUp() { final StateLocalInputSUL<Character, Integer> mock = Mockito.spy(sul); Mockito.doAnswer(invocation -> Collections.singleton('a')).when(mock).currentlyEnabledInputs(); final SULSymbolQueryOracle<Character, Integer> oracle = new StateLocalInputSULSymbolQueryOracle<>(mock, example.getUndefinedOutput()); Mockito.verify(mock, Mockito.times(0)).pre(); Mockito.verify(mock, Mockito.times(0)).post(); final Word<Character> i1 = Word.fromCharSequence("abcabcabc"); final Word<Integer> o1 = oracle.answerQuery(i1); oracle.reset(); Assert.assertEquals(o1.firstSymbol(), example.getReferenceAutomaton().computeOutput(i1).firstSymbol()); Assert.assertEquals(o1.subWord(1), Word.fromList(Collections.nCopies(i1.size() - 1, example.getUndefinedOutput()))); Mockito.verify(mock, Mockito.times(1)).pre(); Mockito.verify(mock, Mockito.times(1)).post(); Mockito.verify(mock, Mockito.times(2)).currentlyEnabledInputs(); Mockito.verify(mock, Mockito.times(1)).step(Mockito.anyChar()); final Word<Character> i2 = Word.fromCharSequence("aaaaa"); final Word<Integer> o2 = oracle.answerQuery(i2); oracle.reset(); oracle.reset(); Assert.assertEquals(o2, example.getReferenceAutomaton().computeOutput(i2)); Mockito.verify(mock, Mockito.times(2)).pre(); Mockito.verify(mock, Mockito.times(2)).post(); Mockito.verify(mock, Mockito.times(2 + i2.size())).currentlyEnabledInputs(); Mockito.verify(mock, Mockito.times(1 + i2.size())).step(Mockito.anyChar()); } |
SULMapperComposition extends MapperComposition<AI, AO, ACI, CAO, CI, CO, SULMapper<? super AI, ? extends AO, ACI, CAO>, SULMapper<? super ACI, ? extends CAO, ? extends CI, ? super CO>> implements SULMapper<AI, AO, CI, CO> { @Override public boolean canFork() { return mapper1.canFork() && mapper2.canFork(); } SULMapperComposition(SULMapper<? super AI, ? extends AO, ACI, CAO> outerMapper,
SULMapper<? super ACI, ? extends CAO, ? extends CI, ? super CO> innerMapper); @SuppressWarnings("PMD.AvoidCatchingGenericException") // we want to allow mapping generic RuntimeExceptions @Override MappedException<? extends AO> mapWrappedException(SULException exception); @SuppressWarnings("PMD.AvoidCatchingGenericException") // we want to allow mapping generic RuntimeExceptions @Override MappedException<? extends AO> mapUnwrappedException(RuntimeException exception); @Override boolean canFork(); @Override SULMapper<AI, AO, CI, CO> fork(); } | @Test public void testComposition() { mappedSUL.pre(); Character result = mappedSUL.step('A'); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), 'A'); result = mappedSUL.step(INNER_EXCEPTION_TRIGGER_CHAR); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), INNER_EXCEPTION_TRIGGER_CHAR); result = mappedSUL.step('C'); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), INNER_EXCEPTION_TRIGGER_CHAR); result = mappedSUL.step(null); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), INNER_EXCEPTION_TRIGGER_CHAR); mappedSUL.post(); mappedSUL.pre(); result = mappedSUL.step('A'); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), 'A'); result = mappedSUL.step(OUTER_EXCEPTION_TRIGGER_CHAR); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), OUTER_EXCEPTION_RESULT); result = mappedSUL.step('C'); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), 'C'); result = mappedSUL.step(NESTED_EXCEPTION_TRIGGER_CHAR); Assert.assertNotNull(result); Assert.assertEquals(result.charValue(), NESTED_EXCEPTION_TRIGGER_CHAR); Assert.assertThrows(NullPointerException.class, () -> mappedSUL.step(null)); mappedSUL.post(); mappedSUL.pre(); mappedSUL.pre(); Assert.assertEquals(4, this.innerUnwrappedMapper.getPreCounter()); Assert.assertEquals(4, this.outerWrappedMapper.getPreCounter()); Assert.assertEquals(2, 
this.innerUnwrappedMapper.getPostCounter()); Assert.assertEquals(2, this.outerWrappedMapper.getPostCounter()); Assert.assertFalse(mappedSUL.canFork()); Assert.assertThrows(mappedSUL::fork); } |
StaticParallelOmegaOracle extends AbstractStaticBatchProcessor<OmegaQuery<I, D>, OmegaMembershipOracle<S, I, D>> implements ParallelOmegaOracle<S, I, D> { @Override public boolean isSameState(Word<I> w1, S s1, Word<I> w2, S s2) { return getProcessor().isSameState(w1, s1, w2, s2); } StaticParallelOmegaOracle(Collection<? extends OmegaMembershipOracle<S, I, D>> oracles,
@NonNegative int minBatchSize,
PoolPolicy policy); @Override void processQueries(Collection<? extends OmegaQuery<I, D>> omegaQueries); @Override MembershipOracle<I, D> getMembershipOracle(); @Override boolean isSameState(Word<I> w1, S s1, Word<I> w2, S s2); } | @Test public void testSingleMethods() { final ParallelOmegaOracle<?, Integer, TestOutput> oracle = getBuilder().create(); Assert.assertThrows(OmegaException.class, oracle::getMembershipOracle); Assert.assertThrows(OmegaException.class, () -> oracle.isSameState(null, null, null, null)); } |
DynamicParallelOmegaOracle extends AbstractDynamicBatchProcessor<OmegaQuery<I, D>, OmegaMembershipOracle<S, I, D>> implements ParallelOmegaOracle<S, I, D> { @Override public void processQueries(Collection<? extends OmegaQuery<I, D>> omegaQueries) { processBatch(omegaQueries); } DynamicParallelOmegaOracle(Supplier<? extends OmegaMembershipOracle<S, I, D>> oracleSupplier,
@NonNegative int batchSize,
ExecutorService executor); @Override void processQueries(Collection<? extends OmegaQuery<I, D>> omegaQueries); @Override MembershipOracle<I, D> getMembershipOracle(); @Override boolean isSameState(Word<I> w1, S s1, Word<I> w2, S s2); } | @Test(dataProvider = "policies", dataProviderClass = Utils.class, timeOut = 2000) public void testThreadCreation(PoolPolicy poolPolicy) { final List<AnswerOnceQuery<Void>> queries = createQueries(10); final int expectedThreads = queries.size(); final CountDownLatch latch = new CountDownLatch(expectedThreads); final NullOracle[] oracles = new NullOracle[expectedThreads]; for (int i = 0; i < expectedThreads; i++) { oracles[i] = new NullOracle() { @Override public void processQueries(Collection<? extends OmegaQuery<Void, Void>> queries) { try { latch.countDown(); latch.await(); } catch (InterruptedException e) { throw new IllegalStateException(e); } super.processQueries(queries); } }; } final ParallelOmegaOracle<?, Void, Void> oracle = ParallelOracleBuilders.newDynamicParallelOmegaOracle(oracles[0], Arrays.copyOfRange(oracles, 1, oracles.length)) .withBatchSize(1) .withPoolSize(oracles.length) .withPoolPolicy(poolPolicy) .create(); try { oracle.processQueries(queries); } finally { oracle.shutdown(); } }
@Test(dataProvider = "policies", dataProviderClass = Utils.class, timeOut = 2000) public void testThreadScheduling(PoolPolicy poolPolicy) { final List<AnswerOnceQuery<Void>> queries = createQueries(10); final CountDownLatch latch = new CountDownLatch(queries.size() - 1); final NullOracle awaitingOracle = new NullOracle() { @Override public void processQueries(Collection<? extends OmegaQuery<Void, Void>> queries) { try { latch.await(); } catch (InterruptedException e) { throw new IllegalStateException(e); } super.processQueries(queries); } }; final NullOracle countDownOracle = new NullOracle() { @Override public void processQueries(Collection<? extends OmegaQuery<Void, Void>> queries) { latch.countDown(); super.processQueries(queries); } }; final ParallelOmegaOracle<?, Void, Void> oracle = ParallelOracleBuilders.newDynamicParallelOmegaOracle(awaitingOracle, countDownOracle) .withPoolSize(2) .withPoolPolicy(poolPolicy) .create(); try { oracle.processQueries(queries); } finally { oracle.shutdown(); } } |
DynamicParallelOmegaOracle extends AbstractDynamicBatchProcessor<OmegaQuery<I, D>, OmegaMembershipOracle<S, I, D>> implements ParallelOmegaOracle<S, I, D> { @Override public boolean isSameState(Word<I> w1, S s1, Word<I> w2, S s2) { return getProcessor().isSameState(w1, s1, w2, s2); } DynamicParallelOmegaOracle(Supplier<? extends OmegaMembershipOracle<S, I, D>> oracleSupplier,
@NonNegative int batchSize,
ExecutorService executor); @Override void processQueries(Collection<? extends OmegaQuery<I, D>> omegaQueries); @Override MembershipOracle<I, D> getMembershipOracle(); @Override boolean isSameState(Word<I> w1, S s1, Word<I> w2, S s2); } | @Test public void testSingleMethods() { final ParallelOmegaOracle<?, Void, Void> oracle = getBuilder().create(); Assert.assertThrows(OmegaException.class, oracle::getMembershipOracle); Assert.assertThrows(OmegaException.class, () -> oracle.isSameState(null, null, null, null)); } |
DynamicParallelOracle extends AbstractDynamicBatchProcessor<Query<I, D>, MembershipOracle<I, D>> implements ParallelOracle<I, D> { @Override public void processQueries(Collection<? extends Query<I, D>> queries) { processBatch(queries); } DynamicParallelOracle(Supplier<? extends MembershipOracle<I, D>> oracleSupplier,
@NonNegative int batchSize,
ExecutorService executor); @Override void processQueries(Collection<? extends Query<I, D>> queries); } | @Test(dataProvider = "policies", dataProviderClass = Utils.class, timeOut = 2000) public void testThreadCreation(PoolPolicy poolPolicy) { final List<AnswerOnceQuery<Void>> queries = createQueries(10); final int expectedThreads = queries.size(); final CountDownLatch latch = new CountDownLatch(expectedThreads); final NullOracle[] oracles = new NullOracle[expectedThreads]; for (int i = 0; i < expectedThreads; i++) { oracles[i] = new NullOracle() { @Override public void processQueries(Collection<? extends Query<Void, Void>> queries) { try { latch.countDown(); latch.await(); } catch (InterruptedException e) { throw new IllegalStateException(e); } super.processQueries(queries); } }; } final ParallelOracle<Void, Void> oracle = ParallelOracleBuilders.newDynamicParallelOracle(oracles[0], Arrays.copyOfRange( oracles, 1, oracles.length)) .withBatchSize(1) .withPoolSize(oracles.length) .withPoolPolicy(poolPolicy) .create(); try { oracle.processQueries(queries); } finally { oracle.shutdown(); } }
@Test(dataProvider = "policies", dataProviderClass = Utils.class, timeOut = 2000) public void testThreadScheduling(PoolPolicy poolPolicy) { final List<AnswerOnceQuery<Void>> queries = createQueries(10); final CountDownLatch latch = new CountDownLatch(queries.size() - 1); final NullOracle awaitingOracle = new NullOracle() { @Override public void processQueries(Collection<? extends Query<Void, Void>> queries) { try { latch.await(); } catch (InterruptedException e) { throw new IllegalStateException(e); } super.processQueries(queries); } }; final NullOracle countDownOracle = new NullOracle() { @Override public void processQueries(Collection<? extends Query<Void, Void>> queries) { latch.countDown(); super.processQueries(queries); } }; final ParallelOracle<Void, Void> oracle = ParallelOracleBuilders.newDynamicParallelOracle(awaitingOracle, countDownOracle) .withPoolSize(2) .withPoolPolicy(poolPolicy) .create(); try { oracle.processQueries(queries); } finally { oracle.shutdown(); } } |
HistogramOracle implements StatisticOracle<I, D> { @Override public final void processQueries(Collection<? extends Query<I, D>> queries) { for (Query<I, D> q : queries) { this.dataSet.addDataPoint((long) q.getInput().size()); } nextOracle.processQueries(queries); } HistogramOracle(MembershipOracle<I, D> next, String name); @Override final void processQueries(Collection<? extends Query<I, D>> queries); @Override final HistogramDataSet getStatisticalData(); @Override final void setNext(final MembershipOracle<I, D> next); } | @Test(dependsOnMethods = "testInitialState") public void testFirstQueryBatch() { Collection<Query<Integer, Word<Character>>> queries = TestQueries.createNoopQueries(2); oracle.processQueries(queries); verifyCounts(2, 0, 0, 0); }
@Test(dependsOnMethods = "testFirstQueryBatch") public void testEmptyQueryBatch() { Collection<Query<Integer, Word<Character>>> noQueries = Collections.emptySet(); oracle.processQueries(noQueries); verifyCounts(2, 0, 0, 0); }
@Test(dependsOnMethods = "testEmptyQueryBatch") public void testSecondQueryBatch() { Collection<Query<Integer, Word<Character>>> queries = TestQueries.createNoopQueries(2, 5, TestQueries.INPUTS); oracle.processQueries(queries); verifyCounts(4, 10, 2.5, 0); } |
HistogramOracle implements StatisticOracle<I, D> { @Override public final HistogramDataSet getStatisticalData() { return this.dataSet; } HistogramOracle(MembershipOracle<I, D> next, String name); @Override final void processQueries(Collection<? extends Query<I, D>> queries); @Override final HistogramDataSet getStatisticalData(); @Override final void setNext(final MembershipOracle<I, D> next); } | @Test(dependsOnMethods = "testSecondQueryBatch") public void testSummary() throws IOException { final String details = oracle.getStatisticalData().getDetails(); final String summary = oracle.getStatisticalData().getSummary(); try (InputStream detailStream = HistogramOracleTest.class.getResourceAsStream("/histogram_details.txt"); InputStream summaryStream = HistogramOracleTest.class.getResourceAsStream("/histogram_summary.txt")) { final String expectedDetail = CharStreams.toString(IOUtil.asBufferedUTF8Reader(detailStream)); final String expectedSummary = CharStreams.toString(IOUtil.asBufferedUTF8Reader(summaryStream)); Assert.assertEquals(details, expectedDetail); Assert.assertEquals(summary, expectedSummary); } }
@Test public void testGetName() { Assert.assertEquals(oracle.getStatisticalData().getName(), COUNTER_NAME); } |
SuffixASCIIWriter extends AbstractObservationTableWriter<I, D> { @Override public void write(ObservationTable<? extends I, ? extends D> table, Appendable out) throws IOException { List<? extends Word<? extends I>> suffixes = table.getSuffixes(); StringBuilder sb = new StringBuilder(); boolean first = true; for (Word<? extends I> word : suffixes) { if (first) { first = false; } else { sb.append(WORD_DELIMITER); } String stringRepresentation = wordToString(word); if (stringRepresentation.contains(WORD_DELIMITER)) { throw new IllegalArgumentException( "Delimiter '" + WORD_DELIMITER + "' must not be used in symbol names. " + "Symbol containing the delimiter was '" + stringRepresentation + '\''); } else { sb.append(stringRepresentation); } } out.append(sb.toString()); } SuffixASCIIWriter(); @Override void write(ObservationTable<? extends I, ? extends D> table, Appendable out); } | @Test public void testWrite() { SuffixASCIIWriter<String, String> writer = new SuffixASCIIWriter<>(); ObservationTable<String, String> ot = ObservationTableSource.otWithFourSuffixes(); Assert.assertEquals(OTUtils.toString(ot, writer), ";A;B;A,B"); } |
CounterOracle implements StatisticOracle<I, D> { public long getCount() { return counter.getCount(); } CounterOracle(MembershipOracle<I, D> nextOracle, String name); @Override void processQueries(Collection<? extends Query<I, D>> queries); @Override Counter getStatisticalData(); Counter getCounter(); long getCount(); @Override void setNext(MembershipOracle<I, D> next); } | @Test public void testInitialState() { Assert.assertEquals(oracle.getCount(), 0L); } |
CounterOracle implements StatisticOracle<I, D> { public Counter getCounter() { return this.counter; } CounterOracle(MembershipOracle<I, D> nextOracle, String name); @Override void processQueries(Collection<? extends Query<I, D>> queries); @Override Counter getStatisticalData(); Counter getCounter(); long getCount(); @Override void setNext(MembershipOracle<I, D> next); } | @Test public void testGetName() { Assert.assertEquals(oracle.getCounter().getName(), COUNTER_NAME); } |
JointCounterOracle implements MembershipOracle<I, D> { @Override public void processQueries(Collection<? extends Query<I, D>> queries) { queryCounter.addAndGet(queries.size()); for (Query<I, D> qry : queries) { symbolCounter.addAndGet(qry.getInput().length()); } delegate.processQueries(queries); } JointCounterOracle(MembershipOracle<I, D> delegate); @Override void processQueries(Collection<? extends Query<I, D>> queries); long getQueryCount(); long getSymbolCount(); } | @Test(dependsOnMethods = "testInitialState") public void testFirstQueryBatch() { Collection<Query<Integer, Word<Character>>> queries = TestQueries.createNoopQueries(2); oracle.processQueries(queries); verifyCounts(2, 0); }
@Test(dependsOnMethods = "testFirstQueryBatch") public void testEmptyQueryBatch() { Collection<Query<Integer, Word<Character>>> noQueries = Collections.emptySet(); oracle.processQueries(noQueries); verifyCounts(2, 0); }
@Test(dependsOnMethods = "testEmptyQueryBatch") public void testSecondQueryBatch() { Collection<Query<Integer, Word<Character>>> queries = TestQueries.createNoopQueries(2, 5, TestQueries.INPUTS); oracle.processQueries(queries); verifyCounts(4, 10); } |
MealyDHC implements MealyLearner<I, O>,
AccessSequenceTransformer<I>,
GlobalSuffixLearnerMealy<I, O>,
SupportsGrowingAlphabet<I>,
Resumable<MealyDHCState<I, O>> { @Override public CompactMealy<I, O> getHypothesisModel() { checkInternalState(); return hypothesis; } MealyDHC(Alphabet<I> alphabet, MembershipOracle<I, Word<O>> oracle); @GenerateBuilder(defaults = BuilderDefaults.class, builderFinal = false) MealyDHC(Alphabet<I> alphabet,
MembershipOracle<I, Word<O>> oracle,
GlobalSuffixFinder<? super I, ? super Word<O>> suffixFinder,
Collection<? extends Word<I>> initialSplitters); @Override Collection<Word<I>> getGlobalSuffixes(); @Override boolean addGlobalSuffixes(Collection<? extends Word<I>> newGlobalSuffixes); @Override void startLearning(); @Override boolean refineHypothesis(DefaultQuery<I, Word<O>> ceQuery); @Override CompactMealy<I, O> getHypothesisModel(); @Override void addAlphabetSymbol(I symbol); @Override MealyDHCState<I, O> suspend(); @Override void resume(final MealyDHCState<I, O> state); @Override Word<I> transformAccessSequence(Word<I> word); @Override boolean isAccessSequence(Word<I> word); } | @Test(expectedExceptions = Exception.class) public void testMealyDHCInternalSate() { ExampleStack stackExample = ExampleStack.createExample(); MealyMachine<?, ExampleStack.Input, ?, ExampleStack.Output> fm = stackExample.getReferenceAutomaton(); Alphabet<ExampleStack.Input> alphabet = stackExample.getAlphabet(); MealySimulatorOracle<ExampleStack.Input, ExampleStack.Output> simoracle = new MealySimulatorOracle<>(fm); MealyDHC<ExampleStack.Input, ExampleStack.Output> dhc = new MealyDHC<>(alphabet, simoracle); dhc.getHypothesisModel(); } |
ADTHypothesis extends AbstractFastMutableDet<ADTState<I, O>, I, ADTTransition<I, O>, Void, O> implements MutableMealyMachine<ADTState<I, O>, I, ADTTransition<I, O>, O>, AccessSequenceTransformer<I> { @Override public void setTransition(final ADTState<I, O> state, I input, final ADTTransition<I, O> transition) { final ADTTransition<I, O> oldTrans = getTransition(state, input); if (oldTrans != null) { oldTrans.getTarget().getIncomingTransitions().remove(oldTrans); } super.setTransition(state, input, transition); if (transition != null) { transition.setSource(state); transition.setInput(input); } } ADTHypothesis(final Alphabet<I> alphabet); @Override ADTState<I, O> getSuccessor(final ADTTransition<I, O> transition); ADTTransition<I, O> createOpenTransition(final ADTState<I, O> source,
final I input,
final ADTNode<ADTState<I, O>, I, O> siftTarget); @Override void setTransition(final ADTState<I, O> state, I input, final ADTTransition<I, O> transition); @Override ADTTransition<I, O> createTransition(final ADTState<I, O> successor, final O properties); @Override void setTransitionOutput(final ADTTransition<I, O> transition, final O output); @Override O getTransitionOutput(final ADTTransition<I, O> transition); @SuppressWarnings("nullness") // hypothesis is always complete @Override Word<I> transformAccessSequence(final Word<I> word); @SuppressWarnings("nullness") // hypothesis is always complete @Override boolean isAccessSequence(final Word<I> word); } | @Test public void testAutomaton() { final int states = 10; final Alphabet<Character> alphabet = Alphabets.characters('a', 'd'); final ADTHypothesis<Character, Integer> automaton = new ADTHypothesis<>(alphabet); automaton.addInitialState(); for (int i = 1; i < states; i++) { automaton.addState(); } Assert.assertEquals(states, automaton.size()); automaton.getStates().forEach(x -> Assert.assertTrue(x.getIncomingTransitions().isEmpty())); final StateIDs<ADTState<Character, Integer>> stateIds = automaton.stateIDs(); for (int s = 0; s < automaton.size(); s++) { for (final Character i : alphabet) { automaton.addTransition(stateIds.getState(s), i, automaton.getInitialState(), 0); } } Assert.assertEquals(states * alphabet.size(), automaton.getInitialState().getIncomingTransitions().size()); final ADTState<Character, Integer> s1 = stateIds.getState(1), s2 = stateIds.getState(2), s3 = stateIds.getState(3); automaton.removeAllTransitions(s1); Assert.assertEquals((states - 1) * alphabet.size(), automaton.getInitialState().getIncomingTransitions().size()); automaton.removeAllTransitions(s2, alphabet.getSymbol(0)); Assert.assertEquals((states - 1) * alphabet.size() - 1, automaton.getInitialState().getIncomingTransitions().size()); automaton.addTransition(s2, alphabet.getSymbol(0), s1, 0); for (int i = 1; i < alphabet.size(); 
i++) { ADTTransition<Character, Integer> transition = automaton.getTransition(s2, alphabet.getSymbol(i)); transition.setTarget(s1); } Assert.assertEquals(alphabet.size(), s1.getIncomingTransitions().size()); for (int i = 0; i < alphabet.size(); i++) { automaton.setTransition(s3, alphabet.getSymbol(i), s1, 0); } Assert.assertEquals(alphabet.size() * 2, s1.getIncomingTransitions().size()); } |
AbstractBFEmptinessOracle extends AbstractBFOracle<A, I, D> implements EmptinessOracle<A, I, D> { @Override public @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs) { return super.findCounterExample(hypothesis, inputs); } protected AbstractBFEmptinessOracle(MembershipOracle<I, D> membershipOracle, double multiplier); @Override boolean isCounterExample(A hypothesis, Iterable<? extends I> inputs, @Nullable D output); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testFindCounterExample() { final DefaultQuery<Character, D> cex = bfeo.findCounterExample(automaton, ALPHABET); Assert.assertEquals(cex, query); } |
AbstractBFEmptinessOracle extends AbstractBFOracle<A, I, D> implements EmptinessOracle<A, I, D> { @Override public boolean isCounterExample(A hypothesis, Iterable<? extends I> inputs, @Nullable D output) { return EmptinessOracle.super.isCounterExample(hypothesis, inputs, output); } protected AbstractBFEmptinessOracle(MembershipOracle<I, D> membershipOracle, double multiplier); @Override boolean isCounterExample(A hypothesis, Iterable<? extends I> inputs, @Nullable D output); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testIsCounterExample() { bfeo.isCounterExample(automaton, query.getInput(), query.getOutput()); } |
DisproveFirstOracle implements BlackBoxOracle<A, I, D> { @Override public List<PropertyOracle<I, ? super A, ?, D>> getPropertyOracles() { return propertyOracles; } DisproveFirstOracle(); DisproveFirstOracle(PropertyOracle<I, ? super A, ?, D> propertyOracle); DisproveFirstOracle(Collection<? extends PropertyOracle<I, ? super A, ?, D>> propertyOracles); @Override List<PropertyOracle<I, ? super A, ?, D>> getPropertyOracles(); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testGetPropertyOracles() { Assert.assertEquals(oracle.getPropertyOracles().size(), 2); } |
DisproveFirstOracle implements BlackBoxOracle<A, I, D> { @Override public @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs) { for (PropertyOracle<I, ? super A, ?, D> po : propertyOracles) { if (!po.isDisproved()) { po.disprove(hypothesis, inputs); } } for (PropertyOracle<I, ? super A, ?, D> po : propertyOracles) { if (!po.isDisproved()) { final DefaultQuery<I, D> ce = po.doFindCounterExample(hypothesis, inputs); if (ce != null) { assert isCounterExample(hypothesis, ce.getInput(), ce.getOutput()); return ce; } } } return null; } DisproveFirstOracle(); DisproveFirstOracle(PropertyOracle<I, ? super A, ?, D> propertyOracle); DisproveFirstOracle(Collection<? extends PropertyOracle<I, ? super A, ?, D>> propertyOracles); @Override List<PropertyOracle<I, ? super A, ?, D>> getPropertyOracles(); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testFindCounterExample() { final DefaultQuery<Boolean, Boolean> ce = oracle.findCounterExample(automaton, inputs); Assert.assertEquals(ce, query); Mockito.verify(po1, Mockito.times(1)).disprove(automaton, inputs); Mockito.verify(po2).disprove(automaton, inputs); Mockito.verify(po2, Mockito.never()).doFindCounterExample(automaton, inputs); } |
AbstractBFInclusionOracle extends AbstractBFOracle<A, I, D> implements InclusionOracle<A, I, D> { @Override public boolean isCounterExample(A hypothesis, Iterable<? extends I> inputs, D output) { return InclusionOracle.super.isCounterExample(hypothesis, inputs, output); } AbstractBFInclusionOracle(MembershipOracle<I, D> membershipOracle, double multiplier); @Override boolean isCounterExample(A hypothesis, Iterable<? extends I> inputs, D output); @Override @Nullable DefaultQuery<I, D> findCounterExample(A hypothesis, Collection<? extends I> inputs); } | @Test public void testIsCounterExample() { Assert.assertTrue(bfio.isCounterExample(automaton, query.getInput(), query.getOutput())); } |
Entry implements ContentHolder<T> { public static Entry<Void> ofDirectory(Revision revision, String path) { return new Entry<>(revision, path, EntryType.DIRECTORY, null); } private Entry(Revision revision, String path, EntryType type, @Nullable T content); static Entry<Void> ofDirectory(Revision revision, String path); static Entry<JsonNode> ofJson(Revision revision, String path, JsonNode content); static Entry<JsonNode> ofJson(Revision revision, String path, String content); static Entry<String> ofText(Revision revision, String path, String content); static Entry<T> of(Revision revision, String path, EntryType type, @Nullable T content); Revision revision(); String path(); boolean hasContent(); void ifHasContent(Consumer<? super T> consumer); @Override EntryType type(); @Override T content(); @Override String contentAsText(); @Override String contentAsPrettyText(); @Override int hashCode(); @Override boolean equals(Object o); @Override String toString(); } | @Test void ofDirectory() throws Exception { final Entry<Void> e = Entry.ofDirectory(new Revision(1), "/"); assertThat(e.revision()).isEqualTo(new Revision(1)); assertThat(e.hasContent()).isFalse(); assertThatThrownBy(e::content).isInstanceOf(EntryNoContentException.class); assertThatThrownBy(e::contentAsJson).isInstanceOf(EntryNoContentException.class); assertThatThrownBy(e::contentAsText).isInstanceOf(EntryNoContentException.class); assertThatThrownBy(e::contentAsPrettyText).isInstanceOf(EntryNoContentException.class); assertThatThrownBy(() -> e.contentAsJson(JsonNode.class)).isInstanceOf(EntryNoContentException.class); final Entry<Void> e2 = Entry.ofDirectory(new Revision(1), "/"); assertThat(e).isEqualTo(e2); assertThat(e.hashCode()).isEqualTo(e2.hashCode()); assertThat(e).isNotEqualTo(Entry.ofDirectory(new Revision(2), "/")); assertThat(e).isNotEqualTo(Entry.ofDirectory(new Revision(1), "/foo")); final Entry<String> e3 = Entry.ofText(new Revision(1), "/a.txt", "foo"); assertThat(e).isNotEqualTo(e3); 
assertThat(e.hashCode()).isNotEqualTo(e3.hashCode()); final Entry<JsonNode> e4 = Entry.ofJson(new Revision(1), "/a.json", "{ \"foo\": \"bar\" }"); assertThat(e).isNotEqualTo(e4); assertThat(e.hashCode()).isNotEqualTo(e4.hashCode()); } |
DefaultMetaRepository extends RepositoryWrapper implements MetaRepository { @Override public Set<Mirror> mirrors() { mirrorLock.lock(); try { final int headRev = normalizeNow(Revision.HEAD).major(); final Set<String> repos = parent().repos().list().keySet(); if (headRev > mirrorRev || !mirrorRepos.equals(repos)) { mirrors = loadMirrors(headRev); mirrorRev = headRev; mirrorRepos = repos; } return mirrors; } finally { mirrorLock.unlock(); } } DefaultMetaRepository(Repository repo); @Override Set<Mirror> mirrors(); } | @Test void testEmptyMirrors() { assertThat(metaRepo.mirrors()).isEmpty(); metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert("/credentials.json", "[]")); assertThat(metaRepo.mirrors()).isEmpty(); metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert("/mirrors.json", "[]")); assertThat(metaRepo.mirrors()).isEmpty(); }
@Test void testInvalidMirrors() { metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert(PATH_MIRRORS, "{}")).join(); assertThatThrownBy(() -> metaRepo.mirrors()).isInstanceOf(RepositoryMetadataException.class); metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert(PATH_MIRRORS, "\"oops\"")).join(); assertThatThrownBy(() -> metaRepo.mirrors()).isInstanceOf(RepositoryMetadataException.class); metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert(PATH_MIRRORS, "[ null ]")).join(); assertThatThrownBy(() -> metaRepo.mirrors()).isInstanceOf(RepositoryMetadataException.class); }
@Test void testSingleTypeMirror() { metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert( PATH_MIRRORS, "[{" + " \"enabled\": true," + " \"type\": \"single\"," + " \"direction\": \"LOCAL_TO_REMOTE\"," + " \"localRepo\": \"foo\"," + " \"localPath\": \"/mirrors/foo\"," + " \"remoteUri\": \"git+ssh: "},{" + " \"enabled\": true," + " \"type\": \"single\"," + " \"schedule\": \"*/10 * * * * ?\"," + " \"direction\": \"REMOTE_TO_LOCAL\"," + " \"localRepo\": \"bar\"," + " \"remoteUri\": \"git+ssh: "}, {" + " \"type\": \"single\"," + " \"direction\": \"LOCAL_TO_REMOTE\"," + " \"localRepo\": \"qux\"," + " \"remoteUri\": \"git+ssh: "}, {" + " \"enabled\": false," + " \"type\": \"single\"," + " \"direction\": \"LOCAL_TO_REMOTE\"," + " \"localRepo\": \"foo\"," + " \"localPath\": \"/mirrors/bar\"," + " \"remoteUri\": \"git+ssh: "}]"), UPSERT_CREDENTIALS).join(); assertThat(metaRepo.mirrors()).isEmpty(); project.repos().create("foo", Author.SYSTEM); project.repos().create("bar", Author.SYSTEM); project.repos().create("qux", Author.SYSTEM); final List<Mirror> mirrors = findMirrors(); assertThat(mirrors.stream() .map(m -> m.localRepo().name()) .collect(Collectors.toList())).containsExactly("bar", "foo", "qux"); final Mirror foo = mirrors.get(1); final Mirror bar = mirrors.get(0); final Mirror qux = mirrors.get(2); assertThat(foo.direction()).isEqualTo(MirrorDirection.LOCAL_TO_REMOTE); assertThat(bar.direction()).isEqualTo(MirrorDirection.REMOTE_TO_LOCAL); assertThat(qux.direction()).isEqualTo(MirrorDirection.LOCAL_TO_REMOTE); assertThat(foo.schedule().equivalent(cronParser.parse("0 * * * * ?"))).isTrue(); assertThat(bar.schedule().equivalent(cronParser.parse("*/10 * * * * ?"))).isTrue(); assertThat(qux.schedule().equivalent(cronParser.parse("0 * * * * ?"))).isTrue(); assertThat(foo.localPath()).isEqualTo("/mirrors/foo/"); assertThat(bar.localPath()).isEqualTo("/"); assertThat(qux.localPath()).isEqualTo("/"); 
assertThat(foo.remoteRepoUri().toString()).isEqualTo("git+ssh: assertThat(bar.remoteRepoUri().toString()).isEqualTo("git+ssh: assertThat(qux.remoteRepoUri().toString()).isEqualTo("git+ssh: assertThat(foo.remotePath()).isEqualTo("/"); assertThat(bar.remotePath()).isEqualTo("/some-path/"); assertThat(qux.remotePath()).isEqualTo("/"); assertThat(foo.remoteBranch()).isEqualTo("master"); assertThat(bar.remoteBranch()).isEqualTo("master"); assertThat(qux.remoteBranch()).isEqualTo("develop"); assertThat(foo.credential()).isInstanceOf(PasswordMirrorCredential.class); assertThat(bar.credential()).isInstanceOf(PasswordMirrorCredential.class); assertThat(qux.credential()).isInstanceOf(NoneMirrorCredential.class); final PasswordMirrorCredential fooCredential = (PasswordMirrorCredential) foo.credential(); final PasswordMirrorCredential barCredential = (PasswordMirrorCredential) bar.credential(); assertThat(fooCredential.username()).isEqualTo("alice"); assertThat(fooCredential.password()).isEqualTo("secret_a"); assertThat(barCredential.username()).isEqualTo("bob"); assertThat(barCredential.password()).isEqualTo("secret_b"); }
@Test void testMultipleTypeMirror() { metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert( PATH_MIRRORS, "[{" + " \"enabled\": true," + " \"type\": \"multiple\"," + " \"defaultDirection\": \"LOCAL_TO_REMOTE\"," + " \"defaultLocalPath\": \"/source\"," + " \"includes\": [{" + " \"pattern\": \"^([^.]+)(\\\\.[^.]+)$\"," + " \"replacement\": \"git+ssh: " }]," + " \"excludes\": [ \"^qux\\\\.net$\" ]" + "}, {" + " \"type\": \"multiple\"," + " \"defaultDirection\": \"LOCAL_TO_REMOTE\"," + " \"defaultLocalPath\": \"/source\"," + " \"defaultSchedule\": \"* * * * * ?\"," + " \"includes\": [{" + " \"pattern\": \"^qux\\\\.net$\"," + " \"replacement\": \"dogma: " \"schedule\": \"0 0 * * * ?\"," + " \"direction\": \"REMOTE_TO_LOCAL\"," + " \"localPath\": \"/mirrored/qux\"" + " }]" + "}, {" + " \"enabled\": false," + " \"type\": \"multiple\"," + " \"defaultDirection\": \"REMOTE_TO_LOCAL\"," + " \"defaultLocalPath\": \"/otherSource\"," + " \"includes\": [{" + " \"pattern\": \"^([^.]+)(\\\\.[^.]+)$\"," + " \"replacement\": \"git+ssh: " }]" + "}]"), UPSERT_CREDENTIALS).join(); assertThat(metaRepo.mirrors()).isEmpty(); project.repos().create("foo.com", Author.SYSTEM); project.repos().create("bar.org", Author.SYSTEM); project.repos().create("qux.net", Author.SYSTEM); final List<Mirror> mirrors = findMirrors(); assertThat(mirrors.stream() .map(m -> m.localRepo().name()) .collect(Collectors.toList())).containsExactly("bar.org", "foo.com", "qux.net"); final Mirror foo = mirrors.get(1); final Mirror bar = mirrors.get(0); final Mirror qux = mirrors.get(2); assertThat(foo.direction()).isEqualTo(MirrorDirection.LOCAL_TO_REMOTE); assertThat(bar.direction()).isEqualTo(MirrorDirection.LOCAL_TO_REMOTE); assertThat(qux.direction()).isEqualTo(MirrorDirection.REMOTE_TO_LOCAL); assertThat(foo.schedule().equivalent(cronParser.parse("0 * * * * ?"))).isTrue(); assertThat(bar.schedule().equivalent(cronParser.parse("0 * * * * ?"))).isTrue(); 
assertThat(qux.schedule().equivalent(cronParser.parse("0 0 * * * ?"))).isTrue(); assertThat(foo.localPath()).isEqualTo("/source/"); assertThat(bar.localPath()).isEqualTo("/source/"); assertThat(qux.localPath()).isEqualTo("/mirrored/qux/"); assertThat(foo.remoteRepoUri().toASCIIString()).isEqualTo("git+ssh: assertThat(bar.remoteRepoUri().toASCIIString()).isEqualTo("git+ssh: assertThat(qux.remoteRepoUri().toASCIIString()).isEqualTo("dogma: assertThat(foo.remotePath()).isEqualTo("/"); assertThat(bar.remotePath()).isEqualTo("/"); assertThat(qux.remotePath()).isEqualTo("/some-path/"); assertThat(foo.remoteBranch()).isEqualTo("develop-foo"); assertThat(bar.remoteBranch()).isEqualTo("develop-bar"); assertThat(qux.remoteBranch()).isNull(); assertThat(foo.direction()).isEqualTo(MirrorDirection.LOCAL_TO_REMOTE); assertThat(bar.direction()).isEqualTo(MirrorDirection.LOCAL_TO_REMOTE); assertThat(qux.direction()).isEqualTo(MirrorDirection.REMOTE_TO_LOCAL); }
@Test void testMultipleTypeMirrorWithCredentialId() { metaRepo.commit(Revision.HEAD, 0, Author.SYSTEM, "", Change.ofJsonUpsert( PATH_MIRRORS, "[{" + " \"type\": \"multiple\"," + " \"defaultDirection\": \"REMOTE_TO_LOCAL\"," + " \"includes\": [{" + " \"pattern\": \"^(foo|bar)$\"," + " \"replacement\": \"git+ssh: " \"credentialId\": \"alice\"" + " }]" + "}, {" + " \"type\": \"multiple\"," + " \"defaultDirection\": \"REMOTE_TO_LOCAL\"," + " \"defaultCredentialId\": \"alice\"," + " \"includes\": [{" + " \"pattern\": \"^(qux)$\"," + " \"replacement\": \"git+ssh: " }]" + "}]"), UPSERT_CREDENTIALS).join(); assertThat(metaRepo.mirrors()).isEmpty(); project.repos().create("foo", Author.SYSTEM); project.repos().create("bar", Author.SYSTEM); project.repos().create("qux", Author.SYSTEM); final List<Mirror> mirrors = findMirrors(); assertThat(mirrors.stream() .map(m -> m.localRepo().name()) .collect(Collectors.toList())).containsExactly("bar", "foo", "qux"); final MirrorCredential fooCredential = mirrors.get(1).credential(); assertThat(fooCredential).isInstanceOf(PasswordMirrorCredential.class); assertThat(((PasswordMirrorCredential) fooCredential).username()).isEqualTo("alice"); assertThat(mirrors.get(0).credential()).isSameAs(fooCredential); assertThat(mirrors.get(2).credential()).isSameAs(fooCredential); } |
CachingRepository implements Repository { @Override public CompletableFuture<Map<String, Entry<?>>> find(Revision revision, String pathPattern, Map<FindOption<?>, ?> options) { requireNonNull(revision, "revision"); requireNonNull(pathPattern, "pathPattern"); requireNonNull(options, "options"); final Revision normalizedRevision = normalizeNow(revision); return cache.get(new CacheableFindCall(repo, normalizedRevision, pathPattern, options)); } CachingRepository(Repository repo, RepositoryCache cache); @Override long creationTimeMillis(); @Override Author author(); @Override CompletableFuture<Entry<T>> getOrNull(Revision revision, Query<T> query); @Override CompletableFuture<Map<String, Entry<?>>> find(Revision revision, String pathPattern,
Map<FindOption<?>, ?> options); @Override CompletableFuture<List<Commit>> history(Revision from, Revision to,
String pathPattern, int maxCommits); @Override CompletableFuture<Change<?>> diff(Revision from, Revision to, Query<?> query); @Override CompletableFuture<Map<String, Change<?>>> diff(Revision from, Revision to, String pathPattern); @Override CompletableFuture<Revision> findLatestRevision(Revision lastKnownRevision, String pathPattern); @Override CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern); @Override Project parent(); @Override String name(); @Override Revision normalizeNow(Revision revision); @Override RevisionRange normalizeNow(Revision from, Revision to); @Override CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision,
Iterable<Change<?>> changes); @Override CompletableFuture<Revision> commit(Revision baseRevision, long commitTimeMillis,
Author author, String summary, String detail, Markup markup,
Iterable<Change<?>> changes); @Override CompletableFuture<MergedEntry<T>> mergeFiles(Revision revision, MergeQuery<T> query); @Override String toString(); } | @Test void find() { final Repository repo = setMockNames(newCachingRepo()); final Map<String, Entry<?>> entries = ImmutableMap.of("/baz.txt", Entry.ofText(new Revision(10), "/baz.txt", "qux")); doReturn(new Revision(10)).when(delegateRepo).normalizeNow(new Revision(10)); doReturn(new Revision(10)).when(delegateRepo).normalizeNow(HEAD); when(delegateRepo.find(any(), any(), any())).thenReturn(completedFuture(entries)); assertThat(repo.find(HEAD, "/**", ImmutableMap.of()).join()).isEqualTo(entries); verify(delegateRepo).find(new Revision(10), "/**", ImmutableMap.of()); verifyNoMoreInteractions(delegateRepo); clearInvocations(delegateRepo); assertThat(repo.find(HEAD, "/**", ImmutableMap.of()).join()).isEqualTo(entries); assertThat(repo.find(new Revision(10), "/**", ImmutableMap.of()).join()).isEqualTo(entries); verify(delegateRepo, never()).find(any(), any(), any()); verifyNoMoreInteractions(delegateRepo); } |
CachingRepository implements Repository { @Override public CompletableFuture<List<Commit>> history(Revision from, Revision to, String pathPattern, int maxCommits) { requireNonNull(from, "from"); requireNonNull(to, "to"); requireNonNull(pathPattern, "pathPattern"); if (maxCommits <= 0) { throw new IllegalArgumentException("maxCommits: " + maxCommits + " (expected: > 0)"); } final RevisionRange range = normalizeNow(from, to); final int actualMaxCommits = Math.min( maxCommits, Math.abs(range.from().major() - range.to().major()) + 1); return cache.get(new CacheableHistoryCall(repo, range.from(), range.to(), pathPattern, actualMaxCommits)); } CachingRepository(Repository repo, RepositoryCache cache); @Override long creationTimeMillis(); @Override Author author(); @Override CompletableFuture<Entry<T>> getOrNull(Revision revision, Query<T> query); @Override CompletableFuture<Map<String, Entry<?>>> find(Revision revision, String pathPattern,
Map<FindOption<?>, ?> options); @Override CompletableFuture<List<Commit>> history(Revision from, Revision to,
String pathPattern, int maxCommits); @Override CompletableFuture<Change<?>> diff(Revision from, Revision to, Query<?> query); @Override CompletableFuture<Map<String, Change<?>>> diff(Revision from, Revision to, String pathPattern); @Override CompletableFuture<Revision> findLatestRevision(Revision lastKnownRevision, String pathPattern); @Override CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern); @Override Project parent(); @Override String name(); @Override Revision normalizeNow(Revision revision); @Override RevisionRange normalizeNow(Revision from, Revision to); @Override CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision,
Iterable<Change<?>> changes); @Override CompletableFuture<Revision> commit(Revision baseRevision, long commitTimeMillis,
Author author, String summary, String detail, Markup markup,
Iterable<Change<?>> changes); @Override CompletableFuture<MergedEntry<T>> mergeFiles(Revision revision, MergeQuery<T> query); @Override String toString(); } | @Test void history() { final Repository repo = setMockNames(newCachingRepo()); final List<Commit> commits = ImmutableList.of( new Commit(new Revision(3), SYSTEM, "third", "", Markup.MARKDOWN), new Commit(new Revision(3), SYSTEM, "second", "", Markup.MARKDOWN), new Commit(new Revision(3), SYSTEM, "first", "", Markup.MARKDOWN)); doReturn(new RevisionRange(3, 1)).when(delegateRepo).normalizeNow(HEAD, INIT); doReturn(new RevisionRange(3, 1)).when(delegateRepo).normalizeNow(HEAD, new Revision(-3)); doReturn(new RevisionRange(3, 1)).when(delegateRepo).normalizeNow(new Revision(3), new Revision(-3)); doReturn(new RevisionRange(3, 1)).when(delegateRepo).normalizeNow(new Revision(3), INIT); when(delegateRepo.history(any(), any(), any(), anyInt())).thenReturn(completedFuture(commits)); assertThat(repo.history(HEAD, INIT, "/**", Integer.MAX_VALUE).join()).isEqualTo(commits); verify(delegateRepo).history(new Revision(3), INIT, "/**", 3); verifyNoMoreInteractions(delegateRepo); clearInvocations(delegateRepo); assertThat(repo.history(HEAD, new Revision(-3), "/**", 3).join()).isEqualTo(commits); assertThat(repo.history(HEAD, INIT, "/**", 4).join()).isEqualTo(commits); assertThat(repo.history(new Revision(3), new Revision(-3), "/**", 5).join()).isEqualTo(commits); assertThat(repo.history(new Revision(3), INIT, "/**", 6).join()).isEqualTo(commits); verify(delegateRepo, never()).history(any(), any(), any(), anyInt()); verifyNoMoreInteractions(delegateRepo); } |
CachingRepository implements Repository { @Override public Revision normalizeNow(Revision revision) { return repo.normalizeNow(revision); } CachingRepository(Repository repo, RepositoryCache cache); @Override long creationTimeMillis(); @Override Author author(); @Override CompletableFuture<Entry<T>> getOrNull(Revision revision, Query<T> query); @Override CompletableFuture<Map<String, Entry<?>>> find(Revision revision, String pathPattern,
Map<FindOption<?>, ?> options); @Override CompletableFuture<List<Commit>> history(Revision from, Revision to,
String pathPattern, int maxCommits); @Override CompletableFuture<Change<?>> diff(Revision from, Revision to, Query<?> query); @Override CompletableFuture<Map<String, Change<?>>> diff(Revision from, Revision to, String pathPattern); @Override CompletableFuture<Revision> findLatestRevision(Revision lastKnownRevision, String pathPattern); @Override CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern); @Override Project parent(); @Override String name(); @Override Revision normalizeNow(Revision revision); @Override RevisionRange normalizeNow(Revision from, Revision to); @Override CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision,
Iterable<Change<?>> changes); @Override CompletableFuture<Revision> commit(Revision baseRevision, long commitTimeMillis,
Author author, String summary, String detail, Markup markup,
Iterable<Change<?>> changes); @Override CompletableFuture<MergedEntry<T>> mergeFiles(Revision revision, MergeQuery<T> query); @Override String toString(); } | @Test void metrics() { final MeterRegistry meterRegistry = PrometheusMeterRegistries.newRegistry(); final Repository repo = newCachingRepo(meterRegistry); final Map<String, Double> meters = MoreMeters.measureAll(meterRegistry); assertThat(meters).containsKeys("cache.load#count{cache=repository,result=success}"); assertThat(repo.normalizeNow(HEAD)).isNotEqualTo(""); } |
TagUtil { static String byteHexDirName(int majorRevision) { if (majorRevision <= 0) { throw new IllegalArgumentException("invalid majorRevision " + majorRevision + " (expected: > 0)"); } final StringBuilder sb = new StringBuilder(16); final int shift = 8; do { sb.append(TABLE[majorRevision & 0xFF]); majorRevision >>>= shift; } while (majorRevision != 0); return sb.toString(); } private TagUtil(); } | @Test void testByteHexDirName() { assertThat(TagUtil.byteHexDirName(0x00000001)).isEqualTo("01/"); assertThat(TagUtil.byteHexDirName(0x00000b0a)).isEqualTo("0a/0b/"); assertThat(TagUtil.byteHexDirName(0x000c0b0a)).isEqualTo("0a/0b/0c/"); assertThat(TagUtil.byteHexDirName(0x0d0c0b0a)).isEqualTo("0a/0b/0c/0d/"); }
@Test void testByteHexDirNameException() { assertThatIllegalArgumentException() .isThrownBy(() -> TagUtil.byteHexDirName(0)); assertThatIllegalArgumentException() .isThrownBy(() -> TagUtil.byteHexDirName(-1)); } |
JsonPatch implements JsonSerializable { public JsonNode apply(final JsonNode node) { requireNonNull(node, "node"); JsonNode ret = node.deepCopy(); for (final JsonPatchOperation operation : operations) { ret = operation.apply(ret); } return ret; } @JsonCreator JsonPatch(final List<JsonPatchOperation> operations); static JsonPatch fromJson(final JsonNode node); static JsonPatch generate(final JsonNode source, final JsonNode target, ReplaceMode replaceMode); boolean isEmpty(); List<JsonPatchOperation> operations(); JsonNode apply(final JsonNode node); ArrayNode toJson(); @Override String toString(); @Override void serialize(final JsonGenerator jgen, final SerializerProvider provider); @Override void serializeWithType(final JsonGenerator jgen,
final SerializerProvider provider, final TypeSerializer typeSer); } | @Test void cannotPatchNull() { final JsonPatch patch = new JsonPatch(ImmutableList.of(op1, op2)); assertThatNullPointerException() .isThrownBy(() -> patch.apply(null)); }
@Test void operationsAreCalledInOrder() { final JsonNode node1 = FACTORY.textNode("hello"); final JsonNode node2 = FACTORY.textNode("world"); when(op1.apply(node1)).thenReturn(node2); final JsonPatch patch = new JsonPatch(ImmutableList.of(op1, op2)); final ArgumentCaptor<JsonNode> captor = ArgumentCaptor.forClass(JsonNode.class); patch.apply(node1); verify(op1, only()).apply(same(node1)); verify(op2, only()).apply(captor.capture()); assertThat(captor.getValue()).isSameAs(node2); }
@Test void whenOneOperationFailsNextOperationIsNotCalled() { final String message = "foo"; when(op1.apply(any(JsonNode.class))) .thenThrow(new JsonPatchException(message)); final JsonPatch patch = new JsonPatch(ImmutableList.of(op1, op2)); assertThatThrownBy(() -> patch.apply(FACTORY.nullNode())) .isInstanceOf(JsonPatchException.class) .hasMessage(message); verifyNoMoreInteractions(op2); } |
GitRepository implements Repository { @Override public CompletableFuture<Revision> commit( Revision baseRevision, long commitTimeMillis, Author author, String summary, String detail, Markup markup, Iterable<Change<?>> changes) { final ServiceRequestContext ctx = context(); return CompletableFuture.supplyAsync(() -> { failFastIfTimedOut(this, logger, ctx, "commit", baseRevision, author, summary); return blockingCommit(baseRevision, commitTimeMillis, author, summary, detail, markup, changes, false); }, repositoryWorker); } @VisibleForTesting GitRepository(Project parent, File repoDir, Executor repositoryWorker,
long creationTimeMillis, Author author); GitRepository(Project parent, File repoDir, GitRepositoryFormat format, Executor repositoryWorker,
long creationTimeMillis, Author author, @Nullable RepositoryCache cache); GitRepository(Project parent, File repoDir, Executor repositoryWorker, @Nullable RepositoryCache cache); @Override Project parent(); @Override String name(); GitRepositoryFormat format(); boolean needsMigration(GitRepositoryFormat preferredFormat); @Override Revision normalizeNow(Revision revision); @Override RevisionRange normalizeNow(Revision from, Revision to); @Override CompletableFuture<Map<String, Entry<?>>> find(
Revision revision, String pathPattern, Map<FindOption<?>, ?> options); @Override CompletableFuture<List<Commit>> history(
Revision from, Revision to, String pathPattern, int maxCommits); @Override CompletableFuture<Map<String, Change<?>>> diff(Revision from, Revision to, String pathPattern); @Override CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision,
Iterable<Change<?>> changes); @Override CompletableFuture<Revision> commit(
Revision baseRevision, long commitTimeMillis, Author author, String summary,
String detail, Markup markup, Iterable<Change<?>> changes); @Override CompletableFuture<Revision> findLatestRevision(Revision lastKnownRevision, String pathPattern); @Override CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern); void cloneTo(File newRepoDir); void cloneTo(File newRepoDir, BiConsumer<Integer, Integer> progressListener); void cloneTo(File newRepoDir, GitRepositoryFormat format); void cloneTo(File newRepoDir, GitRepositoryFormat format,
BiConsumer<Integer, Integer> progressListener); @Override String toString(); } | @Test void testRemoval() { assertThatThrownBy(() -> repo .commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRemoval(jsonPaths[0])).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); Revision revision = repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join(); assertThat(repo.exists(revision, jsonPaths[0]).join()).isTrue(); revision = repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRemoval(jsonPaths[0])).join(); assertThat(repo.exists(revision, jsonPaths[0]).join()).isFalse(); assertThatThrownBy(() -> repo .commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRemoval(jsonPaths[0])).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); }
@Test void testRename() { repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join(); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRename(jsonPaths[0], jsonPaths[1])).join(); assertThat(repo.exists(HEAD, jsonPaths[0]).join()).isFalse(); assertThat(repo.exists(HEAD, jsonPaths[1]).join()).isTrue(); assertThat(repo.exists(HEAD, jsonPaths[2]).join()).isFalse(); assertThatJson(repo.get(HEAD, jsonPaths[1]).join().content()) .isEqualTo(jsonUpserts[0].content()); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRename(jsonPaths[1], jsonPaths[2]), Change.ofJsonPatch(jsonPaths[2], jsonPatches[1].content()), Change.ofJsonPatch(jsonPaths[2], jsonPatches[2].content())).join(); assertThat(repo.exists(HEAD, jsonPaths[0]).join()).isFalse(); assertThat(repo.exists(HEAD, jsonPaths[1]).join()).isFalse(); assertThat(repo.exists(HEAD, jsonPaths[2]).join()).isTrue(); assertThatJson(repo.get(HEAD, jsonPaths[2]).join().content()) .isEqualTo(jsonUpserts[2].content()); }
@Test void testRenameFailure() { assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0], jsonUpserts[1], Change.ofRename(jsonPaths[0], jsonPaths[1])).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join(); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRename(jsonPaths[0], jsonPaths[0])).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[1], Change.ofRename(jsonPaths[1], jsonPaths[1])) .join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); }
@Test void testLateCommit() { final Revision rev = repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join(); assertThatThrownBy(() -> repo .commit(new Revision(rev.major() - 1), 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[1]).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); }
@Test void testEmptyCommit() { assertThatThrownBy( () -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Collections.emptyList()).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); }
@Test void testEmptyCommitWithRedundantRenames() { repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join(); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRename(jsonPaths[0], jsonPaths[1]), Change.ofRename(jsonPaths[1], jsonPaths[0])).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); }
@Test void testEmptyCommitWithRedundantUpsert() { assertThatThrownBy( () -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Collections.emptyList()).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join(); assertThatThrownBy( () -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); }
@Test void testEmptyCommitWithRedundantUpsert2() { final Change<JsonNode> change1 = Change.ofJsonUpsert("/redundant_upsert_2.json", "{ \"foo\": 0, \"bar\": 1 }"); final Change<String> change2 = Change.ofTextUpsert("/redundant_upsert_2.txt", "foo"); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, change1).join(); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, change2).join(); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, change1).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, change2).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); final Change<JsonNode> change1a = Change.ofJsonUpsert("/redundant_upsert_2.json", "{ \"bar\": 1, \"foo\": 0 }"); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, change1a).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); }
@Test void testTextSanitization() { final Change<String> dosText = Change.ofTextUpsert("/text_sanitization_dos.txt", "foo\r\nbar\r\n"); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, dosText).join(); assertThat(repo.get(HEAD, dosText.path()).join().contentAsText()).isEqualTo("foo\nbar\n"); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofTextUpsert(dosText.path(), "foo\nbar\n")).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofTextUpsert(dosText.path(), "foo\nbar")).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RedundantChangeException.class); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofTextUpsert(dosText.path(), "foo\nbar\r\n\n")).join(); assertThat(repo.get(HEAD, dosText.path()).join().contentAsText()).isEqualTo("foo\nbar\n\n"); final Change<String> withoutNewline = Change.ofTextUpsert("/text_sanitization_without_lf.txt", "foo"); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, withoutNewline).join(); assertThat(repo.get(HEAD, withoutNewline.path()).join().contentAsText()).isEqualTo("foo\n"); final Change<String> withNewline = Change.ofTextUpsert("/text_sanitization_with_lf.txt", "foo\n"); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, withNewline).join(); assertThat(repo.get(HEAD, withNewline.path()).join().contentAsText()).isEqualTo("foo\n"); }
@Test void testRenameWithConflict() { repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]).join(); assertThatThrownBy(() -> repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofRename(jsonPaths[0], jsonPaths[0])).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); }
@Test void testMultipleChangesWithConflict() { assertThatThrownBy(() -> repo .commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0], jsonPatches[2]).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); }
@Test void testJsonPathQuery() { repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, Change.ofJsonUpsert("/instances.json", '[' + " {" + " \"name\": \"a\"," + " \"groups\": [{" + " \"type\": \"phase\"," + " \"name\": \"alpha\"" + " }, {" + " \"type\": \"not_phase\"," + " \"name\": \"beta\"" + " }]" + " }, {" + " \"name\": \"b\"," + " \"groups\": [{" + " \"type\": \"phase\"," + " \"name\": \"beta\"" + " }, {" + " \"type\": \"not_phase\"," + " \"name\": \"alpha\"" + " }]" + " }" + ']')).join(); final Entry<JsonNode> res1 = repo.get(HEAD, Query.ofJsonPath( "/instances.json", "$[?(@.name == 'b')]")).join(); assertThatJson(res1.content()).isEqualTo("[{" + " \"name\": \"b\"," + " \"groups\": [{" + " \"type\": \"phase\"," + " \"name\": \"beta\"" + " }, {" + " \"type\": \"not_phase\"," + " \"name\": \"alpha\"" + " }]" + "}]"); final Entry<JsonNode> res2 = repo.get(HEAD, Query.ofJsonPath( "/instances.json", "$..groups[?(@.type == 'not_phase' && @.name == 'alpha')]")).join(); assertThatJson(res2.content()).isEqualTo("[{" + " \"type\": \"not_phase\"," + " \"name\": \"alpha\"" + "}]"); final Entry<JsonNode> res3 = repo.get(HEAD, Query.ofJsonPath( "/instances.json", "$[?(@.groups[?(@.type == 'phase' && @.name == 'alpha')] empty false)]")) .join(); assertThatJson(res3.content()).isEqualTo("[{" + " \"name\": \"a\"," + " \"groups\": [{" + " \"type\": \"phase\"," + " \"name\": \"alpha\"" + " }, {" + " \"type\": \"not_phase\"," + " \"name\": \"beta\"" + " }]" + "}]"); } |
GitRepository implements Repository { @Override public CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision, Iterable<Change<?>> changes) { final ServiceRequestContext ctx = context(); return CompletableFuture.supplyAsync(() -> { failFastIfTimedOut(this, logger, ctx, "previewDiff", baseRevision); return blockingPreviewDiff(baseRevision, changes); }, repositoryWorker); } @VisibleForTesting GitRepository(Project parent, File repoDir, Executor repositoryWorker,
long creationTimeMillis, Author author); GitRepository(Project parent, File repoDir, GitRepositoryFormat format, Executor repositoryWorker,
long creationTimeMillis, Author author, @Nullable RepositoryCache cache); GitRepository(Project parent, File repoDir, Executor repositoryWorker, @Nullable RepositoryCache cache); @Override Project parent(); @Override String name(); GitRepositoryFormat format(); boolean needsMigration(GitRepositoryFormat preferredFormat); @Override Revision normalizeNow(Revision revision); @Override RevisionRange normalizeNow(Revision from, Revision to); @Override CompletableFuture<Map<String, Entry<?>>> find(
Revision revision, String pathPattern, Map<FindOption<?>, ?> options); @Override CompletableFuture<List<Commit>> history(
Revision from, Revision to, String pathPattern, int maxCommits); @Override CompletableFuture<Map<String, Change<?>>> diff(Revision from, Revision to, String pathPattern); @Override CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision,
Iterable<Change<?>> changes); @Override CompletableFuture<Revision> commit(
Revision baseRevision, long commitTimeMillis, Author author, String summary,
String detail, Markup markup, Iterable<Change<?>> changes); @Override CompletableFuture<Revision> findLatestRevision(Revision lastKnownRevision, String pathPattern); @Override CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern); void cloneTo(File newRepoDir); void cloneTo(File newRepoDir, BiConsumer<Integer, Integer> progressListener); void cloneTo(File newRepoDir, GitRepositoryFormat format); void cloneTo(File newRepoDir, GitRepositoryFormat format,
BiConsumer<Integer, Integer> progressListener); @Override String toString(); } | @Test void testPreviewDiff() { final Map<String, Change<?>> changeMap = repo.previewDiff(HEAD, jsonUpserts[0]).join(); assertThat(changeMap).containsEntry(jsonPaths[0], jsonUpserts[0]); assertThatThrownBy(() -> repo.previewDiff(HEAD, jsonPatches[1]).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); assertThatThrownBy(() -> repo.previewDiff(HEAD, Change.ofRemoval(jsonPaths[0])).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(ChangeConflictException.class); final List<Change<?>> changes = Arrays.asList(jsonUpserts[0], jsonPatches[1], jsonPatches[2], Change.ofRename(jsonPaths[0], jsonPaths[1]), Change.ofRemoval(jsonPaths[1])); Map<String, Change<?>> returnedChangeMap = repo.previewDiff(HEAD, changes).join(); assertThat(returnedChangeMap).isEmpty(); assertThatThrownBy(() -> repo.previewDiff(new Revision(Integer.MAX_VALUE), changes).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(RevisionNotFoundException.class); assertThat(repo.previewDiff(new Revision(-1), Collections.emptyList()).join()).isEmpty(); repo.commit(HEAD, 0L, Author.UNKNOWN, SUMMARY, jsonPatches[0], jsonPatches[1]).join(); returnedChangeMap = repo.previewDiff(HEAD, jsonUpserts[0]).join(); assertThat(returnedChangeMap.get(jsonPaths[0]).type()).isEqualTo(ChangeType.APPLY_JSON_PATCH); } |
GitRepository implements Repository { @Override public CompletableFuture<Map<String, Entry<?>>> find( Revision revision, String pathPattern, Map<FindOption<?>, ?> options) { final ServiceRequestContext ctx = context(); return CompletableFuture.supplyAsync(() -> { failFastIfTimedOut(this, logger, ctx, "find", revision, pathPattern, options); return blockingFind(revision, pathPattern, options); }, repositoryWorker); } @VisibleForTesting GitRepository(Project parent, File repoDir, Executor repositoryWorker,
long creationTimeMillis, Author author); GitRepository(Project parent, File repoDir, GitRepositoryFormat format, Executor repositoryWorker,
long creationTimeMillis, Author author, @Nullable RepositoryCache cache); GitRepository(Project parent, File repoDir, Executor repositoryWorker, @Nullable RepositoryCache cache); @Override Project parent(); @Override String name(); GitRepositoryFormat format(); boolean needsMigration(GitRepositoryFormat preferredFormat); @Override Revision normalizeNow(Revision revision); @Override RevisionRange normalizeNow(Revision from, Revision to); @Override CompletableFuture<Map<String, Entry<?>>> find(
Revision revision, String pathPattern, Map<FindOption<?>, ?> options); @Override CompletableFuture<List<Commit>> history(
Revision from, Revision to, String pathPattern, int maxCommits); @Override CompletableFuture<Map<String, Change<?>>> diff(Revision from, Revision to, String pathPattern); @Override CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision,
Iterable<Change<?>> changes); @Override CompletableFuture<Revision> commit(
Revision baseRevision, long commitTimeMillis, Author author, String summary,
String detail, Markup markup, Iterable<Change<?>> changes); @Override CompletableFuture<Revision> findLatestRevision(Revision lastKnownRevision, String pathPattern); @Override CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern); void cloneTo(File newRepoDir); void cloneTo(File newRepoDir, BiConsumer<Integer, Integer> progressListener); void cloneTo(File newRepoDir, GitRepositoryFormat format); void cloneTo(File newRepoDir, GitRepositoryFormat format,
BiConsumer<Integer, Integer> progressListener); @Override String toString(); } | @Test void testFindNone() { assertThat(repo.find(HEAD, "/non-existent").join()).isEmpty(); assertThat(repo.find(HEAD, "non-existent").join()).isEmpty(); }
@Test void testFind_invalidPathPattern() { final String pattern = "a'\"><img src=1 onerror=alert(document.domain)>"; assertThatThrownBy(() -> repo.find(HEAD, pattern).join()) .isInstanceOf(CompletionException.class) .hasCauseInstanceOf(IllegalArgumentException.class); } |
GitRepository implements Repository { @Override public CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern) { requireNonNull(lastKnownRevision, "lastKnownRevision"); requireNonNull(pathPattern, "pathPattern"); final ServiceRequestContext ctx = context(); final Revision normLastKnownRevision = normalizeNow(lastKnownRevision); final CompletableFuture<Revision> future = new CompletableFuture<>(); CompletableFuture.runAsync(() -> { failFastIfTimedOut(this, logger, ctx, "watch", lastKnownRevision, pathPattern); readLock(); try { final Revision latestRevision = blockingFindLatestRevision(normLastKnownRevision, pathPattern); if (latestRevision != null) { future.complete(latestRevision); } else { commitWatchers.add(normLastKnownRevision, pathPattern, future); } } finally { readUnlock(); } }, repositoryWorker).exceptionally(cause -> { future.completeExceptionally(cause); return null; }); return future; } @VisibleForTesting GitRepository(Project parent, File repoDir, Executor repositoryWorker,
long creationTimeMillis, Author author); GitRepository(Project parent, File repoDir, GitRepositoryFormat format, Executor repositoryWorker,
long creationTimeMillis, Author author, @Nullable RepositoryCache cache); GitRepository(Project parent, File repoDir, Executor repositoryWorker, @Nullable RepositoryCache cache); @Override Project parent(); @Override String name(); GitRepositoryFormat format(); boolean needsMigration(GitRepositoryFormat preferredFormat); @Override Revision normalizeNow(Revision revision); @Override RevisionRange normalizeNow(Revision from, Revision to); @Override CompletableFuture<Map<String, Entry<?>>> find(
Revision revision, String pathPattern, Map<FindOption<?>, ?> options); @Override CompletableFuture<List<Commit>> history(
Revision from, Revision to, String pathPattern, int maxCommits); @Override CompletableFuture<Map<String, Change<?>>> diff(Revision from, Revision to, String pathPattern); @Override CompletableFuture<Map<String, Change<?>>> previewDiff(Revision baseRevision,
Iterable<Change<?>> changes); @Override CompletableFuture<Revision> commit(
Revision baseRevision, long commitTimeMillis, Author author, String summary,
String detail, Markup markup, Iterable<Change<?>> changes); @Override CompletableFuture<Revision> findLatestRevision(Revision lastKnownRevision, String pathPattern); @Override CompletableFuture<Revision> watch(Revision lastKnownRevision, String pathPattern); void cloneTo(File newRepoDir); void cloneTo(File newRepoDir, BiConsumer<Integer, Integer> progressListener); void cloneTo(File newRepoDir, GitRepositoryFormat format); void cloneTo(File newRepoDir, GitRepositoryFormat format,
BiConsumer<Integer, Integer> progressListener); @Override String toString(); } | @Test void testWatch() throws Exception { final Revision rev1 = repo.normalizeNow(HEAD); final Revision rev2 = rev1.forward(1); final CompletableFuture<Revision> f = repo.watch(rev1, Repository.ALL_PATH); assertThat(f).isNotDone(); repo.commit(rev1, 0L, Author.UNKNOWN, SUMMARY, jsonUpserts[0]); assertThat(f.get(3, TimeUnit.SECONDS)).isEqualTo(rev2); assertThat(repo.normalizeNow(HEAD)).isEqualTo(rev2); ensureWatcherCleanUp(); }
@Test void testWatchWithQueryCancellation() throws Exception { final AtomicInteger numSubtasks = new AtomicInteger(); final CountDownLatch subtaskCancelled = new CountDownLatch(1); watchConsumer = f -> { numSubtasks.getAndIncrement(); f.exceptionally(cause -> { if (cause instanceof CancellationException) { subtaskCancelled.countDown(); } return null; }); }; final CompletableFuture<Entry<JsonNode>> f = repo.watch(HEAD, Query.ofJsonPath(jsonPaths[0], "$")); assertThatThrownBy(() -> f.get(500, TimeUnit.MILLISECONDS)) .isInstanceOf(TimeoutException.class); assertThat(numSubtasks.get()).isEqualTo(1); assertThat(subtaskCancelled.getCount()).isEqualTo(1L); assertThat(f.cancel(true)).isTrue(); assertThatThrownBy(() -> f.get(3, TimeUnit.SECONDS)) .isInstanceOf(CancellationException.class); assertThat(subtaskCancelled.await(3, TimeUnit.SECONDS)).isTrue(); assertThat(numSubtasks.get()).isEqualTo(1); ensureWatcherCleanUp(); } |
TokenlessClientLogger extends SimpleDecoratingHttpService { @Override public HttpResponse serve(ServiceRequestContext ctx, HttpRequest req) throws Exception { final String authorization = req.headers().get(HttpHeaderNames.AUTHORIZATION); if (authorization == null || !PATTERN.matcher(authorization).matches()) { final InetSocketAddress raddr = ctx.remoteAddress(); final String ip = raddr.getAddress().getHostAddress(); final Instant now = Instant.now(clock); final Instant lastReport = reportedAddresses.putIfAbsent(ip, now); final boolean report; if (lastReport == null) { report = true; } else if (ChronoUnit.DAYS.between(lastReport, now) >= 1) { report = reportedAddresses.replace(ip, lastReport, now); } else { report = false; } if (report) { report(raddr.getHostString(), ip); } } return unwrap().serve(ctx, req); } TokenlessClientLogger(HttpService delegate); @VisibleForTesting TokenlessClientLogger(HttpService delegate, Clock clock); @Override HttpResponse serve(ServiceRequestContext ctx, HttpRequest req); } | @Test void testWithToken() throws Exception { final MockTokenlessClientLogger logger = new MockTokenlessClientLogger(); final ServiceRequestContext ctx = mock(ServiceRequestContext.class); final HttpRequest req = newRequestWithToken(); logger.serve(ctx, req); assertThat(logger.hostname).isNull(); assertThat(logger.ip).isNull(); verify(delegate, times(1)).serve(ctx, req); verify(clock, never()).instant(); }
@Test void testWithoutToken() throws Exception { final MockTokenlessClientLogger logger = new MockTokenlessClientLogger(); final Instant startTime = Instant.now(); when(clock.instant()).thenReturn(startTime); final ServiceRequestContext ctx = newContext("foo", "192.168.0.1"); final HttpRequest req = newRequestWithoutToken(); logger.serve(ctx, req); assertThat(logger.hostname).isEqualTo("foo"); assertThat(logger.ip).isEqualTo("192.168.0.1"); verify(delegate, times(1)).serve(ctx, req); final ServiceRequestContext ctx2 = newContext("bar", "192.168.0.1"); final HttpRequest req2 = newRequestWithoutToken(); when(clock.instant()).thenReturn(startTime.plus(30, ChronoUnit.MINUTES)); logger.serve(ctx2, req2); assertThat(logger.hostname).isNull(); assertThat(logger.ip).isNull(); verify(delegate, times(1)).serve(ctx2, req2); final ServiceRequestContext ctx3 = newContext("baz", "192.168.0.1"); final HttpRequest req3 = newRequestWithoutToken(); when(clock.instant()).thenReturn(startTime.plus(1, ChronoUnit.DAYS)); logger.serve(ctx3, req3); assertThat(logger.hostname).isEqualTo("baz"); assertThat(logger.ip).isEqualTo("192.168.0.1"); verify(delegate, times(1)).serve(ctx3, req3); final ServiceRequestContext ctx4 = newContext("qux", "192.168.0.2"); final HttpRequest req4 = newRequestWithoutToken(); logger.serve(ctx4, req4); assertThat(logger.hostname).isEqualTo("qux"); assertThat(logger.ip).isEqualTo("192.168.0.2"); verify(delegate, times(1)).serve(ctx4, req4); } |
PublicKeyMirrorCredential extends AbstractMirrorCredential { @Nullable public byte[] passphrase() { if (passphrase == null) { return null; } else { return passphrase.clone(); } } @JsonCreator PublicKeyMirrorCredential(@JsonProperty("id") @Nullable String id,
@JsonProperty("hostnamePatterns") @Nullable
@JsonDeserialize(contentAs = Pattern.class)
Iterable<Pattern> hostnamePatterns,
@JsonProperty("username") String username,
@JsonProperty("publicKey") String publicKey,
@JsonProperty("privateKey") String privateKey,
@JsonProperty("passphrase") @Nullable String passphrase); PublicKeyMirrorCredential(@Nullable String id,
@Nullable Iterable<Pattern> hostnamePatterns,
String username, byte[] publicKey, byte[] privateKey,
@Nullable byte[] passphrase); String username(); byte[] publicKey(); byte[] privateKey(); @Nullable byte[] passphrase(); @Override boolean equals(Object o); @Override int hashCode(); } | @Test void testBase64Passphrase() { final PublicKeyMirrorCredential c = new PublicKeyMirrorCredential( null, null, USERNAME, PUBLIC_KEY, PRIVATE_KEY, PASSPHRASE_BASE64); assertThat(c.passphrase()).isEqualTo(PASSPHRASE.getBytes(StandardCharsets.UTF_8)); } |
ExpiredSessionDeletingSessionManager extends ForwardingSessionManager { @Override public CompletableFuture<Session> get(String sessionId) { return super.get(sessionId).thenApply(session -> { if (session != null) { if (Instant.now().isBefore(session.expirationTime())) { return session; } delete(sessionId); } return null; }); } ExpiredSessionDeletingSessionManager(SessionManager delegate); @Override CompletableFuture<Session> get(String sessionId); } | @Test void shouldReturnNonNull() { final Session expiredAfterOneHour = createSession(Instant.now().plus(1, ChronoUnit.HOURS)); final SessionManager delegate = mock(SessionManager.class); when(delegate.get(any())).thenReturn(CompletableFuture.completedFuture(expiredAfterOneHour)); final ExpiredSessionDeletingSessionManager manager = new ExpiredSessionDeletingSessionManager(delegate); assertThat(manager.get("id").join()).isEqualTo(expiredAfterOneHour); }
@Test void shouldReturnNull() { final Session expiredSession = createSession(Instant.EPOCH); final SessionManager delegate = mock(SessionManager.class); when(delegate.get(any())).thenReturn(CompletableFuture.completedFuture(expiredSession)); final ExpiredSessionDeletingSessionManager manager = new ExpiredSessionDeletingSessionManager(delegate); assertThat(manager.get("id").join()).isNull(); } |
CentralDogmaAuthFailureHandler implements AuthFailureHandler { @Override public HttpResponse authFailed(HttpService delegate, ServiceRequestContext ctx, HttpRequest req, @Nullable Throwable cause) throws Exception { if (cause != null) { if (!(cause instanceof ShuttingDownException)) { logger.warn("Unexpected exception during authorization:", cause); } return HttpApiUtil.newResponse(ctx, HttpStatus.INTERNAL_SERVER_ERROR, cause); } return HttpApiUtil.newResponse(ctx, HttpStatus.UNAUTHORIZED, AUTHORIZATION_EXCEPTION); } @Override HttpResponse authFailed(HttpService delegate,
ServiceRequestContext ctx, HttpRequest req,
@Nullable Throwable cause); } | @Test void shuttingDown() throws Exception { final AggregatedHttpResponse res = handler.authFailed(delegate, ctx, req, new ShuttingDownException()) .aggregate().join(); assertThat(res.status()).isEqualTo(HttpStatus.INTERNAL_SERVER_ERROR); assertThat(res.contentType()).isEqualTo(MediaType.JSON_UTF_8); assertThatJson(res.contentUtf8()).isEqualTo( '{' + " \"exception\": \"com.linecorp.centraldogma.common.ShuttingDownException\"," + " \"message\":\"\"" + '}'); }
@Test void failure() throws Exception { final AggregatedHttpResponse res = handler.authFailed(delegate, ctx, req, new Exception("oops")) .aggregate().join(); assertThat(res.status()).isEqualTo(HttpStatus.INTERNAL_SERVER_ERROR); assertThat(res.contentType()).isEqualTo(MediaType.JSON_UTF_8); assertThatJson(res.contentUtf8()).isEqualTo( '{' + " \"exception\": \"java.lang.Exception\"," + " \"message\":\"oops\"" + '}'); }
@Test void incorrectToken() throws Exception { final AggregatedHttpResponse res = handler.authFailed(delegate, ctx, req, null) .aggregate().join(); assertThat(res.status()).isEqualTo(HttpStatus.UNAUTHORIZED); assertThat(res.contentType()).isEqualTo(MediaType.JSON_UTF_8); assertThatJson(res.contentUtf8()).isEqualTo( '{' + " \"exception\": \"com.linecorp.centraldogma.common.AuthorizationException\"," + " \"message\":\"\"" + '}'); } |
Subsets and Splits