target
stringlengths
20
113k
src_fm
stringlengths
11
86.3k
src_fm_fc
stringlengths
21
86.4k
src_fm_fc_co
stringlengths
30
86.4k
src_fm_fc_ms
stringlengths
42
86.8k
src_fm_fc_ms_ff
stringlengths
43
86.8k
@Test public void testValidatePositiveThread() throws Exception { MainSettingValidator.validate(mainSetting); }
public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); static void validate(MainSetting mainSetting); }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); static void validate(MainSetting mainSetting); }
@Test(expected = IllegalArgumentException.class) public void testValidateZeroThread() throws Exception { when(mainSetting.threads()).thenReturn(0); MainSettingValidator.validate(mainSetting); }
public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); static void validate(MainSetting mainSetting); }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); static void validate(MainSetting mainSetting); }
@Test(expected = IllegalArgumentException.class) public void testValidateNegativeThread() throws Exception { when(mainSetting.threads()).thenReturn(-1); MainSettingValidator.validate(mainSetting); }
public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); static void validate(MainSetting mainSetting); }
MainSettingValidator { public static void validate(MainSetting mainSetting) throws IllegalArgumentException { if (mainSetting.threads() < 1) { throw new IllegalArgumentException("Thread number must be greater than zero"); } } private MainSettingValidator(); static void validate(MainSetting mainSetting); }
@Test public void testHandleAlwaysTrue() throws Exception { assertThat(cut.handle(testPath), is(true)); }
@Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } HashingHandler(ExecutorService threadPool, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics, HashAttribute hashAttribute); }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } HashingHandler(ExecutorService threadPool, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics, HashAttribute hashAttribute); @Override boolean handle(Path file); }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } HashingHandler(ExecutorService threadPool, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics, HashAttribute hashAttribute); @Override boolean handle(Path file); }
@Test public void testHandleJobIsExecuted() throws Exception { cut.handle(testPath); verify(threadPool).execute(any(ImageHashJob.class)); }
@Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } HashingHandler(ExecutorService threadPool, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics, HashAttribute hashAttribute); }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } HashingHandler(ExecutorService threadPool, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics, HashAttribute hashAttribute); @Override boolean handle(Path file); }
HashingHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, HashingHandler.class.getSimpleName()); ImageHashJob job = new ImageHashJob(file, hasher, imageRepository, statistics); job.setHashAttribute(hashAttribute); threadPool.execute(job); return true; } HashingHandler(ExecutorService threadPool, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics, HashAttribute hashAttribute); @Override boolean handle(Path file); }
@Test public void testHandleFileHasHash() throws Exception { assertThat(cut.handle(testFile), is(true)); }
@Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); static final String CORRUPT_EA_NAMESPACE; }
@Test public void testIsEaSupportedExpireCache() throws Exception { cut = new ExtendedAttributeDirectoryCache(eaQuery, 1, TimeUnit.MICROSECONDS); assertThat(cut.isEaSupported(subDirectory), is(true)); when(eaQuery.isEaSupported(any(Path.class))).thenReturn(false); assertThat(cut.isEaSupported(subDirectory), is(false)); }
@Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
@Test public void testHandleFileHasNoHash() throws Exception { when(hashAttribute.areAttributesValid(testFile)).thenReturn(false); assertThat(cut.handle(testFile), is(false)); }
@Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); static final String CORRUPT_EA_NAMESPACE; }
@Test public void testHandleDbError() throws Exception { Mockito.doThrow(RepositoryException.class).when(imageRepository).store(any(ImageRecord.class)); assertThat(cut.handle(testFile), is(false)); }
@Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); static final String CORRUPT_EA_NAMESPACE; }
@Test public void testHandleFileReadError() throws Exception { when(hashAttribute.readHash(testFile)).thenThrow(new IOException()); assertThat(cut.handle(testFile), is(false)); }
@Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); static final String CORRUPT_EA_NAMESPACE; }
@Test public void testHandleAttributeError() throws Exception { when(hashAttribute.readHash(testFile)).thenThrow(new InvalidAttributeValueException()); assertThat(cut.handle(testFile), is(false)); }
/**
 * Tries to satisfy the hash request for {@code file} from its extended attributes.
 *
 * @param file the image file to handle
 * @return {@code true} if the file was fully handled (known-corrupt, or a valid
 *         stored hash was read and persisted to the repository); {@code false}
 *         if the next handler in the chain should process it.
 */
@Override
public boolean handle(Path file) {
    LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName());

    if (eaQuery.isEaSupported(file)) {
        try {
            // Files previously marked corrupt are considered handled: do not re-hash them.
            if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) {
                LOGGER.trace("{} is corrupt", file);
                return true;
            }
        } catch (IOException e) {
            // Bug fix: the original dropped the exception; log its detail like the
            // sibling catch blocks so the failure cause is not lost.
            LOGGER.error("Failed to read attributes from {} ({})", file, e.toString());
        }

        if (hashAttribute.areAttributesValid(file)) {
            LOGGER.trace("{} has valid extended attributes", file);
            try {
                // Promote the hash stored in the extended attribute into the database.
                imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file)));
                LOGGER.trace("Successfully read and stored the hash for {}", file);
                return true;
            } catch (InvalidAttributeValueException | IOException e) {
                LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString());
            } catch (RepositoryException e) {
                LOGGER.error("Failed to access database for {} ({})", file, e.toString());
            }
        }
    }

    // Fall through to the next handler.
    return false;
}
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); static final String CORRUPT_EA_NAMESPACE; }
@Test public void testHandleCorruptFileIsHandled() throws Exception { when(eaQuery.isEaSupported(testFile)).thenReturn(true); when(hashAttribute.areAttributesValid(testFile)).thenReturn(false); ExtendedAttribute.setExtendedAttribute(testFile, ExtendedAttributeHandler.CORRUPT_EA_NAMESPACE, ""); assertThat(cut.handle(testFile), is(true)); }
/**
 * Attempts to handle {@code file} via its extended attributes: a file flagged as
 * corrupt is handled immediately; otherwise a valid stored hash is read and
 * persisted to the image repository.
 *
 * @param file the image file to handle
 * @return {@code true} when handled here; {@code false} to defer to the next handler.
 */
@Override
public boolean handle(Path file) {
    LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName());

    if (eaQuery.isEaSupported(file)) {
        try {
            // A corrupt marker means this file must not be re-processed.
            if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) {
                LOGGER.trace("{} is corrupt", file);
                return true;
            }
        } catch (IOException e) {
            // Bug fix: include the exception detail (the original swallowed it).
            LOGGER.error("Failed to read attributes from {} ({})", file, e.toString());
        }

        if (hashAttribute.areAttributesValid(file)) {
            LOGGER.trace("{} has valid extended attributes", file);
            try {
                imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file)));
                LOGGER.trace("Successfully read and stored the hash for {}", file);
                return true;
            } catch (InvalidAttributeValueException | IOException e) {
                LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString());
            } catch (RepositoryException e) {
                LOGGER.error("Failed to access database for {} ({})", file, e.toString());
            }
        }
    }

    return false;
}
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); }
ExtendedAttributeHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); if (eaQuery.isEaSupported(file)) { try { if (ExtendedAttribute.isExtendedAttributeSet(file, CORRUPT_EA_NAMESPACE)) { LOGGER.trace("{} is corrupt", file); return true; } } catch (IOException e1) { LOGGER.error("Failed to read attributes from {}", file); } if (hashAttribute.areAttributesValid(file)) { LOGGER.trace("{} has valid extended attributes", file); try { imageRepository.store(new ImageRecord(file.toString(), hashAttribute.readHash(file))); LOGGER.trace("Successfully read and stored the hash for {}", file); return true; } catch (InvalidAttributeValueException | IOException e) { LOGGER.error("Failed to read extended attribute from {} ({})", file, e.toString()); } catch (RepositoryException e) { LOGGER.error("Failed to access database for {} ({})", file, e.toString()); } } } return false; } ExtendedAttributeHandler(HashAttribute hashAttribute, ImageRepository imageRepository, ExtendedAttributeQuery eaQuery); @Override boolean handle(Path file); static final String CORRUPT_EA_NAMESPACE; }
@Test public void testHandleFileFoundGood() throws Exception { when(imageRepository.getByPath(testFile)).thenReturn(existingImage); assertThat(cut.handle(testFile), is(true)); }
/**
 * Skips files that are already present in the database, updating skip/processed
 * statistics for them.
 *
 * @param file the image file to check
 * @return {@code true} if the file is already in the database (handled);
 *         {@code false} on a miss or a repository error, so the chain continues.
 */
@Override
public boolean handle(Path file) {
    // Bug fix: the original logged ExtendedAttributeHandler's name here (copy-paste).
    LOGGER.trace("Handling {} with {}", file, DatabaseHandler.class.getSimpleName());
    try {
        if (isInDatabase(file)) {
            // Bug fix: the "{}" placeholder had no argument, so the path was never logged.
            LOGGER.trace("{} was found in the database", file);
            statistics.incrementSkippedFiles();
            statistics.incrementProcessedFiles();
            return true;
        }
    } catch (RepositoryException e) {
        LOGGER.error("Failed to check the database for {} ({})", file, e.toString());
    }
    return false;
}
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); @Override boolean handle(Path file); }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); @Override boolean handle(Path file); }
@Test public void testHandleFileNotFound() throws Exception { assertThat(cut.handle(testFile), is(false)); }
/**
 * Checks whether {@code file} is already recorded in the database; if so it is
 * counted as skipped/processed and needs no further handling.
 *
 * @param file the image file to check
 * @return {@code true} on a database hit; {@code false} on a miss or query failure.
 */
@Override
public boolean handle(Path file) {
    // Bug fix: report this handler's own class name, not ExtendedAttributeHandler's.
    LOGGER.trace("Handling {} with {}", file, DatabaseHandler.class.getSimpleName());
    try {
        if (isInDatabase(file)) {
            // Bug fix: supply the missing argument for the "{}" placeholder.
            LOGGER.trace("{} was found in the database", file);
            statistics.incrementSkippedFiles();
            statistics.incrementProcessedFiles();
            return true;
        }
    } catch (RepositoryException e) {
        LOGGER.error("Failed to check the database for {} ({})", file, e.toString());
    }
    return false;
}
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); @Override boolean handle(Path file); }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); @Override boolean handle(Path file); }
@Test public void testHandleDatabaseError() throws Exception { when(imageRepository.getByPath(testFile)).thenThrow(new RepositoryException("test")); assertThat(cut.handle(testFile), is(false)); }
/**
 * Database lookup stage of the hash-handler chain: files already stored are
 * treated as handled and the skip/processed counters are bumped.
 *
 * @param file the image file to check
 * @return {@code true} if found in the database; {@code false} otherwise
 *         (including when the repository query fails).
 */
@Override
public boolean handle(Path file) {
    // Bug fix: log this class's name instead of the copy-pasted ExtendedAttributeHandler.
    LOGGER.trace("Handling {} with {}", file, DatabaseHandler.class.getSimpleName());
    try {
        if (isInDatabase(file)) {
            // Bug fix: pass the file so the "{}" placeholder is actually filled in.
            LOGGER.trace("{} was found in the database", file);
            statistics.incrementSkippedFiles();
            statistics.incrementProcessedFiles();
            return true;
        }
    } catch (RepositoryException e) {
        LOGGER.error("Failed to check the database for {} ({})", file, e.toString());
    }
    return false;
}
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); @Override boolean handle(Path file); }
DatabaseHandler implements HashHandler { @Override public boolean handle(Path file) { LOGGER.trace("Handling {} with {}", file, ExtendedAttributeHandler.class.getSimpleName()); try { if (isInDatabase(file)) { LOGGER.trace("{} was found in the database"); statistics.incrementSkippedFiles(); statistics.incrementProcessedFiles(); return true; } } catch (RepositoryException e) { LOGGER.error("Failed to check the database for {} ({})", file, e.toString()); } return false; } DatabaseHandler(ImageRepository imageRepository, Statistics statistics); @Override boolean handle(Path file); }
@Test public void testHandleValidExtendedAttribute() throws Exception { when(hashAttribute.areAttributesValid(testFile)).thenReturn(true); assertThat(cut.handle(testFile), is(true)); }
/**
 * Ensures {@code file} carries a valid hash in its extended attributes,
 * computing and writing one if the stored attributes are missing or stale.
 *
 * @param file the image file to (re)hash
 * @return {@code true} if the attributes were already valid or were written
 *         successfully; {@code false} if hashing or writing failed.
 */
@Override
public boolean handle(Path file) {
    if (!hashAttribute.areAttributesValid(file)) {
        // Bug fix: try-with-resources — the original never closed the stream
        // returned by Files.newInputStream(), leaking a file handle per call.
        try (java.io.InputStream in = Files.newInputStream(file)) {
            long hash = hasher.getLongHash(in);
            hashAttribute.writeHash(file, hash);
        } catch (IOException e) {
            LOGGER.warn("Failed to hash {}, {}", file, e.toString());
            return false;
        }
    }
    return true;
}
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); @Override boolean handle(Path file); }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); @Override boolean handle(Path file); }
@Test public void testHandleInvalidExtendedAttribute() throws Exception { when(hashAttribute.areAttributesValid(testFile)).thenReturn(false); assertThat(cut.handle(testFile), is(true)); }
/**
 * Recomputes and stores the file's hash in its extended attributes when the
 * existing attributes are invalid; valid attributes are left untouched.
 *
 * @param file the image file to (re)hash
 * @return {@code true} on success or when no work was needed; {@code false} on I/O failure.
 */
@Override
public boolean handle(Path file) {
    if (!hashAttribute.areAttributesValid(file)) {
        // Bug fix: close the input stream via try-with-resources; the original
        // leaked the handle opened by Files.newInputStream().
        try (java.io.InputStream in = Files.newInputStream(file)) {
            long hash = hasher.getLongHash(in);
            hashAttribute.writeHash(file, hash);
        } catch (IOException e) {
            LOGGER.warn("Failed to hash {}, {}", file, e.toString());
            return false;
        }
    }
    return true;
}
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); @Override boolean handle(Path file); }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); @Override boolean handle(Path file); }
@Test public void testIsEaSupportedRootParent() throws Exception { assertThat(cut.isEaSupported(rootDirectory), is(true)); }
/**
 * Reports whether the filesystem containing {@code path} supports extended
 * attributes, consulting a per-directory cache.
 *
 * @param path file or directory to query; roots are keyed by themselves,
 *             all other paths by their parent directory
 * @return {@code true} if extended attributes are supported; {@code false}
 *         when no cache key can be derived (parentless, non-root path)
 */
@Override
public boolean isEaSupported(Path path) {
    final Path root = path.getRoot();
    // A filesystem root has no parent, so it acts as its own cache key.
    final Path cacheKey = (root != null && path.equals(root)) ? path : path.getParent();
    if (cacheKey == null) {
        return false;
    }
    return eaSupport.getUnchecked(cacheKey);
}
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
@Test public void testHandleIOError() throws Exception { when(hashAttribute.areAttributesValid(testFile)).thenReturn(false); when(hasher.getLongHash(any(InputStream.class))).thenThrow(new IOException()); assertThat(cut.handle(testFile), is(false)); }
/**
 * Hash-update stage: when the extended attributes on {@code file} are invalid,
 * a fresh perceptual hash is computed and written back.
 *
 * @param file the image file to (re)hash
 * @return {@code true} unless hashing/writing threw an {@link IOException}.
 */
@Override
public boolean handle(Path file) {
    if (!hashAttribute.areAttributesValid(file)) {
        // Bug fix: the InputStream from Files.newInputStream() was never closed;
        // manage it with try-with-resources.
        try (java.io.InputStream in = Files.newInputStream(file)) {
            long hash = hasher.getLongHash(in);
            hashAttribute.writeHash(file, hash);
        } catch (IOException e) {
            LOGGER.warn("Failed to hash {}, {}", file, e.toString());
            return false;
        }
    }
    return true;
}
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); @Override boolean handle(Path file); }
ExtendedAttributeUpdateHandler implements HashHandler { @Override public boolean handle(Path file) { if (!hashAttribute.areAttributesValid(file)) { try { long hash = hasher.getLongHash(Files.newInputStream(file)); hashAttribute.writeHash(file, hash); } catch (IOException e) { LOGGER.warn("Failed to hash {}, {}", file, e.toString()); return false; } } return true; } ExtendedAttributeUpdateHandler(HashAttribute hashAttribute, ImagePHash hasher); @Override boolean handle(Path file); }
@Test public void testQueryForNull() throws Exception { cut.apply(null); verify(imageRepository).getAllWithoutIgnored(); }
/**
 * Loads all non-ignored image records, optionally scoped to a path.
 *
 * @param path scope for the query; {@code null} or the empty path means
 *             "all images"
 * @return matching records, or an empty list if the repository query fails
 */
@Override
public List<ImageRecord> apply(Path path) {
    // null and "" both mean an unscoped query.
    final boolean unscoped = (path == null) || Paths.get("").equals(path);
    try {
        return unscoped
                ? imageRepository.getAllWithoutIgnored()
                : imageRepository.getAllWithoutIgnored(path);
    } catch (RepositoryException e) {
        LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause());
        return Collections.emptyList();
    }
}
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testQueryForEmpty() throws Exception { cut.apply(Paths.get("")); verify(imageRepository).getAllWithoutIgnored(); }
@Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testQueryForPath() throws Exception { cut.apply(PATH); verify(imageRepository).getAllWithoutIgnored(PATH); }
@Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testRepositoryError() throws Exception { when(imageRepository.getAllWithoutIgnored()).thenThrow(new RepositoryException("")); assertThat(cut.apply(null), is(empty())); }
@Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
IgnoredImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAllWithoutIgnored(); } else { result = imageRepository.getAllWithoutIgnored(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query non-ignored images: {}, cause: {}", e.toString(), e.getCause()); } return result; } IgnoredImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testQueryForNull() throws Exception { cut.apply(null); verify(imageRepository).getAll(); }
@Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testQueryForEmpty() throws Exception { cut.apply(Paths.get("")); verify(imageRepository).getAll(); }
@Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testQueryForPath() throws Exception { cut.apply(PATH); verify(imageRepository).startsWithPath(PATH); }
@Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testRepositoryError() throws Exception { when(imageRepository.getAll()).thenThrow(new RepositoryException("")); assertThat(cut.apply(null), is(empty())); }
@Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
ImageQueryStage implements Function<Path, List<ImageRecord>> { @Override public List<ImageRecord> apply(Path path) { List<ImageRecord> result = Collections.emptyList(); try { if (path == null || Paths.get("").equals(path)) { result = imageRepository.getAll(); } else { result = imageRepository.startsWithPath(path); } } catch (RepositoryException e) { LOGGER.error("Failed to query images: {}, cause: {}", e.toString(), e.getCause()); } return result; } ImageQueryStage(ImageRepository imageRepository); @Override List<ImageRecord> apply(Path path); }
@Test public void testQueryExecuted() throws Exception { verify(imageQueryStage).apply(any()); }
@Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
@Test public void testIsEaSupportedRoot() throws Exception { assertThat(cut.isEaSupported(root), is(true)); }
@Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
@Test public void testGroupingExecuted() throws Exception { verify(grouper).apply(images); }
@Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
@Test public void testPostprocessingAExecuted() throws Exception { verify(postProcessingStageA).apply(groups); }
@Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
@Test public void testPostprocessingBExecuted() throws Exception { verify(postProcessingStageB).apply(groups); }
@Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
@Test public void testPipelineReturnsGroups() throws Exception { assertThat(cut.apply(null), is(groups)); }
@Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Path path) { List<ImageRecord> images = imageQueryStage.apply(path); Multimap<Long, ImageRecord> groups = imageGrouper.apply(images); return postProcessing(groups); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
@Test public void testGrouperInstance() throws Exception { assertThat(cut.getImageGrouper(), is(instanceOf(GroupImagesStage.class))); }
public Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper() { return imageGrouper; }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper() { return imageGrouper; } }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper() { return imageGrouper; } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper() { return imageGrouper; } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper() { return imageGrouper; } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
@Test public void testGetPostProcessingStages() throws Exception { assertThat(cut.getPostProcessingStages(), hasSize(2)); }
public Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages() { return ImmutableList.copyOf(postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages() { return ImmutableList.copyOf(postProcessingStages); } }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages() { return ImmutableList.copyOf(postProcessingStages); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages() { return ImmutableList.copyOf(postProcessingStages); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
ImageQueryPipeline implements Function<Path, Multimap<Long, ImageRecord>> { public Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages() { return ImmutableList.copyOf(postProcessingStages); } ImageQueryPipeline(Function<Path, List<ImageRecord>> imageQueryStage, Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> imageGrouper, Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> postProcessingStages); @Override Multimap<Long, ImageRecord> apply(Path path); Collection<Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>>> getPostProcessingStages(); Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> getImageGrouper(); }
@Test public void testGroupedByTag() throws Exception { assertThat(cut.apply(images).get(HASH_A), containsInAnyOrder(imageA)); }
@Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t); }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t); }
@Test public void testTagsWithDistance() throws Exception { cut = new GroupByTagStage(filterRepository, TAG, 1); assertThat(cut.apply(images).get(HASH_A), containsInAnyOrder(imageA, imageB)); }
@Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t); }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t); }
@Test public void testRepositoryError() throws Exception { when(filterRepository.getByTag(TAG)).thenThrow(new RepositoryException("")); assertThat(cut.apply(images), is(EMPTY_MAP)); }
@Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t); }
GroupByTagStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t) { Multimap<Long, ImageRecord> result = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(t); TagFilter tagFilter = new TagFilter(filterRepository); result = tagFilter.getFilterMatches(rs, tag, hammingDistance); return result; } GroupByTagStage(FilterRepository filterRepository, Tag tag, int hammingDistance); GroupByTagStage(FilterRepository filterRepository, Tag tag); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> t); }
@Test public void testImagesWithIgnore() throws Exception { cut.build().apply(null); verify(imageRepository).getAll(); }
public ImageQueryPipeline build() { if (imageGrouper == null) { imageGrouper = new GroupImagesStage(hammingDistance); LOGGER.warn("No image group stage set, using {}", imageGrouper.getClass().getSimpleName()); } return new ImageQueryPipeline(imageQuery, imageGrouper, postProcessing); }
ImageQueryPipelineBuilder { public ImageQueryPipeline build() { if (imageGrouper == null) { imageGrouper = new GroupImagesStage(hammingDistance); LOGGER.warn("No image group stage set, using {}", imageGrouper.getClass().getSimpleName()); } return new ImageQueryPipeline(imageQuery, imageGrouper, postProcessing); } }
ImageQueryPipelineBuilder { public ImageQueryPipeline build() { if (imageGrouper == null) { imageGrouper = new GroupImagesStage(hammingDistance); LOGGER.warn("No image group stage set, using {}", imageGrouper.getClass().getSimpleName()); } return new ImageQueryPipeline(imageQuery, imageGrouper, postProcessing); } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipeline build() { if (imageGrouper == null) { imageGrouper = new GroupImagesStage(hammingDistance); LOGGER.warn("No image group stage set, using {}", imageGrouper.getClass().getSimpleName()); } return new ImageQueryPipeline(imageQuery, imageGrouper, postProcessing); } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipeline build() { if (imageGrouper == null) { imageGrouper = new GroupImagesStage(hammingDistance); LOGGER.warn("No image group stage set, using {}", imageGrouper.getClass().getSimpleName()); } return new ImageQueryPipeline(imageQuery, imageGrouper, postProcessing); } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
@Test public void testIsEaSupportedNoParent() throws Exception { assertThat(cut.isEaSupported(relative), is(false)); }
@Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
ExtendedAttributeDirectoryCache implements ExtendedAttributeQuery { @Override public boolean isEaSupported(Path path) { Path parent = path.getParent(); if (path.getRoot() != null && path.equals(path.getRoot())) { parent = path; } if (parent == null) { return false; } return eaSupport.getUnchecked(parent); } @Inject ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery); ExtendedAttributeDirectoryCache(ExtendedAttributeQuery eaQuery, int expireTime, TimeUnit expireUnit); @Override boolean isEaSupported(Path path); }
@Test public void testRemoveSingleImageGroups() throws Exception { assertThat(cut.removeSingleImageGroups().build().getPostProcessingStages(), hasItem(instanceOf(RemoveSingleImageSetStage.class))); }
public ImageQueryPipelineBuilder removeSingleImageGroups() { this.postProcessing.add(new RemoveSingleImageSetStage()); return this; }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeSingleImageGroups() { this.postProcessing.add(new RemoveSingleImageSetStage()); return this; } }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeSingleImageGroups() { this.postProcessing.add(new RemoveSingleImageSetStage()); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeSingleImageGroups() { this.postProcessing.add(new RemoveSingleImageSetStage()); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeSingleImageGroups() { this.postProcessing.add(new RemoveSingleImageSetStage()); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
@Test public void testRemoveDuplicateGroups() throws Exception { assertThat(cut.removeDuplicateGroups().build().getPostProcessingStages(), hasItem(instanceOf(RemoveDuplicateSetStage.class))); }
public ImageQueryPipelineBuilder removeDuplicateGroups() { this.postProcessing.add(new RemoveDuplicateSetStage()); return this; }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeDuplicateGroups() { this.postProcessing.add(new RemoveDuplicateSetStage()); return this; } }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeDuplicateGroups() { this.postProcessing.add(new RemoveDuplicateSetStage()); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeDuplicateGroups() { this.postProcessing.add(new RemoveDuplicateSetStage()); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder removeDuplicateGroups() { this.postProcessing.add(new RemoveDuplicateSetStage()); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
@Test public void testNewBuilder() throws Exception { assertThat(ImageQueryPipelineBuilder.newBuilder(imageRepository, filterRepository), is(instanceOf(ImageQueryPipelineBuilder.class))); }
public static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository) { return new ImageQueryPipelineBuilder(imageRepository, filterRepository); }
ImageQueryPipelineBuilder { public static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository) { return new ImageQueryPipelineBuilder(imageRepository, filterRepository); } }
ImageQueryPipelineBuilder { public static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository) { return new ImageQueryPipelineBuilder(imageRepository, filterRepository); } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository) { return new ImageQueryPipelineBuilder(imageRepository, filterRepository); } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository) { return new ImageQueryPipelineBuilder(imageRepository, filterRepository); } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
@Test public void testGroupAll() throws Exception { ImageQueryPipeline pipeline = cut.groupAll().build(); assertThat(pipeline.getImageGrouper(), is(instanceOf(GroupImagesStage.class))); }
public ImageQueryPipelineBuilder groupAll() { this.imageGrouper = new GroupImagesStage(hammingDistance); return this; }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder groupAll() { this.imageGrouper = new GroupImagesStage(hammingDistance); return this; } }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder groupAll() { this.imageGrouper = new GroupImagesStage(hammingDistance); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder groupAll() { this.imageGrouper = new GroupImagesStage(hammingDistance); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
ImageQueryPipelineBuilder { public ImageQueryPipelineBuilder groupAll() { this.imageGrouper = new GroupImagesStage(hammingDistance); return this; } ImageQueryPipelineBuilder(ImageRepository imageRepository, FilterRepository filterRepository); ImageQueryPipelineBuilder excludeIgnored(); ImageQueryPipelineBuilder excludeIgnored(boolean exclude); ImageQueryPipelineBuilder removeSingleImageGroups(); ImageQueryPipelineBuilder removeDuplicateGroups(); ImageQueryPipelineBuilder distance(int distance); ImageQueryPipelineBuilder groupByTag(Tag tag); ImageQueryPipelineBuilder groupAll(); ImageQueryPipeline build(); static ImageQueryPipelineBuilder newBuilder(ImageRepository imageRepository, FilterRepository filterRepository); }
@Test public void testRemoveDuplicateGroups() throws Exception { cut.apply(testMap); assertThat(testMap.containsKey(HASH_C), is(false)); }
@Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
@Test public void testParameterReturned() throws Exception { assertThat(cut.apply(testMap), is(sameInstance(testMap))); }
@Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
RemoveDuplicateSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeDuplicateSets(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
@Test public void testRemoveSingleImageGroup() throws Exception { cut.apply(testMap); assertThat(testMap.containsKey(HASH_A), is(false)); }
@Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
@Test public void testParameterReturned() throws Exception { assertThat(cut.apply(testMap), is(sameInstance(testMap))); }
@Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
RemoveSingleImageSetStage implements Function<Multimap<Long, ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune) { DuplicateUtil.removeSingleImageGroups(toPrune); return toPrune; } @Override Multimap<Long, ImageRecord> apply(Multimap<Long, ImageRecord> toPrune); }
@Test public void testNoDuplicatesInGroup() throws Exception { assertThat(cut.apply(images).get(HASH_B), hasSize(1)); }
@Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
@Test public void testSortedByHash() throws Exception { assertThat(cut.apply(images).get(HASH_A), hasItem(imageA)); }
@Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
@Test public void testWrittenHashValue() throws Exception { cut.writeHash(tempFile, TEST_VALUE); assertThat(Long.parseUnsignedLong(ExtendedAttribute.readExtendedAttributeAsString(tempFile, testHashFullName), HEXADECIMAL_RADIX), is(TEST_VALUE)); }
public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } HashAttribute(String hashName); }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
@Test public void testTransformDCT() throws Exception { double[] result = cut.transformDCT(Doubles.concat(testMatrix)); assertArrayEquals(EXPECTED, Doubles.concat(result), 0.1); }
public synchronized double[] transformDCT(double[] matrix) { for (int i = 0; i < matrixArea; i++) { this.matrix[i] = matrix[i]; } execute(range); return Doubles.concat(result); }
DCTKernel extends Kernel { public synchronized double[] transformDCT(double[] matrix) { for (int i = 0; i < matrixArea; i++) { this.matrix[i] = matrix[i]; } execute(range); return Doubles.concat(result); } }
DCTKernel extends Kernel { public synchronized double[] transformDCT(double[] matrix) { for (int i = 0; i < matrixArea; i++) { this.matrix[i] = matrix[i]; } execute(range); return Doubles.concat(result); } DCTKernel(); DCTKernel(int matrixSize); }
DCTKernel extends Kernel { public synchronized double[] transformDCT(double[] matrix) { for (int i = 0; i < matrixArea; i++) { this.matrix[i] = matrix[i]; } execute(range); return Doubles.concat(result); } DCTKernel(); DCTKernel(int matrixSize); void setDevice(Device device); @Override void run(); synchronized double[] transformDCT(double[] matrix); }
DCTKernel extends Kernel { public synchronized double[] transformDCT(double[] matrix) { for (int i = 0; i < matrixArea; i++) { this.matrix[i] = matrix[i]; } execute(range); return Doubles.concat(result); } DCTKernel(); DCTKernel(int matrixSize); void setDevice(Device device); @Override void run(); synchronized double[] transformDCT(double[] matrix); static final int DEFAULT_MATRIX_SIZE; }
@Test public void testHammingDistance() throws Exception { cut = new GroupImagesStage(1); assertThat(cut.apply(images).get(HASH_B), hasItems(imageA, imageB)); }
@Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { @Override public Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup) { Multimap<Long, ImageRecord> resultMap = MultimapBuilder.hashKeys().hashSetValues().build(); rs.build(toGroup); Stopwatch sw = Stopwatch.createStarted(); toGroup.forEach(new Consumer<ImageRecord>() { @Override public void accept(ImageRecord t) { resultMap.putAll(t.getpHash(), rs.distanceMatch(t.getpHash(), hammingDistance).values()); } }); LOGGER.info("Built result map with {} pairs in {}, using hamming distance {}", resultMap.size(), sw, hammingDistance); return resultMap; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
@Test public void testGetHammingDistance() throws Exception { cut = new GroupImagesStage(DISTANCE); assertThat(cut.getHammingDistance(), is(DISTANCE)); }
public int getHammingDistance() { return hammingDistance; }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } GroupImagesStage(); GroupImagesStage(int hammingDistance); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
@Test public void testDefaultDistance() throws Exception { assertThat(cut.getHammingDistance(), is(0)); }
public int getHammingDistance() { return hammingDistance; }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } GroupImagesStage(); GroupImagesStage(int hammingDistance); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
GroupImagesStage implements Function<Collection<ImageRecord>, Multimap<Long, ImageRecord>> { public int getHammingDistance() { return hammingDistance; } GroupImagesStage(); GroupImagesStage(int hammingDistance); @Override Multimap<Long, ImageRecord> apply(Collection<ImageRecord> toGroup); int getHammingDistance(); }
@Test public void testElementsAddedInOrder() { glp.run(); List<ResultGroup> testList = Lists.reverse(results); InOrder inOrder = inOrder(dlm); for (ResultGroup rg : testList) { inOrder.verify(dlm).addElement(rg); } }
@Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } GroupListPopulator(GroupList groups, DefaultListModel<ResultGroup> groupListModel); }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } GroupListPopulator(GroupList groups, DefaultListModel<ResultGroup> groupListModel); @Override void run(); }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } GroupListPopulator(GroupList groups, DefaultListModel<ResultGroup> groupListModel); @Override void run(); }
@Test public void testElementsAdded() { glp.run(); for (ResultGroup rg : results) { verify(dlm).addElement(rg); } }
@Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } GroupListPopulator(GroupList groups, DefaultListModel<ResultGroup> groupListModel); }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } GroupListPopulator(GroupList groups, DefaultListModel<ResultGroup> groupListModel); @Override void run(); }
GroupListPopulator implements Runnable { @Override public void run() { this.logger.info("Populating group list with {} groups", groups.groupCount()); groupListModel.clear(); List<ResultGroup> resultGroups = groups.getAllGroups(); Collections.sort(resultGroups, new Comparator<ResultGroup>() { @Override public int compare(ResultGroup o1, ResultGroup o2) { return Long.compare(o1.getHash(), o2.getHash()); } }); for (ResultGroup g : resultGroups) { groupListModel.addElement(g); } this.logger.info("Finished populating group list"); } GroupListPopulator(GroupList groups, DefaultListModel<ResultGroup> groupListModel); @Override void run(); }
@Test public void testRepositoryException() throws Exception { when(filterRepository.getAll()).thenThrow(new RepositoryException("just testing!")); assertThat(cut.getFilterMatches(recordSearch, TAG_ALL, DISTANCE), is(emptyMultimap)); }
public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance); }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance); }
@Test public void testMatchingTag() throws Exception { assertThat(cut.getFilterMatches(recordSearch, TAG, DISTANCE).get(1L), hasItem(image1)); }
public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance); }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance); }
@Test public void testMatchingTagSecondImageNotIncluded() throws Exception { assertThat(cut.getFilterMatches(recordSearch, TAG, DISTANCE).get(1L), not(hasItem(image2))); }
public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance); }
TagFilter { public Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance) { Multimap<Long, ImageRecord> uniqueGroups = MultimapBuilder.hashKeys().hashSetValues().build(); List<FilterRecord> matchingFilters = Collections.emptyList(); try { matchingFilters = FilterRecord.getTags(filterRepository, tagToMatch); LOGGER.info("Found {} filters for tag {}", matchingFilters.size(), tagToMatch.getTag()); } catch (RepositoryException e) { LOGGER.error("Failed to query hashes for tag {}, reason: {}, cause: {}", tagToMatch.getTag(), e.toString(), e.getCause()); } Multimap<Long, ImageRecord> parallelGroups = Multimaps.synchronizedMultimap(uniqueGroups); matchingFilters.parallelStream().forEach(filter -> { Multimap<Long, ImageRecord> match = recordSearch.distanceMatch(filter.getpHash(), hammingDistance); parallelGroups.putAll(filter.getpHash(), match.values()); }); return uniqueGroups; } TagFilter(FilterRepository filterRepository); Multimap<Long, ImageRecord> getFilterMatches(RecordSearch recordSearch, Tag tagToMatch, int hammingDistance); }
@Test public void testNewThread() throws Exception { Thread t = ntf.newThread(runnableMock); assertThat(t.getName(), is("test thread 0")); }
@Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } NamedThreadFactory(String threadPrefix); }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } NamedThreadFactory(String threadPrefix); @Override Thread newThread(Runnable r); }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } NamedThreadFactory(String threadPrefix); @Override Thread newThread(Runnable r); }
@Test public void testNewThreadTwo() throws Exception { ntf.newThread(runnableMock); Thread t2 = ntf.newThread(runnableMock); assertThat(t2.getName(), is("test thread 1")); }
@Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } NamedThreadFactory(String threadPrefix); }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } NamedThreadFactory(String threadPrefix); @Override Thread newThread(Runnable r); }
NamedThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread thread = defaultThreadFactory.newThread(r); thread.setName(threadPrefix + " thread " + threadNumber); threadNumber++; return thread; } NamedThreadFactory(String threadPrefix); @Override Thread newThread(Runnable r); }
@Test public void testWrittenTimeStamp() throws Exception { cut.writeHash(tempFile, TEST_VALUE); long timestamp = Files.getLastModifiedTime(tempFile).toMillis(); assertThat(Long.parseUnsignedLong(ExtendedAttribute.readExtendedAttributeAsString(tempFile, timestampFullName)), is(allOf(greaterThan(timestamp - TIMESTAMP_TOLERANCE), lessThan(timestamp + TIMESTAMP_TOLERANCE)))); }
public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } HashAttribute(String hashName); }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
HashAttribute { public void writeHash(Path path, long hash) { try { ExtendedAttribute.setExtendedAttribute(path, hashFQN, Long.toHexString(hash)); ExtendedAttribute.setExtendedAttribute(path, timestampFQN, Long.toString(Files.getLastModifiedTime(path).toMillis())); } catch (IOException e) { LOGGER.warn("Failed to write hash to file {} ({})", path, e.toString()); } } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
@Test public void testVisitFileOkProcessedFiles() throws Exception { cut.visitFile(path, attrs); assertThat(statistics.getProcessedFiles(), is(1)); }
@Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
@Test public void testVisitFileOkFailedFiles() throws Exception { cut.visitFile(path, attrs); assertThat(statistics.getFailedFiles(), is(0)); }
@Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
@Test public void testVisitNotHandled() throws Exception { when(handler.handle(path)).thenReturn(false); cut.visitFile(path, attrs); assertThat(statistics.getFailedFiles(), is(1)); }
@Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { if (isAcceptedFile(file)) { statistics.incrementFoundFiles(); fileCount++; boolean isHandled = false; for (HashHandler handler : handlers) { if (handler.handle(file)) { isHandled = true; break; } } statistics.incrementProcessedFiles(); if (!isHandled) { statistics.incrementFailedFiles(); LOGGER.error("No handler was able to process {}", file); } } return FileVisitResult.CONTINUE; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
@Test public void testGetFileCount() throws Exception { cut.visitFile(path, attrs); assertThat(cut.getFileCount(), is(1)); }
@Deprecated public int getFileCount() { return fileCount; }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Deprecated public int getFileCount() { return fileCount; } }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Deprecated public int getFileCount() { return fileCount; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Deprecated public int getFileCount() { return fileCount; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
ImageFindJobVisitor extends SimpleFileVisitor<Path> { @Deprecated public int getFileCount() { return fileCount; } ImageFindJobVisitor(Filter<Path> fileFilter, Collection<HashHandler> handlers, Statistics statistics); @Override FileVisitResult visitFile(Path file, BasicFileAttributes attrs); @Deprecated int getFileCount(); }
@Test public void testRunAddFile() throws Exception { imageLoadJob.run(); verify(imageRepository).store(new ImageRecord(testImage.toString(), 0)); }
@Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } ImageHashJob(Path image, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics); }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } ImageHashJob(Path image, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics); final void setHashAttribute(HashAttribute hashAttribute); @Override void run(); }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } ImageHashJob(Path image, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics); final void setHashAttribute(HashAttribute hashAttribute); @Override void run(); }
@Test public void testRunIIOException() throws Exception { when(phw.getLongHash(any(InputStream.class))).thenThrow(IIOException.class); imageLoadJob.run(); verify(statistics).incrementFailedFiles(); }
@Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } ImageHashJob(Path image, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics); }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } ImageHashJob(Path image, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics); final void setHashAttribute(HashAttribute hashAttribute); @Override void run(); }
ImageHashJob implements Runnable { @Override public void run() { try { long hash = processFile(image); if (hashAttribute != null) { hashAttribute.writeHash(image, hash); } } catch (IIOException e) { LOGGER.warn("Failed to process image {} (IIO Error): {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } catch (IOException e) { LOGGER.warn("Failed to load file {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (RepositoryException e) { LOGGER.warn("Failed to query repository for {}: {}", image, e.toString()); statistics.incrementFailedFiles(); } catch (ArrayIndexOutOfBoundsException e) { LOGGER.error("Failed to process image {}: {}", image, e.toString()); LOGGER.debug(EXCEPTION_STACKTRACE, image, e); statistics.incrementFailedFiles(); } } ImageHashJob(Path image, ImagePHash hasher, ImageRepository imageRepository, Statistics statistics); final void setHashAttribute(HashAttribute hashAttribute); @Override void run(); }
@Test public void testGroupByHashNumberOfGroups() throws Exception { Multimap<Long, ImageRecord> group = DuplicateUtil.groupByHash(records); assertThat(group.keySet().size(), is(10)); }
public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
@Test public void testGroupByHashSizeOfGroup() throws Exception { Multimap<Long, ImageRecord> group = DuplicateUtil.groupByHash(records); assertThat(group.get(5L).size(), is(3)); }
public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
@Test public void testGroupByHashEntryPath() throws Exception { Multimap<Long, ImageRecord> group = DuplicateUtil.groupByHash(records); assertThat(group.get(2L), hasItem(new ImageRecord("foo", 2L))); }
public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
DuplicateUtil { public static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords) { Multimap<Long, ImageRecord> groupedByHash = MultimapBuilder.hashKeys().hashSetValues().build(); logger.info("Grouping records by hash..."); for (ImageRecord ir : dbRecords) { groupedByHash.put(ir.getpHash(), ir); } logger.info("{} records, in {} groups", dbRecords.size(), groupedByHash.keySet().size()); return groupedByHash; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
@Test public void testRemoveDuplicateSetsIdenticalSets() throws Exception { Multimap<Long, ImageRecord> map = MultimapBuilder.hashKeys().hashSetValues().build(); map.putAll(1L, records); map.putAll(2L, records); DuplicateUtil.removeDuplicateSets(map); assertThat(map.keySet().size(), is(1)); }
public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
@Test public void testAreAttributesValidNoHashOrTimestamp() throws Exception { assertThat(cut.areAttributesValid(tempFile), is(false)); }
public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
@Test public void testRemoveDuplicateSetsNonIdenticalSets() throws Exception { Multimap<Long, ImageRecord> map = MultimapBuilder.hashKeys().hashSetValues().build(); map.putAll(1L, records); map.putAll(2L, records); map.put(2L, new ImageRecord("foo", 1L)); DuplicateUtil.removeDuplicateSets(map); assertThat(map.keySet().size(), is(2)); }
public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
DuplicateUtil { public static void removeDuplicateSets(Multimap<Long, ImageRecord> records) { logger.info("Checking {} groups for duplicates", records.keySet().size()); Stopwatch sw = Stopwatch.createStarted(); Set<Collection<ImageRecord>> uniqueRecords = new HashSet<Collection<ImageRecord>>(records.keySet().size()); Iterator<Collection<ImageRecord>> recordIter = records.asMap().values().iterator(); long removedGroups = 0; while (recordIter.hasNext()) { Collection<ImageRecord> next = recordIter.next(); if (!uniqueRecords.add(next)) { recordIter.remove(); removedGroups++; } } logger.info("Checked groups in {}, removed {} identical groups", sw, removedGroups); } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
@Test public void testHashSumNoHashes() throws Exception { assertThat(DuplicateUtil.hashSum(Collections.emptyList()), is(BigInteger.ZERO)); }
protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
@Test public void testHashSum() throws Exception { List<Long> hashes = new LinkedList<Long>(); hashes.add(2L); hashes.add(3L); assertThat(DuplicateUtil.hashSum(hashes), is(new BigInteger("5"))); }
protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
DuplicateUtil { protected static final BigInteger hashSum(Collection<Long> hashes) { BigInteger hashSum = BigInteger.ZERO; for (Long hash : hashes) { hashSum = hashSum.add(BigInteger.valueOf(hash)); } return hashSum; } static Multimap<Long, ImageRecord> groupByHash(Collection<ImageRecord> dbRecords); static void removeSingleImageGroups(Multimap<Long, ImageRecord> sourceGroups); static void removeDuplicateSets(Multimap<Long, ImageRecord> records); }
@Test public void testCompareEqual() throws Exception { assertThat(irc.compare(a, new ImageRecord("", 1L)), is(0)); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test public void testCompareLess() throws Exception { assertThat(irc.compare(a, b), is(-1)); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test public void testCompareLessLargerNumber() throws Exception { assertThat(irc.compare(a, c), is(-1)); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test public void testCompareGreater() throws Exception { assertThat(irc.compare(b, a), is(1)); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test public void testCompareGreaterLargerNumber() throws Exception { assertThat(irc.compare(c, a), is(1)); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test(expected = NullPointerException.class) public void testCompareFirstNull() throws Exception { irc.compare(null, b); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test(expected = NullPointerException.class) public void testCompareSecondNull() throws Exception { irc.compare(a, null); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test public void testAreAttributesValidHashButNoTimestamp() throws Exception { ExtendedAttribute.setExtendedAttribute(tempFile, testHashFullName, Long.toString(TEST_VALUE)); assertThat(cut.areAttributesValid(tempFile), is(false)); }
public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
@Test(expected = NullPointerException.class) public void testCompareBothNull() throws Exception { irc.compare(null, null); }
@Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
ImageRecordComperator implements Comparator<ImageRecord>, Serializable { @Override public int compare(ImageRecord o1, ImageRecord o2) { long l1 = o1.getpHash(); long l2 = o2.getpHash(); if (l1 < l2) { return -1; } else if (l1 == l2) { return 0; } else { return 1; } } @Override int compare(ImageRecord o1, ImageRecord o2); }
@Test public void testDistanceMatchRadius0Size() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 0L); assertThat(result.keySet().size(), is(1)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Test public void testDistanceMatchRadius0Hash() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 0L); assertThat(result.get(2L).size(), is(2)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Test public void testDistanceMatchRadius1Size() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 1L); assertThat(result.keySet().size(), is(3)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Test public void testDistanceMatchRadius1Hash2() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 1L); assertThat(result.containsKey(2L), is(true)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Test public void testDistanceMatchRadius1Hash3() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 1L); assertThat(result.containsKey(3L), is(true)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Test public void testDistanceMatchRadius1Hash6() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 1L); assertThat(result.containsKey(6L), is(true)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Test public void testDistanceMatchRadius2Size() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 2L); assertThat(result.keySet().size(), is(4)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Test public void testDistanceMatchRadius2Hash1() throws Exception { Multimap<Long, ImageRecord> result = cut.distanceMatch(2L, 2L); assertThat(result.containsKey(1L), is(true)); }
public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
RecordSearch { public Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance) { Multimap<Long, ImageRecord> searchResult = MultimapBuilder.hashKeys().hashSetValues().build(); Set<Long> resultKeys = bkTree.searchWithin(hash, (double) hammingDistance); for (Long key : resultKeys) { searchResult.putAll(key, imagesGroupedByHash.get(key)); } return searchResult; } RecordSearch(); void build(Collection<ImageRecord> dbRecords); List<Long> exactMatch(); Multimap<Long, ImageRecord> distanceMatch(long hash, long hammingDistance); }
@Ignore("Not implemented yet") @Test public void testMoveToDnw() throws Exception { List<Path> files = createTempTestFiles(1); Path file = files.get(0); dupOp.moveToDnw(file); assertThat(Files.exists(file), is(true)); verify(imageRepository).remove(new ImageRecord(file.toString(), TEST_HASH)); verify(filterRepository).store(new FilterRecord(anyLong(), TAG_DNW)); }
public void moveToDnw(Path path) { logger.info("Method not implemented"); }
DuplicateOperations { public void moveToDnw(Path path) { logger.info("Method not implemented"); } }
DuplicateOperations { public void moveToDnw(Path path) { logger.info("Method not implemented"); } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); }
DuplicateOperations { public void moveToDnw(Path path) { logger.info("Method not implemented"); } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
DuplicateOperations { public void moveToDnw(Path path) { logger.info("Method not implemented"); } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
@Test public void testAreAttributesValidHashAndModifiedFile() throws Exception { ExtendedAttribute.setExtendedAttribute(tempFile, testHashFullName, Long.toString(TEST_VALUE)); long timestamp = Files.getLastModifiedTime(tempFile).toMillis(); timestamp += TimeUnit.MILLISECONDS.convert(1, TimeUnit.HOURS); ExtendedAttribute.setExtendedAttribute(tempFile, timestampFullName, Long.toString(timestamp)); assertThat(cut.areAttributesValid(tempFile), is(false)); }
public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; }
// Dataset variant rows: the same HashAttribute.areAttributesValid focal method
// wrapped in progressively larger class listings (bare method, + constructor,
// + full public member list). Code is identical in each row.
// NOTE(review): the catch logs only e.toString(); the stack trace is dropped.
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
HashAttribute { public boolean areAttributesValid(Path path) { try { return ExtendedAttribute.isExtendedAttributeSet(path, hashFQN) && verifyTimestamp(path); } catch (IOException e) { LOGGER.error("Failed to check hash for {} ({})", path, e.toString()); } return false; } HashAttribute(String hashName); boolean areAttributesValid(Path path); long readHash(Path path); void writeHash(Path path, long hash); void markCorrupted(Path path); boolean isCorrupted(Path path); String getHashFQN(); String getTimestampFQN(); String getCorruptNameFQN(); }
@Test public void testDeleteAll() throws Exception { List<Path> files = createTempTestFiles(10); LinkedList<Result> records = new LinkedList<>(); for (Path p : files) { records.add(new Result(resultGroup, new ImageRecord(p.toString(), 0))); } dupOp.deleteAll(records); assertFilesDoNotExist(files); verify(imageRepository, times(10)).remove(any(ImageRecord.class)); }
public void deleteAll(Collection<Result> records) { for (Result result : records) { deleteFile(result); } }
// Dataset variant rows: DuplicateOperations.deleteAll wrapped in progressively
// larger class listings. The focal method simply loops and delegates each
// result to deleteFile(Result).
DuplicateOperations { public void deleteAll(Collection<Result> records) { for (Result result : records) { deleteFile(result); } } }
DuplicateOperations { public void deleteAll(Collection<Result> records) { for (Result result : records) { deleteFile(result); } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); }
DuplicateOperations { public void deleteAll(Collection<Result> records) { for (Result result : records) { deleteFile(result); } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
DuplicateOperations { public void deleteAll(Collection<Result> records) { for (Result result : records) { deleteFile(result); } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
@Test public void testDeleteFile() throws Exception { List<Path> files = createTempTestFiles(1); Path file = files.get(0); assertThat(GUARD_MSG, Files.exists(file), is(true)); dupOp.deleteFile(file); assertThat(Files.exists(file), is(false)); verify(imageRepository).remove(new ImageRecord(file.toString(), 0)); }
public void deleteFile(Result result) { deleteFile(fileSystem.getPath(result.getImageRecord().getPath())); result.remove(); }
// Dataset variant rows: DuplicateOperations.deleteFile(Result) wrapped in
// progressively larger class listings. The focal method resolves the record's
// path via the injected FileSystem, deletes the file, then removes the result
// from its group.
DuplicateOperations { public void deleteFile(Result result) { deleteFile(fileSystem.getPath(result.getImageRecord().getPath())); result.remove(); } }
DuplicateOperations { public void deleteFile(Result result) { deleteFile(fileSystem.getPath(result.getImageRecord().getPath())); result.remove(); } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); }
DuplicateOperations { public void deleteFile(Result result) { deleteFile(fileSystem.getPath(result.getImageRecord().getPath())); result.remove(); } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
DuplicateOperations { public void deleteFile(Result result) { deleteFile(fileSystem.getPath(result.getImageRecord().getPath())); result.remove(); } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
@Test public void testMarkDnwAndDelete() throws Exception { List<Path> files = createTempTestFiles(RECORD_NUMBER); LinkedList<Result> records = new LinkedList<>(); for (Path p : files) { records.add(new Result(resultGroup, new ImageRecord(p.toString(), 0))); } dupOp.markDnwAndDelete(records); assertFilesDoNotExist(files); ArgumentCaptor<FilterRecord> capture = ArgumentCaptor.forClass(FilterRecord.class); verify(filterRepository, times(RECORD_NUMBER)).store(capture.capture()); verify(imageRepository, times(RECORD_NUMBER)).remove(any(ImageRecord.class)); for (FilterRecord record : capture.getAllValues()) { assertThat(record.getTag(), is(TAG_DNW)); } }
public void markDnwAndDelete(Collection<Result> records) { for (Result result : records) { ImageRecord ir = result.getImageRecord(); Path path = fileSystem.getPath(ir.getPath()); try { markAs(ir, TAG_DNW); deleteFile(result); } catch (RepositoryException e) { logger.warn("Failed to add filter entry for {} - {}", path, e.getMessage()); } } }
// Dataset variant rows: DuplicateOperations.markDnwAndDelete wrapped in
// progressively larger class listings. Each row tags the record DNW, deletes
// the file, and logs (without stack trace) any RepositoryException so the
// loop continues with the next record.
DuplicateOperations { public void markDnwAndDelete(Collection<Result> records) { for (Result result : records) { ImageRecord ir = result.getImageRecord(); Path path = fileSystem.getPath(ir.getPath()); try { markAs(ir, TAG_DNW); deleteFile(result); } catch (RepositoryException e) { logger.warn("Failed to add filter entry for {} - {}", path, e.getMessage()); } } } }
DuplicateOperations { public void markDnwAndDelete(Collection<Result> records) { for (Result result : records) { ImageRecord ir = result.getImageRecord(); Path path = fileSystem.getPath(ir.getPath()); try { markAs(ir, TAG_DNW); deleteFile(result); } catch (RepositoryException e) { logger.warn("Failed to add filter entry for {} - {}", path, e.getMessage()); } } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); }
DuplicateOperations { public void markDnwAndDelete(Collection<Result> records) { for (Result result : records) { ImageRecord ir = result.getImageRecord(); Path path = fileSystem.getPath(ir.getPath()); try { markAs(ir, TAG_DNW); deleteFile(result); } catch (RepositoryException e) { logger.warn("Failed to add filter entry for {} - {}", path, e.getMessage()); } } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
DuplicateOperations { public void markDnwAndDelete(Collection<Result> records) { for (Result result : records) { ImageRecord ir = result.getImageRecord(); Path path = fileSystem.getPath(ir.getPath()); try { markAs(ir, TAG_DNW); deleteFile(result); } catch (RepositoryException e) { logger.warn("Failed to add filter entry for {} - {}", path, e.getMessage()); } } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
@Test public void testMarkAsNotInDb() throws Exception { List<Path> files = createTempTestFiles(1); Path file = files.get(0); dupOp.markAs(file, TAG_FOO); verify(imageRepository).getByPath(any(Path.class)); verify(filterRepository, never()).store(any(FilterRecord.class)); }
public void markAs(Result result, Tag tag) { try { markAs(result.getImageRecord(), tag); } catch (RepositoryException e) { logger.warn(FILTER_ADD_FAILED_MESSAGE, result.getImageRecord().getPath(), e.getMessage()); } }
// Dataset variant rows: DuplicateOperations.markAs(Result, Tag) wrapped in
// progressively larger class listings. Delegates to markAs(ImageRecord, Tag)
// and logs RepositoryException via the shared FILTER_ADD_FAILED_MESSAGE
// constant instead of rethrowing.
DuplicateOperations { public void markAs(Result result, Tag tag) { try { markAs(result.getImageRecord(), tag); } catch (RepositoryException e) { logger.warn(FILTER_ADD_FAILED_MESSAGE, result.getImageRecord().getPath(), e.getMessage()); } } }
DuplicateOperations { public void markAs(Result result, Tag tag) { try { markAs(result.getImageRecord(), tag); } catch (RepositoryException e) { logger.warn(FILTER_ADD_FAILED_MESSAGE, result.getImageRecord().getPath(), e.getMessage()); } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); }
DuplicateOperations { public void markAs(Result result, Tag tag) { try { markAs(result.getImageRecord(), tag); } catch (RepositoryException e) { logger.warn(FILTER_ADD_FAILED_MESSAGE, result.getImageRecord().getPath(), e.getMessage()); } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }
DuplicateOperations { public void markAs(Result result, Tag tag) { try { markAs(result.getImageRecord(), tag); } catch (RepositoryException e) { logger.warn(FILTER_ADD_FAILED_MESSAGE, result.getImageRecord().getPath(), e.getMessage()); } } @Inject DuplicateOperations(FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); DuplicateOperations(FileSystem fileSystem, FilterRepository filterRepository, TagRepository tagRepository, ImageRepository imageRepository, IgnoreRepository ignoreRepository); void moveToDnw(Path path); void deleteAll(Collection<Result> records); void remove(Collection<ImageRecord> records); void deleteFile(Result result); void deleteFile(Path path); void markAll(Collection<Result> records, Tag tag); void markDnwAndDelete(Collection<Result> records); void markAs(Result result, Tag tag); void markAs(Path path, Tag tag); void markAs(ImageRecord image, Tag tag); void markDirectoryAs(Path directory, Tag tag); void markDirectoryAndChildrenAs(Path rootDirectory, Tag tag); List<ImageRecord> findMissingFiles(Path directory); void ignore(Result result); List<Tag> getFilterTags(); }