target
stringlengths 20
113k
| src_fm
stringlengths 11
86.3k
| src_fm_fc
stringlengths 21
86.4k
| src_fm_fc_co
stringlengths 30
86.4k
| src_fm_fc_ms
stringlengths 42
86.8k
| src_fm_fc_ms_ff
stringlengths 43
86.8k
|
---|---|---|---|---|---|
@Test public void parseMixedTest() { Set<AgentIdentifier> expected = Stream.of( AgentIdentifier.newBuilder() .setType(AgentIdentifierType.ORCID.name()) .setValue("https: .build(), AgentIdentifier.newBuilder() .setType(AgentIdentifierType.WIKIDATA.name()) .setValue("wikidata.org/wiki/0000") .build(), AgentIdentifier.newBuilder() .setType(AgentIdentifierType.OTHER.name()) .setValue("something") .build()) .collect(Collectors.toSet()); String raw = "wikidata.org/wiki/0000| something|0000-0002-0144-1997"; Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertFalse(set.isEmpty()); assertEquals(expected, set); }
|
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
|
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
|
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
|
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
|
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
|
@Test public void sha1Test() { String value = "af91c6ca-da34-4e49-ace3-3b125dbeab3c"; String expected = "3521a4e173f1c42a18d431d128720dc60e430a73"; String result = HashUtils.getSha1(value); Assert.assertEquals(expected, result); }
|
public static String getSha1(String... strings) { return getHash("SHA-1", strings); }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } static String getSha1(String... strings); }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } static String getSha1(String... strings); }
|
@Test public void sha1TwoValueTest() { String value1 = "af91c6ca-da34-4e49-ace3-3b125dbeab3c"; String value2 = "f033adff-4dc4-4d20-9da0-4ed24cf59b61"; String expected = "74cf926f4871c8f98acf392b098e406ab82765b5"; String result = HashUtils.getSha1(value1, value2); Assert.assertEquals(expected, result); }
|
public static String getSha1(String... strings) { return getHash("SHA-1", strings); }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } static String getSha1(String... strings); }
|
HashUtils { public static String getSha1(String... strings) { return getHash("SHA-1", strings); } static String getSha1(String... strings); }
|
@Test public void nullTest() { Integer year = null; Integer month = null; Integer day = null; Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertFalse(temporal.isPresent()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void nullYearTest() { Integer year = null; Integer month = 10; Integer day = 1; Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertFalse(temporal.isPresent()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void findHighestPriorityTest() { String expected = "Aa"; Set<PrioritizedProperty> set = new TreeSet<>(Comparator.comparing(PrioritizedProperty::getProperty)); set.add(new PrioritizedProperty(PrioritizedPropertyNameEnum.COLLECTOR_NAME, 1, "Aa")); set.add(new PrioritizedProperty(PrioritizedPropertyNameEnum.COLLECTOR_NAME, 1, "Bb")); set.add(new PrioritizedProperty(PrioritizedPropertyNameEnum.COLLECTOR_NAME, 1, "Cc")); String result = PropertyPrioritizer.findHighestPriority(set); Assert.assertEquals(expected, result); }
|
protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } abstract void resolvePriorities(); void addPrioritizedProperty(PrioritizedProperty prop); }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } abstract void resolvePriorities(); void addPrioritizedProperty(PrioritizedProperty prop); }
|
@Test public void nullYearMonthTest() { Integer year = null; Integer month = null; Integer day = 1; Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertFalse(temporal.isPresent()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void yearTest() { Integer year = 2000; Integer month = null; Integer day = null; Year expected = Year.of(year); Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertTrue(temporal.isPresent()); assertEquals(expected, temporal.get()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void yearMonthTest() { Integer year = 2000; Integer month = 10; Integer day = null; YearMonth expected = YearMonth.of(year, month); Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertTrue(temporal.isPresent()); assertEquals(expected, temporal.get()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void localDateTest() { Integer year = 2000; Integer month = 10; Integer day = 10; LocalDate expected = LocalDate.of(year, month, day); Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertTrue(temporal.isPresent()); assertEquals(expected, temporal.get()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void monthNullTest() { Integer year = 2000; Integer month = null; Integer day = 10; Year expected = Year.of(year); Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertTrue(temporal.isPresent()); assertEquals(expected, temporal.get()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void wrongDayMonthTest() { Integer year = 2000; Integer month = 11; Integer day = 31; Optional<Temporal> temporal = TemporalUtils.getTemporal(year, month, day); assertFalse(temporal.isPresent()); }
|
public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
TemporalUtils { public static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day) { try { if (year != null && month != null && day != null) { return Optional.of(LocalDate.of(year, month, day)); } if (year != null && month != null) { return Optional.of(YearMonth.of(year, month)); } if (year != null) { return Optional.of(Year.of(year)); } } catch (RuntimeException ex) { log.warn(ex.getLocalizedMessage()); } return Optional.empty(); } static Optional<Temporal> getTemporal(Integer year, Integer month, Integer day); }
|
@Test public void createVerbatimToInterpretedMetricsTest() { IngestMetrics metrics = IngestMetricsBuilder.createVerbatimToInterpretedMetrics(); metrics.incMetric(BASIC_RECORDS_COUNT); metrics.incMetric(LOCATION_RECORDS_COUNT); metrics.incMetric(METADATA_RECORDS_COUNT); metrics.incMetric(TAXON_RECORDS_COUNT); metrics.incMetric(TEMPORAL_RECORDS_COUNT); metrics.incMetric(VERBATIM_RECORDS_COUNT); metrics.incMetric(AUDUBON_RECORDS_COUNT); metrics.incMetric(IMAGE_RECORDS_COUNT); metrics.incMetric(MEASUREMENT_OR_FACT_RECORDS_COUNT); metrics.incMetric(MULTIMEDIA_RECORDS_COUNT); metrics.incMetric(FILTER_ER_BASED_ON_GBIF_ID); metrics.incMetric(UNIQUE_GBIF_IDS_COUNT); metrics.incMetric(DUPLICATE_GBIF_IDS_COUNT); metrics.incMetric(IDENTICAL_GBIF_OBJECTS_COUNT); metrics.incMetric(INVALID_GBIF_ID_COUNT); metrics.incMetric(UNIQUE_IDS_COUNT); metrics.incMetric(DUPLICATE_IDS_COUNT); metrics.incMetric(IDENTICAL_OBJECTS_COUNT); MetricResults result = metrics.getMetricsResult(); Map<String, Long> map = new HashMap<>(); result .allMetrics() .getCounters() .forEach(mr -> map.put(mr.getName().getName(), mr.getAttempted())); Assert.assertEquals(18, map.size()); Assert.assertEquals(Long.valueOf(1L), map.get(BASIC_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(LOCATION_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(METADATA_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(TAXON_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(TEMPORAL_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(VERBATIM_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(AUDUBON_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(IMAGE_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(MEASUREMENT_OR_FACT_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(MULTIMEDIA_RECORDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(FILTER_ER_BASED_ON_GBIF_ID)); Assert.assertEquals(Long.valueOf(1L), 
map.get(UNIQUE_GBIF_IDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(DUPLICATE_GBIF_IDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(IDENTICAL_GBIF_OBJECTS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(INVALID_GBIF_ID_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(UNIQUE_IDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(DUPLICATE_IDS_COUNT)); Assert.assertEquals(Long.valueOf(1L), map.get(IDENTICAL_OBJECTS_COUNT)); }
|
public static IngestMetrics createVerbatimToInterpretedMetrics() { return IngestMetrics.create() .addMetric(BasicTransform.class, BASIC_RECORDS_COUNT) .addMetric(LocationTransform.class, LOCATION_RECORDS_COUNT) .addMetric(MetadataTransform.class, METADATA_RECORDS_COUNT) .addMetric(TaxonomyTransform.class, TAXON_RECORDS_COUNT) .addMetric(GrscicollTransform.class, GRSCICOLL_RECORDS_COUNT) .addMetric(TemporalTransform.class, TEMPORAL_RECORDS_COUNT) .addMetric(VerbatimTransform.class, VERBATIM_RECORDS_COUNT) .addMetric(AudubonTransform.class, AUDUBON_RECORDS_COUNT) .addMetric(ImageTransform.class, IMAGE_RECORDS_COUNT) .addMetric(MeasurementOrFactTransform.class, MEASUREMENT_OR_FACT_RECORDS_COUNT) .addMetric(MultimediaTransform.class, MULTIMEDIA_RECORDS_COUNT) .addMetric(FilterExtendedRecordTransform.class, FILTER_ER_BASED_ON_GBIF_ID) .addMetric(UniqueGbifIdTransform.class, UNIQUE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, DUPLICATE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, IDENTICAL_GBIF_OBJECTS_COUNT) .addMetric(UniqueGbifIdTransform.class, INVALID_GBIF_ID_COUNT) .addMetric(UniqueIdTransform.class, UNIQUE_IDS_COUNT) .addMetric(UniqueIdTransform.class, DUPLICATE_IDS_COUNT) .addMetric(UniqueIdTransform.class, IDENTICAL_OBJECTS_COUNT) .addMetric(OccurrenceExtensionTransform.class, OCCURRENCE_EXT_COUNT); }
|
IngestMetricsBuilder { public static IngestMetrics createVerbatimToInterpretedMetrics() { return IngestMetrics.create() .addMetric(BasicTransform.class, BASIC_RECORDS_COUNT) .addMetric(LocationTransform.class, LOCATION_RECORDS_COUNT) .addMetric(MetadataTransform.class, METADATA_RECORDS_COUNT) .addMetric(TaxonomyTransform.class, TAXON_RECORDS_COUNT) .addMetric(GrscicollTransform.class, GRSCICOLL_RECORDS_COUNT) .addMetric(TemporalTransform.class, TEMPORAL_RECORDS_COUNT) .addMetric(VerbatimTransform.class, VERBATIM_RECORDS_COUNT) .addMetric(AudubonTransform.class, AUDUBON_RECORDS_COUNT) .addMetric(ImageTransform.class, IMAGE_RECORDS_COUNT) .addMetric(MeasurementOrFactTransform.class, MEASUREMENT_OR_FACT_RECORDS_COUNT) .addMetric(MultimediaTransform.class, MULTIMEDIA_RECORDS_COUNT) .addMetric(FilterExtendedRecordTransform.class, FILTER_ER_BASED_ON_GBIF_ID) .addMetric(UniqueGbifIdTransform.class, UNIQUE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, DUPLICATE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, IDENTICAL_GBIF_OBJECTS_COUNT) .addMetric(UniqueGbifIdTransform.class, INVALID_GBIF_ID_COUNT) .addMetric(UniqueIdTransform.class, UNIQUE_IDS_COUNT) .addMetric(UniqueIdTransform.class, DUPLICATE_IDS_COUNT) .addMetric(UniqueIdTransform.class, IDENTICAL_OBJECTS_COUNT) .addMetric(OccurrenceExtensionTransform.class, OCCURRENCE_EXT_COUNT); } }
|
IngestMetricsBuilder { public static IngestMetrics createVerbatimToInterpretedMetrics() { return IngestMetrics.create() .addMetric(BasicTransform.class, BASIC_RECORDS_COUNT) .addMetric(LocationTransform.class, LOCATION_RECORDS_COUNT) .addMetric(MetadataTransform.class, METADATA_RECORDS_COUNT) .addMetric(TaxonomyTransform.class, TAXON_RECORDS_COUNT) .addMetric(GrscicollTransform.class, GRSCICOLL_RECORDS_COUNT) .addMetric(TemporalTransform.class, TEMPORAL_RECORDS_COUNT) .addMetric(VerbatimTransform.class, VERBATIM_RECORDS_COUNT) .addMetric(AudubonTransform.class, AUDUBON_RECORDS_COUNT) .addMetric(ImageTransform.class, IMAGE_RECORDS_COUNT) .addMetric(MeasurementOrFactTransform.class, MEASUREMENT_OR_FACT_RECORDS_COUNT) .addMetric(MultimediaTransform.class, MULTIMEDIA_RECORDS_COUNT) .addMetric(FilterExtendedRecordTransform.class, FILTER_ER_BASED_ON_GBIF_ID) .addMetric(UniqueGbifIdTransform.class, UNIQUE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, DUPLICATE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, IDENTICAL_GBIF_OBJECTS_COUNT) .addMetric(UniqueGbifIdTransform.class, INVALID_GBIF_ID_COUNT) .addMetric(UniqueIdTransform.class, UNIQUE_IDS_COUNT) .addMetric(UniqueIdTransform.class, DUPLICATE_IDS_COUNT) .addMetric(UniqueIdTransform.class, IDENTICAL_OBJECTS_COUNT) .addMetric(OccurrenceExtensionTransform.class, OCCURRENCE_EXT_COUNT); } }
|
IngestMetricsBuilder { public static IngestMetrics createVerbatimToInterpretedMetrics() { return IngestMetrics.create() .addMetric(BasicTransform.class, BASIC_RECORDS_COUNT) .addMetric(LocationTransform.class, LOCATION_RECORDS_COUNT) .addMetric(MetadataTransform.class, METADATA_RECORDS_COUNT) .addMetric(TaxonomyTransform.class, TAXON_RECORDS_COUNT) .addMetric(GrscicollTransform.class, GRSCICOLL_RECORDS_COUNT) .addMetric(TemporalTransform.class, TEMPORAL_RECORDS_COUNT) .addMetric(VerbatimTransform.class, VERBATIM_RECORDS_COUNT) .addMetric(AudubonTransform.class, AUDUBON_RECORDS_COUNT) .addMetric(ImageTransform.class, IMAGE_RECORDS_COUNT) .addMetric(MeasurementOrFactTransform.class, MEASUREMENT_OR_FACT_RECORDS_COUNT) .addMetric(MultimediaTransform.class, MULTIMEDIA_RECORDS_COUNT) .addMetric(FilterExtendedRecordTransform.class, FILTER_ER_BASED_ON_GBIF_ID) .addMetric(UniqueGbifIdTransform.class, UNIQUE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, DUPLICATE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, IDENTICAL_GBIF_OBJECTS_COUNT) .addMetric(UniqueGbifIdTransform.class, INVALID_GBIF_ID_COUNT) .addMetric(UniqueIdTransform.class, UNIQUE_IDS_COUNT) .addMetric(UniqueIdTransform.class, DUPLICATE_IDS_COUNT) .addMetric(UniqueIdTransform.class, IDENTICAL_OBJECTS_COUNT) .addMetric(OccurrenceExtensionTransform.class, OCCURRENCE_EXT_COUNT); } static IngestMetrics createVerbatimToInterpretedMetrics(); static IngestMetrics createInterpretedToEsIndexMetrics(); static IngestMetrics createInterpretedToHdfsViewMetrics(); }
|
IngestMetricsBuilder { public static IngestMetrics createVerbatimToInterpretedMetrics() { return IngestMetrics.create() .addMetric(BasicTransform.class, BASIC_RECORDS_COUNT) .addMetric(LocationTransform.class, LOCATION_RECORDS_COUNT) .addMetric(MetadataTransform.class, METADATA_RECORDS_COUNT) .addMetric(TaxonomyTransform.class, TAXON_RECORDS_COUNT) .addMetric(GrscicollTransform.class, GRSCICOLL_RECORDS_COUNT) .addMetric(TemporalTransform.class, TEMPORAL_RECORDS_COUNT) .addMetric(VerbatimTransform.class, VERBATIM_RECORDS_COUNT) .addMetric(AudubonTransform.class, AUDUBON_RECORDS_COUNT) .addMetric(ImageTransform.class, IMAGE_RECORDS_COUNT) .addMetric(MeasurementOrFactTransform.class, MEASUREMENT_OR_FACT_RECORDS_COUNT) .addMetric(MultimediaTransform.class, MULTIMEDIA_RECORDS_COUNT) .addMetric(FilterExtendedRecordTransform.class, FILTER_ER_BASED_ON_GBIF_ID) .addMetric(UniqueGbifIdTransform.class, UNIQUE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, DUPLICATE_GBIF_IDS_COUNT) .addMetric(UniqueGbifIdTransform.class, IDENTICAL_GBIF_OBJECTS_COUNT) .addMetric(UniqueGbifIdTransform.class, INVALID_GBIF_ID_COUNT) .addMetric(UniqueIdTransform.class, UNIQUE_IDS_COUNT) .addMetric(UniqueIdTransform.class, DUPLICATE_IDS_COUNT) .addMetric(UniqueIdTransform.class, IDENTICAL_OBJECTS_COUNT) .addMetric(OccurrenceExtensionTransform.class, OCCURRENCE_EXT_COUNT); } static IngestMetrics createVerbatimToInterpretedMetrics(); static IngestMetrics createInterpretedToEsIndexMetrics(); static IngestMetrics createInterpretedToHdfsViewMetrics(); }
|
@Test public void createInterpretedToEsIndexMetricsTest() { IngestMetrics metrics = IngestMetricsBuilder.createInterpretedToEsIndexMetrics(); metrics.incMetric(AVRO_TO_JSON_COUNT); MetricResults result = metrics.getMetricsResult(); Map<String, Long> map = new HashMap<>(); result .allMetrics() .getCounters() .forEach(mr -> map.put(mr.getName().getName(), mr.getAttempted())); Assert.assertEquals(1, map.size()); Assert.assertEquals(Long.valueOf(1L), map.get(AVRO_TO_JSON_COUNT)); }
|
public static IngestMetrics createInterpretedToEsIndexMetrics() { return IngestMetrics.create().addMetric(GbifJsonTransform.class, AVRO_TO_JSON_COUNT); }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToEsIndexMetrics() { return IngestMetrics.create().addMetric(GbifJsonTransform.class, AVRO_TO_JSON_COUNT); } }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToEsIndexMetrics() { return IngestMetrics.create().addMetric(GbifJsonTransform.class, AVRO_TO_JSON_COUNT); } }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToEsIndexMetrics() { return IngestMetrics.create().addMetric(GbifJsonTransform.class, AVRO_TO_JSON_COUNT); } static IngestMetrics createVerbatimToInterpretedMetrics(); static IngestMetrics createInterpretedToEsIndexMetrics(); static IngestMetrics createInterpretedToHdfsViewMetrics(); }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToEsIndexMetrics() { return IngestMetrics.create().addMetric(GbifJsonTransform.class, AVRO_TO_JSON_COUNT); } static IngestMetrics createVerbatimToInterpretedMetrics(); static IngestMetrics createInterpretedToEsIndexMetrics(); static IngestMetrics createInterpretedToHdfsViewMetrics(); }
|
@Test public void createInterpretedToHdfsViewMetricsTest() { IngestMetrics metrics = IngestMetricsBuilder.createInterpretedToHdfsViewMetrics(); metrics.incMetric(AVRO_TO_HDFS_COUNT); MetricResults result = metrics.getMetricsResult(); Map<String, Long> map = new HashMap<>(); result .allMetrics() .getCounters() .forEach(mr -> map.put(mr.getName().getName(), mr.getAttempted())); Assert.assertEquals(1, map.size()); Assert.assertEquals(Long.valueOf(1L), map.get(AVRO_TO_HDFS_COUNT)); }
|
public static IngestMetrics createInterpretedToHdfsViewMetrics() { return IngestMetrics.create() .addMetric(OccurrenceHdfsRecordConverterTransform.class, AVRO_TO_HDFS_COUNT); }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToHdfsViewMetrics() { return IngestMetrics.create() .addMetric(OccurrenceHdfsRecordConverterTransform.class, AVRO_TO_HDFS_COUNT); } }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToHdfsViewMetrics() { return IngestMetrics.create() .addMetric(OccurrenceHdfsRecordConverterTransform.class, AVRO_TO_HDFS_COUNT); } }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToHdfsViewMetrics() { return IngestMetrics.create() .addMetric(OccurrenceHdfsRecordConverterTransform.class, AVRO_TO_HDFS_COUNT); } static IngestMetrics createVerbatimToInterpretedMetrics(); static IngestMetrics createInterpretedToEsIndexMetrics(); static IngestMetrics createInterpretedToHdfsViewMetrics(); }
|
IngestMetricsBuilder { public static IngestMetrics createInterpretedToHdfsViewMetrics() { return IngestMetrics.create() .addMetric(OccurrenceHdfsRecordConverterTransform.class, AVRO_TO_HDFS_COUNT); } static IngestMetrics createVerbatimToInterpretedMetrics(); static IngestMetrics createInterpretedToEsIndexMetrics(); static IngestMetrics createInterpretedToHdfsViewMetrics(); }
|
@Test public void testSparkRunnerCommand() { String expected = "spark2-submit --conf spark.default.parallelism=1 --conf spark.executor.memoryOverhead=1 " + "--conf spark.dynamicAllocation.enabled=false " + "--class org.gbif.Test --master yarn --deploy-mode cluster --executor-memory 1G --executor-cores 1 --num-executors 1 " + "--driver-memory 4G java.jar --datasetId=de7ffb5e-c07b-42dc-8a88-f67a4465fe3d --attempt=1 --runner=SparkRunner " + "--metaFileName=interpreted-to-hdfs.yml --inputPath=tmp --targetPath=target --hdfsSiteConfig=hdfs.xml " + "--coreSiteConfig=core.xml --numberOfShards=10 --properties=/path/ws.config"; HdfsViewConfiguration config = new HdfsViewConfiguration(); config.distributedConfig.jarPath = "java.jar"; config.distributedConfig.mainClass = "org.gbif.Test"; config.sparkConfig.executorMemoryGbMax = 10; config.sparkConfig.executorMemoryGbMin = 1; config.sparkConfig.executorCores = 1; config.sparkConfig.executorNumbersMin = 1; config.sparkConfig.executorNumbersMax = 2; config.sparkConfig.memoryOverhead = 1; config.sparkConfig.driverMemory = "4G"; config.distributedConfig.deployMode = "cluster"; config.processRunner = StepRunner.DISTRIBUTED.name(); config.pipelinesConfig = "/path/ws.config"; config.repositoryTargetPath = "target"; config.stepConfig.coreSiteConfig = "core.xml"; config.stepConfig.hdfsSiteConfig = "hdfs.xml"; config.stepConfig.repositoryPath = "tmp"; UUID datasetId = UUID.fromString("de7ffb5e-c07b-42dc-8a88-f67a4465fe3d"); int attempt = 1; Set<String> steps = Collections.singleton(RecordType.ALL.name()); ValidationResult vr = new ValidationResult(); PipelinesInterpretedMessage message = new PipelinesInterpretedMessage( datasetId, attempt, steps, null, false, null, EndpointType.DWC_ARCHIVE, vr); ProcessBuilder builder = ProcessRunnerBuilder.builder() .config(config) .message(message) .sparkParallelism(1) .sparkExecutorMemory("1G") .sparkExecutorNumbers(1) .numberOfShards(10) .build() .get(); String result = builder.command().get(2); 
assertEquals(expected, result); }
|
ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
@Test public void reverseFindHighestPriorityTest() { String expected = "Aa"; Set<PrioritizedProperty> set = new TreeSet<>(Comparator.comparing(PrioritizedProperty::getProperty).reversed()); set.add(new PrioritizedProperty(PrioritizedPropertyNameEnum.COLLECTOR_NAME, 1, "Cc")); set.add(new PrioritizedProperty(PrioritizedPropertyNameEnum.COLLECTOR_NAME, 1, "Bb")); set.add(new PrioritizedProperty(PrioritizedPropertyNameEnum.COLLECTOR_NAME, 1, "Aa")); String result = PropertyPrioritizer.findHighestPriority(set); Assert.assertEquals(expected, result); }
|
protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } abstract void resolvePriorities(); void addPrioritizedProperty(PrioritizedProperty prop); }
|
PropertyPrioritizer { protected static String findHighestPriority(Set<PrioritizedProperty> props) { return props.stream() .min( Comparator.comparing(PrioritizedProperty::getPriority) .thenComparing(PrioritizedProperty::getProperty)) .map(PrioritizedProperty::getProperty) .orElse(null); } abstract void resolvePriorities(); void addPrioritizedProperty(PrioritizedProperty prop); }
|
@Test public void testSparkRunnerCommandFull() { String expected = "sudo -u user spark2-submit --conf spark.metrics.conf=metrics.properties --conf \"spark.driver.extraClassPath=logstash-gelf.jar\" " + "--driver-java-options \"-Dlog4j.configuration=file:log4j.properties\" --queue pipelines --conf spark.default.parallelism=1 " + "--conf spark.executor.memoryOverhead=1 --conf spark.dynamicAllocation.enabled=false " + "--class org.gbif.Test --master yarn --deploy-mode cluster " + "--executor-memory 1G --executor-cores 1 --num-executors 1 --driver-memory 4G java.jar --datasetId=de7ffb5e-c07b-42dc-8a88-f67a4465fe3d " + "--attempt=1 --runner=SparkRunner --metaFileName=interpreted-to-hdfs.yml --inputPath=tmp --targetPath=target --hdfsSiteConfig=hdfs.xml " + "--coreSiteConfig=core.xml --numberOfShards=10 --properties=/path/ws.config"; HdfsViewConfiguration config = new HdfsViewConfiguration(); config.distributedConfig.jarPath = "java.jar"; config.distributedConfig.mainClass = "org.gbif.Test"; config.sparkConfig.executorMemoryGbMax = 10; config.sparkConfig.executorMemoryGbMin = 1; config.sparkConfig.executorCores = 1; config.sparkConfig.executorNumbersMin = 1; config.sparkConfig.executorNumbersMax = 2; config.sparkConfig.memoryOverhead = 1; config.sparkConfig.driverMemory = "4G"; config.distributedConfig.metricsPropertiesPath = "metrics.properties"; config.distributedConfig.extraClassPath = "logstash-gelf.jar"; config.distributedConfig.driverJavaOptions = "-Dlog4j.configuration=file:log4j.properties"; config.distributedConfig.deployMode = "cluster"; config.processRunner = StepRunner.DISTRIBUTED.name(); config.pipelinesConfig = "/path/ws.config"; config.repositoryTargetPath = "target"; config.distributedConfig.yarnQueue = "pipelines"; config.distributedConfig.otherUser = "user"; config.stepConfig.coreSiteConfig = "core.xml"; config.stepConfig.hdfsSiteConfig = "hdfs.xml"; config.stepConfig.repositoryPath = "tmp"; UUID datasetId = 
UUID.fromString("de7ffb5e-c07b-42dc-8a88-f67a4465fe3d"); int attempt = 1; Set<String> steps = Collections.singleton(RecordType.ALL.name()); ValidationResult vr = new ValidationResult(); PipelinesInterpretedMessage message = new PipelinesInterpretedMessage( datasetId, attempt, steps, 100L, false, null, EndpointType.DWC_ARCHIVE, vr); ProcessBuilder builder = ProcessRunnerBuilder.builder() .config(config) .message(message) .sparkParallelism(1) .sparkExecutorMemory("1G") .sparkExecutorNumbers(1) .numberOfShards(10) .build() .get(); String result = builder.command().get(2); assertEquals(expected, result); }
|
ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
@Test public void testSparkRunnerCommand() { String expected = "spark2-submit --conf spark.default.parallelism=1 --conf spark.executor.memoryOverhead=1 --conf spark.dynamicAllocation.enabled=false " + "--conf spark.yarn.am.waitTime=360s " + "--class org.gbif.Test --master yarn --deploy-mode cluster --executor-memory 1G --executor-cores 1 --num-executors 1 " + "--driver-memory 4G java.jar --datasetId=de7ffb5e-c07b-42dc-8a88-f67a4465fe3d --attempt=1 --interpretationTypes=ALL " + "--runner=SparkRunner --targetPath=tmp --metaFileName=verbatim-to-interpreted.yml --inputPath=verbatim.avro " + "--avroCompressionType=SNAPPY --avroSyncInterval=1 --hdfsSiteConfig=hdfs.xml --coreSiteConfig=core.xml " + "--properties=/path/ws.config --endPointType=DWC_ARCHIVE --tripletValid=true --occurrenceIdValid=true --useExtendedRecordId=true"; InterpreterConfiguration config = new InterpreterConfiguration(); config.distributedConfig.jarPath = "java.jar"; config.distributedConfig.mainClass = "org.gbif.Test"; config.sparkConfig.executorMemoryGbMax = 10; config.sparkConfig.executorMemoryGbMin = 1; config.sparkConfig.executorCores = 1; config.sparkConfig.executorNumbersMin = 1; config.sparkConfig.executorNumbersMax = 2; config.sparkConfig.memoryOverhead = 1; config.avroConfig.compressionType = "SNAPPY"; config.avroConfig.syncInterval = 1; config.pipelinesConfig = "/path/ws.config"; config.sparkConfig.driverMemory = "4G"; config.distributedConfig.deployMode = "cluster"; config.processRunner = StepRunner.DISTRIBUTED.name(); config.stepConfig.repositoryPath = "tmp"; config.stepConfig.coreSiteConfig = "core.xml"; config.stepConfig.hdfsSiteConfig = "hdfs.xml"; UUID datasetId = UUID.fromString("de7ffb5e-c07b-42dc-8a88-f67a4465fe3d"); int attempt = 1; Set<String> types = Collections.singleton(RecordType.ALL.name()); Set<String> steps = Collections.singleton(StepType.VERBATIM_TO_INTERPRETED.name()); PipelinesVerbatimMessage message = new PipelinesVerbatimMessage( datasetId, attempt, types, steps, 
null, EndpointType.DWC_ARCHIVE, "something", new ValidationResult(true, true, true, null), null, EXECUTION_ID); ProcessBuilder builder = ProcessRunnerBuilder.builder() .config(config) .message(message) .inputPath("verbatim.avro") .sparkParallelism(1) .sparkExecutorMemory("1G") .sparkExecutorNumbers(1) .build() .get(); String result = builder.command().get(2); assertEquals(expected, result); }
|
ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
@Test public void testSparkRunnerCommandFull() { String expected = "sudo -u user spark2-submit --conf spark.metrics.conf=metrics.properties --conf \"spark.driver.extraClassPath=logstash-gelf.jar\" " + "--driver-java-options \"-Dlog4j.configuration=file:log4j.properties\" --queue pipelines --conf spark.default.parallelism=1 " + "--conf spark.executor.memoryOverhead=1 --conf spark.dynamicAllocation.enabled=false --conf spark.yarn.am.waitTime=360s " + "--class org.gbif.Test --master yarn " + "--deploy-mode cluster --executor-memory 1G --executor-cores 1 --num-executors 1 --driver-memory 4G java.jar " + "--datasetId=de7ffb5e-c07b-42dc-8a88-f67a4465fe3d --attempt=1 --interpretationTypes=ALL --runner=SparkRunner " + "--targetPath=tmp --metaFileName=verbatim-to-interpreted.yml --inputPath=verbatim.avro --avroCompressionType=SNAPPY " + "--avroSyncInterval=1 --hdfsSiteConfig=hdfs.xml --coreSiteConfig=core.xml --properties=/path/ws.config --endPointType=DWC_ARCHIVE"; InterpreterConfiguration config = new InterpreterConfiguration(); config.distributedConfig.jarPath = "java.jar"; config.distributedConfig.mainClass = "org.gbif.Test"; config.sparkConfig.executorMemoryGbMax = 10; config.sparkConfig.executorMemoryGbMin = 1; config.sparkConfig.executorCores = 1; config.sparkConfig.executorNumbersMin = 1; config.sparkConfig.executorNumbersMax = 2; config.sparkConfig.memoryOverhead = 1; config.avroConfig.compressionType = "SNAPPY"; config.avroConfig.syncInterval = 1; config.pipelinesConfig = "/path/ws.config"; config.sparkConfig.driverMemory = "4G"; config.distributedConfig.metricsPropertiesPath = "metrics.properties"; config.distributedConfig.extraClassPath = "logstash-gelf.jar"; config.distributedConfig.driverJavaOptions = "-Dlog4j.configuration=file:log4j.properties"; config.distributedConfig.deployMode = "cluster"; config.processRunner = StepRunner.DISTRIBUTED.name(); config.distributedConfig.yarnQueue = "pipelines"; config.distributedConfig.otherUser = "user"; 
config.stepConfig.hdfsSiteConfig = "hdfs.xml"; config.stepConfig.coreSiteConfig = "core.xml"; config.stepConfig.repositoryPath = "tmp"; UUID datasetId = UUID.fromString("de7ffb5e-c07b-42dc-8a88-f67a4465fe3d"); int attempt = 1; Set<String> types = Collections.singleton(RecordType.ALL.name()); Set<String> steps = Collections.singleton(StepType.VERBATIM_TO_INTERPRETED.name()); PipelinesVerbatimMessage message = new PipelinesVerbatimMessage( datasetId, attempt, types, steps, null, EndpointType.DWC_ARCHIVE, null, null, null, EXECUTION_ID); ProcessBuilder builder = ProcessRunnerBuilder.builder() .config(config) .message(message) .inputPath("verbatim.avro") .sparkParallelism(1) .sparkExecutorMemory("1G") .sparkExecutorNumbers(1) .build() .get(); String result = builder.command().get(2); assertEquals(expected, result); }
|
ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
@Test public void testSparkRunnerCommand() { String expected = "spark2-submit --conf spark.default.parallelism=1 --conf spark.executor.memoryOverhead=1 " + "--conf spark.dynamicAllocation.enabled=false " + "--class org.gbif.Test --master yarn --deploy-mode cluster " + "--executor-memory 1G --executor-cores 1 --num-executors 1 --driver-memory 4G java.jar " + "--datasetId=de7ffb5e-c07b-42dc-8a88-f67a4465fe3d --attempt=1 --runner=SparkRunner --inputPath=tmp " + "--targetPath=tmp --metaFileName=interpreted-to-index.yml --hdfsSiteConfig=hdfs.xml " + "--coreSiteConfig=core.xml --esHosts=http: IndexingConfiguration config = new IndexingConfiguration(); config.distributedConfig.jarPath = "java.jar"; config.distributedConfig.mainClass = "org.gbif.Test"; config.sparkConfig.executorMemoryGbMax = 10; config.sparkConfig.executorMemoryGbMin = 1; config.sparkConfig.executorCores = 1; config.sparkConfig.executorNumbersMin = 1; config.sparkConfig.executorNumbersMax = 2; config.sparkConfig.memoryOverhead = 1; config.sparkConfig.driverMemory = "4G"; config.distributedConfig.deployMode = "cluster"; config.processRunner = StepRunner.DISTRIBUTED.name(); config.esConfig.hosts = new String[] {"http: config.pipelinesConfig = "/path/ws.config"; config.stepConfig.coreSiteConfig = "core.xml"; config.stepConfig.repositoryPath = "tmp"; config.stepConfig.hdfsSiteConfig = "hdfs.xml"; UUID datasetId = UUID.fromString("de7ffb5e-c07b-42dc-8a88-f67a4465fe3d"); int attempt = 1; Set<String> steps = Collections.singleton(RecordType.ALL.name()); ValidationResult vr = new ValidationResult(); PipelinesInterpretedMessage message = new PipelinesInterpretedMessage( datasetId, attempt, steps, null, false, null, EndpointType.DWC_ARCHIVE, vr); String indexName = "occurrence"; ProcessBuilder builder = ProcessRunnerBuilder.builder() .config(config) .message(message) .esIndexName(indexName) .sparkParallelism(1) .sparkExecutorMemory("1G") .sparkExecutorNumbers(1) .build() .get(); String result = 
builder.command().get(2); assertEquals(expected, result); }
|
ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
// Same as testSparkRunnerCommand but exercises the optional distributed settings as well:
// metrics properties, driver extra classpath, driver java options and a yarn queue.
// NOTE(review): literals truncated at "http:" — extraction garbling; confirm upstream.
@Test public void testSparkRunnerCommandFull() { String expected = "spark2-submit --conf spark.metrics.conf=metrics.properties " + "--conf \"spark.driver.extraClassPath=logstash-gelf.jar\" " + "--driver-java-options \"-Dlog4j.configuration=file:log4j.properties\" --queue pipelines --conf spark.default.parallelism=1 " + "--conf spark.executor.memoryOverhead=1 --conf spark.dynamicAllocation.enabled=false " + "--class org.gbif.Test --master yarn " + "--deploy-mode cluster --executor-memory 1G --executor-cores 1 --num-executors 1 --driver-memory 4G java.jar " + "--datasetId=de7ffb5e-c07b-42dc-8a88-f67a4465fe3d --attempt=1 --runner=SparkRunner --inputPath=tmp --targetPath=tmp " + "--metaFileName=interpreted-to-index.yml --hdfsSiteConfig=hdfs.xml --coreSiteConfig=core.xml " + "--esHosts=http: IndexingConfiguration config = new IndexingConfiguration(); config.distributedConfig.jarPath = "java.jar"; config.distributedConfig.mainClass = "org.gbif.Test"; config.sparkConfig.executorMemoryGbMax = 10; config.sparkConfig.executorMemoryGbMin = 1; config.sparkConfig.executorCores = 1; config.sparkConfig.executorNumbersMin = 1; config.sparkConfig.executorNumbersMax = 2; config.sparkConfig.memoryOverhead = 1; config.sparkConfig.driverMemory = "4G"; config.distributedConfig.metricsPropertiesPath = "metrics.properties"; config.distributedConfig.extraClassPath = "logstash-gelf.jar"; config.distributedConfig.driverJavaOptions = "-Dlog4j.configuration=file:log4j.properties"; config.distributedConfig.deployMode = "cluster"; config.processRunner = StepRunner.DISTRIBUTED.name(); config.esConfig.hosts = new String[] {"http: config.distributedConfig.yarnQueue = "pipelines"; config.pipelinesConfig = "/path/ws.config"; config.stepConfig.hdfsSiteConfig = "hdfs.xml"; config.stepConfig.repositoryPath = "tmp"; config.stepConfig.coreSiteConfig = "core.xml"; UUID datasetId = UUID.fromString("de7ffb5e-c07b-42dc-8a88-f67a4465fe3d"); int attempt = 1; Set<String> steps = 
Collections.singleton(RecordType.ALL.name()); ValidationResult vr = new ValidationResult(); PipelinesInterpretedMessage message = new PipelinesInterpretedMessage( datasetId, attempt, steps, 100L, false, null, EndpointType.DWC_ARCHIVE, vr); String indexName = "occurrence"; ProcessBuilder builder = ProcessRunnerBuilder.builder() .config(config) .message(message) .sparkParallelism(1) .sparkExecutorMemory("1G") .sparkExecutorNumbers(1) .esIndexName(indexName) .build() .get(); String result = builder.command().get(2); assertEquals(expected, result); }
|
// Duplicate context records for ProcessRunnerBuilder.get() (bare method plus class-wrapped
// variants) belonging to the testSparkRunnerCommandFull record above — intentional dataset
// repetition.
ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
ProcessRunnerBuilder { ProcessBuilder get() { if (StepRunner.DISTRIBUTED.name().equals(config.processRunner)) { return buildSpark(); } throw new IllegalArgumentException("Wrong runner type - " + config.processRunner); } String[] buildOptions(); }
|
// Maps terms to HBase column names: interpreted occurrence properties keep their simple name
// (e.g. scientificName), verbatim-only terms get the "v_" prefix (e.g. v_catalogNumber), and
// reserved-word terms are unescaped (class_ -> "class").
@Test public void testGetColumn() { assertEquals("scientificName", Columns.column(DwcTerm.scientificName)); assertEquals("countryCode", Columns.column(DwcTerm.countryCode)); assertEquals("v_catalogNumber", Columns.column(DwcTerm.catalogNumber)); assertEquals("class", Columns.column(DwcTerm.class_)); assertEquals("order", Columns.column(DwcTerm.order)); assertEquals("kingdomKey", Columns.column(GbifTerm.kingdomKey)); assertEquals("taxonKey", Columns.column(GbifTerm.taxonKey)); assertEquals("v_occurrenceID", Columns.column(DwcTerm.occurrenceID)); assertEquals("v_taxonID", Columns.column(DwcTerm.taxonID)); assertEquals("basisOfRecord", Columns.column(DwcTerm.basisOfRecord)); assertEquals("taxonKey", Columns.column(GbifTerm.taxonKey)); }
|
public static String column(Term term) {
  // Internal terms, occurrence Java properties and mediaType are stored interpreted,
  // i.e. under their plain (un-prefixed) column name.
  boolean storedInterpreted =
      term instanceof GbifInternalTerm
          || TermUtils.isOccurrenceJavaProperty(term)
          || GbifTerm.mediaType == term;
  if (storedInterpreted) {
    return column(term, "");
  }
  // Source terms that only feed interpretation have no interpreted column of their own.
  if (TermUtils.isInterpretedSourceTerm(term)) {
    throw new IllegalArgumentException(
        "The term " + term + " is interpreted and only relevant for verbatim values");
  }
  // Everything else only exists as a verbatim ("v_"-prefixed) column.
  return verbatimColumn(term);
}
|
// Duplicate class-wrapped context records for Columns.column(Term) — generated dataset
// repetition; the last variant also lists the class's public constants.
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } }
|
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } }
|
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } static String column(Term term); static String verbatimColumn(Term term); }
|
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } static String column(Term term); static String verbatimColumn(Term term); static final String OCCURRENCE_COLUMN_FAMILY; static final byte[] CF; static final String COUNTER_COLUMN; static final String LOOKUP_KEY_COLUMN; static final String LOOKUP_LOCK_COLUMN; static final String LOOKUP_STATUS_COLUMN; }
|
// Internal terms (GbifInternalTerm) must be rejected by verbatimColumn.
@Test(expected = IllegalArgumentException.class) public void testGetVerbatimColumnIllegal() { Columns.verbatimColumn(GbifInternalTerm.crawlId); }
|
public static String verbatimColumn(Term term) {
  // Non-internal terms map directly to their "v_"-prefixed verbatim column.
  if (!(term instanceof GbifInternalTerm)) {
    return column(term, VERBATIM_TERM_PREFIX);
  }
  // Internal terms are never stored verbatim.
  throw new IllegalArgumentException(
      "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns");
}
|
// Duplicate class-wrapped context records for Columns.verbatimColumn(Term) — generated
// dataset repetition.
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } }
|
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } }
|
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } static String column(Term term); static String verbatimColumn(Term term); }
|
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } static String column(Term term); static String verbatimColumn(Term term); static final String OCCURRENCE_COLUMN_FAMILY; static final byte[] CF; static final String COUNTER_COLUMN; static final String LOOKUP_KEY_COLUMN; static final String LOOKUP_LOCK_COLUMN; static final String LOOKUP_STATUS_COLUMN; }
|
// DwcTerm.country is an interpreted-source term, so column() must throw rather than
// return a column name for it.
@Test(expected = IllegalArgumentException.class) public void testGetColumnIllegal3() { Columns.column(DwcTerm.country); }
|
// Duplicate context records for Columns.column(Term) (bare method plus class-wrapped
// variants) belonging to the testGetColumnIllegal3 record above — generated dataset
// repetition.
public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } }
|
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } }
|
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } }
|
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } static String column(Term term); static String verbatimColumn(Term term); }
|
Columns { public static String column(Term term) { if (term instanceof GbifInternalTerm || TermUtils.isOccurrenceJavaProperty(term) || GbifTerm.mediaType == term) { return column(term, ""); } else if (TermUtils.isInterpretedSourceTerm(term)) { throw new IllegalArgumentException( "The term " + term + " is interpreted and only relevant for verbatim values"); } else { return verbatimColumn(term); } } static String column(Term term); static String verbatimColumn(Term term); static final String OCCURRENCE_COLUMN_FAMILY; static final byte[] CF; static final String COUNTER_COLUMN; static final String LOOKUP_KEY_COLUMN; static final String LOOKUP_LOCK_COLUMN; static final String LOOKUP_STATUS_COLUMN; }
|
// Verbatim columns carry the "v_" prefix in front of the term's simple name.
@Test public void testGetVerbatimColumn() { assertEquals("v_basisOfRecord", Columns.verbatimColumn(DwcTerm.basisOfRecord)); }
|
// Duplicate context records for Columns.verbatimColumn(Term) belonging to the
// testGetVerbatimColumn record above — generated dataset repetition.
public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); }
|
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } }
|
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } }
|
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } static String column(Term term); static String verbatimColumn(Term term); }
|
Columns { public static String verbatimColumn(Term term) { if (term instanceof GbifInternalTerm) { throw new IllegalArgumentException( "Internal terms (like the tried [" + term.simpleName() + "]) do not exist as verbatim columns"); } return column(term, VERBATIM_TERM_PREFIX); } static String column(Term term); static String verbatimColumn(Term term); static final String OCCURRENCE_COLUMN_FAMILY; static final byte[] CF; static final String COUNTER_COLUMN; static final String LOOKUP_KEY_COLUMN; static final String LOOKUP_LOCK_COLUMN; static final String LOOKUP_STATUS_COLUMN; }
|
// Ensures non-ASCII characters (German umlaut in the collector name) survive ABCD 1.x
// fragment parsing when the fixture is read as UTF-8.
@Test public void testUtf8a() throws IOException { String xml = Resources.toString( Resources.getResource("id_extraction/abcd1_umlaut.xml"), StandardCharsets.UTF_8); RawXmlOccurrence rawRecord = createFakeOcc(xml); List<RawOccurrenceRecord> results = XmlFragmentParser.parseRecord(rawRecord); assertEquals(1, results.size()); assertEquals("Oschütz", results.get(0).getCollectorName()); }
|
public static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord) {
  // Convenience overload: unpack the wrapper and delegate to the string-based parser.
  // Accessor order (xml first, then schema type) matches the original left-to-right
  // argument evaluation.
  final String xml = xmlRecord.getXml();
  final OccurrenceSchemaType schemaType = xmlRecord.getSchemaType();
  return parseRecord(xml, schemaType);
}
|
// Duplicate class-wrapped context records for XmlFragmentParser.parseRecord(RawXmlOccurrence).
// The later variants additionally list the private constructor (utility class) and the full
// set of overload signatures, split across lines by the record format — generated dataset
// repetition, kept verbatim.
XmlFragmentParser { public static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord) { return parseRecord(xmlRecord.getXml(), xmlRecord.getSchemaType()); } }
|
XmlFragmentParser { public static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord) { return parseRecord(xmlRecord.getXml(), xmlRecord.getSchemaType()); } private XmlFragmentParser(); }
|
XmlFragmentParser { public static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord) { return parseRecord(xmlRecord.getXml(), xmlRecord.getSchemaType()); } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
XmlFragmentParser { public static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord) { return parseRecord(xmlRecord.getXml(), xmlRecord.getSchemaType()); } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
// A single ABCD 1.2 unit with institution/collection/catalogue codes should yield exactly
// one unique identifier: the holy triplet keyed via OccurrenceKeyHelper.
@Test public void testIdExtractionSimple() throws IOException { String xml = Resources.toString( Resources.getResource("id_extraction/abcd1_simple.xml"), StandardCharsets.UTF_8); UUID datasetKey = UUID.randomUUID(); Triplet target = new Triplet( datasetKey, "TLMF", "Tiroler Landesmuseum Ferdinandeum", "82D45C93-B297-490E-B7B0-E0A9BEED1326", null); byte[] xmlBytes = xml.getBytes(StandardCharsets.UTF_8); Set<IdentifierExtractionResult> extractionResults = XmlFragmentParser.extractIdentifiers( datasetKey, xmlBytes, OccurrenceSchemaType.ABCD_1_2, true, true); Set<UniqueIdentifier> ids = extractionResults.iterator().next().getUniqueIdentifiers(); assertEquals(1, ids.size()); UniqueIdentifier id = ids.iterator().next(); assertEquals(datasetKey, id.getDatasetKey()); assertEquals(OccurrenceKeyHelper.buildKey(target), id.getUniqueString()); }
|
// Extracts unique identifiers from an XML fragment: optionally the "holy triplet"
// (institution/collection/catalogue + unit qualifier) and optionally publisher-provided
// occurrence IDs. Invalid triplets are logged and skipped rather than failing the record;
// records that yield no identifiers at all are silently dropped from the result.
// NOTE(review): identifier types 1 and 7 are treated as publisher-provided IDs — the meaning
// of those numeric codes is not visible here; confirm against IdentifierRecord's constants.
public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; }
|
// Duplicate class-wrapped context records for XmlFragmentParser.extractIdentifiers — the
// same method body repeated with progressively fuller class signatures (private ctor, all
// overloads), split across lines by the record format. Generated dataset repetition, kept
// verbatim.
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
// Parses a canned alias-lookup response fixture and expects the two index names (the JSON
// object's top-level keys) to be returned.
@Test public void parseIndexesTest() { String path = "/responses/alias-indexes.json"; Set<String> indexes = HttpResponseParser.parseIndexesInAliasResponse(getEntityFromResponse(path)); assertEquals(2, indexes.size()); assertTrue(indexes.contains("idx1")); assertTrue(indexes.contains("idx2")); }
|
static Set<String> parseIndexesInAliasResponse(HttpEntity entity) {
  // The alias-lookup response body maps index name -> alias metadata; callers only need
  // the index names, i.e. the top-level keys of the parsed body.
  final Set<String> indexNames = JsonHandler.readValue(entity).keySet();
  return indexNames;
}
|
// Duplicate class-wrapped context records for HttpResponseParser.parseIndexesInAliasResponse
// — generated dataset repetition, kept verbatim.
HttpResponseParser { static Set<String> parseIndexesInAliasResponse(HttpEntity entity) { return JsonHandler.readValue(entity).keySet(); } }
|
HttpResponseParser { static Set<String> parseIndexesInAliasResponse(HttpEntity entity) { return JsonHandler.readValue(entity).keySet(); } }
|
HttpResponseParser { static Set<String> parseIndexesInAliasResponse(HttpEntity entity) { return JsonHandler.readValue(entity).keySet(); } }
|
HttpResponseParser { static Set<String> parseIndexesInAliasResponse(HttpEntity entity) { return JsonHandler.readValue(entity).keySet(); } }
|
// An ABCD 2.0.6 fragment containing two units sharing the same catalogue number but with
// different unit qualifiers (scientific names) must yield two separate extraction results,
// each keyed by its own triplet.
@Test public void testIdExtractionMultipleIdsUnitQualifier() throws IOException { String xml = Resources.toString( Resources.getResource("id_extraction/abcd2_multi.xml"), StandardCharsets.UTF_8); UUID datasetKey = UUID.randomUUID(); byte[] xmlBytes = xml.getBytes(StandardCharsets.UTF_8); Set<IdentifierExtractionResult> extractionResults = XmlFragmentParser.extractIdentifiers( datasetKey, xmlBytes, OccurrenceSchemaType.ABCD_2_0_6, true, true); Triplet triplet1 = new Triplet( datasetKey, "BGBM", "Bridel Herbar", "Bridel-1-428", "Grimmia alpicola Sw. ex Hedw."); Triplet triplet2 = new Triplet( datasetKey, "BGBM", "Bridel Herbar", "Bridel-1-428", "Schistidium agassizii Sull. & Lesq. in Sull."); assertEquals(2, extractionResults.size()); for (IdentifierExtractionResult result : extractionResults) { String uniqueId = result.getUniqueIdentifiers().iterator().next().getUniqueString(); assertTrue( uniqueId.equals(OccurrenceKeyHelper.buildKey(triplet1)) || uniqueId.equals(OccurrenceKeyHelper.buildKey(triplet2))); } }
|
public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
@Test public void testIdExtractionWithTripletAndDwcOccurrenceId() throws IOException { String xml = Resources.toString( Resources.getResource("id_extraction/triplet_and_dwc_id.xml"), StandardCharsets.UTF_8); UUID datasetKey = UUID.randomUUID(); byte[] xmlBytes = xml.getBytes(StandardCharsets.UTF_8); Set<IdentifierExtractionResult> extractionResults = XmlFragmentParser.extractIdentifiers( datasetKey, xmlBytes, OccurrenceSchemaType.DWC_1_4, true, true); Set<UniqueIdentifier> ids = extractionResults.iterator().next().getUniqueIdentifiers(); PublisherProvidedUniqueIdentifier pubProvided = new PublisherProvidedUniqueIdentifier(datasetKey, "UGENT:vertebrata:50058"); Triplet triplet = new Triplet(datasetKey, "UGENT", "vertebrata", "50058", null); assertEquals(2, ids.size()); for (UniqueIdentifier id : ids) { assertTrue( id.getUniqueString().equals(OccurrenceKeyHelper.buildKey(triplet)) || id.getUniqueString().equals(OccurrenceKeyHelper.buildKey(pubProvided))); } }
|
public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
@Test public void testIdExtractTapir() throws IOException { String xml = Resources.toString( Resources.getResource("id_extraction/tapir_triplet_contains_unrecorded.xml"), StandardCharsets.UTF_8); byte[] xmlBytes = xml.getBytes(StandardCharsets.UTF_8); Set<IdentifierExtractionResult> extractionResults = XmlFragmentParser.extractIdentifiers( UUID.randomUUID(), xmlBytes, OccurrenceSchemaType.DWC_1_4, true, true); assertFalse(extractionResults.isEmpty()); }
|
public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
@Test public void testIdExtractManisBlankCC() throws IOException { String xml = Resources.toString( Resources.getResource("id_extraction/manis_no_cc.xml"), StandardCharsets.UTF_8); byte[] xmlBytes = xml.getBytes(StandardCharsets.UTF_8); Set<IdentifierExtractionResult> extractionResults = XmlFragmentParser.extractIdentifiers( UUID.randomUUID(), xmlBytes, OccurrenceSchemaType.DWC_MANIS, true, true); assertTrue(extractionResults.isEmpty()); }
|
public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
XmlFragmentParser { public static Set<IdentifierExtractionResult> extractIdentifiers( UUID datasetKey, byte[] xml, OccurrenceSchemaType schemaType, boolean useTriplet, boolean useOccurrenceId) { Set<IdentifierExtractionResult> results = Sets.newHashSet(); List<RawOccurrenceRecord> records = parseRecord(xml, schemaType); if (records != null && !records.isEmpty()) { for (RawOccurrenceRecord record : records) { Set<UniqueIdentifier> ids = Sets.newHashSet(); if (useTriplet) { Triplet triplet = null; try { triplet = new Triplet( datasetKey, record.getInstitutionCode(), record.getCollectionCode(), record.getCatalogueNumber(), record.getUnitQualifier()); } catch (IllegalArgumentException e) { log.info( "No holy triplet for an xml snippet in dataset [{}] and schema [{}], got error [{}]", datasetKey.toString(), schemaType.toString(), e.getMessage()); } if (triplet != null) { ids.add(triplet); } } if (useOccurrenceId && record.getIdentifierRecords() != null && !record.getIdentifierRecords().isEmpty()) { for (IdentifierRecord idRecord : record.getIdentifierRecords()) { if ((idRecord.getIdentifierType() == 1 || idRecord.getIdentifierType() == 7) && idRecord.getIdentifier() != null) { ids.add(new PublisherProvidedUniqueIdentifier(datasetKey, idRecord.getIdentifier())); } } } if (!ids.isEmpty()) { results.add(new IdentifierExtractionResult(ids, record.getUnitQualifier())); } } } return results; } private XmlFragmentParser(); static List<RawOccurrenceRecord> parseRecord(RawXmlOccurrence xmlRecord); static List<RawOccurrenceRecord> parseRecord(String xml, OccurrenceSchemaType schemaType); static List<RawOccurrenceRecord> parseRecord(byte[] xml, OccurrenceSchemaType schemaType); static RawOccurrenceRecord parseRecord(
byte[] xml, OccurrenceSchemaType schemaType, String unitQualifier); static Set<IdentifierExtractionResult> extractIdentifiers(
UUID datasetKey,
byte[] xml,
OccurrenceSchemaType schemaType,
boolean useTriplet,
boolean useOccurrenceId); }
|
@Test public void testAbcd1() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/abcd1.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.ABCD_1_2, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test public void testAbcd2() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/abcd2.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.ABCD_2_0_6, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test public void testDwc1_0() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/dwc_1_0.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.DWC_1_0, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test public void testDwc1_4() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/dwc_1_4.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.DWC_1_4, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test public void testTapirDwc1_4() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/tapir_dwc_1_4_contains_unrecorded.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.DWC_1_4, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test public void testTapirDwc1_4_2() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/tapir_dwc_1_4_s2.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.DWC_1_4, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test(expected = IllegalArgumentException.class) public void swapIndexInAliasNullAliasTest() { EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), null, "index_1"); thrown.expectMessage("aliases are required"); }
|
public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
@Test public void testDwcManis() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/dwc_manis.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.DWC_MANIS, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test public void testDwc2009() throws IOException { String xml = Resources.toString( Resources.getResource("response_schema/dwc_2009.xml"), StandardCharsets.UTF_8); OccurrenceSchemaType result = detector.detectSchema(xml); assertEquals(OccurrenceSchemaType.DWC_2009, result); }
|
public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
ResponseSchemaDetector { public OccurrenceSchemaType detectSchema(String xml) { OccurrenceSchemaType result = null; for (OccurrenceSchemaType schema : schemaSearchOrder) { log.debug("Checking for schema [{}]", schema); boolean success = checkElements(xml, distinctiveElements.get(schema).values()); if (success) { result = schema; break; } } if (result == null) { log.warn("Could not determine schema for xml [{}]", xml); } return result; } ResponseSchemaDetector(); OccurrenceSchemaType detectSchema(String xml); Map<ResponseElementEnum, String> getResponseElements(OccurrenceSchemaType schemaType); }
|
@Test public void skipFunctionTest() { final Map<String, ExtendedRecord> input = createErMap("1_1", "2_2", "3_3", "4_4"); final Map<String, BasicRecord> expected = createBrIdMap("1_1", "2_2", "3_3", "4_4"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder() .erMap(input) .basicTransform(basicTransform) .skipTransform(true) .build() .run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(expected.size(), brMap.size()); Assert.assertEquals(0, brInvalidMap.size()); assertMap(expected, brMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test public void withoutDuplicatesTest() { final Map<String, ExtendedRecord> input = createErMap("1_1", "2_2", "3_3", "4_4", "5_5", "6_6"); final Map<String, BasicRecord> expected = createBrGbifIdMap("1_1", "2_2", "3_3", "4_4", "5_5", "6_6"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder().erMap(input).basicTransform(basicTransform).build().run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(expected.size(), brMap.size()); Assert.assertEquals(0, brInvalidMap.size()); assertMap(expected, brMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test public void allDuplicatesTest() { final Map<String, ExtendedRecord> input = createErMap("1_1", "2_1", "3_1", "4_1", "5_1", "6_1"); final Map<String, BasicRecord> expectedNormal = createBrGbifIdMap("4_1"); final Map<String, BasicRecord> expectedInvalid = createBrIdMap("1_1", "2_1", "3_1", "5_1", "6_1"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder().erMap(input).basicTransform(basicTransform).build().run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(expectedNormal.size(), brMap.size()); Assert.assertEquals(expectedInvalid.size(), brInvalidMap.size()); assertMap(expectedNormal, brMap); assertMap(expectedInvalid, brInvalidMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test public void noGbifIdTest() { final Map<String, ExtendedRecord> input = createErMap("1", "2", "3", "4", "5", "6"); final Map<String, BasicRecord> expectedInvalid = createBrIdMap("1", "2", "3", "4", "5", "6"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder().erMap(input).basicTransform(basicTransform).build().run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(0, brMap.size()); Assert.assertEquals(expectedInvalid.size(), brInvalidMap.size()); assertMap(expectedInvalid, brInvalidMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test public void oneValueTest() { final Map<String, ExtendedRecord> input = createErMap("1_1"); final Map<String, BasicRecord> expectedNormal = createBrGbifIdMap("1_1"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder().erMap(input).basicTransform(basicTransform).build().run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(expectedNormal.size(), brMap.size()); Assert.assertEquals(0, brInvalidMap.size()); assertMap(expectedNormal, brMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test public void oneWithoutGbifIdValueTest() { final Map<String, ExtendedRecord> input = createErMap("1"); final Map<String, BasicRecord> expectedInvalid = createBrIdMap("1"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder().erMap(input).basicTransform(basicTransform).build().run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(0, brMap.size()); Assert.assertEquals(expectedInvalid.size(), brInvalidMap.size()); assertMap(expectedInvalid, brInvalidMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test public void mixedValuesSyncTest() { final Map<String, ExtendedRecord> input = createErMap("1", "2_2", "3_3", "4_1", "5", "6_6"); final Map<String, BasicRecord> expectedNormal = createBrGbifIdMap("2_2", "3_3", "4_1", "6_6"); final Map<String, BasicRecord> expectedInvalid = createBrIdMap("1", "5"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder() .erMap(input) .basicTransform(basicTransform) .useSyncMode(true) .build() .run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(expectedNormal.size(), brMap.size()); Assert.assertEquals(expectedInvalid.size(), brInvalidMap.size()); assertMap(expectedNormal, brMap); assertMap(expectedInvalid, brInvalidMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test public void mixedValuesAsyncTest() { final Map<String, ExtendedRecord> input = createErMap("1", "2_2", "3_3", "4_1", "5", "6_6"); final Map<String, BasicRecord> expectedNormal = createBrGbifIdMap("2_2", "3_3", "4_1", "6_6"); final Map<String, BasicRecord> expectedInvalid = createBrIdMap("1", "5"); UniqueGbifIdTransform gbifIdTransform = UniqueGbifIdTransform.builder() .erMap(input) .basicTransform(basicTransform) .useSyncMode(false) .build() .run(); Map<String, BasicRecord> brMap = gbifIdTransform.getBrMap(); Map<String, BasicRecord> brInvalidMap = gbifIdTransform.getBrInvalidMap(); Assert.assertEquals(expectedNormal.size(), brMap.size()); Assert.assertEquals(expectedInvalid.size(), brInvalidMap.size()); assertMap(expectedNormal, brMap); assertMap(expectedInvalid, brInvalidMap); }
|
public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
UniqueGbifIdTransform { public UniqueGbifIdTransform run() { return useSyncMode ? runSync() : runAsync(); } UniqueGbifIdTransform run(); }
|
@Test(expected = IllegalArgumentException.class) public void swapIndexInAliasEmptyAliasTest() { EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton(""), "index_1"); thrown.expectMessage("aliases are required"); }
|
public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
@Test public void extensionContainsOccurrenceTest() { String id = "1"; String somethingCore = "somethingCore"; String somethingExt = "somethingExt"; Map<String, String> ext1 = new HashMap<>(2); ext1.put(DwcTerm.occurrenceID.qualifiedName(), id); ext1.put(somethingExt, somethingExt); Map<String, String> ext2 = new HashMap<>(2); ext2.put(DwcTerm.occurrenceID.qualifiedName(), id); ext2.put(somethingExt, somethingExt); Map<String, String> ext3 = new HashMap<>(2); ext3.put(DwcTerm.occurrenceID.qualifiedName(), id); ext3.put(somethingExt, somethingExt); ExtendedRecord er = ExtendedRecord.newBuilder() .setId(id) .setCoreTerms(Collections.singletonMap(somethingCore, somethingCore)) .setExtensions( Collections.singletonMap( Occurrence.qualifiedName(), Arrays.asList(ext1, ext2, ext3))) .build(); final List<ExtendedRecord> expected = createCollection( false, false, id + "_" + somethingCore + "_" + somethingExt, id + "_" + somethingCore + "_" + somethingExt, id + "_" + somethingCore + "_" + somethingExt); PCollection<ExtendedRecord> result = p.apply(Create.of(er)).apply(OccurrenceExtensionTransform.create()); PAssert.that(result).containsInAnyOrder(expected); p.run(); }
|
public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(); void setCounterFn(SerializableConsumer<String> counterFn); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); void convert(ExtendedRecord er, Consumer<ExtendedRecord> resultConsumer); }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(); void setCounterFn(SerializableConsumer<String> counterFn); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); void convert(ExtendedRecord er, Consumer<ExtendedRecord> resultConsumer); }
|
@Test public void occurrenceExtensionIsEmptyTest() { String id = "1"; String somethingCore = "somethingCore"; Map<String, String> ext = new HashMap<>(2); ext.put(DwcTerm.occurrenceID.qualifiedName(), id); ext.put(somethingCore, somethingCore); ExtendedRecord er = ExtendedRecord.newBuilder() .setId(id) .setCoreTerms(ext) .setExtensions( Collections.singletonMap(Occurrence.qualifiedName(), Collections.emptyList())) .build(); final List<ExtendedRecord> expected = createCollection(true, false, id + "_" + somethingCore); PCollection<ExtendedRecord> result = p.apply(Create.of(er)).apply(OccurrenceExtensionTransform.create()); PAssert.that(result).containsInAnyOrder(expected); p.run(); }
|
public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(); void setCounterFn(SerializableConsumer<String> counterFn); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); void convert(ExtendedRecord er, Consumer<ExtendedRecord> resultConsumer); }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(); void setCounterFn(SerializableConsumer<String> counterFn); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); void convert(ExtendedRecord er, Consumer<ExtendedRecord> resultConsumer); }
|
@Test public void noOccurrenceExtensionTest() { String id = "1"; String somethingCore = "somethingCore"; Map<String, String> ext = new HashMap<>(2); ext.put(DwcTerm.occurrenceID.qualifiedName(), id); ext.put(somethingCore, somethingCore); ExtendedRecord er = ExtendedRecord.newBuilder().setId(id).setCoreTerms(ext).build(); final List<ExtendedRecord> expected = createCollection(false, false, id + "_" + somethingCore); PCollection<ExtendedRecord> result = p.apply(Create.of(er)).apply(OccurrenceExtensionTransform.create()); PAssert.that(result).containsInAnyOrder(expected); p.run(); }
|
public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(); void setCounterFn(SerializableConsumer<String> counterFn); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); void convert(ExtendedRecord er, Consumer<ExtendedRecord> resultConsumer); }
|
OccurrenceExtensionTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create() { return ParDo.of(new OccurrenceExtensionTransform()); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(); void setCounterFn(SerializableConsumer<String> counterFn); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); void convert(ExtendedRecord er, Consumer<ExtendedRecord> resultConsumer); }
|
@Test public void checkRecordTypeAllValueTest() { Set<String> set = Collections.singleton(RecordType.ALL.name()); boolean result = CheckTransforms.checkRecordType(set, RecordType.BASIC, RecordType.AUDUBON); Assert.assertTrue(result); }
|
public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
@Test public void checkRecordTypeMatchValueTest() { Set<String> set = Collections.singleton(RecordType.BASIC.name()); boolean result = CheckTransforms.checkRecordType(set, RecordType.BASIC, RecordType.AUDUBON); Assert.assertTrue(result); }
|
public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
@Test public void checkRecordTypeMatchManyValueTest() { Set<String> set = new HashSet<>(); set.add(RecordType.BASIC.name()); set.add(RecordType.AUDUBON.name()); boolean result = CheckTransforms.checkRecordType(set, RecordType.BASIC, RecordType.AUDUBON); Assert.assertTrue(result); }
|
public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
@Test public void checkRecordTypeMismatchOneValueTest() { Set<String> set = Collections.singleton(RecordType.AMPLIFICATION.name()); boolean result = CheckTransforms.checkRecordType(set, RecordType.BASIC, RecordType.AUDUBON); Assert.assertFalse(result); }
|
public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
@Test public void checkRecordTypeMismatchManyValueTest() { Set<String> set = new HashSet<>(); set.add(RecordType.AMPLIFICATION.name()); set.add(RecordType.IMAGE.name()); boolean result = CheckTransforms.checkRecordType(set, RecordType.BASIC, RecordType.AUDUBON); Assert.assertFalse(result); }
|
public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
CheckTransforms extends PTransform<PCollection<T>, PCollection<T>> { public static boolean checkRecordType(Set<String> types, InterpretationType... type) { boolean matchType = Arrays.stream(type).anyMatch(x -> types.contains(x.name())); boolean all = Arrays.stream(type).anyMatch(x -> types.contains(x.all())); return all || matchType; } @Override PCollection<T> expand(PCollection<T> input); static boolean checkRecordType(Set<String> types, InterpretationType... type); }
|
@Test public void hashIdTest() { final String datasetId = "f349d447-1c92-4637-ab32-8ae559497032"; final List<ExtendedRecord> input = createCollection("0001_1", "0002_2", "0003_3"); final List<ExtendedRecord> expected = createCollection( "20d8ab138ab4c919cbf32f5d9e667812077a0ee4_1", "1122dc31ba32e386e3a36719699fdb5fb1d2912f_2", "f2b1c436ad680263d74bf1498bf7433d9bb4b31a_3"); PCollection<ExtendedRecord> result = p.apply(Create.of(input)).apply(HashIdTransform.create(datasetId)); PAssert.that(result).containsInAnyOrder(expected); p.run(); }
|
public static SingleOutput<ExtendedRecord, ExtendedRecord> create(String datasetId) { return ParDo.of(new HashIdTransform(datasetId)); }
|
HashIdTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create(String datasetId) { return ParDo.of(new HashIdTransform(datasetId)); } }
|
HashIdTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create(String datasetId) { return ParDo.of(new HashIdTransform(datasetId)); } }
|
HashIdTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create(String datasetId) { return ParDo.of(new HashIdTransform(datasetId)); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(String datasetId); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); }
|
HashIdTransform extends DoFn<ExtendedRecord, ExtendedRecord> { public static SingleOutput<ExtendedRecord, ExtendedRecord> create(String datasetId) { return ParDo.of(new HashIdTransform(datasetId)); } static SingleOutput<ExtendedRecord, ExtendedRecord> create(String datasetId); @ProcessElement void processElement(@Element ExtendedRecord er, OutputReceiver<ExtendedRecord> out); }
|
@Test public void emptyLrTest() { SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStore = () -> GeocodeKvStore.create(new KeyValueTestStoreStub<>()); ExtendedRecord er = ExtendedRecord.newBuilder().setId("777").build(); MetadataRecord mdr = MetadataRecord.newBuilder().setId("777").build(); PCollectionView<MetadataRecord> metadataView = p.apply("Create test metadata", Create.of(mdr)) .apply("Convert into view", View.asSingleton()); PCollection<LocationRecord> recordCollection = p.apply(Create.of(er)) .apply( LocationTransform.builder() .geocodeKvStoreSupplier(geocodeKvStore) .metadataView(metadataView) .create() .interpret()) .apply("Cleaning Date created", ParDo.of(new RemoveDateCreated())); PAssert.that(recordCollection).empty(); p.run(); }
|
@Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } @Builder(buildMethodName = "create") private LocationTransform(
SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStoreSupplier,
PCollectionView<MetadataRecord> metadataView); }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } @Builder(buildMethodName = "create") private LocationTransform(
SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStoreSupplier,
PCollectionView<MetadataRecord> metadataView); MapElements<LocationRecord, KV<String, LocationRecord>> toKv(); LocationTransform counterFn(SerializableConsumer<String> counterFn); @Override SingleOutput<ExtendedRecord, LocationRecord> interpret(); @Setup void setup(); @Teardown void tearDown(); @Override Optional<LocationRecord> convert(ExtendedRecord source); @Override @ProcessElement void processElement(ProcessContext c); Optional<LocationRecord> processElement(ExtendedRecord source, MetadataRecord mdr); }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } @Builder(buildMethodName = "create") private LocationTransform(
SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStoreSupplier,
PCollectionView<MetadataRecord> metadataView); MapElements<LocationRecord, KV<String, LocationRecord>> toKv(); LocationTransform counterFn(SerializableConsumer<String> counterFn); @Override SingleOutput<ExtendedRecord, LocationRecord> interpret(); @Setup void setup(); @Teardown void tearDown(); @Override Optional<LocationRecord> convert(ExtendedRecord source); @Override @ProcessElement void processElement(ProcessContext c); Optional<LocationRecord> processElement(ExtendedRecord source, MetadataRecord mdr); }
|
@Test(expected = IllegalArgumentException.class) public void swapIndexInAliasNullIndexTest() { EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton("alias"), null); thrown.expectMessage("index is required"); }
|
public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
@Test public void transformationTest() { KeyValueTestStoreStub<LatLng, GeocodeResponse> kvStore = new KeyValueTestStoreStub<>(); kvStore.put(new LatLng(56.26d, 9.51d), toGeocodeResponse(Country.DENMARK)); kvStore.put(new LatLng(36.21d, 138.25d), toGeocodeResponse(Country.JAPAN)); kvStore.put(new LatLng(88.21d, -32.01d), toGeocodeResponse(null)); SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStore = () -> GeocodeKvStore.create(kvStore); final String[] denmark = { "0", Country.DENMARK.getTitle(), Country.DENMARK.getIso2LetterCode(), "EUROPE", "100.0", "110.0", "111.0", "200.0", "Ocean", "220.0", "222.0", "30.0", "0.00001", "56.26", "9.51", "Copenhagen", "GEODETIC_DATUM_ASSUMED_WGS84", "155.5", "44.5", "105.0", "5.0", "false", "DNK", "DNK.2_1", "DNK.2.14_1", null, "Denmark", "Midtjylland", "Silkeborg", null }; final String[] japan = { "1", Country.JAPAN.getTitle(), Country.JAPAN.getIso2LetterCode(), "ASIA", "100.0", "110.0", "111.0", "200.0", "Ocean", "220.0", "222.0", "30.0", "0.00001", "36.21", "138.25", "Tokyo", "GEODETIC_DATUM_ASSUMED_WGS84", "155.5", "44.5", "105.0", "5.0", "true", "JPN", "JPN.26_1", "JPN.26.40_1", null, "Japan", "Nagano", "Nagawa", null }; final String[] arctic = { "2", null, null, null, "-80.0", "-40.0", "0.0", "5.0", "Arctic Ocean", "0.0", "-1.5", "500.0", "0.01", "88.21", "-32.01", null, "GEODETIC_DATUM_ASSUMED_WGS84", "2.5", "2.5", "-60.0", "20.0", null, null, null, null, null, null, null, null, null }; final MetadataRecord mdr = MetadataRecord.newBuilder() .setId("0") .setDatasetPublishingCountry(Country.DENMARK.getIso2LetterCode()) .setDatasetKey(UUID.randomUUID().toString()) .build(); final List<ExtendedRecord> records = createExtendedRecordList(mdr, denmark, japan, arctic); final List<LocationRecord> locations = createLocationList(mdr, denmark, japan, arctic); PCollectionView<MetadataRecord> metadataView = p.apply("Create test metadata", Create.of(mdr)) .apply("Convert into view", View.asSingleton()); 
PCollection<LocationRecord> recordCollection = p.apply(Create.of(records)) .apply( LocationTransform.builder() .geocodeKvStoreSupplier(geocodeKvStore) .metadataView(metadataView) .create() .interpret()) .apply("Cleaning Date created", ParDo.of(new RemoveDateCreated())); PAssert.that(recordCollection).containsInAnyOrder(locations); p.run(); }
|
@Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } @Builder(buildMethodName = "create") private LocationTransform(
SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStoreSupplier,
PCollectionView<MetadataRecord> metadataView); }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } @Builder(buildMethodName = "create") private LocationTransform(
SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStoreSupplier,
PCollectionView<MetadataRecord> metadataView); MapElements<LocationRecord, KV<String, LocationRecord>> toKv(); LocationTransform counterFn(SerializableConsumer<String> counterFn); @Override SingleOutput<ExtendedRecord, LocationRecord> interpret(); @Setup void setup(); @Teardown void tearDown(); @Override Optional<LocationRecord> convert(ExtendedRecord source); @Override @ProcessElement void processElement(ProcessContext c); Optional<LocationRecord> processElement(ExtendedRecord source, MetadataRecord mdr); }
|
LocationTransform extends Transform<ExtendedRecord, LocationRecord> { @Override public SingleOutput<ExtendedRecord, LocationRecord> interpret() { return ParDo.of(this).withSideInputs(metadataView); } @Builder(buildMethodName = "create") private LocationTransform(
SerializableSupplier<KeyValueStore<LatLng, GeocodeResponse>> geocodeKvStoreSupplier,
PCollectionView<MetadataRecord> metadataView); MapElements<LocationRecord, KV<String, LocationRecord>> toKv(); LocationTransform counterFn(SerializableConsumer<String> counterFn); @Override SingleOutput<ExtendedRecord, LocationRecord> interpret(); @Setup void setup(); @Teardown void tearDown(); @Override Optional<LocationRecord> convert(ExtendedRecord source); @Override @ProcessElement void processElement(ProcessContext c); Optional<LocationRecord> processElement(ExtendedRecord source, MetadataRecord mdr); }
|
@Test public void sameLinkToObjectTest() { PipelinesConfig pc = new PipelinesConfig(); WsConfig wc = new WsConfig(); wc.setWsUrl("https: pc.setGbifApi(wc); SerializableSupplier<MetadataServiceClient> supplierOne = MetadataServiceClientFactory.getInstanceSupplier(pc); SerializableSupplier<MetadataServiceClient> supplierTwo = MetadataServiceClientFactory.getInstanceSupplier(pc); Assert.assertSame(supplierOne.get(), supplierTwo.get()); }
|
public static SerializableSupplier<MetadataServiceClient> getInstanceSupplier( PipelinesConfig config) { return () -> getInstance(config); }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> getInstanceSupplier( PipelinesConfig config) { return () -> getInstance(config); } }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> getInstanceSupplier( PipelinesConfig config) { return () -> getInstance(config); } @SneakyThrows private MetadataServiceClientFactory(PipelinesConfig config); }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> getInstanceSupplier( PipelinesConfig config) { return () -> getInstance(config); } @SneakyThrows private MetadataServiceClientFactory(PipelinesConfig config); static MetadataServiceClient getInstance(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> getInstanceSupplier(
PipelinesConfig config); }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> getInstanceSupplier( PipelinesConfig config) { return () -> getInstance(config); } @SneakyThrows private MetadataServiceClientFactory(PipelinesConfig config); static MetadataServiceClient getInstance(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> getInstanceSupplier(
PipelinesConfig config); }
|
@Test public void newObjectTest() { PipelinesConfig pc = new PipelinesConfig(); WsConfig wc = new WsConfig(); wc.setWsUrl("https: pc.setGbifApi(wc); SerializableSupplier<MetadataServiceClient> supplierOne = MetadataServiceClientFactory.createSupplier(pc); SerializableSupplier<MetadataServiceClient> supplierTwo = MetadataServiceClientFactory.createSupplier(pc); Assert.assertNotSame(supplierOne.get(), supplierTwo.get()); }
|
public static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config) { return () -> new MetadataServiceClientFactory(config).client; }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config) { return () -> new MetadataServiceClientFactory(config).client; } }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config) { return () -> new MetadataServiceClientFactory(config).client; } @SneakyThrows private MetadataServiceClientFactory(PipelinesConfig config); }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config) { return () -> new MetadataServiceClientFactory(config).client; } @SneakyThrows private MetadataServiceClientFactory(PipelinesConfig config); static MetadataServiceClient getInstance(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> getInstanceSupplier(
PipelinesConfig config); }
|
MetadataServiceClientFactory { public static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config) { return () -> new MetadataServiceClientFactory(config).client; } @SneakyThrows private MetadataServiceClientFactory(PipelinesConfig config); static MetadataServiceClient getInstance(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> createSupplier(PipelinesConfig config); static SerializableSupplier<MetadataServiceClient> getInstanceSupplier(
PipelinesConfig config); }
|
@Test public void locationFeaturesInterpreterTest() { LocationRecord locationRecord = LocationRecord.newBuilder().setId("777").build(); LocationFeatureRecord record = LocationFeatureRecord.newBuilder().setId("777").build(); KeyValueStore<LatLng, String> kvStore = new KeyValueStore<LatLng, String>() { @Override public String get(LatLng latLng) { return "{\"layers: \"{\"cb1\":\"1\",\"cb2\":\"2\",\"cb3\":\"3\"}}"; } @Override public void close() { } }; Map<String, String> resultMap = new HashMap<>(); resultMap.put("cb1", "1"); resultMap.put("cb2", "2"); resultMap.put("cb3", "3"); LocationFeatureRecord result = LocationFeatureRecord.newBuilder().setId("777").setItems(resultMap).build(); LocationFeatureInterpreter.interpret(kvStore).accept(locationRecord, record); Assert.assertEquals(result, record); }
|
public static BiConsumer<LocationRecord, LocationFeatureRecord> interpret( KeyValueStore<LatLng, String> kvStore) { return (lr, asr) -> { if (kvStore != null) { try { String json = kvStore.get(new LatLng(lr.getDecimalLatitude(), lr.getDecimalLongitude())); if (!Strings.isNullOrEmpty(json)) { json = json.substring(11, json.length() - 1); ObjectMapper objectMapper = new ObjectMapper(); Map<String, String> map = objectMapper.readValue(json, new TypeReference<HashMap<String, String>>() {}); asr.setItems(map); } } catch (NoSuchElementException | NullPointerException | IOException ex) { log.error(ex.getMessage(), ex); } } }; }
|
LocationFeatureInterpreter { public static BiConsumer<LocationRecord, LocationFeatureRecord> interpret( KeyValueStore<LatLng, String> kvStore) { return (lr, asr) -> { if (kvStore != null) { try { String json = kvStore.get(new LatLng(lr.getDecimalLatitude(), lr.getDecimalLongitude())); if (!Strings.isNullOrEmpty(json)) { json = json.substring(11, json.length() - 1); ObjectMapper objectMapper = new ObjectMapper(); Map<String, String> map = objectMapper.readValue(json, new TypeReference<HashMap<String, String>>() {}); asr.setItems(map); } } catch (NoSuchElementException | NullPointerException | IOException ex) { log.error(ex.getMessage(), ex); } } }; } }
|
LocationFeatureInterpreter { public static BiConsumer<LocationRecord, LocationFeatureRecord> interpret( KeyValueStore<LatLng, String> kvStore) { return (lr, asr) -> { if (kvStore != null) { try { String json = kvStore.get(new LatLng(lr.getDecimalLatitude(), lr.getDecimalLongitude())); if (!Strings.isNullOrEmpty(json)) { json = json.substring(11, json.length() - 1); ObjectMapper objectMapper = new ObjectMapper(); Map<String, String> map = objectMapper.readValue(json, new TypeReference<HashMap<String, String>>() {}); asr.setItems(map); } } catch (NoSuchElementException | NullPointerException | IOException ex) { log.error(ex.getMessage(), ex); } } }; } }
|
LocationFeatureInterpreter { public static BiConsumer<LocationRecord, LocationFeatureRecord> interpret( KeyValueStore<LatLng, String> kvStore) { return (lr, asr) -> { if (kvStore != null) { try { String json = kvStore.get(new LatLng(lr.getDecimalLatitude(), lr.getDecimalLongitude())); if (!Strings.isNullOrEmpty(json)) { json = json.substring(11, json.length() - 1); ObjectMapper objectMapper = new ObjectMapper(); Map<String, String> map = objectMapper.readValue(json, new TypeReference<HashMap<String, String>>() {}); asr.setItems(map); } } catch (NoSuchElementException | NullPointerException | IOException ex) { log.error(ex.getMessage(), ex); } } }; } static BiConsumer<LocationRecord, LocationFeatureRecord> interpret(
KeyValueStore<LatLng, String> kvStore); }
|
LocationFeatureInterpreter { public static BiConsumer<LocationRecord, LocationFeatureRecord> interpret( KeyValueStore<LatLng, String> kvStore) { return (lr, asr) -> { if (kvStore != null) { try { String json = kvStore.get(new LatLng(lr.getDecimalLatitude(), lr.getDecimalLongitude())); if (!Strings.isNullOrEmpty(json)) { json = json.substring(11, json.length() - 1); ObjectMapper objectMapper = new ObjectMapper(); Map<String, String> map = objectMapper.readValue(json, new TypeReference<HashMap<String, String>>() {}); asr.setItems(map); } } catch (NoSuchElementException | NullPointerException | IOException ex) { log.error(ex.getMessage(), ex); } } }; } static BiConsumer<LocationRecord, LocationFeatureRecord> interpret(
KeyValueStore<LatLng, String> kvStore); }
|
@Test public void imageTest() { Map<String, String> ext1 = new HashMap<>(); ext1.put(DcTerm.identifier.qualifiedName(), "http: ext1.put(DcTerm.references.qualifiedName(), "http: ext1.put(DcTerm.created.qualifiedName(), "2010"); ext1.put(DcTerm.title.qualifiedName(), "Tt1"); ext1.put(DcTerm.description.qualifiedName(), "Desc1"); ext1.put(DcTerm.spatial.qualifiedName(), "Sp1"); ext1.put(DcTerm.format.qualifiedName(), "jpeg"); ext1.put(DcTerm.creator.qualifiedName(), "Cr1"); ext1.put(DcTerm.contributor.qualifiedName(), "Cont1"); ext1.put(DcTerm.publisher.qualifiedName(), "Pub1"); ext1.put(DcTerm.audience.qualifiedName(), "Aud1"); ext1.put(DcTerm.license.qualifiedName(), "Lic1"); ext1.put(DcTerm.rightsHolder.qualifiedName(), "Rh1"); ext1.put(DwcTerm.datasetID.qualifiedName(), "1"); ext1.put("http: ext1.put("http: Map<String, String> ext2 = new HashMap<>(); ext2.put(DcTerm.identifier.qualifiedName(), "http: ext2.put(DcTerm.references.qualifiedName(), "http: ext2.put(DcTerm.created.qualifiedName(), "2010/12/12"); ext2.put(DcTerm.title.qualifiedName(), "Tt2"); ext2.put(DcTerm.description.qualifiedName(), "Desc2"); ext2.put(DcTerm.spatial.qualifiedName(), "Sp2"); ext2.put(DcTerm.format.qualifiedName(), "jpeg"); ext2.put(DcTerm.creator.qualifiedName(), "Cr2"); ext2.put(DcTerm.contributor.qualifiedName(), "Cont2"); ext2.put(DcTerm.publisher.qualifiedName(), "Pub2"); ext2.put(DcTerm.audience.qualifiedName(), "Aud2"); ext2.put(DcTerm.license.qualifiedName(), "Lic2"); ext2.put(DcTerm.rightsHolder.qualifiedName(), "Rh2"); ext2.put(DwcTerm.datasetID.qualifiedName(), "1"); ext2.put("http: ext2.put("http: Map<String, String> ext3 = new HashMap<>(); ext3.put(DcTerm.created.qualifiedName(), "not a date"); Map<String, List<Map<String, String>>> ext = new HashMap<>(); ext.put(Extension.IMAGE.getRowType(), Arrays.asList(ext1, ext2, ext3)); ExtendedRecord record = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); String result = "{\"id\": \"id\", \"created\": 0, 
\"imageItems\": [{\"identifier\": \"http: + "\"http: + "\"latitude\": 60.4, \"longitude\": -131.3, \"format\": \"jpeg\", \"created\": \"2010\", \"creator\": " + "\"Cr1\", \"contributor\": \"Cont1\", \"publisher\": \"Pub1\", \"audience\": \"Aud1\", \"license\": " + "\"Lic1\", \"rightsHolder\": \"Rh1\", \"datasetId\": \"1\"}, {\"identifier\": \"http: + "\"references\": \"http: + "\"Sp2\", \"latitude\": -131.3, \"longitude\": 360.4, \"format\": \"jpeg\", \"created\": \"2010-12-12\", " + "\"creator\": \"Cr2\", \"contributor\": \"Cont2\", \"publisher\": \"Pub2\", \"audience\": \"Aud2\", \"license\": " + "\"Lic2\", \"rightsHolder\": \"Rh2\", \"datasetId\": \"1\"}], \"issues\": {\"issueList\": [" + "\"MULTIMEDIA_DATE_INVALID\", \"MULTIMEDIA_URI_INVALID\"]}}"; ImageRecord ir = ImageRecord.newBuilder().setId(record.getId()).setCreated(0L).build(); ImageInterpreter.builder().create().interpret(record, ir); Assert.assertEquals(result, ir.toString()); }
|
public void interpret(ExtendedRecord er, ImageRecord mr) { Objects.requireNonNull(er); Objects.requireNonNull(mr); Result<Image> result = handler.convert(er); mr.setImageItems(result.getList()); mr.getIssues().setIssueList(result.getIssuesAsList()); }
|
ImageInterpreter { public void interpret(ExtendedRecord er, ImageRecord mr) { Objects.requireNonNull(er); Objects.requireNonNull(mr); Result<Image> result = handler.convert(er); mr.setImageItems(result.getList()); mr.getIssues().setIssueList(result.getIssuesAsList()); } }
|
ImageInterpreter { public void interpret(ExtendedRecord er, ImageRecord mr) { Objects.requireNonNull(er); Objects.requireNonNull(mr); Result<Image> result = handler.convert(er); mr.setImageItems(result.getList()); mr.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private ImageInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
ImageInterpreter { public void interpret(ExtendedRecord er, ImageRecord mr) { Objects.requireNonNull(er); Objects.requireNonNull(mr); Result<Image> result = handler.convert(er); mr.setImageItems(result.getList()); mr.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private ImageInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, ImageRecord mr); }
|
ImageInterpreter { public void interpret(ExtendedRecord er, ImageRecord mr) { Objects.requireNonNull(er); Objects.requireNonNull(mr); Result<Image> result = handler.convert(er); mr.setImageItems(result.getList()); mr.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private ImageInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, ImageRecord mr); }
|
@Test public void measurementOrFactTest() { String expected = "{\"id\": \"id\", \"created\": 0, \"measurementOrFactItems\": [{\"id\": \"Id1\", \"type\": \"Type1\", \"value\": \"1.5\", " + "\"accuracy\": \"Accurancy1\", \"unit\": \"Unit1\", \"determinedDate\": \"2010/2011\", \"determinedBy\": " + "\"By1\", \"method\": \"Method1\", \"remarks\": \"Remarks1\", \"determinedDateParsed\": {\"gte\": \"2010\", " + "\"lte\": \"2011\"}, \"valueParsed\": 1.5}, {\"id\": \"Id2\", \"type\": \"Type2\", \"value\": \"Value2\"," + " \"accuracy\": \"Accurancy2\", \"unit\": \"Unit2\", \"determinedDate\": \"2010/12/12\", \"determinedBy\": " + "\"By2\", \"method\": \"Method2\", \"remarks\": \"Remarks2\", \"determinedDateParsed\": {\"gte\": \"2010-12-12\", " + "\"lte\": null}, \"valueParsed\": null}, {\"id\": null, \"type\": null, \"value\": \"1\", \"accuracy\": null, " + "\"unit\": null, \"determinedDate\": \"not a date\", \"determinedBy\": null, \"method\": null, \"remarks\": null, " + "\"determinedDateParsed\": {\"gte\": null, \"lte\": null}, \"valueParsed\": 1.0}], \"issues\": {\"issueList\": " + "[]}}"; Map<String, String> ext1 = new HashMap<>(); ext1.put(DwcTerm.measurementID.qualifiedName(), "Id1"); ext1.put(DwcTerm.measurementType.qualifiedName(), "Type1"); ext1.put(DwcTerm.measurementValue.qualifiedName(), "1.5"); ext1.put(DwcTerm.measurementAccuracy.qualifiedName(), "Accurancy1"); ext1.put(DwcTerm.measurementUnit.qualifiedName(), "Unit1"); ext1.put(DwcTerm.measurementDeterminedBy.qualifiedName(), "By1"); ext1.put(DwcTerm.measurementMethod.qualifiedName(), "Method1"); ext1.put(DwcTerm.measurementRemarks.qualifiedName(), "Remarks1"); ext1.put(DwcTerm.measurementDeterminedDate.qualifiedName(), "2010/2011"); Map<String, String> ext2 = new HashMap<>(); ext2.put(DwcTerm.measurementID.qualifiedName(), "Id2"); ext2.put(DwcTerm.measurementType.qualifiedName(), "Type2"); ext2.put(DwcTerm.measurementValue.qualifiedName(), "Value2"); ext2.put(DwcTerm.measurementAccuracy.qualifiedName(), 
"Accurancy2"); ext2.put(DwcTerm.measurementUnit.qualifiedName(), "Unit2"); ext2.put(DwcTerm.measurementDeterminedBy.qualifiedName(), "By2"); ext2.put(DwcTerm.measurementMethod.qualifiedName(), "Method2"); ext2.put(DwcTerm.measurementRemarks.qualifiedName(), "Remarks2"); ext2.put(DwcTerm.measurementDeterminedDate.qualifiedName(), "2010/12/12"); Map<String, String> ext3 = new HashMap<>(); ext3.put(DwcTerm.measurementValue.qualifiedName(), "1"); ext3.put(DwcTerm.measurementDeterminedDate.qualifiedName(), "not a date"); Map<String, List<Map<String, String>>> ext = new HashMap<>(); ext.put(Extension.MEASUREMENT_OR_FACT.getRowType(), Arrays.asList(ext1, ext2, ext3)); ExtendedRecord record = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); MeasurementOrFactRecord mfr = MeasurementOrFactRecord.newBuilder().setId(record.getId()).setCreated(0L).build(); MeasurementOrFactInterpreter.builder().create().interpret(record, mfr); Assert.assertEquals(expected, mfr.toString()); }
|
public void interpret(ExtendedRecord er, MeasurementOrFactRecord mfr) { Objects.requireNonNull(er); Objects.requireNonNull(mfr); Result<MeasurementOrFact> result = handler.convert(er); mfr.setMeasurementOrFactItems(result.getList()); mfr.getIssues().setIssueList(result.getIssuesAsList()); }
|
MeasurementOrFactInterpreter { public void interpret(ExtendedRecord er, MeasurementOrFactRecord mfr) { Objects.requireNonNull(er); Objects.requireNonNull(mfr); Result<MeasurementOrFact> result = handler.convert(er); mfr.setMeasurementOrFactItems(result.getList()); mfr.getIssues().setIssueList(result.getIssuesAsList()); } }
|
MeasurementOrFactInterpreter { public void interpret(ExtendedRecord er, MeasurementOrFactRecord mfr) { Objects.requireNonNull(er); Objects.requireNonNull(mfr); Result<MeasurementOrFact> result = handler.convert(er); mfr.setMeasurementOrFactItems(result.getList()); mfr.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private MeasurementOrFactInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
MeasurementOrFactInterpreter { public void interpret(ExtendedRecord er, MeasurementOrFactRecord mfr) { Objects.requireNonNull(er); Objects.requireNonNull(mfr); Result<MeasurementOrFact> result = handler.convert(er); mfr.setMeasurementOrFactItems(result.getList()); mfr.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private MeasurementOrFactInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, MeasurementOrFactRecord mfr); }
|
MeasurementOrFactInterpreter { public void interpret(ExtendedRecord er, MeasurementOrFactRecord mfr) { Objects.requireNonNull(er); Objects.requireNonNull(mfr); Result<MeasurementOrFact> result = handler.convert(er); mfr.setMeasurementOrFactItems(result.getList()); mfr.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private MeasurementOrFactInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, MeasurementOrFactRecord mfr); }
|
@Test public void audubonTest() { String expected = "{\"id\": \"id\", \"created\": null, \"audubonItems\": [{\"creator\": \"Jose Padial\", " + "\"creatorUri\": null, \"providerLiteral\": \"CM\", \"provider\": null, \"metadataCreatorLiteral\": null, " + "\"metadataCreator\": null, \"metadataProviderLiteral\": null, \"metadataProvider\": null, \"rights\": " + "\"http: + "\"rightsUri\": \"http: + "\"Carnegie Museum of Natural History Herps Collection (CM:Herps)\", \"usageTerms\": " + "\"CC0 1.0 (Public-domain)\", \"webStatement\": null, \"licenseLogoUrl\": null, \"credit\": null, " + "\"attributionLogoUrl\": null, \"attributionLinkUrl\": null, \"fundingAttribution\": null, \"source\": null, " + "\"sourceUri\": null, \"description\": \"Photo taken in 2013\", \"caption\": null, \"language\": null, " + "\"languageUri\": null, \"physicalSetting\": null, \"cvTerm\": null, \"subjectCategoryVocabulary\": null, " + "\"tag\": null, \"locationShown\": null, \"worldRegion\": null, \"countryCode\": null, \"countryName\": null, " + "\"provinceState\": null, \"city\": null, \"sublocation\": null, \"identifier\": \"1b384fd8-8559-42ba-980f-22661a4b5f75\", " + "\"type\": \"StillImage\", \"typeUri\": null, \"subtypeLiteral\": \"Photograph\", \"subtype\": null, \"title\": " + "\"AMBYSTOMA MACULATUM\", \"modified\": \"2017-08-15\", \"metadataDate\": null, \"metadataLanguageLiteral\": null, " + "\"metadataLanguage\": null, \"providerManagedId\": null, \"rating\": null, \"commenterLiteral\": null, " + "\"commenter\": null, \"comments\": null, \"reviewerLiteral\": null, \"reviewer\": null, \"reviewerComments\": null, " + "\"available\": null, \"hasServiceAccessPoint\": null, \"idOfContainingCollection\": null, \"relatedResourceId\": null, " + "\"providerId\": null, \"derivedFrom\": null, \"associatedSpecimenReference\": \"urn:catalog:CM:Herps:156879\", " + "\"associatedObservationReference\": null, \"locationCreated\": null, \"digitizationDate\": null, \"captureDevice\": null, " + 
"\"resourceCreationTechnique\": null, \"accessUri\": \"https: + "\"format\": \"image/jpeg\", \"formatUri\": null, \"variantLiteral\": null, \"variant\": null, \"variantDescription\": null, " + "\"furtherInformationUrl\": null, \"licensingException\": null, \"serviceExpectation\": \"online\", \"hashFunction\": null, " + "\"hashValue\": null, \"PixelXDimension\": null, \"PixelYDimension\": null, \"taxonCoverage\": null, \"scientificName\": null, " + "\"identificationQualifier\": null, \"vernacularName\": null, \"nameAccordingTo\": null, \"scientificNameId\": null, " + "\"otherScientificName\": null, \"identifiedBy\": null, \"dateIdentified\": null, \"taxonCount\": null, \"subjectPart\": null, " + "\"sex\": null, \"lifeStage\": null, \"subjectOrientation\": null, \"preparations\": null, \"temporal\": null, " + "\"createDate\": \"2010-12-10\", \"timeOfDay\": null}], \"issues\": {\"issueList\": []}}"; Map<String, List<Map<String, String>>> ext = new HashMap<>(1); Map<String, String> audubon = new HashMap<>(16); audubon.put("http: audubon.put("http: audubon.put("http: audubon.put("http: audubon.put("http: audubon.put("http: audubon.put("http: audubon.put("http: audubon.put( "http: audubon.put("http: audubon.put("http: audubon.put("http: audubon.put("http: audubon.put( "http: "https: audubon.put( "http: "Carnegie Museum of Natural History Herps Collection (CM:Herps)"); audubon.put("http: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar.toString()); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test public void wrongFormatTest() { String expected = "{\"id\": \"id\", \"created\": null, \"audubonItems\": [{\"creator\": null, \"creatorUri\": null, " + "\"providerLiteral\": null, \"provider\": null, \"metadataCreatorLiteral\": null, \"metadataCreator\": null, " + "\"metadataProviderLiteral\": null, \"metadataProvider\": null, \"rights\": \"CC0 4.0\", \"rightsUri\": \"CC0 4.0\", " + "\"owner\": \"Naturalis Biodiversity Center\", \"usageTerms\": null, \"webStatement\": null, \"licenseLogoUrl\": null, " + "\"credit\": null, \"attributionLogoUrl\": null, \"attributionLinkUrl\": null, \"fundingAttribution\": null, " + "\"source\": null, \"sourceUri\": null, \"description\": null, \"caption\": \"ZMA.AVES.11080\", \"language\": null, " + "\"languageUri\": null, \"physicalSetting\": null, \"cvTerm\": null, \"subjectCategoryVocabulary\": null, \"tag\": null, " + "\"locationShown\": null, \"worldRegion\": null, \"countryCode\": null, \"countryName\": null, \"provinceState\": null, " + "\"city\": null, \"sublocation\": null, \"identifier\": \"http: + "\"type\": \"MovingImage\", \"typeUri\": null, \"subtypeLiteral\": null, \"subtype\": null, \"title\": null, \"modified\": null, " + "\"metadataDate\": null, \"metadataLanguageLiteral\": null, \"metadataLanguage\": null, \"providerManagedId\": null, " + "\"rating\": null, \"commenterLiteral\": null, \"commenter\": null, \"comments\": null, \"reviewerLiteral\": null, " + "\"reviewer\": null, \"reviewerComments\": null, \"available\": null, \"hasServiceAccessPoint\": null, " + "\"idOfContainingCollection\": null, \"relatedResourceId\": null, \"providerId\": null, \"derivedFrom\": null, " + "\"associatedSpecimenReference\": null, \"associatedObservationReference\": null, \"locationCreated\": null, " + "\"digitizationDate\": null, \"captureDevice\": null, \"resourceCreationTechnique\": null, \"accessUri\": " + "\"http: + "\"variantLiteral\": null, \"variant\": \"ac:GoodQuality\", \"variantDescription\": null, 
\"furtherInformationUrl\": null, " + "\"licensingException\": null, \"serviceExpectation\": null, \"hashFunction\": null, \"hashValue\": null, " + "\"PixelXDimension\": null, \"PixelYDimension\": null, \"taxonCoverage\": null, \"scientificName\": null, " + "\"identificationQualifier\": null, \"vernacularName\": null, \"nameAccordingTo\": null, \"scientificNameId\": null, " + "\"otherScientificName\": null, \"identifiedBy\": null, \"dateIdentified\": null, \"taxonCount\": null, " + "\"subjectPart\": null, \"sex\": null, \"lifeStage\": null, \"subjectOrientation\": null, \"preparations\": null, " + "\"temporal\": null, \"createDate\": null, \"timeOfDay\": null}], \"issues\": {\"issueList\": []}}"; Map<String, List<Map<String, String>>> ext = new HashMap<>(1); Map<String, String> audubon1 = new HashMap<>(8); audubon1.put("http: audubon1.put( "http: "http: audubon1.put("http: audubon1.put("http: audubon1.put("http: audubon1.put( "http: "http: audubon1.put("http: audubon1.put("http: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar.toString()); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test public void dateIssueTest() { String expected = "{\"id\": \"id\", \"created\": null, \"audubonItems\": [{\"creator\": null, \"creatorUri\": null, \"providerLiteral\": " + "null, \"provider\": null, \"metadataCreatorLiteral\": null, \"metadataCreator\": null, \"metadataProviderLiteral\": null, " + "\"metadataProvider\": null, \"rights\": null, \"rightsUri\": null, \"owner\": null, \"usageTerms\": null, " + "\"webStatement\": null, \"licenseLogoUrl\": null, \"credit\": null, \"attributionLogoUrl\": null, " + "\"attributionLinkUrl\": null, \"fundingAttribution\": null, \"source\": null, \"sourceUri\": null, \"description\": null, " + "\"caption\": null, \"language\": null, \"languageUri\": null, \"physicalSetting\": null, \"cvTerm\": null, " + "\"subjectCategoryVocabulary\": null, \"tag\": null, \"locationShown\": null, \"worldRegion\": null, \"countryCode\": null, " + "\"countryName\": null, \"provinceState\": null, \"city\": null, \"sublocation\": null, \"identifier\": null, \"type\": " + "\"StillImage\", \"typeUri\": null, \"subtypeLiteral\": null, \"subtype\": null, \"title\": null, \"modified\": null, " + "\"metadataDate\": null, \"metadataLanguageLiteral\": null, \"metadataLanguage\": null, \"providerManagedId\": null, " + "\"rating\": null, \"commenterLiteral\": null, \"commenter\": null, \"comments\": null, \"reviewerLiteral\": null, " + "\"reviewer\": null, \"reviewerComments\": null, \"available\": null, \"hasServiceAccessPoint\": null, " + "\"idOfContainingCollection\": null, \"relatedResourceId\": null, \"providerId\": null, \"derivedFrom\": null, " + "\"associatedSpecimenReference\": null, \"associatedObservationReference\": null, \"locationCreated\": null, " + "\"digitizationDate\": null, \"captureDevice\": null, \"resourceCreationTechnique\": null, \"accessUri\": null, " + "\"format\": null, \"formatUri\": null, \"variantLiteral\": null, \"variant\": null, \"variantDescription\": null, " + "\"furtherInformationUrl\": null, \"licensingException\": 
null, \"serviceExpectation\": null, \"hashFunction\": null, " + "\"hashValue\": null, \"PixelXDimension\": null, \"PixelYDimension\": null, \"taxonCoverage\": null, \"scientificName\": null, " + "\"identificationQualifier\": null, \"vernacularName\": null, \"nameAccordingTo\": null, \"scientificNameId\": null, " + "\"otherScientificName\": null, \"identifiedBy\": null, \"dateIdentified\": null, \"taxonCount\": null, \"subjectPart\": null, " + "\"sex\": null, \"lifeStage\": null, \"subjectOrientation\": null, \"preparations\": null, \"temporal\": null, " + "\"createDate\": null, \"timeOfDay\": null}], \"issues\": {\"issueList\": [\"MULTIMEDIA_DATE_INVALID\"]}}"; Map<String, List<Map<String, String>>> ext = new HashMap<>(1); Map<String, String> audubon = new HashMap<>(2); audubon.put("http: audubon.put("http: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar.toString()); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test public void swappedValuesTest() { String expected = "{\"id\": \"id\", \"created\": null, \"audubonItems\": [{\"creator\": null, \"creatorUri\": \"Jerome Fischer\", " + "\"providerLiteral\": null, \"provider\": null, \"metadataCreatorLiteral\": null, \"metadataCreator\": null," + " \"metadataProviderLiteral\": null, \"metadataProvider\": null, \"rights\": \"http: + "\"rightsUri\": \"http: + "\"webStatement\": null, \"licenseLogoUrl\": null, \"credit\": null, \"attributionLogoUrl\": null, \"attributionLinkUrl\": null," + " \"fundingAttribution\": null, \"source\": null, \"sourceUri\": null, \"description\": \"27 s\", \"caption\": null, \"language\": null, " + "\"languageUri\": null, \"physicalSetting\": null, \"cvTerm\": null, \"subjectCategoryVocabulary\": null, \"tag\": null, " + "\"locationShown\": null, \"worldRegion\": null, \"countryCode\": null, \"countryName\": null, \"provinceState\": null, " + "\"city\": null, \"sublocation\": null, \"identifier\": \"https: + "\"type\": \"Sound\", \"typeUri\": null, \"subtypeLiteral\": null, \"subtype\": null, \"title\": null, \"modified\": null, " + "\"metadataDate\": null, \"metadataLanguageLiteral\": null, \"metadataLanguage\": null, \"providerManagedId\": null, " + "\"rating\": null, \"commenterLiteral\": null, \"commenter\": null, \"comments\": null, \"reviewerLiteral\": null," + " \"reviewer\": null, \"reviewerComments\": null, \"available\": null, \"hasServiceAccessPoint\": null, \"idOfContainingCollection\": null, " + "\"relatedResourceId\": null, \"providerId\": null, \"derivedFrom\": null, \"associatedSpecimenReference\": null, " + "\"associatedObservationReference\": null, \"locationCreated\": null, \"digitizationDate\": null, \"captureDevice\": null, " + "\"resourceCreationTechnique\": \"bitrate: 320000 bps; bitrate mode: cbr; audio sampling rate: 44100 Hz; number of channels: 2; lossy\", " + "\"accessUri\": \"https: + "\"format\": \"audio/mpeg\", \"formatUri\": null, \"variantLiteral\": 
\"ac:BestQuality\", \"variant\": null, \"variantDescription\": null, " + "\"furtherInformationUrl\": null, \"licensingException\": null, \"serviceExpectation\": null, \"hashFunction\": null, \"hashValue\": null, " + "\"PixelXDimension\": null, \"PixelYDimension\": null, \"taxonCoverage\": null, \"scientificName\": null, \"identificationQualifier\": null, " + "\"vernacularName\": null, \"nameAccordingTo\": null, \"scientificNameId\": null, \"otherScientificName\": null, \"identifiedBy\": null, " + "\"dateIdentified\": null, \"taxonCount\": null, \"subjectPart\": null, \"sex\": null, \"lifeStage\": null, \"subjectOrientation\": null, " + "\"preparations\": null, \"temporal\": null, \"createDate\": null, \"timeOfDay\": null}, {\"creator\": null, \"creatorUri\": " + "\"Stichting Xeno-canto voor Natuurgeluiden\", \"providerLiteral\": null, \"provider\": null, \"metadataCreatorLiteral\": null, " + "\"metadataCreator\": null, \"metadataProviderLiteral\": null, \"metadataProvider\": null, \"rights\": \"http: + "\"rightsUri\": \"http: + "\"usageTerms\": null, \"webStatement\": null, \"licenseLogoUrl\": null, \"credit\": null, \"attributionLogoUrl\": null, " + "\"attributionLinkUrl\": null, \"fundingAttribution\": null, \"source\": null, \"sourceUri\": null, \"description\": null, " + "\"caption\": \"Sonogram of the first ten seconds of the sound recording\", \"language\": null, \"languageUri\": null, " + "\"physicalSetting\": null, \"cvTerm\": null, \"subjectCategoryVocabulary\": null, \"tag\": null, \"locationShown\": null, " + "\"worldRegion\": null, \"countryCode\": null, \"countryName\": null, \"provinceState\": null, \"city\": null, \"sublocation\": null, " + "\"identifier\": \"https: + "\"subtypeLiteral\": null, \"subtype\": null, \"title\": null, \"modified\": null, \"metadataDate\": null, " + "\"metadataLanguageLiteral\": null, \"metadataLanguage\": null, \"providerManagedId\": null, \"rating\": null, \"commenterLiteral\": null, " + "\"commenter\": null, 
\"comments\": null, \"reviewerLiteral\": null, \"reviewer\": null, \"reviewerComments\": null, \"available\": null, " + "\"hasServiceAccessPoint\": null, \"idOfContainingCollection\": null, \"relatedResourceId\": null, \"providerId\": null," + " \"derivedFrom\": null, \"associatedSpecimenReference\": null, \"associatedObservationReference\": null, \"locationCreated\": null, " + "\"digitizationDate\": null, \"captureDevice\": null, \"resourceCreationTechnique\": null, \"accessUri\": " + "\"https: + "\"variantLiteral\": \"ac:MediumQuality\", \"variant\": null, \"variantDescription\": null, \"furtherInformationUrl\": null, " + "\"licensingException\": null, \"serviceExpectation\": null, \"hashFunction\": null, \"hashValue\": null, \"PixelXDimension\": null, " + "\"PixelYDimension\": null, \"taxonCoverage\": null, \"scientificName\": null, \"identificationQualifier\": null, \"vernacularName\": null, " + "\"nameAccordingTo\": null, \"scientificNameId\": null, \"otherScientificName\": null, \"identifiedBy\": null, \"dateIdentified\": null, " + "\"taxonCount\": null, \"subjectPart\": null, \"sex\": null, \"lifeStage\": null, \"subjectOrientation\": null, \"preparations\": null," + " \"temporal\": null, \"createDate\": null, \"timeOfDay\": null}], \"issues\": {\"issueList\": []}}"; Map<String, List<Map<String, String>>> ext = new HashMap<>(2); Map<String, String> audubon1 = new HashMap<>(10); audubon1.put("http: audubon1.put("http: audubon1.put( "http: "https: audubon1.put("http: audubon1.put( "http: "bitrate: 320000 bps; bitrate mode: cbr; audio sampling rate: 44100 Hz; number of channels: 2; lossy"); audubon1.put("http: audubon1.put( "http: "https: audubon1.put("http: audubon1.put("http: audubon1.put("http: Map<String, String> audubon2 = new HashMap<>(9); audubon2.put("http: audubon2.put("http: audubon2.put( "http: "Sonogram of the first ten seconds of the sound recording"); audubon2.put( "http: "https: audubon2.put("http: audubon2.put( "http: "https: audubon2.put("http: 
audubon2.put( "http: audubon2.put("http: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar.toString()); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test(expected = IllegalArgumentException.class) public void swapIndexInAliasEmptyIndexTest() { EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton("alias"), ""); thrown.expectMessage("index is required"); }
|
public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
@Test public void licensePriorityTest() { Map<String, List<Map<String, String>>> ext = new HashMap<>(1); Map<String, String> audubon1 = new HashMap<>(2); audubon1.put( "http: "https: audubon1.put( "http: "http: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonRecord expected = AudubonRecord.newBuilder() .setId("id") .setAudubonItems( Collections.singletonList( Audubon.newBuilder() .setRights("http: .setRightsUri("http: .build())) .build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test public void licenseTest() { Map<String, List<Map<String, String>>> ext = new HashMap<>(1); Map<String, String> audubon1 = new HashMap<>(1); audubon1.put( "http: "http: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonRecord expected = AudubonRecord.newBuilder() .setId("id") .setAudubonItems( Collections.singletonList( Audubon.newBuilder() .setRights("http: .setRightsUri("http: .build())) .build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test public void licenseUriTest() { Map<String, List<Map<String, String>>> ext = new HashMap<>(1); Map<String, String> audubon1 = new HashMap<>(1); audubon1.put( "http: "https: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonRecord expected = AudubonRecord.newBuilder() .setId("id") .setAudubonItems( Collections.singletonList( Audubon.newBuilder() .setRights("http: .setRightsUri("http: .build())) .build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test public void accessUriTest() { Map<String, List<Map<String, String>>> ext = new HashMap<>(1); Map<String, String> audubon1 = new HashMap<>(4); audubon1.put( "http: "https: audubon1.put( "http: "https: audubon1.put( "http: "https: audubon1.put("http: ext.put("http: ExtendedRecord er = ExtendedRecord.newBuilder().setId("id").setExtensions(ext).build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("id").build(); AudubonRecord expected = AudubonRecord.newBuilder() .setId("id") .setAudubonItems( Collections.singletonList( Audubon.newBuilder() .setRights("http: .setRightsUri("http: .setAccessUri( "https: .setIdentifier( "https: .setFormat("image/jpeg") .setType("StillImage") .setMetadataDate("2019-07-12 06:30:57.0") .build())) .build(); AudubonInterpreter.builder().create().interpret(er, ar); Assert.assertEquals(expected, ar); }
|
public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
AudubonInterpreter { public void interpret(ExtendedRecord er, AudubonRecord ar) { Objects.requireNonNull(er); Objects.requireNonNull(ar); Result<Audubon> result = handler.convert(er); ar.setAudubonItems(result.getList()); ar.getIssues().setIssueList(result.getIssuesAsList()); } @Builder(buildMethodName = "create") private AudubonInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpret(ExtendedRecord er, AudubonRecord ar); }
|
@Test public void interpretIndividaulCountTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.individualCount.qualifiedName(), "2"); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretIndividualCount(er, br); Assert.assertEquals(Integer.valueOf(2), br.getIndividualCount()); }
|
public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
@Test public void interpretIndividaulCountNegativedTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.individualCount.qualifiedName(), "-2"); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretIndividualCount(er, br); Assert.assertNull(br.getIndividualCount()); Assert.assertTrue( br.getIssues().getIssueList().contains(OccurrenceIssue.INDIVIDUAL_COUNT_INVALID.name())); }
|
public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
@Test public void interpretIndividaulCountInvalidTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.individualCount.qualifiedName(), "2.666666667"); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretIndividualCount(er, br); Assert.assertNull(br.getIndividualCount()); Assert.assertTrue( br.getIssues().getIssueList().contains(OccurrenceIssue.INDIVIDUAL_COUNT_INVALID.name())); }
|
public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretIndividualCount(ExtendedRecord er, BasicRecord br) { Consumer<Optional<Integer>> fn = parseResult -> { if (parseResult.isPresent()) { br.setIndividualCount(parseResult.get()); } else { addIssue(br, INDIVIDUAL_COUNT_INVALID); } }; SimpleTypeParser.parsePositiveInt(er, DwcTerm.individualCount, fn); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
@Test public void interpretSampleSizeValueTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.sampleSizeValue.qualifiedName(), " value "); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretSampleSizeValue(er, br); Assert.assertNull(br.getSampleSizeValue()); }
|
public static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeValue) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setSampleSizeValue); }
|
BasicInterpreter { public static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeValue) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setSampleSizeValue); } }
|
BasicInterpreter { public static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeValue) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setSampleSizeValue); } }
|
BasicInterpreter { public static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeValue) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setSampleSizeValue); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeValue) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setSampleSizeValue); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
@Test public void interpretSampleSizeUnitTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.sampleSizeUnit.qualifiedName(), " value "); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretSampleSizeUnit(er, br); Assert.assertEquals("value", br.getSampleSizeUnit()); }
|
public static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeUnit).map(String::trim).ifPresent(br::setSampleSizeUnit); }
|
BasicInterpreter { public static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeUnit).map(String::trim).ifPresent(br::setSampleSizeUnit); } }
|
BasicInterpreter { public static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeUnit).map(String::trim).ifPresent(br::setSampleSizeUnit); } }
|
BasicInterpreter { public static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeUnit).map(String::trim).ifPresent(br::setSampleSizeUnit); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.sampleSizeUnit).map(String::trim).ifPresent(br::setSampleSizeUnit); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
// Verifies that a non-numeric dwc:organismQuantity (" value ") is rejected:
// NumberParser.parseDouble yields null for it, so organismQuantity stays unset.
@Test public void interpretOrganismQuantityTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.organismQuantity.qualifiedName(), " value "); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretOrganismQuantity(er, br); Assert.assertNull(br.getOrganismQuantity()); }
|
/**
 * Interprets dwc:organismQuantity as a double on the {@link BasicRecord}.
 *
 * <p>The raw value is trimmed and parsed; unparseable, NaN and infinite results are
 * silently skipped, leaving the field unset.
 */
public static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br) {
  extractOptValue(er, DwcTerm.organismQuantity)
      .map(raw -> raw.trim())
      .map(NumberParser::parseDouble)
      // Double.isFinite(d) == !d.isInfinite() && !d.isNaN()
      .filter(Double::isFinite)
      .ifPresent(br::setOrganismQuantity);
}
|
BasicInterpreter { public static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantity) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setOrganismQuantity); } }
|
BasicInterpreter { public static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantity) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setOrganismQuantity); } }
|
BasicInterpreter { public static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantity) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setOrganismQuantity); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantity) .map(String::trim) .map(NumberParser::parseDouble) .filter(x -> !x.isInfinite() && !x.isNaN()) .ifPresent(br::setOrganismQuantity); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
// Verifies that an index name not matching the expected "name_timestamp" pattern is rejected.
//
// Bug fixed: the original combined @Test(expected = ...) with an ExpectedException rule and
// called thrown.expectMessage(...) AFTER the throwing call. Since swapIndexInAliases throws
// IllegalArgumentException immediately, the expectMessage line was dead code and the message
// was never actually verified. The rule must be configured BEFORE the statement under test;
// the redundant "expected" attribute is dropped so the rule alone owns the expectation.
@Test public void swapIndexInAliasWrongFormatIndexTest() { thrown.expect(IllegalArgumentException.class); thrown.expectMessage(CoreMatchers.containsString("index has to follow the pattern")); EsIndex.swapIndexInAliases(EsConfig.from(DUMMY_HOST), Collections.singleton("alias"), "index"); }
|
/**
 * Swaps {@code index} into each of the given aliases, using no extra indexes to remove and
 * the default search settings.
 *
 * @throws IllegalArgumentException if {@code aliases} is null/empty or {@code index} is
 *     null/empty
 */
public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) {
  // Validate inputs before delegating to the full-parameter overload.
  boolean hasAliases = aliases != null && !aliases.isEmpty();
  Preconditions.checkArgument(hasAliases, "alias is required");
  boolean hasIndex = !Strings.isNullOrEmpty(index);
  Preconditions.checkArgument(hasIndex, "index is required");
  swapIndexInAliases(
      config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings());
}
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
EsIndex { public static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index) { Preconditions.checkArgument(aliases != null && !aliases.isEmpty(), "alias is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); swapIndexInAliases( config, aliases, index, Collections.emptySet(), Searching.getDefaultSearchSettings()); } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
// Verifies that dwc:organismQuantityType is copied verbatim apart from trimming:
// " value " -> "value".
@Test public void interpretOrganismQuantityTypeTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.organismQuantityType.qualifiedName(), " value "); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretOrganismQuantityType(er, br); Assert.assertEquals("value", br.getOrganismQuantityType()); }
|
/**
 * Copies dwc:organismQuantityType onto the {@link BasicRecord}, trimmed.
 * The field is left unset when the term is absent.
 */
public static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br) {
  extractOptValue(er, DwcTerm.organismQuantityType)
      .map(raw -> raw.trim())
      .ifPresent(value -> br.setOrganismQuantityType(value));
}
|
BasicInterpreter { public static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantityType) .map(String::trim) .ifPresent(br::setOrganismQuantityType); } }
|
BasicInterpreter { public static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantityType) .map(String::trim) .ifPresent(br::setOrganismQuantityType); } }
|
BasicInterpreter { public static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantityType) .map(String::trim) .ifPresent(br::setOrganismQuantityType); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br) { extractOptValue(er, DwcTerm.organismQuantityType) .map(String::trim) .ifPresent(br::setOrganismQuantityType); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
// End-to-end check of relativeOrganismQuantity: organismQuantity (10) / sampleSizeValue (2)
// = 5.0. The ratio is computed only because sampleSizeUnit ("some type ") and
// organismQuantityType (" Some Type") match case-insensitively after trimming.
@Test public void interpretRelativeOrganismQuantityTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.sampleSizeValue.qualifiedName(), "2"); coreMap.put(DwcTerm.sampleSizeUnit.qualifiedName(), "some type "); coreMap.put(DwcTerm.organismQuantity.qualifiedName(), "10"); coreMap.put(DwcTerm.organismQuantityType.qualifiedName(), " Some Type"); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretOrganismQuantityType(er, br); BasicInterpreter.interpretOrganismQuantity(er, br); BasicInterpreter.interpretSampleSizeUnit(er, br); BasicInterpreter.interpretSampleSizeValue(er, br); BasicInterpreter.interpretRelativeOrganismQuantity(br); Assert.assertEquals(Double.valueOf(5d), br.getRelativeOrganismQuantity()); }
|
/**
 * Derives relativeOrganismQuantity = organismQuantity / sampleSizeValue, but only when both
 * measurements share the same unit/type (compared case-insensitively) and both values are
 * present. Non-finite ratios (0/0 -> NaN, x/0 -> Infinity) are discarded.
 */
public static void interpretRelativeOrganismQuantity(BasicRecord br) {
  String type = br.getOrganismQuantityType();
  String unit = br.getSampleSizeUnit();
  // The ratio is only meaningful when both measurements use the same unit/type.
  if (Strings.isNullOrEmpty(type) || Strings.isNullOrEmpty(unit) || !type.equalsIgnoreCase(unit)) {
    return;
  }
  Double quantity = br.getOrganismQuantity();
  Double sampleSize = br.getSampleSizeValue();
  if (quantity == null || sampleSize == null) {
    return;
  }
  double ratio = quantity / sampleSize;
  // Double.isFinite(d) == !Double.isNaN(d) && !Double.isInfinite(d)
  if (Double.isFinite(ratio)) {
    br.setRelativeOrganismQuantity(ratio);
  }
}
|
BasicInterpreter { public static void interpretRelativeOrganismQuantity(BasicRecord br) { if (!Strings.isNullOrEmpty(br.getOrganismQuantityType()) && !Strings.isNullOrEmpty(br.getSampleSizeUnit()) && br.getOrganismQuantityType().equalsIgnoreCase(br.getSampleSizeUnit())) { Double organismQuantity = br.getOrganismQuantity(); Double sampleSizeValue = br.getSampleSizeValue(); if (organismQuantity != null && sampleSizeValue != null) { double result = organismQuantity / sampleSizeValue; if (!Double.isNaN(result) && !Double.isInfinite(result)) { br.setRelativeOrganismQuantity(organismQuantity / sampleSizeValue); } } } } }
|
BasicInterpreter { public static void interpretRelativeOrganismQuantity(BasicRecord br) { if (!Strings.isNullOrEmpty(br.getOrganismQuantityType()) && !Strings.isNullOrEmpty(br.getSampleSizeUnit()) && br.getOrganismQuantityType().equalsIgnoreCase(br.getSampleSizeUnit())) { Double organismQuantity = br.getOrganismQuantity(); Double sampleSizeValue = br.getSampleSizeValue(); if (organismQuantity != null && sampleSizeValue != null) { double result = organismQuantity / sampleSizeValue; if (!Double.isNaN(result) && !Double.isInfinite(result)) { br.setRelativeOrganismQuantity(organismQuantity / sampleSizeValue); } } } } }
|
BasicInterpreter { public static void interpretRelativeOrganismQuantity(BasicRecord br) { if (!Strings.isNullOrEmpty(br.getOrganismQuantityType()) && !Strings.isNullOrEmpty(br.getSampleSizeUnit()) && br.getOrganismQuantityType().equalsIgnoreCase(br.getSampleSizeUnit())) { Double organismQuantity = br.getOrganismQuantity(); Double sampleSizeValue = br.getSampleSizeValue(); if (organismQuantity != null && sampleSizeValue != null) { double result = organismQuantity / sampleSizeValue; if (!Double.isNaN(result) && !Double.isInfinite(result)) { br.setRelativeOrganismQuantity(organismQuantity / sampleSizeValue); } } } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretRelativeOrganismQuantity(BasicRecord br) { if (!Strings.isNullOrEmpty(br.getOrganismQuantityType()) && !Strings.isNullOrEmpty(br.getSampleSizeUnit()) && br.getOrganismQuantityType().equalsIgnoreCase(br.getSampleSizeUnit())) { Double organismQuantity = br.getOrganismQuantity(); Double sampleSizeValue = br.getSampleSizeValue(); if (organismQuantity != null && sampleSizeValue != null) { double result = organismQuantity / sampleSizeValue; if (!Double.isNaN(result) && !Double.isInfinite(result)) { br.setRelativeOrganismQuantity(organismQuantity / sampleSizeValue); } } } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
// Verifies that a CC-BY-NC 4.0 license URI in dc:license is mapped to License.CC_BY_NC_4_0.
// NOTE(review): this row is corrupted — the first coreMap.put(...) call was truncated
// (the license URL string literal and its value argument are missing after "http:"),
// so it cannot compile as-is. Restore the dcterms:license key / CC-BY-NC-4.0 URL pair
// from version control — TODO confirm.
@Test public void interpretLicenseTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put( "http: ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretLicense(er, br); Assert.assertEquals(License.CC_BY_NC_4_0.name(), br.getLicense()); }
|
/**
 * Maps dc:license to a known {@code License} enum name on the {@link BasicRecord}.
 * Unrecognized or missing values fall back to {@code License.UNSPECIFIED}.
 * The field is always set, never left null.
 */
public static void interpretLicense(ExtendedRecord er, BasicRecord br) {
  String fallback = License.UNSPECIFIED.name();
  String license =
      extractOptValue(er, DcTerm.license)
          .map(raw -> getLicense(raw))
          .map(parsed -> parsed.name())
          .orElse(fallback);
  br.setLicense(license);
}
|
BasicInterpreter { public static void interpretLicense(ExtendedRecord er, BasicRecord br) { String license = extractOptValue(er, DcTerm.license) .map(BasicInterpreter::getLicense) .map(License::name) .orElse(License.UNSPECIFIED.name()); br.setLicense(license); } }
|
BasicInterpreter { public static void interpretLicense(ExtendedRecord er, BasicRecord br) { String license = extractOptValue(er, DcTerm.license) .map(BasicInterpreter::getLicense) .map(License::name) .orElse(License.UNSPECIFIED.name()); br.setLicense(license); } }
|
BasicInterpreter { public static void interpretLicense(ExtendedRecord er, BasicRecord br) { String license = extractOptValue(er, DcTerm.license) .map(BasicInterpreter::getLicense) .map(License::name) .orElse(License.UNSPECIFIED.name()); br.setLicense(license); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretLicense(ExtendedRecord er, BasicRecord br) { String license = extractOptValue(er, DcTerm.license) .map(BasicInterpreter::getLicense) .map(License::name) .orElse(License.UNSPECIFIED.name()); br.setLicense(license); } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
// Verifies the happy path: a valid enum name ("LIVING_SPECIMEN") parses cleanly and is
// stored as-is, with no issues added.
@Test public void interpretBasisOfRecordTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.basisOfRecord.qualifiedName(), "LIVING_SPECIMEN"); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretBasisOfRecord(er, br); Assert.assertEquals("LIVING_SPECIMEN", br.getBasisOfRecord()); }
|
/**
 * Resolves dwc:basisOfRecord against the controlled vocabulary.
 *
 * <p>On parse failure — or when the parser never invokes the callback at all (e.g. the term
 * is missing or blank) — the value falls back to {@code BasisOfRecord.UNKNOWN} and a
 * BASIS_OF_RECORD_INVALID issue is recorded. The field is always set, never left null.
 */
public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) {
  Function<ParseResult<BasisOfRecord>, BasicRecord> handler =
      result -> {
        if (result.isSuccessful()) {
          br.setBasisOfRecord(result.getPayload().name());
        } else {
          br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name());
          addIssue(br, BASIS_OF_RECORD_INVALID);
        }
        return br;
      };
  VocabularyParser.basisOfRecordParser().map(er, handler);

  // The handler may never run (missing term) — guarantee a non-empty value either way.
  String resolved = br.getBasisOfRecord();
  if (resolved == null || resolved.isEmpty()) {
    br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name());
    addIssue(br, BASIS_OF_RECORD_INVALID);
  }
}
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
@Test public void interpretBasisOfRecordNullTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.basisOfRecord.qualifiedName(), null); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretBasisOfRecord(er, br); Assert.assertEquals("UNKNOWN", br.getBasisOfRecord()); assertIssueSize(br, 1); assertIssue(OccurrenceIssue.BASIS_OF_RECORD_INVALID, br); }
|
public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
@Test public void interpretBasisOfRecordRubbishTest() { Map<String, String> coreMap = new HashMap<>(); coreMap.put(DwcTerm.basisOfRecord.qualifiedName(), "adwadaw"); ExtendedRecord er = ExtendedRecord.newBuilder().setId(ID).setCoreTerms(coreMap).build(); BasicRecord br = BasicRecord.newBuilder().setId(ID).build(); BasicInterpreter.interpretBasisOfRecord(er, br); Assert.assertEquals("UNKNOWN", br.getBasisOfRecord()); assertIssueSize(br, 1); assertIssue(OccurrenceIssue.BASIS_OF_RECORD_INVALID, br); }
|
public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); }
|
BasicInterpreter { public static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br) { Function<ParseResult<BasisOfRecord>, BasicRecord> fn = parseResult -> { if (parseResult.isSuccessful()) { br.setBasisOfRecord(parseResult.getPayload().name()); } else { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } return br; }; VocabularyParser.basisOfRecordParser().map(er, fn); if (br.getBasisOfRecord() == null || br.getBasisOfRecord().isEmpty()) { br.setBasisOfRecord(BasisOfRecord.UNKNOWN.name()); addIssue(br, BASIS_OF_RECORD_INVALID); } } static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService,
boolean isTripletValid,
boolean isOccurrenceIdValid,
boolean useExtendedRecordId,
BiConsumer<ExtendedRecord, BasicRecord> gbifIdFn); static BiConsumer<ExtendedRecord, BasicRecord> interpretGbifId(
HBaseLockingKeyService keygenService, boolean isTripletValid, boolean isOccurrenceIdValid); static BiConsumer<ExtendedRecord, BasicRecord> interpretCopyGbifId(); static void interpretIndividualCount(ExtendedRecord er, BasicRecord br); static void interpretTypeStatus(ExtendedRecord er, BasicRecord br); static void interpretLifeStage(ExtendedRecord er, BasicRecord br); static void interpretEstablishmentMeans(ExtendedRecord er, BasicRecord br); static void interpretSex(ExtendedRecord er, BasicRecord br); static void interpretBasisOfRecord(ExtendedRecord er, BasicRecord br); static void interpretReferences(ExtendedRecord er, BasicRecord br); static void interpretTypifiedName(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeValue(ExtendedRecord er, BasicRecord br); static void interpretSampleSizeUnit(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantity(ExtendedRecord er, BasicRecord br); static void interpretOrganismQuantityType(ExtendedRecord er, BasicRecord br); static void interpretRelativeOrganismQuantity(BasicRecord br); static void interpretLicense(ExtendedRecord er, BasicRecord br); static void interpretIdentifiedByIds(ExtendedRecord er, BasicRecord br); static void interpretRecordedByIds(ExtendedRecord er, BasicRecord br); static BiConsumer<ExtendedRecord, BasicRecord> interpretOccurrenceStatus(
KeyValueStore<String, OccurrenceStatus> occStatusKvStore); static final String GBIF_ID_INVALID; }
|
@Test public void testYearMonth() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "1879-10"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertEquals("1879-10", tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(1879, tr.getYear().intValue()); assertEquals(10, tr.getMonth().intValue()); assertNull(tr.getDay()); assertEquals(0, tr.getIssues().getIssueList().size()); }
|
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test public void testYear() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "1879"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertEquals("1879", tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(1879, tr.getYear().intValue()); assertNull(tr.getMonth()); assertNull(tr.getDay()); assertEquals(0, tr.getIssues().getIssueList().size()); }
|
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test public void testLikelyIdentified() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.year.qualifiedName(), "1879"); map.put(DwcTerm.month.qualifiedName(), "11 "); map.put(DwcTerm.day.qualifiedName(), "1"); map.put(DwcTerm.eventDate.qualifiedName(), "1.11.1879"); map.put(DcTerm.modified.qualifiedName(), "2014-01-11"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); er.getCoreTerms().put(DwcTerm.dateIdentified.qualifiedName(), "1987-01-31"); interpreter.interpretDateIdentified(er, tr); assertEquals(0, tr.getIssues().getIssueList().size()); er.getCoreTerms().put(DwcTerm.dateIdentified.qualifiedName(), "1787-03-27"); interpreter.interpretDateIdentified(er, tr); assertEquals(0, tr.getIssues().getIssueList().size()); er.getCoreTerms().put(DwcTerm.dateIdentified.qualifiedName(), "2014-01-11"); interpreter.interpretDateIdentified(er, tr); assertEquals(0, tr.getIssues().getIssueList().size()); er.getCoreTerms().put(DwcTerm.dateIdentified.qualifiedName(), "1997"); interpreter.interpretDateIdentified(er, tr); assertEquals(0, tr.getIssues().getIssueList().size()); Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); er.getCoreTerms() .put(DwcTerm.dateIdentified.qualifiedName(), (cal.get(Calendar.YEAR) + 1) + "-01-11"); interpreter.interpretDateIdentified(er, tr); assertEquals(1, tr.getIssues().getIssueList().size()); assertEquals( OccurrenceIssue.IDENTIFIED_DATE_UNLIKELY.name(), tr.getIssues().getIssueList().iterator().next()); er.getCoreTerms().put(DwcTerm.dateIdentified.qualifiedName(), "1599-01-11"); interpreter.interpretDateIdentified(er, tr); assertEquals(1, tr.getIssues().getIssueList().size()); assertEquals( OccurrenceIssue.IDENTIFIED_DATE_UNLIKELY.name(), tr.getIssues().getIssueList().iterator().next()); }
|
public void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DwcTerm.dateIdentified)) { String value = extractValue(er, DwcTerm.dateIdentified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validRecordedDateRange = Range.closed(MIN_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validRecordedDateRange, OccurrenceIssue.IDENTIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setDateIdentified); } addIssueSet(tr, parsed.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DwcTerm.dateIdentified)) { String value = extractValue(er, DwcTerm.dateIdentified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validRecordedDateRange = Range.closed(MIN_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validRecordedDateRange, OccurrenceIssue.IDENTIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setDateIdentified); } addIssueSet(tr, parsed.getIssues()); } } }
|
TemporalInterpreter implements Serializable { public void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DwcTerm.dateIdentified)) { String value = extractValue(er, DwcTerm.dateIdentified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validRecordedDateRange = Range.closed(MIN_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validRecordedDateRange, OccurrenceIssue.IDENTIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setDateIdentified); } addIssueSet(tr, parsed.getIssues()); } } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DwcTerm.dateIdentified)) { String value = extractValue(er, DwcTerm.dateIdentified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validRecordedDateRange = Range.closed(MIN_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validRecordedDateRange, OccurrenceIssue.IDENTIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setDateIdentified); } addIssueSet(tr, parsed.getIssues()); } } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DwcTerm.dateIdentified)) { String value = extractValue(er, DwcTerm.dateIdentified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validRecordedDateRange = Range.closed(MIN_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validRecordedDateRange, OccurrenceIssue.IDENTIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setDateIdentified); } addIssueSet(tr, parsed.getIssues()); } } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test public void testLikelyModified() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.year.qualifiedName(), "1879"); map.put(DwcTerm.month.qualifiedName(), "11 "); map.put(DwcTerm.day.qualifiedName(), "1"); map.put(DwcTerm.eventDate.qualifiedName(), "1.11.1879"); map.put(DwcTerm.dateIdentified.qualifiedName(), "1987-01-31"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); er.getCoreTerms().put(DcTerm.modified.qualifiedName(), "2014-01-11"); interpreter.interpretModified(er, tr); assertEquals(0, tr.getIssues().getIssueList().size()); tr = TemporalRecord.newBuilder().setId("1").build(); Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); er.getCoreTerms().put(DcTerm.modified.qualifiedName(), (cal.get(Calendar.YEAR) + 1) + "-01-11"); interpreter.interpretModified(er, tr); assertEquals(1, tr.getIssues().getIssueList().size()); assertEquals( OccurrenceIssue.MODIFIED_DATE_UNLIKELY.name(), tr.getIssues().getIssueList().iterator().next()); tr = TemporalRecord.newBuilder().setId("1").build(); er.getCoreTerms().put(DcTerm.modified.qualifiedName(), "1969-12-31"); interpreter.interpretModified(er, tr); assertEquals(1, tr.getIssues().getIssueList().size()); assertEquals( OccurrenceIssue.MODIFIED_DATE_UNLIKELY.name(), tr.getIssues().getIssueList().iterator().next()); tr = TemporalRecord.newBuilder().setId("1").build(); er.getCoreTerms().put(DcTerm.modified.qualifiedName(), "2018-10-15 16:21:48"); interpreter.interpretModified(er, tr); assertEquals(0, tr.getIssues().getIssueList().size()); assertEquals("2018-10-15T16:21:48", tr.getModified()); }
|
public void interpretModified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DcTerm.modified)) { String value = extractValue(er, DcTerm.modified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validModifiedDateRange = Range.closed(MIN_EPOCH_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validModifiedDateRange, OccurrenceIssue.MODIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setModified); } addIssueSet(tr, parsed.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretModified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DcTerm.modified)) { String value = extractValue(er, DcTerm.modified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validModifiedDateRange = Range.closed(MIN_EPOCH_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validModifiedDateRange, OccurrenceIssue.MODIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setModified); } addIssueSet(tr, parsed.getIssues()); } } }
|
TemporalInterpreter implements Serializable { public void interpretModified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DcTerm.modified)) { String value = extractValue(er, DcTerm.modified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validModifiedDateRange = Range.closed(MIN_EPOCH_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validModifiedDateRange, OccurrenceIssue.MODIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setModified); } addIssueSet(tr, parsed.getIssues()); } } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretModified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DcTerm.modified)) { String value = extractValue(er, DcTerm.modified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validModifiedDateRange = Range.closed(MIN_EPOCH_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validModifiedDateRange, OccurrenceIssue.MODIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setModified); } addIssueSet(tr, parsed.getIssues()); } } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretModified(ExtendedRecord er, TemporalRecord tr) { if (hasValue(er, DcTerm.modified)) { String value = extractValue(er, DcTerm.modified); String normalizedValue = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(value)).orElse(value); LocalDate upperBound = LocalDate.now().plusDays(1); Range<LocalDate> validModifiedDateRange = Range.closed(MIN_EPOCH_LOCAL_DATE, upperBound); OccurrenceParseResult<TemporalAccessor> parsed = temporalParser.parseLocalDate( normalizedValue, validModifiedDateRange, OccurrenceIssue.MODIFIED_DATE_UNLIKELY); if (parsed.isSuccessful()) { Optional.ofNullable(parsed.getPayload()) .map(TemporalAccessor::toString) .ifPresent(tr::setModified); } addIssueSet(tr, parsed.getIssues()); } } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test(expected = IllegalArgumentException.class) public void countIndexDocumentsNullIndexTest() { EsIndex.countDocuments(EsConfig.from(DUMMY_HOST), null); thrown.expectMessage("index is required"); }
|
public static long countDocuments(EsConfig config, String index) { Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); log.info("Counting documents from index {}", index); try (EsClient esClient = EsClient.from(config)) { return EsService.countIndexDocuments(esClient, index); } }
|
EsIndex { public static long countDocuments(EsConfig config, String index) { Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); log.info("Counting documents from index {}", index); try (EsClient esClient = EsClient.from(config)) { return EsService.countIndexDocuments(esClient, index); } } }
|
EsIndex { public static long countDocuments(EsConfig config, String index) { Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); log.info("Counting documents from index {}", index); try (EsClient esClient = EsClient.from(config)) { return EsService.countIndexDocuments(esClient, index); } } }
|
EsIndex { public static long countDocuments(EsConfig config, String index) { Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); log.info("Counting documents from index {}", index); try (EsClient esClient = EsClient.from(config)) { return EsService.countIndexDocuments(esClient, index); } } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
EsIndex { public static long countDocuments(EsConfig config, String index) { Preconditions.checkArgument(!Strings.isNullOrEmpty(index), "index is required"); log.info("Counting documents from index {}", index); try (EsClient esClient = EsClient.from(config)) { return EsService.countIndexDocuments(esClient, index); } } static String createIndex(EsConfig config, IndexParams indexParams); static Optional<String> createIndexIfNotExists(EsConfig config, IndexParams indexParams); static void swapIndexInAliases(EsConfig config, Set<String> aliases, String index); static void swapIndexInAliases(
EsConfig config,
Set<String> aliases,
String index,
Set<String> extraIdxToRemove,
Map<String, String> settings); static long countDocuments(EsConfig config, String index); static Set<String> deleteRecordsByDatasetId(
EsConfig config,
String[] aliases,
String datasetKey,
Predicate<String> indexesToDelete,
int timeoutSec,
int attempts); static Set<String> findDatasetIndexesInAliases(
EsConfig config, String[] aliases, String datasetKey); }
|
@Test public void testLikelyRecorded() { Map<String, String> map = new HashMap<>(); Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); map.put(DwcTerm.eventDate.qualifiedName(), "24.12." + (cal.get(Calendar.YEAR) + 1)); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); interpreter.interpretTemporal(er, tr); assertEquals(1, tr.getIssues().getIssueList().size()); assertEquals( OccurrenceIssue.RECORDED_DATE_UNLIKELY.name(), tr.getIssues().getIssueList().iterator().next()); }
|
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test public void testYearMonthRangeInverted() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "2005-11/2004-02"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertEquals(2005, tr.getYear().intValue()); assertEquals(11, tr.getMonth().intValue()); assertNull(tr.getDay()); assertEquals("2005-11", tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(1, tr.getIssues().getIssueList().size()); }
|
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test public void testYearMonthRange() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "2004-11/2005-02"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertEquals(2004, tr.getYear().intValue()); assertEquals(11, tr.getMonth().intValue()); assertEquals("2004-11", tr.getEventDate().getGte()); assertEquals("2005-02", tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
|
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test public void testIsoYmRange() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "2004-02/12"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertEquals(2004, tr.getYear().intValue()); assertEquals(2, tr.getMonth().intValue()); assertEquals("2004-02", tr.getEventDate().getGte()); assertEquals("2004-12", tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
|
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
@Test public void testIsoTimeWithoutT() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "2011-09-13 09:29:08"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertEquals(2011, tr.getYear().intValue()); assertEquals(9, tr.getMonth().intValue()); assertEquals(13, tr.getDay().intValue()); assertEquals("2011-09-13T09:29:08", tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
|
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter(
List<DateComponentOrdering> orderings,
SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.